Skip to content

Commit

Permalink
🚨 Commit Build Artifact from GitHub Actions
Browse files Browse the repository at this point in the history
  • Loading branch information
github-actions[bot] authored and actions-user committed Jan 26, 2024
1 parent c98335b commit ed955a7
Show file tree
Hide file tree
Showing 4 changed files with 108 additions and 32 deletions.
35 changes: 27 additions & 8 deletions dist/main_bun.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -1331,14 +1331,19 @@ var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
LogLevel2[LogLevel2["debug"] = 7] = "debug";
return LogLevel2;
})(LogLevel || {});
// Logging threshold: levels with a numeric value above this are suppressed.
// 3 corresponds to LogLevel.error (see the LogLevel enum above).
var currentlevel = 3 /* error */;
// Build a logger object with one method per LogLevel name (log.error, log.debug, ...).
// Each method prefixes the message with `id` and JSON-serializes the payload so
// objects are logged readably instead of as "[object Object]".
// NOTE: the pasted diff contained both the old and new call lines here; keeping
// both would emit every message twice — only the serialized call is kept.
function gen_logger(id) {
  return mapValues(LogLevel, (value, name) => {
    return (msg) => {
      outFunc(name, value, `${id} ${JSON.stringify(msg)}`);
    };
  });
}
// Emit a single timestamped log line, dropping messages above the threshold.
// FIX: `Date.now()` returns a number, so `.toLocaleString()` produced the epoch
// milliseconds with digit grouping (e.g. "1,706,256,000,000"), not a timestamp.
// `new Date().toLocaleString()` yields the intended locale date/time string.
function outFunc(levelName, levelValue, msg) {
  if (levelValue > currentlevel) {
    return;
  }
  console.log(`${new Date().toLocaleString()} ${levelName} ${msg}`);
}
function mapValues(obj, fn) {
return Object.fromEntries(
Expand Down Expand Up @@ -1976,7 +1981,16 @@ function genModel(genAi, req) {
maxOutputTokens: req.max_tokens ?? void 0,
temperature: req.temperature ?? void 0,
topP: req.top_p ?? void 0
}
},
safetySettings: [
HarmCategory.HARM_CATEGORY_HATE_SPEECH,
HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
HarmCategory.HARM_CATEGORY_HARASSMENT
].map((category) => ({
category,
threshold: HarmBlockThreshold.BLOCK_NONE
}))
});
return model;
}
Expand All @@ -1987,8 +2001,12 @@ var nonStreamingChatProxyHandler = async (c, req, genAi) => {
const model = genModel(genAi, req);
const geminiResp = await model.generateContent({
contents: openAiMessageToGeminiMessage(req.messages)
}).then((it) => it.response.text()).catch((err) => err?.message ?? err.toString());
log2.debug(JSON.stringify(geminiResp));
}).then((it) => it.response.text()).catch((err) => {
log2.error(req);
log2.error(err);
return err?.message ?? err.toString();
});
log2.debug(geminiResp);
const resp = {
id: "chatcmpl-abc123",
object: "chat.completion",
Expand Down Expand Up @@ -2063,12 +2081,13 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
data: JSON.stringify(genOpenAiResp("", true))
});
const geminiResult = (await response).text();
log2.info(JSON.stringify(geminiResult));
log2.info(geminiResult);
}).catch(async (e) => {
await sseStream.writeSSE({
data: JSON.stringify(genOpenAiResp(e.toString(), true))
});
log2.info(e);
log2.error(req);
log2.error(e);
});
await sseStream.writeSSE({ data: "[DONE]" });
await sseStream.close();
Expand All @@ -2079,7 +2098,7 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
var chatProxyHandler = async (c) => {
const log2 = c.var.log;
const req = await c.req.json();
log2.debug(JSON.stringify(req));
log2.debug(req);
const headers = c.req.header();
const apiKey = getToken(headers);
if (apiKey == null) {
Expand Down
35 changes: 27 additions & 8 deletions dist/main_cloudflare-workers.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -1331,14 +1331,19 @@ var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
LogLevel2[LogLevel2["debug"] = 7] = "debug";
return LogLevel2;
})(LogLevel || {});
// Logging threshold: levels with a numeric value above this are suppressed.
// 3 corresponds to LogLevel.error (see the LogLevel enum above).
var currentlevel = 3 /* error */;
// Build a logger object with one method per LogLevel name (log.error, log.debug, ...).
// Each method prefixes the message with `id` and JSON-serializes the payload so
// objects are logged readably instead of as "[object Object]".
// NOTE: the pasted diff contained both the old and new call lines here; keeping
// both would emit every message twice — only the serialized call is kept.
function gen_logger(id) {
  return mapValues(LogLevel, (value, name) => {
    return (msg) => {
      outFunc(name, value, `${id} ${JSON.stringify(msg)}`);
    };
  });
}
// Emit a single timestamped log line, dropping messages above the threshold.
// FIX: `Date.now()` returns a number, so `.toLocaleString()` produced the epoch
// milliseconds with digit grouping (e.g. "1,706,256,000,000"), not a timestamp.
// `new Date().toLocaleString()` yields the intended locale date/time string.
function outFunc(levelName, levelValue, msg) {
  if (levelValue > currentlevel) {
    return;
  }
  console.log(`${new Date().toLocaleString()} ${levelName} ${msg}`);
}
function mapValues(obj, fn) {
return Object.fromEntries(
Expand Down Expand Up @@ -1976,7 +1981,16 @@ function genModel(genAi, req) {
maxOutputTokens: req.max_tokens ?? void 0,
temperature: req.temperature ?? void 0,
topP: req.top_p ?? void 0
}
},
safetySettings: [
HarmCategory.HARM_CATEGORY_HATE_SPEECH,
HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
HarmCategory.HARM_CATEGORY_HARASSMENT
].map((category) => ({
category,
threshold: HarmBlockThreshold.BLOCK_NONE
}))
});
return model;
}
Expand All @@ -1987,8 +2001,12 @@ var nonStreamingChatProxyHandler = async (c, req, genAi) => {
const model = genModel(genAi, req);
const geminiResp = await model.generateContent({
contents: openAiMessageToGeminiMessage(req.messages)
}).then((it) => it.response.text()).catch((err) => err?.message ?? err.toString());
log2.debug(JSON.stringify(geminiResp));
}).then((it) => it.response.text()).catch((err) => {
log2.error(req);
log2.error(err);
return err?.message ?? err.toString();
});
log2.debug(geminiResp);
const resp = {
id: "chatcmpl-abc123",
object: "chat.completion",
Expand Down Expand Up @@ -2063,12 +2081,13 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
data: JSON.stringify(genOpenAiResp("", true))
});
const geminiResult = (await response).text();
log2.info(JSON.stringify(geminiResult));
log2.info(geminiResult);
}).catch(async (e) => {
await sseStream.writeSSE({
data: JSON.stringify(genOpenAiResp(e.toString(), true))
});
log2.info(e);
log2.error(req);
log2.error(e);
});
await sseStream.writeSSE({ data: "[DONE]" });
await sseStream.close();
Expand All @@ -2079,7 +2098,7 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
var chatProxyHandler = async (c) => {
const log2 = c.var.log;
const req = await c.req.json();
log2.debug(JSON.stringify(req));
log2.debug(req);
const headers = c.req.header();
const apiKey = getToken(headers);
if (apiKey == null) {
Expand Down
35 changes: 27 additions & 8 deletions dist/main_deno.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -1331,14 +1331,19 @@ var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
LogLevel2[LogLevel2["debug"] = 7] = "debug";
return LogLevel2;
})(LogLevel || {});
// Logging threshold: levels with a numeric value above this are suppressed.
// 3 corresponds to LogLevel.error (see the LogLevel enum above).
var currentlevel = 3 /* error */;
// Build a logger object with one method per LogLevel name (log.error, log.debug, ...).
// Each method prefixes the message with `id` and JSON-serializes the payload so
// objects are logged readably instead of as "[object Object]".
// NOTE: the pasted diff contained both the old and new call lines here; keeping
// both would emit every message twice — only the serialized call is kept.
function gen_logger(id) {
  return mapValues(LogLevel, (value, name) => {
    return (msg) => {
      outFunc(name, value, `${id} ${JSON.stringify(msg)}`);
    };
  });
}
// Emit a single timestamped log line, dropping messages above the threshold.
// FIX: `Date.now()` returns a number, so `.toLocaleString()` produced the epoch
// milliseconds with digit grouping (e.g. "1,706,256,000,000"), not a timestamp.
// `new Date().toLocaleString()` yields the intended locale date/time string.
function outFunc(levelName, levelValue, msg) {
  if (levelValue > currentlevel) {
    return;
  }
  console.log(`${new Date().toLocaleString()} ${levelName} ${msg}`);
}
function mapValues(obj, fn) {
return Object.fromEntries(
Expand Down Expand Up @@ -1976,7 +1981,16 @@ function genModel(genAi, req) {
maxOutputTokens: req.max_tokens ?? void 0,
temperature: req.temperature ?? void 0,
topP: req.top_p ?? void 0
}
},
safetySettings: [
HarmCategory.HARM_CATEGORY_HATE_SPEECH,
HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
HarmCategory.HARM_CATEGORY_HARASSMENT
].map((category) => ({
category,
threshold: HarmBlockThreshold.BLOCK_NONE
}))
});
return model;
}
Expand All @@ -1987,8 +2001,12 @@ var nonStreamingChatProxyHandler = async (c, req, genAi) => {
const model = genModel(genAi, req);
const geminiResp = await model.generateContent({
contents: openAiMessageToGeminiMessage(req.messages)
}).then((it) => it.response.text()).catch((err) => err?.message ?? err.toString());
log2.debug(JSON.stringify(geminiResp));
}).then((it) => it.response.text()).catch((err) => {
log2.error(req);
log2.error(err);
return err?.message ?? err.toString();
});
log2.debug(geminiResp);
const resp = {
id: "chatcmpl-abc123",
object: "chat.completion",
Expand Down Expand Up @@ -2063,12 +2081,13 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
data: JSON.stringify(genOpenAiResp("", true))
});
const geminiResult = (await response).text();
log2.info(JSON.stringify(geminiResult));
log2.info(geminiResult);
}).catch(async (e) => {
await sseStream.writeSSE({
data: JSON.stringify(genOpenAiResp(e.toString(), true))
});
log2.info(e);
log2.error(req);
log2.error(e);
});
await sseStream.writeSSE({ data: "[DONE]" });
await sseStream.close();
Expand All @@ -2079,7 +2098,7 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
var chatProxyHandler = async (c) => {
const log2 = c.var.log;
const req = await c.req.json();
log2.debug(JSON.stringify(req));
log2.debug(req);
const headers = c.req.header();
const apiKey = getToken(headers);
if (apiKey == null) {
Expand Down
35 changes: 27 additions & 8 deletions dist/main_node.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -1642,14 +1642,19 @@ var LogLevel = /* @__PURE__ */ ((LogLevel2) => {
LogLevel2[LogLevel2["debug"] = 7] = "debug";
return LogLevel2;
})(LogLevel || {});
// Logging threshold: levels with a numeric value above this are suppressed.
// 3 corresponds to LogLevel.error (see the LogLevel enum above).
var currentlevel = 3 /* error */;
// Build a logger object with one method per LogLevel name (log.error, log.debug, ...).
// Each method prefixes the message with `id` and JSON-serializes the payload so
// objects are logged readably instead of as "[object Object]".
// NOTE: the pasted diff contained both the old and new call lines here; keeping
// both would emit every message twice — only the serialized call is kept.
function gen_logger(id) {
  return mapValues(LogLevel, (value, name) => {
    return (msg) => {
      outFunc(name, value, `${id} ${JSON.stringify(msg)}`);
    };
  });
}
// Emit a single timestamped log line, dropping messages above the threshold.
// FIX: `Date.now()` returns a number, so `.toLocaleString()` produced the epoch
// milliseconds with digit grouping (e.g. "1,706,256,000,000"), not a timestamp.
// `new Date().toLocaleString()` yields the intended locale date/time string.
function outFunc(levelName, levelValue, msg) {
  if (levelValue > currentlevel) {
    return;
  }
  console.log(`${new Date().toLocaleString()} ${levelName} ${msg}`);
}
function mapValues(obj, fn) {
return Object.fromEntries(
Expand Down Expand Up @@ -2287,7 +2292,16 @@ function genModel(genAi, req) {
maxOutputTokens: req.max_tokens ?? void 0,
temperature: req.temperature ?? void 0,
topP: req.top_p ?? void 0
}
},
safetySettings: [
HarmCategory.HARM_CATEGORY_HATE_SPEECH,
HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
HarmCategory.HARM_CATEGORY_HARASSMENT
].map((category) => ({
category,
threshold: HarmBlockThreshold.BLOCK_NONE
}))
});
return model;
}
Expand All @@ -2298,8 +2312,12 @@ var nonStreamingChatProxyHandler = async (c, req, genAi) => {
const model = genModel(genAi, req);
const geminiResp = await model.generateContent({
contents: openAiMessageToGeminiMessage(req.messages)
}).then((it) => it.response.text()).catch((err) => err?.message ?? err.toString());
log2.debug(JSON.stringify(geminiResp));
}).then((it) => it.response.text()).catch((err) => {
log2.error(req);
log2.error(err);
return err?.message ?? err.toString();
});
log2.debug(geminiResp);
const resp = {
id: "chatcmpl-abc123",
object: "chat.completion",
Expand Down Expand Up @@ -2374,12 +2392,13 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
data: JSON.stringify(genOpenAiResp("", true))
});
const geminiResult = (await response).text();
log2.info(JSON.stringify(geminiResult));
log2.info(geminiResult);
}).catch(async (e) => {
await sseStream.writeSSE({
data: JSON.stringify(genOpenAiResp(e.toString(), true))
});
log2.info(e);
log2.error(req);
log2.error(e);
});
await sseStream.writeSSE({ data: "[DONE]" });
await sseStream.close();
Expand All @@ -2390,7 +2409,7 @@ var streamingChatProxyHandler = async (c, req, genAi) => {
var chatProxyHandler = async (c) => {
const log2 = c.var.log;
const req = await c.req.json();
log2.debug(JSON.stringify(req));
log2.debug(req);
const headers = c.req.header();
const apiKey = getToken(headers);
if (apiKey == null) {
Expand Down

0 comments on commit ed955a7

Please sign in to comment.