Skip to content

Commit

Permalink
style: change code style
Browse files — browse the repository at this point in the history
  • Loading branch information
zuisong committed Feb 20, 2025
1 parent 3de240b commit 9eaa723
Show file tree
Hide file tree
Showing 5 changed files with 60 additions and 81 deletions.
28 changes: 12 additions & 16 deletions dist/main_bun.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -105,22 +105,18 @@ function genModel(req) {
const model = GeminiModel.modelMapping(req.model);
let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
let responseMimeType;
let responseSchema;
switch (req.response_format?.type) {
case "json_object":
responseMimeType = "application/json";
break;
case "json_schema":
responseMimeType = "application/json";
responseSchema = req.response_format.json_schema.schema;
break;
case "text":
responseMimeType = "text/plain";
break;
default:
break;
}
const [responseMimeType, responseSchema] = (() => {
switch (req.response_format?.type) {
case "json_object":
return ["application/json", void 0];
case "json_schema":
return ["application/json", req.response_format.json_schema.schema];
case "text":
return ["text/plain", void 0];
default:
return [void 0, void 0];
}
})();
const generateContentRequest = {
contents: openAiMessageToGeminiMessage(req.messages),
generationConfig: {
Expand Down
28 changes: 12 additions & 16 deletions dist/main_cloudflare-workers.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -105,22 +105,18 @@ function genModel(req) {
const model = GeminiModel.modelMapping(req.model);
let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
let responseMimeType;
let responseSchema;
switch (req.response_format?.type) {
case "json_object":
responseMimeType = "application/json";
break;
case "json_schema":
responseMimeType = "application/json";
responseSchema = req.response_format.json_schema.schema;
break;
case "text":
responseMimeType = "text/plain";
break;
default:
break;
}
const [responseMimeType, responseSchema] = (() => {
switch (req.response_format?.type) {
case "json_object":
return ["application/json", void 0];
case "json_schema":
return ["application/json", req.response_format.json_schema.schema];
case "text":
return ["text/plain", void 0];
default:
return [void 0, void 0];
}
})();
const generateContentRequest = {
contents: openAiMessageToGeminiMessage(req.messages),
generationConfig: {
Expand Down
28 changes: 12 additions & 16 deletions dist/main_deno.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -105,22 +105,18 @@ function genModel(req) {
const model = GeminiModel.modelMapping(req.model);
let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
let responseMimeType;
let responseSchema;
switch (req.response_format?.type) {
case "json_object":
responseMimeType = "application/json";
break;
case "json_schema":
responseMimeType = "application/json";
responseSchema = req.response_format.json_schema.schema;
break;
case "text":
responseMimeType = "text/plain";
break;
default:
break;
}
const [responseMimeType, responseSchema] = (() => {
switch (req.response_format?.type) {
case "json_object":
return ["application/json", void 0];
case "json_schema":
return ["application/json", req.response_format.json_schema.schema];
case "text":
return ["text/plain", void 0];
default:
return [void 0, void 0];
}
})();
const generateContentRequest = {
contents: openAiMessageToGeminiMessage(req.messages),
generationConfig: {
Expand Down
28 changes: 12 additions & 16 deletions dist/main_node.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -563,22 +563,18 @@ function genModel(req) {
const model = GeminiModel.modelMapping(req.model);
let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
let responseMimeType;
let responseSchema;
switch (req.response_format?.type) {
case "json_object":
responseMimeType = "application/json";
break;
case "json_schema":
responseMimeType = "application/json";
responseSchema = req.response_format.json_schema.schema;
break;
case "text":
responseMimeType = "text/plain";
break;
default:
break;
}
const [responseMimeType, responseSchema] = (() => {
switch (req.response_format?.type) {
case "json_object":
return ["application/json", void 0];
case "json_schema":
return ["application/json", req.response_format.json_schema.schema];
case "text":
return ["text/plain", void 0];
default:
return [void 0, void 0];
}
})();
const generateContentRequest = {
contents: openAiMessageToGeminiMessage(req.messages),
generationConfig: {
Expand Down
29 changes: 12 additions & 17 deletions src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,23 +78,18 @@ export function genModel(req: OpenAI.Chat.ChatCompletionCreateParams): [GeminiMo

functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })))

let responseMimeType: string | undefined
let responseSchema: JsonSchema | undefined

switch (req.response_format?.type) {
case "json_object":
responseMimeType = "application/json"
break
case "json_schema":
responseMimeType = "application/json"
responseSchema = req.response_format.json_schema.schema
break
case "text":
responseMimeType = "text/plain"
break
default:
break
}
const [responseMimeType, responseSchema] = (() => {
switch (req.response_format?.type) {
case "json_object":
return ["application/json", undefined]
case "json_schema":
return ["application/json", req.response_format.json_schema.schema satisfies JsonSchema | undefined]
case "text":
return ["text/plain", undefined]
default:
return [undefined, undefined]
}
})()

const generateContentRequest: GenerateContentRequest = {
contents: openAiMessageToGeminiMessage(req.messages),
Expand Down

0 comments on commit 9eaa723

Please sign in to comment.