Showing 22 changed files with 2,446 additions and 149 deletions.
```diff
@@ -1,19 +1,22 @@
-export type ChatMessage = {
-  role: "user" | "assistant" | "system";
-  content: string;
-};
+import { OpenAI } from "mongodb-rag-core/openai";
 
-export function chatMessage<T extends ChatMessage>(t: T) {
+export function chatMessage<T extends OpenAI.ChatCompletionMessageParam>(t: T) {
   return t;
 }
 
-export const systemMessage = (content: string) =>
-  chatMessage({ role: "system", content });
+export const systemMessage = (
+  args: Omit<OpenAI.ChatCompletionSystemMessageParam, "role">
+): OpenAI.ChatCompletionSystemMessageParam =>
+  chatMessage({ role: "system", ...args });
 
-export const userMessage = (content: string) =>
-  chatMessage({ role: "user", content });
+export const userMessage = (
+  args: Omit<OpenAI.ChatCompletionUserMessageParam, "role">
+): OpenAI.ChatCompletionUserMessageParam =>
+  chatMessage({ role: "user", ...args });
 
-export const assistantMessage = (content: string) =>
-  chatMessage({ role: "assistant", content });
+export const assistantMessage = (
+  args: Omit<OpenAI.ChatCompletionAssistantMessageParam, "role">
+): OpenAI.ChatCompletionAssistantMessageParam =>
+  chatMessage({ role: "assistant", ...args });
 
 export * from "./makeGenerateChatCompletion";
```
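The practical effect of this hunk: `systemMessage`, `userMessage`, and `assistantMessage` now take the full OpenAI message parameter shape (minus `role`) rather than a bare content string, so callers can pass extra fields like `name` and the results type-check against `OpenAI.ChatCompletionMessageParam`. A minimal usage sketch, with an illustrative import path that is not part of this diff:

```ts
// Illustrative import path; these helpers live in the chat module edited above.
import { systemMessage, userMessage, assistantMessage } from "./chat";

// Callers now pass an object instead of a string, so optional OpenAI fields
// such as `name` can ride along with `content`.
const messages = [
  systemMessage({ content: "You convert Jira issues into prompt-response pairs." }),
  userMessage({ content: "Summarize this issue.", name: "reporter" }),
  assistantMessage({ content: "The issue reports a crash on startup." }),
];
```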
packages/mongodb-artifact-generator/src/chat/makeGeneratePrompts.ts (new file: 101 additions, 0 deletions)
```ts
import { OpenAI } from "mongodb-rag-core/openai";
import { FormattedJiraIssueWithSummary } from "../commands/generateJiraPromptResponse";
import { RunLogger } from "../runlogger";
import {
  asJsonSchema,
  formatFewShotExamples,
  formatMessagesForArtifact,
  PromptExamplePair,
} from "./utils";
import { z } from "zod";
import { stripIndents } from "common-tags";

export type GeneratedPrompts = z.infer<typeof GeneratedPrompts>;
export const GeneratedPrompts = z.object({
  prompts: z.array(z.string()).min(1).max(4),
});

const generatePromptsTool: OpenAI.FunctionDefinition = {
  name: "generatePrompts",
  description:
    "A list of generated example prompts that would elicit a given response.",
  parameters: asJsonSchema(GeneratedPrompts),
};

export type MakeGeneratePromptsArgs = {
  openAi: {
    client: OpenAI;
    model: string;
  };
  logger?: RunLogger;
  directions?: string;
  examples?: PromptExamplePair[];
};

export type GeneratePromptsArgs = FormattedJiraIssueWithSummary;

export function makeGeneratePrompts({
  openAi,
  logger,
  directions,
  examples = [],
}: MakeGeneratePromptsArgs) {
  return async function generatePrompts({
    issue,
    summary,
  }: GeneratePromptsArgs) {
    const messages = [
      {
        role: "system",
        content: [
          `Your task is to convert a provided input into a prompt-response format. The format mimics a conversation where one participant sends a prompt and the other replies with a response.`,
          directions ?? "",
        ].join("\n"),
      },
      ...formatFewShotExamples({
        examples,
        functionName: generatePromptsTool.name,
        responseSchema: GeneratedPrompts,
      }),
      {
        role: "user",
        content: JSON.stringify({ issue, summary }),
      },
    ] satisfies OpenAI.ChatCompletionMessageParam[];
    const result = await openAi.client.chat.completions.create({
      model: openAi.model,
      messages,
      temperature: 0,
      max_tokens: 1500,
      functions: [generatePromptsTool],
      function_call: {
        name: generatePromptsTool.name,
      },
    });
    const response = result.choices[0].message;
    if (response === undefined) {
      throw new Error("No response from OpenAI");
    }
    if (
      response.function_call === undefined ||
      response.function_call === null
    ) {
      throw new Error("No function call in response from OpenAI");
    }
    const generatedPrompts = GeneratedPrompts.parse(
      JSON.parse(response.function_call.arguments)
    );

    logger?.appendArtifact(
      `chatTemplates/generatePrompts-${Date.now()}.json`,
      stripIndents`
        ${formatMessagesForArtifact(messages).join("\n")}
        <Summary>
        ${JSON.stringify(summary)}
        </Summary>
      `
    );

    return generatedPrompts;
  };
}
```
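`makeGeneratePrompts` is a factory: it closes over an OpenAI client, optional extra directions, and optional few-shot examples, and returns an async function that forces a `generatePrompts` function call and validates the arguments against the `GeneratedPrompts` zod schema. A hedged sketch of how it might be wired up; the client construction, model name, and directions string below are illustrative assumptions, not values from this commit:

```ts
import { OpenAI } from "mongodb-rag-core/openai";
import { FormattedJiraIssueWithSummary } from "../commands/generateJiraPromptResponse";
import { makeGeneratePrompts } from "./makeGeneratePrompts";

// Hypothetical wiring: the API key variable and model name are placeholders.
const generatePrompts = makeGeneratePrompts({
  openAi: {
    client: new OpenAI({ apiKey: process.env.OPENAI_API_KEY }),
    model: "gpt-4o",
  },
  directions: "Phrase each prompt the way a MongoDB user would ask it.",
  examples: [], // optional PromptExamplePair[] of few-shot examples
});

// The issue/summary pair is produced upstream by the Jira formatting and
// summarization steps; here it simply arrives as a function argument.
export async function promptsForIssue(input: FormattedJiraIssueWithSummary) {
  const { prompts } = await generatePrompts(input);
  return prompts; // 1-4 candidate prompts, validated by the zod schema
}
```

Because `function_call` pins the completion to the `generatePrompts` tool and the arguments are re-parsed with zod, callers either get one to four prompt strings or an exception; nothing unvalidated leaks out.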
packages/mongodb-artifact-generator/src/chat/makeGenerateResponse.ts (new file: 106 additions, 0 deletions)
```ts
import { OpenAI } from "mongodb-rag-core/openai";
import { FormattedJiraIssue } from "../commands/generateJiraPromptResponse";
import { RunLogger } from "../runlogger";
import {
  asJsonSchema,
  formatFewShotExamples,
  formatMessagesForArtifact,
  PromptExamplePair,
} from "./utils";
import { z } from "zod";
import { stripIndents } from "common-tags";
import { Summary } from "./makeSummarizer";

export type GeneratedResponse = z.infer<typeof GeneratedResponse>;
export const GeneratedResponse = z.object({
  response: z.string().describe("The generated response text."),
});

const generateResponseTool: OpenAI.FunctionDefinition = {
  name: "generateResponse",
  description: "A response generated based on a given context.",
  parameters: asJsonSchema(GeneratedResponse),
};

export type MakeGenerateResponseArgs = {
  openAi: {
    client: OpenAI;
    model: string;
  };
  logger?: RunLogger;
  directions?: string;
  examples?: PromptExamplePair[];
};

export type GenerateResponseArgs = {
  issue: FormattedJiraIssue;
  summary: Summary;
  prompt: string;
};

export function makeGenerateResponse({
  openAi,
  logger,
  directions,
  examples = [],
}: MakeGenerateResponseArgs) {
  return async function generateResponse({
    issue,
    summary,
    prompt,
  }: GenerateResponseArgs) {
    const messages = [
      {
        role: "system",
        content: [
          `Your task is to generate a response to a provided input. The response should be relevant to the input and based only on the provided context.`,
          directions ?? "",
        ].join("\n"),
      },
      ...formatFewShotExamples({
        examples,
        functionName: generateResponseTool.name,
        responseSchema: GeneratedResponse,
      }),
      {
        role: "user",
        content: JSON.stringify({ issue, summary, prompt }),
      },
    ] satisfies OpenAI.ChatCompletionMessageParam[];
    const result = await openAi.client.chat.completions.create({
      model: openAi.model,
      messages,
      temperature: 0,
      max_tokens: 1500,
      functions: [generateResponseTool],
      function_call: {
        name: generateResponseTool.name,
      },
    });
    const response = result.choices[0].message;
    if (response === undefined) {
      throw new Error("No response from OpenAI");
    }
    if (
      response.function_call === undefined ||
      response.function_call === null
    ) {
      throw new Error("No function call in response from OpenAI");
    }
    const generatedResponse = GeneratedResponse.parse(
      JSON.parse(response.function_call.arguments)
    );

    logger?.appendArtifact(
      `chatTemplates/generateResponse-${Date.now()}.json`,
      stripIndents`
        ${formatMessagesForArtifact(messages).join("\n")}
        <Summary>
        ${JSON.stringify(summary)}
        </Summary>
      `
    );

    return generatedResponse;
  };
}
```
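`makeGenerateResponse` follows the same factory pattern but operates per prompt: given the formatted issue, its summary, and one prompt, it returns the generated response text. A hedged usage sketch with the same caveats (client setup, model name, and directions are placeholders, not values from this commit):

```ts
import { OpenAI } from "mongodb-rag-core/openai";
import { FormattedJiraIssue } from "../commands/generateJiraPromptResponse";
import { Summary } from "./makeSummarizer";
import { makeGenerateResponse } from "./makeGenerateResponse";

// Hypothetical wiring; client construction and model name are placeholders.
const generateResponse = makeGenerateResponse({
  openAi: {
    client: new OpenAI({ apiKey: process.env.OPENAI_API_KEY }),
    model: "gpt-4o",
  },
  directions: "Answer the prompt as a knowledgeable MongoDB engineer would.",
});

// Produces the answer half of a prompt-response pair for one Jira issue.
export async function respondToPrompt(
  issue: FormattedJiraIssue,
  summary: Summary,
  prompt: string
) {
  const { response } = await generateResponse({ issue, summary, prompt });
  return response;
}
```

Paired with `makeGeneratePrompts`, this presumably gives the `generateJiraPromptResponse` command both halves of each prompt-response artifact.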