(EAI-383) Fetch & Refine Top Jira Tickets (#539)
nlarew authored Nov 8, 2024
1 parent 37be250 commit 201aa43
Showing 22 changed files with 2,446 additions and 149 deletions.
25 changes: 24 additions & 1 deletion package-lock.json

Some generated files are not rendered by default.

4 changes: 3 additions & 1 deletion packages/mongodb-artifact-generator/package.json
@@ -60,6 +60,8 @@
"papaparse": "^5.4.1",
"yaml": "^2.3.1",
"yargs": "^17",
"zod": "^3.22.4"
"zod": "^3.22.4",
"zod-to-json-schema": "^3.23.2",
"zod-validation-error": "^3.4.0"
}
}
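The two new runtime dependencies support the structured output used later in this commit: zod-to-json-schema turns the zod response schemas into JSON Schema for OpenAI function definitions, and zod-validation-error is presumably there to produce readable errors when schema parsing fails. As a rough illustration, the asJsonSchema helper that the new chat modules import from ./utils (not shown in this excerpt) might look something like the sketch below; the signature and options are assumptions, not the actual implementation.

import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";

// Hypothetical helper; the real chat/utils.ts is not part of this excerpt.
function asJsonSchema(schema: z.ZodType): Record<string, unknown> {
  // Inline all refs and drop the draft marker so the result can be passed
  // directly as an OpenAI FunctionDefinition's `parameters` value.
  const { $schema, ...jsonSchema } = zodToJsonSchema(schema, {
    $refStrategy: "none",
  }) as Record<string, unknown>;
  return jsonSchema;
}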
12 changes: 12 additions & 0 deletions packages/mongodb-artifact-generator/src/Config.ts
@@ -39,10 +39,22 @@ export type Config = {
*/
jiraApi?: Constructor<JiraApi>;

/**
The maximum number of concurrent requests to make to the Jira API.
@default 12
*/
jiraApiMaxConcurrency?: number;

/**
The GitHub API client.
*/
githubApi?: Constructor<Octokit>;

/**
The maximum number of concurrent requests to make to an LLM generator.
@default 8
*/
llmMaxConcurrency?: number;
};

export type Constructor<T> = (() => T) | (() => Promise<T>);
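Illustrative only: how a run configuration might set the two new throttles. The rest of the Config fields and how the CLI loads this object are outside this diff, so everything below other than the two field names and their defaults is an assumption.

import { Config } from "./Config";

// Hypothetical config excerpt; only the two new fields come from this commit.
export const config: Partial<Config> = {
  jiraApiMaxConcurrency: 12, // cap on concurrent Jira API requests (documented default: 12)
  llmMaxConcurrency: 8, // cap on concurrent LLM generation requests (documented default: 8)
};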
25 changes: 14 additions & 11 deletions packages/mongodb-artifact-generator/src/chat/index.ts
@@ -1,19 +1,22 @@
- export type ChatMessage = {
- role: "user" | "assistant" | "system";
- content: string;
- };
+ import { OpenAI } from "mongodb-rag-core/openai";

- export function chatMessage<T extends ChatMessage>(t: T) {
+ export function chatMessage<T extends OpenAI.ChatCompletionMessageParam>(t: T) {
return t;
}

- export const systemMessage = (content: string) =>
- chatMessage({ role: "system", content });
+ export const systemMessage = (
+ args: Omit<OpenAI.ChatCompletionSystemMessageParam, "role">
+ ): OpenAI.ChatCompletionSystemMessageParam =>
+ chatMessage({ role: "system", ...args });

- export const userMessage = (content: string) =>
- chatMessage({ role: "user", content });
+ export const userMessage = (
+ args: Omit<OpenAI.ChatCompletionUserMessageParam, "role">
+ ): OpenAI.ChatCompletionUserMessageParam =>
+ chatMessage({ role: "user", ...args });

- export const assistantMessage = (content: string) =>
- chatMessage({ role: "assistant", content });
+ export const assistantMessage = (
+ args: Omit<OpenAI.ChatCompletionAssistantMessageParam, "role">
+ ): OpenAI.ChatCompletionAssistantMessageParam =>
+ chatMessage({ role: "assistant", ...args });

export * from "./makeGenerateChatCompletion";
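The helpers now accept the full OpenAI message params (minus role) rather than a bare content string, so callers can pass additional fields such as name alongside content. A call-site sketch under that reading; the import path and message text are illustrative.

import { systemMessage, userMessage, assistantMessage } from "./chat";

const messages = [
  // Before this change: systemMessage("You are a helpful assistant.")
  systemMessage({ content: "You are a helpful assistant." }),
  userMessage({ content: "Summarize this Jira ticket." }),
  assistantMessage({ content: "The ticket asks for..." }),
];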
packages/mongodb-artifact-generator/src/chat/makeGenerateChatCompletion.ts
@@ -3,11 +3,10 @@ import {
assertEnvVars,
CORE_OPENAI_CHAT_COMPLETION_ENV_VARS,
} from "mongodb-rag-core";
- import { AzureOpenAI } from "mongodb-rag-core/openai";
- import { ChatMessage } from ".";
+ import { OpenAI, AzureOpenAI } from "mongodb-rag-core/openai";

export type GenerateChatCompletion = (
- messages: ChatMessage[]
+ messages: OpenAI.ChatCompletionMessageParam[]
) => Promise<string | undefined>;

export function makeGenerateChatCompletion(): GenerateChatCompletion {
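The remainder of the factory is truncated above, but the exported GenerateChatCompletion type now takes the standard OpenAI message params directly. A minimal caller sketch, assuming the factory still configures its Azure OpenAI client from environment variables as before.

import { makeGenerateChatCompletion } from "./makeGenerateChatCompletion";

async function example(): Promise<string | undefined> {
  const generateChatCompletion = makeGenerateChatCompletion();
  return generateChatCompletion([
    { role: "system", content: "You are a terse assistant." },
    { role: "user", content: "What does this ticket change?" },
  ]);
}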
101 changes: 101 additions & 0 deletions packages/mongodb-artifact-generator/src/chat/makeGeneratePrompts.ts
@@ -0,0 +1,101 @@
import { OpenAI } from "mongodb-rag-core/openai";
import { FormattedJiraIssueWithSummary } from "../commands/generateJiraPromptResponse";
import { RunLogger } from "../runlogger";
import {
asJsonSchema,
formatFewShotExamples,
formatMessagesForArtifact,
PromptExamplePair,
} from "./utils";
import { z } from "zod";
import { stripIndents } from "common-tags";

export type GeneratedPrompts = z.infer<typeof GeneratedPrompts>;
export const GeneratedPrompts = z.object({
prompts: z.array(z.string()).min(1).max(4),
});

const generatePromptsTool: OpenAI.FunctionDefinition = {
name: "generatePrompts",
description:
"A list of generated example prompts that would elicit a given response.",
parameters: asJsonSchema(GeneratedPrompts),
};

export type MakeGeneratePromptsArgs = {
openAi: {
client: OpenAI;
model: string;
};
logger?: RunLogger;
directions?: string;
examples?: PromptExamplePair[];
};

export type GeneratePromptsArgs = FormattedJiraIssueWithSummary;

export function makeGeneratePrompts({
openAi,
logger,
directions,
examples = [],
}: MakeGeneratePromptsArgs) {
return async function generatePrompts({
issue,
summary,
}: GeneratePromptsArgs) {
const messages = [
{
role: "system",
content: [
`Your task is to convert a provided input into a prompt-response format. The format mimics a conversation where one participant sends a prompt and the other replies with a response.`,
directions ?? "",
].join("\n"),
},
...formatFewShotExamples({
examples,
functionName: generatePromptsTool.name,
responseSchema: GeneratedPrompts,
}),
{
role: "user",
content: JSON.stringify({ issue, summary }),
},
] satisfies OpenAI.ChatCompletionMessageParam[];
const result = await openAi.client.chat.completions.create({
model: openAi.model,
messages,
temperature: 0,
max_tokens: 1500,
functions: [generatePromptsTool],
function_call: {
name: generatePromptsTool.name,
},
});
const response = result.choices[0].message;
if (response === undefined) {
throw new Error("No response from OpenAI");
}
if (
response.function_call === undefined ||
response.function_call === null
) {
throw new Error("No function call in response from OpenAI");
}
const generatedPrompts = GeneratedPrompts.parse(
JSON.parse(response.function_call.arguments)
);

logger?.appendArtifact(
`chatTemplates/generatePrompts-${Date.now()}.json`,
stripIndents`
${formatMessagesForArtifact(messages).join("\n")}
<Summary>
${JSON.stringify(summary)}
</Summary>
`
);

return generatedPrompts;
};
}
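A wiring sketch for the new factory, assumed to live alongside the chat modules. The model name, client construction, and directions text are assumptions; only the argument and return shapes come from the file above (prompts is one to four strings, enforced by the GeneratedPrompts zod schema).

import { AzureOpenAI } from "mongodb-rag-core/openai";
import { makeGeneratePrompts } from "./makeGeneratePrompts";
import { FormattedJiraIssueWithSummary } from "../commands/generateJiraPromptResponse";

// Assumed: the Azure OpenAI client picks up its endpoint, key, and API
// version from environment variables.
const generatePrompts = makeGeneratePrompts({
  openAi: { client: new AzureOpenAI(), model: "gpt-4o" },
  directions: "Phrase each prompt as a question a MongoDB user might ask.",
});

export async function promptsForIssue(input: FormattedJiraIssueWithSummary) {
  const { prompts } = await generatePrompts(input);
  return prompts;
}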
106 changes: 106 additions & 0 deletions packages/mongodb-artifact-generator/src/chat/makeGenerateResponse.ts
@@ -0,0 +1,106 @@
import { OpenAI } from "mongodb-rag-core/openai";
import { FormattedJiraIssue } from "../commands/generateJiraPromptResponse";
import { RunLogger } from "../runlogger";
import {
asJsonSchema,
formatFewShotExamples,
formatMessagesForArtifact,
PromptExamplePair,
} from "./utils";
import { z } from "zod";
import { stripIndents } from "common-tags";
import { Summary } from "./makeSummarizer";

export type GeneratedResponse = z.infer<typeof GeneratedResponse>;
export const GeneratedResponse = z.object({
response: z.string().describe("The generated response text."),
});

const generateResponseTool: OpenAI.FunctionDefinition = {
name: "generateResponse",
description: "A response generated based on a given context.",
parameters: asJsonSchema(GeneratedResponse),
};

export type MakeGenerateResponseArgs = {
openAi: {
client: OpenAI;
model: string;
};
logger?: RunLogger;
directions?: string;
examples?: PromptExamplePair[];
};

export type GenerateResponseArgs = {
issue: FormattedJiraIssue;
summary: Summary;
prompt: string;
};

export function makeGenerateResponse({
openAi,
logger,
directions,
examples = [],
}: MakeGenerateResponseArgs) {
return async function generateResponse({
issue,
summary,
prompt,
}: GenerateResponseArgs) {
const messages = [
{
role: "system",
content: [
`Your task is to generate a response to a provided input. The response should be relevant to the input and based only on the provided context.`,
directions ?? "",
].join("\n"),
},
...formatFewShotExamples({
examples,
functionName: generateResponseTool.name,
responseSchema: GeneratedResponse,
}),
{
role: "user",
content: JSON.stringify({ issue, summary, prompt }),
},
] satisfies OpenAI.ChatCompletionMessageParam[];
const result = await openAi.client.chat.completions.create({
model: openAi.model,
messages,
temperature: 0,
max_tokens: 1500,
functions: [generateResponseTool],
function_call: {
name: generateResponseTool.name,
},
});
const response = result.choices[0].message;
if (response === undefined) {
throw new Error("No response from OpenAI");
}
if (
response.function_call === undefined ||
response.function_call === null
) {
throw new Error("No function call in response from OpenAI");
}
const generatedResponse = GeneratedResponse.parse(
JSON.parse(response.function_call.arguments)
);

logger?.appendArtifact(
`chatTemplates/generateResponse-${Date.now()}.json`,
stripIndents`
${formatMessagesForArtifact(messages).join("\n")}
<Summary>
${JSON.stringify(summary)}
</Summary>
`
);

return generatedResponse;
};
}
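To close the loop described by the commit title, the two factories can be chained per issue: generate candidate prompts, then generate a response for each one. The glue below is an assumption; the actual orchestration lives in the generateJiraPromptResponse command, which is not included in this excerpt.

import { makeGeneratePrompts } from "./makeGeneratePrompts";
import { makeGenerateResponse } from "./makeGenerateResponse";
import { FormattedJiraIssueWithSummary } from "../commands/generateJiraPromptResponse";

// Hypothetical glue, assumed to live alongside the new chat modules.
export async function promptResponsePairs(
  input: FormattedJiraIssueWithSummary,
  generatePrompts: ReturnType<typeof makeGeneratePrompts>,
  generateResponse: ReturnType<typeof makeGenerateResponse>
) {
  const { prompts } = await generatePrompts(input);
  return Promise.all(
    prompts.map(async (prompt) => {
      const { response } = await generateResponse({ ...input, prompt });
      return { prompt, response };
    })
  );
}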