diff --git a/src/config/llm.ts b/src/config/llm.ts
index 75fd7a0..7b7a61f 100644
--- a/src/config/llm.ts
+++ b/src/config/llm.ts
@@ -1,35 +1,35 @@
-import { llmModels, LLMNodeConfiguration, LLMProvider } from '../services/llm/types.js';
+import { LLMNodeConfiguration, LLMProvider } from '../services/llm/types.js';
 
 export const llmDefaultConfig = {
   nodes: {
     decision: {
       provider: LLMProvider.OPENAI,
-      model: llmModels.openai.gpt4o_mini,
+      model: 'gpt-4o-mini',
       temperature: 0.2,
     } as LLMNodeConfiguration,
     analyze: {
       provider: LLMProvider.ANTHROPIC,
-      model: llmModels.anthropic.claude35sonnet,
+      model: 'claude-3-5-sonnet-latest',
       temperature: 0.5,
     } as LLMNodeConfiguration,
     generation: {
       provider: LLMProvider.ANTHROPIC,
-      model: llmModels.anthropic.claude35sonnet,
+      model: 'claude-3-5-sonnet-latest',
       temperature: 0.8,
     } as LLMNodeConfiguration,
     response: {
       provider: LLMProvider.OPENAI,
-      model: llmModels.openai.gpt4o_mini,
+      model: 'gpt-4o-mini',
       temperature: 0.8,
     } as LLMNodeConfiguration,
     orchestrator: {
       provider: LLMProvider.ANTHROPIC,
-      model: llmModels.anthropic.claude35sonnet,
+      model: 'claude-3-5-sonnet-latest',
       temperature: 0.2,
     } as LLMNodeConfiguration,
     prompt_summarizer: {
       provider: LLMProvider.OPENAI,
-      model: llmModels.openai.gpt4o_mini,
+      model: 'gpt-4o-mini',
       temperature: 0.2,
     } as LLMNodeConfiguration,
   },
diff --git a/src/services/llm/types.ts b/src/services/llm/types.ts
index 81e5af3..ff9cf62 100644
--- a/src/services/llm/types.ts
+++ b/src/services/llm/types.ts
@@ -15,22 +15,3 @@ export type LLMNodeConfiguration = {
   model: string;
   temperature: number;
 };
-
-export const llmModels = {
-  openai: {
-    gpt4_turbo: 'gpt-4-turbo-preview',
-    gpt4o_mini: 'gpt-4o-mini',
-    o3_mini: 'o3-mini',
-  },
-  anthropic: {
-    claude35sonnet: 'claude-3-5-sonnet-latest',
-    claude35haiku: 'claude-3-5-haiku-latest',
-  },
-  deepseek: {
-    deepseekChat: 'deepseek-chat',
-    deepseekReasoner: 'deepseek-reasoner',
-  },
-  ollama: {
-    llama3: 'llama3.1',
-  },
-};