diff --git a/autogpt_platform/backend/.env.example b/autogpt_platform/backend/.env.example index 75b0daf87d9f..fa22de569764 100644 --- a/autogpt_platform/backend/.env.example +++ b/autogpt_platform/backend/.env.example @@ -78,6 +78,7 @@ TWITTER_CLIENT_SECRET= # LLM OPENAI_API_KEY= ANTHROPIC_API_KEY= +AIML_API_KEY= GROQ_API_KEY= OPEN_ROUTER_API_KEY= diff --git a/autogpt_platform/backend/backend/blocks/llm.py b/autogpt_platform/backend/backend/blocks/llm.py index 6f242a443584..16c1c2ebac48 100644 --- a/autogpt_platform/backend/backend/blocks/llm.py +++ b/autogpt_platform/backend/backend/blocks/llm.py @@ -37,6 +37,7 @@ ProviderName.OLLAMA, ProviderName.OPENAI, ProviderName.OPEN_ROUTER, + ProviderName.AIML_API, ] AICredentials = CredentialsMetaInput[LLMProviderName, Literal["api_key"]] @@ -100,6 +101,12 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta): # Anthropic models CLAUDE_3_5_SONNET = "claude-3-5-sonnet-latest" CLAUDE_3_HAIKU = "claude-3-haiku-20240307" + # AI/ML API models + AIML_API_QWEN2_5_72B = "Qwen/Qwen2.5-72B-Instruct-Turbo" + AIML_API_LLAMA3_1_70B = "nvidia/llama-3.1-nemotron-70b-instruct" + AIML_API_LLAMA3_3_70B = "meta-llama/Llama-3.3-70B-Instruct-Turbo" + AIML_API_META_LLAMA_3_1_70B = "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" + AIML_API_LLAMA_3_2_3B = "meta-llama/Llama-3.2-3B-Instruct-Turbo" # Groq models LLAMA3_8B = "llama3-8b-8192" LLAMA3_70B = "llama3-70b-8192" @@ -157,6 +164,11 @@ def context_window(self) -> int: LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385), LlmModel.CLAUDE_3_5_SONNET: ModelMetadata("anthropic", 200000), LlmModel.CLAUDE_3_HAIKU: ModelMetadata("anthropic", 200000), + LlmModel.AIML_API_QWEN2_5_72B: ModelMetadata("aiml_api", 32000), + LlmModel.AIML_API_LLAMA3_1_70B: ModelMetadata("aiml_api", 128000), + LlmModel.AIML_API_LLAMA3_3_70B: ModelMetadata("aiml_api", 128000), + LlmModel.AIML_API_META_LLAMA_3_1_70B: ModelMetadata("aiml_api", 131000), + LlmModel.AIML_API_LLAMA_3_2_3B: ModelMetadata("aiml_api", 128000), 
LlmModel.LLAMA3_8B: ModelMetadata("groq", 8192), LlmModel.LLAMA3_70B: ModelMetadata("groq", 8192), LlmModel.MIXTRAL_8X7B: ModelMetadata("groq", 32768), @@ -439,6 +451,23 @@ def llm_call( response.usage.prompt_tokens if response.usage else 0, response.usage.completion_tokens if response.usage else 0, ) + elif provider == "aiml_api": + client = openai.OpenAI( + base_url="https://api.aimlapi.com/v2", + api_key=credentials.api_key.get_secret_value(), + ) + + completion = client.chat.completions.create( + model=llm_model.value, + messages=prompt, # type: ignore + max_tokens=max_tokens, + ) + + return ( + completion.choices[0].message.content or "", + completion.usage.prompt_tokens if completion.usage else 0, + completion.usage.completion_tokens if completion.usage else 0, + ) else: raise ValueError(f"Unsupported LLM provider: {provider}") @@ -515,16 +544,19 @@ def parse_response(resp: str) -> tuple[dict[str, Any], str | None]: if input_data.expected_format: parsed_dict, parsed_error = parse_response(response_text) if not parsed_error: - yield "response", { - k: ( - json.loads(v) - if isinstance(v, str) - and v.startswith("[") - and v.endswith("]") - else (", ".join(v) if isinstance(v, list) else v) - ) - for k, v in parsed_dict.items() - } + yield ( + "response", + { + k: ( + json.loads(v) + if isinstance(v, str) + and v.startswith("[") + and v.endswith("]") + else (", ".join(v) if isinstance(v, list) else v) + ) + for k, v in parsed_dict.items() + }, + ) return else: yield "response", {"response": response_text} diff --git a/autogpt_platform/backend/backend/data/block_cost_config.py b/autogpt_platform/backend/backend/data/block_cost_config.py index 8209a2d95a9f..67a1330e460f 100644 --- a/autogpt_platform/backend/backend/data/block_cost_config.py +++ b/autogpt_platform/backend/backend/data/block_cost_config.py @@ -20,6 +20,7 @@ from backend.data.block import Block from backend.data.cost import BlockCost, BlockCostType from backend.integrations.credentials_store import ( 
+ aiml_api_credentials, anthropic_credentials, did_credentials, groq_credentials, @@ -43,6 +44,11 @@ LlmModel.GPT3_5_TURBO: 1, LlmModel.CLAUDE_3_5_SONNET: 4, LlmModel.CLAUDE_3_HAIKU: 1, + LlmModel.AIML_API_QWEN2_5_72B: 1, + LlmModel.AIML_API_LLAMA3_1_70B: 1, + LlmModel.AIML_API_LLAMA3_3_70B: 1, + LlmModel.AIML_API_META_LLAMA_3_1_70B: 1, + LlmModel.AIML_API_LLAMA_3_2_3B: 1, LlmModel.LLAMA3_8B: 1, LlmModel.LLAMA3_70B: 1, LlmModel.MIXTRAL_8X7B: 1, @@ -143,6 +149,23 @@ for model, cost in MODEL_COST.items() if MODEL_METADATA[model].provider == "open_router" ] + # AI/ML Api Models + + [ + BlockCost( + cost_type=BlockCostType.RUN, + cost_filter={ + "model": model, + "credentials": { + "id": aiml_api_credentials.id, + "provider": aiml_api_credentials.provider, + "type": aiml_api_credentials.type, + }, + }, + cost_amount=cost, + ) + for model, cost in MODEL_COST.items() + if MODEL_METADATA[model].provider == "aiml_api" + ] ) # =============== This is the exhaustive list of cost for each Block =============== # diff --git a/autogpt_platform/backend/backend/integrations/credentials_store.py b/autogpt_platform/backend/backend/integrations/credentials_store.py index 3aa7a7fb8a9e..ac34e65047c0 100644 --- a/autogpt_platform/backend/backend/integrations/credentials_store.py +++ b/autogpt_platform/backend/backend/integrations/credentials_store.py @@ -51,6 +51,13 @@ title="Use Credits for OpenAI", expires_at=None, ) +aiml_api_credentials = APIKeyCredentials( + id="aad82a89-9794-4ebb-977f-d736aa5260a3", + provider="aiml_api", + api_key=SecretStr(settings.secrets.aiml_api_key), + title="Use Credits for AI/ML API", + expires_at=None, +) anthropic_credentials = APIKeyCredentials( id="24e5d942-d9e3-4798-8151-90143ee55629", provider="anthropic", @@ -128,6 +135,7 @@ ideogram_credentials, replicate_credentials, openai_credentials, + aiml_api_credentials, anthropic_credentials, groq_credentials, did_credentials, @@ -179,6 +187,8 @@ def get_all_creds(self, user_id: str) -> list[Credentials]: 
all_credentials.append(replicate_credentials) if settings.secrets.openai_api_key: all_credentials.append(openai_credentials) + if settings.secrets.aiml_api_key: + all_credentials.append(aiml_api_credentials) if settings.secrets.anthropic_api_key: all_credentials.append(anthropic_credentials) if settings.secrets.did_api_key: diff --git a/autogpt_platform/backend/backend/integrations/providers.py b/autogpt_platform/backend/backend/integrations/providers.py index d08d50e0219e..1a02b99e17d6 100644 --- a/autogpt_platform/backend/backend/integrations/providers.py +++ b/autogpt_platform/backend/backend/integrations/providers.py @@ -4,6 +4,7 @@ # --8<-- [start:ProviderName] class ProviderName(str, Enum): ANTHROPIC = "anthropic" + AIML_API = "aiml_api" COMPASS = "compass" DISCORD = "discord" D_ID = "d_id" diff --git a/autogpt_platform/backend/backend/util/settings.py b/autogpt_platform/backend/backend/util/settings.py index 399a2d41f7fb..f7126b438aac 100644 --- a/autogpt_platform/backend/backend/util/settings.py +++ b/autogpt_platform/backend/backend/util/settings.py @@ -270,6 +270,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings): ) openai_api_key: str = Field(default="", description="OpenAI API key") + aiml_api_key: str = Field(default="", description="'AI/ML API' key") anthropic_api_key: str = Field(default="", description="Anthropic API key") groq_api_key: str = Field(default="", description="Groq API key") open_router_api_key: str = Field(default="", description="Open Router API Key") diff --git a/autogpt_platform/frontend/src/app/profile/page.tsx b/autogpt_platform/frontend/src/app/profile/page.tsx index b3097577db3a..2ca8be997f4a 100644 --- a/autogpt_platform/frontend/src/app/profile/page.tsx +++ b/autogpt_platform/frontend/src/app/profile/page.tsx @@ -103,6 +103,7 @@ export default function PrivatePage() { "6b9fc200-4726-4973-86c9-cd526f5ce5db", // Replicate "53c25cb8-e3ee-465c-a4d1-e75a4c899c2a", // OpenAI "24e5d942-d9e3-4798-8151-90143ee55629", // 
Anthropic + "aad82a89-9794-4ebb-977f-d736aa5260a3", // AI/ML API "4ec22295-8f97-4dd1-b42b-2c6957a02545", // Groq "7f7b0654-c36b-4565-8fa7-9a52575dfae2", // D-ID "7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina diff --git a/autogpt_platform/frontend/src/app/store/(user)/integrations/page.tsx b/autogpt_platform/frontend/src/app/store/(user)/integrations/page.tsx index a4fa36ab29e3..44b699b4bf56 100644 --- a/autogpt_platform/frontend/src/app/store/(user)/integrations/page.tsx +++ b/autogpt_platform/frontend/src/app/store/(user)/integrations/page.tsx @@ -103,6 +103,7 @@ export default function PrivatePage() { "6b9fc200-4726-4973-86c9-cd526f5ce5db", // Replicate "53c25cb8-e3ee-465c-a4d1-e75a4c899c2a", // OpenAI "24e5d942-d9e3-4798-8151-90143ee55629", // Anthropic + "aad82a89-9794-4ebb-977f-d736aa5260a3", // AI/ML API "4ec22295-8f97-4dd1-b42b-2c6957a02545", // Groq "7f7b0654-c36b-4565-8fa7-9a52575dfae2", // D-ID "7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina diff --git a/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx b/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx index 7a32b4378e9e..7a2ec0d65921 100644 --- a/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx +++ b/autogpt_platform/frontend/src/components/integrations/credentials-input.tsx @@ -59,6 +59,7 @@ export const providerIcons: Record< github: FaGithub, google: FaGoogle, groq: fallbackIcon, + aiml_api: fallbackIcon, notion: NotionLogoIcon, nvidia: fallbackIcon, discord: FaDiscord, diff --git a/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx b/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx index 38c97443cebf..562f0d266ba5 100644 --- a/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx +++ b/autogpt_platform/frontend/src/components/integrations/credentials-provider.tsx @@ -16,6 +16,7 @@ const CREDENTIALS_PROVIDER_NAMES = Object.values( // --8<-- 
[start:CredentialsProviderNames] const providerDisplayNames: Record = { + aiml_api: "AI/ML API", anthropic: "Anthropic", discord: "Discord", d_id: "D-ID", diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts index 0eaf4fcbede2..dc3ebe886818 100644 --- a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts +++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts @@ -102,6 +102,7 @@ export type CredentialsType = "api_key" | "oauth2"; // --8<-- [start:BlockIOCredentialsSubSchema] export const PROVIDER_NAMES = { ANTHROPIC: "anthropic", + AIML_API: "aiml_api", D_ID: "d_id", DISCORD: "discord", E2B: "e2b", diff --git a/docs/content/index.md b/docs/content/index.md index 243b56ea4193..cd698443644b 100644 --- a/docs/content/index.md +++ b/docs/content/index.md @@ -66,6 +66,7 @@ The platform comes pre-integrated with cutting-edge LLM providers: - OpenAI - Anthropic +- AI/ML API - Groq - Llama