
Commit

Merge pull request #2471 from Agenta-AI/release/v0.33.3
v0.33.3
jp-agenta authored Feb 10, 2025
2 parents 134115f + 554da81 commit 49331b4
Showing 11 changed files with 194 additions and 156 deletions.
2 changes: 1 addition & 1 deletion agenta-backend/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "agenta_backend"
version = "0.33.2"
version = "0.33.3"
description = ""
authors = ["Mahmoud Mabrouk <[email protected]>"]
readme = "README.md"
3 changes: 2 additions & 1 deletion agenta-cli/README.md
@@ -120,7 +120,8 @@ The easiest way to get started is through Agenta Cloud. It is free to signup, an…
```bash
mkdir agenta && cd agenta
curl -L https://raw.githubusercontent.com/agenta-ai/agenta/main/docker-compose.gh.yml -o docker-compose.gh.yml
docker compose -f docker-compose.gh.yml up -d
echo "OPENAI_API_KEY=sk-xxx" > .env
docker compose -f docker-compose.gh.yml --env-file .env up -d --pull always
```

#### Deployment Options
151 changes: 117 additions & 34 deletions agenta-cli/agenta/sdk/assets.py
@@ -1,23 +1,13 @@
supported_llm_models = {
"Aleph Alpha": [
"luminous-base",
"luminous-base-control",
"luminous-extended-control",
"luminous-supreme",
],
"Anthropic": [
"anthropic/claude-3-5-sonnet-20241022",
"anthropic/claude-3-5-sonnet-20240620",
"anthropic/claude-3-5-haiku-20241022",
"anthropic/claude-3-opus-20240229",
"anthropic/claude-3-sonnet-20240229",
"anthropic/claude-3-haiku-20240307",
"anthropic/claude-2.1",
"anthropic/claude-2",
"anthropic/claude-instant-1.2",
"anthropic/claude-instant-1",
],
"Anyscale": [
"anyscale/meta-llama/Llama-2-13b-chat-hf",
"anyscale/meta-llama/Llama-2-70b-chat-hf",
],
"Cohere": [
"cohere/command-light",
@@ -29,26 +19,37 @@
"deepinfra/meta-llama/Llama-2-13b-chat-hf",
"deepinfra/codellama/CodeLlama-34b-Instruct-hf",
"deepinfra/mistralai/Mistral-7B-Instruct-v0.1",
"deepinfra/jondurbin/airoboros-l2-70b-gpt4-1.4.1",
],
"Gemini": [
"gemini/gemini-2.0-flash-001",
"gemini/gemini-2.0-flash-lite-preview-02-05",
"gemini/gemini-1.5-pro-latest",
"gemini/gemini-1.5-flash",
"gemini/gemini-1.5-flash-8b",
],
"Groq": [
"groq/llama3-8b-8192",
"groq/deepseek-r1-distill-llama-70b",
"groq/deepseek-r1-distill-llama-70b-specdec",
"groq/gemma2-9b-it",
"groq/llama-3.1-8b-instant",
"groq/llama-3.2-11b-vision-preview",
"groq/llama-3.2-1b-preview",
"groq/llama-3.2-3b-preview",
"groq/llama-3.2-90b-vision-preview",
"groq/llama-3.3-70b-specdec",
"groq/llama-3.3-70b-versatile",
"groq/llama-guard-3-8b",
"groq/llama3-70b-8192",
"groq/llama2-70b-4096",
"groq/llama3-8b-8192",
"groq/mixtral-8x7b-32768",
"groq/gemma-7b-it",
],
"Mistral": [
"mistral/mistral-tiny",
"mistral/mistral-small",
"mistral/mistral-medium",
"mistral/mistral-large-latest",
],
"Open AI": [
"OpenAI": [
"gpt-3.5-turbo-1106",
"gpt-3.5-turbo",
"gpt-4",
Expand All @@ -57,32 +58,114 @@
"gpt-4-1106-preview",
],
"OpenRouter": [
"openrouter/openai/gpt-3.5-turbo",
"openrouter/openai/gpt-3.5-turbo-16k",
"openrouter/anthropic/claude-instant-v1",
"openrouter/anthropic/claude-3-opus",
"openrouter/anthropic/claude-3-sonnet",
"openrouter/anthropic/claude-3.5-haiku",
"openrouter/anthropic/claude-3.5-haiku-20241022",
"openrouter/anthropic/claude-3.5-haiku-20241022:beta",
"openrouter/anthropic/claude-3.5-haiku:beta",
"openrouter/anthropic/claude-3.5-sonnet",
"openrouter/anthropic/claude-3.5-sonnet-20240620",
"openrouter/anthropic/claude-3.5-sonnet-20240620:beta",
"openrouter/anthropic/claude-3.5-sonnet:beta",
"openrouter/cohere/command",
"openrouter/cohere/command-r",
"openrouter/cohere/command-r-03-2024",
"openrouter/cohere/command-r-08-2024",
"openrouter/cohere/command-r-plus",
"openrouter/cohere/command-r-plus-04-2024",
"openrouter/cohere/command-r-plus-08-2024",
"openrouter/cohere/command-r7b-12-2024",
"openrouter/deepseek/deepseek-chat",
"openrouter/deepseek/deepseek-chat-v2.5",
"openrouter/deepseek/deepseek-r1",
"openrouter/deepseek/deepseek-r1:nitro",
"openrouter/deepseek/deepseek-r1-distill-llama-70b",
"openrouter/deepseek/deepseek-r1-distill-qwen-1.5b",
"openrouter/deepseek/deepseek-r1-distill-qwen-14b",
"openrouter/deepseek/deepseek-r1-distill-qwen-32b",
"openrouter/databricks/dbrx-instruct",
"openrouter/google/gemini-2.0-flash-001",
"openrouter/google/gemini-2.0-flash-exp:free",
"openrouter/google/gemini-2.0-flash-lite-preview-02-05:free",
"openrouter/google/gemini-2.0-flash-thinking-exp-1219:free",
"openrouter/google/gemini-2.0-flash-thinking-exp:free",
"openrouter/google/gemini-2.0-pro-exp-02-05:free",
"openrouter/google/gemini-exp-1206:free",
"openrouter/google/gemini-flash-1.5",
"openrouter/google/gemini-flash-1.5-8b",
"openrouter/google/gemini-flash-1.5-8b-exp",
"openrouter/google/gemini-pro",
"openrouter/google/gemini-pro-1.5",
"openrouter/google/gemini-pro-vision",
"openrouter/google/palm-2-chat-bison",
"openrouter/google/palm-2-codechat-bison",
"openrouter/meta-llama/llama-2-13b-chat",
"openrouter/meta-llama/llama-2-70b-chat",
"openrouter/meta-llama/llama-3.3-70b-instruct",
"openrouter/meta-llama/llama-3.2-90b-vision-instruct",
"openrouter/meta-llama/llama-3.1-405b-instruct",
"openrouter/mistralai/mistral-large",
"openrouter/mistralai/mistral-medium",
"openrouter/mistralai/mistral-small",
"openrouter/mistralai/mixtral-8x7b-instruct",
"openrouter/nousresearch/hermes-3-llama-3.1-405b",
"openrouter/nousresearch/hermes-3-llama-3.1-70b",
"openrouter/nousresearch/nous-hermes-2-mixtral-8x7b-dpo",
"openrouter/qwen/qwen-max",
"openrouter/qwen/qwen-plus",
"openrouter/qwen/qwen-2.5-72b-instruct",
"openrouter/qwen/qwen-2.5-coder-32b-instruct",
"openrouter/x-ai/grok-2",
"openrouter/x-ai/grok-2-1212",
"openrouter/x-ai/grok-2-vision-1212",
"openrouter/x-ai/grok-vision-beta",
"openrouter/google/gemini-2.0-flash-001",
"openrouter/perplexity/sonar-reasoning",
],
"Perplexity AI": [
"perplexity/sonar",
"perplexity/sonar-pro",
"perplexity/sonar-reasoning",
"perplexity/sonar-reasoning-pro",
],
"Together AI": [
"together_ai/togethercomputer/llama-2-70b-chat",
"together_ai/togethercomputer/llama-2-70b",
"together_ai/togethercomputer/LLaMA-2-7B-32K",
"together_ai/togethercomputer/Llama-2-7B-32K-Instruct",
"together_ai/togethercomputer/llama-2-7b",
"together_ai/togethercomputer/alpaca-7b",
"together_ai/togethercomputer/CodeLlama-34b-Instruct",
"together_ai/togethercomputer/CodeLlama-34b-Python",
"together_ai/WizardLM/WizardCoder-Python-34B-V1.0",
"together_ai/NousResearch/Nous-Hermes-Llama2-13b",
"together_ai/Austism/chronos-hermes-13b",
"togetherai": [
"together_ai/deepseek-ai/DeepSeek-R1",
"together_ai/deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
"together_ai/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
"together_ai/deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
"together_ai/deepseek-ai/DeepSeek-V3",
"together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo",
"together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
"together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
"together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
"together_ai/meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
"together_ai/meta-llama/Meta-Llama-3-70B-Instruct-Turbo",
"together_ai/meta-llama/Llama-3.2-3B-Instruct-Turbo",
"together_ai/meta-llama/Meta-Llama-3-8B-Instruct-Lite",
"together_ai/meta-llama/Meta-Llama-3-70B-Instruct-Lite",
"together_ai/meta-llama/Llama-3-8b-chat-hf",
"together_ai/meta-llama/Llama-3-70b-chat-hf",
"together_ai/nvidia/Llama-3.1-Nemotron-70B-Instruct-HF",
"together_ai/Qwen/Qwen2.5-Coder-32B-Instruct",
"together_ai/Qwen/QwQ-32B-Preview",
"together_ai/microsoft/WizardLM-2-8x22B",
"together_ai/google/gemma-2-27b-it",
"together_ai/google/gemma-2-9b-it",
"together_ai/databricks/dbrx-instruct",
"together_ai/google/gemma-2b-it",
"together_ai/Gryphe/MythoMax-L2-13b",
"together_ai/meta-llama/Llama-2-13b-chat-hf",
"together_ai/mistralai/Mistral-Small-24B-Instruct-2501",
"together_ai/mistralai/Mistral-7B-Instruct-v0.1",
"together_ai/mistralai/Mistral-7B-Instruct-v0.2",
"together_ai/mistralai/Mistral-7B-Instruct-v0.3",
"together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
"together_ai/mistralai/Mixtral-8x22B-Instruct-v0.1",
"together_ai/NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
"together_ai/Qwen/Qwen2.5-7B-Instruct-Turbo",
"together_ai/Qwen/Qwen2.5-72B-Instruct-Turbo",
"together_ai/Qwen/Qwen2-72B-Instruct",
"together_ai/Qwen/Qwen2-VL-72B-Instruct",
"together_ai/upstage/SOLAR-10.7B-Instruct-v1.0",
],
}
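
For orientation, here is a minimal sketch of how the updated `supported_llm_models` mapping might be consumed. The import path matches `agenta-cli/agenta/sdk/assets.py` above; `is_supported` is a hypothetical helper written for illustration, not part of the Agenta SDK.

```python
# Illustrative only: flatten the provider -> models mapping from assets.py
# to check whether a given model identifier is offered by any provider.
from agenta.sdk.assets import supported_llm_models


def is_supported(model_name: str) -> bool:
    """Return True if the model id appears under any provider's list."""
    return any(model_name in models for models in supported_llm_models.values())


# Example: one of the Gemini models added in this release.
print(is_supported("gemini/gemini-2.0-flash-001"))
```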

38 changes: 0 additions & 38 deletions agenta-cli/agenta/sdk/middleware/auth.py
@@ -57,44 +57,6 @@ def __init__(self, app: FastAPI):
self.resource_id = ag.DEFAULT_AGENTA_SINGLETON_INSTANCE.service_id

async def dispatch(self, request: Request, call_next: Callable):
# Extract request details
host = request.client.host if request.client else "unknown"
path = request.url.path
query = dict(request.query_params)
headers = dict(request.headers)

import logging
import json

# Log the request details
logging.error(
json.dumps(
{
"host": host,
"method": request.method,
"path": path,
"query_params": query,
"headers": headers,
},
indent=2,
ensure_ascii=False,
)
)

print(
json.dumps(
{
"host": host,
"method": request.method,
"path": path,
"query_params": query,
"headers": headers,
},
indent=2,
ensure_ascii=False,
)
)

try:
if request.url.path in _ALWAYS_ALLOW_LIST:
request.state.auth = {}
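
For context, a minimal sketch (assuming Starlette/FastAPI's `BaseHTTPMiddleware`) of the allow-list pattern that `dispatch` keeps once the debug logging above is removed. Only `_ALWAYS_ALLOW_LIST` and the empty `auth` state mirror the snippet; everything else is illustrative, not the actual `AuthMiddleware` implementation.

```python
# Illustrative sketch, not the real AuthMiddleware from the Agenta SDK.
from typing import Callable

from fastapi import FastAPI, Request
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import JSONResponse, Response

_ALWAYS_ALLOW_LIST = ["/health"]  # hypothetical allow-listed paths


class AllowListAuthMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next: Callable) -> Response:
        try:
            if request.url.path in _ALWAYS_ALLOW_LIST:
                # Allow-listed paths skip authentication entirely.
                request.state.auth = {}
                return await call_next(request)
            # ... a real middleware would verify credentials here ...
            return await call_next(request)
        except Exception:
            return JSONResponse(status_code=401, content={"detail": "Unauthorized"})


app = FastAPI()
app.add_middleware(AllowListAuthMiddleware)
```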
2 changes: 1 addition & 1 deletion agenta-cli/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "agenta"
version = "0.33.2"
version = "0.33.3"
description = "The SDK for agenta is an open-source LLMOps platform."
readme = "README.md"
authors = ["Mahmoud Mabrouk <[email protected]>"]
4 changes: 2 additions & 2 deletions agenta-web/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion agenta-web/package.json
@@ -1,6 +1,6 @@
{
"name": "agenta",
"version": "0.33.2",
"version": "0.33.3",
"private": true,
"engines": {
"node": ">=18"
64 changes: 57 additions & 7 deletions docker-compose.gh.yml
@@ -191,20 +191,70 @@ services:
condition: service_healthy

completion:
container_name: agenta-completion-1
image: ghcr.io/agenta-ai/agenta-completion
extends:
file: ./services/completion/docker-compose.gh.yml
service: completion
environment:
#
- OPENAI_API_KEY=${OPENAI_API_KEY}
- MISTRAL_API_KEY=${MISTRAL_API_KEY}
- COHERE_API_KEY=${COHERE_API_KEY}
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
- ANYSCALE_API_KEY=${ANYSCALE_API_KEY}
- PERPLEXITYAI_API_KEY=${PERPLEXITYAI_API_KEY}
- DEEPINFRA_API_KEY=${DEEPINFRA_API_KEY}
- TOGETHERAI_API_KEY=${TOGETHERAI_API_KEY}
- ALEPHALPHA_API_KEY=${ALEPHALPHA_API_KEY}
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
- GROQ_API_KEY=${GROQ_API_KEY}
- GEMINI_API_KEY=${GEMINI_API_KEY}
#
- AGENTA_HOST=${DOMAIN_NAME:-http://host.docker.internal}:${AGENTA_PORT:-80}

extra_hosts:
- "host.docker.internal:host-gateway"
labels:
- "traefik.http.routers.completion.rule=PathPrefix(`/services/completion/`)"
- "traefik.http.routers.completion.entrypoints=web"
- "traefik.http.middlewares.completion-strip.stripprefix.prefixes=/services/completion"
- "traefik.http.middlewares.completion-strip.stripprefix.forceslash=true"
- "traefik.http.routers.completion.middlewares=completion-strip"
- "traefik.http.services.completion.loadbalancer.server.port=80"
- "traefik.http.routers.completion.service=completion"
networks:
- agenta-network
restart: always

chat:
container_name: agenta-chat-1
image: ghcr.io/agenta-ai/agenta-chat
extends:
file: ./services/chat/docker-compose.gh.yml
service: chat
environment:
#
- OPENAI_API_KEY=${OPENAI_API_KEY}
- MISTRAL_API_KEY=${MISTRAL_API_KEY}
- COHERE_API_KEY=${COHERE_API_KEY}
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
- ANYSCALE_API_KEY=${ANYSCALE_API_KEY}
- PERPLEXITYAI_API_KEY=${PERPLEXITYAI_API_KEY}
- DEEPINFRA_API_KEY=${DEEPINFRA_API_KEY}
- TOGETHERAI_API_KEY=${TOGETHERAI_API_KEY}
- ALEPHALPHA_API_KEY=${ALEPHALPHA_API_KEY}
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
- GROQ_API_KEY=${GROQ_API_KEY}
- GEMINI_API_KEY=${GEMINI_API_KEY}
#
- AGENTA_HOST=${DOMAIN_NAME:-http://host.docker.internal}:${AGENTA_PORT:-80}
extra_hosts:
- "host.docker.internal:host-gateway"
labels:
- "traefik.http.routers.chat.rule=PathPrefix(`/services/chat/`)"
- "traefik.http.routers.chat.entrypoints=web"
- "traefik.http.middlewares.chat-strip.stripprefix.prefixes=/services/chat"
- "traefik.http.middlewares.chat-strip.stripprefix.forceslash=true"
- "traefik.http.routers.chat.middlewares=chat-strip"
- "traefik.http.services.chat.loadbalancer.server.port=80"
- "traefik.http.routers.chat.service=chat"
networks:
- agenta-network
restart: always

networks:
agenta-network:
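
As a side note, both the completion and chat services now receive the same set of provider API keys from the environment. Below is a small sketch (not part of the repository) that reports which of those keys are present before running `docker compose -f docker-compose.gh.yml --env-file .env up -d`.

```python
# Illustrative helper: list which provider keys forwarded by
# docker-compose.gh.yml are set in the current environment.
import os

PROVIDER_KEYS = [
    "OPENAI_API_KEY",
    "MISTRAL_API_KEY",
    "COHERE_API_KEY",
    "ANTHROPIC_API_KEY",
    "ANYSCALE_API_KEY",
    "PERPLEXITYAI_API_KEY",
    "DEEPINFRA_API_KEY",
    "TOGETHERAI_API_KEY",
    "ALEPHALPHA_API_KEY",
    "OPENROUTER_API_KEY",
    "GROQ_API_KEY",
    "GEMINI_API_KEY",
]

for key in PROVIDER_KEYS:
    print(f"{key}: {'set' if os.environ.get(key) else 'missing'}")
```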
