Skip to content

Commit

Permalink
fix: ability to specify external Ollama server (#389)
Browse files Browse the repository at this point in the history
Closes: #387
  • Loading branch information
grahamwhiteuk authored Feb 25, 2025
1 parent 5d538f4 commit 91cbbb9
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 2 deletions.
7 changes: 7 additions & 0 deletions python/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -17,3 +17,10 @@ BEEAI_LOG_LEVEL=INFO
# WATSONX_URL=your-watsonx-instance-base-url
# WATSONX_PROJECT_ID=your-watsonx-project-id
# WATSONX_APIKEY=your-watsonx-api-key

########################
### Ollama specific configuration
########################

# OLLAMA_BASE_URL=http://localhost:11434
# OLLAMA_CHAT_MODEL=llama3.1:8b
7 changes: 6 additions & 1 deletion python/beeai_framework/adapters/ollama/backend/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,12 @@ def provider_id(self) -> ProviderName:
return "ollama"

def __init__(self, model_id: str | None = None, settings: dict | None = None) -> None:
    """Create an Ollama chat-model adapter.

    Args:
        model_id: Ollama model name. Falls back to the ``OLLAMA_CHAT_MODEL``
            environment variable, then to ``"llama3.1:8b"``.
        settings: Optional backend settings. Precedence for ``base_url``:
            an explicit key in *settings* wins over the ``OLLAMA_BASE_URL``
            environment variable, which wins over the
            ``http://localhost:11434`` default.
    """
    # Copy so the caller's dict is never mutated.
    _settings = dict(settings) if settings is not None else {}

    # BUG FIX: the original checked `hasattr(_settings, "base_url")`, which
    # is always False for a dict (keys are not attributes), so the env var
    # silently clobbered an explicitly supplied base_url. It also required
    # `settings is not None`, so OLLAMA_BASE_URL was ignored whenever no
    # settings dict was passed at all. Use a mapping-key check on the
    # always-present copy instead.
    if "base_url" not in _settings and "OLLAMA_BASE_URL" in os.environ:
        _settings["base_url"] = os.environ["OLLAMA_BASE_URL"]

    super().__init__(
        model_id if model_id else os.getenv("OLLAMA_CHAT_MODEL", "llama3.1:8b"),
        # _settings is always a dict here, so no `or {}` fallback is needed.
        settings={"base_url": "http://localhost:11434"} | _settings,
    )
12 changes: 11 additions & 1 deletion python/tests/backend/test_chatmodel.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@


import asyncio
import os
from collections.abc import AsyncGenerator

import pytest
Expand Down Expand Up @@ -129,8 +130,17 @@ async def test_chat_model_abort(reverse_words_chat: ChatModel, chat_messages_lis

@pytest.mark.unit
def test_chat_model_from() -> None:
ollama_chat_model = ChatModel.from_name("ollama:llama3.1")
# Ollama with Llama model and base_url specified in code
os.environ.pop("OLLAMA_BASE_URL", None)
ollama_chat_model = ChatModel.from_name("ollama:llama3.1", {"base_url": "http://somewhere:12345"})
assert isinstance(ollama_chat_model, OllamaChatModel)
assert ollama_chat_model.settings["base_url"] == "http://somewhere:12345"

# Ollama with Granite model and base_url specified in env var
os.environ["OLLAMA_BASE_URL"] = "http://somewhere-else:12345"
ollama_chat_model = ChatModel.from_name("ollama:granite3.1-dense:8b")
assert isinstance(ollama_chat_model, OllamaChatModel)
assert ollama_chat_model.settings["base_url"] == "http://somewhere-else:12345"

watsonx_chat_model = ChatModel.from_name("watsonx:ibm/granite-3-8b-instruct")
assert isinstance(watsonx_chat_model, WatsonxChatModel)

0 comments on commit 91cbbb9

Please sign in to comment.