diff --git a/src/ai-service/routers/LLM.py b/src/ai-service/routers/LLM.py
index f418707a..0b729b85 100644
--- a/src/ai-service/routers/LLM.py
+++ b/src/ai-service/routers/LLM.py
@@ -11,7 +11,8 @@ def get_llm():
     useLocalLLM: bool = False
     useAzureOpenAI: bool = False
     kernel = False
-
+    endpoint: str = ''
+
     if os.environ.get("USE_LOCAL_LLM"):
         useLocalLLM = os.environ.get("USE_LOCAL_LLM").lower() == "true"
 
@@ -24,7 +25,7 @@ def get_llm():
 
     # if useLocalLLM or useAzureOpenAI are set to true, get the endpoint from the environment variables
     if useLocalLLM or useAzureOpenAI:
-        endpoint: str = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
+        endpoint = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT")
 
         if isinstance(endpoint, str) == False or endpoint == "":
             raise Exception("AI_ENDPOINT or AZURE_OPENAI_ENDPOINT environment variable must be set when USE_LOCAL_LLM or USE_AZURE_OPENAI is set to true")
@@ -67,4 +68,4 @@ def get_llm():
     else:
         print("Authenticating to Azure OpenAI with OpenAI API key")
        kernel.add_chat_service("dv", AzureChatCompletion(deployment_name=deployment, endpoint=endpoint, api_key=api_key))
-    return kernel, useLocalLLM, endpoint
\ No newline at end of file
+    return kernel, useLocalLLM, endpoint
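
Note: the change gives endpoint a default value at the top of get_llm(). Without it, the unconditional return kernel, useLocalLLM, endpoint raises UnboundLocalError whenever neither USE_LOCAL_LLM nor USE_AZURE_OPENAI is set, because endpoint is only assigned inside the if useLocalLLM or useAzureOpenAI: branch. A minimal standalone sketch of the same pattern (the resolve_endpoint helper below is hypothetical and not part of the service):

import os

def resolve_endpoint() -> str:
    # Default binding mirrors the fix: endpoint is always defined,
    # even when neither feature flag is enabled.
    endpoint: str = ''

    use_local_llm = os.environ.get("USE_LOCAL_LLM", "").lower() == "true"
    use_azure_openai = os.environ.get("USE_AZURE_OPENAI", "").lower() == "true"

    if use_local_llm or use_azure_openai:
        endpoint = os.environ.get("AI_ENDPOINT") or os.environ.get("AZURE_OPENAI_ENDPOINT") or ""
        if not endpoint:
            raise Exception(
                "AI_ENDPOINT or AZURE_OPENAI_ENDPOINT environment variable must be set "
                "when USE_LOCAL_LLM or USE_AZURE_OPENAI is set to true"
            )

    # Safe to return unconditionally thanks to the default above.
    return endpoint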