From d9fe8c838d5e9b95126d34cb06b7e021ca9e5eb3 Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Fri, 29 Dec 2023 11:00:34 -0300
Subject: [PATCH] Refactor ChatOllamaEndpoint.py

---
 src/backend/langflow/components/llms/ChatOllamaEndpoint.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/backend/langflow/components/llms/ChatOllamaEndpoint.py b/src/backend/langflow/components/llms/ChatOllamaEndpoint.py
index 94e9df695ee..3201ff432ab 100644
--- a/src/backend/langflow/components/llms/ChatOllamaEndpoint.py
+++ b/src/backend/langflow/components/llms/ChatOllamaEndpoint.py
@@ -1,13 +1,12 @@
-from typing import Optional, List, Dict, Any
-from langchain.chat_models.base import BaseChatModel
+from typing import Any, Dict, List, Optional

 # from langchain_community.chat_models import ChatOllama
 from langchain.chat_models import ChatOllama
+from langchain.chat_models.base import BaseChatModel

 # from langchain.chat_models import ChatOllama
 from langflow import CustomComponent

-
 # whe When a callback component is added to Langflow, the comment must be uncommented.
 # from langchain.callbacks.manager import CallbackManager

@@ -246,7 +245,7 @@ def build(
         llm_params = {k: v for k, v in llm_params.items() if v is not None}

         try:
-            output = ChatOllama(**llm_params)
+            output = ChatOllama(**llm_params)  # type: ignore
         except Exception as e:
             raise ValueError("Could not initialize Ollama LLM.") from e
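
Note (not part of the patch): a minimal standalone sketch of the pattern touched by the second hunk, for reference only. It builds the keyword-argument dict, drops unset (None) values, and then instantiates ChatOllama; the parameter names and values below are placeholders and are not taken from ChatOllamaEndpoint.py.

from langchain.chat_models import ChatOllama

# Placeholder parameters; in the component these come from build() arguments.
llm_params = {
    "base_url": "http://localhost:11434",  # assumed local Ollama endpoint
    "model": "llama2",
    "temperature": None,  # unset values are stripped below
}

# Same filtering as in the patch: drop keys whose value is None
# so ChatOllama falls back to its own defaults for them.
llm_params = {k: v for k, v in llm_params.items() if v is not None}

try:
    output = ChatOllama(**llm_params)  # type: ignore
except Exception as e:
    raise ValueError("Could not initialize Ollama LLM.") from e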