From 3623d9ac9cb08925f308fbb33fae01f93747bd1e Mon Sep 17 00:00:00 2001
From: Gabriel Luiz Freitas Almeida
Date: Wed, 21 Aug 2024 22:35:59 -0300
Subject: [PATCH] chore(starter-projects): Update OpenAIModelComponent inputs and build_model method (#3489)

---
 .../Basic Prompting (Hello, World).json | 2 +-
 .../initial_setup/starter_projects/Blog Writer.json | 2 +-
 .../initial_setup/starter_projects/Complex Agent.json | 10 +++++-----
 .../initial_setup/starter_projects/Document QA.json | 2 +-
 .../starter_projects/Hierarchical Agent.json | 4 ++--
 .../initial_setup/starter_projects/Memory Chatbot.json | 2 +-
 .../starter_projects/Sequential Agent.json | 2 +-
 .../starter_projects/Vector Store RAG.json | 6 +++---
 8 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json
index 6794b116d2b..c79bdb1070d 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, World).json
@@ -707,7 +707,7 @@
 "show": true,
 "title_case": false,
 "type": "code",
- "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json index def5b5eb934..24651b62089 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Blog Writer.json @@ -895,7 +895,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates 
text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom 
langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { 
"advanced": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json index 100013bf461..f946f5b8ac4 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json @@ -937,7 +937,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -2064,7 +2064,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n 
description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -2764,7 +2764,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 
import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n 
message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -3184,7 +3184,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. 
You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. 
Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, @@ -3629,7 +3629,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n 
FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 
import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if 
isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json index 5b491ca631f..5a8fe5dabb0 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Document QA.json @@ -785,7 +785,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json index b23483bafdc..ce0a3442a36 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Hierarchical Agent.json @@ -636,7 +636,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = 
\"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import 
OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, 
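Note on the build_model bodies above: the substantive difference between the removed and added code is the temperature fallback. "temperature or 0.1" treats an explicit 0.0 as falsy and silently replaces it with 0.1, while "temperature if temperature is not None else 0.1" applies the default only when no value is set. The schema handling is unchanged in both versions: output_schema is a list of dicts that build_model folds into a single dict with reduce and operator.ior (dict in-place merge). A minimal, self-contained sketch of both behaviors follows; it is plain Python written for illustration, not part of the patch, and the function names are invented:

    import operator
    from functools import reduce

    def fallback_or(temperature):
        # old expression: 0.0 is falsy, so an explicit zero is lost
        return temperature or 0.1

    def fallback_none(temperature):
        # new expression: only an unset (None) value gets the default
        return temperature if temperature is not None else 0.1

    assert fallback_or(0.0) == 0.1    # explicit 0.0 silently bumped to 0.1
    assert fallback_none(0.0) == 0.0  # explicit 0.0 preserved
    assert fallback_or(None) == fallback_none(None) == 0.1  # unset: same default

    # output_schema merge as in build_model: fold a list of dicts into one,
    # via operator.ior (dict |= merge), starting from an empty dict
    merged = reduce(operator.ior, [{"answer": "str"}, {"score": "int"}], {})
    assert merged == {"answer": "str", "score": "int"}

This matters for reproducibility experiments where a user deliberately sets Temperature to 0: under the old expression that setting never reached ChatOpenAI.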
"input_value": { "advanced": false, @@ -1783,7 +1783,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json index 05b1bf5768e..089b84c536e 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Memory Chatbot.json @@ -565,7 +565,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = 
\"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom 
langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { 
"advanced": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json index 4ed68a30616..258af3ae731 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Sequential Agent.json @@ -650,7 +650,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false, diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json index 6378998e19c..c62b2492c7c 100644 --- a/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json +++ b/src/backend/base/langflow/initial_setup/starter_projects/Vector Store RAG.json @@ -658,7 +658,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_core.vectorstores import VectorStore\nfrom loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store 
using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n _cached_vectorstore: VectorStore | None = None\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n info=\"Allows either an embedding model or an Astra Vectorize configuration.\", # TODO: This should be optional, but need to refactor langchain-astradb first.\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number 
of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n def _build_vector_store(self):\n # cache the vector store to avoid re-initializing and ingest data again\n if self._cached_vectorstore:\n return self._cached_vectorstore\n\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. \"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options)\n }\n collection_embedding_api_key = self.embedding.get(\"collection_embedding_api_key\")\n if collection_embedding_api_key:\n embedding_dict[\"collection_embedding_api_key\"] = collection_embedding_api_key\n\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self._add_documents_to_vector_store(vector_store)\n\n self._cached_vectorstore = vector_store\n\n return 
vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self):\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n elif self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n else:\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def search_documents(self) -> list[Data]:\n vector_store = self._build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. 
Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n\n def build_vector_store(self):\n vector_store = self._build_vector_store()\n return vector_store\n" + "value": "from loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of 
metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n info=\"Allows either an embedding model or an Astra Vectorize configuration.\", # TODO: This should be optional, but need to refactor langchain-astradb first.\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options)\n }\n collection_embedding_api_key = self.embedding.get(\"collection_embedding_api_key\")\n if collection_embedding_api_key:\n embedding_dict[\"collection_embedding_api_key\"] = collection_embedding_api_key\n\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self._add_documents_to_vector_store(vector_store)\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self):\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n elif self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n else:\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def search_documents(self) -> list[Data]:\n vector_store = 
self.build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n" }, "collection_indexing_policy": { "advanced": true, @@ -1872,7 +1872,7 @@ "show": true, "title_case": false, "type": "code", - "value": "from langchain_core.vectorstores import VectorStore\nfrom loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n _cached_vectorstore: VectorStore | None = None\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n 
display_name=\"Bulk Insert Overwrite Concurrency\",\n info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n info=\"Allows either an embedding model or an Astra Vectorize configuration.\", # TODO: This should be optional, but need to refactor langchain-astradb first.\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n def _build_vector_store(self):\n # cache the vector store to avoid re-initializing and ingest data again\n if self._cached_vectorstore:\n return self._cached_vectorstore\n\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options)\n }\n collection_embedding_api_key = self.embedding.get(\"collection_embedding_api_key\")\n if collection_embedding_api_key:\n embedding_dict[\"collection_embedding_api_key\"] = collection_embedding_api_key\n\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self._add_documents_to_vector_store(vector_store)\n\n self._cached_vectorstore = vector_store\n\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self):\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n elif self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n else:\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def 
search_documents(self) -> list[Data]:\n vector_store = self._build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n\n def build_vector_store(self):\n vector_store = self._build_vector_store()\n return vector_store\n" + "value": "from loguru import logger\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers import docs_to_data\nfrom langflow.inputs import DictInput, FloatInput\nfrom langflow.io import (\n BoolInput,\n DataInput,\n DropdownInput,\n HandleInput,\n IntInput,\n MultilineInput,\n SecretStrInput,\n StrInput,\n)\nfrom langflow.schema import Data\n\n\nclass AstraVectorStoreComponent(LCVectorStoreComponent):\n display_name: str = \"Astra DB\"\n description: str = \"Implementation of Vector Store using Astra DB with search capabilities\"\n documentation: str = \"https://python.langchain.com/docs/integrations/vectorstores/astradb\"\n name = \"AstraDB\"\n icon: str = \"AstraDB\"\n\n inputs = [\n StrInput(\n name=\"collection_name\",\n display_name=\"Collection Name\",\n info=\"The name of the collection within Astra DB where the vectors will be stored.\",\n required=True,\n ),\n SecretStrInput(\n name=\"token\",\n display_name=\"Astra DB Application Token\",\n info=\"Authentication token for accessing Astra DB.\",\n value=\"ASTRA_DB_APPLICATION_TOKEN\",\n required=True,\n ),\n SecretStrInput(\n name=\"api_endpoint\",\n display_name=\"API Endpoint\",\n info=\"API endpoint URL for the Astra DB service.\",\n value=\"ASTRA_DB_API_ENDPOINT\",\n required=True,\n ),\n MultilineInput(\n name=\"search_input\",\n display_name=\"Search Input\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Optional namespace within Astra DB to use for the collection.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"metric\",\n display_name=\"Metric\",\n info=\"Optional distance metric for vector comparisons in the vector store.\",\n options=[\"cosine\", \"dot_product\", \"euclidean\"],\n advanced=True,\n ),\n IntInput(\n name=\"batch_size\",\n display_name=\"Batch Size\",\n info=\"Optional number of data to process in a single batch.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_batch_concurrency\",\n display_name=\"Bulk Insert Batch Concurrency\",\n info=\"Optional concurrency level for bulk insert operations.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_insert_overwrite_concurrency\",\n display_name=\"Bulk Insert Overwrite Concurrency\",\n 
info=\"Optional concurrency level for bulk insert operations that overwrite existing data.\",\n advanced=True,\n ),\n IntInput(\n name=\"bulk_delete_concurrency\",\n display_name=\"Bulk Delete Concurrency\",\n info=\"Optional concurrency level for bulk delete operations.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"setup_mode\",\n display_name=\"Setup Mode\",\n info=\"Configuration mode for setting up the vector store, with options like 'Sync', 'Async', or 'Off'.\",\n options=[\"Sync\", \"Async\", \"Off\"],\n advanced=True,\n value=\"Sync\",\n ),\n BoolInput(\n name=\"pre_delete_collection\",\n display_name=\"Pre Delete Collection\",\n info=\"Boolean flag to determine whether to delete the collection before creating a new one.\",\n advanced=True,\n ),\n StrInput(\n name=\"metadata_indexing_include\",\n display_name=\"Metadata Indexing Include\",\n info=\"Optional list of metadata fields to include in the indexing.\",\n advanced=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding or Astra Vectorize\",\n input_types=[\"Embeddings\", \"dict\"],\n info=\"Allows either an embedding model or an Astra Vectorize configuration.\", # TODO: This should be optional, but need to refactor langchain-astradb first.\n ),\n StrInput(\n name=\"metadata_indexing_exclude\",\n display_name=\"Metadata Indexing Exclude\",\n info=\"Optional list of metadata fields to exclude from the indexing.\",\n advanced=True,\n ),\n StrInput(\n name=\"collection_indexing_policy\",\n display_name=\"Collection Indexing Policy\",\n info=\"Optional dictionary defining the indexing policy for the collection.\",\n advanced=True,\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n advanced=True,\n value=4,\n ),\n DropdownInput(\n name=\"search_type\",\n display_name=\"Search Type\",\n info=\"Search type to use\",\n options=[\"Similarity\", \"Similarity with score threshold\", \"MMR (Max Marginal Relevance)\"],\n value=\"Similarity\",\n advanced=True,\n ),\n FloatInput(\n name=\"search_score_threshold\",\n display_name=\"Search Score Threshold\",\n info=\"Minimum similarity score threshold for search results. (when using 'Similarity with score threshold')\",\n value=0,\n advanced=True,\n ),\n DictInput(\n name=\"search_filter\",\n display_name=\"Search Metadata Filter\",\n info=\"Optional dictionary of filters to apply to the search query.\",\n advanced=True,\n is_list=True,\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self):\n try:\n from langchain_astradb import AstraDBVectorStore\n from langchain_astradb.utils.astradb import SetupMode\n except ImportError:\n raise ImportError(\n \"Could not import langchain Astra DB integration package. 
\"\n \"Please install it with `pip install langchain-astradb`.\"\n )\n\n try:\n if not self.setup_mode:\n self.setup_mode = self._inputs[\"setup_mode\"].options[0]\n\n setup_mode_value = SetupMode[self.setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {self.setup_mode}\")\n\n if not isinstance(self.embedding, dict):\n embedding_dict = {\"embedding\": self.embedding}\n else:\n from astrapy.info import CollectionVectorServiceOptions\n\n dict_options = self.embedding.get(\"collection_vector_service_options\", {})\n dict_options[\"authentication\"] = {\n k: v for k, v in dict_options.get(\"authentication\", {}).items() if k and v\n }\n dict_options[\"parameters\"] = {k: v for k, v in dict_options.get(\"parameters\", {}).items() if k and v}\n embedding_dict = {\n \"collection_vector_service_options\": CollectionVectorServiceOptions.from_dict(dict_options)\n }\n collection_embedding_api_key = self.embedding.get(\"collection_embedding_api_key\")\n if collection_embedding_api_key:\n embedding_dict[\"collection_embedding_api_key\"] = collection_embedding_api_key\n\n vector_store_kwargs = {\n **embedding_dict,\n \"collection_name\": self.collection_name,\n \"token\": self.token,\n \"api_endpoint\": self.api_endpoint,\n \"namespace\": self.namespace or None,\n \"metric\": self.metric or None,\n \"batch_size\": self.batch_size or None,\n \"bulk_insert_batch_concurrency\": self.bulk_insert_batch_concurrency or None,\n \"bulk_insert_overwrite_concurrency\": self.bulk_insert_overwrite_concurrency or None,\n \"bulk_delete_concurrency\": self.bulk_delete_concurrency or None,\n \"setup_mode\": setup_mode_value,\n \"pre_delete_collection\": self.pre_delete_collection or False,\n }\n\n if self.metadata_indexing_include:\n vector_store_kwargs[\"metadata_indexing_include\"] = self.metadata_indexing_include\n elif self.metadata_indexing_exclude:\n vector_store_kwargs[\"metadata_indexing_exclude\"] = self.metadata_indexing_exclude\n elif self.collection_indexing_policy:\n vector_store_kwargs[\"collection_indexing_policy\"] = self.collection_indexing_policy\n\n try:\n vector_store = AstraDBVectorStore(**vector_store_kwargs)\n except Exception as e:\n raise ValueError(f\"Error initializing AstraDBVectorStore: {str(e)}\") from e\n\n self._add_documents_to_vector_store(vector_store)\n return vector_store\n\n def _add_documents_to_vector_store(self, vector_store):\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n raise ValueError(\"Vector Store Inputs must be Data objects.\")\n\n if documents:\n logger.debug(f\"Adding {len(documents)} documents to the Vector Store.\")\n try:\n vector_store.add_documents(documents)\n except Exception as e:\n raise ValueError(f\"Error adding documents to AstraDBVectorStore: {str(e)}\") from e\n else:\n logger.debug(\"No documents to add to the Vector Store.\")\n\n def _map_search_type(self):\n if self.search_type == \"Similarity with score threshold\":\n return \"similarity_score_threshold\"\n elif self.search_type == \"MMR (Max Marginal Relevance)\":\n return \"mmr\"\n else:\n return \"similarity\"\n\n def _build_search_args(self):\n args = {\n \"k\": self.number_of_results,\n \"score_threshold\": self.search_score_threshold,\n }\n\n if self.search_filter:\n clean_filter = {k: v for k, v in self.search_filter.items() if k and v}\n if len(clean_filter) > 0:\n args[\"filter\"] = clean_filter\n return args\n\n def search_documents(self) -> list[Data]:\n vector_store = 
self.build_vector_store()\n\n logger.debug(f\"Search input: {self.search_input}\")\n logger.debug(f\"Search type: {self.search_type}\")\n logger.debug(f\"Number of results: {self.number_of_results}\")\n\n if self.search_input and isinstance(self.search_input, str) and self.search_input.strip():\n try:\n search_type = self._map_search_type()\n search_args = self._build_search_args()\n\n docs = vector_store.search(query=self.search_input, search_type=search_type, **search_args)\n except Exception as e:\n raise ValueError(f\"Error performing search in AstraDBVectorStore: {str(e)}\") from e\n\n logger.debug(f\"Retrieved documents: {len(docs)}\")\n\n data = docs_to_data(docs)\n logger.debug(f\"Converted documents to data: {len(data)}\")\n self.status = data\n return data\n else:\n logger.debug(\"No search input provided. Skipping search.\")\n return []\n\n def get_retriever_kwargs(self):\n search_args = self._build_search_args()\n return {\n \"search_type\": self._map_search_type(),\n \"search_kwargs\": search_args,\n }\n" }, "collection_indexing_policy": { "advanced": true, @@ -3168,7 +3168,7 @@ "show": true, "title_case": false, "type": "code", - "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" + "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. 
If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n" }, "input_value": { "advanced": false,