diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Agent Flow.json b/src/backend/base/langflow/initial_setup/starter_projects/Agent Flow.json
new file mode 100644
index 000000000000..2cebb6049a5f
--- /dev/null
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Agent Flow.json
@@ -0,0 +1,1424 @@
+{
+ "data": {
+ "edges": [
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "ToolCallingAgent",
+ "id": "ToolCallingAgent-hz7Eb",
+ "name": "response",
+ "output_types": [
+ "Message"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-INBLf",
+ "inputTypes": [
+ "Message"
+ ],
+ "type": "str"
+ }
+ },
+ "id": "reactflow__edge-ToolCallingAgent-hz7Eb{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-hz7Ebœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-INBLf{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-INBLfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
+ "source": "ToolCallingAgent-hz7Eb",
+ "sourceHandle": "{œdataTypeœ: œToolCallingAgentœ, œidœ: œToolCallingAgent-hz7Ebœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}",
+ "target": "ChatOutput-INBLf",
+ "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-INBLfœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-JusJ7",
+ "name": "model_output",
+ "output_types": [
+ "LanguageModel"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "llm",
+ "id": "ToolCallingAgent-hz7Eb",
+ "inputTypes": [
+ "LanguageModel"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-OpenAIModel-JusJ7{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-JusJ7œ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-ToolCallingAgent-hz7Eb{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-hz7Ebœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}",
+ "source": "OpenAIModel-JusJ7",
+ "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-JusJ7œ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}",
+ "target": "ToolCallingAgent-hz7Eb",
+ "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œToolCallingAgent-hz7Ebœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "CalculatorTool",
+ "id": "CalculatorTool-CN8yi",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-hz7Eb",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-CalculatorTool-CN8yi{œdataTypeœ:œCalculatorToolœ,œidœ:œCalculatorTool-CN8yiœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-hz7Eb{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-hz7Ebœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "CalculatorTool-CN8yi",
+ "sourceHandle": "{œdataTypeœ: œCalculatorToolœ, œidœ: œCalculatorTool-CN8yiœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-hz7Eb",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-hz7Ebœ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "ChatInput",
+ "id": "ChatInput-oZ2ae",
+ "name": "message",
+ "output_types": [
+ "Message"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ToolCallingAgent-hz7Eb",
+ "inputTypes": [
+ "Message"
+ ],
+ "type": "str"
+ }
+ },
+ "id": "reactflow__edge-ChatInput-oZ2ae{œdataTypeœ:œChatInputœ,œidœ:œChatInput-oZ2aeœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-hz7Eb{œfieldNameœ:œinput_valueœ,œidœ:œToolCallingAgent-hz7Ebœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
+ "source": "ChatInput-oZ2ae",
+ "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-oZ2aeœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
+ "target": "ToolCallingAgent-hz7Eb",
+ "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œToolCallingAgent-hz7Ebœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "PythonREPLTool",
+ "id": "PythonREPLTool-Lq9f4",
+ "name": "tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-hz7Eb",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-PythonREPLTool-Lq9f4{œdataTypeœ:œPythonREPLToolœ,œidœ:œPythonREPLTool-Lq9f4œ,œnameœ:œtoolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-hz7Eb{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-hz7Ebœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "PythonREPLTool-Lq9f4",
+ "sourceHandle": "{œdataTypeœ: œPythonREPLToolœ, œidœ: œPythonREPLTool-Lq9f4œ, œnameœ: œtoolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-hz7Eb",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-hz7Ebœ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ }
+ ],
+ "nodes": [
+ {
+ "data": {
+ "id": "ChatInput-oZ2ae",
+ "node": {
+ "base_classes": [
+ "Message"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Get chat inputs from the Playground.",
+ "display_name": "Chat Input",
+ "documentation": "",
+ "edited": false,
+ "field_order": [
+ "input_value",
+ "should_store_message",
+ "sender",
+ "sender_name",
+ "session_id",
+ "files"
+ ],
+ "frozen": false,
+ "icon": "ChatInput",
+ "lf_version": "1.0.16",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Message",
+ "method": "message_response",
+ "name": "message",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n"
+ },
+ "files": {
+ "_input_type": "FileInput",
+ "advanced": true,
+ "display_name": "Files",
+ "dynamic": false,
+ "fileTypes": [
+ "txt",
+ "md",
+ "mdx",
+ "csv",
+ "json",
+ "yaml",
+ "yml",
+ "xml",
+ "html",
+ "htm",
+ "pdf",
+ "docx",
+ "py",
+ "sh",
+ "sql",
+ "js",
+ "ts",
+ "tsx",
+ "jpg",
+ "jpeg",
+ "png",
+ "bmp",
+ "image"
+ ],
+ "file_path": "",
+ "info": "Files to be sent with the message.",
+ "list": true,
+ "name": "files",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "file",
+ "value": ""
+ },
+ "input_value": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "Text",
+ "dynamic": false,
+ "info": "Message to be passed as input.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+              "value": "write a short python script to say hello world"
+ },
+ "sender": {
+ "_input_type": "DropdownInput",
+ "advanced": true,
+ "combobox": false,
+ "display_name": "Sender Type",
+ "dynamic": false,
+ "info": "Type of sender.",
+ "name": "sender",
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "User"
+ },
+ "sender_name": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Sender Name",
+ "dynamic": false,
+ "info": "Name of the sender.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "sender_name",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "User"
+ },
+ "session_id": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Session ID",
+ "dynamic": false,
+ "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "session_id",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "should_store_message": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Store Messages",
+ "dynamic": false,
+ "info": "Store the message in the history.",
+ "list": false,
+ "name": "should_store_message",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ChatInput"
+ },
+ "dragging": false,
+ "height": 298,
+ "id": "ChatInput-oZ2ae",
+ "position": {
+ "x": 1760.192972923414,
+ "y": -191.51901724049213
+ },
+ "positionAbsolute": {
+ "x": 1760.192972923414,
+ "y": -191.51901724049213
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "ChatOutput-INBLf",
+ "node": {
+ "base_classes": [
+ "Message"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Display a chat message in the Playground.",
+ "display_name": "Chat Output",
+ "documentation": "",
+ "edited": false,
+ "field_order": [
+ "input_value",
+ "should_store_message",
+ "sender",
+ "sender_name",
+ "session_id",
+ "data_template"
+ ],
+ "frozen": false,
+ "icon": "ChatOutput",
+ "lf_version": "1.0.16",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Message",
+ "method": "message_response",
+ "name": "message",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n"
+ },
+ "data_template": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Data Template",
+ "dynamic": false,
+ "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "data_template",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "{text}"
+ },
+ "input_value": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Text",
+ "dynamic": false,
+ "info": "Message to be passed as output.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "sender": {
+ "_input_type": "DropdownInput",
+ "advanced": true,
+ "combobox": false,
+ "display_name": "Sender Type",
+ "dynamic": false,
+ "info": "Type of sender.",
+ "name": "sender",
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "Machine"
+ },
+ "sender_name": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Sender Name",
+ "dynamic": false,
+ "info": "Name of the sender.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "sender_name",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "AI"
+ },
+ "session_id": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Session ID",
+ "dynamic": false,
+ "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "session_id",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "should_store_message": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Store Messages",
+ "dynamic": false,
+ "info": "Store the message in the history.",
+ "list": false,
+ "name": "should_store_message",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ChatOutput"
+ },
+ "dragging": false,
+ "height": 298,
+ "id": "ChatOutput-INBLf",
+ "position": {
+ "x": 3968.8870036313238,
+ "y": 627.770746142633
+ },
+ "positionAbsolute": {
+ "x": 3968.8870036313238,
+ "y": 627.770746142633
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "description": "Generates text using OpenAI LLMs.",
+ "display_name": "OpenAI",
+ "id": "OpenAIModel-JusJ7",
+ "node": {
+ "base_classes": [
+ "LanguageModel",
+ "Message"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Generates text using OpenAI LLMs.",
+ "display_name": "OpenAI",
+ "documentation": "",
+ "edited": false,
+ "field_order": [
+ "input_value",
+ "system_message",
+ "stream",
+ "max_tokens",
+ "model_kwargs",
+ "json_mode",
+ "output_schema",
+ "model_name",
+ "openai_api_base",
+ "api_key",
+ "temperature",
+ "seed"
+ ],
+ "frozen": false,
+ "icon": "OpenAI",
+ "lf_version": "1.0.16",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Text",
+ "method": "text_response",
+ "name": "text_output",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Language Model",
+ "method": "build_model",
+ "name": "model_output",
+ "selected": "LanguageModel",
+ "types": [
+ "LanguageModel"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "api_key": {
+ "_input_type": "SecretStrInput",
+ "advanced": false,
+ "display_name": "OpenAI API Key",
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "input_types": [
+ "Message"
+ ],
+ "load_from_db": true,
+ "name": "api_key",
+ "password": true,
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "type": "str",
+ "value": ""
+ },
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. 
You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, 
BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n"
+ },
+ "input_value": {
+ "_input_type": "MessageInput",
+ "advanced": false,
+ "display_name": "Input",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "json_mode": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "JSON Mode",
+ "dynamic": false,
+ "info": "If True, it will output JSON regardless of passing a schema.",
+ "list": false,
+ "name": "json_mode",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": false
+ },
+ "max_tokens": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Max Tokens",
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "list": false,
+ "name": "max_tokens",
+ "placeholder": "",
+ "range_spec": {
+ "max": 128000,
+ "min": 0,
+ "step": 0.1,
+ "step_type": "float"
+ },
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": ""
+ },
+ "model_kwargs": {
+ "_input_type": "DictInput",
+ "advanced": true,
+ "display_name": "Model Kwargs",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "model_kwargs",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "type": "dict",
+ "value": {}
+ },
+ "model_name": {
+ "_input_type": "DropdownInput",
+ "advanced": false,
+ "combobox": false,
+ "display_name": "Model Name",
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "name": "model_name",
+ "options": [
+ "gpt-4o-mini",
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-4",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "gpt-4o-mini"
+ },
+ "openai_api_base": {
+ "_input_type": "StrInput",
+ "advanced": true,
+ "display_name": "OpenAI API Base",
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "list": false,
+ "load_from_db": false,
+ "name": "openai_api_base",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "output_schema": {
+ "_input_type": "DictInput",
+ "advanced": true,
+ "display_name": "Schema",
+ "dynamic": false,
+ "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.",
+ "list": true,
+ "name": "output_schema",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "type": "dict",
+ "value": {}
+ },
+ "seed": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Seed",
+ "dynamic": false,
+ "info": "The seed controls the reproducibility of the job.",
+ "list": false,
+ "name": "seed",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": 1
+ },
+ "stream": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Stream",
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "list": false,
+ "name": "stream",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": false
+ },
+ "system_message": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "System Message",
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "system_message",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "temperature": {
+ "_input_type": "FloatInput",
+ "advanced": false,
+ "display_name": "Temperature",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "temperature",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "float",
+ "value": 0.1
+ }
+ }
+ },
+ "type": "OpenAIModel"
+ },
+ "dragging": false,
+ "height": 601,
+ "id": "OpenAIModel-JusJ7",
+ "position": {
+ "x": 2538.9919009235173,
+ "y": 1206.8619086167491
+ },
+ "positionAbsolute": {
+ "x": 2538.9919009235173,
+ "y": 1206.8619086167491
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "ToolCallingAgent-hz7Eb",
+ "node": {
+ "base_classes": [
+ "AgentExecutor",
+ "Message"
+ ],
+ "beta": true,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Agent that uses tools to perform the given task.",
+ "display_name": "Agent",
+ "documentation": "",
+ "edited": true,
+ "field_order": [
+ "input_value",
+ "handle_parsing_errors",
+ "verbose",
+ "max_iterations",
+ "tools",
+ "llm",
+ "system_prompt",
+ "user_prompt",
+ "chat_history"
+ ],
+ "frozen": false,
+ "icon": "bot-message-square",
+ "lf_version": "1.0.16",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Agent",
+ "hidden": true,
+ "method": "build_agent",
+ "name": "agent",
+ "selected": "AgentExecutor",
+ "types": [
+ "AgentExecutor"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Response",
+ "method": "message_response",
+ "name": "response",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "chat_history": {
+ "_input_type": "DataInput",
+ "advanced": true,
+ "display_name": "Chat History",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Data"
+ ],
+ "list": true,
+ "name": "chat_history",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from typing import Optional, List\n\nfrom langchain.agents import create_tool_calling_agent\nfrom langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.inputs import MultilineInput\nfrom langflow.inputs.inputs import HandleInput, DataInput\nfrom langflow.schema import Data\n\n\nclass ToolCallingAgentComponent(LCToolsAgentComponent):\n display_name: str = \"Tool Calling Agent\"\n description: str = \"Agent that uses tools\"\n icon = \"bot-message-square\"\n beta = True\n name = \"ToolCallingAgent\"\n\n inputs = LCToolsAgentComponent._base_inputs + [\n HandleInput(name=\"llm\", display_name=\"Language Model\", input_types=[\"LanguageModel\"], required=True),\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"System Prompt\",\n info=\"System prompt for the agent.\",\n value=\"You are a helpful assistant\",\n ),\n MultilineInput(\n name=\"user_prompt\", display_name=\"Prompt\", info=\"This prompt must contain 'input' key.\", value=\"{input}\"\n ),\n DataInput(name=\"chat_history\", display_name=\"Chat History\", is_list=True, advanced=True),\n ]\n\n def get_chat_history_data(self) -> Optional[List[Data]]:\n return self.chat_history\n\n def create_agent_runnable(self):\n if \"input\" not in self.user_prompt:\n raise ValueError(\"Prompt must contain 'input' key.\")\n messages = [\n (\"system\", self.system_prompt),\n (\"placeholder\", \"{chat_history}\"),\n HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=[\"input\"], template=self.user_prompt)),\n (\"placeholder\", \"{agent_scratchpad}\"),\n ]\n prompt = ChatPromptTemplate.from_messages(messages)\n return create_tool_calling_agent(self.llm, self.tools, prompt)\n"
+ },
+ "handle_parsing_errors": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Handle Parse Errors",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "handle_parsing_errors",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ },
+ "input_value": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Input",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "llm": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Language Model",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "LanguageModel"
+ ],
+ "list": false,
+ "name": "llm",
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "max_iterations": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Max Iterations",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "max_iterations",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": 15
+ },
+ "system_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "System Prompt",
+ "dynamic": false,
+ "info": "System prompt for the agent.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "system_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "You are an Amazing Agent that can use the tools provided to you and answer the question "
+ },
+ "tools": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Tools",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Tool",
+ "BaseTool"
+ ],
+ "list": true,
+ "load_from_db": false,
+ "name": "tools",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "user_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "Prompt",
+ "dynamic": false,
+ "info": "This prompt must contain 'input' key.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "user_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "{input}"
+ },
+ "verbose": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Verbose",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "verbose",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ToolCallingAgent"
+ },
+ "dragging": false,
+ "height": 594,
+ "id": "ToolCallingAgent-hz7Eb",
+ "position": {
+ "x": 3276.3854573966964,
+ "y": 516.3304705434241
+ },
+ "positionAbsolute": {
+ "x": 3276.3854573966964,
+ "y": 516.3304705434241
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "CalculatorTool-CN8yi",
+ "node": {
+ "base_classes": [
+ "Data",
+ "list",
+ "Sequence",
+ "Tool"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Perform basic arithmetic operations on a given expression.",
+ "display_name": "Calculator",
+ "documentation": "",
+ "edited": true,
+ "field_order": [
+ "expression"
+ ],
+ "frozen": false,
+ "icon": "calculator",
+ "lf_version": "1.0.16",
+ "official": false,
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Data",
+ "method": "run_model",
+ "name": "api_run_model",
+ "selected": "Data",
+ "types": [
+ "Data",
+ "list"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Tool",
+ "method": "build_tool",
+ "name": "api_build_tool",
+ "selected": "Tool",
+ "types": [
+ "Tool",
+ "Sequence"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "import ast\r\nimport operator\r\nfrom typing import List\r\nfrom pydantic import BaseModel, Field\r\nfrom langflow.base.langchain_utilities.model import LCToolComponent\r\nfrom langflow.inputs import MessageTextInput\r\nfrom langflow.schema import Data\r\nfrom langflow.field_typing import Tool\r\nfrom langchain.tools import StructuredTool\r\n\r\nclass CalculatorToolComponent(LCToolComponent):\r\n display_name = \"Calculator\"\r\n description = \"Perform basic arithmetic operations on a given expression.\"\r\n icon = \"calculator\"\r\n name = \"CalculatorTool\"\r\n\r\n inputs = [\r\n MessageTextInput(\r\n name=\"expression\",\r\n display_name=\"Expression\",\r\n info=\"The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').\",\r\n ),\r\n ]\r\n\r\n class CalculatorToolSchema(BaseModel):\r\n expression: str = Field(..., description=\"The arithmetic expression to evaluate.\")\r\n\r\n def run_model(self) -> List[Data]:\r\n return self._evaluate_expression(self.expression)\r\n\r\n def build_tool(self) -> Tool:\r\n return StructuredTool.from_function(\r\n name=\"calculator\",\r\n description=\"Evaluate basic arithmetic expressions. 
Input should be a string containing the expression.\",\r\n func=self._evaluate_expression,\r\n args_schema=self.CalculatorToolSchema,\r\n )\r\n\r\n def _evaluate_expression(self, expression: str) -> List[Data]:\r\n try:\r\n # Define the allowed operators\r\n operators = {\r\n ast.Add: operator.add,\r\n ast.Sub: operator.sub,\r\n ast.Mult: operator.mul,\r\n ast.Div: operator.truediv,\r\n ast.Pow: operator.pow,\r\n }\r\n\r\n def eval_expr(node):\r\n if isinstance(node, ast.Num):\r\n return node.n\r\n elif isinstance(node, ast.BinOp):\r\n return operators[type(node.op)](eval_expr(node.left), eval_expr(node.right))\r\n elif isinstance(node, ast.UnaryOp):\r\n return operators[type(node.op)](eval_expr(node.operand))\r\n else:\r\n raise TypeError(node)\r\n\r\n # Parse the expression and evaluate it\r\n tree = ast.parse(expression, mode='eval')\r\n result = eval_expr(tree.body)\r\n\r\n # Format the result to a reasonable number of decimal places\r\n formatted_result = f\"{result:.6f}\".rstrip('0').rstrip('.')\r\n\r\n self.status = formatted_result\r\n return [Data(data={\"result\": formatted_result})]\r\n\r\n except (SyntaxError, TypeError, KeyError) as e:\r\n error_message = f\"Invalid expression: {str(e)}\"\r\n self.status = error_message\r\n return [Data(data={\"error\": error_message})]\r\n except ZeroDivisionError:\r\n error_message = \"Error: Division by zero\"\r\n self.status = error_message\r\n return [Data(data={\"error\": error_message})]\r\n except Exception as e:\r\n error_message = f\"Error: {str(e)}\"\r\n self.status = error_message\r\n return [Data(data={\"error\": error_message})]"
+ },
+ "expression": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Expression",
+ "dynamic": false,
+ "info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "expression",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "2+2"
+ }
+ }
+ },
+ "type": "CalculatorTool"
+ },
+ "dragging": false,
+ "height": 371,
+ "id": "CalculatorTool-CN8yi",
+ "position": {
+ "x": 2330.062076024461,
+ "y": 429.6717346334192
+ },
+ "positionAbsolute": {
+ "x": 2330.062076024461,
+ "y": 429.6717346334192
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "PythonREPLTool-Lq9f4",
+ "node": {
+ "base_classes": [
+ "BaseTool",
+ "Generic",
+ "object",
+ "Runnable",
+ "RunnableSerializable",
+ "Serializable",
+ "Tool"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {
+ "description": null,
+ "global_imports": null,
+ "name": null
+ },
+ "description": "A tool for running Python code in a REPL environment.",
+ "display_name": "Python REPL Tool",
+ "documentation": "",
+ "edited": false,
+ "field_order": [],
+ "frozen": false,
+ "lf_version": "1.0.16",
+ "output_types": [
+ "Tool"
+ ],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Tool",
+ "hidden": null,
+ "method": null,
+ "name": "tool",
+ "selected": "Tool",
+ "types": [
+ "Tool"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "CustomComponent",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "import importlib\nfrom langchain_experimental.utilities import PythonREPL\n\nfrom langflow.base.tools.base import build_status_from_tool\nfrom langflow.custom import CustomComponent\nfrom langchain_core.tools import Tool\n\n\nclass PythonREPLToolComponent(CustomComponent):\n display_name = \"Python REPL Tool\"\n description = \"A tool for running Python code in a REPL environment.\"\n name = \"PythonREPLTool\"\n\n def build_config(self):\n return {\n \"name\": {\"display_name\": \"Name\", \"info\": \"The name of the tool.\"},\n \"description\": {\"display_name\": \"Description\", \"info\": \"A description of the tool.\"},\n \"global_imports\": {\n \"display_name\": \"Global Imports\",\n \"info\": \"A list of modules to import globally, e.g. ['math', 'numpy'].\",\n },\n }\n\n def get_globals(self, globals: list[str]) -> dict:\n \"\"\"\n Retrieves the global variables from the specified modules.\n\n Args:\n globals (list[str]): A list of module names.\n\n Returns:\n dict: A dictionary containing the global variables from the specified modules.\n \"\"\"\n global_dict = {}\n for module in globals:\n try:\n imported_module = importlib.import_module(module)\n global_dict[imported_module.__name__] = imported_module\n except ImportError:\n raise ImportError(f\"Could not import module {module}\")\n return global_dict\n\n def build(\n self,\n name: str = \"python_repl\",\n description: str = \"A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`.\",\n global_imports: list[str] = [\"math\"],\n ) -> Tool:\n \"\"\"\n Builds a Python REPL tool.\n\n Args:\n name (str, optional): The name of the tool. Defaults to \"python_repl\".\n description (str, optional): The description of the tool. Defaults to \"A Python shell. Use this to execute python commands. Input should be a valid python command. 
If you want to see the output of a value, you should print it out with `print(...)`. \".\n global_imports (list[str], optional): A list of global imports to be available in the Python REPL. Defaults to [\"math\"].\n\n Returns:\n Tool: The built Python REPL tool.\n \"\"\"\n _globals = self.get_globals(global_imports)\n python_repl = PythonREPL(_globals=_globals)\n tool = Tool(\n name=name,\n description=description,\n func=python_repl.run,\n )\n self.status = build_status_from_tool(tool)\n return tool\n"
+ },
+ "description": {
+ "advanced": false,
+ "display_name": "Description",
+ "dynamic": false,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "A description of the tool.",
+ "input_types": [
+ "Text"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": false,
+ "name": "description",
+ "password": false,
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "type": "str",
+ "value": "A Python shell. Use this to execute python commands. Input should be a valid python command. If you want to see the output of a value, you should print it out with `print(...)`."
+ },
+ "global_imports": {
+ "advanced": false,
+ "display_name": "Global Imports",
+ "dynamic": false,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "A list of modules to import globally, e.g. ['math', 'numpy'].",
+ "input_types": [
+ "Text"
+ ],
+ "list": true,
+ "load_from_db": false,
+ "multiline": false,
+ "name": "global_imports",
+ "password": false,
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "type": "str",
+ "value": [
+ "math"
+ ]
+ },
+ "name": {
+ "advanced": false,
+ "display_name": "Name",
+ "dynamic": false,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "The name of the tool.",
+ "input_types": [
+ "Text"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": false,
+ "name": "name",
+ "password": false,
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "type": "str",
+ "value": "python_repl"
+ }
+ }
+ },
+ "type": "PythonREPLTool"
+ },
+ "dragging": false,
+ "height": 498,
+ "id": "PythonREPLTool-Lq9f4",
+ "position": {
+ "x": 1913.7230985868182,
+ "y": 692.706669085293
+ },
+ "positionAbsolute": {
+ "x": 1913.7230985868182,
+ "y": 692.706669085293
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ }
+ ],
+ "viewport": {
+ "x": -754.0306037880273,
+ "y": 162.78671921468404,
+ "zoom": 0.4165803465525388
+ }
+ },
+ "description": "Single Agent Flow to get you started. This flow contains a calculator and a Python REPL tool, that could be used by our tool calling agent.",
+ "endpoint_name": null,
+ "id": "b4267655-485a-44eb-9232-0bc120086418",
+ "is_component": false,
+ "last_tested_version": "1.0.17",
+ "name": "Simple Agent"
+}
\ No newline at end of file
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json b/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json
index f946f5b8ac48..db2db6158a2a 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Complex Agent.json
@@ -4179,5 +4179,5 @@
"id": "07cd68d7-d864-4cfe-9901-0ccc61d6e80d",
"is_component": false,
"last_tested_version": "1.0.9",
- "name": "Complex Agent"
+ "name": "Dynamic Agent"
}
\ No newline at end of file
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json b/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json
new file mode 100644
index 000000000000..e89f43ac10f8
--- /dev/null
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Travel Planning Agents.json
@@ -0,0 +1,2338 @@
+{
+ "data": {
+ "edges": [
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-2o0gd",
+ "name": "model_output",
+ "output_types": [
+ "LanguageModel"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "llm",
+ "id": "ToolCallingAgent-vCq4v",
+ "inputTypes": [
+ "LanguageModel"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-OpenAIModel-2o0gd{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-2o0gdœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-ToolCallingAgent-vCq4v{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-vCq4vœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}",
+ "source": "OpenAIModel-2o0gd",
+ "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-2o0gdœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}",
+ "target": "ToolCallingAgent-vCq4v",
+ "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œToolCallingAgent-vCq4vœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "ChatInput",
+ "id": "ChatInput-z8SS4",
+ "name": "message",
+ "output_types": [
+ "Message"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ToolCallingAgent-vCq4v",
+ "inputTypes": [
+ "Message"
+ ],
+ "type": "str"
+ }
+ },
+ "id": "reactflow__edge-ChatInput-z8SS4{œdataTypeœ:œChatInputœ,œidœ:œChatInput-z8SS4œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-vCq4v{œfieldNameœ:œinput_valueœ,œidœ:œToolCallingAgent-vCq4vœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
+ "source": "ChatInput-z8SS4",
+ "sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-z8SS4œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
+ "target": "ToolCallingAgent-vCq4v",
+ "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œToolCallingAgent-vCq4vœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "ToolCallingAgent",
+ "id": "ToolCallingAgent-rT7Y8",
+ "name": "response",
+ "output_types": [
+ "Message"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ToolCallingAgent-Zo0ES",
+ "inputTypes": [
+ "Message"
+ ],
+ "type": "str"
+ }
+ },
+ "id": "reactflow__edge-ToolCallingAgent-rT7Y8{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-rT7Y8œ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-Zo0ES{œfieldNameœ:œinput_valueœ,œidœ:œToolCallingAgent-Zo0ESœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
+ "source": "ToolCallingAgent-rT7Y8",
+ "sourceHandle": "{œdataTypeœ: œToolCallingAgentœ, œidœ: œToolCallingAgent-rT7Y8œ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}",
+ "target": "ToolCallingAgent-Zo0ES",
+ "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œToolCallingAgent-Zo0ESœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "ToolCallingAgent",
+ "id": "ToolCallingAgent-vCq4v",
+ "name": "response",
+ "output_types": [
+ "Message"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ToolCallingAgent-rT7Y8",
+ "inputTypes": [
+ "Message"
+ ],
+ "type": "str"
+ }
+ },
+ "id": "reactflow__edge-ToolCallingAgent-vCq4v{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-vCq4vœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ToolCallingAgent-rT7Y8{œfieldNameœ:œinput_valueœ,œidœ:œToolCallingAgent-rT7Y8œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
+ "source": "ToolCallingAgent-vCq4v",
+ "sourceHandle": "{œdataTypeœ: œToolCallingAgentœ, œidœ: œToolCallingAgent-vCq4vœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}",
+ "target": "ToolCallingAgent-rT7Y8",
+ "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œToolCallingAgent-rT7Y8œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "SearchAPI",
+ "id": "SearchAPI-BaDvF",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-vCq4v",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-SearchAPI-BaDvF{œdataTypeœ:œSearchAPIœ,œidœ:œSearchAPI-BaDvFœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-vCq4v{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-vCq4vœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "SearchAPI-BaDvF",
+ "sourceHandle": "{œdataTypeœ: œSearchAPIœ, œidœ: œSearchAPI-BaDvFœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-vCq4v",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-vCq4vœ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "url_content_fetcher",
+ "id": "url_content_fetcher-5EFR2",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-vCq4v",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-url_content_fetcher-5EFR2{œdataTypeœ:œurl_content_fetcherœ,œidœ:œurl_content_fetcher-5EFR2œ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-vCq4v{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-vCq4vœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "url_content_fetcher-5EFR2",
+ "sourceHandle": "{œdataTypeœ: œurl_content_fetcherœ, œidœ: œurl_content_fetcher-5EFR2œ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-vCq4v",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-vCq4vœ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "SearchAPI",
+ "id": "SearchAPI-BaDvF",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-rT7Y8",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-SearchAPI-BaDvF{œdataTypeœ:œSearchAPIœ,œidœ:œSearchAPI-BaDvFœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-rT7Y8{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-rT7Y8œ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "SearchAPI-BaDvF",
+ "sourceHandle": "{œdataTypeœ: œSearchAPIœ, œidœ: œSearchAPI-BaDvFœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-rT7Y8",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-rT7Y8œ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "url_content_fetcher",
+ "id": "url_content_fetcher-5EFR2",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-rT7Y8",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-url_content_fetcher-5EFR2{œdataTypeœ:œurl_content_fetcherœ,œidœ:œurl_content_fetcher-5EFR2œ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-rT7Y8{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-rT7Y8œ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "url_content_fetcher-5EFR2",
+ "sourceHandle": "{œdataTypeœ: œurl_content_fetcherœ, œidœ: œurl_content_fetcher-5EFR2œ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-rT7Y8",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-rT7Y8œ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "url_content_fetcher",
+ "id": "url_content_fetcher-5EFR2",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-Zo0ES",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-url_content_fetcher-5EFR2{œdataTypeœ:œurl_content_fetcherœ,œidœ:œurl_content_fetcher-5EFR2œ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-Zo0ES{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-Zo0ESœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "url_content_fetcher-5EFR2",
+ "sourceHandle": "{œdataTypeœ: œurl_content_fetcherœ, œidœ: œurl_content_fetcher-5EFR2œ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-Zo0ES",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-Zo0ESœ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "SearchAPI",
+ "id": "SearchAPI-BaDvF",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-Zo0ES",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-SearchAPI-BaDvF{œdataTypeœ:œSearchAPIœ,œidœ:œSearchAPI-BaDvFœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-Zo0ES{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-Zo0ESœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "SearchAPI-BaDvF",
+ "sourceHandle": "{œdataTypeœ: œSearchAPIœ, œidœ: œSearchAPI-BaDvFœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-Zo0ES",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-Zo0ESœ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "ToolCallingAgent",
+ "id": "ToolCallingAgent-Zo0ES",
+ "name": "response",
+ "output_types": [
+ "Message"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "input_value",
+ "id": "ChatOutput-zxRey",
+ "inputTypes": [
+ "Message"
+ ],
+ "type": "str"
+ }
+ },
+ "id": "reactflow__edge-ToolCallingAgent-Zo0ES{œdataTypeœ:œToolCallingAgentœ,œidœ:œToolCallingAgent-Zo0ESœ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-zxRey{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-zxReyœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
+ "source": "ToolCallingAgent-Zo0ES",
+ "sourceHandle": "{œdataTypeœ: œToolCallingAgentœ, œidœ: œToolCallingAgent-Zo0ESœ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}",
+ "target": "ChatOutput-zxRey",
+ "targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-zxReyœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-2o0gd",
+ "name": "model_output",
+ "output_types": [
+ "LanguageModel"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "llm",
+ "id": "ToolCallingAgent-Zo0ES",
+ "inputTypes": [
+ "LanguageModel"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-OpenAIModel-2o0gd{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-2o0gdœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-ToolCallingAgent-Zo0ES{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-Zo0ESœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}",
+ "source": "OpenAIModel-2o0gd",
+ "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-2o0gdœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}",
+ "target": "ToolCallingAgent-Zo0ES",
+ "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œToolCallingAgent-Zo0ESœ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "OpenAIModel",
+ "id": "OpenAIModel-2o0gd",
+ "name": "model_output",
+ "output_types": [
+ "LanguageModel"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "llm",
+ "id": "ToolCallingAgent-rT7Y8",
+ "inputTypes": [
+ "LanguageModel"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-OpenAIModel-2o0gd{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-2o0gdœ,œnameœ:œmodel_outputœ,œoutput_typesœ:[œLanguageModelœ]}-ToolCallingAgent-rT7Y8{œfieldNameœ:œllmœ,œidœ:œToolCallingAgent-rT7Y8œ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œotherœ}",
+ "source": "OpenAIModel-2o0gd",
+ "sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-2o0gdœ, œnameœ: œmodel_outputœ, œoutput_typesœ: [œLanguageModelœ]}",
+ "target": "ToolCallingAgent-rT7Y8",
+ "targetHandle": "{œfieldNameœ: œllmœ, œidœ: œToolCallingAgent-rT7Y8œ, œinputTypesœ: [œLanguageModelœ], œtypeœ: œotherœ}"
+ },
+ {
+ "className": "",
+ "data": {
+ "sourceHandle": {
+ "dataType": "CalculatorTool",
+ "id": "CalculatorTool-CYR2I",
+ "name": "api_build_tool",
+ "output_types": [
+ "Tool"
+ ]
+ },
+ "targetHandle": {
+ "fieldName": "tools",
+ "id": "ToolCallingAgent-Zo0ES",
+ "inputTypes": [
+ "Tool",
+ "BaseTool"
+ ],
+ "type": "other"
+ }
+ },
+ "id": "reactflow__edge-CalculatorTool-CYR2I{œdataTypeœ:œCalculatorToolœ,œidœ:œCalculatorTool-CYR2Iœ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-ToolCallingAgent-Zo0ES{œfieldNameœ:œtoolsœ,œidœ:œToolCallingAgent-Zo0ESœ,œinputTypesœ:[œToolœ,œBaseToolœ],œtypeœ:œotherœ}",
+ "source": "CalculatorTool-CYR2I",
+ "sourceHandle": "{œdataTypeœ: œCalculatorToolœ, œidœ: œCalculatorTool-CYR2Iœ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
+ "target": "ToolCallingAgent-Zo0ES",
+ "targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œToolCallingAgent-Zo0ESœ, œinputTypesœ: [œToolœ, œBaseToolœ], œtypeœ: œotherœ}"
+ }
+ ],
+ "nodes": [
+ {
+ "data": {
+ "id": "ChatInput-z8SS4",
+ "node": {
+ "base_classes": [
+ "Message"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Get chat inputs from the Playground.",
+ "display_name": "Chat Input",
+ "documentation": "",
+ "edited": false,
+ "field_order": [
+ "input_value",
+ "should_store_message",
+ "sender",
+ "sender_name",
+ "session_id",
+ "files"
+ ],
+ "frozen": false,
+ "icon": "ChatInput",
+ "lf_version": "1.0.15",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Message",
+ "method": "message_response",
+ "name": "message",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n"
+ },
+ "files": {
+ "_input_type": "FileInput",
+ "advanced": true,
+ "display_name": "Files",
+ "dynamic": false,
+ "fileTypes": [
+ "txt",
+ "md",
+ "mdx",
+ "csv",
+ "json",
+ "yaml",
+ "yml",
+ "xml",
+ "html",
+ "htm",
+ "pdf",
+ "docx",
+ "py",
+ "sh",
+ "sql",
+ "js",
+ "ts",
+ "tsx",
+ "jpg",
+ "jpeg",
+ "png",
+ "bmp",
+ "image"
+ ],
+ "file_path": "",
+ "info": "Files to be sent with the message.",
+ "list": true,
+ "name": "files",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "file",
+ "value": ""
+ },
+ "input_value": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "Text",
+ "dynamic": false,
+ "info": "Message to be passed as input.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "Create a travel itinerary for a trip from São Paulo to Uberlândia, MG on August 23, 2024. The traveler enjoys drinking beer, eating pão de queijo, and drinking special coffee."
+ },
+ "sender": {
+ "_input_type": "DropdownInput",
+ "advanced": true,
+ "combobox": false,
+ "display_name": "Sender Type",
+ "dynamic": false,
+ "info": "Type of sender.",
+ "name": "sender",
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "User"
+ },
+ "sender_name": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Sender Name",
+ "dynamic": false,
+ "info": "Name of the sender.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "sender_name",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "User"
+ },
+ "session_id": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Session ID",
+ "dynamic": false,
+ "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "session_id",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "should_store_message": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Store Messages",
+ "dynamic": false,
+ "info": "Store the message in the history.",
+ "list": false,
+ "name": "should_store_message",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ChatInput"
+ },
+ "dragging": false,
+ "height": 298,
+ "id": "ChatInput-z8SS4",
+ "position": {
+ "x": 1702.183330569805,
+ "y": 313.3797217631409
+ },
+ "positionAbsolute": {
+ "x": 1702.183330569805,
+ "y": 313.3797217631409
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "ChatOutput-zxRey",
+ "node": {
+ "base_classes": [
+ "Message"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Display a chat message in the Playground.",
+ "display_name": "Chat Output",
+ "documentation": "",
+ "edited": false,
+ "field_order": [
+ "input_value",
+ "should_store_message",
+ "sender",
+ "sender_name",
+ "session_id",
+ "data_template"
+ ],
+ "frozen": false,
+ "icon": "ChatOutput",
+ "lf_version": "1.0.15",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Message",
+ "method": "message_response",
+ "name": "message",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n"
+ },
+ "data_template": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Data Template",
+ "dynamic": false,
+ "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "data_template",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "{text}"
+ },
+ "input_value": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Text",
+ "dynamic": false,
+ "info": "Message to be passed as output.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "sender": {
+ "_input_type": "DropdownInput",
+ "advanced": true,
+ "combobox": false,
+ "display_name": "Sender Type",
+ "dynamic": false,
+ "info": "Type of sender.",
+ "name": "sender",
+ "options": [
+ "Machine",
+ "User"
+ ],
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "Machine"
+ },
+ "sender_name": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Sender Name",
+ "dynamic": false,
+ "info": "Name of the sender.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "sender_name",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "AI"
+ },
+ "session_id": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "Session ID",
+ "dynamic": false,
+ "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "session_id",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "should_store_message": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Store Messages",
+ "dynamic": false,
+ "info": "Store the message in the history.",
+ "list": false,
+ "name": "should_store_message",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ChatOutput"
+ },
+ "dragging": false,
+ "height": 298,
+ "id": "ChatOutput-zxRey",
+ "position": {
+ "x": 3968.8870036313238,
+ "y": 627.770746142633
+ },
+ "positionAbsolute": {
+ "x": 3968.8870036313238,
+ "y": 627.770746142633
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "description": "Generates text using OpenAI LLMs.",
+ "display_name": "OpenAI",
+ "id": "OpenAIModel-2o0gd",
+ "node": {
+ "base_classes": [
+ "LanguageModel",
+ "Message"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Generates text using OpenAI LLMs.",
+ "display_name": "OpenAI",
+ "documentation": "",
+ "edited": false,
+ "field_order": [
+ "input_value",
+ "system_message",
+ "stream",
+ "max_tokens",
+ "model_kwargs",
+ "json_mode",
+ "output_schema",
+ "model_name",
+ "openai_api_base",
+ "api_key",
+ "temperature",
+ "seed"
+ ],
+ "frozen": false,
+ "icon": "OpenAI",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Text",
+ "method": "text_response",
+ "name": "text_output",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Language Model",
+ "method": "build_model",
+ "name": "model_output",
+ "selected": "LanguageModel",
+ "types": [
+ "LanguageModel"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "api_key": {
+ "_input_type": "SecretStrInput",
+ "advanced": false,
+ "display_name": "OpenAI API Key",
+ "dynamic": false,
+ "info": "The OpenAI API Key to use for the OpenAI model.",
+ "input_types": [
+ "Message"
+ ],
+ "load_from_db": true,
+ "name": "api_key",
+ "password": true,
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "type": "str",
+ "value": ""
+ },
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n"
+ },
+ "input_value": {
+ "_input_type": "MessageInput",
+ "advanced": false,
+ "display_name": "Input",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "json_mode": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "JSON Mode",
+ "dynamic": false,
+ "info": "If True, it will output JSON regardless of passing a schema.",
+ "list": false,
+ "name": "json_mode",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": false
+ },
+ "max_tokens": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Max Tokens",
+ "dynamic": false,
+ "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+ "list": false,
+ "name": "max_tokens",
+ "placeholder": "",
+ "range_spec": {
+ "max": 128000,
+ "min": 0,
+ "step": 0.1,
+ "step_type": "float"
+ },
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": ""
+ },
+ "model_kwargs": {
+ "_input_type": "DictInput",
+ "advanced": true,
+ "display_name": "Model Kwargs",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "model_kwargs",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "type": "dict",
+ "value": {}
+ },
+ "model_name": {
+ "_input_type": "DropdownInput",
+ "advanced": false,
+ "combobox": false,
+ "display_name": "Model Name",
+ "dynamic": false,
+ "info": "",
+ "load_from_db": false,
+ "name": "model_name",
+ "options": [
+ "gpt-4o-mini",
+ "gpt-4o",
+ "gpt-4-turbo",
+ "gpt-4-turbo-preview",
+ "gpt-4",
+ "gpt-3.5-turbo",
+ "gpt-3.5-turbo-0125"
+ ],
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "gpt-4o"
+ },
+ "openai_api_base": {
+ "_input_type": "StrInput",
+ "advanced": true,
+ "display_name": "OpenAI API Base",
+ "dynamic": false,
+ "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.",
+ "list": false,
+ "load_from_db": false,
+ "name": "openai_api_base",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "output_schema": {
+ "_input_type": "DictInput",
+ "advanced": true,
+ "display_name": "Schema",
+ "dynamic": false,
+ "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.",
+ "list": true,
+ "name": "output_schema",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "type": "dict",
+ "value": {}
+ },
+ "seed": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Seed",
+ "dynamic": false,
+ "info": "The seed controls the reproducibility of the job.",
+ "list": false,
+ "name": "seed",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": 1
+ },
+ "stream": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Stream",
+ "dynamic": false,
+ "info": "Stream the response from the model. Streaming works only in Chat.",
+ "list": false,
+ "name": "stream",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": false
+ },
+ "system_message": {
+ "_input_type": "MessageTextInput",
+ "advanced": true,
+ "display_name": "System Message",
+ "dynamic": false,
+ "info": "System message to pass to the model.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "system_message",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "temperature": {
+ "_input_type": "FloatInput",
+ "advanced": false,
+ "display_name": "Temperature",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "temperature",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "float",
+ "value": 0.1
+ }
+ }
+ },
+ "type": "OpenAIModel"
+ },
+ "dragging": false,
+ "height": 603,
+ "id": "OpenAIModel-2o0gd",
+ "position": {
+ "x": 2141.564227810534,
+ "y": 492.8566267469695
+ },
+ "positionAbsolute": {
+ "x": 2141.564227810534,
+ "y": 492.8566267469695
+ },
+ "selected": true,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "ToolCallingAgent-vCq4v",
+ "node": {
+ "base_classes": [
+ "AgentExecutor",
+ "Message"
+ ],
+ "beta": true,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Agent that uses tools",
+ "display_name": "City Selection Agent",
+ "documentation": "",
+ "edited": true,
+ "field_order": [
+ "input_value",
+ "handle_parsing_errors",
+ "verbose",
+ "max_iterations",
+ "tools",
+ "llm",
+ "system_prompt",
+ "user_prompt",
+ "chat_history"
+ ],
+ "frozen": false,
+ "icon": "bot-message-square",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Agent",
+ "hidden": true,
+ "method": "build_agent",
+ "name": "agent",
+ "selected": "AgentExecutor",
+ "types": [
+ "AgentExecutor"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Response",
+ "method": "message_response",
+ "name": "response",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "chat_history": {
+ "_input_type": "DataInput",
+ "advanced": true,
+ "display_name": "Chat History",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Data"
+ ],
+ "list": true,
+ "name": "chat_history",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from typing import Optional, List\n\nfrom langchain.agents import create_tool_calling_agent\nfrom langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.inputs import MultilineInput\nfrom langflow.inputs.inputs import HandleInput, DataInput\nfrom langflow.schema import Data\n\n\nclass ToolCallingAgentComponent(LCToolsAgentComponent):\n display_name: str = \"Tool Calling Agent\"\n description: str = \"Agent that uses tools\"\n icon = \"bot-message-square\"\n beta = True\n name = \"ToolCallingAgent\"\n\n inputs = LCToolsAgentComponent._base_inputs + [\n HandleInput(name=\"llm\", display_name=\"Language Model\", input_types=[\"LanguageModel\"], required=True),\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"System Prompt\",\n info=\"System prompt for the agent.\",\n value=\"You are a helpful assistant\",\n ),\n MultilineInput(\n name=\"user_prompt\", display_name=\"Prompt\", info=\"This prompt must contain 'input' key.\", value=\"{input}\"\n ),\n DataInput(name=\"chat_history\", display_name=\"Chat History\", is_list=True, advanced=True),\n ]\n\n def get_chat_history_data(self) -> Optional[List[Data]]:\n return self.chat_history\n\n def create_agent_runnable(self):\n if \"input\" not in self.user_prompt:\n raise ValueError(\"Prompt must contain 'input' key.\")\n messages = [\n (\"system\", self.system_prompt),\n (\"placeholder\", \"{chat_history}\"),\n HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=[\"input\"], template=self.user_prompt)),\n (\"placeholder\", \"{agent_scratchpad}\"),\n ]\n prompt = ChatPromptTemplate.from_messages(messages)\n return create_tool_calling_agent(self.llm, self.tools, prompt)\n"
+ },
+ "handle_parsing_errors": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Handle Parse Errors",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "handle_parsing_errors",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ },
+ "input_value": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Input",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "llm": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Language Model",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "LanguageModel"
+ ],
+ "list": false,
+ "name": "llm",
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "max_iterations": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Max Iterations",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "max_iterations",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": 15
+ },
+ "system_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "System Prompt",
+ "dynamic": false,
+ "info": "System prompt for the agent.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "system_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "You are a City Selection Expert, skilled in analyzing travel data to pick ideal destinations. Your goal is to select the best city based on weather, season, and prices. Use the provided tools to gather information and make an informed decision. Your final output should be a detailed report on the chosen city, including flight costs, weather forecasts, and attractions."
+ },
+ "tools": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Tools",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Tool",
+ "BaseTool"
+ ],
+ "list": true,
+ "load_from_db": false,
+ "name": "tools",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "user_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "Prompt",
+ "dynamic": false,
+ "info": "This prompt must contain 'input' key.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "user_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "{input}"
+ },
+ "verbose": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Verbose",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "verbose",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ToolCallingAgent"
+ },
+ "dragging": false,
+ "height": 566,
+ "id": "ToolCallingAgent-vCq4v",
+ "position": {
+ "x": 2624.854312225424,
+ "y": 459.94036227507735
+ },
+ "positionAbsolute": {
+ "x": 2624.854312225424,
+ "y": 459.94036227507735
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "SearchAPI-BaDvF",
+ "node": {
+ "base_classes": [
+ "Data",
+ "list",
+ "Tool"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Call the searchapi.io API",
+ "display_name": "Search API",
+ "documentation": "https://www.searchapi.io/docs/google",
+ "edited": false,
+ "field_order": [
+ "engine",
+ "api_key",
+ "input_value",
+ "search_params"
+ ],
+ "frozen": false,
+ "lf_version": "1.0.15",
+ "official": false,
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Data",
+ "method": "run_model",
+ "name": "api_run_model",
+ "selected": "Data",
+ "types": [
+ "Data"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Tool",
+ "method": "build_tool",
+ "name": "api_build_tool",
+ "selected": "Tool",
+ "types": [
+ "Tool"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "api_key": {
+ "_input_type": "SecretStrInput",
+ "advanced": false,
+ "display_name": "SearchAPI API Key",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "load_from_db": true,
+ "name": "api_key",
+ "password": true,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "str",
+ "value": ""
+ },
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from typing import Union\n\nfrom langchain_community.utilities.searchapi import SearchApiAPIWrapper\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.inputs import SecretStrInput, MultilineInput, DictInput, MessageTextInput\nfrom langflow.schema import Data\nfrom langflow.field_typing import Tool\n\n\nclass SearchAPIComponent(LCToolComponent):\n display_name: str = \"Search API\"\n description: str = \"Call the searchapi.io API\"\n name = \"SearchAPI\"\n documentation: str = \"https://www.searchapi.io/docs/google\"\n\n inputs = [\n MessageTextInput(name=\"engine\", display_name=\"Engine\", value=\"google\"),\n SecretStrInput(name=\"api_key\", display_name=\"SearchAPI API Key\", required=True),\n MultilineInput(\n name=\"input_value\",\n display_name=\"Input\",\n ),\n DictInput(name=\"search_params\", display_name=\"Search parameters\", advanced=True, is_list=True),\n ]\n\n def run_model(self) -> Union[Data, list[Data]]:\n wrapper = self._build_wrapper()\n results = wrapper.results(query=self.input_value, **(self.search_params or {}))\n list_results = results.get(\"organic_results\", [])\n data = [Data(data=result, text=result[\"snippet\"]) for result in list_results]\n self.status = data\n return data\n\n def build_tool(self) -> Tool:\n wrapper = self._build_wrapper()\n return Tool(\n name=\"search_api\",\n description=\"Search for recent results.\",\n func=lambda x: wrapper.run(query=x, **(self.search_params or {})),\n )\n\n def _build_wrapper(self):\n return SearchApiAPIWrapper(engine=self.engine, searchapi_api_key=self.api_key)\n"
+ },
+ "engine": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Engine",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "engine",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "google"
+ },
+ "input_value": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "Input",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "langflow docs"
+ },
+ "search_params": {
+ "_input_type": "DictInput",
+ "advanced": true,
+ "display_name": "Search parameters",
+ "dynamic": false,
+ "info": "",
+ "list": true,
+ "name": "search_params",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "type": "dict",
+ "value": {}
+ }
+ }
+ },
+ "type": "SearchAPI"
+ },
+ "dragging": false,
+ "height": 515,
+ "id": "SearchAPI-BaDvF",
+ "position": {
+ "x": 2147.8260389952898,
+ "y": -42.27285098321369
+ },
+ "positionAbsolute": {
+ "x": 2147.8260389952898,
+ "y": -42.27285098321369
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "url_content_fetcher-5EFR2",
+ "node": {
+ "base_classes": [
+ "Data",
+ "list",
+ "Tool"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Fetch content from a single URL.",
+ "display_name": "URL Content Fetcher",
+ "documentation": "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base",
+ "edited": true,
+ "field_order": [
+ "url",
+ "fetch_params"
+ ],
+ "frozen": false,
+ "icon": "globe",
+ "lf_version": "1.0.15",
+ "official": false,
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Data",
+ "method": "run_model",
+ "name": "api_run_model",
+ "selected": "Data",
+ "types": [
+ "Data",
+ "list"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Tool",
+ "method": "build_tool",
+ "name": "api_build_tool",
+ "selected": "Tool",
+ "types": [
+ "Tool"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from typing import Union, Optional\r\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\r\nfrom langflow.base.langchain_utilities.model import LCToolComponent\r\nfrom langflow.inputs import MessageTextInput, DictInput\r\nfrom langflow.schema import Data\r\nfrom langflow.field_typing import Tool\r\nfrom langchain.tools import StructuredTool\r\nfrom pydantic import BaseModel, Field\r\n\r\nclass URLToolComponent(LCToolComponent):\r\n display_name: str = \"URL Content Fetcher\"\r\n description: str = \"Fetch content from a single URL.\"\r\n name = \"url_content_fetcher\"\r\n documentation: str = \"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base\"\r\n icon=\"globe\"\r\n \r\n inputs = [\r\n MessageTextInput(\r\n name=\"url\",\r\n display_name=\"URL\",\r\n info=\"Enter a single URL to fetch content from.\",\r\n ),\r\n DictInput(name=\"fetch_params\", display_name=\"Fetch parameters\", advanced=True, is_list=True),\r\n ]\r\n\r\n class URLContentFetcherSchema(BaseModel):\r\n url: str = Field(..., description=\"The URL to fetch content from\")\r\n fetch_params: Optional[dict] = Field(default=None, description=\"Additional parameters for fetching\")\r\n\r\n def run_model(self) -> Union[Data, list[Data]]:\r\n wrapper = self._build_wrapper()\r\n content = wrapper.load()[0]\r\n data = Data(data={\"content\": content.page_content, \"metadata\": content.metadata}, \r\n text=content.page_content[:500])\r\n self.status = data\r\n return data\r\n\r\n def build_tool(self) -> Tool:\r\n return StructuredTool.from_function(\r\n name=\"url_content_fetcher\",\r\n description=\"Fetch content from a single URL. 
Input should be a URL string only.\",\r\n func=self._fetch_url_content,\r\n args_schema=self.URLContentFetcherSchema,\r\n )\r\n\r\n def _build_wrapper(self):\r\n return WebBaseLoader(web_paths=[self.url], encoding=\"utf-8\", **self.fetch_params or {})\r\n\r\n def _fetch_url_content(self, url: str, fetch_params: Optional[dict] = None) -> dict:\r\n loader = WebBaseLoader(web_paths=[url], encoding=\"utf-8\", **(fetch_params or {}))\r\n content = loader.load()[0]\r\n return {\r\n \"content\": content.page_content,\r\n \"metadata\": content.metadata\r\n }"
+ },
+ "fetch_params": {
+ "_input_type": "DictInput",
+ "advanced": true,
+ "display_name": "Fetch parameters",
+ "dynamic": false,
+ "info": "",
+ "list": true,
+ "name": "fetch_params",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "type": "dict",
+ "value": {}
+ },
+ "url": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "URL",
+ "dynamic": false,
+ "info": "Enter a single URL to fetch content from.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "url",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ }
+ }
+ },
+ "type": "url_content_fetcher"
+ },
+ "dragging": false,
+ "height": 343,
+ "id": "url_content_fetcher-5EFR2",
+ "position": {
+ "x": 2629.911251521856,
+ "y": 77.86269189756271
+ },
+ "positionAbsolute": {
+ "x": 2629.911251521856,
+ "y": 77.86269189756271
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "ToolCallingAgent-rT7Y8",
+ "node": {
+ "base_classes": [
+ "AgentExecutor",
+ "Message"
+ ],
+ "beta": true,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Agent that uses tools",
+ "display_name": "Local Expert Agent",
+ "documentation": "",
+ "edited": true,
+ "field_order": [
+ "input_value",
+ "handle_parsing_errors",
+ "verbose",
+ "max_iterations",
+ "tools",
+ "llm",
+ "system_prompt",
+ "user_prompt",
+ "chat_history"
+ ],
+ "frozen": false,
+ "icon": "bot-message-square",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Agent",
+ "hidden": true,
+ "method": "build_agent",
+ "name": "agent",
+ "selected": "AgentExecutor",
+ "types": [
+ "AgentExecutor"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Response",
+ "method": "message_response",
+ "name": "response",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "chat_history": {
+ "_input_type": "DataInput",
+ "advanced": true,
+ "display_name": "Chat History",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Data"
+ ],
+ "list": true,
+ "name": "chat_history",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from typing import Optional, List\n\nfrom langchain.agents import create_tool_calling_agent\nfrom langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.inputs import MultilineInput\nfrom langflow.inputs.inputs import HandleInput, DataInput\nfrom langflow.schema import Data\n\n\nclass ToolCallingAgentComponent(LCToolsAgentComponent):\n display_name: str = \"Tool Calling Agent\"\n description: str = \"Agent that uses tools\"\n icon = \"bot-message-square\"\n beta = True\n name = \"ToolCallingAgent\"\n\n inputs = LCToolsAgentComponent._base_inputs + [\n HandleInput(name=\"llm\", display_name=\"Language Model\", input_types=[\"LanguageModel\"], required=True),\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"System Prompt\",\n info=\"System prompt for the agent.\",\n value=\"You are a helpful assistant\",\n ),\n MultilineInput(\n name=\"user_prompt\", display_name=\"Prompt\", info=\"This prompt must contain 'input' key.\", value=\"{input}\"\n ),\n DataInput(name=\"chat_history\", display_name=\"Chat History\", is_list=True, advanced=True),\n ]\n\n def get_chat_history_data(self) -> Optional[List[Data]]:\n return self.chat_history\n\n def create_agent_runnable(self):\n if \"input\" not in self.user_prompt:\n raise ValueError(\"Prompt must contain 'input' key.\")\n messages = [\n (\"system\", self.system_prompt),\n (\"placeholder\", \"{chat_history}\"),\n HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=[\"input\"], template=self.user_prompt)),\n (\"placeholder\", \"{agent_scratchpad}\"),\n ]\n prompt = ChatPromptTemplate.from_messages(messages)\n return create_tool_calling_agent(self.llm, self.tools, prompt)\n"
+ },
+ "handle_parsing_errors": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Handle Parse Errors",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "handle_parsing_errors",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ },
+ "input_value": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Input",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "llm": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Language Model",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "LanguageModel"
+ ],
+ "list": false,
+ "name": "llm",
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "max_iterations": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Max Iterations",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "max_iterations",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": 15
+ },
+ "system_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "System Prompt",
+ "dynamic": false,
+ "info": "System prompt for the agent.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "system_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "You are a knowledgeable Local Expert with extensive information about the selected city, its attractions, and customs. Your goal is to provide the BEST insights about the city. Compile an in-depth guide for travelers, including key attractions, local customs, special events, and daily activity recommendations. Focus on hidden gems and local hotspots. Your final output should be a comprehensive city guide, rich in cultural insights and practical tips."
+ },
+ "tools": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Tools",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Tool",
+ "BaseTool"
+ ],
+ "list": true,
+ "load_from_db": false,
+ "name": "tools",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "user_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "Prompt",
+ "dynamic": false,
+ "info": "This prompt must contain 'input' key.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "user_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "{input}"
+ },
+ "verbose": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Verbose",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "verbose",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ToolCallingAgent"
+ },
+ "dragging": false,
+ "height": 566,
+ "id": "ToolCallingAgent-rT7Y8",
+ "position": {
+ "x": 3092.2977772950007,
+ "y": 456.1302150057377
+ },
+ "positionAbsolute": {
+ "x": 3092.2977772950007,
+ "y": 456.1302150057377
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "ToolCallingAgent-Zo0ES",
+ "node": {
+ "base_classes": [
+ "AgentExecutor",
+ "Message"
+ ],
+ "beta": true,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Agent that uses tools",
+ "display_name": "Travel Concierge Agent",
+ "documentation": "",
+ "edited": true,
+ "field_order": [
+ "input_value",
+ "handle_parsing_errors",
+ "verbose",
+ "max_iterations",
+ "tools",
+ "llm",
+ "system_prompt",
+ "user_prompt",
+ "chat_history"
+ ],
+ "frozen": false,
+ "icon": "bot-message-square",
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Agent",
+ "hidden": true,
+ "method": "build_agent",
+ "name": "agent",
+ "selected": "AgentExecutor",
+ "types": [
+ "AgentExecutor"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Response",
+ "method": "message_response",
+ "name": "response",
+ "selected": "Message",
+ "types": [
+ "Message"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "chat_history": {
+ "_input_type": "DataInput",
+ "advanced": true,
+ "display_name": "Chat History",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Data"
+ ],
+ "list": true,
+ "name": "chat_history",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "from typing import Optional, List\n\nfrom langchain.agents import create_tool_calling_agent\nfrom langchain_core.prompts import ChatPromptTemplate, PromptTemplate, HumanMessagePromptTemplate\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.inputs import MultilineInput\nfrom langflow.inputs.inputs import HandleInput, DataInput\nfrom langflow.schema import Data\n\n\nclass ToolCallingAgentComponent(LCToolsAgentComponent):\n display_name: str = \"Tool Calling Agent\"\n description: str = \"Agent that uses tools\"\n icon = \"bot-message-square\"\n beta = True\n name = \"ToolCallingAgent\"\n\n inputs = LCToolsAgentComponent._base_inputs + [\n HandleInput(name=\"llm\", display_name=\"Language Model\", input_types=[\"LanguageModel\"], required=True),\n MultilineInput(\n name=\"system_prompt\",\n display_name=\"System Prompt\",\n info=\"System prompt for the agent.\",\n value=\"You are a helpful assistant\",\n ),\n MultilineInput(\n name=\"user_prompt\", display_name=\"Prompt\", info=\"This prompt must contain 'input' key.\", value=\"{input}\"\n ),\n DataInput(name=\"chat_history\", display_name=\"Chat History\", is_list=True, advanced=True),\n ]\n\n def get_chat_history_data(self) -> Optional[List[Data]]:\n return self.chat_history\n\n def create_agent_runnable(self):\n if \"input\" not in self.user_prompt:\n raise ValueError(\"Prompt must contain 'input' key.\")\n messages = [\n (\"system\", self.system_prompt),\n (\"placeholder\", \"{chat_history}\"),\n HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=[\"input\"], template=self.user_prompt)),\n (\"placeholder\", \"{agent_scratchpad}\"),\n ]\n prompt = ChatPromptTemplate.from_messages(messages)\n return create_tool_calling_agent(self.llm, self.tools, prompt)\n"
+ },
+ "handle_parsing_errors": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Handle Parse Errors",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "handle_parsing_errors",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ },
+ "input_value": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Input",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "input_value",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
+ "llm": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Language Model",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "LanguageModel"
+ ],
+ "list": false,
+ "name": "llm",
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "max_iterations": {
+ "_input_type": "IntInput",
+ "advanced": true,
+ "display_name": "Max Iterations",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "max_iterations",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "int",
+ "value": 15
+ },
+ "system_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "System Prompt",
+ "dynamic": false,
+ "info": "System prompt for the agent.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "system_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "You are an Amazing Travel Concierge, a specialist in travel planning and logistics with decades of experience. Your goal is to create the most amazing travel itineraries with budget and packing suggestions for the city. Expand the city guide into a full 7-day travel itinerary with detailed per-day plans. Include weather forecasts, places to eat, packing suggestions, and a budget breakdown. Suggest actual places to visit, hotels to stay, and restaurants to go to. Your final output should be a complete expanded travel plan, formatted as markdown, encompassing a daily schedule, anticipated weather conditions, recommended clothing and items to pack, and a detailed budget."
+ },
+ "tools": {
+ "_input_type": "HandleInput",
+ "advanced": false,
+ "display_name": "Tools",
+ "dynamic": false,
+ "info": "",
+ "input_types": [
+ "Tool",
+ "BaseTool"
+ ],
+ "list": true,
+ "load_from_db": false,
+ "name": "tools",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "other",
+ "value": ""
+ },
+ "user_prompt": {
+ "_input_type": "MultilineInput",
+ "advanced": false,
+ "display_name": "Prompt",
+ "dynamic": false,
+ "info": "This prompt must contain 'input' key.",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "user_prompt",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "{input}"
+ },
+ "verbose": {
+ "_input_type": "BoolInput",
+ "advanced": true,
+ "display_name": "Verbose",
+ "dynamic": false,
+ "info": "",
+ "list": false,
+ "name": "verbose",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_metadata": true,
+ "type": "bool",
+ "value": true
+ }
+ }
+ },
+ "type": "ToolCallingAgent"
+ },
+ "dragging": false,
+ "height": 566,
+ "id": "ToolCallingAgent-Zo0ES",
+ "position": {
+ "x": 3515.829696775688,
+ "y": 461.15233262803827
+ },
+ "positionAbsolute": {
+ "x": 3515.829696775688,
+ "y": 461.15233262803827
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ },
+ {
+ "data": {
+ "id": "CalculatorTool-CYR2I",
+ "node": {
+ "base_classes": [
+ "Data",
+ "list",
+ "Sequence",
+ "Tool"
+ ],
+ "beta": false,
+ "conditional_paths": [],
+ "custom_fields": {},
+ "description": "Perform basic arithmetic operations on a given expression.",
+ "display_name": "Calculator",
+ "documentation": "",
+ "edited": true,
+ "field_order": [
+ "expression"
+ ],
+ "frozen": false,
+ "icon": "calculator",
+ "lf_version": "1.0.15",
+ "official": false,
+ "output_types": [],
+ "outputs": [
+ {
+ "cache": true,
+ "display_name": "Data",
+ "method": "run_model",
+ "name": "api_run_model",
+ "selected": "Data",
+ "types": [
+ "Data",
+ "list"
+ ],
+ "value": "__UNDEFINED__"
+ },
+ {
+ "cache": true,
+ "display_name": "Tool",
+ "method": "build_tool",
+ "name": "api_build_tool",
+ "selected": "Tool",
+ "types": [
+ "Tool",
+ "Sequence"
+ ],
+ "value": "__UNDEFINED__"
+ }
+ ],
+ "pinned": false,
+ "template": {
+ "_type": "Component",
+ "code": {
+ "advanced": true,
+ "dynamic": true,
+ "fileTypes": [],
+ "file_path": "",
+ "info": "",
+ "list": false,
+ "load_from_db": false,
+ "multiline": true,
+ "name": "code",
+ "password": false,
+ "placeholder": "",
+ "required": true,
+ "show": true,
+ "title_case": false,
+ "type": "code",
+ "value": "import ast\r\nimport operator\r\nfrom typing import List\r\nfrom pydantic import BaseModel, Field\r\nfrom langflow.base.langchain_utilities.model import LCToolComponent\r\nfrom langflow.inputs import MessageTextInput\r\nfrom langflow.schema import Data\r\nfrom langflow.field_typing import Tool\r\nfrom langchain.tools import StructuredTool\r\n\r\nclass CalculatorToolComponent(LCToolComponent):\r\n display_name = \"Calculator\"\r\n description = \"Perform basic arithmetic operations on a given expression.\"\r\n icon = \"calculator\"\r\n name = \"CalculatorTool\"\r\n\r\n inputs = [\r\n MessageTextInput(\r\n name=\"expression\",\r\n display_name=\"Expression\",\r\n info=\"The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').\",\r\n ),\r\n ]\r\n\r\n class CalculatorToolSchema(BaseModel):\r\n expression: str = Field(..., description=\"The arithmetic expression to evaluate.\")\r\n\r\n def run_model(self) -> List[Data]:\r\n return self._evaluate_expression(self.expression)\r\n\r\n def build_tool(self) -> Tool:\r\n return StructuredTool.from_function(\r\n name=\"calculator\",\r\n description=\"Evaluate basic arithmetic expressions. 
Input should be a string containing the expression.\",\r\n func=self._evaluate_expression,\r\n args_schema=self.CalculatorToolSchema,\r\n )\r\n\r\n def _evaluate_expression(self, expression: str) -> List[Data]:\r\n try:\r\n # Define the allowed operators\r\n operators = {\r\n ast.Add: operator.add,\r\n ast.Sub: operator.sub,\r\n ast.Mult: operator.mul,\r\n ast.Div: operator.truediv,\r\n ast.Pow: operator.pow,\r\n }\r\n\r\n def eval_expr(node):\r\n if isinstance(node, ast.Num):\r\n return node.n\r\n elif isinstance(node, ast.BinOp):\r\n return operators[type(node.op)](eval_expr(node.left), eval_expr(node.right))\r\n elif isinstance(node, ast.UnaryOp):\r\n return operators[type(node.op)](eval_expr(node.operand))\r\n else:\r\n raise TypeError(node)\r\n\r\n # Parse the expression and evaluate it\r\n tree = ast.parse(expression, mode='eval')\r\n result = eval_expr(tree.body)\r\n\r\n # Format the result to a reasonable number of decimal places\r\n formatted_result = f\"{result:.6f}\".rstrip('0').rstrip('.')\r\n\r\n self.status = formatted_result\r\n return [Data(data={\"result\": formatted_result})]\r\n\r\n except (SyntaxError, TypeError, KeyError) as e:\r\n error_message = f\"Invalid expression: {str(e)}\"\r\n self.status = error_message\r\n return [Data(data={\"error\": error_message})]\r\n except ZeroDivisionError:\r\n error_message = \"Error: Division by zero\"\r\n self.status = error_message\r\n return [Data(data={\"error\": error_message})]\r\n except Exception as e:\r\n error_message = f\"Error: {str(e)}\"\r\n self.status = error_message\r\n return [Data(data={\"error\": error_message})]"
+ },
+ "expression": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Expression",
+ "dynamic": false,
+ "info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "load_from_db": false,
+ "name": "expression",
+ "placeholder": "",
+ "required": false,
+ "show": true,
+ "title_case": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "2+2"
+ }
+ }
+ },
+ "type": "CalculatorTool"
+ },
+ "dragging": false,
+ "height": 371,
+ "id": "CalculatorTool-CYR2I",
+ "position": {
+ "x": 3094.040233522674,
+ "y": 59.811211480422756
+ },
+ "positionAbsolute": {
+ "x": 3094.040233522674,
+ "y": 59.811211480422756
+ },
+ "selected": false,
+ "type": "genericNode",
+ "width": 384
+ }
+ ],
+ "viewport": {
+ "x": -708.3644867946784,
+ "y": 248.9889129182872,
+ "zoom": 0.43911377041251404
+ }
+ },
+ "description": "Multi Agent system to plan trips.",
+ "endpoint_name": null,
+ "id": "8971db1c-5a0e-4f07-9562-478f3170e845",
+ "is_component": false,
+ "last_tested_version": "1.0.17",
+ "name": "Travel Planning Agents"
+}
\ No newline at end of file
diff --git a/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx b/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx
index 0bcfb75ef7b6..290f08fd60d7 100644
--- a/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx
+++ b/src/frontend/src/modals/newFlowModal/components/undrawCards/index.tsx
@@ -94,7 +94,7 @@ export default function UndrawCardComponent({
preserveAspectRatio="xMidYMid meet"
/>
);
- case "Sequential Tasks Agent":
+ case "Simple Agent":
return (
);
- case "Hierarchical Tasks Agent":
+ case "Travel Planning Agents":
return (
);
- case "Complex Agent":
+ case "Dynamic Agent":
return (
e.name == "Vector Store RAG")!}
/>
)}
- {examples.find((e) => e.name == "Sequential Tasks Agent") && (
+ {examples.find((e) => e.name == "Simple Agent") && (
e.name == "Sequential Tasks Agent")!}
+ flow={examples.find((e) => e.name == "Simple Agent")!}
/>
)}
- {examples.find((e) => e.name == "Hierarchical Tasks Agent") && (
+ {examples.find((e) => e.name == "Travel Planning Agents") && (
e.name == "Hierarchical Tasks Agent")!
- }
+ flow={examples.find((e) => e.name == "Travel Planning Agents")!}
/>
)}
- {examples.find((e) => e.name == "Complex Agent") && (
+ {examples.find((e) => e.name == "Dynamic Agent") && (
e.name == "Complex Agent")!}
+ flow={examples.find((e) => e.name == "Dynamic Agent")!}
/>
)}
diff --git a/src/frontend/tests/end-to-end/Complex Agent.spec.ts b/src/frontend/tests/end-to-end/Dynamic Agent.spec.ts
similarity index 94%
rename from src/frontend/tests/end-to-end/Complex Agent.spec.ts
rename to src/frontend/tests/end-to-end/Dynamic Agent.spec.ts
index d75b939ea092..0a798185d1cd 100644
--- a/src/frontend/tests/end-to-end/Complex Agent.spec.ts
+++ b/src/frontend/tests/end-to-end/Dynamic Agent.spec.ts
@@ -2,7 +2,7 @@ import { expect, test } from "@playwright/test";
import * as dotenv from "dotenv";
import path from "path";
-test("Complex Agent", async ({ page }) => {
+test("Dynamic Agent", async ({ page }) => {
test.skip(
!process?.env?.OPENAI_API_KEY,
"OPENAI_API_KEY required to run this test",
@@ -42,7 +42,7 @@ test("Complex Agent", async ({ page }) => {
modalCount = await page.getByTestId("modal-title")?.count();
}
- await page.getByRole("heading", { name: "Complex Agent" }).click();
+ await page.getByRole("heading", { name: "Dynamic Agent" }).click();
await page.waitForSelector('[title="fit view"]', {
timeout: 100000,
@@ -104,5 +104,5 @@ test("Complex Agent", async ({ page }) => {
const concatAllText = textContents.join(" ");
expect(concatAllText.toLocaleLowerCase()).toContain("apple");
const allTextLength = concatAllText.length;
- expect(allTextLength).toBeGreaterThan(500);
+ expect(allTextLength).toBeGreaterThan(100);
});
diff --git a/src/frontend/tests/end-to-end/Hierarchical Tasks Agent.spec.ts b/src/frontend/tests/end-to-end/Hierarchical Tasks Agent.spec.ts
deleted file mode 100644
index 3752b24c4fe4..000000000000
--- a/src/frontend/tests/end-to-end/Hierarchical Tasks Agent.spec.ts
+++ /dev/null
@@ -1,110 +0,0 @@
-import { expect, test } from "@playwright/test";
-import * as dotenv from "dotenv";
-import path from "path";
-
-test("Hierarchical Tasks Agent", async ({ page }) => {
- test.skip(
- !process?.env?.OPENAI_API_KEY,
- "OPENAI_API_KEY required to run this test",
- );
-
- test.skip(
- !process?.env?.BRAVE_SEARCH_API_KEY,
- "BRAVE_SEARCH_API_KEY required to run this test",
- );
-
- if (!process.env.CI) {
- dotenv.config({ path: path.resolve(__dirname, "../../.env") });
- }
-
- await page.goto("/");
- await page.waitForSelector('[data-testid="mainpage_title"]', {
- timeout: 30000,
- });
-
- await page.waitForSelector('[id="new-project-btn"]', {
- timeout: 30000,
- });
-
- let modalCount = 0;
- try {
- const modalTitleElement = await page?.getByTestId("modal-title");
- if (modalTitleElement) {
- modalCount = await modalTitleElement.count();
- }
- } catch (error) {
- modalCount = 0;
- }
-
- while (modalCount === 0) {
- await page.getByText("New Project", { exact: true }).click();
- await page.waitForTimeout(3000);
- modalCount = await page.getByTestId("modal-title")?.count();
- }
-
- await page.getByRole("heading", { name: "Hierarchical Tasks Agent" }).click();
-
- await page.waitForSelector('[title="fit view"]', {
- timeout: 100000,
- });
-
- await page.getByTitle("fit view").click();
- await page.getByTitle("zoom out").click();
- await page.getByTitle("zoom out").click();
- await page.getByTitle("zoom out").click();
-
- let outdatedComponents = await page.getByTestId("icon-AlertTriangle").count();
-
- while (outdatedComponents > 0) {
- await page.getByTestId("icon-AlertTriangle").first().click();
- await page.waitForTimeout(1000);
- outdatedComponents = await page.getByTestId("icon-AlertTriangle").count();
- }
-
- await page
- .getByTestId("popover-anchor-input-api_key")
- .first()
- .fill(process.env.OPENAI_API_KEY ?? "");
-
- await page
- .getByTestId("popover-anchor-input-api_key")
- .nth(1)
- .fill(process.env.OPENAI_API_KEY ?? "");
-
- await page.getByTestId("dropdown_str_model_name").first().click();
- await page.getByTestId("gpt-4o-1-option").first().click();
-
- await page.waitForTimeout(1000);
-
- await page.getByTestId("dropdown_str_model_name").last().click();
- await page.getByTestId("gpt-4o-1-option").last().click();
-
- await page.waitForTimeout(1000);
-
- await page
- .getByTestId("popover-anchor-input-api_key")
- .last()
- .fill(process.env.BRAVE_SEARCH_API_KEY ?? "");
-
- await page.waitForTimeout(1000);
-
- await page.getByTestId("button_run_chat output").click();
- await page.waitForSelector("text=built successfully", { timeout: 60000 * 3 });
-
- await page.getByText("built successfully").last().click({
- timeout: 15000,
- });
-
- await page.getByText("Playground", { exact: true }).click();
-
- await page.waitForTimeout(3000);
-
- const textContents = await page
- .getByTestId("div-chat-message")
- .allTextContents();
-
- const concatAllText = textContents.join(" ");
- expect(concatAllText.toLocaleLowerCase()).toContain("langflow");
- const allTextLength = concatAllText.length;
- expect(allTextLength).toBeGreaterThan(500);
-});
diff --git a/src/frontend/tests/end-to-end/Sequential Tasks Agent.spec.ts b/src/frontend/tests/end-to-end/Simple Agent.spec.ts
similarity index 65%
rename from src/frontend/tests/end-to-end/Sequential Tasks Agent.spec.ts
rename to src/frontend/tests/end-to-end/Simple Agent.spec.ts
index d841ccab67fc..d8325d36b92d 100644
--- a/src/frontend/tests/end-to-end/Sequential Tasks Agent.spec.ts
+++ b/src/frontend/tests/end-to-end/Simple Agent.spec.ts
@@ -2,17 +2,12 @@ import { expect, test } from "@playwright/test";
import * as dotenv from "dotenv";
import path from "path";
-test("Sequential Tasks Agent", async ({ page }) => {
+test("Simple Agent", async ({ page }) => {
test.skip(
!process?.env?.OPENAI_API_KEY,
"OPENAI_API_KEY required to run this test",
);
- test.skip(
- !process?.env?.BRAVE_SEARCH_API_KEY,
- "BRAVE_SEARCH_API_KEY required to run this test",
- );
-
if (!process.env.CI) {
dotenv.config({ path: path.resolve(__dirname, "../../.env") });
}
@@ -42,7 +37,7 @@ test("Sequential Tasks Agent", async ({ page }) => {
modalCount = await page.getByTestId("modal-title")?.count();
}
- await page.getByRole("heading", { name: "Sequential Tasks Agent" }).click();
+ await page.getByRole("heading", { name: "Simple Agent" }).click();
await page.waitForSelector('[title="fit view"]', {
timeout: 100000,
@@ -63,7 +58,6 @@ test("Sequential Tasks Agent", async ({ page }) => {
await page
.getByTestId("popover-anchor-input-api_key")
- .first()
.fill(process.env.OPENAI_API_KEY ?? "");
await page.getByTestId("dropdown_str_model_name").click();
@@ -71,15 +65,8 @@ test("Sequential Tasks Agent", async ({ page }) => {
await page.waitForTimeout(1000);
- await page
- .getByTestId("popover-anchor-input-api_key")
- .last()
- .fill(process.env.BRAVE_SEARCH_API_KEY ?? "");
-
- await page.waitForTimeout(1000);
-
await page.getByTestId("button_run_chat output").click();
- await page.waitForSelector("text=built successfully", { timeout: 60000 * 3 });
+ await page.waitForSelector("text=built successfully", { timeout: 30000 });
await page.getByText("built successfully").last().click({
timeout: 15000,
@@ -87,26 +74,33 @@ test("Sequential Tasks Agent", async ({ page }) => {
await page.getByText("Playground", { exact: true }).click();
+ await page.waitForSelector(
+ "text=write short python scsript to say hello world",
+ {
+ timeout: 30000,
+ },
+ );
+
await page.waitForTimeout(1000);
- expect(
- page
- .getByPlaceholder("No chat input variables found. Click to run your flow")
- .last(),
- ).toBeVisible();
+ expect(await page.getByText("User")).toBeVisible();
- await page.getByText("Topic", { exact: true }).nth(1).isVisible();
- await page.getByText("Topic", { exact: true }).nth(1).click();
- expect(await page.getByPlaceholder("Enter text...").inputValue()).toBe(
- "Agile",
- );
+ expect(await page.locator(".language-python")).toBeVisible();
+
+ let pythonWords = await page.getByText("Hello, World!").count();
+
+ expect(pythonWords).toBe(2);
+
+ await page.getByTestId("icon-Copy").last().click();
+
+ await page.waitForTimeout(500);
+
+ await page.getByPlaceholder("Send a message...").click();
+ await page.keyboard.press("Control+V");
+
+ await page.waitForTimeout(500);
- const textContents = await page
- .getByTestId("div-chat-message")
- .allTextContents();
+ pythonWords = await page.getByText("Hello, World!").count();
- const concatAllText = textContents.join(" ");
- expect(concatAllText.toLocaleLowerCase()).toContain("agile");
- const allTextLength = concatAllText.length;
- expect(allTextLength).toBeGreaterThan(500);
+ expect(pythonWords).toBe(3);
});
diff --git a/src/frontend/tests/end-to-end/Travel Planning Agent.spec.ts b/src/frontend/tests/end-to-end/Travel Planning Agent.spec.ts
new file mode 100644
index 000000000000..923b3137760a
--- /dev/null
+++ b/src/frontend/tests/end-to-end/Travel Planning Agent.spec.ts
@@ -0,0 +1,152 @@
+import { expect, test } from "@playwright/test";
+import * as dotenv from "dotenv";
+import path from "path";
+
+test("Travel Planning Agent", async ({ page }) => {
+ test.skip(
+ !process?.env?.OPENAI_API_KEY,
+ "OPENAI_API_KEY required to run this test",
+ );
+
+ if (!process.env.CI) {
+ dotenv.config({ path: path.resolve(__dirname, "../../.env") });
+ }
+
+ await page.goto("/");
+ await page.waitForSelector('[data-testid="mainpage_title"]', {
+ timeout: 30000,
+ });
+
+ await page.waitForSelector('[id="new-project-btn"]', {
+ timeout: 30000,
+ });
+
+ let modalCount = 0;
+ try {
+ const modalTitleElement = await page?.getByTestId("modal-title");
+ if (modalTitleElement) {
+ modalCount = await modalTitleElement.count();
+ }
+ } catch (error) {
+ modalCount = 0;
+ }
+
+ while (modalCount === 0) {
+ await page.getByText("New Project", { exact: true }).click();
+ await page.waitForTimeout(3000);
+ modalCount = await page.getByTestId("modal-title")?.count();
+ }
+
+ await page.getByRole("heading", { name: "Travel Planning Agents" }).click();
+
+ await page.waitForSelector('[title="fit view"]', {
+ timeout: 100000,
+ });
+
+ await page.getByTitle("fit view").click();
+ await page.getByTitle("zoom out").click();
+ await page.getByTitle("zoom out").click();
+ await page.getByTitle("zoom out").click();
+
+ let outdatedComponents = await page.getByTestId("icon-AlertTriangle").count();
+
+ while (outdatedComponents > 0) {
+ await page.getByTestId("icon-AlertTriangle").first().click();
+ await page.waitForTimeout(1000);
+ outdatedComponents = await page.getByTestId("icon-AlertTriangle").count();
+ }
+
+ await page.getByTestId("extended-disclosure").click();
+ await page.getByPlaceholder("Search").click();
+ await page.getByPlaceholder("Search").fill("yahoo finance");
+ await page.waitForTimeout(1000);
+
+ await page.getByText("SearchAPI").last().click();
+ await page.waitForTimeout(1000);
+ await page.keyboard.press("Backspace");
+
+ await page.getByTitle("zoom out").click();
+ await page.getByTitle("zoom out").click();
+ await page
+ .locator('//*[@id="react-flow-id"]')
+ .hover()
+ .then(async () => {
+ await page.mouse.down();
+ await page.mouse.move(-100, 100);
+ });
+
+ await page.mouse.up();
+
+ await page
+ .getByTestId("toolsYahoo Finance News Tool")
+ .dragTo(page.locator('//*[@id="react-flow-id"]'));
+
+ await page.getByTitle("fit view").click();
+
+ await page.getByTitle("zoom out").click();
+ await page.getByTitle("zoom out").click();
+
+ //connection 1
+ const yahooElementOutput = await page
+ .getByTestId("handle-yfinancetool-shownode-tool-right")
+ .nth(0);
+ await yahooElementOutput.hover();
+ await page.mouse.down();
+ const agentOne = await page
+ .getByTestId("handle-toolcallingagent-shownode-tools-left")
+ .nth(0);
+ await agentOne.hover();
+ await page.mouse.up();
+
+ //connection 2
+ await yahooElementOutput.hover();
+ await page.mouse.down();
+ const agentTwo = await page
+ .getByTestId("handle-toolcallingagent-shownode-tools-left")
+ .nth(1);
+ await agentTwo.hover();
+ await page.mouse.up();
+
+ //connection 3
+ await yahooElementOutput.hover();
+ await page.mouse.down();
+ const agentThree = await page
+ .getByTestId("handle-toolcallingagent-shownode-tools-left")
+ .nth(2);
+ await agentThree.hover();
+ await page.mouse.up();
+
+ await page
+ .getByTestId("popover-anchor-input-api_key")
+ .first()
+ .fill(process.env.OPENAI_API_KEY ?? "");
+
+ await page.getByTestId("dropdown_str_model_name").click();
+ await page.getByTestId("gpt-4o-1-option").click();
+
+ await page.waitForTimeout(1000);
+
+ await page.getByTestId("button_run_chat output").click();
+ await page.waitForSelector("text=built successfully", { timeout: 60000 * 3 });
+
+ await page.getByText("built successfully").last().click({
+ timeout: 15000,
+ });
+
+ await page.getByText("Playground", { exact: true }).click();
+
+ await page.waitForSelector("text=default session", {
+ timeout: 30000,
+ });
+
+ await page.waitForTimeout(1000);
+
+ const output = await page.getByTestId("div-chat-message").allTextContents();
+ const outputText = output.join("\n");
+
+ expect(outputText.toLowerCase()).toContain("weather");
+ expect(outputText.toLowerCase()).toContain("budget");
+
+ expect(outputText.toLowerCase()).toContain("uberlândia");
+ expect(outputText.toLowerCase()).toContain("pão de queijo");
+});