fix: update message reload and update langchain-core (#3054)
* refactor: update langchain-core to version 0.2.24

* refactor: convert inner messages to BaseMessage in load_lc_prompt method

* refactor: update ChatPromptTemplate instantiation in message.py

* refactor: update langflow-base dependency to use local path for development

* [autofix.ci] apply automated fixes

* refactor: update ChatPromptTemplate instantiation in message.py

* refactor: add async_from_template_and_variables and sync from_template_and_variables

* feat(tests): add unit test for Message schema serialization and prompt loading

* refactor: update langchain-core dependency to version 0.2.24

* chore: new lock

* mypy

* chore: format pyproject

* refactor: rename async_from_template_and_variables to from_template_and_variables in Message class

* refactor: Rename async_from_template_and_variables to from_template_and_variables in Message class

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Nicolò Boschi <[email protected]>
3 people authored Jul 30, 2024
1 parent 3e6d3dc commit 4382e42
Showing 7 changed files with 565 additions and 590 deletions.
1,063 changes: 497 additions & 566 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
```diff
@@ -27,7 +27,7 @@ langflow = "langflow.__main__:main"
 
 [tool.poetry.dependencies]
 python = ">=3.10,<3.13"
-langflow-base = { path = "./src/backend/base", develop = true }
+langflow-base ={ path = "./src/backend/base", develop = true }
 beautifulsoup4 = "^4.12.2"
 google-search-results = "^2.4.1"
 google-api-python-client = "^2.130.0"
```
2 changes: 1 addition & 1 deletion src/backend/base/langflow/graph/graph/base.py
```diff
@@ -1014,7 +1014,7 @@ async def get_next_runnable_vertices(self, lock: asyncio.Lock, vertex: "Vertex",
                 else:
                     self.run_manager.add_to_vertices_being_run(next_v_id)
             if cache and self.flow_id:
-                set_cache_coro = partial(get_chat_service().set_cache, key=self.flow_id)
+                set_cache_coro = partial(get_chat_service().set_cache, self.flow_id)
                 await set_cache_coro(self, lock)
         return next_runnable_vertices
```
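Why this one-line change matters: with `key=self.flow_id` bound as a keyword, the later call `set_cache_coro(self, lock)` also fills the first positional parameter, so `key` receives two values. A minimal sketch, assuming a simplified `set_cache(key, data, lock)` signature (illustrative, not the service's exact API):

```python
import asyncio
from functools import partial


async def set_cache(key, data, lock):
    """Simplified stand-in for ChatService.set_cache (signature assumed)."""
    return key, data, lock


async def main():
    graph, lock = object(), asyncio.Lock()  # placeholders for the real arguments

    # Old binding: key is bound as a keyword argument.
    broken = partial(set_cache, key="flow-id")
    try:
        await broken(graph, lock)  # expands to set_cache(graph, lock, key="flow-id")
    except TypeError as e:
        print(e)  # set_cache() got multiple values for argument 'key'

    # New binding: key is bound positionally, so the call expands as intended.
    fixed = partial(set_cache, "flow-id")
    print(await fixed(graph, lock))  # set_cache("flow-id", graph, lock)


asyncio.run(main())
```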
48 changes: 31 additions & 17 deletions src/backend/base/langflow/schema/message.py
```diff
@@ -1,3 +1,4 @@
+import asyncio
 from datetime import datetime, timezone
 from typing import Annotated, Any, AsyncIterator, Iterator, List, Optional
 from uuid import UUID
@@ -15,10 +16,10 @@
 from langflow.schema.data import Data
 from langflow.schema.image import Image, get_file_paths, is_image_file
 from langflow.utils.constants import (
-    MESSAGE_SENDER_USER,
-    MESSAGE_SENDER_NAME_USER,
-    MESSAGE_SENDER_NAME_AI,
     MESSAGE_SENDER_AI,
+    MESSAGE_SENDER_NAME_AI,
+    MESSAGE_SENDER_NAME_USER,
+    MESSAGE_SENDER_USER,
 )
 
 
```
```diff
@@ -171,20 +172,23 @@ async def get_file_content_dicts(self):
     def load_lc_prompt(self):
         if "prompt" not in self:
             raise ValueError("Prompt is required.")
-        loaded_prompt = load(self.prompt)
-        # Rebuild HumanMessages if they are instance of BaseMessage
-        if isinstance(loaded_prompt, ChatPromptTemplate):
-            messages = []
-            for message in loaded_prompt.messages:
-                if isinstance(message, HumanMessage):
+        # self.prompt was passed through jsonable_encoder
+        # so inner messages are not BaseMessage
+        # we need to convert them to BaseMessage
+        messages = []
+        for message in self.prompt.get("kwargs", {}).get("messages", []):
+            match message:
+                case HumanMessage():
                     messages.append(message)
-                elif message.type == "human":
-                    messages.append(HumanMessage(content=message.content))
-                elif message.type == "system":
-                    messages.append(SystemMessage(content=message.content))
-                elif message.type == "ai":
-                    messages.append(AIMessage(content=message.content))
-            loaded_prompt.messages = messages
+                case _ if message.get("type") == "human":
+                    messages.append(HumanMessage(content=message.get("content")))
+                case _ if message.get("type") == "system":
+                    messages.append(SystemMessage(content=message.get("content")))
+                case _ if message.get("type") == "ai":
+                    messages.append(AIMessage(content=message.get("content")))
+
+        self.prompt["kwargs"]["messages"] = messages
+        loaded_prompt = load(self.prompt)
         return loaded_prompt
 
     @classmethod
```
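The rewrite above works around a serialization asymmetry: `instance.prompt` is produced by passing `prompt_template.to_json()` through FastAPI's `jsonable_encoder`, which turns the inner `BaseMessage` objects into plain dicts, so they must be rebuilt before `load()` can reconstruct the template. A minimal sketch of that round trip (the message content is illustrative):

```python
from fastapi.encoders import jsonable_encoder
from langchain_core.load import load
from langchain_core.messages import HumanMessage
from langchain_core.prompts.chat import ChatPromptTemplate

prompt_template = ChatPromptTemplate(messages=[HumanMessage(content="Hello, Langflow!")])
encoded = jsonable_encoder(prompt_template.to_json())

# After encoding, the inner messages are plain dicts, not BaseMessage objects:
inner = encoded["kwargs"]["messages"][0]
print(type(inner), inner.get("type"))  # <class 'dict'> human

# Rebuilding them (as load_lc_prompt now does) lets load() reconstruct the template:
encoded["kwargs"]["messages"] = [HumanMessage(content=inner.get("content"))]
restored = load(encoded)
print(type(restored).__name__)  # ChatPromptTemplate
```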
```diff
@@ -216,7 +220,17 @@ async def from_template_and_variables(cls, template: str, **variables):
         if contents:
             message = HumanMessage(content=[{"type": "text", "text": text}] + contents)
 
-        prompt_template = ChatPromptTemplate.from_messages([message])  # type: ignore
+        prompt_template = ChatPromptTemplate(messages=[message])  # type: ignore
         instance.prompt = jsonable_encoder(prompt_template.to_json())
         instance.messages = instance.prompt.get("kwargs", {}).get("messages", [])
         return instance
+
+    @classmethod
+    def sync_from_template_and_variables(cls, template: str, **variables):
+        # Run the async version in a sync way
+        try:
+            loop = asyncio.get_running_loop()
+        except RuntimeError:
+            return asyncio.run(cls.from_template_and_variables(template, **variables))
+        else:
+            return loop.run_until_complete(cls.from_template_and_variables(template, **variables))
```
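For completeness, a usage sketch of the new pair (template and variables taken from the tests below): the sync wrapper falls back to `asyncio.run` when no event loop is running, while async callers should keep awaiting `from_template_and_variables`, since `loop.run_until_complete` cannot re-enter a loop that is already running.

```python
import asyncio

from langflow.schema.message import Message

# Synchronous caller: no running loop, so the wrapper falls back to asyncio.run().
message = Message.sync_from_template_and_variables("Hello, {name}!", name="Langflow")
print(message.text)  # Hello, Langflow!


# Asynchronous caller: await the coroutine directly instead of the sync wrapper.
async def build() -> Message:
    return await Message.from_template_and_variables("Hello, {name}!", name="Langflow")


print(asyncio.run(build()).text)  # Hello, Langflow!
```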
8 changes: 4 additions & 4 deletions src/backend/base/poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion src/backend/base/pyproject.toml
```diff
@@ -32,7 +32,7 @@ httpx = "*"
 uvicorn = "^0.30.0"
 gunicorn = "^22.0.0"
 langchain = "~0.2.0"
-langchain-core = "0.2.23"
+langchain-core = "^0.2.24"
 langchainhub = "~0.1.15"
 sqlmodel = "^0.0.18"
 loguru = "^0.7.1"
```
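Note that this also loosens the pin: in Poetry, the caret constraint `^0.2.24` allows any compatible release at or above 0.2.24 and below 0.3.0, whereas the previous `0.2.23` pinned one exact version.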
30 changes: 30 additions & 0 deletions src/backend/tests/unit/schema/test_schema_message.py
New file (@@ -0,0 +1,30 @@):

```python
import pytest
from langchain_core.prompts.chat import ChatPromptTemplate

from langflow.schema.message import Message


@pytest.fixture
def client():
    pass


@pytest.mark.asyncio
async def test_message_async_prompt_serialization():
    template = "Hello, {name}!"
    message = await Message.from_template_and_variables(template, name="Langflow")
    assert message.text == "Hello, Langflow!"

    prompt = message.load_lc_prompt()
    assert isinstance(prompt, ChatPromptTemplate)
    assert prompt.messages[0].content == "Hello, Langflow!"


def test_message_prompt_serialization():
    template = "Hello, {name}!"
    message = Message.sync_from_template_and_variables(template, name="Langflow")
    assert message.text == "Hello, Langflow!"

    prompt = message.load_lc_prompt()
    assert isinstance(prompt, ChatPromptTemplate)
    assert prompt.messages[0].content == "Hello, Langflow!"
```
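These tests presumably require `pytest-asyncio` (for the `@pytest.mark.asyncio` marker) and can be run with, e.g., `pytest src/backend/tests/unit/schema/test_schema_message.py`.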
