
Sync with upstream
Authored and committed by Крестников Константин Николаевич on Nov 15, 2023
Merge commit 53620bb (2 parents: 82677ca + 7372bcf)
Showing 16 changed files with 870 additions and 506 deletions.
1 change: 1 addition & 0 deletions .github/workflows/langserve_ci.yml
@@ -11,6 +11,7 @@ on:
       - '.github/workflows/_test.yml'
       - '.github/workflows/langserve_ci.yml'
       - 'langserve/**'
+      - 'tests/**'
       - 'examples/**'
       - 'pyproject.toml'
       - 'poetry.lock'
2 changes: 1 addition & 1 deletion README.md
@@ -59,7 +59,7 @@ Get your LangServe instance started quickly with

 For more examples, see the templates
 [index](https://github.com/langchain-ai/langchain/blob/master/templates/docs/INDEX.md)
-or the [examples](./examples) directory.
+or the [examples](https://github.com/langchain-ai/langserve/tree/main/examples) directory.

 ### Server

2 changes: 1 addition & 1 deletion examples/conversational_retrieval_chain/server.py
@@ -27,7 +27,7 @@ class ChatHistory(BaseModel):

     chat_history: List[Tuple[str, str]] = Field(
         ...,
-        extra={"widget": {"type": "chat", "input": "question"}},
+        extra={"widget": {"type": "chat", "input": "question", "output": "answer"}},
     )
     question: str

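The hunk above adds an `output` key to the chat widget metadata, so the playground knows which key of the chain's output dict holds the AI reply (the companion renderer change further down reads `widget.output` for exactly this). A minimal, self-contained sketch of how such an annotated input model is wired into a LangServe app; the trivial `RunnableLambda` chain and the `/chat` path are placeholders, not the example's real retrieval chain:

```python
# Sketch only: a stand-in chain so the widget wiring is runnable end to end.
from typing import List, Tuple

from fastapi import FastAPI
from langchain.pydantic_v1 import BaseModel, Field
from langchain.schema.runnable import RunnableLambda
from langserve import add_routes


class ChatHistory(BaseModel):
    chat_history: List[Tuple[str, str]] = Field(
        ...,
        # "input": request field rendered as the human turn,
        # "output": key of the response dict rendered as the AI turn.
        extra={"widget": {"type": "chat", "input": "question", "output": "answer"}},
    )
    question: str


def _answer(request: dict) -> dict:
    # Placeholder logic; the real example answers from retrieved documents.
    return {"answer": f"You asked: {request['question']}"}


app = FastAPI()
add_routes(
    app,
    RunnableLambda(_answer).with_types(input_type=ChatHistory),
    path="/chat",
)
```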
10 changes: 8 additions & 2 deletions langserve/client.py
@@ -512,10 +512,13 @@ def stream(
                 _raise_exception_from_data(
                     sse.data, httpx.Request(method="POST", url=endpoint)
                 )
+            elif sse.event == "metadata":
+                # Nothing to do for metadata for the regular remote client.
+                continue
             elif sse.event == "end":
                 break
             else:
-                logger.error(
+                _log_error_message_once(
                     f"Encountered an unsupported event type: `{sse.event}`. "
                     f"Try upgrading the remote client to the latest version."
                     f"Ignoring events of type `{sse.event}`."
@@ -593,10 +596,13 @@ async def astream(
                 _raise_exception_from_data(
                     sse.data, httpx.Request(method="POST", url=endpoint)
                 )
+            elif sse.event == "metadata":
+                # Nothing to do for metadata for the regular remote client.
+                continue
             elif sse.event == "end":
                 break
             else:
-                logger.error(
+                _log_error_message_once(
                     f"Encountered an unsupported event type: `{sse.event}`. "
                     f"Try upgrading the remote client to the latest version."
                     f"Ignoring events of type `{sse.event}`."
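Both `stream` and `astream` now skip the new `metadata` events and route unsupported event types through `_log_error_message_once` instead of `logger.error`, presumably so the same warning is not emitted for every event in a long stream. The helper's body is not part of this diff; a plausible sketch, assuming a deduplicating cache (cache size and log level are assumptions):

```python
import logging
from functools import lru_cache

logger = logging.getLogger(__name__)


@lru_cache(maxsize=1_000)
def _log_error_message_once(error_message: str) -> None:
    """Log an error message once per unique message to avoid log spam."""
    logger.error(error_message)
```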

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion langserve/playground/dist/index.html
@@ -5,7 +5,7 @@
     <link rel="icon" href="/____LANGSERVE_BASE_URL/favicon.ico" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Playground</title>
-    <script type="module" crossorigin src="/____LANGSERVE_BASE_URL/assets/index-7c3e1e1d.js"></script>
+    <script type="module" crossorigin src="/____LANGSERVE_BASE_URL/assets/index-32c8d712.js"></script>
     <link rel="stylesheet" href="/____LANGSERVE_BASE_URL/assets/index-c6cde0dd.css">
   </head>
   <body>
10 changes: 6 additions & 4 deletions langserve/playground/src/App.tsx
@@ -137,7 +137,9 @@ export const cells = [
 ];

 function App() {
-  const [isIframe] = useState(() => window.self !== window.top);
+  const [isEmbedded] = useState(() =>
+    window.location.search.includes("embeded=true")
+  );

   // it is possible that defaults are being applied _after_
   // the initial update message has been sent from the parent window
@@ -280,7 +282,7 @@ function App() {

       {Object.keys(schemas.config).length > 0 && (
         <div className="flex flex-col gap-3 [&:has(.content>.vertical-layout:first-child:last-child:empty)]:hidden">
-          {!isIframe && (
+          {!isEmbedded && (
             <h2 className="text-xl font-semibold">Configure</h2>
           )}

@@ -313,7 +315,7 @@
           </div>
         )}

-        {!isIframe && (
+        {!isEmbedded && (
           <div className="flex flex-col gap-3">
             <h2 className="text-xl font-semibold">Try it</h2>

@@ -374,7 +376,7 @@ function App() {
       <div className="gap-4 grid grid-cols-2 sticky -mx-4 px-4 py-4 bottom-0 bg-background md:static md:bg-transparent">
         <div className="md:hidden absolute inset-x-0 bottom-full h-5 bg-gradient-to-t from-black/5 to-black/0" />

-        {isIframe ? (
+        {isEmbedded ? (
           <>
             <button
               type="button"
@@ -55,7 +55,10 @@ export const ChatMessageTuplesControlRenderer = withJsonFormsControlProps(

       const human = traverseNaiveJsonPath(ctx.input, widget.input ?? "");
       const ai = traverseNaiveJsonPath(ctx.output, widget.output ?? "");
-      props.handleChange(props.path, [...data, [human, ai]]);
+
+      if (typeof human === "string" && typeof ai === "string") {
+        props.handleChange(props.path, [...data, [human, ai]]);
+      }
     });

     return (
3 changes: 3 additions & 0 deletions langserve/schema.py
@@ -96,6 +96,9 @@ class Feedback(BaseFeedback):
     Represents feedback given on an individual run
     """

+    id: UUID
+    """The unique ID of the feedback that was created."""
+
     created_at: datetime
     """The time the feedback was created."""

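`Feedback` now carries the `id` of the created feedback record alongside `created_at`. A stripped-down illustration of parsing such a payload; the model below is a hypothetical stand-in, not the real `Feedback`, which inherits further fields from `BaseFeedback`:

```python
from datetime import datetime
from uuid import UUID

from pydantic import BaseModel


class FeedbackSketch(BaseModel):
    """Hypothetical stand-in for langserve.schema.Feedback."""

    id: UUID
    """The unique ID of the feedback that was created."""

    created_at: datetime
    """The time the feedback was created."""


payload = {
    "id": "4b4072ab-3a7a-4a7e-8f5b-0f5a2a4b6a11",
    "created_at": "2023-11-15T12:00:00+00:00",
}
feedback = FeedbackSketch(**payload)
print(feedback.id, feedback.created_at)
```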
30 changes: 14 additions & 16 deletions langserve/serialization.py
@@ -11,11 +11,11 @@
 sensitive information from the server to the client.
 """
 import abc
-import json
 import logging
 from functools import lru_cache
 from typing import Any, Dict, List, Union

+import orjson
 from langchain.prompts.base import StringPromptValue
 from langchain.prompts.chat import ChatPromptValueConcrete
 from langchain.schema.agent import AgentAction, AgentActionMessageLog, AgentFinish
@@ -83,14 +83,11 @@ class WellKnownLCObject(BaseModel):
 ]


-# Custom JSON Encoder
-class _LangChainEncoder(json.JSONEncoder):
-    """Custom JSON Encoder that can encode pydantic objects as well."""
-
-    def default(self, obj) -> Any:
-        if isinstance(obj, BaseModel):
-            return obj.dict()
-        return super().default(obj)
+def default(obj) -> Any:
+    """Default serialization for well known objects."""
+    if isinstance(obj, BaseModel):
+        return obj.dict()
+    return super().default(obj)


 def _decode_lc_objects(value: Any) -> Any:
@@ -149,11 +146,11 @@ def dumpd(self, obj: Any) -> Any:
         """Convert the given object to a JSON serializable object."""

     @abc.abstractmethod
-    def dumps(self, obj: Any) -> str:
+    def dumps(self, obj: Any) -> bytes:
         """Dump the given object as a JSON string."""

     @abc.abstractmethod
-    def loads(self, s: str) -> Any:
+    def loads(self, s: bytes) -> Any:
         """Load the given JSON string."""

     @abc.abstractmethod
@@ -164,18 +161,19 @@ def loadd(self, obj: Any) -> Any:
 class WellKnownLCSerializer(Serializer):
     def dumpd(self, obj: Any) -> Any:
         """Convert the given object to a JSON serializable object."""
-        return json.loads(json.dumps(obj, cls=_LangChainEncoder))  # :*(
+        return orjson.loads(orjson.dumps(obj, default=default))

-    def dumps(self, obj: Any) -> str:
+    def dumps(self, obj: Any) -> bytes:
         """Dump the given object as a JSON string."""
-        return json.dumps(obj, cls=_LangChainEncoder)
+        return orjson.dumps(obj, default=default)

     def loadd(self, obj: Any) -> Any:
         """Load the given object."""
         return _decode_lc_objects(obj)

-    def loads(self, s: str) -> Any:
+    def loads(self, s: bytes) -> Any:
         """Load the given JSON string."""
-        return self.loadd(json.loads(s))
+        return self.loadd(orjson.loads(s))


 def _project_top_level(model: BaseModel) -> Dict[str, Any]:
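The serializer now goes through `orjson`, which is why `dumps`/`loads` switch from `str` to `bytes` in the hunks above: `orjson.dumps` returns `bytes`, and pydantic objects are handled by passing a plain `default` callable rather than subclassing `json.JSONEncoder`. A small standalone illustration of that pattern (the `Point` model is made up for the demo and is not part of langserve):

```python
import orjson
from pydantic import BaseModel


class Point(BaseModel):
    x: int
    y: int


def default(obj):
    # Fall back to pydantic's .dict() for model instances; anything else
    # that orjson cannot serialize natively is rejected explicitly.
    if isinstance(obj, BaseModel):
        return obj.dict()
    raise TypeError(f"Unserializable object: {obj!r}")


data = orjson.dumps({"point": Point(x=1, y=2)}, default=default)
print(type(data))          # <class 'bytes'>, unlike json.dumps which returns str
print(orjson.loads(data))  # {'point': {'x': 1, 'y': 2}}
```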
