Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

fix: patch user_id sent in openai and anthropic payloads #2064

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion letta/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -535,7 +535,7 @@ def _get_ai_reply(
# agent_state=self.agent_state,
llm_config=self.agent_state.llm_config,
messages=message_sequence,
user_id=self.agent_state.user_id,
user_id=self.agent_state.id,
functions=allowed_functions,
functions_python=self.functions_python,
function_call=function_call,
Expand Down
6 changes: 6 additions & 0 deletions letta/llm_api/anthropic.py
Original file line number Diff line number Diff line change
Expand Up @@ -360,5 +360,11 @@ def anthropic_chat_completions_request(
data.pop("user", None)
data.pop("tool_choice", None)

# insert user_id for Anthropic
if "metadata" not in data:
data["metadata"] = {}
if user_id is not None:
data["metadata"]["user_id"] = user_id

response_json = make_post_request(url, headers, data)
return convert_anthropic_response_to_chatcompletion(response_json=response_json, inner_thoughts_xml_tag=inner_thoughts_xml_tag)
12 changes: 10 additions & 2 deletions letta/llm_api/llm_api_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,15 @@ def create(
# only is a problem if we are *not* using an openai proxy
raise ValueError(f"OpenAI key is missing from letta config file")

data = build_openai_chat_completions_request(llm_config, messages, user_id, functions, function_call, use_tool_naming, max_tokens)
data = build_openai_chat_completions_request(
llm_config=llm_config,
messages=messages,
user_id=user_id,
functions=functions,
function_call=function_call,
use_tool_naming=use_tool_naming,
max_tokens=max_tokens,
)
if stream: # Client requested token streaming
data.stream = True
assert isinstance(stream_interface, AgentChunkStreamingInterface) or isinstance(
Expand Down Expand Up @@ -253,7 +261,7 @@ def create(
messages=[cast_message_to_subtype(m.to_openai_dict()) for m in messages],
tools=[{"type": "function", "function": f} for f in functions] if functions else None,
# tool_choice=function_call,
# user=str(user_id),
user=user_id,
# NOTE: max_tokens is required for Anthropic API
max_tokens=1024, # TODO make dynamic
),
Expand Down
2 changes: 1 addition & 1 deletion letta/memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def summarize_messages(
llm_config_no_inner_thoughts.put_inner_thoughts_in_kwargs = False
response = create(
llm_config=llm_config_no_inner_thoughts,
user_id=agent_state.user_id,
user_id=agent_state.id,
messages=message_sequence,
stream=False,
)
Expand Down
Loading