chore: Various bug fixes #2356

Merged · 25 commits · Jan 16, 2025

Commits
8933188
docs: patch tools docs and composio docs (#534)
cpacker Jan 11, 2025
cff8737
feat: new routes to gather a job's messages and usage statistics (#564)
mlong93 Jan 12, 2025
df24721
fix: patch gpt4omini bug (#619)
cpacker Jan 13, 2025
d728b24
fix: propagate use_assistant_message through send paths (#615)
carenthomas Jan 13, 2025
4d49aa4
feat: Add reset messages route for agents (#601)
mattzh72 Jan 13, 2025
7f574eb
chore: Deprecate module field on tool (#600)
mattzh72 Jan 13, 2025
cfde9b0
chore: move project_id to project slug (#620)
4shub Jan 13, 2025
c2c371f
fix: Include at least a system message during agent reset (#631)
mattzh72 Jan 13, 2025
7158bb6
fix: changed all instances of 16384 to 8192 (gpt4 context window) (#605)
cpacker Jan 13, 2025
1fcfc47
fix: prevent adding duplicate messages to JobMessages table (#625)
mlong93 Jan 13, 2025
55394e2
fix: tool attachment bug for agent creation (#635)
mlong93 Jan 14, 2025
59daa16
fix: Force recreate all e2b sandboxes on tool execution (#637)
mattzh72 Jan 14, 2025
27d2d52
fix: createAgent issue (#639)
4shub Jan 14, 2025
6c71719
feat: Added list_tags route (#645)
mlong93 Jan 14, 2025
d06cb58
feat: add text search for agent names (#662)
mlong93 Jan 15, 2025
69f909e
chore: Move `send_message_to_agent` (#665)
mattzh72 Jan 15, 2025
8981b30
fix: async message doc return type (#664)
carenthomas Jan 15, 2025
4552923
feat: single tool rule test case (#666)
mlong93 Jan 15, 2025
b843552
fix: updated `send_message_async` request params, `get_run_messages` …
mlong93 Jan 15, 2025
d8d6ee8
chore: add unit tests for sdk client (#663)
carenthomas Jan 16, 2025
9a5af1e
feat: Native agent to agent messaging (#668)
mattzh72 Jan 16, 2025
02ec8c7
chore: Bump composio version to 0.6.15 (#672)
mattzh72 Jan 16, 2025
bc61fab
chore: Merge OSS (#674)
mattzh72 Jan 16, 2025
81814ec
fix: Add retry decorator to stochastic tests (#678)
mattzh72 Jan 16, 2025
d059af7
Merge branch 'main' into matt-improvements-jan-16
mattzh72 Jan 16, 2025
31 changes: 31 additions & 0 deletions alembic/versions/22a6e413d89c_remove_module_field_on_tool.py
@@ -0,0 +1,31 @@
"""Remove module field on tool

Revision ID: 22a6e413d89c
Revises: 88f9432739a9
Create Date: 2025-01-10 17:38:23.811795

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "22a6e413d89c"
down_revision: Union[str, None] = "88f9432739a9"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("tools", "module")
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("tools", sa.Column("module", sa.VARCHAR(), autoincrement=False, nullable=True))
# ### end Alembic commands ###
53 changes: 53 additions & 0 deletions alembic/versions/7778731d15e2_added_jobusagestatistics_table.py
@@ -0,0 +1,53 @@
"""Added JobUsageStatistics table

Revision ID: 7778731d15e2
Revises: 8d70372ad130
Create Date: 2025-01-09 13:20:25.555740

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "7778731d15e2"
down_revision: Union[str, None] = "8d70372ad130"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# Create job_usage_statistics table
op.create_table(
"job_usage_statistics",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("job_id", sa.String(), nullable=False),
sa.Column("step_id", sa.String(), nullable=True),
sa.Column("completion_tokens", sa.Integer(), server_default=sa.text("0"), nullable=False),
sa.Column("prompt_tokens", sa.Integer(), server_default=sa.text("0"), nullable=False),
sa.Column("total_tokens", sa.Integer(), server_default=sa.text("0"), nullable=False),
sa.Column("step_count", sa.Integer(), server_default=sa.text("0"), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
sa.Column("_created_by_id", sa.String(), nullable=True),
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
sa.ForeignKeyConstraint(["job_id"], ["jobs.id"], name="fk_job_usage_statistics_job_id", ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id", name="pk_job_usage_statistics"),
)

# Create indexes
op.create_index("ix_job_usage_statistics_created_at", "job_usage_statistics", ["created_at"])
op.create_index("ix_job_usage_statistics_job_id", "job_usage_statistics", ["job_id"])


def downgrade() -> None:
# Drop indexes
op.drop_index("ix_job_usage_statistics_created_at", "job_usage_statistics")
op.drop_index("ix_job_usage_statistics_job_id", "job_usage_statistics")

# Drop table
op.drop_table("job_usage_statistics")
@@ -0,0 +1,33 @@
"""change JobMessage unique constraint to (job_id,message_id)

Revision ID: 7f652fdd3dba
Revises: 22a6e413d89c
Create Date: 2025-01-13 14:36:13.626344

"""

from typing import Sequence, Union

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "7f652fdd3dba"
down_revision: Union[str, None] = "22a6e413d89c"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# Drop the old unique constraint
op.drop_constraint("uq_job_messages_message_id", "job_messages", type_="unique")

# Add the new composite unique constraint
op.create_unique_constraint("unique_job_message", "job_messages", ["job_id", "message_id"])


def downgrade() -> None:
# Drop the new composite constraint
op.drop_constraint("unique_job_message", "job_messages", type_="unique")

# Restore the old unique constraint
op.create_unique_constraint("uq_job_messages_message_id", "job_messages", ["message_id"])
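The composite constraint lets one message be linked to several jobs while still rejecting duplicate (job_id, message_id) pairs, which is what the duplicate-prevention fix (#625) relies on. A minimal sketch of an idempotent insert against it, assuming PostgreSQL and a reflected table with a placeholder DSN:

# Illustrative only: duplicate (job_id, message_id) pairs are silently skipped
# thanks to the unique_job_message constraint created above.
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import insert as pg_insert

engine = sa.create_engine("postgresql+psycopg2://user:pass@localhost/letta")  # placeholder DSN
job_messages = sa.Table("job_messages", sa.MetaData(), autoload_with=engine)

stmt = pg_insert(job_messages).values(
    job_id="job-123", message_id="message-456"
).on_conflict_do_nothing(constraint="unique_job_message")

with engine.begin() as conn:
    conn.execute(stmt)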
37 changes: 37 additions & 0 deletions alembic/versions/88f9432739a9_add_jobtype_to_job_table.py
@@ -0,0 +1,37 @@
"""add JobType to Job table

Revision ID: 88f9432739a9
Revises: 7778731d15e2
Create Date: 2025-01-10 13:46:44.089110

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op
from letta.orm.enums import JobType

# revision identifiers, used by Alembic.
revision: str = "88f9432739a9"
down_revision: Union[str, None] = "7778731d15e2"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# Add job_type column with default value
op.add_column("jobs", sa.Column("job_type", sa.String(), nullable=True))

# Set existing rows to have the default value of JobType.JOB
job_value = JobType.JOB.value
op.execute(f"UPDATE jobs SET job_type = '{job_value}' WHERE job_type IS NULL")

# Make the column non-nullable after setting default values
op.alter_column("jobs", "job_type", existing_type=sa.String(), nullable=False)


def downgrade() -> None:
# Remove the job_type column
op.drop_column("jobs", "job_type")
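The backfill above interpolates the enum value into the SQL string with an f-string. Since this migration already imports sa and JobType, an equivalent form with a bound parameter would be a small, purely illustrative variation:

# Sketch only: the same backfill, with the value bound instead of interpolated.
op.execute(
    sa.text("UPDATE jobs SET job_type = :job_type WHERE job_type IS NULL").bindparams(
        job_type=JobType.JOB.value
    )
)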
47 changes: 47 additions & 0 deletions alembic/versions/8d70372ad130_adding_jobmessages_table.py
@@ -0,0 +1,47 @@
"""adding JobMessages table

Revision ID: 8d70372ad130
Revises: cdb3db091113
Create Date: 2025-01-08 17:57:20.325596

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "8d70372ad130"
down_revision: Union[str, None] = "cdb3db091113"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
op.create_table(
"job_messages",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("job_id", sa.String(), nullable=False),
sa.Column("message_id", sa.String(), nullable=False),
sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True),
sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False),
sa.Column("_created_by_id", sa.String(), nullable=True),
sa.Column("_last_updated_by_id", sa.String(), nullable=True),
sa.ForeignKeyConstraint(["job_id"], ["jobs.id"], name="fk_job_messages_job_id", ondelete="CASCADE"),
sa.ForeignKeyConstraint(["message_id"], ["messages.id"], name="fk_job_messages_message_id", ondelete="CASCADE", use_alter=True),
sa.PrimaryKeyConstraint("id", name="pk_job_messages"),
sa.UniqueConstraint("message_id", name="uq_job_messages_message_id"),
)

# Add indexes
op.create_index("ix_job_messages_job_id", "job_messages", ["job_id"], unique=False)
op.create_index("ix_job_messages_created_at", "job_messages", ["created_at"], unique=False)


def downgrade() -> None:
op.drop_index("ix_job_messages_created_at", "job_messages")
op.drop_index("ix_job_messages_job_id", "job_messages")
op.drop_table("job_messages")
@@ -0,0 +1,31 @@
"""adding request_config to Job table

Revision ID: f595e0e8013e
Revises: 7f652fdd3dba
Create Date: 2025-01-14 14:34:34.203363

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "f595e0e8013e"
down_revision: Union[str, None] = "7f652fdd3dba"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("jobs", sa.Column("request_config", sa.JSON, nullable=True))
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("jobs", "request_config")
# ### end Alembic commands ###
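The JSON request_config column gives jobs a place to persist per-request options such as the use_assistant_message flag propagated in #615. The keys below are assumptions for illustration, not the actual letta schema:

# Illustrative only: store an assumed per-request config on a job row.
import sqlalchemy as sa

engine = sa.create_engine("postgresql+psycopg2://user:pass@localhost/letta")  # placeholder DSN
jobs = sa.Table("jobs", sa.MetaData(), autoload_with=engine)

with engine.begin() as conn:
    conn.execute(
        sa.update(jobs)
        .where(jobs.c.id == "job-123")
        .values(request_config={"use_assistant_message": True})  # assumed key
    )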
2 changes: 1 addition & 1 deletion examples/tool_rule_usage.py
@@ -6,7 +6,7 @@
from letta.schemas.tool_rule import ChildToolRule, InitToolRule, TerminalToolRule
from tests.helpers.endpoints_helper import assert_invoked_send_message_with_keyword, setup_agent
from tests.helpers.utils import cleanup
from tests.test_model_letta_perfomance import llm_config_dir
from tests.test_model_letta_performance import llm_config_dir

"""
This example shows how you can constrain tool calls in your agent.
1 change: 1 addition & 0 deletions letta/__init__.py
@@ -1,5 +1,6 @@
__version__ = "0.6.9"


# import clients
from letta.client.client import LocalClient, RESTClient, create_client

24 changes: 24 additions & 0 deletions letta/agent.py
@@ -12,6 +12,7 @@
FIRST_MESSAGE_ATTEMPTS,
FUNC_FAILED_HEARTBEAT_MESSAGE,
LETTA_CORE_TOOL_MODULE_NAME,
LETTA_MULTI_AGENT_TOOL_MODULE_NAME,
LLM_MAX_TOKENS,
MESSAGE_SUMMARY_TRUNC_KEEP_N_LAST,
MESSAGE_SUMMARY_TRUNC_TOKEN_FRAC,
@@ -25,6 +26,7 @@
from letta.llm_api.helpers import is_context_overflow_error
from letta.llm_api.llm_api_tools import create
from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages
from letta.log import get_logger
from letta.memory import summarize_messages
from letta.orm import User
from letta.orm.enums import ToolType
@@ -44,6 +46,7 @@
from letta.services.agent_manager import AgentManager
from letta.services.block_manager import BlockManager
from letta.services.helpers.agent_manager_helper import check_supports_structured_output, compile_memory_metadata_block
from letta.services.job_manager import JobManager
from letta.services.message_manager import MessageManager
from letta.services.passage_manager import PassageManager
from letta.services.tool_execution_sandbox import ToolExecutionSandbox
@@ -128,6 +131,7 @@ def __init__(
self.message_manager = MessageManager()
self.passage_manager = PassageManager()
self.agent_manager = AgentManager()
self.job_manager = JobManager()

# State needed for heartbeat pausing

@@ -141,6 +145,9 @@ def __init__(
# Load last function response from message history
self.last_function_response = self.load_last_function_response()

# Logger that the Agent specifically can use, will also report the agent_state ID with the logs
self.logger = get_logger(agent_state.id)

def load_last_function_response(self):
"""Load the last function response from message history"""
in_context_messages = self.agent_manager.get_in_context_messages(agent_id=self.agent_state.id, actor=self.user)
@@ -205,6 +212,10 @@ def execute_tool_and_persist_state(self, function_name: str, function_args: dict
callable_func = get_function_from_module(LETTA_CORE_TOOL_MODULE_NAME, function_name)
function_args["self"] = self # need to attach self to arg since it's dynamically linked
function_response = callable_func(**function_args)
elif target_letta_tool.tool_type == ToolType.LETTA_MULTI_AGENT_CORE:
callable_func = get_function_from_module(LETTA_MULTI_AGENT_TOOL_MODULE_NAME, function_name)
function_args["self"] = self # need to attach self to arg since it's dynamically linked
function_response = callable_func(**function_args)
elif target_letta_tool.tool_type == ToolType.LETTA_MEMORY_CORE:
callable_func = get_function_from_module(LETTA_CORE_TOOL_MODULE_NAME, function_name)
agent_state_copy = self.agent_state.__deepcopy__()
@@ -675,11 +686,15 @@ def inner_step(
skip_verify: bool = False,
stream: bool = False, # TODO move to config?
step_count: Optional[int] = None,
metadata: Optional[dict] = None,
) -> AgentStepResponse:
"""Runs a single step in the agent loop (generates at most one LLM call)"""

try:

# Extract job_id from metadata if present
job_id = metadata.get("job_id") if metadata else None

# Step 0: update core memory
# only pulling latest block data if shared memory is being used
current_persisted_memory = Memory(
@@ -754,9 +769,17 @@
f"last response total_tokens ({current_total_tokens}) < {MESSAGE_SUMMARY_WARNING_FRAC * int(self.agent_state.llm_config.context_window)}"
)

# Persisting into Messages
self.agent_state = self.agent_manager.append_to_in_context_messages(
all_new_messages, agent_id=self.agent_state.id, actor=self.user
)
if job_id:
for message in all_new_messages:
self.job_manager.add_message_to_job(
job_id=job_id,
message_id=message.id,
actor=self.user,
)

return AgentStepResponse(
messages=all_new_messages,
@@ -784,6 +807,7 @@
first_message_retry_limit=first_message_retry_limit,
skip_verify=skip_verify,
stream=stream,
metadata=metadata,
)

else:
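Taken together, the agent.py changes thread an optional metadata dict through inner_step so that messages produced while servicing a job are linked back to it via JobManager.add_message_to_job. A hedged sketch of the calling side follows; the agent and message construction and the messages parameter name are assumptions, while metadata and the AgentStepResponse.messages field are taken from the diff above:

# Illustrative only: pass a job_id through inner_step metadata so the new
# per-job message bookkeeping runs for each persisted message.
step_response = agent.inner_step(
    messages=[user_message],          # assumed parameter name; `agent` and
    metadata={"job_id": "job-123"},   # `user_message` are built elsewhere
)
print(f"step produced {len(step_response.messages)} new messages")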