Skip to content

Commit

Permalink
update documentation, prep for 0.3.0 release
Browse files Browse the repository at this point in the history
  • Loading branch information
krohling committed Jan 13, 2024
1 parent 7852d5d commit 44955c7
Show file tree
Hide file tree
Showing 22 changed files with 852 additions and 37 deletions.
15 changes: 1 addition & 14 deletions bondai/agents/group_chat/group_conversation.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ def send_message(
next_message = previous_message
except AgentException as e:
print("Error occurred, rewinding conversation...")
print(e)
# print(e)
# The recipient agent has errored out. We will rewind the conversation and try again.
previous_message = (
self._messages[-2]
Expand All @@ -270,19 +270,6 @@ def send_message(
finally:
self._status = AgentStatus.IDLE

def send_message_async(
    self,
    recipient_name: str,
    message: str,
    sender_name: str = USER_MEMBER_NAME,
):
    """Send *message* to *recipient_name* via a fresh asyncio event loop.

    NOTE(review): despite the name, this call blocks the caller until
    ``send_message`` completes, because ``asyncio.run`` drives the wrapped
    coroutine to completion synchronously.
    """

    async def _deliver():
        # Delegate to the synchronous send_message with identical arguments.
        return self.send_message(
            recipient_name=recipient_name,
            message=message,
            sender_name=sender_name,
        )

    return asyncio.run(_deliver())

def reset_memory(self):
self._messages.clear()
for member in self.members:
Expand Down
10 changes: 8 additions & 2 deletions bondai/agents/group_chat/user_proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,12 @@


class UserProxy(EventMixin, ConversationMember):
def __init__(self, persona: str | None = None, parse_recipients: bool = True):
def __init__(
self,
persona: str | None = None,
parse_recipients: bool = True,
auto_exit: bool = False,
):
EventMixin.__init__(
self,
allowed_events=[
Expand All @@ -32,6 +37,7 @@ def __init__(self, persona: str | None = None, parse_recipients: bool = True):
)
self._status = AgentStatus.IDLE
self._parse_recipients = parse_recipients
self._auto_exit = auto_exit

def send_message(
self,
Expand Down Expand Up @@ -67,7 +73,7 @@ def send_message(

cprint("\n" + agent_message.message + "\n", "white")

if not agent_message.require_response:
if not agent_message.require_response or self._auto_exit:
agent_message.success = True
agent_message.cost = 0.0
agent_message.completed_at = datetime.now()
Expand Down
8 changes: 3 additions & 5 deletions bondai/memory/memory_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from bondai.util import load_local_resource
from .archival.datasources import (
ArchivalMemoryDataSource,
InMemoryArchivalMemoryDataSource,
PersistentArchivalMemoryDataSource,
)
from .archival.tools import ArchivalMemoryInsertTool, ArchivalMemorySearchTool
Expand All @@ -19,7 +18,6 @@
)
from .core.datasources import (
CoreMemoryDataSource,
InMemoryCoreMemoryDataSource,
PersistentCoreMemoryDataSource,
)
from .core.tools import CoreMemoryAppendTool, CoreMemoryReplaceTool
Expand All @@ -35,10 +33,10 @@ def __init__(
core_memory_datasource: CoreMemoryDataSource | None = None,
conversation_memory_datasource: ConversationMemoryDataSource | None = None,
archival_memory_datasource: ArchivalMemoryDataSource | None = None,
prompt_builder: Callable[..., str] = JinjaPromptBuilder(
DEFAULT_PROMPT_TEMPLATE
),
prompt_builder: Callable[..., str] | None = None,
):
if prompt_builder is None:
prompt_builder = JinjaPromptBuilder(DEFAULT_PROMPT_TEMPLATE)
self._core_memory_datasource = core_memory_datasource
self._conversation_memory_datasource = conversation_memory_datasource
self._archival_memory_datasource = archival_memory_datasource
Expand Down
16 changes: 7 additions & 9 deletions tests/conversational/hierarchical_conversation.py
Original file line number Diff line number Diff line change
@@ -1,41 +1,35 @@
from bondai.models.openai import get_total_cost, OpenAILLM, OpenAIModelNames
from bondai.tools.file import FileWriteTool
from bondai.agents import ConversationalAgent
from bondai.agents import ConversationalAgent, ConversationMemberEventNames
from bondai.agents.group_chat import (
GroupConversation,
TeamConversationConfig,
UserProxy,
)

llm = OpenAILLM(OpenAIModelNames.GPT4_0613)

user_proxy = UserProxy()
user_proxy = UserProxy(auto_exit=True)

agent_a1 = ConversationalAgent(
name="A1",
instructions="You are a team leader A1, your team consists of A2, A3. You can talk to your team members as well as the other team leader B1, whose team member is B2. Your team members have the values for x and y.",
# llm=llm
)
agent_a2 = ConversationalAgent(
name="A2",
instructions="You are team member A2, you know the secret value of x but not y, x = 9. Tell others x to cooperate.",
# llm=llm
)
agent_a3 = ConversationalAgent(
name="A3",
instructions="You are team member A3, You know the secret value of y but not x, y = 5. Tell others y to cooperate.",
# llm=llm
)
agent_b1 = ConversationalAgent(
name="B1",
instructions="You are a team leader B1, your team consists of B2. You can talk to your team members as wel as the other team leader A1, whose team members are A2, A3.",
# llm=llm
)
agent_b2 = ConversationalAgent(
name="B2",
instructions="You are team member B2. Your task is to find out the value of x and y from the other agents and compute the product. Once you have the answer you must save the value to a file named 'answer.txt' and share the answer with the user",
tools=[FileWriteTool()],
# llm=llm
)

conversation = GroupConversation(
Expand All @@ -47,8 +41,12 @@
)
)

conversation.on(
ConversationMemberEventNames.MESSAGE_RECEIVED,
lambda _, m: print(f"{m.sender_name} to {m.recipient_name}: {m.message}"),
)

conversation.send_message(
agent_b2.name,
"Find the product of x and then notify the user. The other agents know x and y.",
)
print(f"Total Cost: {get_total_cost()}")
15 changes: 15 additions & 0 deletions website/docs/agent-memory/agent-memory.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
---
sidebar_position: 5
---

# Memory Management

Memory Management in BondAI is inspired by the tiered memory approach detailed in the [MemGPT: Towards LLMs as Operating Systems](https://arxiv.org/pdf/2310.08560.pdf) paper. This system mirrors operating systems' memory hierarchies, enhancing large language models' (LLMs) ability to handle extensive contexts and complex conversations. The memory system in BondAI consists of:

- **Core Memory**: Directly integrated into the agent's system prompt, this memory system provides immediate access to essential, current information relevant to ongoing tasks but is limited in size.

- **Conversation Memory**: Captures the complete history of conversational interactions, allowing agents to use keyword search to reference past dialogues.

- **Archival Memory**: Effectively limitless in size, it stores extensive historical data and information. Using semantic search, enabled by the `faiss` library, Archival Memory allows agents to easily access extremely large datasets via what is effectively an implicit RAG pipeline.

All of these memory systems are automatically managed by the **MemoryManager** class which automatically equips BondAI agents with the necessary tools for searching and editing their memory systems. Additionally, the **MemoryManager** is responsible for updating the Agent's system prompt to ensure the appropriate information is included.
117 changes: 117 additions & 0 deletions website/docs/agent-memory/archival-memory.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
---
sidebar_position: 3
---

# Archival Memory

Archival Memory in BondAI, inspired by the [MemGPT paper](https://arxiv.org/pdf/2310.08560.pdf), represents an advanced memory layer that enables semantic search over a virtually infinite memory space. It utilizes embeddings and the faiss library to store and retrieve large volumes of data, making it particularly suitable for extensive historical information, comprehensive data sets, and long-term memory retention. This memory layer allows BondAI agents to access information beyond the immediate conversation or core memory.

# ArchivalMemoryDataSource
**bondai.memory.ArchivalMemoryDataSource**

The ArchivalMemoryDataSource class is an abstract base class defining the interface for archival memory. It allows for the insertion of content and provides a semantic search mechanism to retrieve relevant information based on query embeddings.

```
class ArchivalMemoryDataSource(ABC):
@property
@abstractmethod
def size(self) -> int:
pass
@abstractmethod
def insert(self, content: str):
pass
@abstractmethod
def insert_bulk(self, content: List[str]):
pass
@abstractmethod
def search(self, query: str, page: int = 0) -> List[str]:
pass
@abstractmethod
def clear(self):
pass
```


### Key Features

- **Semantic Search**: Leverages embeddings for deep semantic search, offering precise and relevant results.
- **Vast Memory Capacity**: Suitable for large-scale data storage, effectively handling extensive information.
- **Dynamic Data Management**: Supports insertion, bulk insertion, and deletion of memory content.


# InMemoryArchivalMemoryDataSource
**bondai.memory.InMemoryArchivalMemoryDataSource**

The InMemoryArchivalMemoryDataSource class provides an in-memory implementation of ArchivalMemoryDataSource. This variant is designed for temporary storage and fast access to archival data, primarily used in testing or non-persistent applications.

```
class InMemoryArchivalMemoryDataSource(ArchivalMemoryDataSource):
def __init__(self, embedding_model: EmbeddingModel | None = None, page_size=10):
...
```

### Usage Example

```python
from bondai.memory.archival.datasources import InMemoryArchivalMemoryDataSource
from bondai.models.openai import OpenAIEmbeddingModel, OpenAIModelNames

# Initialize an In-Memory Archival Memory Data Source
in_memory_archival = InMemoryArchivalMemoryDataSource(
embedding_model=OpenAIEmbeddingModel(OpenAIModelNames.TEXT_EMBEDDING_ADA_002)
)

# Insert and search content
in_memory_archival.insert("Temporary archival data")
results = in_memory_archival.search("archival data")
print(results)
```

### Parameters

- **embedding_model (EmbeddingModel)**: Model used for creating content embeddings.
- **page_size (int)**: Number of search results returned per page.


# PersistentArchivalMemoryDataSource
**bondai.memory.PersistentArchivalMemoryDataSource**

PersistentArchivalMemoryDataSource is a concrete implementation of ArchivalMemoryDataSource. It stores data persistently, ensuring the archival memory is retained across sessions.

```
class PersistentArchivalMemoryDataSource(ArchivalMemoryDataSource):
def __init__(
self,
file_path: str = "./.memory/archival-memory.json",
embedding_model: EmbeddingModel | None = None,
page_size=10,
):
...
```

### Usage Example

```python
from bondai.memory.archival.datasources import PersistentArchivalMemoryDataSource
from bondai.models.openai import OpenAIEmbeddingModel, OpenAIModelNames

# Initialize a Persistent Archival Memory Data Source
archival_memory = PersistentArchivalMemoryDataSource(
embedding_model=OpenAIEmbeddingModel(OpenAIModelNames.TEXT_EMBEDDING_ADA_002)
)

# Insert and search content
archival_memory.insert("Historical data on global trends")
results = archival_memory.search("global trends")
print(results)
```

### Parameters

- **file_path (str)**: File path for storing archival memory data.
- **embedding_model (EmbeddingModel)**: Model used for creating content embeddings.
- **page_size (int)**: Number of search results returned per page.
117 changes: 117 additions & 0 deletions website/docs/agent-memory/conversation-memory.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
---
sidebar_position: 2
---

# Conversation Memory

Conversation Memory in BondAI, inspired by the [MemGPT paper](https://arxiv.org/pdf/2310.08560.pdf), assists with maintaining a coherent and continuous dialogue with users. It stores the complete history of interactions and messages, allowing agents to reference previous conversations and provide more relevant and personalized responses. This memory layer is crucial for tasks that require recalling past interactions that may no longer fit inside the LLM context window.

# ConversationMemoryDataSource
**bondai.memory.ConversationMemoryDataSource**

The ConversationMemoryDataSource class is an abstract base class in BondAI that defines the interface for conversation memory management. It outlines methods for adding, removing, searching, and clearing conversation messages, facilitating dynamic interaction history management.

```
class ConversationMemoryDataSource(ABC):
@property
@abstractmethod
def messages(self) -> List[AgentMessage]:
pass
@abstractmethod
def add(self, message: AgentMessage):
pass
@abstractmethod
def remove(self, message: AgentMessage):
pass
def remove_after(self, timestamp: datetime, inclusive: bool = True):
pass
@abstractmethod
def search(
self,
query: str,
start_date: datetime = None,
end_date: datetime = None,
page: int = 0,
) -> List[str]:
pass
@abstractmethod
def clear(self):
pass
```


### Key Features

- **Dynamic Interaction History**: Stores and manages the history of conversations between agents and users.
- **Search Functionality**: Provides methods to search through past messages based on queries or date ranges.
- **Message Management**: Offers functions to add new messages, remove specific messages, and clear the entire history.


# InMemoryConversationMemoryDataSource
**bondai.memory.InMemoryConversationMemoryDataSource**

The InMemoryConversationMemoryDataSource class is an implementation of ConversationMemoryDataSource that stores conversation history in memory. This variant is suitable for temporary or testing environments where persistence of conversation history is not necessary.

```
class InMemoryConversationMemoryDataSource(ConversationMemoryDataSource):
def __init__(self, page_size=10):
...
```

### Usage Example

```python
from bondai.memory.conversation.datasources import InMemoryConversationMemoryDataSource

# Initialize an In-Memory Conversation Memory Data Source
conversation_memory = InMemoryConversationMemoryDataSource()

# Add messages
conversation_memory.add(ConversationMessage(message="My dog's name is Max."))

# Search messages
results = conversation_memory.search('dog')
print(results)
```

### Parameters

- **page_size (int)**: Determines the number of messages to return per page during search operations.


# PersistentConversationMemoryDataSource
**bondai.memory.PersistentConversationMemoryDataSource**

The PersistentConversationMemoryDataSource class offers a persistent approach to storing conversation history. It saves the interaction data to a file, ensuring that conversation history is maintained even after the agent or application restarts.

```
class PersistentConversationMemoryDataSource(InMemoryConversationMemoryDataSource):
def __init__(
self,
file_path: str = "./.memory/conversation-memory.json",
page_size=10
):
...
```

### Usage Example

```python
from bondai.memory.conversation.datasources import PersistentConversationMemoryDataSource

# Initialize a Persistent Conversation Memory Data Source
persistent_memory = PersistentConversationMemoryDataSource()

# Adding a message automatically saves it to disk
persistent_memory.add(ConversationMessage(message="Persistent message"))
```

### Parameters

- **file_path (str)**: Path to the file where conversation history is stored.
- **page_size (int)**: The number of messages to display per page in search results.
Loading

0 comments on commit 44955c7

Please sign in to comment.