
Commit

Merge branch 'main' into filesurfer
afourney authored Nov 25, 2024
2 parents 3ffd56e + a14f208 commit 3fddc23
Showing 29 changed files with 363 additions and 300 deletions.
21 changes: 21 additions & 0 deletions .github/workflows/checks.yml
@@ -153,6 +153,27 @@ jobs:
poe --directory ${{ matrix.package }} docs-check
working-directory: ./python

docs-example-check:
runs-on: ubuntu-latest
strategy:
matrix:
package: ["./packages/autogen-core"]
steps:
- uses: actions/checkout@v4
- uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- uses: actions/setup-python@v5
with:
python-version: "3.11"
- run: uv sync --locked --all-extras
working-directory: ./python
- name: Run task
run: |
source ${{ github.workspace }}/python/.venv/bin/activate
poe --directory ${{ matrix.package }} docs-check-examples
working-directory: ./python

check-proto-changes-python:
runs-on: ubuntu-latest
steps:
6 changes: 5 additions & 1 deletion dotnet/samples/Hello/HelloAgent/HelloAgent.csproj
@@ -5,7 +5,11 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>

<ItemGroup>
<None Update="appsettings.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Hosting" />
</ItemGroup>
@@ -18,8 +18,6 @@
<PackageReference Include="Grpc.AspNetCore" />
<PackageReference Include="Grpc.Net.ClientFactory" />
<PackageReference Include="Grpc.Tools" PrivateAssets="All" />
<PackageReference Include="Microsoft.Orleans.Core.Abstractions" />
<PackageReference Include="Microsoft.Orleans.Sdk" />
<PackageReference Include="Microsoft.SemanticKernel" />
</ItemGroup>
</Project>
@@ -29,7 +29,7 @@ public static IHostApplicationBuilder AddAgentService(this IHostApplicationBuild

public static IHostApplicationBuilder AddLocalAgentService(this IHostApplicationBuilder builder, bool useGrpc = true)
{
return builder.AddAgentService(local: false, useGrpc);
return builder.AddAgentService(local: true, useGrpc);
}

public static WebApplication MapAgentService(this WebApplication app, bool local = false, bool useGrpc = true)
@@ -1,7 +1,8 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// IGateway.cs
using Microsoft.AutoGen.Abstractions;

namespace Microsoft.AutoGen.Abstractions;
namespace Microsoft.AutoGen.Agents;

public interface IGateway : IGrainObserver
{
@@ -24,9 +24,11 @@ class CodeExecutorAgent(BaseChatAgent):
.. code-block:: python
import asyncio
from autogen_agentchat.agents import CodeExecutorAgent
from autogen_agentchat.messages import TextMessage
from autogen_ext.code_executors import DockerCommandLineCodeExecutor
from autogen_core.base import CancellationToken
async def run_code_executor_agent() -> None:
@@ -51,8 +53,7 @@ async def run_code_executor_agent() -> None:
await code_executor.stop()
# Use asyncio.run(run_code_executor_agent()) when running in a script.
await run_code_executor_agent()
asyncio.run(run_code_executor_agent())
"""

@@ -23,15 +23,15 @@ class TerminationCondition(ABC):
.. code-block:: python
import asyncio
from autogen_agentchat.teams import MaxTurnsTermination, TextMentionTermination
from autogen_agentchat.task import MaxMessageTermination, TextMentionTermination
async def main() -> None:
# Terminate the conversation after 10 turns or if the text "TERMINATE" is mentioned.
cond1 = MaxTurnsTermination(10) | TextMentionTermination("TERMINATE")
cond1 = MaxMessageTermination(10) | TextMentionTermination("TERMINATE")
# Terminate the conversation after 10 turns and if the text "TERMINATE" is mentioned.
cond2 = MaxTurnsTermination(10) & TextMentionTermination("TERMINATE")
cond2 = MaxMessageTermination(10) & TextMentionTermination("TERMINATE")
# ...
@@ -218,6 +218,8 @@ class ExternalTermination(TerminationCondition):
.. code-block:: python
from autogen_agentchat.task import ExternalTermination
termination = ExternalTermination()
# Run the team in an asyncio task.
@@ -257,16 +257,18 @@ async def book_trip() -> str:
.. code-block:: python
import asyncio
from typing import Sequence
from autogen_ext.models import OpenAIChatCompletionClient
from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.teams import SelectorGroupChat
from autogen_agentchat.task import TextMentionTermination, Console
from autogen_agentchat.messages import AgentMessage
async def main() -> None:
model_client = OpenAIChatCompletionClient(model="gpt-4o")
def check_caculation(x: int, y: int, answer: int) -> str:
def check_calculation(x: int, y: int, answer: int) -> str:
if x + y == answer:
return "Correct!"
else:
@@ -281,12 +283,12 @@ def check_caculation(x: int, y: int, answer: int) -> str:
agent2 = AssistantAgent(
"Agent2",
model_client,
tools=[check_caculation],
tools=[check_calculation],
description="For checking calculation",
system_message="Check the answer and respond with 'Correct!' or 'Incorrect!'",
)
def selector_func(messages):
def selector_func(messages: Sequence[AgentMessage]) -> str | None:
if len(messages) == 1 or messages[-1].content == "Incorrect!":
return "Agent1"
if messages[-1].source == "Agent1":
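
The selector function in this example gains an explicit signature, from `Sequence[AgentMessage]` to `str | None`. A standalone sketch of the same idea follows; the branches after the "Incorrect!" check are illustrative assumptions, since the tail of the example is not shown here:

```python
from typing import Sequence

from autogen_agentchat.messages import AgentMessage


def selector_func(messages: Sequence[AgentMessage]) -> str | None:
    # On the first turn, or whenever the checker replies "Incorrect!", let Agent1 try again.
    if len(messages) == 1 or messages[-1].content == "Incorrect!":
        return "Agent1"
    # Once Agent1 has answered, hand the turn to the checking agent (assumed to be Agent2).
    if messages[-1].source == "Agent1":
        return "Agent2"
    # Returning None defers to the group chat's default speaker selection.
    return None
```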
98 changes: 98 additions & 0 deletions python/packages/autogen-core/docs/src/_extension/code_lint.py
@@ -0,0 +1,98 @@
# Modified from: https://github.com/kai687/sphinxawesome-codelinter

import tempfile
from typing import AbstractSet, Any, Iterable

from docutils import nodes
from sphinx.application import Sphinx
from sphinx.builders import Builder
from sphinx.util import logging
from sphinx.util.console import darkgreen, darkred, red, teal, faint # type: ignore[attr-defined]

from pygments import highlight # type: ignore
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

logger = logging.getLogger(__name__)

__version__ = "0.1.0"


class CodeLinter(Builder):
"""Iterate over all ``literal_block`` nodes.
pipe them into any command line tool that
can read from standard input.
"""

name = "code_lint"
allow_parallel = True

def init(self) -> None:
"""Initialize."""
self._had_errors = False
pass

def get_outdated_docs(self) -> str | Iterable[str]:
"""Check for outdated files.
Return an iterable of outdated output files, or a string describing what an
update will build.
"""
return self.env.found_docs

def get_target_uri(self, docname: str, typ: str | None = None) -> str:
"""Return Target URI for a document name."""
return ""

def prepare_writing(self, docnames: AbstractSet[str]) -> None:
"""Run these steps before documents are written."""
return

def write_doc(self, docname: str, doctree: nodes.Node) -> None:
path_prefix: str = self.app.config.code_lint_path_prefix
supported_languages = set(["python"])

if not docname.startswith(path_prefix):
return

for code in doctree.findall(nodes.literal_block):
if code["language"] in supported_languages:
logger.info("Checking a code block in %s...", docname, nonl=True)
if "ignore" in code["classes"]:
logger.info(" " + darkgreen("OK[ignored]"))
continue

# Create a temporary file to store the code block
with tempfile.NamedTemporaryFile(mode="wb", suffix=".py") as temp_file:
temp_file.write(code.astext().encode())
temp_file.flush()

# Run pyright on the temporary file using subprocess.run
import subprocess

result = subprocess.run(["pyright", temp_file.name], capture_output=True, text=True)
if result.returncode != 0:
logger.info(" " + darkred("FAIL"))
highlighted_code = highlight(code.astext(), PythonLexer(), TerminalFormatter()) # type: ignore
output = f"{faint('========================================================')}\n{red('Error')}: Pyright found issues in {teal(docname)}:\n{faint('--------------------------------------------------------')}\n{highlighted_code}\n{faint('--------------------------------------------------------')}\n\n{teal('pyright output:')}\n{red(result.stdout)}{faint('========================================================')}\n"
logger.info(output)
self._had_errors = True
else:
logger.info(" " + darkgreen("OK"))

def finish(self) -> None:
"""Finish the build process."""
if self._had_errors:
raise RuntimeError("Code linting failed - see earlier output")


def setup(app: Sphinx) -> dict[str, Any]:
app.add_builder(CodeLinter)
app.add_config_value("code_lint_path_prefix", "", "env")

return {
"version": __version__,
"parallel_read_safe": True,
"parallel_write_safe": True,
}
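
At its core, `write_doc` writes each Python `literal_block` to a temporary file and runs the `pyright` CLI on it, treating a non-zero exit code as a failure. A self-contained sketch of that check, assuming `pyright` is available on the PATH:

```python
import subprocess
import tempfile

# A snippet with a deliberate type error, standing in for a docs code block.
snippet = 'x: int = "not an int"  # pyright should report a type error here\n'

with tempfile.NamedTemporaryFile(mode="w", suffix=".py") as temp_file:
    temp_file.write(snippet)
    temp_file.flush()
    # A non-zero return code means pyright found problems in the snippet.
    result = subprocess.run(["pyright", temp_file.name], capture_output=True, text=True)

if result.returncode != 0:
    print("FAIL")
    print(result.stdout)
else:
    print("OK")
```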
11 changes: 10 additions & 1 deletion python/packages/autogen-core/docs/src/conf.py
@@ -37,7 +37,8 @@
"sphinx_copybutton",
"_extension.gallery_directive",
"myst_nb",
"sphinxcontrib.autodoc_pydantic"
"sphinxcontrib.autodoc_pydantic",
"_extension.code_lint",
]
suppress_warnings = ["myst.header"]

@@ -148,6 +149,14 @@

intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}

code_lint_path_prefix = "reference/python"

nb_mime_priority_overrides = [
('code_lint', 'image/jpeg', 100),
('code_lint', 'image/png', 100),
('code_lint', 'text/plain', 100)
]


def setup_to_main(
app: Sphinx, pagename: str, templatename: str, context, doctree
@@ -64,6 +64,10 @@ Install the `autogen-agentchat` package using pip:
pip install 'autogen-agentchat==0.4.0.dev6'
```

```{note}
Python 3.10 or later is required.
```

## Install OpenAI for Model Client

To use the OpenAI and Azure OpenAI models, you need to install the following
8 changes: 8 additions & 0 deletions python/packages/autogen-core/pyproject.toml
@@ -72,6 +72,7 @@ dev-dependencies = [
"sphinx",
"sphinxcontrib-apidoc",
"autodoc_pydantic~=2.2",
"pygments",

# Documentation tooling
"sphinx-autobuild",
@@ -153,3 +154,10 @@ ref = "docs-apidoc-all"

[[tool.poe.tasks.docs-check.sequence]]
cmd = "sphinx-build --fail-on-warning docs/src docs/build"

[[tool.poe.tasks.docs-check-examples.sequence]]
ref = "docs-apidoc-all"

[[tool.poe.tasks.docs-check-examples.sequence]]
cmd = "sphinx-build -b code_lint docs/src docs/build"

@@ -147,6 +147,23 @@ def _stop_when_idle(self) -> bool:
return self._run_state == RunContext.RunState.UNTIL_IDLE and self._runtime.idle


def _warn_if_none(value: Any, handler_name: str) -> None:
"""
Utility function to check if the intervention handler returned None and issue a warning.
Args:
value: The return value to check
handler_name: Name of the intervention handler method for the warning message
"""
if value is None:
warnings.warn(
f"Intervention handler {handler_name} returned None. This might be unintentional. "
"Consider returning the original message or DropMessage explicitly.",
RuntimeWarning,
stacklevel=2,
)


class SingleThreadedAgentRuntime(AgentRuntime):
def __init__(
self,
@@ -433,6 +450,7 @@ async def process_next(self) -> None:
):
try:
temp_message = await handler.on_send(message, sender=sender, recipient=recipient)
_warn_if_none(temp_message, "on_send")
except BaseException as e:
future.set_exception(e)
return
@@ -456,6 +474,7 @@ async def process_next(self) -> None:
):
try:
temp_message = await handler.on_publish(message, sender=sender)
_warn_if_none(temp_message, "on_publish")
except BaseException as e:
# TODO: we should raise the intervention exception to the publisher.
logger.error(f"Exception raised in in intervention handler: {e}", exc_info=True)
@@ -474,6 +493,7 @@ async def process_next(self) -> None:
for handler in self._intervention_handlers:
try:
temp_message = await handler.on_response(message, sender=sender, recipient=recipient)
_warn_if_none(temp_message, "on_response")
except BaseException as e:
# TODO: should we raise the exception to sender of the response instead?
future.set_exception(e)
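
The new `_warn_if_none` helper runs after each intervention hook (`on_send`, `on_publish`, `on_response`) so that a handler which forgets to return anything is flagged instead of silently turning the message into `None`. A self-contained sketch of the behaviour, with a plain callable standing in for a real intervention handler:

```python
import warnings
from typing import Any


def _warn_if_none(value: Any, handler_name: str) -> None:
    # Same check as the runtime helper above: a None return is probably a bug.
    if value is None:
        warnings.warn(
            f"Intervention handler {handler_name} returned None. This might be unintentional. "
            "Consider returning the original message or DropMessage explicitly.",
            RuntimeWarning,
            stacklevel=2,
        )


def forgetful_on_send(message: dict[str, Any]) -> dict[str, Any] | None:
    # Bug: the handler inspects the message but forgets to return it.
    message["inspected"] = True
    return None


result = forgetful_on_send({"content": "hello"})
_warn_if_none(result, "on_send")  # emits a RuntimeWarning pointing at this call site
```
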
@@ -89,19 +89,6 @@ async def register(
agent_factory (Callable[[], T]): The factory that creates the agent, where T is a concrete Agent type. Inside the factory, use `autogen_core.base.AgentInstantiationContext` to access variables like the current runtime and agent ID.
subscriptions (Callable[[], list[Subscription]] | list[Subscription] | None, optional): The subscriptions that the agent should be subscribed to. Defaults to None.
Example:
.. code-block:: python
runtime.register(
"chat_agent",
lambda: ChatCompletionAgent(
description="A generic chat agent.",
system_messages=[SystemMessage("You are a helpful assistant")],
model_client=OpenAIChatCompletionClient(model="gpt-4o"),
memory=BufferedChatMemory(buffer_size=10),
),
)
"""
...

@@ -117,20 +104,6 @@ async def register_factory(
Args:
type (str): The type of agent this factory creates. It is not the same as agent class name. The `type` parameter is used to differentiate between different factory functions rather than agent classes.
agent_factory (Callable[[], T]): The factory that creates the agent, where T is a concrete Agent type. Inside the factory, use `autogen_core.base.AgentInstantiationContext` to access variables like the current runtime and agent ID.
Example:
.. code-block:: python
runtime.register(
"chat_agent",
lambda: ChatCompletionAgent(
description="A generic chat agent.",
system_messages=[SystemMessage("You are a helpful assistant")],
model_client=OpenAIChatCompletionClient(model="gpt-4o"),
memory=BufferedChatMemory(buffer_size=10),
),
)
"""
...
