Commit

feat: supervisor crew
Luisotee committed Nov 12, 2024
1 parent 853cd6d commit d5ed3cd
Showing 16 changed files with 241 additions and 3 deletions.
2 changes: 0 additions & 2 deletions apps/ai_api/.env.example
@@ -1,7 +1,5 @@
IS_DEBUG=True
API_KEY=sample_api_key
IS_DEBUG=True
API_KEY=sample_api_key
OPENAI_API_KEY=sk-proj-xxxx
CEREBRAS_API_KEY=csk-xxxx
SERPER_API_KEY=xxxx
3 changes: 2 additions & 1 deletion apps/ai_api/eda_ai_api/api/routes/router.py
@@ -1,7 +1,8 @@
from fastapi import APIRouter

from eda_ai_api.api.routes import grant, heartbeat
from eda_ai_api.api.routes import grant, heartbeat, supervisor

api_router = APIRouter()
api_router.include_router(heartbeat.router, tags=["health"], prefix="/health")
api_router.include_router(grant.router, tags=["discovery"], prefix="/grant")
api_router.include_router(supervisor.router, tags=["supervisor"], prefix="/supervisor")
69 changes: 69 additions & 0 deletions apps/ai_api/eda_ai_api/api/routes/supervisor.py
@@ -0,0 +1,69 @@
from fastapi import APIRouter
from eda_ai_api.models.supervisor import SupervisorRequest, SupervisorResponse
from crewai import Agent, Crew, Task, Process
from crewai.project import CrewBase, agent, crew, task
from opportunity_finder.crew import OpportunityFinderCrew
from crewai import LLM
import os

router = APIRouter()


# LLM configuration for the supervisor (SambaNova-hosted Llama 3.1 70B; the Cerebras base_url used by OpportunityFinderCrew is left commented out)
cerebras_llm = LLM(
model="sambanova/Meta-Llama-3.1-70B-Instruct",
api_key=os.environ.get("SAMBANOVA_API_KEY"),
# base_url="https://api.cerebras.ai/v1",
temperature=0.5,
)


@CrewBase
class SupervisorCrew:
    @agent
    def decision_maker(self) -> Agent:
        return Agent(
            role="Decision Maker",
            goal="Analyze user requests and route them to appropriate endpoints",
            backstory="I am an AI supervisor that determines which endpoint to call based on user messages",
            allow_delegation=False,
            llm=cerebras_llm,  # Explicit LLM configuration
        )

    @task
    def route_decision_task(self) -> Task:
        # The {message} placeholder is filled in from kickoff(inputs={"message": ...})
        return Task(
            description="""
            Analyze this message: "{message}"
            Choose between these endpoints:
            1. discovery - For finding grant opportunities
            2. heartbeat - For checking system health
            Return only: "discovery" or "heartbeat"
            """,
            expected_output='A single word: either "discovery" or "heartbeat"',
            agent=self.decision_maker(),
        )

    @crew
    def crew(self) -> Crew:
        return Crew(
            agents=[self.decision_maker()],
            tasks=[self.route_decision_task()],
            process=Process.sequential,
            verbose=True,
        )


@router.post("/supervisor", response_model=SupervisorResponse)
async def supervisor_route(request: SupervisorRequest) -> SupervisorResponse:
    supervisor = SupervisorCrew()
    decision = supervisor.crew().kickoff(inputs={"message": request.message})

    # kickoff() returns a CrewOutput; compare against its text form
    if str(decision).strip().lower() == "discovery":
        topics = ["AI", "Technology"]  # TODO: extract topics from the message
        crew = OpportunityFinderCrew()
        result = crew.crew().kickoff(inputs={"topics": ", ".join(topics)})
    else:
        result = {"is_alive": True}

    return SupervisorResponse(result=str(result))
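
A quick way to exercise the new route once the API is running — a minimal sketch, not part of the commit. The host, port, and the `requests` dependency are assumptions, and the final URL depends on where `api_router` is mounted (the router prefix and route path above combine to `/supervisor/supervisor`):

```python
# Hypothetical smoke test for the new endpoint; host and port are assumptions.
import requests

payload = {"message": "Find grant opportunities for AI projects"}  # SupervisorRequest body

# Router prefix "/supervisor" + route path "/supervisor"; adjust to the app's actual mount point.
resp = requests.post("http://localhost:8000/supervisor/supervisor", json=payload)
resp.raise_for_status()

print(resp.json()["result"])  # SupervisorResponse carries a single "result" string
```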
9 changes: 9 additions & 0 deletions apps/ai_api/eda_ai_api/models/supervisor.py
@@ -0,0 +1,9 @@
from pydantic import BaseModel


class SupervisorResponse(BaseModel):
    result: str


class SupervisorRequest(BaseModel):
    message: str
Binary file modified bun.lockb
Binary file not shown.
6 changes: 6 additions & 0 deletions plugins/supervisor/.env.example
@@ -0,0 +1,6 @@
OPENAI_API_KEY=sk-xxxx
# GROQ_API_KEY=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
CEREBRAS_API_KEY=csk-xxxx
SERPER_API_KEY=xxxx
LANGTRACE_API_KEY=xxxx
LANGTRACE_API_HOST=http://localhost:3000/api/trace
2 changes: 2 additions & 0 deletions plugins/supervisor/.gitignore
@@ -0,0 +1,2 @@
.env
__pycache__/
54 changes: 54 additions & 0 deletions plugins/supervisor/README.md
@@ -0,0 +1,54 @@
# OpportunityFinder Crew

Welcome to the OpportunityFinder Crew project, powered by [crewAI](https://crewai.com). This template is designed to help you set up a multi-agent AI system with ease, leveraging the powerful and flexible framework provided by crewAI. Our goal is to enable your agents to collaborate effectively on complex tasks, maximizing their collective intelligence and capabilities.

## Installation

Ensure you have Python >=3.10 <=3.13 installed on your system. This project uses [UV](https://docs.astral.sh/uv/) for dependency management and package handling, offering a seamless setup and execution experience.

First, if you haven't already, install uv:

```bash
pip install uv
```

Next, navigate to your project directory and install the dependencies. (Optional) Lock the dependencies and install them using the crewAI CLI:
```bash
crewai install
```
### Customizing

**Add your `OPENAI_API_KEY` into the `.env` file**

- Modify `src/opportunity_finder/config/agents.yaml` to define your agents
- Modify `src/opportunity_finder/config/tasks.yaml` to define your tasks
- Modify `src/opportunity_finder/crew.py` to add your own logic, tools and specific args
- Modify `src/opportunity_finder/main.py` to add custom inputs for your agents and tasks

## Running the Project

To kickstart your crew of AI agents and begin task execution, run this from the root folder of your project:

```bash
$ crewai run
```

This command initializes the opportunity-finder Crew, assembling the agents and assigning them tasks as defined in your configuration.

This example, run unmodified, will create a `report.md` file in the root folder with the output of a research run on LLMs.

## Understanding Your Crew

The opportunity-finder Crew is composed of multiple AI agents, each with unique roles, goals, and tools. These agents collaborate on a series of tasks, defined in `config/tasks.yaml`, leveraging their collective skills to achieve complex objectives. The `config/agents.yaml` file outlines the capabilities and configurations of each agent in your crew.

## Support

For support, questions, or feedback regarding the OpportunityFinder Crew or crewAI:
- Visit our [documentation](https://docs.crewai.com)
- Reach out to us through our [GitHub repository](https://github.com/joaomdmoura/crewai)
- [Join our Discord](https://discord.com/invite/X4JWnZnxPb)
- [Chat with our docs](https://chatg.pt/DWjSBZn)

Let's create wonders together with the power and simplicity of crewAI.
17 changes: 17 additions & 0 deletions plugins/supervisor/pyproject.toml
@@ -0,0 +1,17 @@
[project]
name = "supervisor"
version = "0.1.0"
description = "Supervisor crew using crewAI"
authors = [{ name = "Your Name", email = "[email protected]" }]
requires-python = ">=3.10,<=3.13"
dependencies = [
"crewai[agentops,tools]>=0.76.2,<1.0.0",
]

[project.scripts]
supervisor = "supervisor.main:run"
run_crew = "supervisor.main:run"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
Empty file.
4 changes: 4 additions & 0 deletions plugins/supervisor/src/opportunity_finder/config/agents.yaml
@@ -0,0 +1,4 @@
decision_maker:
  role: "Decision Maker"
  goal: "Analyze user requests and route them to appropriate endpoints"
  backstory: "You're an expert supervisor that determines which endpoint to call based on user messages. You understand the context and requirements for different types of requests and can accurately route them to the correct service."
9 changes: 9 additions & 0 deletions plugins/supervisor/src/opportunity_finder/config/tasks.yaml
@@ -0,0 +1,9 @@
route_decision_task:
  description: >
    Analyze the user message and determine the appropriate service to handle the request.
    User message: "{message}"
    Consider the context and requirements to route between:
    1. discovery - For finding grant opportunities
    2. heartbeat - For checking system health
  expected_output: >
    A single word response: either "discovery" or "heartbeat"
  agent: decision_maker
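
For reference, a minimal sketch of how a kickoff input reaches the `{message}` placeholder in this description; the import path is an assumption based on the directory layout shown in this diff:

```python
# Sketch: kickoff inputs are interpolated into the YAML-configured task description.
from opportunity_finder.crew import SupervisorCrew  # assumed import path

crew = SupervisorCrew().crew()
# "{message}" in route_decision_task's description is replaced with this value before the agent runs.
output = crew.kickoff(inputs={"message": "Find grants for open-source AI tooling"})
print(output)  # expected to be "discovery" or "heartbeat"
```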
43 changes: 43 additions & 0 deletions plugins/supervisor/src/opportunity_finder/crew.py
@@ -0,0 +1,43 @@
import os
import datetime

from crewai import Agent, Crew, Process, Task, LLM
from crewai.project import CrewBase, agent, crew, task

cerebras_llm = LLM(
    model="cerebras/llama3.1-70b",
    api_key=os.environ.get("CEREBRAS_API_KEY"),
    base_url="https://api.cerebras.ai/v1",
    temperature=0.5,
)


@CrewBase
class SupervisorCrew:
    """Supervisor crew"""

    @agent
    def decision_maker(self) -> Agent:
        return Agent(
            config=self.agents_config["decision_maker"],
            verbose=True,
            allow_delegation=False,
            llm=cerebras_llm,
        )

    @task
    def route_decision_task(self) -> Task:
        # The {message} placeholder in tasks.yaml is filled in from kickoff(inputs=...)
        return Task(
            config=self.tasks_config["route_decision_task"],
        )

    @crew
    def crew(self) -> Crew:
        """Creates the Supervisor crew"""
        return Crew(
            agents=[self.decision_maker()],
            tasks=[self.route_decision_task()],
            process=Process.sequential,
            verbose=True,
            output_log_file=f'SupervisorCrew_{datetime.datetime.now().strftime("%Y%m%d_%H%M%S")}.log',
        )
14 changes: 14 additions & 0 deletions plugins/supervisor/src/opportunity_finder/main.py
@@ -0,0 +1,14 @@
from supervisor.crew import SupervisorCrew


def run():
    """Run the supervisor crew"""
    crew = SupervisorCrew()
    result = crew.crew().kickoff(
        inputs={"message": "Find grant opportunities for AI projects"}
    )
    print(f"Decision: {result}")


if __name__ == "__main__":
    run()
Empty file.
12 changes: 12 additions & 0 deletions plugins/supervisor/src/opportunity_finder/tools/custom_tool.py
@@ -0,0 +1,12 @@
from crewai_tools import BaseTool


class MyCustomTool(BaseTool):
    name: str = "Name of my tool"
    description: str = (
        "Clear description of what this tool is useful for; your agent will need this information to use it."
    )

    def _run(self, argument: str) -> str:
        # Implementation goes here
        return "this is an example of a tool output, ignore it and move along."
