Skip to content

Commit

Permalink
Merge pull request #15 from digidem/luisotee/intent_classification
Browse files Browse the repository at this point in the history
feat: add supervisor and onboarding crew #11 #1
  • Loading branch information
luandro authored Nov 22, 2024
2 parents 6ce60e8 + 2af9c57 commit 0b4c35e
Show file tree
Hide file tree
Showing 28 changed files with 5,056 additions and 113 deletions.
15 changes: 7 additions & 8 deletions apps/ai_api/.env.example
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
IS_DEBUG=True
API_KEY=sample_api_key
IS_DEBUG=True
API_KEY=sample_api_key
OPENAI_API_KEY=sk-proj-xxxx
CEREBRAS_API_KEY=csk-xxxx
SERPER_API_KEY=xxxx
LANGTRACE_API_KEY=xxxx
IS_DEBUG=True
API_KEY=sample_api_key # NOT USED
OPENAI_API_KEY=sk-proj-xxxx # NOT USED
CEREBRAS_API_KEY=csk-xxxx # NOT USED
SERPER_API_KEY=xxxx # Needed to use Serper tool
LANGTRACE_API_KEY=xxxx
GROQ_API_KEY=xxxx # API key for the LLM model
19 changes: 19 additions & 0 deletions apps/ai_api/eda_ai_api/api/routes/onboarding.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from fastapi import APIRouter
from onboarding.crew import OnboardingCrew
from pydantic import BaseModel


class OnboardingGuideResponse(BaseModel):
    """Response body for GET /guide: the generated user-guide text."""

    # Plain-text guide produced by OnboardingCrew (stringified CrewOutput.raw).
    guide: str


# Onboarding endpoints; mounted under the /onboarding prefix in router.py.
router = APIRouter()


@router.get("/guide", response_model=OnboardingGuideResponse, name="guide")
async def get_bot_guide() -> OnboardingGuideResponse:
    """Generate a user guide explaining how to use the Earth Defenders Assistant.

    Runs the OnboardingCrew synchronously and returns its raw output as text.
    """
    output = OnboardingCrew().crew().kickoff()
    # CrewOutput.raw holds the crew's final answer; coerce to str for the model.
    return OnboardingGuideResponse(guide=str(output.raw))
4 changes: 3 additions & 1 deletion apps/ai_api/eda_ai_api/api/routes/router.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
from fastapi import APIRouter

from eda_ai_api.api.routes import grant, heartbeat
from eda_ai_api.api.routes import grant, heartbeat, onboarding, supervisor

# Top-level API router: each feature router is mounted under its own prefix.
api_router = APIRouter()
api_router.include_router(heartbeat.router, tags=["health"], prefix="/health")
api_router.include_router(grant.router, tags=["discovery"], prefix="/grant")
api_router.include_router(supervisor.router, tags=["supervisor"], prefix="/supervisor")
api_router.include_router(onboarding.router, tags=["onboarding"], prefix="/onboarding")
134 changes: 134 additions & 0 deletions apps/ai_api/eda_ai_api/api/routes/supervisor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
import os

from fastapi import APIRouter
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_groq import ChatGroq
from onboarding.crew import OnboardingCrew
from opportunity_finder.crew import OpportunityFinderCrew
from proposal_writer.crew import ProposalWriterCrew

from eda_ai_api.models.supervisor import SupervisorRequest, SupervisorResponse

router = APIRouter()

# Setup LLM and prompt
# Single Groq-hosted chat model shared by the routing/extraction chains below.
# NOTE(review): constructed at import time, so a missing/invalid GROQ_API_KEY
# only surfaces when the first request runs a chain — confirm this is intended.
llm = ChatGroq(
    model_name="llama3-groq-70b-8192-tool-use-preview",
    api_key=os.environ.get("GROQ_API_KEY"),
    temperature=0.5,
)

# Classifier prompt: maps a user message to exactly one service keyword,
# which supervisor_route then dispatches on.
ROUTER_TEMPLATE = """
Given a user message, determine the appropriate service to handle the request.
Choose between:
- discovery: For finding grant opportunities
- proposal: For writing grant proposals
- onboarding: For getting help using the system
- heartbeat: For checking system health
User message: {message}
Return only one word (discovery/proposal/onboarding/heartbeat):"""


# Extracts a comma-separated topic list for the discovery path (capped at 5
# by the prompt; the route truncates to 5 again as a safety check).
TOPIC_EXTRACTOR_TEMPLATE = """
Extract up to 5 most relevant topics for grant opportunity research from the user message.
Return only a comma-separated list of topics (maximum 5), no other text.
User message: {message}
Topics:"""

# Extracts "project_name|grant_name" for the proposal path; "unknown" is the
# documented placeholder when a value cannot be determined.
PROPOSAL_EXTRACTOR_TEMPLATE = """
Extract the community project name and grant program name from the user message.
Return in format: project_name|grant_name
If either cannot be determined, use "unknown" as placeholder.
User message: {message}
Output:"""

# Create prompt templates
router_prompt = PromptTemplate(input_variables=["message"], template=ROUTER_TEMPLATE)
topic_prompt = PromptTemplate(
    input_variables=["message"], template=TOPIC_EXTRACTOR_TEMPLATE
)
proposal_prompt = PromptTemplate(
    input_variables=["message"], template=PROPOSAL_EXTRACTOR_TEMPLATE
)

# Create LLM chains
# NOTE(review): LLMChain is deprecated in newer LangChain releases in favor of
# "prompt | llm" runnable composition — consider migrating.
router_chain = LLMChain(llm=llm, prompt=router_prompt)
topic_chain = LLMChain(llm=llm, prompt=topic_prompt)
proposal_chain = LLMChain(llm=llm, prompt=proposal_prompt)


def _debug_banner(*lines: str) -> None:
    """Print debug lines framed by '=' separator rules.

    NOTE(review): temporary stdout debugging — consider switching to the
    logging module (or the project's loguru dependency) before production.
    """
    sep = "=" * 50
    print(f"\n{sep}")
    for line in lines:
        print(f"       {line}")
    print(f"{sep}\n")


def _run_discovery(message: str):
    """Extract up to 5 research topics from *message* and run OpportunityFinderCrew.

    Falls back to generic topics when the extractor returns nothing usable.
    Returns the crew's kickoff result (CrewOutput).
    """
    topics_raw = topic_chain.run(message=message)
    # Prompt already caps at 5; truncate again as a safety check.
    topics = [t.strip() for t in topics_raw.split(",") if t.strip()][:5]
    if not topics:
        topics = ["AI", "Technology"]  # fallback topics
    _debug_banner(f"EXTRACTED TOPICS: {topics}")
    return (
        OpportunityFinderCrew().crew().kickoff(inputs={"topics": ", ".join(topics)})
    )


def _run_proposal(message: str):
    """Extract "project_name|grant_name" from *message* and run ProposalWriterCrew.

    Missing fields default to "unknown", mirroring the extractor prompt's
    documented placeholder. Returns the crew's kickoff result (CrewOutput).
    """
    extracted = proposal_chain.run(message=message).split("|")
    # str.split always yields at least one element, so index 0 is safe.
    community_project = extracted[0].strip()
    grant_call = extracted[1].strip() if len(extracted) > 1 else "unknown"
    _debug_banner(
        f"PROJECT NAME: {community_project}",
        f"GRANT PROGRAM: {grant_call}",
    )
    return (
        ProposalWriterCrew(community_project=community_project, grant_call=grant_call)
        .crew()
        .kickoff()
    )


@router.post("/supervisor", response_model=SupervisorResponse)
async def supervisor_route(request: SupervisorRequest) -> SupervisorResponse:
    """Route a free-form user message to the matching service.

    The router chain classifies the message as one of
    discovery / proposal / onboarding / heartbeat; the matching handler runs
    and its result is stringified into the response. Any exception is caught
    and reported in the response body rather than propagating as an HTTP 500.
    """
    try:
        # Get routing decision from LLM
        decision = router_chain.run(message=request.message).strip().lower()
        _debug_banner(f"INPUT MESSAGE: {request.message}", f"DECISION: {decision}")

        if decision == "discovery":
            result = _run_discovery(request.message)
        elif decision == "proposal":
            result = _run_proposal(request.message)
        elif decision == "heartbeat":
            result = {"is_alive": True}
        elif decision == "onboarding":
            result = OnboardingCrew().crew().kickoff()
        else:
            result = {"error": f"Unknown decision type: {decision}"}

    except Exception as e:
        # Deliberate catch-all boundary: callers always get a parseable
        # SupervisorResponse instead of a 500.
        result = {"error": f"Error processing request: {str(e)}"}

    return SupervisorResponse(result=str(result))
13 changes: 13 additions & 0 deletions apps/ai_api/eda_ai_api/models/onboarding.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from pydantic import BaseModel


class OnboardingResponse(BaseModel):
    """Response model for onboarding explanations"""

    # Natural-language explanation text returned to the caller.
    explanation: str


class OnboardingRequest(BaseModel):
    """Request model for onboarding queries"""

    # Optional topic to focus the explanation on; None means no specific topic.
    # NOTE(review): these models are not used by the visible onboarding route,
    # which declares its own OnboardingGuideResponse — confirm they are needed.
    topic: str | None = None
9 changes: 9 additions & 0 deletions apps/ai_api/eda_ai_api/models/supervisor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from pydantic import BaseModel


class SupervisorResponse(BaseModel):
    """Response body for POST /supervisor."""

    # Stringified handler output: either str(CrewOutput) or a stringified
    # error/status dict built by supervisor_route.
    result: str


class SupervisorRequest(BaseModel):
    """Request body for POST /supervisor."""

    # Free-form user message to classify and route.
    message: str
5 changes: 5 additions & 0 deletions apps/ai_api/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,10 @@ dependencies = [
"joblib>=1.3.2",
"loguru>=0.7.2",
"pydantic>=2.5.3",
"langchain-groq>=0.2.1",
"opportunity-finder",
"proposal_writer",
"onboarding"
]

[project.optional-dependencies]
Expand Down Expand Up @@ -67,6 +70,8 @@ ignore_missing_imports = true

[tool.uv.sources]
opportunity-finder = { path = "../../plugins/grant_plugin/opportunity_finder" }
proposal_writer = { path = "../../plugins/grant_plugin/proposal_writer" }
onboarding = { path = "../../plugins/onboarding" }

[build-system]
requires = ["hatchling"]
Expand Down
2 changes: 0 additions & 2 deletions apps/ai_api/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@


import pytest
from starlette.config import environ
from starlette.testclient import TestClient
Expand Down
Loading

0 comments on commit 0b4c35e

Please sign in to comment.