- Frontend: Vite + React + TypeScript chat interface - Backend: FastAPI gateway with LangGraph routing - Knowledge Service: ChromaDB RAG with Gitea scraper - LangGraph Service: Multi-agent orchestration - Airflow: Scheduled Gitea ingestion DAG - Documentation: Complete plan and implementation guides Architecture: - Modular Docker Compose per service - External ai-mesh network for communication - Fast rebuilds with /app/packages pattern - Intelligent agent routing (no hardcoded keywords) Services: - Frontend (5173): React chat UI - Chat Gateway (8000): FastAPI entry point - LangGraph (8090): Agent orchestration - Knowledge (8080): ChromaDB RAG - Airflow (8081): Scheduled ingestion - PostgreSQL (5432): Chat history Excludes: node_modules, .venv, chroma_db, logs, .env files Includes: All source code, configs, docs, docker files
81 lines
2.3 KiB
Python
81 lines
2.3 KiB
Python
# Standard library
import logging
import sys

# Third-party
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

# Local
from supervisor_agent import process_query

# Log to stdout so container platforms (Docker/K8s) capture the stream.
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
logger = logging.getLogger(__name__)
app = FastAPI(title="LangGraph Supervisor Service")

# Permit cross-origin requests from any host (development-friendly default).
# NOTE(review): per the CORS spec, browsers reject allow_origins=["*"] when
# combined with allow_credentials=True — confirm whether credentialed
# requests are actually needed, or pin explicit origins.
app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],
    allow_headers=["*"],
    allow_methods=["*"],
)
class QueryRequest(BaseModel):
    """Request body for POST /query."""

    # Natural-language query to route through the supervisor graph.
    query: str
class QueryResponse(BaseModel):
    """Response body for POST /query."""

    # Final answer text produced by the routed agent.
    response: str
    # Name of the agent that handled the query ("error" on failure).
    agent_used: str
    # Routing/diagnostic metadata returned by the supervisor graph.
    context: dict
@app.get("/health")
async def health():
    """Liveness probe: confirm the supervisor service is up."""
    return {"service": "langgraph-supervisor", "status": "healthy"}
@app.post("/query", response_model=QueryResponse)
async def query_supervisor(request: QueryRequest):
    """Main entry point for agent orchestration.

    Routes the query through the supervisor graph and returns the selected
    agent's answer. Failures are reported in-band as a QueryResponse with
    agent_used="error" (HTTP 200) rather than a 5xx, so callers always get
    a well-formed body.
    """
    # Lazy %-args: the message is only formatted if INFO is enabled.
    logger.info("Received query: %s", request.query)

    try:
        # Keep the try body to the single awaiting call that can raise.
        result = await process_query(request.query)
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Error processing query: %s", e)
        return QueryResponse(
            response="Error processing your request",
            agent_used="error",
            context={"error": str(e)},
        )

    return QueryResponse(
        response=result["response"],
        # The routed agent identifies itself under context["source"].
        agent_used=result["context"].get("source", "unknown"),
        context=result["context"],
    )
@app.get("/agents")
async def list_agents():
    """List available specialist agents."""
    librarian = {
        "name": "librarian",
        "description": "Queries the knowledge base for semantic information",
        "triggers": ["repo", "code", "git", "hobby", "about", "skill"],
    }
    opencode = {
        "name": "opencode",
        "description": "Handles coding tasks and file modifications",
        "triggers": ["write", "edit", "create", "fix", "implement"],
    }
    brain = {
        "name": "brain",
        "description": "General LLM for reasoning and generation",
        "triggers": ["default", "general questions"],
    }
    return {"agents": [librarian, opencode, brain]}
if __name__ == "__main__":
    # Standalone dev entry point: serve on all interfaces at this
    # service's port (8090, per the architecture notes at the top of file).
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8090)