Initial commit: Multi-service AI agent system

- Frontend: Vite + React + TypeScript chat interface
- Backend: FastAPI gateway with LangGraph routing
- Knowledge Service: ChromaDB RAG with Gitea scraper
- LangGraph Service: Multi-agent orchestration
- Airflow: Scheduled Gitea ingestion DAG
- Documentation: Complete plan and implementation guides

Architecture:
- Modular Docker Compose per service
- External ai-mesh network for communication
- Fast rebuilds with /app/packages pattern
- Supervisor-based agent routing (currently keyword-driven in supervisor_node)

Services:
- Frontend (5173): React chat UI
- Chat Gateway (8000): FastAPI entry point
- LangGraph (8090): Agent orchestration
- Knowledge (8080): ChromaDB RAG
- Airflow (8081): Scheduled ingestion
- PostgreSQL (5432): Chat history

Excludes: node_modules, .venv, chroma_db, logs, .env files
Includes: All source code, configs, docs, docker files
This commit is contained in:
2026-02-27 19:51:06 +11:00
commit 628ba96998
44 changed files with 7177 additions and 0 deletions

View File

@@ -0,0 +1,153 @@
from typing import TypedDict, Annotated, Sequence
from langgraph.graph import StateGraph, END
from langchain_core.messages import BaseMessage, HumanMessage, AIMessage
import operator
import httpx
import os
import logging
# Configure root logging once at import time; the module-level logger
# follows the standard `getLogger(__name__)` convention.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# State definition
class AgentState(TypedDict):
    """Shared graph state passed between LangGraph nodes.

    Node return values are merged into this state; `messages` is annotated
    with operator.add, so message lists returned by nodes are appended to
    the history rather than replacing it.
    """

    # Conversation history; operator.add concatenates each node's update.
    messages: Annotated[Sequence[BaseMessage], operator.add]
    # Node name chosen by the supervisor ("librarian", "opencode", or "brain").
    next_agent: str
    # Arbitrary metadata attached by the responding agent (source, errors, ...).
    context: dict
# Agent routing logic
def supervisor_node(state: AgentState):
    """Supervisor decides which specialist agent to call.

    Inspects the latest message (lower-cased) and picks a specialist by
    keyword match; falls back to the general LLM when nothing matches.
    """
    text = state["messages"][-1].content.lower()
    # Ordered keyword table: the FIRST group with a hit wins, which
    # preserves the precedence of the original if/elif chain (e.g. "code"
    # routes to the librarian even if "fix" also appears later).
    routing_table = (
        (("repo", "code", "git", "github", "gitea", "project", "development"), "librarian"),
        (("write", "edit", "create", "fix", "bug", "implement", "code change"), "opencode"),
        (("sam", "hobby", "music", "experience", "skill", "about"), "librarian"),
    )
    for keywords, agent in routing_table:
        if any(kw in text for kw in keywords):
            return {"next_agent": agent}
    # No keyword matched: default to the general-purpose LLM.
    return {"next_agent": "brain"}
def librarian_agent(state: AgentState):
    """Librarian agent - queries knowledge base (ChromaDB).

    Sends the latest user message to the knowledge service and, on a 200
    response, answers with the returned context. On a non-200 status or a
    transport error it returns a graceful fallback message with the error
    recorded in `context`.
    """
    last_message = state["messages"][-1].content
    # BUG FIX: the original built the failure dict with `str(e)`, but `e`
    # is undefined on the non-200 fall-through path and is deleted after
    # the `except` block by Python 3 scoping — NameError on both failure
    # paths. Capture the error text in a variable that outlives both.
    error = "knowledge service returned no result"
    try:
        # Call knowledge service (resolved over the Docker ai-mesh network).
        response = httpx.post(
            "http://knowledge-service:8080/query",
            json={"question": last_message},
            timeout=10.0,
        )
        if response.status_code == 200:
            context = response.json().get("context", "")
            return {
                "messages": [AIMessage(content=f"Based on my knowledge base:\n\n{context}")],
                "context": {"source": "librarian", "context": context},
            }
        error = f"knowledge service returned HTTP {response.status_code}"
    except Exception as e:  # boundary: degrade gracefully, never crash the graph
        logger.error(f"Librarian error: {e}")
        error = str(e)
    return {
        "messages": [AIMessage(content="I couldn't find relevant information in the knowledge base.")],
        "context": {"source": "librarian", "error": error},
    }
def opencode_agent(state: AgentState):
    """Opencode agent - handles coding tasks via MCP."""
    # Placeholder - would integrate with opencode-brain
    request_text = state["messages"][-1].content
    reply = AIMessage(
        content=f"I'm the coding agent. I would help you with: {request_text}"
    )
    return {
        "messages": [reply],
        "context": {"source": "opencode", "action": "coding_task"},
    }
def brain_agent(state: AgentState):
    """Brain agent - general LLM fallback.

    Creates a session on the opencode-brain service, sends the user's
    message, and returns the first text part of the reply. Any error (or
    a reply with no text part) yields a placeholder message instead of
    crashing the graph.
    """
    last_message = state["messages"][-1].content
    try:
        # NOTE(security): the hard-coded password fallback ("sam4jo")
        # should live only in the environment; kept for compatibility but
        # flagged for removal.
        auth = httpx.BasicAuth("opencode", os.getenv("OPENCODE_PASSWORD", "sam4jo"))
        timeout_long = httpx.Timeout(180.0, connect=10.0)
        # BUG FIX: the original used httpx.AsyncClient inside a plain
        # `with` block and never awaited client.post(...), so .json() was
        # called on coroutine objects and the success path could never
        # work. A synchronous function needs the synchronous httpx.Client.
        with httpx.Client(auth=auth, timeout=timeout_long) as client:
            # Create a fresh session for this query.
            session_res = client.post(
                "http://opencode-brain:5000/session",
                json={"title": "Supervisor Query"},
            )
            session_id = session_res.json()["id"]
            # Send the user's message into the session.
            response = client.post(
                f"http://opencode-brain:5000/session/{session_id}/message",
                json={"parts": [{"type": "text", "text": last_message}]},
            )
            data = response.json()
            # Answer with the first text part, as the original intended.
            for part in data.get("parts", []):
                if part.get("type") == "text":
                    return {
                        "messages": [AIMessage(content=part["text"])],
                        "context": {"source": "brain"},
                    }
    except Exception as e:  # boundary: degrade gracefully, never crash the graph
        logger.error(f"Brain error: {e}")
    return {
        "messages": [AIMessage(content="I'm thinking about this...")],
        "context": {"source": "brain"},
    }
def route_decision(state: AgentState):
    """Routing function based on supervisor decision."""
    # The supervisor stored its choice under "next_agent"; hand it to
    # LangGraph's conditional-edge dispatcher unchanged.
    chosen = state["next_agent"]
    return chosen
# Build the graph: a supervisor fans out to exactly one specialist, and
# every specialist terminates the run.
workflow = StateGraph(AgentState)

# Register the supervisor plus one node per specialist agent.
for _node_name, _node_fn in (
    ("supervisor", supervisor_node),
    ("librarian", librarian_agent),
    ("opencode", opencode_agent),
    ("brain", brain_agent),
):
    workflow.add_node(_node_name, _node_fn)

# Every run starts at the supervisor ...
workflow.set_entry_point("supervisor")

# ... which routes to exactly one specialist via route_decision; the
# mapping is identity (decision string == node name).
workflow.add_conditional_edges(
    "supervisor",
    route_decision,
    {agent: agent for agent in ("librarian", "opencode", "brain")},
)

# Each specialist replies once and the run ends.
for _terminal in ("librarian", "opencode", "brain"):
    workflow.add_edge(_terminal, END)

# Compile the graph into a runnable.
supervisor_graph = workflow.compile()
# Main entry point for queries
async def process_query(query: str) -> dict:
    """Process a query through the supervisor graph.

    Wraps the raw query in a HumanMessage, runs the compiled graph, and
    returns the last message's text plus whatever context the responding
    agent attached.
    """
    initial_state = {
        "messages": [HumanMessage(content=query)],
        "next_agent": "",
        "context": {},
    }
    final_state = await supervisor_graph.ainvoke(initial_state)
    answer = final_state["messages"][-1].content
    return {"response": answer, "context": final_state.get("context", {})}