commit 628ba9699869d6bf86cb4069151ac81e2832ea7a Author: Sam Rolfe Date: Fri Feb 27 19:51:06 2026 +1100 Initial commit: Multi-service AI agent system - Frontend: Vite + React + TypeScript chat interface - Backend: FastAPI gateway with LangGraph routing - Knowledge Service: ChromaDB RAG with Gitea scraper - LangGraph Service: Multi-agent orchestration - Airflow: Scheduled Gitea ingestion DAG - Documentation: Complete plan and implementation guides Architecture: - Modular Docker Compose per service - External ai-mesh network for communication - Fast rebuilds with /app/packages pattern - Intelligent agent routing (no hardcoded keywords) Services: - Frontend (5173): React chat UI - Chat Gateway (8000): FastAPI entry point - LangGraph (8090): Agent orchestration - Knowledge (8080): ChromaDB RAG - Airflow (8081): Scheduled ingestion - PostgreSQL (5432): Chat history Excludes: node_modules, .venv, chroma_db, logs, .env files Includes: All source code, configs, docs, docker files diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..599b8db --- /dev/null +++ b/.gitignore @@ -0,0 +1,52 @@ +# Dependencies +node_modules/ +.venv/ +__pycache__/ +*.pyc + +# Build outputs +dist/ +dist-ssr/ + +# Databases and vector stores +chroma_db/ +*.sqlite3 +*.db + +# Logs +logs/ +*.log +npm-debug.log* +yarn-debug.log* +pnpm-debug.log* + +# Environment variables (secrets!) +.env +.env.local +.env.*.local + +# IDE +.vscode/* +!.vscode/extensions.json +.idea/ +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? 
+ +# Airflow runtime +airflow/logs/ +airflow/config/ +airflow/plugins/ + +# Testing +.coverage +htmlcov/ +.pytest_cache/ + +# Project management files (not code) +action.md +ideas.org +project_journal.org diff --git a/README.md b/README.md new file mode 100644 index 0000000..a527e2d --- /dev/null +++ b/README.md @@ -0,0 +1,107 @@ +# AboutMe AI Chat Demo + +A comprehensive AI agent system with multi-service architecture for personal knowledge management and intelligent query responses. + +## Architecture Overview + +``` +User Query → Chat Gateway → LangGraph Supervisor → [Librarian | Opencode | Brain] + ↓ + Knowledge Service (ChromaDB) ← Airflow ← Gitea API +``` + +## Services + +| Service | Port | Technology | Purpose | +|---------|------|------------|---------| +| Frontend | 5173 | Vite + React + TS | Chat UI | +| Chat Gateway | 8000 | FastAPI | API entry point | +| LangGraph | 8090 | FastAPI + LangGraph | Agent orchestration | +| Knowledge | 8080 | FastAPI + ChromaDB | RAG / Vector search | +| Airflow | 8081 | Apache Airflow | Scheduled ingestion | +| PostgreSQL | 5432 | Postgres 15 | Chat history | + +## Quick Start + +```bash +# 1. Ensure Docker network exists +docker network create ai-mesh + +# 2. Start Knowledge Service +cd knowledge_service && docker-compose up -d + +# 3. Start LangGraph Service +cd ../langgraph_service && docker-compose up -d + +# 4. Start Chat Demo +cd ../aboutme_chat_demo && docker-compose up -d + +# 5. 
Start Airflow (optional) +cd ../airflow && docker-compose up -d +``` + +## Environment Variables + +Create `.env` files in each service directory: + +**knowledge_service/.env:** +``` +OPENROUTER_API_KEY=your_key_here +GITEA_URL=https://gitea.lab.audasmedia.com.au +GITEA_TOKEN=your_token +GITEA_USERNAME=sam +``` + +**langgraph_service/.env:** +``` +OPENCODE_PASSWORD=sam4jo +``` + +**airflow/.env:** +``` +AIRFLOW_UID=1000 +GITEA_TOKEN=your_token +``` + +## Project Structure + +``` +aboutme_chat_demo/ +├── frontend/ # React chat interface +├── backend/ # FastAPI gateway (routes to LangGraph) +├── plan.md # Full project roadmap +└── code_1.md # Implementation guide + +knowledge_service/ +├── main.py # FastAPI + ChromaDB +├── gitea_scraper.py # Gitea API integration +└── docker-compose.yml + +langgraph_service/ +├── main.py # FastAPI entry point +├── supervisor_agent.py # LangGraph orchestration +└── docker-compose.yml + +airflow/ +├── dags/ # Workflow definitions +│ └── gitea_ingestion_dag.py +└── docker-compose.yml +``` + +## Technologies + +- **Frontend:** Vite, React 19, TypeScript, Tailwind CSS, TanStack Query +- **Backend:** FastAPI, Python 3.11, httpx +- **AI/ML:** LangGraph, LangChain, ChromaDB, OpenRouter API +- **Orchestration:** Apache Airflow (CeleryExecutor) +- **Infrastructure:** Docker, Docker Compose + +## Documentation + +- `plan.md` - Complete project roadmap (7 phases) +- `code_1.md` - Modular implementation guide +- `code.md` - Legacy implementation reference + +## License + +MIT diff --git a/airflow/dags/gitea_ingestion_dag.py b/airflow/dags/gitea_ingestion_dag.py new file mode 100644 index 0000000..848938d --- /dev/null +++ b/airflow/dags/gitea_ingestion_dag.py @@ -0,0 +1,144 @@ +""" +Airflow DAG for scheduled Gitea repository ingestion. +Runs daily to fetch new/updated repos and ingest into ChromaDB. 
+""" +from datetime import datetime, timedelta +from airflow import DAG +from airflow.operators.python import PythonOperator +from airflow.providers.http.operators.http import SimpleHttpOperator +import os +import sys +import json + +# Add knowledge_service to path for imports +sys.path.insert(0, '/opt/airflow/dags/repo') + +default_args = { + 'owner': 'airflow', + 'depends_on_past': False, + 'email_on_failure': False, + 'email_on_retry': False, + 'retries': 1, + 'retry_delay': timedelta(minutes=5), +} + +def fetch_gitea_repos(**context): + """Task: Fetch all repositories from Gitea.""" + from gitea_scraper import GiteaScraper + + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + repos = scraper.get_user_repos() + + # Push to XCom for downstream tasks + context['ti'].xcom_push(key='repo_count', value=len(repos)) + context['ti'].xcom_push(key='repos', value=[ + { + 'name': r.name, + 'description': r.description, + 'url': r.url, + 'updated_at': r.updated_at + } + for r in repos + ]) + + return f"Fetched {len(repos)} repositories" + +def fetch_readmes(**context): + """Task: Fetch READMEs for all repositories.""" + from gitea_scraper import GiteaScraper + + ti = context['ti'] + repos = ti.xcom_pull(task_ids='fetch_repos', key='repos') + + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + readme_data = [] + for repo in repos[:10]: # Limit to 10 repos per run for testing + readme = scraper.get_readme(repo['name']) + if readme: + readme_data.append({ + 'repo': repo['name'], + 'content': readme[:5000], # First 5000 chars + 'url': repo['url'] + }) + + ti.xcom_push(key='readme_data', value=readme_data) + + return f"Fetched {len(readme_data)} READMEs" + +def ingest_to_chroma(**context): + """Task: 
Ingest fetched data into ChromaDB via knowledge service.""" + import httpx + + ti = context['ti'] + readme_data = ti.xcom_pull(task_ids='fetch_readmes', key='readme_data') + + knowledge_service_url = os.getenv("KNOWLEDGE_SERVICE_URL", "http://knowledge-service:8080") + + documents_ingested = 0 + for item in readme_data: + try: + # Call knowledge service ingest endpoint + response = httpx.post( + f"{knowledge_service_url}/ingest", + json={ + 'source': f"gitea:{item['repo']}", + 'content': item['content'], + 'metadata': { + 'repo': item['repo'], + 'url': item['url'], + 'type': 'readme' + } + }, + timeout=30.0 + ) + + if response.status_code == 200: + documents_ingested += 1 + + except Exception as e: + print(f"Error ingesting {item['repo']}: {e}") + + return f"Ingested {documents_ingested} documents into ChromaDB" + +# Define the DAG +with DAG( + 'gitea_daily_ingestion', + default_args=default_args, + description='Daily ingestion of Gitea repositories into knowledge base', + schedule_interval=timedelta(days=1), # Run daily + start_date=datetime(2024, 1, 1), + catchup=False, + tags=['gitea', 'ingestion', 'knowledge'], +) as dag: + + # Task 1: Fetch repository list + fetch_repos_task = PythonOperator( + task_id='fetch_repos', + python_callable=fetch_gitea_repos, + ) + + # Task 2: Fetch README content + fetch_readmes_task = PythonOperator( + task_id='fetch_readmes', + python_callable=fetch_readmes, + ) + + # Task 3: Ingest into ChromaDB + ingest_task = PythonOperator( + task_id='ingest_to_chroma', + python_callable=ingest_to_chroma, + ) + + # Define task dependencies + fetch_repos_task >> fetch_readmes_task >> ingest_task + diff --git a/airflow/dags/gitea_scraper.py b/airflow/dags/gitea_scraper.py new file mode 100644 index 0000000..dd0fda6 --- /dev/null +++ b/airflow/dags/gitea_scraper.py @@ -0,0 +1,121 @@ +import os +import httpx +import logging +from typing import List, Dict, Optional +from dataclasses import dataclass +from datetime import datetime + 
+logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +@dataclass +class RepoMetadata: + name: str + description: str + url: str + default_branch: str + updated_at: str + language: Optional[str] + +class GiteaScraper: + def __init__(self, base_url: str, token: str, username: str = "sam"): + self.base_url = base_url.rstrip("/") + self.token = token + self.username = username + self.headers = {"Authorization": f"token {token}"} + + def get_user_repos(self) -> List[RepoMetadata]: + """Fetch all repositories for the user.""" + repos = [] + page = 1 + + while True: + url = f"{self.base_url}/api/v1/users/{self.username}/repos?page={page}&limit=50" + + try: + response = httpx.get(url, headers=self.headers, timeout=30.0) + response.raise_for_status() + + data = response.json() + if not data: + break + + for repo in data: + repos.append(RepoMetadata( + name=repo["name"], + description=repo.get("description", ""), + url=repo["html_url"], + default_branch=repo["default_branch"], + updated_at=repo["updated_at"], + language=repo.get("language") + )) + + logger.info(f"Fetched page {page}, got {len(data)} repos") + page += 1 + + except Exception as e: + logger.error(f"Error fetching repos: {e}") + break + + return repos + + def get_readme(self, repo_name: str) -> str: + """Fetch README content for a repository.""" + # Try common README filenames + readme_names = ["README.md", "readme.md", "Readme.md", "README.rst"] + + for readme_name in readme_names: + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{readme_name}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.warning(f"Failed to fetch {readme_name}: {e}") + continue + + return "" + + def get_repo_files(self, repo_name: str, path: str = "") -> List[Dict]: + """List files in a repository directory.""" + url = 
f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/contents/{path}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + response.raise_for_status() + return response.json() + except Exception as e: + logger.error(f"Error listing files in {repo_name}/{path}: {e}") + return [] + + def get_file_content(self, repo_name: str, filepath: str) -> str: + """Fetch content of a specific file.""" + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{filepath}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.error(f"Error fetching file {filepath}: {e}") + + return "" + +# Test function +if __name__ == "__main__": + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + repos = scraper.get_user_repos() + print(f"Found {len(repos)} repositories") + + for repo in repos[:3]: # Test with first 3 + print(f"\nRepo: {repo.name}") + readme = scraper.get_readme(repo.name) + if readme: + print(f"README preview: {readme[:200]}...") + diff --git a/airflow/docker-compose.yml b/airflow/docker-compose.yml new file mode 100644 index 0000000..169bdc2 --- /dev/null +++ b/airflow/docker-compose.yml @@ -0,0 +1,181 @@ +version: '3.8' + +x-airflow-common: + &airflow-common + image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.8.1} + environment: + &airflow-common-env + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: '' + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true' + AIRFLOW__CORE__LOAD_EXAMPLES: 'false' + AIRFLOW__API__AUTH_BACKENDS: 
'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session' + AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true' + _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-} + volumes: + - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags + - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs + - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config + - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins + user: "${AIRFLOW_UID:-50000}:0" + depends_on: + &airflow-common-depends-on + redis: + condition: service_healthy + postgres: + condition: service_healthy + +services: + postgres: + image: postgres:13 + environment: + POSTGRES_USER: airflow + POSTGRES_PASSWORD: airflow + POSTGRES_DB: airflow + volumes: + - postgres-db-volume:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 10s + retries: 5 + start_period: 5s + restart: always + networks: + - ai-mesh + + redis: + image: redis:latest + expose: + - 6379 + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 30s + retries: 50 + start_period: 30s + restart: always + networks: + - ai-mesh + + airflow-webserver: + <<: *airflow-common + command: webserver + ports: + - "8081:8080" + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-scheduler: + <<: *airflow-common + command: scheduler + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8974/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-worker: + <<: *airflow-common + command: celery worker + healthcheck: + test: + - "CMD-SHELL" + - 'celery --app 
airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"' + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-triggerer: + <<: *airflow-common + command: triggerer + healthcheck: + test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"'] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-init: + <<: *airflow-common + entrypoint: /bin/bash + command: + - -c + - | + if [[ -z "${AIRFLOW_UID}" ]]; then + echo "WARNING!!!: AIRFLOW_UID not set!" + echo "Using default UID: 50000" + export AIRFLOW_UID=50000 + fi + mkdir -p /sources/logs /sources/dags /sources/plugins + chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} + exec /entrypoint airflow version + environment: + <<: *airflow-common-env + _AIRFLOW_DB_MIGRATE: 'true' + _AIRFLOW_WWW_USER_CREATE: 'true' + _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow} + _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} + user: "0:0" + volumes: + - ${AIRFLOW_PROJ_DIR:-.}:/sources + networks: + - ai-mesh + + airflow-cli: + <<: *airflow-common + profiles: + - debug + environment: + <<: *airflow-common-env + CONNECTION_CHECK_MAX_COUNT: "0" + command: + - bash + - -c + - airflow + networks: + - ai-mesh + +volumes: + postgres-db-volume: + +networks: + ai-mesh: + external: true + diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..31774d8 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,8 @@ +FROM python:3.11-slim +WORKDIR /app +RUN apt-get update && apt-get 
install -y libpq-dev gcc +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt +COPY . . +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"] + diff --git a/backend/main.py b/backend/main.py new file mode 100644 index 0000000..a02c2ec --- /dev/null +++ b/backend/main.py @@ -0,0 +1,58 @@ +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +import httpx +import logging +import sys +import traceback + +logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s", handlers=[logging.StreamHandler(sys.stdout)]) +logger = logging.getLogger(__name__) + +app = FastAPI() +app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"]) + +class MessageRequest(BaseModel): + message: str + +BRAIN_URL = "http://opencode-brain:5000" +KNOWLEDGE_URL = "http://knowledge-service:8080/query" +AUTH = httpx.BasicAuth("opencode", "sam4jo") + +@app.post("/chat") +async def chat(request: MessageRequest): + user_msg = request.message.lower() + timeout_long = httpx.Timeout(180.0, connect=10.0) + timeout_short = httpx.Timeout(5.0, connect=2.0) + + context = "" + # Check for keywords to trigger Librarian (DB) lookup + if any(kw in user_msg for kw in ["sam", "hobby", "music", "guitar", "skiing", "experience"]): + logger.info("Gateway: Consulting Librarian (DB)...") + async with httpx.AsyncClient(timeout=timeout_short) as client: + try: + k_res = await client.post(KNOWLEDGE_URL, json={"question": request.message}) + if k_res.status_code == 200: + context = k_res.json().get("context", "") + except Exception as e: + logger.warning(f"Gateway: Librarian offline/slow: {str(e)}") + + # Forward to Brain (LLM) + async with httpx.AsyncClient(auth=AUTH, timeout=timeout_long) as brain_client: + try: + session_res = await brain_client.post(f"{BRAIN_URL}/session", json={"title": "Demo"}) + session_id = 
session_res.json()["id"] + final_prompt = f"CONTEXT:\n{context}\n\nUSER: {request.message}" if context else request.message + response = await brain_client.post(f"{BRAIN_URL}/session/{session_id}/message", json={"parts": [{"type": "text", "text": final_prompt}]}) + + # FIX: Iterate through parts array to find text response + data = response.json() + if "parts" in data: + for part in data["parts"]: + if part.get("type") == "text" and "text" in part: + return {"response": part["text"]} + + return {"response": "AI responded but no text found in expected format."} + except Exception: + logger.error(f"Gateway: Brain failure: {traceback.format_exc()}") + return {"response": "Error: The Brain is taking too long or is disconnected."} diff --git a/backend/main.py.new b/backend/main.py.new new file mode 100644 index 0000000..4357970 --- /dev/null +++ b/backend/main.py.new @@ -0,0 +1,49 @@ +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +import httpx +import logging +import sys +import traceback +import os + +logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s", handlers=[logging.StreamHandler(sys.stdout)]) +logger = logging.getLogger(__name__) + +app = FastAPI() +app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"]) + +class MessageRequest(BaseModel): + message: str + +LANGGRAPH_URL = os.getenv("LANGGRAPH_URL", "http://langgraph-service:8090") + +@app.post("/chat") +async def chat(request: MessageRequest): + """Updated chat endpoint that routes through LangGraph Supervisor.""" + logger.info(f"Gateway: Received message: {request.message}") + + try: + # Call LangGraph Supervisor instead of direct brain + async with httpx.AsyncClient(timeout=httpx.Timeout(60.0, connect=10.0)) as client: + response = await client.post( + f"{LANGGRAPH_URL}/query", + json={"query": request.message} + ) + + if response.status_code 
== 200: + result = response.json() + logger.info(f"Gateway: Response from {result.get('agent_used', 'unknown')} agent") + return {"response": result["response"]} + else: + logger.error(f"Gateway: LangGraph error {response.status_code}") + return {"response": "Error: Orchestration service unavailable"} + + except Exception as e: + logger.error(f"Gateway: Error routing through LangGraph: {traceback.format_exc()}") + return {"response": "Error: Unable to process your request at this time."} + +@app.get("/health") +async def health(): + return {"status": "healthy", "service": "chat-gateway"} + diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 0000000..1720a26 --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,8 @@ +fastapi +uvicorn +sqlalchemy +psycopg2-binary +pydantic +httpx +pytest +pytest-asyncio diff --git a/backend/tests/test_gateway.py b/backend/tests/test_gateway.py new file mode 100644 index 0000000..46ff101 --- /dev/null +++ b/backend/tests/test_gateway.py @@ -0,0 +1,79 @@ +import pytest +from fastapi.testclient import TestClient +from main import app +import httpx +from unittest.mock import AsyncMock, patch + +client = TestClient(app) + +@pytest.mark.asyncio +async def test_chat_general_query(): + """Test that a general query (no personal keywords) skips the Librarian.""" + with patch("httpx.AsyncClient.post", new_callable=AsyncMock) as mock_post: + # Mock Brain response + mock_response = AsyncMock() + mock_response.status_code = 200 + mock_response.json.return_value = { + "info": {"id": "msg_123"}, + "parts": [{"type": "text", "text": "I am a general AI."}] + } + + # First call is for session creation, second for message + mock_post.side_effect = [AsyncMock(status_code=200, json=lambda: {"id": "ses_123"}), mock_response] + + response = client.post("/chat", json={"message": "What is 2+2?"}) + + assert response.status_code == 200 + assert response.json()["response"] == "I am a general AI." 
+ # Verify Librarian (knowledge-service) was NOT called + # The knowledge service URL is http://knowledge-service:8080/query + calls = [call.args[0] for call in mock_post.call_args_list] + assert not any("knowledge-service" in url for url in calls) + +@pytest.mark.asyncio +async def test_chat_personal_query_success(): + """Test that a personal query calls the Librarian and injects context.""" + with patch("httpx.AsyncClient.post", new_callable=AsyncMock) as mock_post: + # 1. Mock Librarian Response + mock_k_res = AsyncMock() + mock_k_res.status_code = 200 + mock_k_res.json.return_value = {"context": "Sam likes red guitars."} + + # 2. Mock Brain Session Response + mock_s_res = AsyncMock() + mock_s_res.status_code = 200 + mock_s_res.json.return_value = {"id": "ses_123"} + + # 3. Mock Brain Message Response + mock_b_res = AsyncMock() + mock_b_res.status_code = 200 + mock_b_res.json.return_value = { + "parts": [{"type": "text", "text": "I see Sam likes red guitars."}] + } + + mock_post.side_effect = [mock_k_res, mock_s_res, mock_b_res] + + response = client.post("/chat", json={"message": "Tell me about Sam's music"}) + + assert response.status_code == 200 + assert "red guitars" in response.json()["response"] + + # Verify Librarian was called + calls = [call.args[0] for call in mock_post.call_args_list] + assert any("knowledge-service" in url for url in calls) + +@pytest.mark.asyncio +async def test_chat_librarian_timeout_failover(): + """Test that the gateway fails over instantly (5s) if Librarian is slow.""" + with patch("httpx.AsyncClient.post", new_callable=AsyncMock) as mock_post: + # Mock Librarian Timeout + mock_post.side_effect = [ + httpx.TimeoutException("Timeout"), # Librarian call + AsyncMock(status_code=200, json=lambda: {"id": "ses_123"}), # Brain Session + AsyncMock(status_code=200, json=lambda: {"parts": [{"type": "text", "text": "Direct Brain Response"}]}) # Brain Msg + ] + + response = client.post("/chat", json={"message": "Sam's hobbies?"}) + + assert 
response.status_code == 200 + assert response.json()["response"] == "Direct Brain Response" diff --git a/code.md b/code.md new file mode 100644 index 0000000..9134011 --- /dev/null +++ b/code.md @@ -0,0 +1,1107 @@ +# Implementation Plan: Gitea Ingestion, Airflow Scheduling, and LangGraph Orchestration + +## Overview +Building a complete AI agent pipeline with: +1. **Gitea API Scraper** - Custom module to fetch repos, READMEs, and code +2. **Apache Airflow** - Multi-service Docker setup for scheduled ingestion +3. **LangGraph Supervisor** - Agent orchestration service for multi-agent routing + +--- + +## Phase 1: Gitea API Scraper Module + +### File: `/home/sam/development/knowledge_service/gitea_scraper.py` + +```python +""" +Gitea API Scraper - Fetches repos, READMEs, and source code +for ingestion into the knowledge base. +""" +import os +import httpx +import logging +from typing import List, Dict, Optional +from dataclasses import dataclass +from datetime import datetime + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +@dataclass +class RepoMetadata: + name: str + description: str + url: str + default_branch: str + updated_at: str + language: Optional[str] + +class GiteaScraper: + def __init__(self, base_url: str, token: str, username: str = "sam"): + self.base_url = base_url.rstrip("/") + self.token = token + self.username = username + self.headers = {"Authorization": f"token {token}"} + + def get_user_repos(self) -> List[RepoMetadata]: + """Fetch all repositories for the user.""" + repos = [] + page = 1 + + while True: + url = f"{self.base_url}/api/v1/users/{self.username}/repos?page={page}&limit=50" + + try: + response = httpx.get(url, headers=self.headers, timeout=30.0) + response.raise_for_status() + + data = response.json() + if not data: + break + + for repo in data: + repos.append(RepoMetadata( + name=repo["name"], + description=repo.get("description", ""), + url=repo["html_url"], + 
default_branch=repo["default_branch"], + updated_at=repo["updated_at"], + language=repo.get("language") + )) + + logger.info(f"Fetched page {page}, got {len(data)} repos") + page += 1 + + except Exception as e: + logger.error(f"Error fetching repos: {e}") + break + + return repos + + def get_readme(self, repo_name: str) -> str: + """Fetch README content for a repository.""" + # Try common README filenames + readme_names = ["README.md", "readme.md", "Readme.md", "README.rst"] + + for readme_name in readme_names: + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{readme_name}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.warning(f"Failed to fetch {readme_name}: {e}") + continue + + return "" + + def get_repo_files(self, repo_name: str, path: str = "") -> List[Dict]: + """List files in a repository directory.""" + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/contents/{path}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + response.raise_for_status() + return response.json() + except Exception as e: + logger.error(f"Error listing files in {repo_name}/{path}: {e}") + return [] + + def get_file_content(self, repo_name: str, filepath: str) -> str: + """Fetch content of a specific file.""" + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{filepath}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.error(f"Error fetching file {filepath}: {e}") + + return "" + +# Test function +if __name__ == "__main__": + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + repos = scraper.get_user_repos() + print(f"Found {len(repos)} 
repositories") + + for repo in repos[:3]: # Test with first 3 + print(f"\nRepo: {repo.name}") + readme = scraper.get_readme(repo.name) + if readme: + print(f"README preview: {readme[:200]}...") +``` + +--- + +## Phase 2: Apache Airflow Multi-Service Setup + +### File: `/home/sam/development/airflow/docker-compose.yml` + +```yaml +version: '3.8' + +x-airflow-common: + &airflow-common + image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.8.1} + environment: + &airflow-common-env + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: '' + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true' + AIRFLOW__CORE__LOAD_EXAMPLES: 'false' + AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session' + AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true' + _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-} + volumes: + - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags + - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs + - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config + - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins + user: "${AIRFLOW_UID:-50000}:0" + depends_on: + &airflow-common-depends-on + redis: + condition: service_healthy + postgres: + condition: service_healthy + +services: + postgres: + image: postgres:13 + environment: + POSTGRES_USER: airflow + POSTGRES_PASSWORD: airflow + POSTGRES_DB: airflow + volumes: + - postgres-db-volume:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 10s + retries: 5 + start_period: 5s + restart: always + networks: + - ai-mesh + + redis: + image: redis:latest + expose: + - 6379 + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 30s + retries: 50 + start_period: 30s + restart: 
always + networks: + - ai-mesh + + airflow-webserver: + <<: *airflow-common + command: webserver + ports: + - "8081:8080" + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-scheduler: + <<: *airflow-common + command: scheduler + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8974/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-worker: + <<: *airflow-common + command: celery worker + healthcheck: + test: + - "CMD-SHELL" + - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"' + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-triggerer: + <<: *airflow-common + command: triggerer + healthcheck: + test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"'] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-init: + <<: *airflow-common + entrypoint: /bin/bash + command: + - -c + - | + if [[ -z "${AIRFLOW_UID}" ]]; then + echo "WARNING!!!: AIRFLOW_UID not set!" 
+ echo "Using default UID: 50000" + export AIRFLOW_UID=50000 + fi + mkdir -p /sources/logs /sources/dags /sources/plugins + chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} + exec /entrypoint airflow version + environment: + <<: *airflow-common-env + _AIRFLOW_DB_MIGRATE: 'true' + _AIRFLOW_WWW_USER_CREATE: 'true' + _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow} + _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} + user: "0:0" + volumes: + - ${AIRFLOW_PROJ_DIR:-.}:/sources + networks: + - ai-mesh + + airflow-cli: + <<: *airflow-common + profiles: + - debug + environment: + <<: *airflow-common-env + CONNECTION_CHECK_MAX_COUNT: "0" + command: + - bash + - -c + - airflow + networks: + - ai-mesh + +volumes: + postgres-db-volume: + +networks: + ai-mesh: + external: true +``` + +### File: `/home/sam/development/airflow/dags/gitea_ingestion_dag.py` + +```python +""" +Airflow DAG for scheduled Gitea repository ingestion. +Runs daily to fetch new/updated repos and ingest into ChromaDB. 
+""" +from datetime import datetime, timedelta +from airflow import DAG +from airflow.operators.python import PythonOperator +from airflow.providers.http.operators.http import SimpleHttpOperator +import os +import sys +import json + +# Add knowledge_service to path for imports +sys.path.insert(0, '/opt/airflow/dags/repo') + +default_args = { + 'owner': 'airflow', + 'depends_on_past': False, + 'email_on_failure': False, + 'email_on_retry': False, + 'retries': 1, + 'retry_delay': timedelta(minutes=5), +} + +def fetch_gitea_repos(**context): + """Task: Fetch all repositories from Gitea.""" + from gitea_scraper import GiteaScraper + + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + repos = scraper.get_user_repos() + + # Push to XCom for downstream tasks + context['ti'].xcom_push(key='repo_count', value=len(repos)) + context['ti'].xcom_push(key='repos', value=[ + { + 'name': r.name, + 'description': r.description, + 'url': r.url, + 'updated_at': r.updated_at + } + for r in repos + ]) + + return f"Fetched {len(repos)} repositories" + +def fetch_readmes(**context): + """Task: Fetch READMEs for all repositories.""" + from gitea_scraper import GiteaScraper + + ti = context['ti'] + repos = ti.xcom_pull(task_ids='fetch_repos', key='repos') + + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + readme_data = [] + for repo in repos[:10]: # Limit to 10 repos per run for testing + readme = scraper.get_readme(repo['name']) + if readme: + readme_data.append({ + 'repo': repo['name'], + 'content': readme[:5000], # First 5000 chars + 'url': repo['url'] + }) + + ti.xcom_push(key='readme_data', value=readme_data) + + return f"Fetched {len(readme_data)} READMEs" + +def ingest_to_chroma(**context): + """Task: 
Ingest fetched data into ChromaDB via knowledge service.""" + import httpx + + ti = context['ti'] + readme_data = ti.xcom_pull(task_ids='fetch_readmes', key='readme_data') + + knowledge_service_url = os.getenv("KNOWLEDGE_SERVICE_URL", "http://knowledge-service:8080") + + documents_ingested = 0 + for item in readme_data: + try: + # Call knowledge service ingest endpoint + response = httpx.post( + f"{knowledge_service_url}/ingest", + json={ + 'source': f"gitea:{item['repo']}", + 'content': item['content'], + 'metadata': { + 'repo': item['repo'], + 'url': item['url'], + 'type': 'readme' + } + }, + timeout=30.0 + ) + + if response.status_code == 200: + documents_ingested += 1 + + except Exception as e: + print(f"Error ingesting {item['repo']}: {e}") + + return f"Ingested {documents_ingested} documents into ChromaDB" + +# Define the DAG +with DAG( + 'gitea_daily_ingestion', + default_args=default_args, + description='Daily ingestion of Gitea repositories into knowledge base', + schedule_interval=timedelta(days=1), # Run daily + start_date=datetime(2024, 1, 1), + catchup=False, + tags=['gitea', 'ingestion', 'knowledge'], +) as dag: + + # Task 1: Fetch repository list + fetch_repos_task = PythonOperator( + task_id='fetch_repos', + python_callable=fetch_gitea_repos, + ) + + # Task 2: Fetch README content + fetch_readmes_task = PythonOperator( + task_id='fetch_readmes', + python_callable=fetch_readmes, + ) + + # Task 3: Ingest into ChromaDB + ingest_task = PythonOperator( + task_id='ingest_to_chroma', + python_callable=ingest_to_chroma, + ) + + # Define task dependencies + fetch_repos_task >> fetch_readmes_task >> ingest_task +``` + +### File: `/home/sam/development/airflow/.env` + +```bash +# Airflow Configuration +AIRFLOW_UID=1000 +AIRFLOW_GID=0 +AIRFLOW_PROJ_DIR=. 
+_AIRFLOW_WWW_USER_USERNAME=admin +_AIRFLOW_WWW_USER_PASSWORD=admin + +# Gitea Configuration +GITEA_URL=https://gitea.lab.audasmedia.com.au +GITEA_TOKEN=your_token_here +GITEA_USERNAME=sam + +# Knowledge Service +KNOWLEDGE_SERVICE_URL=http://knowledge-service:8080 +``` + +--- + +## Phase 3: LangGraph Supervisor Service + +### File: `/home/sam/development/langgraph_service/requirements.txt` + +``` +fastapi +uvicorn +langgraph +langchain +langchain-community +langchain-openai +httpx +pydantic +``` + +### File: `/home/sam/development/langgraph_service/supervisor_agent.py` + +```python +""" +LangGraph Supervisor Agent - Routes queries to specialist agents +""" +from typing import TypedDict, Annotated, Sequence +from langgraph.graph import StateGraph, END +from langchain_core.messages import BaseMessage, HumanMessage, AIMessage +import operator +import httpx +import os +import logging + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# State definition +class AgentState(TypedDict): + messages: Annotated[Sequence[BaseMessage], operator.add] + next_agent: str + context: dict + +# Agent routing logic +def supervisor_node(state: AgentState): + """Supervisor decides which specialist agent to call.""" + last_message = state["messages"][-1].content.lower() + + # Simple routing logic based on keywords + if any(kw in last_message for kw in ["repo", "code", "git", "github", "gitea", "project", "development"]): + return {"next_agent": "librarian"} + elif any(kw in last_message for kw in ["write", "edit", "create", "fix", "bug", "implement", "code change"]): + return {"next_agent": "opencode"} + elif any(kw in last_message for kw in ["sam", "hobby", "music", "experience", "skill", "about"]): + return {"next_agent": "librarian"} + else: + return {"next_agent": "brain"} # Default to general LLM + +def librarian_agent(state: AgentState): + """Librarian agent - queries knowledge base (ChromaDB).""" + last_message = state["messages"][-1].content + + 
try:
+        # Call knowledge service
+        response = httpx.post(
+            "http://knowledge-service:8080/query",
+            json={"question": last_message},
+            timeout=10.0
+        )
+
+        if response.status_code == 200:
+            context = response.json().get("context", "")
+            return {
+                "messages": [AIMessage(content=f"Based on my knowledge base:\n\n{context}")],
+                "context": {"source": "librarian", "context": context}
+            }
+    except Exception as e:
+        logger.error(f"Librarian error: {e}")
+
+    # NOTE: this fallback is also reached on a non-200 response, where no
+    # exception was raised and `e` is unbound -- so it must not reference `e`.
+    return {
+        "messages": [AIMessage(content="I couldn't find relevant information in the knowledge base.")],
+        "context": {"source": "librarian", "error": "knowledge service unavailable or returned no context"}
+    }
+
+def opencode_agent(state: AgentState):
+    """Opencode agent - handles coding tasks via MCP."""
+    last_message = state["messages"][-1].content
+
+    # Placeholder - would integrate with opencode-brain
+    return {
+        "messages": [AIMessage(content=f"I'm the coding agent. I would help you with: {last_message}")],
+        "context": {"source": "opencode", "action": "coding_task"}
+    }
+
+def brain_agent(state: AgentState):
+    """Brain agent - general LLM fallback."""
+    last_message = state["messages"][-1].content
+
+    try:
+        # Call opencode-brain service
+        auth = httpx.BasicAuth("opencode", os.getenv("OPENCODE_PASSWORD", "sam4jo"))
+        timeout_long = httpx.Timeout(180.0, connect=10.0)
+
+        # Sync Client, not AsyncClient: this function is synchronous, and
+        # AsyncClient.post() would return an un-awaited coroutine here.
+        with httpx.Client(auth=auth, timeout=timeout_long) as client:
+            # Create session
+            session_res = client.post("http://opencode-brain:5000/session", json={"title": "Supervisor Query"})
+            session_id = session_res.json()["id"]
+
+            # Send message
+            response = client.post(
+                f"http://opencode-brain:5000/session/{session_id}/message",
+                json={"parts": [{"type": "text", "text": last_message}]}
+            )
+
+            data = response.json()
+            if "parts" in data:
+                for part in data["parts"]:
+                    if part.get("type") == "text":
+                        return {
+                            "messages": [AIMessage(content=part["text"])],
+                            "context": {"source": "brain"}
+                        }
+    except Exception as e:
+        logger.error(f"Brain error: {e}")
+
+    return {
+        "messages": 
[AIMessage(content="I'm thinking about this...")], + "context": {"source": "brain"} + } + +def route_decision(state: AgentState): + """Routing function based on supervisor decision.""" + return state["next_agent"] + +# Build the graph +workflow = StateGraph(AgentState) + +# Add nodes +workflow.add_node("supervisor", supervisor_node) +workflow.add_node("librarian", librarian_agent) +workflow.add_node("opencode", opencode_agent) +workflow.add_node("brain", brain_agent) + +# Add edges +workflow.set_entry_point("supervisor") + +# Conditional routing from supervisor +workflow.add_conditional_edges( + "supervisor", + route_decision, + { + "librarian": "librarian", + "opencode": "opencode", + "brain": "brain" + } +) + +# All specialist agents end +workflow.add_edge("librarian", END) +workflow.add_edge("opencode", END) +workflow.add_edge("brain", END) + +# Compile the graph +supervisor_graph = workflow.compile() + +# Main entry point for queries +async def process_query(query: str) -> dict: + """Process a query through the supervisor graph.""" + result = await supervisor_graph.ainvoke({ + "messages": [HumanMessage(content=query)], + "next_agent": "", + "context": {} + }) + + return { + "response": result["messages"][-1].content, + "context": result.get("context", {}) + } +``` + +### File: `/home/sam/development/langgraph_service/main.py` + +```python +""" +LangGraph Supervisor Service - FastAPI wrapper for agent orchestration +""" +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +from supervisor_agent import process_query +import logging +import sys + +logging.basicConfig(level=logging.INFO, stream=sys.stdout) +logger = logging.getLogger(__name__) + +app = FastAPI(title="LangGraph Supervisor Service") + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +class QueryRequest(BaseModel): + query: str + +class 
QueryResponse(BaseModel): + response: str + agent_used: str + context: dict + +@app.get("/health") +async def health(): + return {"status": "healthy", "service": "langgraph-supervisor"} + +@app.post("/query", response_model=QueryResponse) +async def query_supervisor(request: QueryRequest): + """Main entry point for agent orchestration.""" + logger.info(f"Received query: {request.query}") + + try: + result = await process_query(request.query) + + return QueryResponse( + response=result["response"], + agent_used=result["context"].get("source", "unknown"), + context=result["context"] + ) + except Exception as e: + logger.error(f"Error processing query: {e}") + return QueryResponse( + response="Error processing your request", + agent_used="error", + context={"error": str(e)} + ) + +@app.get("/agents") +async def list_agents(): + """List available specialist agents.""" + return { + "agents": [ + { + "name": "librarian", + "description": "Queries the knowledge base for semantic information", + "triggers": ["repo", "code", "git", "hobby", "about", "skill"] + }, + { + "name": "opencode", + "description": "Handles coding tasks and file modifications", + "triggers": ["write", "edit", "create", "fix", "implement"] + }, + { + "name": "brain", + "description": "General LLM for reasoning and generation", + "triggers": ["default", "general questions"] + } + ] + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8090) +``` + +### File: `/home/sam/development/langgraph_service/Dockerfile` + +```dockerfile +FROM python:3.11-slim + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Create app directory +WORKDIR /app + +# Copy requirements +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy code +COPY . . 
+ +EXPOSE 8090 + +CMD ["python3", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8090"] +``` + +--- + +## Phase 4: Integration - Updated Docker Compose + +### File: `/home/sam/development/docker-compose.integrated.yml` + +```yaml +version: '3.8' + +services: + # Existing Knowledge Service + knowledge-service: + build: ./knowledge_service + container_name: knowledge-service + ports: + - "8080:8080" + volumes: + - ./knowledge_service/data:/app/code/data + - ./knowledge_service/chroma_db:/app/code/chroma_db + - ./knowledge_service/main.py:/app/code/main.py:ro + - ./knowledge_service/gitea_scraper.py:/app/code/gitea_scraper.py:ro + environment: + - PYTHONUNBUFFERED=1 + - OPENROUTER_API_KEY=${OPENROUTER_API_KEY} + - PYTHONPATH=/app/packages + - GITEA_URL=${GITEA_URL} + - GITEA_TOKEN=${GITEA_TOKEN} + - GITEA_USERNAME=${GITEA_USERNAME:-sam} + networks: + - ai-mesh + restart: unless-stopped + + # LangGraph Supervisor Service + langgraph-service: + build: ./langgraph_service + container_name: langgraph-service + ports: + - "8090:8090" + environment: + - OPENCODE_PASSWORD=${OPENCODE_PASSWORD:-sam4jo} + - KNOWLEDGE_SERVICE_URL=http://knowledge-service:8080 + depends_on: + - knowledge-service + networks: + - ai-mesh + restart: unless-stopped + + # Chat Gateway (Updated to use LangGraph) + chat-gateway: + build: ./aboutme_chat_demo/backend + container_name: chat-gateway + ports: + - "8000:8000" + volumes: + - ./aboutme_chat_demo/backend:/app + environment: + - DATABASE_URL=postgresql://sam:sam4jo@db:5432/chat_demo + - LANGGRAPH_URL=http://langgraph-service:8090 + depends_on: + - langgraph-service + - db + networks: + - ai-mesh + restart: unless-stopped + + # Frontend + frontend: + build: ./aboutme_chat_demo/frontend + container_name: chat-frontend + ports: + - "5173:5173" + volumes: + - ./aboutme_chat_demo/frontend:/app + - /app/node_modules + environment: + - CHOKIDAR_USEPOLLING=true + networks: + - ai-mesh + + # PostgreSQL for chat history + db: + image: 
postgres:15-alpine + container_name: chat-db + environment: + POSTGRES_USER: sam + POSTGRES_PASSWORD: sam4jo + POSTGRES_DB: chat_demo + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + networks: + - ai-mesh + restart: unless-stopped + +volumes: + postgres_data: + +networks: + ai-mesh: + external: true +``` + +--- + +## Phase 5: Updated Chat Gateway + +### File: `/home/sam/development/aboutme_chat_demo/backend/main.py` + +```python +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +import httpx +import logging +import sys +import traceback +import os + +logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s", handlers=[logging.StreamHandler(sys.stdout)]) +logger = logging.getLogger(__name__) + +app = FastAPI() +app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"]) + +class MessageRequest(BaseModel): + message: str + +LANGGRAPH_URL = os.getenv("LANGGRAPH_URL", "http://langgraph-service:8090") + +@app.post("/chat") +async def chat(request: MessageRequest): + """Updated chat endpoint that routes through LangGraph Supervisor.""" + logger.info(f"Gateway: Received message: {request.message}") + + try: + # Call LangGraph Supervisor instead of direct brain + async with httpx.AsyncClient(timeout=httpx.Timeout(60.0, connect=10.0)) as client: + response = await client.post( + f"{LANGGRAPH_URL}/query", + json={"query": request.message} + ) + + if response.status_code == 200: + result = response.json() + logger.info(f"Gateway: Response from {result.get('agent_used', 'unknown')} agent") + return {"response": result["response"]} + else: + logger.error(f"Gateway: LangGraph error {response.status_code}") + return {"response": "Error: Orchestration service unavailable"} + + except Exception as e: + logger.error(f"Gateway: Error routing through LangGraph: {traceback.format_exc()}") + return 
{"response": "Error: Unable to process your request at this time."} + +@app.get("/health") +async def health(): + return {"status": "healthy", "service": "chat-gateway"} +``` + +--- + +## Terminal Commands + +### Setup Airflow Environment + +```bash +# Create airflow directory structure +mkdir -p /home/sam/development/airflow/{dags,logs,config,plugins} + +# Copy gitea_scraper.py to airflow dags folder +cp /home/sam/development/knowledge_service/gitea_scraper.py /home/sam/development/airflow/dags/ + +# Set proper permissions (Airflow runs as UID 50000 in container) +echo -e "AIRFLOW_UID=1000\nAIRFLOW_GID=0" > /home/sam/development/airflow/.env + +# Start Airflow services +cd /home/sam/development/airflow +docker-compose up -d + +# Check Airflow webserver (wait 30 seconds for init) +sleep 30 +curl http://localhost:8081/health + +# Access Airflow UI +# http://localhost:8081 (login: admin/admin) +``` + +### Setup LangGraph Service + +```bash +# Create langgraph_service directory +mkdir -p /home/sam/development/langgraph_service + +# Write requirements.txt +cat > /home/sam/development/langgraph_service/requirements.txt << 'EOF' +fastapi +uvicorn +langgraph +langchain +langchain-community +langchain-openai +httpx +pydantic +EOF + +# Build and start LangGraph service +cd /home/sam/development/langgraph_service +docker build -t langgraph-service:latest . 
+docker run -d \ + --name langgraph-service \ + -p 8090:8090 \ + --network ai-mesh \ + -e OPENCODE_PASSWORD=sam4jo \ + langgraph-service:latest + +# Test LangGraph service +curl http://localhost:8090/health +curl http://localhost:8090/agents +``` + +### Test Gitea Scraper Locally + +```bash +# Set environment variables +export GITEA_URL=https://gitea.lab.audasmedia.com.au +export GITEA_TOKEN=your_token_here +export GITEA_USERNAME=sam + +# Run scraper test +cd /home/sam/development/knowledge_service +python gitea_scraper.py +``` + +### Start Complete Integrated Stack + +```bash +# Ensure ai-mesh network exists +docker network create ai-mesh 2>/dev/null || true + +# Start all services +cd /home/sam/development +docker-compose -f docker-compose.integrated.yml up -d + +# Verify all services +curl http://localhost:8000/health # Chat Gateway +curl http://localhost:8080/health # Knowledge Service +curl http://localhost:8090/health # LangGraph Service +curl http://localhost:8081/health # Airflow + +# Test end-to-end +curl -X POST http://localhost:8000/chat \ + -H "Content-Type: application/json" \ + -d '{"message": "What are Sam\'s hobbies?"}' +``` + +### Manual Trigger Airflow DAG + +```bash +# Trigger the Gitea ingestion DAG manually +curl -X POST http://localhost:8081/api/v1/dags/gitea_daily_ingestion/dagRuns \ + -H "Content-Type: application/json" \ + -u admin:admin \ + -d '{"conf": {}}' +``` + +--- + +## Architecture Summary + +``` +User Query + | + v +┌─────────────────┐ +│ Chat Gateway │ (Port 8000) +│ (FastAPI) │ +└────────┬────────┘ + | + v +┌─────────────────┐ +│ LangGraph │ (Port 8090) +│ Supervisor │ - Routes to specialist agents +│ (StateGraph) │ +└────────┬────────┘ + | + ┌────┴────┬──────────┐ + ▼ ▼ ▼ +┌────────┐ ┌──────────┐ ┌────────┐ +│Librarian│ │Opencode │ │ Brain │ +│(RAG) │ │(Coding) │ │(LLM) │ +└────┬───┘ └──────────┘ └────────┘ + | + v +┌─────────────────┐ ┌─────────────────┐ +│ Knowledge │◄────│ Apache Airflow │ +│ Service │ │ (Port 8081) │ +│ 
(ChromaDB) │ │ - Scheduled │ +│ (Port 8080) │ │ ingestion │ +└─────────────────┘ └────────┬────────┘ + | + v + ┌──────────────┐ + │ Gitea API │ + │ Scraper │ + └──────────────┘ +``` + +--- + +## Next Steps + +1. **Add Gitea token** to `.env` file +2. **Build and test** Gitea scraper locally +3. **Deploy Airflow** with `docker-compose up -d` +4. **Build LangGraph service** and test routing +5. **Update Chat Gateway** to use LangGraph +6. **Test end-to-end** flow with a query like "What coding projects does Sam have?" + +**All code is ready for copy-paste implementation.** diff --git a/code_1.md b/code_1.md new file mode 100644 index 0000000..034093f --- /dev/null +++ b/code_1.md @@ -0,0 +1,1128 @@ +# Modular Implementation: Gitea, Airflow, and LangGraph + +## Overview +Self-contained modular architecture with separate docker-compose files per service. +Each service has its own packages directory for fast rebuilds. + +--- + +## Phase 1: LangGraph Supervisor Service + +### Directory Structure +``` +/home/sam/development/langgraph_service/ +├── docker-compose.yml +├── Dockerfile +├── requirements.txt +├── main.py +└── supervisor_agent.py +``` + +### File: `/home/sam/development/langgraph_service/requirements.txt` + +``` +fastapi +uvicorn +langgraph +langchain +langchain-community +langchain-openai +httpx +pydantic +``` + +### File: `/home/sam/development/langgraph_service/Dockerfile` + +```dockerfile +FROM python:3.11-slim + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Create directories +RUN mkdir -p /app/packages /app/code + +WORKDIR /app + +# Install packages to isolated directory +COPY requirements.txt . +RUN pip install --target=/app/packages -r requirements.txt + +# Copy code +COPY . 
/app/code/ + +ENV PYTHONPATH=/app/packages +ENV PYTHONUNBUFFERED=1 + +WORKDIR /app/code +EXPOSE 8090 + +CMD ["python3", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8090"] +``` + +### File: `/home/sam/development/langgraph_service/supervisor_agent.py` + +```python +""" +LangGraph Supervisor Agent - Routes queries to specialist agents +""" +from typing import TypedDict, Annotated, Sequence +from langgraph.graph import StateGraph, END +from langchain_core.messages import BaseMessage, HumanMessage, AIMessage +import operator +import httpx +import os +import logging + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# State definition +class AgentState(TypedDict): + messages: Annotated[Sequence[BaseMessage], operator.add] + next_agent: str + context: dict + +# Agent routing logic +def supervisor_node(state: AgentState): + """Supervisor decides which specialist agent to call.""" + last_message = state["messages"][-1].content.lower() + + # Simple routing logic based on keywords + if any(kw in last_message for kw in ["repo", "code", "git", "github", "gitea", "project", "development"]): + return {"next_agent": "librarian"} + elif any(kw in last_message for kw in ["write", "edit", "create", "fix", "bug", "implement", "code change"]): + return {"next_agent": "opencode"} + elif any(kw in last_message for kw in ["sam", "hobby", "music", "experience", "skill", "about"]): + return {"next_agent": "librarian"} + else: + return {"next_agent": "brain"} # Default to general LLM + +def librarian_agent(state: AgentState): + """Librarian agent - queries knowledge base (ChromaDB).""" + last_message = state["messages"][-1].content + + try: + # Call knowledge service + response = httpx.post( + "http://knowledge-service:8080/query", + json={"question": last_message}, + timeout=10.0 + ) + + if response.status_code == 200: + context = response.json().get("context", "") + return { + "messages": [AIMessage(content=f"Based on my knowledge 
base:\n\n{context}")],
+                "context": {"source": "librarian", "context": context}
+            }
+    except Exception as e:
+        logger.error(f"Librarian error: {e}")
+
+    # NOTE: this fallback is also reached on a non-200 response, where no
+    # exception was raised and `e` is unbound -- so it must not reference `e`.
+    return {
+        "messages": [AIMessage(content="I couldn't find relevant information in the knowledge base.")],
+        "context": {"source": "librarian", "error": "knowledge service unavailable or returned no context"}
+    }
+
+def opencode_agent(state: AgentState):
+    """Opencode agent - handles coding tasks via MCP."""
+    last_message = state["messages"][-1].content
+
+    # Placeholder - would integrate with opencode-brain
+    return {
+        "messages": [AIMessage(content=f"I'm the coding agent. I would help you with: {last_message}")],
+        "context": {"source": "opencode", "action": "coding_task"}
+    }
+
+def brain_agent(state: AgentState):
+    """Brain agent - general LLM fallback."""
+    last_message = state["messages"][-1].content
+
+    try:
+        # Call opencode-brain service
+        auth = httpx.BasicAuth("opencode", os.getenv("OPENCODE_PASSWORD", "sam4jo"))
+        timeout_long = httpx.Timeout(180.0, connect=10.0)
+
+        # Sync Client, not AsyncClient: this function is synchronous, and
+        # AsyncClient.post() would return an un-awaited coroutine here.
+        with httpx.Client(auth=auth, timeout=timeout_long) as client:
+            # Create session
+            session_res = client.post("http://opencode-brain:5000/session", json={"title": "Supervisor Query"})
+            session_id = session_res.json()["id"]
+
+            # Send message
+            response = client.post(
+                f"http://opencode-brain:5000/session/{session_id}/message",
+                json={"parts": [{"type": "text", "text": last_message}]}
+            )
+
+            data = response.json()
+            if "parts" in data:
+                for part in data["parts"]:
+                    if part.get("type") == "text":
+                        return {
+                            "messages": [AIMessage(content=part["text"])],
+                            "context": {"source": "brain"}
+                        }
+    except Exception as e:
+        logger.error(f"Brain error: {e}")
+
+    return {
+        "messages": [AIMessage(content="I'm thinking about this...")],
+        "context": {"source": "brain"}
+    }
+
+def route_decision(state: AgentState):
+    """Routing function based on supervisor decision."""
+    return state["next_agent"]
+
+# Build the graph
+workflow = StateGraph(AgentState)
+
+# Add nodes
+workflow.add_node("supervisor", supervisor_node) +workflow.add_node("librarian", librarian_agent) +workflow.add_node("opencode", opencode_agent) +workflow.add_node("brain", brain_agent) + +# Add edges +workflow.set_entry_point("supervisor") + +# Conditional routing from supervisor +workflow.add_conditional_edges( + "supervisor", + route_decision, + { + "librarian": "librarian", + "opencode": "opencode", + "brain": "brain" + } +) + +# All specialist agents end +workflow.add_edge("librarian", END) +workflow.add_edge("opencode", END) +workflow.add_edge("brain", END) + +# Compile the graph +supervisor_graph = workflow.compile() + +# Main entry point for queries +async def process_query(query: str) -> dict: + """Process a query through the supervisor graph.""" + result = await supervisor_graph.ainvoke({ + "messages": [HumanMessage(content=query)], + "next_agent": "", + "context": {} + }) + + return { + "response": result["messages"][-1].content, + "context": result.get("context", {}) + } +``` + +### File: `/home/sam/development/langgraph_service/main.py` + +```python +""" +LangGraph Supervisor Service - FastAPI wrapper for agent orchestration +""" +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +from supervisor_agent import process_query +import logging +import sys + +logging.basicConfig(level=logging.INFO, stream=sys.stdout) +logger = logging.getLogger(__name__) + +app = FastAPI(title="LangGraph Supervisor Service") + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +class QueryRequest(BaseModel): + query: str + +class QueryResponse(BaseModel): + response: str + agent_used: str + context: dict + +@app.get("/health") +async def health(): + return {"status": "healthy", "service": "langgraph-supervisor"} + +@app.post("/query", response_model=QueryResponse) +async def query_supervisor(request: QueryRequest): + """Main 
entry point for agent orchestration.""" + logger.info(f"Received query: {request.query}") + + try: + result = await process_query(request.query) + + return QueryResponse( + response=result["response"], + agent_used=result["context"].get("source", "unknown"), + context=result["context"] + ) + except Exception as e: + logger.error(f"Error processing query: {e}") + return QueryResponse( + response="Error processing your request", + agent_used="error", + context={"error": str(e)} + ) + +@app.get("/agents") +async def list_agents(): + """List available specialist agents.""" + return { + "agents": [ + { + "name": "librarian", + "description": "Queries the knowledge base for semantic information", + "triggers": ["repo", "code", "git", "hobby", "about", "skill"] + }, + { + "name": "opencode", + "description": "Handles coding tasks and file modifications", + "triggers": ["write", "edit", "create", "fix", "implement"] + }, + { + "name": "brain", + "description": "General LLM for reasoning and generation", + "triggers": ["default", "general questions"] + } + ] + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8090) +``` + +### File: `/home/sam/development/langgraph_service/docker-compose.yml` + +```yaml +version: '3.8' + +services: + langgraph-service: + build: . 
+ image: langgraph-service:latest + container_name: langgraph-service + ports: + - "8090:8090" + volumes: + # Only mount code files, not packages + - ./main.py:/app/code/main.py:ro + - ./supervisor_agent.py:/app/code/supervisor_agent.py:ro + environment: + - PYTHONUNBUFFERED=1 + - PYTHONPATH=/app/packages + - OPENCODE_PASSWORD=${OPENCODE_PASSWORD:-sam4jo} + - KNOWLEDGE_SERVICE_URL=http://knowledge-service:8080 + networks: + - ai-mesh + restart: unless-stopped + +networks: + ai-mesh: + external: true +``` + +--- + +## Phase 2: Updated Chat Gateway (Replaces Hardcoded Logic) + +### File: `/home/sam/development/aboutme_chat_demo/backend/main.py` + +```python +""" +Chat Gateway - Routes all queries through LangGraph Supervisor +Removes hardcoded keywords, uses intelligent routing instead +""" +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +import httpx +import logging +import sys +import traceback +import os + +logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s", handlers=[logging.StreamHandler(sys.stdout)]) +logger = logging.getLogger(__name__) + +app = FastAPI() +app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"]) + +class MessageRequest(BaseModel): + message: str + +LANGGRAPH_URL = os.getenv("LANGGRAPH_URL", "http://langgraph-service:8090") + +@app.post("/chat") +async def chat(request: MessageRequest): + """ + Routes all queries through LangGraph Supervisor. 
+ No hardcoded keywords - LangGraph intelligently routes to: + - Librarian: For knowledge base queries (RAG) + - Opencode: For coding tasks + - Brain: For general LLM queries + """ + logger.info(f"Gateway: Routing query to LangGraph: {request.message}") + + try: + async with httpx.AsyncClient(timeout=httpx.Timeout(60.0, connect=10.0)) as client: + response = await client.post( + f"{LANGGRAPH_URL}/query", + json={"query": request.message} + ) + + if response.status_code == 200: + result = response.json() + agent_used = result.get("agent_used", "unknown") + logger.info(f"Gateway: Response from {agent_used} agent") + return {"response": result["response"]} + else: + logger.error(f"Gateway: LangGraph error {response.status_code}") + return {"response": "Error: Orchestration service unavailable"} + + except Exception as e: + logger.error(f"Gateway: Error routing through LangGraph: {traceback.format_exc()}") + return {"response": "Error: Unable to process your request at this time."} + +@app.get("/health") +async def health(): + return {"status": "healthy", "service": "chat-gateway"} + +@app.get("/agents") +async def list_agents(): + """List available agents from LangGraph.""" + try: + async with httpx.AsyncClient(timeout=httpx.Timeout(10.0)) as client: + response = await client.get(f"{LANGGRAPH_URL}/agents") + if response.status_code == 200: + return response.json() + except Exception as e: + logger.error(f"Error fetching agents: {e}") + + return {"agents": [], "error": "Could not retrieve agent list"} +``` + +### File: `/home/sam/development/aboutme_chat_demo/docker-compose.yml` + +```yaml +version: '3.8' + +services: + db: + image: postgres:15-alpine + environment: + POSTGRES_USER: sam + POSTGRES_PASSWORD: sam4jo + POSTGRES_DB: chat_demo + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + networks: + - ai-mesh + restart: unless-stopped + + backend: + build: ./backend + ports: + - "8000:8000" + environment: + DATABASE_URL: 
postgresql://sam:sam4jo@db:5432/chat_demo + LANGGRAPH_URL: http://langgraph-service:8090 + volumes: + - ./backend:/app + depends_on: + - db + - langgraph-service + networks: + - ai-mesh + restart: unless-stopped + + frontend: + build: ./frontend + ports: + - "5173:5173" + volumes: + - ./frontend:/app + - /app/node_modules + environment: + - CHOKIDAR_USEPOLLING=true + depends_on: + - backend + networks: + - ai-mesh + +volumes: + postgres_data: + +networks: + ai-mesh: + external: true +``` + +--- + +## Phase 3: Gitea Scraper Module + +### File: `/home/sam/development/knowledge_service/gitea_scraper.py` + +```python +""" +Gitea API Scraper - Fetches repos, READMEs, and source code +for ingestion into the knowledge base. +""" +import os +import httpx +import logging +from typing import List, Dict, Optional +from dataclasses import dataclass +from datetime import datetime + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +@dataclass +class RepoMetadata: + name: str + description: str + url: str + default_branch: str + updated_at: str + language: Optional[str] + +class GiteaScraper: + def __init__(self, base_url: str, token: str, username: str = "sam"): + self.base_url = base_url.rstrip("/") + self.token = token + self.username = username + self.headers = {"Authorization": f"token {token}"} + + def get_user_repos(self) -> List[RepoMetadata]: + """Fetch all repositories for the user.""" + repos = [] + page = 1 + + while True: + url = f"{self.base_url}/api/v1/users/{self.username}/repos?page={page}&limit=50" + + try: + response = httpx.get(url, headers=self.headers, timeout=30.0) + response.raise_for_status() + + data = response.json() + if not data: + break + + for repo in data: + repos.append(RepoMetadata( + name=repo["name"], + description=repo.get("description", ""), + url=repo["html_url"], + default_branch=repo["default_branch"], + updated_at=repo["updated_at"], + language=repo.get("language") + )) + + logger.info(f"Fetched page 
{page}, got {len(data)} repos") + page += 1 + + except Exception as e: + logger.error(f"Error fetching repos: {e}") + break + + return repos + + def get_readme(self, repo_name: str) -> str: + """Fetch README content for a repository.""" + readme_names = ["README.md", "readme.md", "Readme.md", "README.rst"] + + for readme_name in readme_names: + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{readme_name}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.warning(f"Failed to fetch {readme_name}: {e}") + continue + + return "" + + def get_repo_files(self, repo_name: str, path: str = "") -> List[Dict]: + """List files in a repository directory.""" + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/contents/{path}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + response.raise_for_status() + return response.json() + except Exception as e: + logger.error(f"Error listing files in {repo_name}/{path}: {e}") + return [] + + def get_file_content(self, repo_name: str, filepath: str) -> str: + """Fetch content of a specific file.""" + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{filepath}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.error(f"Error fetching file {filepath}: {e}") + + return "" + +# Test function +if __name__ == "__main__": + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + repos = scraper.get_user_repos() + print(f"Found {len(repos)} repositories") + + for repo in repos[:3]: + print(f"\nRepo: {repo.name}") + readme = scraper.get_readme(repo.name) + if readme: + print(f"README preview: {readme[:200]}...") 
+``` + +--- + +## Phase 4: Apache Airflow Setup + +### Directory Structure +``` +/home/sam/development/airflow/ +├── docker-compose.yml +├── .env +└── dags/ + └── gitea_ingestion_dag.py +``` + +### File: `/home/sam/development/airflow/.env` + +```bash +# Airflow Configuration +AIRFLOW_UID=1000 +AIRFLOW_GID=0 +AIRFLOW_PROJ_DIR=. +_AIRFLOW_WWW_USER_USERNAME=admin +_AIRFLOW_WWW_USER_PASSWORD=admin + +# Gitea Configuration +GITEA_URL=https://gitea.lab.audasmedia.com.au +GITEA_TOKEN=your_token_here +GITEA_USERNAME=sam + +# Knowledge Service +KNOWLEDGE_SERVICE_URL=http://knowledge-service:8080 +``` + +### File: `/home/sam/development/airflow/dags/gitea_ingestion_dag.py` + +```python +""" +Airflow DAG for scheduled Gitea repository ingestion. +Runs daily to fetch new/updated repos and ingest into ChromaDB. +""" +from datetime import datetime, timedelta +from airflow import DAG +from airflow.operators.python import PythonOperator +import os +import sys + +# Add knowledge_service to path for imports +sys.path.insert(0, '/opt/airflow/dags/repo') + +default_args = { + 'owner': 'airflow', + 'depends_on_past': False, + 'email_on_failure': False, + 'email_on_retry': False, + 'retries': 1, + 'retry_delay': timedelta(minutes=5), +} + +def fetch_gitea_repos(**context): + """Task: Fetch all repositories from Gitea.""" + from gitea_scraper import GiteaScraper + + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + repos = scraper.get_user_repos() + + # Push to XCom for downstream tasks + context['ti'].xcom_push(key='repo_count', value=len(repos)) + context['ti'].xcom_push(key='repos', value=[ + { + 'name': r.name, + 'description': r.description, + 'url': r.url, + 'updated_at': r.updated_at + } + for r in repos + ]) + + return f"Fetched {len(repos)} repositories" + +def fetch_readmes(**context): + """Task: Fetch READMEs for all repositories.""" + 
from gitea_scraper import GiteaScraper + + ti = context['ti'] + repos = ti.xcom_pull(task_ids='fetch_repos', key='repos') + + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + readme_data = [] + for repo in repos[:10]: # Limit to 10 repos per run for testing + readme = scraper.get_readme(repo['name']) + if readme: + readme_data.append({ + 'repo': repo['name'], + 'content': readme[:5000], # First 5000 chars + 'url': repo['url'] + }) + + ti.xcom_push(key='readme_data', value=readme_data) + + return f"Fetched {len(readme_data)} READMEs" + +def ingest_to_chroma(**context): + """Task: Ingest fetched data into ChromaDB via knowledge service.""" + import httpx + + ti = context['ti'] + readme_data = ti.xcom_pull(task_ids='fetch_readmes', key='readme_data') + + knowledge_service_url = os.getenv("KNOWLEDGE_SERVICE_URL", "http://knowledge-service:8080") + + documents_ingested = 0 + for item in readme_data: + try: + # Call knowledge service ingest endpoint + response = httpx.post( + f"{knowledge_service_url}/ingest", + json={ + 'source': f"gitea:{item['repo']}", + 'content': item['content'], + 'metadata': { + 'repo': item['repo'], + 'url': item['url'], + 'type': 'readme' + } + }, + timeout=30.0 + ) + + if response.status_code == 200: + documents_ingested += 1 + + except Exception as e: + print(f"Error ingesting {item['repo']}: {e}") + + return f"Ingested {documents_ingested} documents into ChromaDB" + +# Define the DAG +with DAG( + 'gitea_daily_ingestion', + default_args=default_args, + description='Daily ingestion of Gitea repositories into knowledge base', + schedule_interval=timedelta(days=1), + start_date=datetime(2024, 1, 1), + catchup=False, + tags=['gitea', 'ingestion', 'knowledge'], +) as dag: + + fetch_repos_task = PythonOperator( + task_id='fetch_repos', + python_callable=fetch_gitea_repos, + ) + + fetch_readmes_task = 
PythonOperator( + task_id='fetch_readmes', + python_callable=fetch_readmes, + ) + + ingest_task = PythonOperator( + task_id='ingest_to_chroma', + python_callable=ingest_to_chroma, + ) + + fetch_repos_task >> fetch_readmes_task >> ingest_task +``` + +### File: `/home/sam/development/airflow/docker-compose.yml` + +```yaml +version: '3.8' + +x-airflow-common: + &airflow-common + image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.8.1} + environment: + &airflow-common-env + AIRFLOW__CORE__EXECUTOR: CeleryExecutor + AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow + AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0 + AIRFLOW__CORE__FERNET_KEY: '' + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true' + AIRFLOW__CORE__LOAD_EXAMPLES: 'false' + AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session' + AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true' + volumes: + - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags + - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs + - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config + - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins + - /home/sam/development/knowledge_service:/opt/airflow/dags/repo:ro + user: "${AIRFLOW_UID:-50000}:0" + depends_on: + &airflow-common-depends-on + redis: + condition: service_healthy + postgres: + condition: service_healthy + +services: + postgres: + image: postgres:13 + environment: + POSTGRES_USER: airflow + POSTGRES_PASSWORD: airflow + POSTGRES_DB: airflow + volumes: + - postgres-db-volume:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready", "-U", "airflow"] + interval: 10s + retries: 5 + start_period: 5s + restart: always + networks: + - ai-mesh + + redis: + image: redis:latest + expose: + - 6379 + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 10s + timeout: 30s + retries: 50 + start_period: 30s + restart: 
always + networks: + - ai-mesh + + airflow-webserver: + <<: *airflow-common + command: webserver + ports: + - "8081:8080" + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-scheduler: + <<: *airflow-common + command: scheduler + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8974/health"] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-worker: + <<: *airflow-common + command: celery worker + healthcheck: + test: + - "CMD-SHELL" + - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"' + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + networks: + - ai-mesh + + airflow-init: + <<: *airflow-common + entrypoint: /bin/bash + command: + - -c + - | + if [[ -z "${AIRFLOW_UID}" ]]; then + echo "WARNING!!!: AIRFLOW_UID not set!" 
+ echo "Using default UID: 50000" + export AIRFLOW_UID=50000 + fi + mkdir -p /sources/logs /sources/dags /sources/plugins + chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} + exec /entrypoint airflow version + environment: + <<: *airflow-common-env + _AIRFLOW_DB_MIGRATE: 'true' + _AIRFLOW_WWW_USER_CREATE: 'true' + _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow} + _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} + user: "0:0" + volumes: + - ${AIRFLOW_PROJ_DIR:-.}:/sources + networks: + - ai-mesh + +volumes: + postgres-db-volume: + +networks: + ai-mesh: + external: true +``` + +--- + +## Phase 5: Terminal Commands + +### Setup LangGraph Service + +```bash +# Create langgraph_service directory +mkdir -p /home/sam/development/langgraph_service + +# Create requirements.txt +cat > /home/sam/development/langgraph_service/requirements.txt << 'EOF' +fastapi +uvicorn +langgraph +langchain +langchain-community +langchain-openai +httpx +pydantic +EOF + +# Start LangGraph service +cd /home/sam/development/langgraph_service +docker-compose up -d --build + +# Verify +sleep 10 +curl http://localhost:8090/health +curl http://localhost:8090/agents +``` + +### Setup Airflow + +```bash +# Create directory structure +mkdir -p /home/sam/development/airflow/{dags,logs,config,plugins} + +# Create .env file +cat > /home/sam/development/airflow/.env << 'EOF' +AIRFLOW_UID=1000 +AIRFLOW_GID=0 +AIRFLOW_PROJ_DIR=. 
+_AIRFLOW_WWW_USER_USERNAME=admin +_AIRFLOW_WWW_USER_PASSWORD=admin +GITEA_URL=https://gitea.lab.audasmedia.com.au +GITEA_TOKEN=your_token_here +GITEA_USERNAME=sam +KNOWLEDGE_SERVICE_URL=http://knowledge-service:8080 +EOF + +# Copy gitea_scraper.py to dags +cp /home/sam/development/knowledge_service/gitea_scraper.py /home/sam/development/airflow/dags/ + +# Start Airflow +cd /home/sam/development/airflow +docker-compose up -d + +# Wait and verify +sleep 60 +curl http://localhost:8081/health +``` + +### Update Chat Gateway + +```bash +# Replace backend main.py with new version +# (Copy the main.py content from this file to: /home/sam/development/aboutme_chat_demo/backend/main.py) + +# Update docker-compose to include langgraph dependency +cd /home/sam/development/aboutme_chat_demo +docker-compose up -d --build + +# Verify +curl http://localhost:8000/health +``` + +### Test Gitea Scraper Locally + +```bash +# Set environment variables +export GITEA_URL=https://gitea.lab.audasmedia.com.au +export GITEA_TOKEN=your_token_here +export GITEA_USERNAME=sam + +# Run test +cd /home/sam/development/knowledge_service +python gitea_scraper.py +``` + +### Trigger Airflow DAG Manually + +```bash +# Trigger the DAG +curl -X POST http://localhost:8081/api/v1/dags/gitea_daily_ingestion/dagRuns \ + -H "Content-Type: application/json" \ + -u admin:admin \ + -d '{"conf": {}}' +``` + +### Complete Stack Startup + +```bash +# Ensure network exists +docker network create ai-mesh 2>/dev/null || true + +# Start all services in order +cd /home/sam/development/knowledge_service && docker-compose up -d +cd /home/sam/development/langgraph_service && docker-compose up -d +cd /home/sam/development/aboutme_chat_demo && docker-compose up -d +cd /home/sam/development/airflow && docker-compose up -d + +# Verify all services +echo "Testing services..." 
+sleep 30 +curl -s http://localhost:8000/health && echo "✓ Chat Gateway" +curl -s http://localhost:8080/health && echo "✓ Knowledge Service" +curl -s http://localhost:8090/health && echo "✓ LangGraph Service" +curl -s http://localhost:8081/health && echo "✓ Airflow" +``` + +### Test End-to-End + +```bash +# Test query through LangGraph +curl -X POST http://localhost:8000/chat \ + -H "Content-Type: application/json" \ + -d '{"message": "What are Sam'"'"'s coding projects?"}' + +# Test agent routing +curl -X POST http://localhost:8000/chat \ + -H "Content-Type: application/json" \ + -d '{"message": "Write a Python function to calculate fibonacci"}' +``` + +--- + +## Architecture Summary + +``` + User Query + | + v + ┌───────────────────────┐ + │ Chat Gateway │ Port 8000 + │ (FastAPI) │ Routes to LangGraph + └───────────┬───────────┘ + | + v + ┌───────────────────────┐ + │ LangGraph Supervisor │ Port 8090 + │ (Agent Router) │ - Decides agent + └───────────┬───────────┘ + | + ┌───────────────┼───────────────┐ + | | | + v v v + ┌─────────┐ ┌──────────┐ ┌──────────┐ + │Librarian│ │ Opencode │ │ Brain │ + │ (RAG) │ │ (Coding) │ │ (LLM) │ + └────┬────┘ └──────────┘ └──────────┘ + | + v +┌─────────────────┐ ┌─────────────────┐ +│ Knowledge │◄────│ Apache Airflow │ +│ Service │ │ (Scheduler) │ +│ (ChromaDB) │ └────────┬────────┘ +│ Port 8080 │ | +└─────────────────┘ v + ┌──────────────┐ + │ Gitea API │ + │ Scraper │ + │ (Daily DAG) │ + └──────────────┘ +``` + +--- + +## Key Improvements + +1. **Modular Docker Compose**: Each service has its own file +2. **Self-contained packages**: LangGraph has its own venv, no sharing +3. **Fast rebuilds**: /app/packages pattern for all services +4. **No hardcoded keywords**: LangGraph intelligently routes based on context +5. **Scalable**: Each service can be updated independently +6. **Scheduled ingestion**: Airflow runs daily Gitea scrapes + +All code is modular, self-contained, and ready for copy-paste implementation. 
diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..9ac5c8c --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,41 @@ +services: + db: + image: postgres:15-alpine + environment: + POSTGRES_USER: sam + POSTGRES_PASSWORD: sam4jo + POSTGRES_DB: chat_demo + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + networks: + - ai-mesh + backend: + build: ./backend + ports: + - "8000:8000" + environment: + DATABASE_URL: postgresql://sam:sam4jo@db:5432/chat_demo + volumes: + - ./backend:/app + depends_on: + - db + networks: + - ai-mesh + frontend: + build: ./frontend + ports: + - "5173:5173" + volumes: + - ./frontend:/app + - /app/node_modules + environment: + - CHOKIDAR_USEPOLLING=true + networks: + - ai-mesh +volumes: + postgres_data: +networks: + ai-mesh: + external: true diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..a547bf3 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,24 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..1a98403 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,7 @@ +FROM node:20-alpine +WORKDIR /app +COPY package.json pnpm-lock.yaml ./ +RUN npm install -g pnpm && pnpm install +COPY . . +CMD ["pnpm", "run", "dev", "--host", "0.0.0.0"] + diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..d2e7761 --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,73 @@ +# React + TypeScript + Vite + +This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules. 
+ +Currently, two official plugins are available: + +- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh +- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh + +## React Compiler + +The React Compiler is not enabled on this template because of its impact on dev & build performances. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation). + +## Expanding the ESLint configuration + +If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules: + +```js +export default defineConfig([ + globalIgnores(['dist']), + { + files: ['**/*.{ts,tsx}'], + extends: [ + // Other configs... + + // Remove tseslint.configs.recommended and replace with this + tseslint.configs.recommendedTypeChecked, + // Alternatively, use this for stricter rules + tseslint.configs.strictTypeChecked, + // Optionally, add this for stylistic rules + tseslint.configs.stylisticTypeChecked, + + // Other configs... + ], + languageOptions: { + parserOptions: { + project: ['./tsconfig.node.json', './tsconfig.app.json'], + tsconfigRootDir: import.meta.dirname, + }, + // other options... + }, + }, +]) +``` + +You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules: + +```js +// eslint.config.js +import reactX from 'eslint-plugin-react-x' +import reactDom from 'eslint-plugin-react-dom' + +export default defineConfig([ + globalIgnores(['dist']), + { + files: ['**/*.{ts,tsx}'], + extends: [ + // Other configs... 
+ // Enable lint rules for React + reactX.configs['recommended-typescript'], + // Enable lint rules for React DOM + reactDom.configs.recommended, + ], + languageOptions: { + parserOptions: { + project: ['./tsconfig.node.json', './tsconfig.app.json'], + tsconfigRootDir: import.meta.dirname, + }, + // other options... + }, + }, +]) +``` diff --git a/frontend/eslint.config.js b/frontend/eslint.config.js new file mode 100644 index 0000000..5e6b472 --- /dev/null +++ b/frontend/eslint.config.js @@ -0,0 +1,23 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' +import tseslint from 'typescript-eslint' +import { defineConfig, globalIgnores } from 'eslint/config' + +export default defineConfig([ + globalIgnores(['dist']), + { + files: ['**/*.{ts,tsx}'], + extends: [ + js.configs.recommended, + tseslint.configs.recommended, + reactHooks.configs.flat.recommended, + reactRefresh.configs.vite, + ], + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + }, + }, +]) diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000..072a57e --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,13 @@ + + + + + + + frontend + + +
+ + + diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..e578d81 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,36 @@ +{ + "name": "frontend", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "tsc -b && vite build", + "lint": "eslint .", + "preview": "vite preview" + }, + "dependencies": { + "@tanstack/react-query": "^5.90.21", + "axios": "^1.13.5", + "react": "^19.2.0", + "react-dom": "^19.2.0" + }, + "devDependencies": { + "@eslint/js": "^9.39.1", + "@tailwindcss/vite": "^4.2.0", + "@types/node": "^24.10.1", + "@types/react": "^19.2.7", + "@types/react-dom": "^19.2.3", + "@vitejs/plugin-react": "^5.1.1", + "autoprefixer": "^10.4.24", + "eslint": "^9.39.1", + "eslint-plugin-react-hooks": "^7.0.1", + "eslint-plugin-react-refresh": "^0.4.24", + "globals": "^16.5.0", + "postcss": "^8.5.6", + "tailwindcss": "^4.2.0", + "typescript": "~5.9.3", + "typescript-eslint": "^8.48.0", + "vite": "^7.3.1" + } +} diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml new file mode 100644 index 0000000..4a193f2 --- /dev/null +++ b/frontend/pnpm-lock.yaml @@ -0,0 +1,2634 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@tanstack/react-query': + specifier: ^5.90.21 + version: 5.90.21(react@19.2.4) + axios: + specifier: ^1.13.5 + version: 1.13.5 + react: + specifier: ^19.2.0 + version: 19.2.4 + react-dom: + specifier: ^19.2.0 + version: 19.2.4(react@19.2.4) + devDependencies: + '@eslint/js': + specifier: ^9.39.1 + version: 9.39.3 + '@tailwindcss/vite': + specifier: ^4.2.0 + version: 4.2.0(vite@7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)) + '@types/node': + specifier: ^24.10.1 + version: 24.10.13 + '@types/react': + specifier: ^19.2.7 + version: 19.2.14 + '@types/react-dom': + specifier: ^19.2.3 + version: 19.2.3(@types/react@19.2.14) + '@vitejs/plugin-react': + 
specifier: ^5.1.1 + version: 5.1.4(vite@7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)) + autoprefixer: + specifier: ^10.4.24 + version: 10.4.24(postcss@8.5.6) + eslint: + specifier: ^9.39.1 + version: 9.39.3(jiti@2.6.1) + eslint-plugin-react-hooks: + specifier: ^7.0.1 + version: 7.0.1(eslint@9.39.3(jiti@2.6.1)) + eslint-plugin-react-refresh: + specifier: ^0.4.24 + version: 0.4.26(eslint@9.39.3(jiti@2.6.1)) + globals: + specifier: ^16.5.0 + version: 16.5.0 + postcss: + specifier: ^8.5.6 + version: 8.5.6 + tailwindcss: + specifier: ^4.2.0 + version: 4.2.0 + typescript: + specifier: ~5.9.3 + version: 5.9.3 + typescript-eslint: + specifier: ^8.48.0 + version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + vite: + specifier: ^7.3.1 + version: 7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1) + +packages: + + '@babel/code-frame@7.29.0': + resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==} + engines: {node: '>=6.9.0'} + + '@babel/compat-data@7.29.0': + resolution: {integrity: sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==} + engines: {node: '>=6.9.0'} + + '@babel/core@7.29.0': + resolution: {integrity: sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==} + engines: {node: '>=6.9.0'} + + '@babel/generator@7.29.1': + resolution: {integrity: sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-compilation-targets@7.28.6': + resolution: {integrity: sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-globals@7.28.0': + resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} + engines: {node: '>=6.9.0'} + + 
'@babel/helper-module-imports@7.28.6': + resolution: {integrity: sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-transforms@7.28.6': + resolution: {integrity: sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-plugin-utils@7.28.6': + resolution: {integrity: sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==} + engines: {node: '>=6.9.0'} + + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.28.5': + resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.27.1': + resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} + engines: {node: '>=6.9.0'} + + '@babel/helpers@7.28.6': + resolution: {integrity: sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.29.0': + resolution: {integrity: sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/plugin-transform-react-jsx-self@7.27.1': + resolution: {integrity: sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-react-jsx-source@7.27.1': + resolution: {integrity: 
sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/template@7.28.6': + resolution: {integrity: sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==} + engines: {node: '>=6.9.0'} + + '@babel/traverse@7.29.0': + resolution: {integrity: sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==} + engines: {node: '>=6.9.0'} + + '@babel/types@7.29.0': + resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} + engines: {node: '>=6.9.0'} + + '@esbuild/aix-ppc64@0.27.3': + resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.27.3': + resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.27.3': + resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.27.3': + resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.27.3': + resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.27.3': + resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + 
'@esbuild/freebsd-arm64@0.27.3': + resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.27.3': + resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.27.3': + resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.27.3': + resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.27.3': + resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.27.3': + resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.27.3': + resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.27.3': + resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.27.3': + resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.27.3': + resolution: {integrity: 
sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.27.3': + resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.27.3': + resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.27.3': + resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.27.3': + resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.27.3': + resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.27.3': + resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.27.3': + resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.27.3': + resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.27.3': + resolution: {integrity: 
sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.27.3': + resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@eslint-community/eslint-utils@4.9.1': + resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + '@eslint-community/regexpp@4.12.2': + resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/config-array@0.21.1': + resolution: {integrity: sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/config-helpers@0.4.2': + resolution: {integrity: sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/core@0.17.0': + resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/eslintrc@3.3.3': + resolution: {integrity: sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/js@9.39.3': + resolution: {integrity: sha512-1B1VkCq6FuUNlQvlBYb+1jDu/gV297TIs/OeiaSR9l1H27SVW55ONE1e1Vp16NqP683+xEGzxYtv4XCiDPaQiw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/object-schema@2.1.7': + resolution: {integrity: 
sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/plugin-kit@0.4.1': + resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@humanfs/core@0.19.1': + resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} + engines: {node: '>=18.18.0'} + + '@humanfs/node@0.16.7': + resolution: {integrity: sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==} + engines: {node: '>=18.18.0'} + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/retry@0.4.3': + resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} + engines: {node: '>=18.18'} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/remapping@2.3.5': + resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + + '@rolldown/pluginutils@1.0.0-rc.3': + resolution: 
{integrity: sha512-eybk3TjzzzV97Dlj5c+XrBFW57eTNhzod66y9HrBlzJ6NsCrWCp/2kaPS3K9wJmurBC0Tdw4yPjXKZqlznim3Q==} + + '@rollup/rollup-android-arm-eabi@4.59.0': + resolution: {integrity: sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.59.0': + resolution: {integrity: sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.59.0': + resolution: {integrity: sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.59.0': + resolution: {integrity: sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.59.0': + resolution: {integrity: sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.59.0': + resolution: {integrity: sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.59.0': + resolution: {integrity: sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.59.0': + resolution: {integrity: sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.59.0': + resolution: {integrity: sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.59.0': + resolution: {integrity: 
sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loong64-gnu@4.59.0': + resolution: {integrity: sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-loong64-musl@4.59.0': + resolution: {integrity: sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-ppc64-gnu@4.59.0': + resolution: {integrity: sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-ppc64-musl@4.59.0': + resolution: {integrity: sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.59.0': + resolution: {integrity: sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.59.0': + resolution: {integrity: sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.59.0': + resolution: {integrity: sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.59.0': + resolution: {integrity: sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.59.0': + resolution: {integrity: sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-openbsd-x64@4.59.0': + resolution: {integrity: 
sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==} + cpu: [x64] + os: [openbsd] + + '@rollup/rollup-openharmony-arm64@4.59.0': + resolution: {integrity: sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==} + cpu: [arm64] + os: [openharmony] + + '@rollup/rollup-win32-arm64-msvc@4.59.0': + resolution: {integrity: sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.59.0': + resolution: {integrity: sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-gnu@4.59.0': + resolution: {integrity: sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==} + cpu: [x64] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.59.0': + resolution: {integrity: sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==} + cpu: [x64] + os: [win32] + + '@tailwindcss/node@4.2.0': + resolution: {integrity: sha512-Yv+fn/o2OmL5fh/Ir62VXItdShnUxfpkMA4Y7jdeC8O81WPB8Kf6TT6GSHvnqgSwDzlB5iT7kDpeXxLsUS0T6Q==} + + '@tailwindcss/oxide-android-arm64@4.2.0': + resolution: {integrity: sha512-F0QkHAVaW/JNBWl4CEKWdZ9PMb0khw5DCELAOnu+RtjAfx5Zgw+gqCHFvqg3AirU1IAd181fwOtJQ5I8Yx5wtw==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [android] + + '@tailwindcss/oxide-darwin-arm64@4.2.0': + resolution: {integrity: sha512-I0QylkXsBsJMZ4nkUNSR04p6+UptjcwhcVo3Zu828ikiEqHjVmQL9RuQ6uT/cVIiKpvtVA25msu/eRV97JeNSA==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [darwin] + + '@tailwindcss/oxide-darwin-x64@4.2.0': + resolution: {integrity: sha512-6TmQIn4p09PBrmnkvbYQ0wbZhLtbaksCDx7Y7R3FYYx0yxNA7xg5KP7dowmQ3d2JVdabIHvs3Hx4K3d5uCf8xg==} + engines: {node: '>= 20'} + cpu: [x64] + os: [darwin] + + '@tailwindcss/oxide-freebsd-x64@4.2.0': + resolution: 
{integrity: sha512-qBudxDvAa2QwGlq9y7VIzhTvp2mLJ6nD/G8/tI70DCDoneaUeLWBJaPcbfzqRIWraj+o969aDQKvKW9dvkUizw==} + engines: {node: '>= 20'} + cpu: [x64] + os: [freebsd] + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.0': + resolution: {integrity: sha512-7XKkitpy5NIjFZNUQPeUyNJNJn1CJeV7rmMR+exHfTuOsg8rxIO9eNV5TSEnqRcaOK77zQpsyUkBWmPy8FgdSg==} + engines: {node: '>= 20'} + cpu: [arm] + os: [linux] + + '@tailwindcss/oxide-linux-arm64-gnu@4.2.0': + resolution: {integrity: sha512-Mff5a5Q3WoQR01pGU1gr29hHM1N93xYrKkGXfPw/aRtK4bOc331Ho4Tgfsm5WDGvpevqMpdlkCojT3qlCQbCpA==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + + '@tailwindcss/oxide-linux-arm64-musl@4.2.0': + resolution: {integrity: sha512-XKcSStleEVnbH6W/9DHzZv1YhjE4eSS6zOu2eRtYAIh7aV4o3vIBs+t/B15xlqoxt6ef/0uiqJVB6hkHjWD/0A==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [linux] + + '@tailwindcss/oxide-linux-x64-gnu@4.2.0': + resolution: {integrity: sha512-/hlXCBqn9K6fi7eAM0RsobHwJYa5V/xzWspVTzxnX+Ft9v6n+30Pz8+RxCn7sQL/vRHHLS30iQPrHQunu6/vJA==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + + '@tailwindcss/oxide-linux-x64-musl@4.2.0': + resolution: {integrity: sha512-lKUaygq4G7sWkhQbfdRRBkaq4LY39IriqBQ+Gk6l5nKq6Ay2M2ZZb1tlIyRNgZKS8cbErTwuYSor0IIULC0SHw==} + engines: {node: '>= 20'} + cpu: [x64] + os: [linux] + + '@tailwindcss/oxide-wasm32-wasi@4.2.0': + resolution: {integrity: sha512-xuDjhAsFdUuFP5W9Ze4k/o4AskUtI8bcAGU4puTYprr89QaYFmhYOPfP+d1pH+k9ets6RoE23BXZM1X1jJqoyw==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + bundledDependencies: + - '@napi-rs/wasm-runtime' + - '@emnapi/core' + - '@emnapi/runtime' + - '@tybys/wasm-util' + - '@emnapi/wasi-threads' + - tslib + + '@tailwindcss/oxide-win32-arm64-msvc@4.2.0': + resolution: {integrity: sha512-2UU/15y1sWDEDNJXxEIrfWKC2Yb4YgIW5Xz2fKFqGzFWfoMHWFlfa1EJlGO2Xzjkq/tvSarh9ZTjvbxqWvLLXA==} + engines: {node: '>= 20'} + cpu: [arm64] + os: [win32] + + '@tailwindcss/oxide-win32-x64-msvc@4.2.0': + resolution: {integrity: 
sha512-CrFadmFoc+z76EV6LPG1jx6XceDsaCG3lFhyLNo/bV9ByPrE+FnBPckXQVP4XRkN76h3Fjt/a+5Er/oA/nCBvQ==} + engines: {node: '>= 20'} + cpu: [x64] + os: [win32] + + '@tailwindcss/oxide@4.2.0': + resolution: {integrity: sha512-AZqQzADaj742oqn2xjl5JbIOzZB/DGCYF/7bpvhA8KvjUj9HJkag6bBuwZvH1ps6dfgxNHyuJVlzSr2VpMgdTQ==} + engines: {node: '>= 20'} + + '@tailwindcss/vite@4.2.0': + resolution: {integrity: sha512-da9mFCaHpoOgtQiWtDGIikTrSpUFBtIZCG3jy/u2BGV+l/X1/pbxzmIUxNt6JWm19N3WtGi4KlJdSH/Si83WOA==} + peerDependencies: + vite: ^5.2.0 || ^6 || ^7 + + '@tanstack/query-core@5.90.20': + resolution: {integrity: sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==} + + '@tanstack/react-query@5.90.21': + resolution: {integrity: sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==} + peerDependencies: + react: ^18 || ^19 + + '@types/babel__core@7.20.5': + resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} + + '@types/babel__generator@7.27.0': + resolution: {integrity: sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==} + + '@types/babel__template@7.4.4': + resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==} + + '@types/babel__traverse@7.28.0': + resolution: {integrity: sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==} + + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/node@24.10.13': + resolution: {integrity: sha512-oH72nZRfDv9lADUBSo104Aq7gPHpQZc4BTx38r9xf9pg5LfP6EzSyH2n7qFmmxRQXh7YlUXODcYsg6PuTDSxGg==} + + 
'@types/react-dom@19.2.3': + resolution: {integrity: sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==} + peerDependencies: + '@types/react': ^19.2.0 + + '@types/react@19.2.14': + resolution: {integrity: sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==} + + '@typescript-eslint/eslint-plugin@8.56.0': + resolution: {integrity: sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + '@typescript-eslint/parser': ^8.56.0 + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/parser@8.56.0': + resolution: {integrity: sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/project-service@8.56.0': + resolution: {integrity: sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/scope-manager@8.56.0': + resolution: {integrity: sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/tsconfig-utils@8.56.0': + resolution: {integrity: sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/type-utils@8.56.0': + resolution: {integrity: sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + 
peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/types@8.56.0': + resolution: {integrity: sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/typescript-estree@8.56.0': + resolution: {integrity: sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/utils@8.56.0': + resolution: {integrity: sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/visitor-keys@8.56.0': + resolution: {integrity: sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@vitejs/plugin-react@5.1.4': + resolution: {integrity: sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==} + engines: {node: ^20.19.0 || >=22.12.0} + peerDependencies: + vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.16.0: + resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==} + engines: {node: '>=0.4.0'} + hasBin: true + + ajv@6.14.0: + resolution: {integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==} + + ansi-styles@4.3.0: + resolution: {integrity: 
sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + autoprefixer@10.4.24: + resolution: {integrity: sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==} + engines: {node: ^10 || ^12 || >=14} + hasBin: true + peerDependencies: + postcss: ^8.1.0 + + axios@1.13.5: + resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + balanced-match@4.0.4: + resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} + engines: {node: 18 || 20 || >=22} + + baseline-browser-mapping@2.10.0: + resolution: {integrity: sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==} + engines: {node: '>=6.0.0'} + hasBin: true + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + brace-expansion@5.0.3: + resolution: {integrity: sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==} + engines: {node: 18 || 20 || >=22} + + browserslist@4.28.1: + resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: 
sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + caniuse-lite@1.0.30001772: + resolution: {integrity: sha512-mIwLZICj+ntVTw4BT2zfp+yu/AqV6GMKfJVJMx3MwPxs+uk/uj2GLl2dH8LQbjiLDX66amCga5nKFyDgRR43kg==} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + convert-source-map@2.0.0: + resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + csstype@3.2.3: + resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + 
deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + detect-libc@2.1.2: + resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} + engines: {node: '>=8'} + + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + electron-to-chromium@1.5.302: + resolution: {integrity: sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==} + + enhanced-resolve@5.19.0: + resolution: {integrity: sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==} + engines: {node: '>=10.13.0'} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + + esbuild@0.27.3: + resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==} + engines: {node: '>=18'} + hasBin: true + + escalade@3.2.0: + resolution: {integrity: 
sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + eslint-plugin-react-hooks@7.0.1: + resolution: {integrity: sha512-O0d0m04evaNzEPoSW+59Mezf8Qt0InfgGIBJnpC0h3NH/WjUAR7BIKUfysC6todmtiZ/A0oUVS8Gce0WhBrHsA==} + engines: {node: '>=18'} + peerDependencies: + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 + + eslint-plugin-react-refresh@0.4.26: + resolution: {integrity: sha512-1RETEylht2O6FM/MvgnyvT+8K21wLqDNg4qD51Zj3guhjt433XbnnkVttHMyaVyAFD03QSV4LPS5iE3VQmO7XQ==} + peerDependencies: + eslint: '>=8.40' + + eslint-scope@8.4.0: + resolution: {integrity: sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@4.2.1: + resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@5.0.1: + resolution: {integrity: sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==} + engines: {node: ^20.19.0 || ^22.13.0 || >=24} + + eslint@9.39.3: + resolution: {integrity: sha512-VmQ+sifHUbI/IcSopBCF/HO3YiHQx/AVd3UVyYL6weuwW+HvON9VYn5l6Zl1WZzPWXPNZrSQpxwkkZ/VuvJZzg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + hasBin: true + peerDependencies: + jiti: '*' + peerDependenciesMeta: + jiti: + optional: true + + espree@10.4.0: + resolution: {integrity: 
sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + file-entry-cache@8.0.0: + resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} + engines: {node: '>=16.0.0'} + + find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + flat-cache@4.0.1: + resolution: {integrity: 
sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} + engines: {node: '>=16'} + + flatted@3.3.3: + resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + form-data@4.0.5: + resolution: {integrity: sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==} + engines: {node: '>= 6'} + + fraction.js@5.3.4: + resolution: {integrity: sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + globals@14.0.0: + resolution: {integrity: 
sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} + engines: {node: '>=18'} + + globals@16.5.0: + resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==} + engines: {node: '>=18'} + + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + hermes-estree@0.25.1: + resolution: {integrity: sha512-0wUoCcLp+5Ev5pDW2OriHC2MJCbwLwuRx+gAqMTOkGKJJiBCLjtrvy4PWUGn6MIVefecRpzoOZ/UV6iGdOr+Cw==} + + hermes-parser@0.25.1: + resolution: {integrity: sha512-6pEjquH3rqaI6cYAXYPcz9MS4rY6R4ngRgrgfDshRptUZIc3lw0MCIJIGDj9++mfySOuPTHB4nrSW99BCvOPIA==} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + ignore@7.0.5: + resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} + engines: {node: '>= 4'} + + import-fresh@3.3.1: + resolution: {integrity: 
sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} + engines: {node: '>=6'} + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jiti@2.6.1: + resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} + hasBin: true + + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + js-yaml@4.1.1: + resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + + jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + json5@2.2.3: + resolution: {integrity: 
sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + lightningcss-android-arm64@1.31.1: + resolution: {integrity: sha512-HXJF3x8w9nQ4jbXRiNppBCqeZPIAfUo8zE/kOEGbW5NZvGc/K7nMxbhIr+YlFlHW5mpbg/YFPdbnCh1wAXCKFg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [android] + + lightningcss-darwin-arm64@1.31.1: + resolution: {integrity: sha512-02uTEqf3vIfNMq3h/z2cJfcOXnQ0GRwQrkmPafhueLb2h7mqEidiCzkE4gBMEH65abHRiQvhdcQ+aP0D0g67sg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [darwin] + + lightningcss-darwin-x64@1.31.1: + resolution: {integrity: sha512-1ObhyoCY+tGxtsz1lSx5NXCj3nirk0Y0kB/g8B8DT+sSx4G9djitg9ejFnjb3gJNWo7qXH4DIy2SUHvpoFwfTA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [darwin] + + lightningcss-freebsd-x64@1.31.1: + resolution: {integrity: sha512-1RINmQKAItO6ISxYgPwszQE1BrsVU5aB45ho6O42mu96UiZBxEXsuQ7cJW4zs4CEodPUioj/QrXW1r9pLUM74A==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [freebsd] + + lightningcss-linux-arm-gnueabihf@1.31.1: + resolution: {integrity: sha512-OOCm2//MZJ87CdDK62rZIu+aw9gBv4azMJuA8/KB74wmfS3lnC4yoPHm0uXZ/dvNNHmnZnB8XLAZzObeG0nS1g==} + engines: {node: '>= 12.0.0'} + cpu: [arm] + os: [linux] + + lightningcss-linux-arm64-gnu@1.31.1: + resolution: {integrity: sha512-WKyLWztD71rTnou4xAD5kQT+982wvca7E6QoLpoawZ1gP9JM0GJj4Tp5jMUh9B3AitHbRZ2/H3W5xQmdEOUlLg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [linux] + + lightningcss-linux-arm64-musl@1.31.1: + resolution: {integrity: sha512-mVZ7Pg2zIbe3XlNbZJdjs86YViQFoJSpc41CbVmKBPiGmC4YrfeOyz65ms2qpAobVd7WQsbW4PdsSJEMymyIMg==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: 
[linux] + + lightningcss-linux-x64-gnu@1.31.1: + resolution: {integrity: sha512-xGlFWRMl+0KvUhgySdIaReQdB4FNudfUTARn7q0hh/V67PVGCs3ADFjw+6++kG1RNd0zdGRlEKa+T13/tQjPMA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + + lightningcss-linux-x64-musl@1.31.1: + resolution: {integrity: sha512-eowF8PrKHw9LpoZii5tdZwnBcYDxRw2rRCyvAXLi34iyeYfqCQNA9rmUM0ce62NlPhCvof1+9ivRaTY6pSKDaA==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [linux] + + lightningcss-win32-arm64-msvc@1.31.1: + resolution: {integrity: sha512-aJReEbSEQzx1uBlQizAOBSjcmr9dCdL3XuC/6HLXAxmtErsj2ICo5yYggg1qOODQMtnjNQv2UHb9NpOuFtYe4w==} + engines: {node: '>= 12.0.0'} + cpu: [arm64] + os: [win32] + + lightningcss-win32-x64-msvc@1.31.1: + resolution: {integrity: sha512-I9aiFrbd7oYHwlnQDqr1Roz+fTz61oDDJX7n9tYF9FJymH1cIN1DtKw3iYt6b8WZgEjoNwVSncwF4wx/ZedMhw==} + engines: {node: '>= 12.0.0'} + cpu: [x64] + os: [win32] + + lightningcss@1.31.1: + resolution: {integrity: sha512-l51N2r93WmGUye3WuFoN5k10zyvrVs0qfKBhyC5ogUQ6Ew6JUSswh78mbSO+IU3nTWsyOArqPCcShdQSadghBQ==} + engines: {node: '>= 12.0.0'} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + mime-db@1.52.0: + resolution: {integrity: 
sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + minimatch@3.1.3: + resolution: {integrity: sha512-M2GCs7Vk83NxkUyQV1bkABc4yxgz9kILhHImZiBPAZ9ybuvCb0/H7lEl5XvIg3g+9d4eNotkZA5IWwYl0tibaA==} + + minimatch@9.0.6: + resolution: {integrity: sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==} + engines: {node: '>=16 || 14 >=14.17'} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + node-releases@2.0.27: + resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==} + + optionator@0.9.4: + resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + path-exists@4.0.0: + resolution: 
{integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + + postcss-value-parser@4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} + engines: {node: ^10 || ^12 || >=14} + + prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + proxy-from-env@1.1.0: + resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + react-dom@19.2.4: + resolution: {integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==} + peerDependencies: + react: ^19.2.4 + + react-refresh@0.18.0: + resolution: {integrity: sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==} + engines: {node: '>=0.10.0'} + + react@19.2.4: + resolution: {integrity: sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==} + engines: {node: '>=0.10.0'} + + resolve-from@4.0.0: 
+ resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + rollup@4.59.0: + resolution: {integrity: sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + scheduler@0.27.0: + resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==} + + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + + semver@7.7.4: + resolution: {integrity: sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==} + engines: {node: '>=10'} + hasBin: true + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + tailwindcss@4.2.0: + resolution: {integrity: sha512-yYzTZ4++b7fNYxFfpnberEEKu43w44aqDMNM9MHMmcKuCH7lL8jJ4yJ7LGHv7rSwiqM0nkiobF9I6cLlpS2P7Q==} + + tapable@2.3.0: + resolution: {integrity: 
sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} + engines: {node: '>=6'} + + tinyglobby@0.2.15: + resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} + engines: {node: '>=12.0.0'} + + ts-api-utils@2.4.0: + resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==} + engines: {node: '>=18.12'} + peerDependencies: + typescript: '>=4.8.4' + + type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + typescript-eslint@8.56.0: + resolution: {integrity: sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 + typescript: '>=4.8.4 <6.0.0' + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + + undici-types@7.16.0: + resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} + + update-browserslist-db@1.2.3: + resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + vite@7.3.1: + resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + peerDependencies: + '@types/node': ^20.19.0 || >=22.12.0 + jiti: '>=1.21.0' + less: ^4.0.0 + lightningcss: ^1.21.0 + 
sass: ^1.70.0 + sass-embedded: ^1.70.0 + stylus: '>=0.54.8' + sugarss: ^5.0.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + + zod-validation-error@4.0.2: + resolution: {integrity: sha512-Q6/nZLe6jxuU80qb/4uJ4t5v2VEZ44lzQjPDhYJNztRQ4wyWc6VF3D3Kb/fAuPetZQnhS3hnajCf9CsWesghLQ==} + engines: {node: '>=18.0.0'} + peerDependencies: + zod: ^3.25.0 || ^4.0.0 + + zod@4.3.6: + resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==} + +snapshots: + + '@babel/code-frame@7.29.0': + dependencies: + '@babel/helper-validator-identifier': 7.28.5 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/compat-data@7.29.0': {} + + '@babel/core@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.1 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helpers': 7.28.6 + '@babel/parser': 7.29.0 + '@babel/template': 7.28.6 + '@babel/traverse': 7.29.0 + 
'@babel/types': 7.29.0 + '@jridgewell/remapping': 2.3.5 + convert-source-map: 2.0.0 + debug: 4.4.3 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/generator@7.29.1': + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + jsesc: 3.1.0 + + '@babel/helper-compilation-targets@7.28.6': + dependencies: + '@babel/compat-data': 7.29.0 + '@babel/helper-validator-option': 7.27.1 + browserslist: 4.28.1 + lru-cache: 5.1.1 + semver: 6.3.1 + + '@babel/helper-globals@7.28.0': {} + + '@babel/helper-module-imports@7.28.6': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-module-transforms@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-imports': 7.28.6 + '@babel/helper-validator-identifier': 7.28.5 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-plugin-utils@7.28.6': {} + + '@babel/helper-string-parser@7.27.1': {} + + '@babel/helper-validator-identifier@7.28.5': {} + + '@babel/helper-validator-option@7.27.1': {} + + '@babel/helpers@7.28.6': + dependencies: + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + + '@babel/parser@7.29.0': + dependencies: + '@babel/types': 7.29.0 + + '@babel/plugin-transform-react-jsx-self@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-react-jsx-source@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/template@7.28.6': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + + '@babel/traverse@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.1 + '@babel/helper-globals': 7.28.0 + 
'@babel/parser': 7.29.0 + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + '@babel/types@7.29.0': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.28.5 + + '@esbuild/aix-ppc64@0.27.3': + optional: true + + '@esbuild/android-arm64@0.27.3': + optional: true + + '@esbuild/android-arm@0.27.3': + optional: true + + '@esbuild/android-x64@0.27.3': + optional: true + + '@esbuild/darwin-arm64@0.27.3': + optional: true + + '@esbuild/darwin-x64@0.27.3': + optional: true + + '@esbuild/freebsd-arm64@0.27.3': + optional: true + + '@esbuild/freebsd-x64@0.27.3': + optional: true + + '@esbuild/linux-arm64@0.27.3': + optional: true + + '@esbuild/linux-arm@0.27.3': + optional: true + + '@esbuild/linux-ia32@0.27.3': + optional: true + + '@esbuild/linux-loong64@0.27.3': + optional: true + + '@esbuild/linux-mips64el@0.27.3': + optional: true + + '@esbuild/linux-ppc64@0.27.3': + optional: true + + '@esbuild/linux-riscv64@0.27.3': + optional: true + + '@esbuild/linux-s390x@0.27.3': + optional: true + + '@esbuild/linux-x64@0.27.3': + optional: true + + '@esbuild/netbsd-arm64@0.27.3': + optional: true + + '@esbuild/netbsd-x64@0.27.3': + optional: true + + '@esbuild/openbsd-arm64@0.27.3': + optional: true + + '@esbuild/openbsd-x64@0.27.3': + optional: true + + '@esbuild/openharmony-arm64@0.27.3': + optional: true + + '@esbuild/sunos-x64@0.27.3': + optional: true + + '@esbuild/win32-arm64@0.27.3': + optional: true + + '@esbuild/win32-ia32@0.27.3': + optional: true + + '@esbuild/win32-x64@0.27.3': + optional: true + + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.3(jiti@2.6.1))': + dependencies: + eslint: 9.39.3(jiti@2.6.1) + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.12.2': {} + + '@eslint/config-array@0.21.1': + dependencies: + '@eslint/object-schema': 2.1.7 + debug: 4.4.3 + minimatch: 3.1.3 + transitivePeerDependencies: + - supports-color + + 
'@eslint/config-helpers@0.4.2': + dependencies: + '@eslint/core': 0.17.0 + + '@eslint/core@0.17.0': + dependencies: + '@types/json-schema': 7.0.15 + + '@eslint/eslintrc@3.3.3': + dependencies: + ajv: 6.14.0 + debug: 4.4.3 + espree: 10.4.0 + globals: 14.0.0 + ignore: 5.3.2 + import-fresh: 3.3.1 + js-yaml: 4.1.1 + minimatch: 3.1.3 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@9.39.3': {} + + '@eslint/object-schema@2.1.7': {} + + '@eslint/plugin-kit@0.4.1': + dependencies: + '@eslint/core': 0.17.0 + levn: 0.4.1 + + '@humanfs/core@0.19.1': {} + + '@humanfs/node@0.16.7': + dependencies: + '@humanfs/core': 0.19.1 + '@humanwhocodes/retry': 0.4.3 + + '@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/retry@0.4.3': {} + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/remapping@2.3.5': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@rolldown/pluginutils@1.0.0-rc.3': {} + + '@rollup/rollup-android-arm-eabi@4.59.0': + optional: true + + '@rollup/rollup-android-arm64@4.59.0': + optional: true + + '@rollup/rollup-darwin-arm64@4.59.0': + optional: true + + '@rollup/rollup-darwin-x64@4.59.0': + optional: true + + '@rollup/rollup-freebsd-arm64@4.59.0': + optional: true + + '@rollup/rollup-freebsd-x64@4.59.0': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.59.0': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.59.0': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.59.0': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.59.0': + optional: true + + '@rollup/rollup-linux-loong64-gnu@4.59.0': + optional: true + + 
'@rollup/rollup-linux-loong64-musl@4.59.0': + optional: true + + '@rollup/rollup-linux-ppc64-gnu@4.59.0': + optional: true + + '@rollup/rollup-linux-ppc64-musl@4.59.0': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.59.0': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.59.0': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.59.0': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.59.0': + optional: true + + '@rollup/rollup-linux-x64-musl@4.59.0': + optional: true + + '@rollup/rollup-openbsd-x64@4.59.0': + optional: true + + '@rollup/rollup-openharmony-arm64@4.59.0': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.59.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.59.0': + optional: true + + '@rollup/rollup-win32-x64-gnu@4.59.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.59.0': + optional: true + + '@tailwindcss/node@4.2.0': + dependencies: + '@jridgewell/remapping': 2.3.5 + enhanced-resolve: 5.19.0 + jiti: 2.6.1 + lightningcss: 1.31.1 + magic-string: 0.30.21 + source-map-js: 1.2.1 + tailwindcss: 4.2.0 + + '@tailwindcss/oxide-android-arm64@4.2.0': + optional: true + + '@tailwindcss/oxide-darwin-arm64@4.2.0': + optional: true + + '@tailwindcss/oxide-darwin-x64@4.2.0': + optional: true + + '@tailwindcss/oxide-freebsd-x64@4.2.0': + optional: true + + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.0': + optional: true + + '@tailwindcss/oxide-linux-arm64-gnu@4.2.0': + optional: true + + '@tailwindcss/oxide-linux-arm64-musl@4.2.0': + optional: true + + '@tailwindcss/oxide-linux-x64-gnu@4.2.0': + optional: true + + '@tailwindcss/oxide-linux-x64-musl@4.2.0': + optional: true + + '@tailwindcss/oxide-wasm32-wasi@4.2.0': + optional: true + + '@tailwindcss/oxide-win32-arm64-msvc@4.2.0': + optional: true + + '@tailwindcss/oxide-win32-x64-msvc@4.2.0': + optional: true + + '@tailwindcss/oxide@4.2.0': + optionalDependencies: + '@tailwindcss/oxide-android-arm64': 4.2.0 + '@tailwindcss/oxide-darwin-arm64': 4.2.0 + 
'@tailwindcss/oxide-darwin-x64': 4.2.0 + '@tailwindcss/oxide-freebsd-x64': 4.2.0 + '@tailwindcss/oxide-linux-arm-gnueabihf': 4.2.0 + '@tailwindcss/oxide-linux-arm64-gnu': 4.2.0 + '@tailwindcss/oxide-linux-arm64-musl': 4.2.0 + '@tailwindcss/oxide-linux-x64-gnu': 4.2.0 + '@tailwindcss/oxide-linux-x64-musl': 4.2.0 + '@tailwindcss/oxide-wasm32-wasi': 4.2.0 + '@tailwindcss/oxide-win32-arm64-msvc': 4.2.0 + '@tailwindcss/oxide-win32-x64-msvc': 4.2.0 + + '@tailwindcss/vite@4.2.0(vite@7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1))': + dependencies: + '@tailwindcss/node': 4.2.0 + '@tailwindcss/oxide': 4.2.0 + tailwindcss: 4.2.0 + vite: 7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1) + + '@tanstack/query-core@5.90.20': {} + + '@tanstack/react-query@5.90.21(react@19.2.4)': + dependencies: + '@tanstack/query-core': 5.90.20 + react: 19.2.4 + + '@types/babel__core@7.20.5': + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + '@types/babel__generator': 7.27.0 + '@types/babel__template': 7.4.4 + '@types/babel__traverse': 7.28.0 + + '@types/babel__generator@7.27.0': + dependencies: + '@babel/types': 7.29.0 + + '@types/babel__template@7.4.4': + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + + '@types/babel__traverse@7.28.0': + dependencies: + '@babel/types': 7.29.0 + + '@types/estree@1.0.8': {} + + '@types/json-schema@7.0.15': {} + + '@types/node@24.10.13': + dependencies: + undici-types: 7.16.0 + + '@types/react-dom@19.2.3(@types/react@19.2.14)': + dependencies: + '@types/react': 19.2.14 + + '@types/react@19.2.14': + dependencies: + csstype: 3.2.3 + + '@typescript-eslint/eslint-plugin@8.56.0(@typescript-eslint/parser@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@eslint-community/regexpp': 4.12.2 + '@typescript-eslint/parser': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.56.0 + 
'@typescript-eslint/type-utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.56.0 + eslint: 9.39.3(jiti@2.6.1) + ignore: 7.0.5 + natural-compare: 1.4.0 + ts-api-utils: 2.4.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@typescript-eslint/scope-manager': 8.56.0 + '@typescript-eslint/types': 8.56.0 + '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.56.0 + debug: 4.4.3 + eslint: 9.39.3(jiti@2.6.1) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/project-service@8.56.0(typescript@5.9.3)': + dependencies: + '@typescript-eslint/tsconfig-utils': 8.56.0(typescript@5.9.3) + '@typescript-eslint/types': 8.56.0 + debug: 4.4.3 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@8.56.0': + dependencies: + '@typescript-eslint/types': 8.56.0 + '@typescript-eslint/visitor-keys': 8.56.0 + + '@typescript-eslint/tsconfig-utils@8.56.0(typescript@5.9.3)': + dependencies: + typescript: 5.9.3 + + '@typescript-eslint/type-utils@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@typescript-eslint/types': 8.56.0 + '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + debug: 4.4.3 + eslint: 9.39.3(jiti@2.6.1) + ts-api-utils: 2.4.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@8.56.0': {} + + '@typescript-eslint/typescript-estree@8.56.0(typescript@5.9.3)': + dependencies: + '@typescript-eslint/project-service': 8.56.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 
8.56.0(typescript@5.9.3) + '@typescript-eslint/types': 8.56.0 + '@typescript-eslint/visitor-keys': 8.56.0 + debug: 4.4.3 + minimatch: 9.0.6 + semver: 7.7.4 + tinyglobby: 0.2.15 + ts-api-utils: 2.4.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.3(jiti@2.6.1)) + '@typescript-eslint/scope-manager': 8.56.0 + '@typescript-eslint/types': 8.56.0 + '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) + eslint: 9.39.3(jiti@2.6.1) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/visitor-keys@8.56.0': + dependencies: + '@typescript-eslint/types': 8.56.0 + eslint-visitor-keys: 5.0.1 + + '@vitejs/plugin-react@5.1.4(vite@7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1))': + dependencies: + '@babel/core': 7.29.0 + '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.29.0) + '@rolldown/pluginutils': 1.0.0-rc.3 + '@types/babel__core': 7.20.5 + react-refresh: 0.18.0 + vite: 7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1) + transitivePeerDependencies: + - supports-color + + acorn-jsx@5.3.2(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + + acorn@8.16.0: {} + + ajv@6.14.0: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + argparse@2.0.1: {} + + asynckit@0.4.0: {} + + autoprefixer@10.4.24(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + caniuse-lite: 1.0.30001772 + fraction.js: 5.3.4 + picocolors: 1.1.1 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + axios@1.13.5: + dependencies: + follow-redirects: 1.15.11 + form-data: 4.0.5 + proxy-from-env: 1.1.0 + transitivePeerDependencies: + - 
debug + + balanced-match@1.0.2: {} + + balanced-match@4.0.4: {} + + baseline-browser-mapping@2.10.0: {} + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@5.0.3: + dependencies: + balanced-match: 4.0.4 + + browserslist@4.28.1: + dependencies: + baseline-browser-mapping: 2.10.0 + caniuse-lite: 1.0.30001772 + electron-to-chromium: 1.5.302 + node-releases: 2.0.27 + update-browserslist-db: 1.2.3(browserslist@4.28.1) + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + callsites@3.1.0: {} + + caniuse-lite@1.0.30001772: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + combined-stream@1.0.8: + dependencies: + delayed-stream: 1.0.0 + + concat-map@0.0.1: {} + + convert-source-map@2.0.0: {} + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + csstype@3.2.3: {} + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + deep-is@0.1.4: {} + + delayed-stream@1.0.0: {} + + detect-libc@2.1.2: {} + + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + electron-to-chromium@1.5.302: {} + + enhanced-resolve@5.19.0: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.3.0 + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + esbuild@0.27.3: + optionalDependencies: + '@esbuild/aix-ppc64': 0.27.3 + '@esbuild/android-arm': 0.27.3 + '@esbuild/android-arm64': 0.27.3 + '@esbuild/android-x64': 0.27.3 + '@esbuild/darwin-arm64': 0.27.3 + '@esbuild/darwin-x64': 0.27.3 + '@esbuild/freebsd-arm64': 0.27.3 + '@esbuild/freebsd-x64': 0.27.3 + '@esbuild/linux-arm': 0.27.3 + '@esbuild/linux-arm64': 
0.27.3 + '@esbuild/linux-ia32': 0.27.3 + '@esbuild/linux-loong64': 0.27.3 + '@esbuild/linux-mips64el': 0.27.3 + '@esbuild/linux-ppc64': 0.27.3 + '@esbuild/linux-riscv64': 0.27.3 + '@esbuild/linux-s390x': 0.27.3 + '@esbuild/linux-x64': 0.27.3 + '@esbuild/netbsd-arm64': 0.27.3 + '@esbuild/netbsd-x64': 0.27.3 + '@esbuild/openbsd-arm64': 0.27.3 + '@esbuild/openbsd-x64': 0.27.3 + '@esbuild/openharmony-arm64': 0.27.3 + '@esbuild/sunos-x64': 0.27.3 + '@esbuild/win32-arm64': 0.27.3 + '@esbuild/win32-ia32': 0.27.3 + '@esbuild/win32-x64': 0.27.3 + + escalade@3.2.0: {} + + escape-string-regexp@4.0.0: {} + + eslint-plugin-react-hooks@7.0.1(eslint@9.39.3(jiti@2.6.1)): + dependencies: + '@babel/core': 7.29.0 + '@babel/parser': 7.29.0 + eslint: 9.39.3(jiti@2.6.1) + hermes-parser: 0.25.1 + zod: 4.3.6 + zod-validation-error: 4.0.2(zod@4.3.6) + transitivePeerDependencies: + - supports-color + + eslint-plugin-react-refresh@0.4.26(eslint@9.39.3(jiti@2.6.1)): + dependencies: + eslint: 9.39.3(jiti@2.6.1) + + eslint-scope@8.4.0: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint-visitor-keys@4.2.1: {} + + eslint-visitor-keys@5.0.1: {} + + eslint@9.39.3(jiti@2.6.1): + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.3(jiti@2.6.1)) + '@eslint-community/regexpp': 4.12.2 + '@eslint/config-array': 0.21.1 + '@eslint/config-helpers': 0.4.2 + '@eslint/core': 0.17.0 + '@eslint/eslintrc': 3.3.3 + '@eslint/js': 9.39.3 + '@eslint/plugin-kit': 0.4.1 + '@humanfs/node': 0.16.7 + '@humanwhocodes/module-importer': 1.0.1 + '@humanwhocodes/retry': 0.4.3 + '@types/estree': 1.0.8 + ajv: 6.14.0 + chalk: 4.1.2 + cross-spawn: 7.0.6 + debug: 4.4.3 + escape-string-regexp: 4.0.0 + eslint-scope: 8.4.0 + eslint-visitor-keys: 4.2.1 + espree: 10.4.0 + esquery: 1.7.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 8.0.0 + find-up: 5.0.0 + glob-parent: 6.0.2 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + 
json-stable-stringify-without-jsonify: 1.0.1 + lodash.merge: 4.6.2 + minimatch: 3.1.3 + natural-compare: 1.4.0 + optionator: 0.9.4 + optionalDependencies: + jiti: 2.6.1 + transitivePeerDependencies: + - supports-color + + espree@10.4.0: + dependencies: + acorn: 8.16.0 + acorn-jsx: 5.3.2(acorn@8.16.0) + eslint-visitor-keys: 4.2.1 + + esquery@1.7.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: {} + + esutils@2.0.3: {} + + fast-deep-equal@3.1.3: {} + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fdir@6.5.0(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + + file-entry-cache@8.0.0: + dependencies: + flat-cache: 4.0.1 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + flat-cache@4.0.1: + dependencies: + flatted: 3.3.3 + keyv: 4.5.4 + + flatted@3.3.3: {} + + follow-redirects@1.15.11: {} + + form-data@4.0.5: + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 + mime-types: 2.1.35 + + fraction.js@5.3.4: {} + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + gensync@1.0.0-beta.2: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + globals@14.0.0: {} + + globals@16.5.0: {} + + gopd@1.2.0: {} + + graceful-fs@4.2.11: {} + + has-flag@4.0.0: {} + + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + hermes-estree@0.25.1: {} + + hermes-parser@0.25.1: + dependencies: + hermes-estree: 0.25.1 + + ignore@5.3.2: {} + + ignore@7.0.5: 
{} + + import-fresh@3.3.1: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + imurmurhash@0.1.4: {} + + is-extglob@2.1.1: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + isexe@2.0.0: {} + + jiti@2.6.1: {} + + js-tokens@4.0.0: {} + + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + + jsesc@3.1.0: {} + + json-buffer@3.0.1: {} + + json-schema-traverse@0.4.1: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + json5@2.2.3: {} + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + lightningcss-android-arm64@1.31.1: + optional: true + + lightningcss-darwin-arm64@1.31.1: + optional: true + + lightningcss-darwin-x64@1.31.1: + optional: true + + lightningcss-freebsd-x64@1.31.1: + optional: true + + lightningcss-linux-arm-gnueabihf@1.31.1: + optional: true + + lightningcss-linux-arm64-gnu@1.31.1: + optional: true + + lightningcss-linux-arm64-musl@1.31.1: + optional: true + + lightningcss-linux-x64-gnu@1.31.1: + optional: true + + lightningcss-linux-x64-musl@1.31.1: + optional: true + + lightningcss-win32-arm64-msvc@1.31.1: + optional: true + + lightningcss-win32-x64-msvc@1.31.1: + optional: true + + lightningcss@1.31.1: + dependencies: + detect-libc: 2.1.2 + optionalDependencies: + lightningcss-android-arm64: 1.31.1 + lightningcss-darwin-arm64: 1.31.1 + lightningcss-darwin-x64: 1.31.1 + lightningcss-freebsd-x64: 1.31.1 + lightningcss-linux-arm-gnueabihf: 1.31.1 + lightningcss-linux-arm64-gnu: 1.31.1 + lightningcss-linux-arm64-musl: 1.31.1 + lightningcss-linux-x64-gnu: 1.31.1 + lightningcss-linux-x64-musl: 1.31.1 + lightningcss-win32-arm64-msvc: 1.31.1 + lightningcss-win32-x64-msvc: 1.31.1 + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.merge@4.6.2: {} + + lru-cache@5.1.1: + dependencies: + yallist: 3.1.1 + + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + math-intrinsics@1.1.0: {} + + mime-db@1.52.0: {} + + 
mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + minimatch@3.1.3: + dependencies: + brace-expansion: 1.1.12 + + minimatch@9.0.6: + dependencies: + brace-expansion: 5.0.3 + + ms@2.1.3: {} + + nanoid@3.3.11: {} + + natural-compare@1.4.0: {} + + node-releases@2.0.27: {} + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + path-exists@4.0.0: {} + + path-key@3.1.1: {} + + picocolors@1.1.1: {} + + picomatch@4.0.3: {} + + postcss-value-parser@4.2.0: {} + + postcss@8.5.6: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + prelude-ls@1.2.1: {} + + proxy-from-env@1.1.0: {} + + punycode@2.3.1: {} + + react-dom@19.2.4(react@19.2.4): + dependencies: + react: 19.2.4 + scheduler: 0.27.0 + + react-refresh@0.18.0: {} + + react@19.2.4: {} + + resolve-from@4.0.0: {} + + rollup@4.59.0: + dependencies: + '@types/estree': 1.0.8 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.59.0 + '@rollup/rollup-android-arm64': 4.59.0 + '@rollup/rollup-darwin-arm64': 4.59.0 + '@rollup/rollup-darwin-x64': 4.59.0 + '@rollup/rollup-freebsd-arm64': 4.59.0 + '@rollup/rollup-freebsd-x64': 4.59.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.59.0 + '@rollup/rollup-linux-arm-musleabihf': 4.59.0 + '@rollup/rollup-linux-arm64-gnu': 4.59.0 + '@rollup/rollup-linux-arm64-musl': 4.59.0 + '@rollup/rollup-linux-loong64-gnu': 4.59.0 + '@rollup/rollup-linux-loong64-musl': 4.59.0 + '@rollup/rollup-linux-ppc64-gnu': 4.59.0 + '@rollup/rollup-linux-ppc64-musl': 4.59.0 + '@rollup/rollup-linux-riscv64-gnu': 4.59.0 + '@rollup/rollup-linux-riscv64-musl': 4.59.0 + '@rollup/rollup-linux-s390x-gnu': 4.59.0 + '@rollup/rollup-linux-x64-gnu': 4.59.0 + '@rollup/rollup-linux-x64-musl': 4.59.0 + 
'@rollup/rollup-openbsd-x64': 4.59.0 + '@rollup/rollup-openharmony-arm64': 4.59.0 + '@rollup/rollup-win32-arm64-msvc': 4.59.0 + '@rollup/rollup-win32-ia32-msvc': 4.59.0 + '@rollup/rollup-win32-x64-gnu': 4.59.0 + '@rollup/rollup-win32-x64-msvc': 4.59.0 + fsevents: 2.3.3 + + scheduler@0.27.0: {} + + semver@6.3.1: {} + + semver@7.7.4: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + source-map-js@1.2.1: {} + + strip-json-comments@3.1.1: {} + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + tailwindcss@4.2.0: {} + + tapable@2.3.0: {} + + tinyglobby@0.2.15: + dependencies: + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + + ts-api-utils@2.4.0(typescript@5.9.3): + dependencies: + typescript: 5.9.3 + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + typescript-eslint@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3): + dependencies: + '@typescript-eslint/eslint-plugin': 8.56.0(@typescript-eslint/parser@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/parser': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + eslint: 9.39.3(jiti@2.6.1) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + typescript@5.9.3: {} + + undici-types@7.16.0: {} + + update-browserslist-db@1.2.3(browserslist@4.28.1): + dependencies: + browserslist: 4.28.1 + escalade: 3.2.0 + picocolors: 1.1.1 + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + vite@7.3.1(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1): + dependencies: + esbuild: 0.27.3 + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + postcss: 8.5.6 + rollup: 4.59.0 + tinyglobby: 0.2.15 + optionalDependencies: + '@types/node': 24.10.13 + fsevents: 2.3.3 + jiti: 2.6.1 + lightningcss: 1.31.1 + + which@2.0.2: + 
dependencies: + isexe: 2.0.0 + + word-wrap@1.2.5: {} + + yallist@3.1.1: {} + + yocto-queue@0.1.0: {} + + zod-validation-error@4.0.2(zod@4.3.6): + dependencies: + zod: 4.3.6 + + zod@4.3.6: {} diff --git a/frontend/public/vite.svg b/frontend/public/vite.svg new file mode 100644 index 0000000..e7b8dfb --- /dev/null +++ b/frontend/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/App.css b/frontend/src/App.css new file mode 100644 index 0000000..b9d355d --- /dev/null +++ b/frontend/src/App.css @@ -0,0 +1,42 @@ +#root { + max-width: 1280px; + margin: 0 auto; + padding: 2rem; + text-align: center; +} + +.logo { + height: 6em; + padding: 1.5em; + will-change: filter; + transition: filter 300ms; +} +.logo:hover { + filter: drop-shadow(0 0 2em #646cffaa); +} +.logo.react:hover { + filter: drop-shadow(0 0 2em #61dafbaa); +} + +@keyframes logo-spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} + +@media (prefers-reduced-motion: no-preference) { + a:nth-of-type(2) .logo { + animation: logo-spin infinite 20s linear; + } +} + +.card { + padding: 2em; +} + +.read-the-docs { + color: #888; +} diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx new file mode 100644 index 0000000..de5f2db --- /dev/null +++ b/frontend/src/App.tsx @@ -0,0 +1,17 @@ +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import ChatInterface from './components/ChatInterface' + +const queryClient = new QueryClient() + +function App() { + return ( + +
+      <QueryClientProvider client={queryClient}>
+        <ChatInterface />
+      </QueryClientProvider>
+ ) +} + +export default App + diff --git a/frontend/src/assets/react.svg b/frontend/src/assets/react.svg new file mode 100644 index 0000000..6c87de9 --- /dev/null +++ b/frontend/src/assets/react.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/components/ChatInterface.tsx b/frontend/src/components/ChatInterface.tsx new file mode 100644 index 0000000..4c1043b --- /dev/null +++ b/frontend/src/components/ChatInterface.tsx @@ -0,0 +1,125 @@ +import { useState, useRef, useEffect } from 'react'; +import { useMutation } from '@tanstack/react-query'; +import axios from 'axios'; + +type Message = { + id: string; + text: string; + sender: 'user' | 'ai'; +}; + +export default function ChatInterface() { + const [messages, setMessages] = useState([]); + const [input, setInput] = useState(''); + const messagesEndRef = useRef(null); + + const scrollToBottom = () => { + messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); + }; + + useEffect(() => { + scrollToBottom(); + }, [messages]); + + const chatMutation = useMutation({ + mutationFn: async (messageText: string) => { + const response = await axios.post('http://localhost:8000/chat', { + message: messageText, + }); + return response.data; + }, + onSuccess: (data) => { + setMessages((prev) => [ + ...prev, + { id: Date.now().toString(), text: data.response, sender: 'ai' }, + ]); + }, + onError: () => { + setMessages((prev) => [ + ...prev, + { id: Date.now().toString(), text: "Error: Could not connect to the backend.", sender: 'ai' }, + ]); + }, + }); + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + if (!input.trim() || chatMutation.isPending) return; + + const userMessage = input.trim(); + setInput(''); + setMessages((prev) => [ + ...prev, + { id: Date.now().toString(), text: userMessage, sender: 'user' }, + ]); + + chatMutation.mutate(userMessage); + }; + + return ( +
+ {/* Header */} +
+

Sam Rolfe - AI

+

Ask about skills, experience, hobbies

+
+ + {/* Message Area */} +
+ {messages.length === 0 && ( +
+ Send a message to start the conversation! +
+ )} + {messages.map((msg) => ( +
+
+ {msg.text} +
+
+ ))} + {chatMutation.isPending && ( +
+
+
+
+
+
+
+ )} +
+
+ + {/* Input Area */} +
+
+ setInput(e.target.value)} + placeholder="Type your message..." + className="flex-1 bg-gray-800 text-white border border-gray-700 rounded-lg px-4 py-2 focus:outline-none focus:border-blue-500 transition-colors" + disabled={chatMutation.isPending} + /> + +
+
+
+ ); +} + diff --git a/frontend/src/index.css b/frontend/src/index.css new file mode 100644 index 0000000..d4f5523 --- /dev/null +++ b/frontend/src/index.css @@ -0,0 +1,5 @@ +@import "tailwindcss"; + +@theme { + --font-sans: system-ui, Avenir, Helvetica, Arial, sans-serif; +} \ No newline at end of file diff --git a/frontend/src/main.tsx b/frontend/src/main.tsx new file mode 100644 index 0000000..bef5202 --- /dev/null +++ b/frontend/src/main.tsx @@ -0,0 +1,10 @@ +import { StrictMode } from 'react' +import { createRoot } from 'react-dom/client' +import './index.css' +import App from './App.tsx' + +createRoot(document.getElementById('root')!).render( + + + , +) diff --git a/frontend/tsconfig.app.json b/frontend/tsconfig.app.json new file mode 100644 index 0000000..a9b5a59 --- /dev/null +++ b/frontend/tsconfig.app.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", + "target": "ES2022", + "useDefineForClassFields": true, + "lib": ["ES2022", "DOM", "DOM.Iterable"], + "module": "ESNext", + "types": ["vite/client"], + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "react-jsx", + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "erasableSyntaxOnly": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["src"] +} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..1ffef60 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,7 @@ +{ + "files": [], + "references": [ + { "path": "./tsconfig.app.json" }, + { "path": "./tsconfig.node.json" } + ] +} diff --git a/frontend/tsconfig.node.json b/frontend/tsconfig.node.json new file mode 100644 index 0000000..8a67f62 --- /dev/null +++ b/frontend/tsconfig.node.json @@ -0,0 +1,26 @@ 
+{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", + "target": "ES2023", + "lib": ["ES2023"], + "module": "ESNext", + "types": ["node"], + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "moduleDetection": "force", + "noEmit": true, + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "erasableSyntaxOnly": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["vite.config.ts"] +} diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 100644 index 0000000..b424f89 --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from 'vite' +import react from '@vitejs/plugin-react' +import tailwindcss from '@tailwindcss/vite' + +// https://vite.dev/config/ +export default defineConfig({ + plugins: [ + tailwindcss(), + react() + ], +}) \ No newline at end of file diff --git a/knowledge_service/Dockerfile b/knowledge_service/Dockerfile new file mode 100644 index 0000000..bf3bd99 --- /dev/null +++ b/knowledge_service/Dockerfile @@ -0,0 +1,29 @@ +FROM python:3.11-slim + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + libstdc++6 \ + gcc \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Create directories +RUN mkdir -p /app/packages /app/code + +# Install Python packages to a specific location +WORKDIR /app +COPY requirements.txt . +RUN pip install --target=/app/packages -r requirements.txt + +# Copy initial code (will be overridden by volume mount in dev) +COPY . 
/app/code/ + +# Set Python to find packages in /app/packages +ENV PYTHONPATH=/app/packages +ENV PYTHONUNBUFFERED=1 + +WORKDIR /app/code +EXPOSE 8080 + +CMD ["python3", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8080"] + diff --git a/knowledge_service/data/hobbies.md b/knowledge_service/data/hobbies.md new file mode 100644 index 0000000..1987339 --- /dev/null +++ b/knowledge_service/data/hobbies.md @@ -0,0 +1,15 @@ +# Sam's Hobbies + +## Music +- Enjoys playing guitar and synthesizers. +- Collects vintage vinyl. + +## Gardening +- Maintains a local vegetable patch. +- Focuses on organic heirloom tomatoes. + +## Skiing +- Advanced skier, prefers off-piste and backcountry in the Alps. + +## Art +- Digital illustration and oil painting. diff --git a/knowledge_service/docker-compose.yml b/knowledge_service/docker-compose.yml new file mode 100644 index 0000000..e2a8743 --- /dev/null +++ b/knowledge_service/docker-compose.yml @@ -0,0 +1,24 @@ +services: + knowledge-service: + build: . 
+ image: sam/knowledge-service:latest + container_name: knowledge-service + ports: + - "8080:8080" + volumes: + # Only mount the code directory, not packages + - ./data:/app/code/data + - ./chroma_db:/app/code/chroma_db + - ./main.py:/app/code/main.py:ro # Read-only mount for safety + environment: + - PYTHONUNBUFFERED=1 + - OPENROUTER_API_KEY=${OPENROUTER_API_KEY} + - PYTHONPATH=/app/packages + networks: + - ai-mesh + restart: unless-stopped + +networks: + ai-mesh: + external: true + diff --git a/knowledge_service/gitea_scraper.py b/knowledge_service/gitea_scraper.py new file mode 100644 index 0000000..dd0fda6 --- /dev/null +++ b/knowledge_service/gitea_scraper.py @@ -0,0 +1,121 @@ +import os +import httpx +import logging +from typing import List, Dict, Optional +from dataclasses import dataclass +from datetime import datetime + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +@dataclass +class RepoMetadata: + name: str + description: str + url: str + default_branch: str + updated_at: str + language: Optional[str] + +class GiteaScraper: + def __init__(self, base_url: str, token: str, username: str = "sam"): + self.base_url = base_url.rstrip("/") + self.token = token + self.username = username + self.headers = {"Authorization": f"token {token}"} + + def get_user_repos(self) -> List[RepoMetadata]: + """Fetch all repositories for the user.""" + repos = [] + page = 1 + + while True: + url = f"{self.base_url}/api/v1/users/{self.username}/repos?page={page}&limit=50" + + try: + response = httpx.get(url, headers=self.headers, timeout=30.0) + response.raise_for_status() + + data = response.json() + if not data: + break + + for repo in data: + repos.append(RepoMetadata( + name=repo["name"], + description=repo.get("description", ""), + url=repo["html_url"], + default_branch=repo["default_branch"], + updated_at=repo["updated_at"], + language=repo.get("language") + )) + + logger.info(f"Fetched page {page}, got {len(data)} repos") + page += 1 + + 
except Exception as e: + logger.error(f"Error fetching repos: {e}") + break + + return repos + + def get_readme(self, repo_name: str) -> str: + """Fetch README content for a repository.""" + # Try common README filenames + readme_names = ["README.md", "readme.md", "Readme.md", "README.rst"] + + for readme_name in readme_names: + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{readme_name}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.warning(f"Failed to fetch {readme_name}: {e}") + continue + + return "" + + def get_repo_files(self, repo_name: str, path: str = "") -> List[Dict]: + """List files in a repository directory.""" + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/contents/{path}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + response.raise_for_status() + return response.json() + except Exception as e: + logger.error(f"Error listing files in {repo_name}/{path}: {e}") + return [] + + def get_file_content(self, repo_name: str, filepath: str) -> str: + """Fetch content of a specific file.""" + url = f"{self.base_url}/api/v1/repos/{self.username}/{repo_name}/raw/{filepath}" + + try: + response = httpx.get(url, headers=self.headers, timeout=10.0) + if response.status_code == 200: + return response.text + except Exception as e: + logger.error(f"Error fetching file {filepath}: {e}") + + return "" + +# Test function +if __name__ == "__main__": + scraper = GiteaScraper( + base_url=os.getenv("GITEA_URL", "https://gitea.lab.audasmedia.com.au"), + token=os.getenv("GITEA_TOKEN", ""), + username=os.getenv("GITEA_USERNAME", "sam") + ) + + repos = scraper.get_user_repos() + print(f"Found {len(repos)} repositories") + + for repo in repos[:3]: # Test with first 3 + print(f"\nRepo: {repo.name}") + readme = scraper.get_readme(repo.name) + if readme: + print(f"README preview: 
{readme[:200]}...") + diff --git a/knowledge_service/knowledge_agent_plan.md b/knowledge_service/knowledge_agent_plan.md new file mode 100644 index 0000000..c4268b6 --- /dev/null +++ b/knowledge_service/knowledge_agent_plan.md @@ -0,0 +1,56 @@ +# GOAL + +Build a \"Deep Knowledge Agent\" (DKA) that acts as a secure, +quarantined bridge between the Chat Gateway and private data sources. + +# ARCHITECTURE OVERVIEW + +## Layers + +1. Public Gateway: FastAPI (The \"Voice\"). +2. Orchestration Layer: LangGraph Supervisor (The \"Router\"). +3. Quarantined Agent: DKA / Librarian (The \"Keeper of Secrets\"). + - Strictly Read-Only. + - Accesses ChromaDB and Media stores. +4. Specialist Agent: Opencode (The \"Engineer\"). + +## Data Sources (The \"Knowledge Mesh\") + +- [ ] **Code**: Gitea (Repos, Markdown docs). +- [ ] **Notes**: Trilium Next, Obsidian, Flatnotes, HedgeDoc. +- [ ] **Wiki**: DokuWiki. +- [ ] **Inventory**: HomeBox (Physical gear, photos). +- [ ] **Tasks**: Vikunja. +- [ ] **Media**: Immich (Photos/Videos metadata via Gemini Vision). + +## Agent Tooling & Orchestration + +- [ ] **Orchestrators**: CAO CLI, Agent Pipe. +- [ ] **External Agents**: Goose, Aider, Opencode (Specialist). + +# COMPONENT DETAILS + +## The Librarian (DKA - LangGraph) + +- Purpose: Semantic retrieval and data synthesis from vectors. +- Tools: + - `query_chroma`: Search the vector database. + - `fetch_media_link`: Returns a signed URL/path for Immich/HomeBox + images. +- Constraints: + - NO `bash` or `write` tools. + +## The Ingestion Pipeline (Airflow/Custom Python) + +- [ ] **Multi-Source Scrapers**: API-based (Gitea, Immich) and + File-based (Obsidian). +- [ ] **Vision Integration**: Gemini analyzes Immich photos to create + searchable text descriptions. +- [ ] **Storage**: ChromaDB (Vectors) + PostgreSQL (Metadata/Hashes). + +# [TODO]{.todo .TODO} LIST \[0/4\] {#list-04} + +- [ ] Create \'knowledge~service~\' directory. +- [ ] Implement `test_rag.py` (Hello World retrieval). 
+- [ ] Build basic scraper for `hobbies.org`. +- [ ] Integrate DKA logic into the FastAPI Gateway. diff --git a/knowledge_service/knowledge_agent_plan.org b/knowledge_service/knowledge_agent_plan.org new file mode 100644 index 0000000..5a0a21e --- /dev/null +++ b/knowledge_service/knowledge_agent_plan.org @@ -0,0 +1,47 @@ +#+TITLE: Phase 3: Knowledge Engine & Agent Orchestration +#+AUTHOR: Giordano (via opencode) +#+OPTIONS: toc:2 + +* GOAL +Build a "Deep Knowledge Agent" (DKA) that acts as a secure, quarantined bridge between the Chat Gateway and private data sources. + +* ARCHITECTURE OVERVIEW +** Layers +1. Public Gateway: FastAPI (The "Voice"). +2. Orchestration Layer: LangGraph Supervisor (The "Router"). +3. Quarantined Agent: DKA / Librarian (The "Keeper of Secrets"). + - Strictly Read-Only. + - Accesses ChromaDB and Media stores. +4. Specialist Agent: Opencode (The "Engineer"). + +** Data Sources (The "Knowledge Mesh") +- [ ] *Code*: Gitea (Repos, Markdown docs). +- [ ] *Notes*: Trilium Next, Obsidian, Flatnotes, HedgeDoc. +- [ ] *Wiki*: DokuWiki. +- [ ] *Inventory*: HomeBox (Physical gear, photos). +- [ ] *Tasks*: Vikunja. +- [ ] *Media*: Immich (Photos/Videos metadata via Gemini Vision). + +** Agent Tooling & Orchestration +- [ ] *Orchestrators*: CAO CLI, Agent Pipe. +- [ ] *External Agents*: Goose, Aider, Opencode (Specialist). + +* COMPONENT DETAILS +** The Librarian (DKA - LangGraph) +- Purpose: Semantic retrieval and data synthesis from vectors. +- Tools: + - ~query_chroma~: Search the vector database. + - ~fetch_media_link~: Returns a signed URL/path for Immich/HomeBox images. +- Constraints: + - NO ~bash~ or ~write~ tools. + +** The Ingestion Pipeline (Airflow/Custom Python) +- [ ] *Multi-Source Scrapers*: API-based (Gitea, Immich) and File-based (Obsidian). +- [ ] *Vision Integration*: Gemini analyzes Immich photos to create searchable text descriptions. +- [ ] *Storage*: ChromaDB (Vectors) + PostgreSQL (Metadata/Hashes). 
+ +* TODO LIST [0/4] +- [ ] Create 'knowledge_service' directory. +- [ ] Implement ~test_rag.py~ (Hello World retrieval). +- [ ] Build basic scraper for ~hobbies.org~. +- [ ] Integrate DKA logic into the FastAPI Gateway. diff --git a/knowledge_service/main.py b/knowledge_service/main.py new file mode 100644 index 0000000..d199425 --- /dev/null +++ b/knowledge_service/main.py @@ -0,0 +1,52 @@ +from fastapi import FastAPI +from pydantic import BaseModel +from langchain_community.document_loaders import TextLoader +from langchain_openai import OpenAIEmbeddings +from langchain_community.vectorstores import Chroma +from langchain_text_splitters import RecursiveCharacterTextSplitter +import os +import logging +import sys + +logging.basicConfig(level=logging.INFO, stream=sys.stdout) +logger = logging.getLogger(__name__) + +app = FastAPI() +vector_db = None + +# Voyage-2 embeddings via OpenRouter API +embeddings = OpenAIEmbeddings( + model="openai/text-embedding-3-small", + openai_api_base="https://openrouter.ai/api/v1", + openai_api_key=os.getenv("OPENROUTER_API_KEY") +) + +@app.on_event("startup") +async def startup_event(): + global vector_db + data_path = "./data/hobbies.md" + if os.path.exists(data_path): + try: + loader = TextLoader(data_path) + documents = loader.load() + text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50) + chunks = text_splitter.split_documents(documents) + vector_db = Chroma.from_documents(documents=chunks, embedding=embeddings, persist_directory="./chroma_db") + logger.info("Librarian: ChromaDB is loaded with openAi embeddings.") + except Exception as e: + logger.error(f"Librarian: DB error: {str(e)}") + else: + logger.warning(f"Librarian: Missing data file at {data_path}") + +@app.get("/health") +async def health(): + return {"status": "ready", "vectors_loaded": vector_db is not None} + +class QueryRequest(BaseModel): + question: str + +@app.post("/query") +async def query_knowledge(request: QueryRequest): + if not 
vector_db: return {"context": ""} + results = vector_db.similarity_search(request.question, k=2) + return {"context": "\n".join([res.page_content for res in results])} diff --git a/knowledge_service/requirements.txt b/knowledge_service/requirements.txt new file mode 100644 index 0000000..dca3bac --- /dev/null +++ b/knowledge_service/requirements.txt @@ -0,0 +1,7 @@ +fastapi +uvicorn +langchain +langchain-community +langchain-openai +langchain-text-splitters +chromadb diff --git a/langgraph_service/Dockerfile b/langgraph_service/Dockerfile new file mode 100644 index 0000000..eccd9d6 --- /dev/null +++ b/langgraph_service/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.11-slim + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + gcc \ + g++ \ + && rm -rf /var/lib/apt/lists/* + +# Create app directory +WORKDIR /app + +# Copy requirements +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +# Copy code +COPY . . + +EXPOSE 8090 + +CMD ["python3", "-m", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8090"] + diff --git a/langgraph_service/main.py b/langgraph_service/main.py new file mode 100644 index 0000000..c716726 --- /dev/null +++ b/langgraph_service/main.py @@ -0,0 +1,80 @@ +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from pydantic import BaseModel +from supervisor_agent import process_query +import logging +import sys + +logging.basicConfig(level=logging.INFO, stream=sys.stdout) +logger = logging.getLogger(__name__) + +app = FastAPI(title="LangGraph Supervisor Service") + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +class QueryRequest(BaseModel): + query: str + +class QueryResponse(BaseModel): + response: str + agent_used: str + context: dict + +@app.get("/health") +async def health(): + return {"status": "healthy", "service": "langgraph-supervisor"} + +@app.post("/query", 
response_model=QueryResponse) +async def query_supervisor(request: QueryRequest): + """Main entry point for agent orchestration.""" + logger.info(f"Received query: {request.query}") + + try: + result = await process_query(request.query) + + return QueryResponse( + response=result["response"], + agent_used=result["context"].get("source", "unknown"), + context=result["context"] + ) + except Exception as e: + logger.error(f"Error processing query: {e}") + return QueryResponse( + response="Error processing your request", + agent_used="error", + context={"error": str(e)} + ) + +@app.get("/agents") +async def list_agents(): + """List available specialist agents.""" + return { + "agents": [ + { + "name": "librarian", + "description": "Queries the knowledge base for semantic information", + "triggers": ["repo", "code", "git", "hobby", "about", "skill"] + }, + { + "name": "opencode", + "description": "Handles coding tasks and file modifications", + "triggers": ["write", "edit", "create", "fix", "implement"] + }, + { + "name": "brain", + "description": "General LLM for reasoning and generation", + "triggers": ["default", "general questions"] + } + ] + } + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8090) + diff --git a/langgraph_service/requirements.txt b/langgraph_service/requirements.txt new file mode 100644 index 0000000..b72b5f0 --- /dev/null +++ b/langgraph_service/requirements.txt @@ -0,0 +1,9 @@ +fastapi +uvicorn +langgraph +langchain +langchain-community +langchain-openai +httpx +pydantic + diff --git a/langgraph_service/supervisor_agent.py b/langgraph_service/supervisor_agent.py new file mode 100644 index 0000000..e330660 --- /dev/null +++ b/langgraph_service/supervisor_agent.py @@ -0,0 +1,153 @@ +from typing import TypedDict, Annotated, Sequence +from langgraph.graph import StateGraph, END +from langchain_core.messages import BaseMessage, HumanMessage, AIMessage +import operator +import httpx +import os +import logging + 
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Shared graph state passed between nodes.
class AgentState(TypedDict):
    # Conversation history; operator.add lets each node append messages.
    messages: Annotated[Sequence[BaseMessage], operator.add]
    # Node the supervisor routed to ("librarian" | "opencode" | "brain").
    next_agent: str
    # Metadata about how the response was produced (source agent, errors, ...).
    context: dict


def supervisor_node(state: AgentState):
    """Decide which specialist agent should handle the latest message.

    NOTE(review): routing here is plain keyword matching, while the commit
    message / README describe "intelligent agent routing (no hardcoded
    keywords)" — confirm whether this is intended interim behavior.
    """
    last_message = state["messages"][-1].content.lower()

    # Code/repository questions -> knowledge-base lookup.
    if any(kw in last_message for kw in ["repo", "code", "git", "github", "gitea", "project", "development"]):
        return {"next_agent": "librarian"}
    # Mutation-style requests -> coding agent.
    elif any(kw in last_message for kw in ["write", "edit", "create", "fix", "bug", "implement", "code change"]):
        return {"next_agent": "opencode"}
    # Personal / about-me questions -> knowledge-base lookup.
    elif any(kw in last_message for kw in ["sam", "hobby", "music", "experience", "skill", "about"]):
        return {"next_agent": "librarian"}
    else:
        return {"next_agent": "brain"}  # Default to general LLM


def librarian_agent(state: AgentState):
    """Librarian agent - queries the knowledge base (ChromaDB).

    Calls the knowledge-service /query endpoint and wraps the returned
    context in an AIMessage. Falls back to an apology message when the
    service is unreachable or returns a non-200 status.
    """
    last_message = state["messages"][-1].content
    error_detail = ""

    try:
        # Call knowledge service
        response = httpx.post(
            "http://knowledge-service:8080/query",
            json={"question": last_message},
            timeout=10.0,
        )

        if response.status_code == 200:
            context = response.json().get("context", "")
            return {
                "messages": [AIMessage(content=f"Based on my knowledge base:\n\n{context}")],
                "context": {"source": "librarian", "context": context},
            }
        # Non-200: fall through to the failure return below.
        error_detail = f"knowledge-service returned HTTP {response.status_code}"
    except Exception as e:
        # BUG FIX: the original referenced `e` outside this except block, so
        # the non-200 (no-exception) path raised NameError; capture it here.
        logger.error(f"Librarian error: {e}")
        error_detail = str(e)

    return {
        "messages": [AIMessage(content="I couldn't find relevant information in the knowledge base.")],
        "context": {"source": "librarian", "error": error_detail},
    }


def opencode_agent(state: AgentState):
    """Opencode agent - handles coding tasks via MCP.

    Placeholder - would integrate with opencode-brain.
    """
    last_message = state["messages"][-1].content
    return {
        "messages": [AIMessage(content=f"I'm the coding agent. I would help you with: {last_message}")],
        "context": {"source": "opencode", "action": "coding_task"},
    }


def brain_agent(state: AgentState):
    """Brain agent - general LLM fallback via the opencode-brain service.

    BUG FIX: the original used `with httpx.AsyncClient(...)` inside this
    synchronous function and called `.post()` without awaiting, so every
    request produced un-awaited coroutines and `.json()` failed. Use the
    synchronous httpx.Client instead.
    """
    last_message = state["messages"][-1].content

    try:
        # SECURITY(review): a fallback password is committed in source here;
        # the default should be removed so a missing env var fails loudly.
        auth = httpx.BasicAuth("opencode", os.getenv("OPENCODE_PASSWORD", "sam4jo"))
        timeout_long = httpx.Timeout(180.0, connect=10.0)

        with httpx.Client(auth=auth, timeout=timeout_long) as client:
            # Create a fresh session for this query.
            session_res = client.post(
                "http://opencode-brain:5000/session",
                json={"title": "Supervisor Query"},
            )
            session_id = session_res.json()["id"]

            # Send the user's message to that session.
            response = client.post(
                f"http://opencode-brain:5000/session/{session_id}/message",
                json={"parts": [{"type": "text", "text": last_message}]},
            )

            data = response.json()
            # Return the first text part of the reply, if any.
            for part in data.get("parts", []):
                if part.get("type") == "text":
                    return {
                        "messages": [AIMessage(content=part["text"])],
                        "context": {"source": "brain"},
                    }
    except Exception as e:
        logger.error(f"Brain error: {e}")

    # Fallback when the service failed or returned no text parts.
    return {
        "messages": [AIMessage(content="I'm thinking about this...")],
        "context": {"source": "brain"},
    }


def route_decision(state: AgentState):
    """Conditional-edge router: follow the supervisor's next_agent choice."""
    return state["next_agent"]


# Build the graph: supervisor fans out to exactly one specialist, then END.
workflow = StateGraph(AgentState)

# Add nodes
workflow.add_node("supervisor", supervisor_node)
workflow.add_node("librarian", librarian_agent)
workflow.add_node("opencode", opencode_agent)
workflow.add_node("brain", brain_agent)

# Add edges
workflow.set_entry_point("supervisor")

# Conditional routing from supervisor
workflow.add_conditional_edges(
    "supervisor",
    route_decision,
    {
        "librarian": "librarian",
        "opencode": "opencode",
        "brain": "brain",
    },
)

# All specialist agents end
workflow.add_edge("librarian", END)
workflow.add_edge("opencode", END)
workflow.add_edge("brain", END)

# Compile the graph
+supervisor_graph = workflow.compile() + +# Main entry point for queries +async def process_query(query: str) -> dict: + """Process a query through the supervisor graph.""" + result = await supervisor_graph.ainvoke({ + "messages": [HumanMessage(content=query)], + "next_agent": "", + "context": {} + }) + + return { + "response": result["messages"][-1].content, + "context": result.get("context", {}) + } + diff --git a/plan.md b/plan.md new file mode 100644 index 0000000..ab282f9 --- /dev/null +++ b/plan.md @@ -0,0 +1,396 @@ +# Project Plan: aboutme_chat_demo + +## Goal +Build a comprehensive AI agent system that ingests data from self-hosted services (Gitea, notes, wikis), stores it in a vector database, and provides intelligent responses through a multi-agent orchestration layer. The system emphasizes modular containerized architecture, industry-standard tools, and employment-relevant skills. + +--- + +## Phase 1: Foundation & Core Infrastructure (COMPLETED) + +### Phase 1.1: Frontend Application +**Location:** `/home/sam/development/aboutme_chat_demo/frontend/` + +**Stack & Tools:** +- **Framework:** Vite 6.2.0 + React 19.0.0 + TypeScript +- **Styling:** Tailwind CSS 4.0.0 +- **State Management:** TanStack Query (React Query) 5.67.0 +- **Build Tool:** Vite with React plugin +- **Linting:** ESLint 9.21.0 + typescript-eslint 8.24.0 + +**Components Implemented:** +- `ChatInterface.tsx` - Auto-expanding text input with scrolling message list +- `App.tsx` - Main application container +- Real-time chat UI with message history +- HTTP client integration to backend gateway + +**Docker Configuration:** +- Hot-reload development setup +- Volume mounting for instant code changes +- Node modules isolation (`/app/node_modules`) + +### Phase 1.2: Chat Gateway (Orchestration Entry Point) +**Location:** `/home/sam/development/aboutme_chat_demo/backend/` + +**Stack & Tools:** +- **Framework:** FastAPI (Python 3.11) +- **HTTP Client:** httpx 0.28.1 +- **CORS:** Configured for all 
origins (development) + +**Architecture Changes:** +- **OLD:** Hardcoded keyword matching (`["sam", "hobby", "music", "guitar", "skiing", "experience"]`) to trigger knowledge lookup +- **NEW:** Thin routing layer - all queries passed to LangGraph Supervisor for intelligent agent selection +- Removed direct Brain (LLM) integration +- Removed direct Knowledge Service calls +- Now acts as stateless entry point to LangGraph orchestration layer + +**Endpoints:** +- `POST /chat` - Routes queries to LangGraph Supervisor +- `GET /health` - Service health check +- `GET /agents` - Lists available agents from LangGraph + +### Phase 1.3: Knowledge Service (Librarian Agent) +**Location:** `/home/sam/development/knowledge_service/` + +**Stack & Tools:** +- **Framework:** FastAPI + Uvicorn +- **Vector Database:** ChromaDB 1.5.1 +- **Embeddings:** OpenAI via OpenRouter API (text-embedding-3-small) +- **LLM Framework:** LangChain ecosystem + - langchain 1.2.10 + - langchain-community 0.4.1 + - langchain-core 1.2.15 + - langchain-text-splitters 1.1.1 + - langchain-openai +- **Document Processing:** RecursiveCharacterTextSplitter + +**Key Files:** +- `main.py` - FastAPI endpoints for /query and /health +- `gitea_scraper.py` - Gitea API integration module (NEW) +- `data/hobbies.md` - Sample knowledge base content +- `chroma_db/` - Persistent vector storage + +**Docker Architecture (Optimized):** +- **Pattern:** Separate `/app/packages` (cached) from `/app/code` (volume-mounted) +- **Benefits:** + - Code changes apply instantly without rebuild + - Package installation happens once during image build + - PYTHONPATH=/app/packages ensures imports work +- **Volumes:** + - `./data:/app/code/data` - Knowledge documents + - `./chroma_db:/app/code/chroma_db` - Vector database persistence + - `./main.py:/app/code/main.py:ro` - Read-only code mount + +### Phase 1.4: LangGraph Supervisor Service (NEW) +**Location:** `/home/sam/development/langgraph_service/` + +**Stack & Tools:** +- 
**Framework:** FastAPI + Uvicorn +- **Orchestration:** LangGraph 1.0.9 + - langgraph-checkpoint 4.0.0 + - langgraph-prebuilt 1.0.8 + - langgraph-sdk 0.3.9 +- **State Management:** TypedDict with Annotated operators +- **Message Types:** LangChain Core Messages (HumanMessage, AIMessage) + +**Architecture:** +- **Supervisor Node:** Analyzes queries and routes to specialist agents +- **Agent Graph:** StateGraph with conditional edges +- **Three Specialist Agents:** + 1. **Librarian Agent** - Queries ChromaDB via knowledge-service:8080 + 2. **Opencode Agent** - Placeholder for coding tasks (MCP integration ready) + 3. **Brain Agent** - Fallback to OpenCode Brain LLM (opencode-brain:5000) + +**Routing Logic:** +``` +Query → Supervisor → [Librarian | Opencode | Brain] +- "repo/code/git/project" → Librarian (RAG) +- "write/edit/create/fix" → Opencode (Coding) +- "sam/hobby/music/about" → Librarian (RAG) +- Default → Brain (General LLM) +``` + +**Docker Configuration:** +- Self-contained with own `/app/packages` +- No package sharing with other services (modular) +- Port 8090 exposed + +### Phase 1.5: Apache Airflow (Scheduled Ingestion) +**Location:** `/home/sam/development/airflow/` + +**Stack & Tools:** +- **Orchestration:** Apache Airflow 2.8.1 +- **Executor:** CeleryExecutor (distributed task processing) +- **Database:** PostgreSQL 13 (metadata) +- **Message Queue:** Redis (Celery broker) +- **Services:** + - airflow-webserver (UI + API) + - airflow-scheduler (DAG scheduling) + - airflow-worker (task execution) + - airflow-triggerer (deferrable operators) + +**DAG: gitea_daily_ingestion** +- **Schedule:** Daily +- **Tasks:** + 1. `fetch_repos` - Get all user repos from Gitea API + 2. `fetch_readmes` - Download README files + 3. 
`ingest_to_chroma` - Store in Knowledge Service + +**Integration:** +- Mounts `knowledge_service/gitea_scraper.py` into DAGs folder +- Environment variables for Gitea API token +- Network: ai-mesh (communicates with knowledge-service) + +### Phase 1.6: Gitea Scraper Module +**Location:** `/home/sam/development/knowledge_service/gitea_scraper.py` + +**Functionality:** +- **API Integration:** Gitea REST API v1 +- **Authentication:** Token-based (Authorization header) +- **Methods:** + - `get_user_repos()` - Paginated repo listing + - `get_readme(repo_name)` - README content with fallback names + - `get_repo_files(repo_name, path)` - Directory listing + - `get_file_content(repo_name, filepath)` - File download + +**Data Model:** +- `RepoMetadata` dataclass (name, description, url, branch, updated_at, language) + +### Phase 1.7: Docker Infrastructure + +**Network:** +- `ai-mesh` (external) - Shared bridge network for all services + +**Services Overview:** +| Service | Port | Purpose | Dependencies | +|---------|------|---------|--------------| +| frontend | 5173 | React UI | backend | +| backend | 8000 | Chat Gateway | langgraph-service, db | +| db | 5432 | PostgreSQL (chat history) | - | +| knowledge-service | 8080 | RAG / Vector DB | - | +| langgraph-service | 8090 | Agent Orchestration | knowledge-service | +| airflow-webserver | 8081 | Workflow UI | postgres, redis | +| airflow-scheduler | - | DAG scheduling | postgres, redis | +| airflow-worker | - | Task execution | postgres, redis | +| redis | 6379 | Message broker | - | +| postgres (airflow) | - | Airflow metadata | - | + +**Container Patterns:** +- All Python services use `/app/packages` + `/app/code` separation +- Node.js services use volume mounting for hot reload +- PostgreSQL uses named volumes for persistence +- External network (`ai-mesh`) for cross-service communication + +--- + +## Phase 2: Multi-Source Knowledge Ingestion (IN PROGRESS) + +### Goal +Expand beyond Gitea to ingest data from all 
self-hosted knowledge sources. + +### Data Sources to Integrate: +1. **Notes & Documentation** + - **Trilium Next** - Hierarchical note-taking (tree structure) + - **Obsidian** - Markdown vault with backlinks + - **Flatnotes** - Flat file markdown notes + - **HedgeDoc** - Collaborative markdown editor + +2. **Wiki** + - **DokuWiki** - Structured wiki content + +3. **Project Management** + - **Vikunja** - Task lists and project tracking + +4. **Media & Assets** + - **Immich** - Photo/video metadata + Gemini Vision API for content description + - **HomeBox** - Physical inventory with images + +### Technical Approach: +- **Crawling:** Selenium/Playwright for JavaScript-heavy UIs +- **Extraction:** Firecrawl or LangChain loaders for structured content +- **Vision:** Gemini Vision API for image-to-text conversion +- **Storage:** ChromaDB (vectors) + PostgreSQL (metadata, hashes for deduplication) +- **Scheduling:** Additional Airflow DAGs per source + +--- + +## Phase 3: Advanced Agent Capabilities + +### Goal +Integrate external AI tools and expand agent capabilities. + +### Agent Tooling: +1. **MCP (Model Context Protocol) Servers** + - Git MCP - Local repository operations + - Filesystem MCP - Secure file access + - Memory MCP - Knowledge graph persistence + - Custom Gitea MCP (if/when available) + +2. **External Agents** + - **Goose** - CLI-based agent for local task execution + - **Aider** - AI pair programming + - **Opencode** - Already integrated (Brain Agent) + - **Automaker** - Workflow automation + - **Autocoder** - Code generation + +3. **Orchestration Tools** + - **CAO CLI** - Agent orchestrator + - **Agent Pipe** - Pipeline management + +### Integration Pattern: +- Each external tool wrapped as LangGraph node +- Supervisor routes to appropriate specialist +- State management for multi-turn interactions + +--- + +## Phase 4: Production Hardening + +### Goal +Prepare system for production deployment. 
+ +### Authentication & Security: +- **Laravel** - User authentication service (Phase 4 original plan) +- **JWT tokens** - Session management +- **API key management** - Secure credential storage +- **Network policies** - Inter-service communication restrictions + +### Monitoring & Observability: +- **LangSmith** - LLM tracing and debugging +- **Langfuse** - LLM observability (note: currently in per-project install list) +- **Prometheus/Grafana** - Metrics and dashboards +- **Airflow monitoring** - DAG success/failure alerting + +### Scaling: +- **ChromaDB** - Migration to server mode for concurrent access +- **Airflow** - Multiple Celery workers +- **Load balancing** - Nginx reverse proxy +- **Backup strategies** - Vector DB snapshots, PostgreSQL dumps + +--- + +## Phase 5: Workflow Automation & Visual Tools + +### Goal +Add visual prototyping and automation capabilities. + +### Tools to Integrate: +1. **Flowise** - Visual LangChain builder + - Prototype agent flows without coding + - Export to Python code + - Debug RAG pipelines visually + +2. **Windmill** - Turn scripts into workflows + - Schedule Python/LangChain scripts + - Reactive triggers (e.g., on-commit) + - Low-code workflow builder + +3. **Activepieces** - Event-driven automation + - Webhook triggers from Gitea + - Integration with external APIs + - Visual workflow designer + +4. **N8N** - Alternative workflow automation + - Consider if Activepieces doesn't meet needs + +### Use Cases: +- **On-commit triggers:** Gitea push → immediate re-scan → notification +- **Scheduled reports:** Weekly summary of new/updated projects +- **Reactive workflows:** New photo uploaded → Gemini Vision → update knowledge base + +--- + +## Phase 6: Knowledge Library Options & RAG Enhancement + +### Goal +Advanced retrieval and knowledge organization. + +### RAG Pipeline Improvements: +1. 
**Hybrid Search** + - Semantic search (ChromaDB) + Keyword search (PostgreSQL) + - Re-ranking with cross-encoders + - Query expansion and decomposition + +2. **Multi-Modal RAG** + - Image retrieval (Immich + CLIP embeddings) + - Document parsing (PDFs, code files) + - Structured data (tables, lists) + +3. **Knowledge Organization** + - Entity extraction and linking + - Knowledge graph construction + - Hierarchical chunking strategies + +### Alternative Vector Stores (Evaluation): +- **pgvector** - PostgreSQL native (if ChromaDB limitations hit) +- **Weaviate** - GraphQL interface, hybrid search +- **Qdrant** - Rust-based, high performance +- **Milvus** - Enterprise-grade, distributed + +--- + +## Phase 7: User Experience & Interface + +### Goal +Enhanced frontend and interaction patterns. + +### Frontend Enhancements: +1. **Chat Interface Improvements** + - Streaming responses (Server-Sent Events) + - Message threading and context + - File upload for document ingestion + - Image display (for Immich integration) + +2. **Knowledge Browser** + - View ingested documents + - Search knowledge base directly + - See confidence scores and sources + - Manual document upload/ingestion trigger + +3. 
**Agent Management** + - View active agents + - Configure agent behavior + - Monitor agent performance + - Override routing decisions + +### Mobile & Accessibility: +- Responsive design improvements +- Mobile app (React Native or PWA) +- Accessibility compliance (WCAG) + +--- + +## Technology Stack Summary + +### Core Frameworks: +- **Backend:** FastAPI (Python 3.11) +- **Frontend:** Vite + React 19 + TypeScript +- **Styling:** Tailwind CSS +- **Database:** PostgreSQL 15 +- **Vector DB:** ChromaDB 1.5.1 + +### AI/ML Stack: +- **LLM Orchestration:** LangGraph 1.0.9 + LangChain +- **Embeddings:** OpenAI via OpenRouter (text-embedding-3-small) +- **LLM:** OpenCode Brain (opencode-brain:5000) +- **Vision:** Gemini Vision API (Phase 2) + +### Workflow & Scheduling: +- **Orchestration:** Apache Airflow 2.8.1 (CeleryExecutor) +- **Message Queue:** Redis +- **External Tools:** Flowise, Windmill, Activepieces + +### Development Tools: +- **Containers:** Docker + Docker Compose +- **Networking:** Bridge network (ai-mesh) +- **Testing:** curl/httpx for API testing +- **Version Control:** Gitea (self-hosted) + +### Skills Demonstrated: +- Containerized microservices architecture +- Multi-agent AI orchestration (LangGraph) +- Vector database implementation (RAG) +- ETL pipeline development (Airflow) +- API integration and web scraping +- Modular, maintainable code organization +- Industry-standard AI tooling (LangChain ecosystem) +- Workflow automation and scheduling