Add .gitignore, remove __pycache__ from tracking, and update MCP/orchestrator modules
- Add .gitignore to exclude .env, __pycache__, node_modules, and IDE files
- Remove all __pycache__ bytecode files from version control
- Add MCP config files (mcp.json, mcp.json.example)
- Add MCP manager, registry, and config modules
- Update routes, orchestrator engine, and agent base with latest changes

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
63
src/main.py
63
src/main.py
@@ -1,7 +1,10 @@
|
||||
"""Agentic Microservice — FastAPI application entry point.
|
||||
|
||||
Wires together all components: Redis storage, model adapters, MCP client,
|
||||
Wires together all components: Redis storage, model adapters, MCP registry,
|
||||
context engine, orchestrator, and SSE streaming.
|
||||
|
||||
MCP servers are per-session: the global mcp.json defines WHAT servers
|
||||
to run, and each session provides project-specific env vars.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -20,7 +23,7 @@ from .adapters.openai_adapter import OpenAIAdapter
|
||||
from .api.routes import router, set_dependencies
|
||||
from .config import settings
|
||||
from .context.engine import ContextEngine
|
||||
from .mcp.client import MCPClient
|
||||
from .mcp.registry import MCPRegistry
|
||||
from .memory.store import MemoryStore
|
||||
from .orchestrator.engine import OrchestratorEngine
|
||||
from .storage.redis import RedisStorage
|
||||
@@ -34,8 +37,8 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
# Global instances (initialized in lifespan)
|
||||
redis_storage = RedisStorage()
|
||||
mcp_client = MCPClient()
|
||||
sse_emitter = SSEEmitter(redis_storage=redis_storage)
|
||||
mcp_registry = MCPRegistry()
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
@@ -45,11 +48,9 @@ async def lifespan(app: FastAPI):
|
||||
|
||||
# 1. Connect Redis
|
||||
await redis_storage.connect()
|
||||
|
||||
# Wire SSE emitter to Redis for event persistence (re-set after connect)
|
||||
sse_emitter.set_storage(redis_storage)
|
||||
|
||||
# 2. Initialize model adapter (based on configured provider)
|
||||
# 2. Initialize model adapter
|
||||
if settings.default_model_provider == "openai":
|
||||
model_adapter = OpenAIAdapter()
|
||||
logger.info("Using OpenAI adapter (model: %s)", settings.default_model_id)
|
||||
@@ -57,36 +58,37 @@ async def lifespan(app: FastAPI):
|
||||
model_adapter = ClaudeAdapter()
|
||||
logger.info("Using Claude adapter (model: %s)", settings.default_model_id)
|
||||
|
||||
# 3. Initialize memory store (uses same Redis connection)
|
||||
# 3. Initialize memory store
|
||||
memory_store = MemoryStore(redis_storage.client)
|
||||
|
||||
# 4. Initialize context engine (with memory store for knowledge base)
|
||||
# 4. Initialize context engine
|
||||
context_engine = ContextEngine(memory_store=memory_store)
|
||||
|
||||
# 5. Start MCP client (if configured)
|
||||
if settings.mcp_server_command:
|
||||
try:
|
||||
await mcp_client.start()
|
||||
logger.info("MCP client started with %d tools", len(mcp_client.tools))
|
||||
except Exception as e:
|
||||
logger.warning("MCP client failed to start: %s — continuing without MCP", e)
|
||||
# 5. Load MCP config template (servers are started per-session)
|
||||
if settings.mcp_config_path:
|
||||
config_path = pathlib.Path(settings.mcp_config_path)
|
||||
if not config_path.is_absolute():
|
||||
config_path = pathlib.Path(__file__).resolve().parent.parent / settings.mcp_config_path
|
||||
mcp_registry._config_path = config_path
|
||||
elif settings.mcp_server_command:
|
||||
# Legacy: create a synthetic config from env vars
|
||||
from .mcp.config import MCPConfigFile, MCPServerConfig
|
||||
mcp_registry._config = MCPConfigFile(mcpServers={
|
||||
"default": MCPServerConfig(
|
||||
command=settings.mcp_server_command,
|
||||
args=list(settings.mcp_server_args),
|
||||
)
|
||||
})
|
||||
mcp_registry.load_config()
|
||||
|
||||
# 6. Initialize orchestrator
|
||||
orchestrator = OrchestratorEngine(
|
||||
model_adapter=model_adapter,
|
||||
context_engine=context_engine,
|
||||
mcp_client=mcp_client,
|
||||
memory_store=memory_store,
|
||||
sse_emitter=sse_emitter,
|
||||
)
|
||||
|
||||
# 7. Wire dependencies into API routes
|
||||
# 6. Wire dependencies (orchestrator is created per-message with session's MCP)
|
||||
set_dependencies(
|
||||
storage=redis_storage,
|
||||
orchestrator=orchestrator,
|
||||
sse_emitter=sse_emitter,
|
||||
model_adapter=model_adapter,
|
||||
context_engine=context_engine,
|
||||
memory_store=memory_store,
|
||||
sse_emitter=sse_emitter,
|
||||
mcp_registry=mcp_registry,
|
||||
)
|
||||
|
||||
logger.info("All systems initialized. Serving on %s:%d", settings.host, settings.port)
|
||||
@@ -95,7 +97,7 @@ async def lifespan(app: FastAPI):
|
||||
|
||||
# Shutdown
|
||||
logger.info("Shutting down...")
|
||||
await mcp_client.stop()
|
||||
await mcp_registry.stop_all()
|
||||
await redis_storage.disconnect()
|
||||
logger.info("Shutdown complete.")
|
||||
|
||||
@@ -106,7 +108,6 @@ app = FastAPI(
|
||||
lifespan=lifespan,
|
||||
)
|
||||
|
||||
# CORS
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
@@ -115,23 +116,19 @@ app.add_middleware(
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# Mount API routes
|
||||
app.include_router(router, prefix="/api/v1")
|
||||
|
||||
|
||||
# Health check
|
||||
@app.get("/health")
|
||||
async def health() -> dict[str, str]:
|
||||
return {"status": "ok", "service": settings.service_name}
|
||||
|
||||
|
||||
# Root redirect
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return RedirectResponse(url="/dashboard/")
|
||||
|
||||
|
||||
# Dashboard static files (mounted AFTER API routes)
|
||||
_dashboard_dir = pathlib.Path(__file__).resolve().parent.parent / "dashboard"
|
||||
if _dashboard_dir.is_dir():
|
||||
app.mount("/dashboard", StaticFiles(directory=str(_dashboard_dir), html=True), name="dashboard")
|
||||
|
||||
Reference in New Issue
Block a user