wire llms, ui tweaks
This commit is contained in:
72
api/main.py
72
api/main.py
@@ -165,6 +165,78 @@ async def set_active_scenario(req: ScenarioUpdate):
|
||||
return {"error": str(e)}
|
||||
|
||||
|
||||
# ── LLM config routes ──
|
||||
|
||||
@app.get("/config/llm")
|
||||
async def get_llm_config():
    """Report the active LLM provider and the status of every known backend.

    Reads configuration purely from environment variables. The response maps
    each provider name to whether its credential is present (``configured``)
    and which model it would use; ``template`` is a credential-free fallback.
    """
    import os

    def _entry(key_var, model_var, default_model):
        # One provider record: credential presence + effective model name.
        return {
            "configured": bool(os.getenv(key_var)),
            "model": os.getenv(model_var, default_model),
        }

    # Insertion order is preserved deliberately so serialized output matches
    # the historical response shape: groq, anthropic, bedrock, openai, template.
    backends = {
        "groq": _entry("GROQ_API_KEY", "GROQ_MODEL", "llama-3.3-70b-versatile"),
        "anthropic": _entry("ANTHROPIC_API_KEY", "ANTHROPIC_MODEL", "claude-sonnet-4-20250514"),
        "bedrock": _entry("AWS_ACCESS_KEY_ID", "BEDROCK_MODEL_ID", "anthropic.claude-sonnet-4-20250514-v1:0"),
        "openai": {
            **_entry("OPENAI_API_KEY", "OPENAI_MODEL", "gpt-4o"),
            "base_url": os.getenv("OPENAI_BASE_URL", ""),
        },
        "template": {
            "configured": True,
            "model": "none (structured fallback)",
        },
    }
    return {
        "provider": os.getenv("LLM_PROVIDER", "groq"),
        "providers": backends,
    }
|
||||
|
||||
|
||||
class LLMConfigUpdate(BaseModel):
    """Request body for ``PUT /config/llm`` (see ``set_llm_config``)."""

    # Provider to activate; handler recognizes "groq", "anthropic",
    # "openai", and "bedrock" — other values only set LLM_PROVIDER.
    provider: str
    # API key for the chosen provider; omitted -> existing env value is kept.
    api_key: str | None = None
    # Model override for the chosen provider; omitted -> env/default model.
    model: str | None = None
    # Base URL override — only consumed for the "openai" provider.
    base_url: str | None = None
|
||||
|
||||
|
||||
@app.put("/config/llm")
|
||||
async def set_llm_config(req: LLMConfigUpdate):
    """Switch LLM provider at runtime. Sets env vars for MCP subprocesses.

    Fix: previously ``model`` (and OpenAI's ``base_url``) overrides were
    silently ignored unless ``api_key`` was also supplied in the same request,
    because the branch condition was ``provider == X and req.api_key``. A key
    may already be configured via the environment, so each optional field is
    now applied independently. Requests that worked before behave identically.

    Args:
        req: Desired provider plus optional key/model/base-url overrides.

    Returns:
        The refreshed configuration, as produced by ``get_llm_config``.
    """
    import os

    os.environ["LLM_PROVIDER"] = req.provider

    # Map each known provider to the env vars its optional fields update.
    # Bedrock auth comes from the standard AWS env vars, not an api_key field;
    # "template" (and unknown providers) have nothing further to set.
    provider_env = {
        "groq": ("GROQ_API_KEY", "GROQ_MODEL"),
        "anthropic": ("ANTHROPIC_API_KEY", "ANTHROPIC_MODEL"),
        "openai": ("OPENAI_API_KEY", "OPENAI_MODEL"),
        "bedrock": (None, "BEDROCK_MODEL_ID"),
    }
    key_var, model_var = provider_env.get(req.provider, (None, None))
    if key_var and req.api_key:
        os.environ[key_var] = req.api_key
    if model_var and req.model:
        os.environ[model_var] = req.model
    if req.provider == "openai" and req.base_url:
        os.environ["OPENAI_BASE_URL"] = req.base_url

    # No need to update server configs — _server_config() reads env at connect time
    return await get_llm_config()
|
||||
|
||||
|
||||
# ── Scenario data routes ──
|
||||
|
||||
@app.get("/scenarios/data/flights")
|
||||
|
||||
Reference in New Issue
Block a user