Extract parser.py and tool_runner.py from agent code
This commit is contained in:
@@ -4,42 +4,17 @@ Composes the three domain-scoped MCP servers into namespaced configurations
|
||||
that agents connect to as a single client.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Any
|
||||
|
||||
from fastmcp import Client
|
||||
|
||||
|
||||
def _env() -> dict:
    """Forward LLM-related env vars and active scenario to MCP server subprocesses.

    Returns:
        A dict of environment variables for the subprocess. Only keys with
        non-empty values are forwarded (``if val`` skips empty strings), plus
        ACTIVE_SCENARIO, which is always set from the scenario manager.
    """
    # Local import — NOTE(review): presumably deferred to avoid a circular
    # import at module load time; confirm against the import graph.
    from mcp_servers.data.scenarios.manager import scenario_manager

    # Allow-list of variables worth forwarding: LLM provider selection and
    # credentials, AWS/Bedrock config, and PATH (the server is launched via
    # the `uv` command, which must be resolvable in the child process).
    forwarded = (
        "LLM_PROVIDER", "GROQ_API_KEY", "GROQ_MODEL",
        "ANTHROPIC_API_KEY", "ANTHROPIC_MODEL",
        "OPENAI_API_KEY", "OPENAI_BASE_URL", "OPENAI_MODEL",
        "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_DEFAULT_REGION",
        "BEDROCK_MODEL_ID", "USE_BEDROCK",
        "PATH",
    )
    # Fix: dropped the redundant function-local `import os` (os is imported
    # at module level). Truthiness check deliberately skips empty strings.
    env = {key: val for key in forwarded if (val := os.getenv(key))}
    env["ACTIVE_SCENARIO"] = scenario_manager.active_id
    return env
|
||||
|
||||
|
||||
def _server_config(module: str) -> dict:
    """Return the stdio-server spec for *module*.

    Env vars are snapshotted at call time (connect time), not import time,
    so changes to the active scenario or LLM config are picked up.
    """
    config: dict = {"command": "uv"}
    config["args"] = ["run", "python", "-m", module]
    config["env"] = _env()
    return config
|
||||
|
||||
from agents.shared.parser import (
|
||||
parse_prompt_result,
|
||||
parse_resource_result,
|
||||
parse_tool_result,
|
||||
)
|
||||
|
||||
SERVER_MODULES = {
|
||||
"shared": "mcp_servers.shared",
|
||||
@@ -47,12 +22,33 @@ SERVER_MODULES = {
|
||||
"passenger": "mcp_servers.passenger",
|
||||
}
|
||||
|
||||
# Agent profiles: for each agent, the list of MCP servers it connects to.
AGENT_PROFILES = dict(
    fce=["shared", "ops", "passenger"],
    handover=["shared", "ops"],
)
|
||||
|
||||
_FORWARDED_ENV_KEYS = (
|
||||
"LLM_PROVIDER", "GROQ_API_KEY", "GROQ_MODEL",
|
||||
"ANTHROPIC_API_KEY", "ANTHROPIC_MODEL",
|
||||
"OPENAI_API_KEY", "OPENAI_BASE_URL", "OPENAI_MODEL",
|
||||
"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_DEFAULT_REGION",
|
||||
"BEDROCK_MODEL_ID", "USE_BEDROCK",
|
||||
"PATH",
|
||||
)
|
||||
|
||||
|
||||
def _subprocess_env() -> dict:
    """Build the environment forwarded to MCP server subprocesses.

    Copies every allow-listed key that is present in os.environ (empty
    strings included), then pins ACTIVE_SCENARIO from the scenario manager.
    """
    # Local import — NOTE(review): presumably deferred to avoid a circular
    # import at module load time; confirm against the import graph.
    from mcp_servers.data.scenarios.manager import scenario_manager

    env = {
        key: value
        for key in _FORWARDED_ENV_KEYS
        if (value := os.environ.get(key)) is not None
    }
    env["ACTIVE_SCENARIO"] = scenario_manager.active_id
    return env
|
||||
|
||||
|
||||
def _server_config(module: str) -> dict:
    """Stdio-server spec for *module*; env is captured at call time."""
    return {
        "command": "uv",
        "args": ["run", "python", "-m", module],
        "env": _subprocess_env(),
    }
|
||||
|
||||
|
||||
class MCPMultiClient:
|
||||
"""Manages connections to multiple MCP servers via fastmcp Client."""
|
||||
@@ -61,7 +57,6 @@ class MCPMultiClient:
|
||||
self._clients: dict[str, Client] = {}
|
||||
|
||||
async def connect(self, server_names: list[str]) -> None:
|
||||
"""Connect to the specified MCP servers."""
|
||||
for name in server_names:
|
||||
if name not in SERVER_MODULES:
|
||||
raise ValueError(f"Unknown server: {name}. Available: {list(SERVER_MODULES.keys())}")
|
||||
@@ -71,7 +66,6 @@ class MCPMultiClient:
|
||||
self._clients[name] = client
|
||||
|
||||
async def close(self) -> None:
|
||||
"""Close all server connections."""
|
||||
for client in self._clients.values():
|
||||
try:
|
||||
await client.__aexit__(None, None, None)
|
||||
@@ -79,71 +73,23 @@ class MCPMultiClient:
|
||||
pass
|
||||
self._clients.clear()
|
||||
|
||||
async def call_tool(self, server: str, tool_name: str, arguments: dict) -> Any:
|
||||
"""Call a tool on a specific server. Returns parsed result."""
|
||||
def _client(self, server: str) -> Client:
|
||||
client = self._clients.get(server)
|
||||
if not client:
|
||||
raise ValueError(f"Not connected to server: {server}")
|
||||
return client
|
||||
|
||||
result = await client.call_tool(tool_name, arguments)
|
||||
|
||||
# Parse the result content
|
||||
if isinstance(result, list):
|
||||
texts = [c.text for c in result if hasattr(c, "text")]
|
||||
elif hasattr(result, "content"):
|
||||
texts = [c.text for c in result.content if hasattr(c, "text")]
|
||||
else:
|
||||
return result
|
||||
|
||||
if len(texts) == 1:
|
||||
try:
|
||||
return json.loads(texts[0])
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
return texts[0]
|
||||
elif len(texts) > 1:
|
||||
parsed = []
|
||||
for t in texts:
|
||||
try:
|
||||
parsed.append(json.loads(t))
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
parsed.append(t)
|
||||
return parsed
|
||||
return None
|
||||
async def call_tool(self, server: str, tool_name: str, arguments: dict) -> Any:
|
||||
result = await self._client(server).call_tool(tool_name, arguments)
|
||||
return parse_tool_result(result)
|
||||
|
||||
async def read_resource(self, server: str, uri: str) -> Any:
|
||||
"""Read a resource from a specific server."""
|
||||
client = self._clients.get(server)
|
||||
if not client:
|
||||
raise ValueError(f"Not connected to server: {server}")
|
||||
|
||||
result = await client.read_resource(uri)
|
||||
if isinstance(result, str):
|
||||
try:
|
||||
return json.loads(result)
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
return result
|
||||
return result
|
||||
result = await self._client(server).read_resource(uri)
|
||||
return parse_resource_result(result)
|
||||
|
||||
async def get_prompt(self, server: str, prompt_name: str, arguments: dict) -> str:
|
||||
"""Get a rendered prompt from a specific server."""
|
||||
client = self._clients.get(server)
|
||||
if not client:
|
||||
raise ValueError(f"Not connected to server: {server}")
|
||||
|
||||
result = await client.get_prompt(prompt_name, arguments)
|
||||
if isinstance(result, str):
|
||||
return result
|
||||
# Handle structured prompt response
|
||||
texts = []
|
||||
if hasattr(result, "messages"):
|
||||
for msg in result.messages:
|
||||
if hasattr(msg.content, "text"):
|
||||
texts.append(msg.content.text)
|
||||
elif isinstance(msg.content, list):
|
||||
for c in msg.content:
|
||||
if hasattr(c, "text"):
|
||||
texts.append(c.text)
|
||||
return "\n".join(texts) if texts else str(result)
|
||||
result = await self._client(server).get_prompt(prompt_name, arguments)
|
||||
return parse_prompt_result(result)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
|
||||
Reference in New Issue
Block a user