phase 9
This commit is contained in:
36
detect/providers/base.py
Normal file
36
detect/providers/base.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Cloud LLM provider protocol and model metadata."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Protocol
|
||||
|
||||
|
||||
@dataclass
class ModelInfo:
    """Metadata for a cloud LLM model."""

    # Provider-side model identifier (the string used to select the model).
    id: str
    # Whether the model accepts image input. Defaults to True — the
    # CloudProvider.call() interface below passes a base64 image on every call.
    vision: bool = True
    # Per-token input/output prices; 0.0 means "unknown / not configured".
    # NOTE(review): currency unit is not stated here — presumably USD; confirm
    # against whatever populates these values.
    cost_per_input_token: float = 0.0
    cost_per_output_token: float = 0.0
    # Upper bound on tokens the model may generate in one response.
    max_output_tokens: int = 4096
    # Free-form human-readable notes about the model.
    notes: str = ""
|
||||
|
||||
|
||||
@dataclass
class ProviderResponse:
    """Result of a single cloud LLM call (see CloudProvider.call)."""

    # The model's text answer to the prompt.
    answer: str
    # Token usage for the call; defaults to 0 — presumably left at 0 when the
    # provider does not report usage. TODO confirm against provider implementations.
    total_tokens: int = 0
|
||||
|
||||
|
||||
class CloudProvider(Protocol):
    """
    Interface for cloud LLM providers.

    Each provider handles its own auth, payload format, and response parsing.
    The pipeline only calls call() and reads the response.

    This is a structural (duck-typed) interface: implementations need not
    inherit from it, only match its attributes and call() signature.
    """

    # Provider identifier (e.g. for logging/selection — exact use is defined
    # by callers outside this file).
    name: str
    # Models this provider exposes, keyed by a string — presumably the model
    # id matching ModelInfo.id; TODO confirm against implementations.
    models: dict[str, ModelInfo]

    def call(self, image_b64: str, prompt: str) -> ProviderResponse:
        """Send *prompt* plus a base64-encoded image to the model.

        Args:
            image_b64: Image content, base64-encoded (encoding/format details
                are left to each implementation).
            prompt: Text prompt to accompany the image.

        Returns:
            The provider's parsed ProviderResponse.
        """
        ...
|
||||
Reference in New Issue
Block a user