This commit is contained in:
2026-04-03 01:24:37 -03:00
parent cae9312db1
commit db3b94a6a1
5 changed files with 34 additions and 6 deletions

View File

@@ -34,6 +34,7 @@ class SessionContext:
mentioned_frames: list[FrameRef] = field(default_factory=list)
transcript_segments: list[TranscriptRef] = field(default_factory=list)
mentioned_transcripts: list[TranscriptRef] = field(default_factory=list)
history: list[tuple[str, str]] = field(default_factory=list) # [(role, text), ...]
class AgentProvider(ABC):

View File

@@ -62,6 +62,12 @@ def _build_prompt(message: str, context: SessionContext) -> str:
tm2, ts2 = divmod(int(t.end), 60)
lines.append(f" {t.id} [{tm1:02d}:{ts1:02d}-{tm2:02d}:{ts2:02d}] {t.text}")
if context.history:
lines.append("\nConversation history:")
for role, text in context.history:
prefix = "User" if role == "user" else "Assistant"
lines.append(f" {prefix}: {text}")
lines.append(f"\nUser message: {message}")
return "\n".join(lines)

View File

@@ -118,12 +118,14 @@ class OpenAICompatProvider(AgentProvider):
except Exception as e:
log.warning("Could not encode frame %s: %s", frame.id, e)
messages = [{"role": "system", "content": SYSTEM_PROMPT}]
for role, text in context.history:
messages.append({"role": role, "content": text})
messages.append({"role": "user", "content": content})
stream = client.chat.completions.create(
model=self._model,
-            messages=[
-                {"role": "system", "content": SYSTEM_PROMPT},
-                {"role": "user", "content": content},
-            ],
+            messages=messages,
stream=True,
)
for chunk in stream:

View File

@@ -146,6 +146,8 @@ class AgentRunner:
def __init__(self):
    """Set up an agent runner with no provider and an empty chat log."""
    self._history: list[tuple[str, str]] = []  # stored as (role, text) pairs
    self._provider: AgentProvider | None = None  # created lazily on first use
    self.include_history = False  # UI switch: attach history to each request
def _get_provider(self) -> AgentProvider:
if self._provider is None:
@@ -178,6 +180,9 @@ class AgentRunner:
def model(self, value: str):
    """Setter half of the `model` property: push the name to the provider."""
    provider = self._get_provider()
    provider.model = value
def clear_history(self):
    """Drop every recorded (role, text) conversation turn."""
    del self._history[:]
def send(
self,
message: str,
@@ -205,9 +210,14 @@ class AgentRunner:
mentioned_frames=mentioned_frames,
transcript_segments=transcript,
mentioned_transcripts=mentioned_transcripts,
history=list(self._history) if self.include_history else [],
)
self._history.append(("user", message))
response_chunks = []
for chunk in provider.stream(message, context):
response_chunks.append(chunk)
on_chunk(chunk)
self._history.append(("assistant", "".join(response_chunks)))
on_done(None)
except Exception as e:
log.error("Agent error: %s", e)