First Claude draft

This commit is contained in:
buenosairesam
2025-12-29 14:40:06 -03:00
commit 116d4032e2
69 changed files with 5020 additions and 0 deletions

34
shared/events/__init__.py Normal file
View File

@@ -0,0 +1,34 @@
"""
Event publishing/subscribing abstraction layer.
Supports:
- Redis Pub/Sub (default, simple)
- Redis Streams (with consumer groups, persistence)
- Kafka (future, for high-throughput)
Usage:
from shared.events import get_publisher, get_subscriber
# Publishing
async with get_publisher() as pub:
await pub.publish("metrics.raw", {"machine_id": "m1", ...})
# Subscribing
async with get_subscriber(["metrics.raw", "alerts.*"]) as sub:
async for topic, message in sub.consume():
process(topic, message)
"""
from .base import EventPublisher, EventSubscriber, Event
from .redis_pubsub import RedisPubSubPublisher, RedisPubSubSubscriber
from .factory import get_publisher, get_subscriber
__all__ = [
"EventPublisher",
"EventSubscriber",
"Event",
"RedisPubSubPublisher",
"RedisPubSubSubscriber",
"get_publisher",
"get_subscriber",
]

117
shared/events/base.py Normal file
View File

@@ -0,0 +1,117 @@
"""Abstract base classes for event publishing and subscribing."""
import uuid
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Any, AsyncIterator
@dataclass
class Event:
    """Standard event envelope carried by every backend.

    Attributes:
        topic: channel/topic the event belongs to.
        payload: arbitrary JSON-serializable event data.
        event_id: unique identifier; a fresh UUID4 when not supplied.
        timestamp: creation time as a timezone-aware UTC datetime.
        source: identifier of the publishing service ("" when unknown).
    """

    topic: str
    payload: dict[str, Any]
    event_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    # Fix: datetime.utcnow() is deprecated (Python 3.12+) and yields naive
    # datetimes; use an aware UTC timestamp instead.
    timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc))
    source: str = ""

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-friendly dict; timestamp becomes ISO-8601."""
        return {
            "event_id": self.event_id,
            "topic": self.topic,
            "timestamp": self.timestamp.isoformat(),
            "source": self.source,
            "payload": self.payload,
        }

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "Event":
        """Build an Event from a dict produced by to_dict().

        Missing optional fields fall back to fresh defaults; a missing
        "topic" key raises KeyError.
        """
        return cls(
            event_id=data.get("event_id", str(uuid.uuid4())),
            topic=data["topic"],
            timestamp=(
                datetime.fromisoformat(data["timestamp"])
                if "timestamp" in data
                else datetime.now(timezone.utc)
            ),
            source=data.get("source", ""),
            payload=data.get("payload", {}),
        )
class EventPublisher(ABC):
    """Interface for event publishers.

    Implementations own a broker connection (connect/disconnect) and deliver
    payloads to named topics. The class doubles as an async context manager
    that connects on entry and disconnects on exit.
    """

    @abstractmethod
    async def connect(self) -> None:
        """Open the connection to the message broker."""
        ...

    @abstractmethod
    async def disconnect(self) -> None:
        """Tear down the connection to the message broker."""
        ...

    @abstractmethod
    async def publish(self, topic: str, payload: dict[str, Any], **kwargs) -> str:
        """Send `payload` to `topic` and return the resulting event ID.

        kwargs carry backend-specific options (e.g. an explicit event_id,
        headers, partition key).
        """
        ...

    async def publish_event(self, event: Event) -> str:
        """Convenience wrapper: publish an already-built Event.

        NOTE(review): only topic, payload and event_id are forwarded here;
        the event's source/timestamp are left for the backend — confirm
        this is intended.
        """
        event_id = await self.publish(
            event.topic, event.payload, event_id=event.event_id
        )
        return event_id

    async def __aenter__(self) -> "EventPublisher":
        # Context-manager entry: connect, then hand back self.
        await self.connect()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        # Context-manager exit: always disconnect, even on error.
        await self.disconnect()
class EventSubscriber(ABC):
    """Interface for event subscribers.

    Implementations own the broker connection, register topic subscriptions,
    and expose incoming events as an async stream. The class doubles as an
    async context manager that connects on entry and disconnects on exit.
    """

    @abstractmethod
    async def connect(self) -> None:
        """Open the connection to the message broker."""
        ...

    @abstractmethod
    async def disconnect(self) -> None:
        """Unsubscribe and tear down the broker connection."""
        ...

    @abstractmethod
    async def subscribe(self, topics: list[str]) -> None:
        """Register interest in the given topics/patterns."""
        ...

    @abstractmethod
    async def consume(self) -> AsyncIterator[Event]:
        """Stream incoming messages; implementations yield Event objects."""
        ...

    async def __aenter__(self) -> "EventSubscriber":
        # Context-manager entry: connect, then hand back self.
        await self.connect()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        # Context-manager exit: always disconnect, even on error.
        await self.disconnect()

101
shared/events/factory.py Normal file
View File

@@ -0,0 +1,101 @@
"""Factory functions for creating event publishers and subscribers."""
import os
from enum import Enum
from .base import EventPublisher, EventSubscriber
from .redis_pubsub import RedisPubSubPublisher, RedisPubSubSubscriber
class EventBackend(str, Enum):
    """Identifiers for the supported event transport backends.

    Mixes in str so members compare equal to their raw config-string values.
    """

    REDIS_PUBSUB = "redis_pubsub"    # default: simple Redis Pub/Sub
    REDIS_STREAMS = "redis_streams"  # future: consumer groups, persistence
    KAFKA = "kafka"                  # future: high-throughput option
def get_publisher(
    backend: EventBackend | str | None = None,
    source: str = "",
    **kwargs,
) -> EventPublisher:
    """Build an event publisher for the selected backend.

    Args:
        backend: Backend selector; when None, read from the EVENTS_BACKEND
            env var, falling back to redis_pubsub.
        source: Identifier for the publishing service.
        **kwargs: Backend-specific options (e.g. redis_url).

    Returns:
        A ready-to-connect EventPublisher instance.

    Environment variables:
        EVENTS_BACKEND: Default backend (redis_pubsub, redis_streams, kafka)
        REDIS_URL: Redis connection URL
        KAFKA_BOOTSTRAP_SERVERS: Kafka bootstrap servers (future)
    """
    if backend is None:
        backend = os.getenv("EVENTS_BACKEND", EventBackend.REDIS_PUBSUB)
    # Normalize config strings to the enum (enum members are str themselves,
    # and EventBackend(member) is a no-op).
    if isinstance(backend, str):
        backend = EventBackend(backend)

    if backend is EventBackend.REDIS_PUBSUB:
        url = kwargs.get("redis_url") or os.getenv(
            "REDIS_URL", "redis://localhost:6379"
        )
        return RedisPubSubPublisher(redis_url=url, source=source)
    if backend is EventBackend.REDIS_STREAMS:
        raise NotImplementedError("Redis Streams backend not yet implemented")
    if backend is EventBackend.KAFKA:
        raise NotImplementedError("Kafka backend not yet implemented")
    raise ValueError(f"Unknown event backend: {backend}")
def get_subscriber(
    topics: list[str] | None = None,
    backend: EventBackend | str | None = None,
    **kwargs,
) -> EventSubscriber:
    """Build an event subscriber for the selected backend.

    Args:
        topics: Topics to subscribe to on connect.
        backend: Backend selector; when None, read from the EVENTS_BACKEND
            env var, falling back to redis_pubsub.
        **kwargs: Backend-specific options (e.g. redis_url).

    Returns:
        A ready-to-connect EventSubscriber instance.

    Environment variables:
        EVENTS_BACKEND: Default backend (redis_pubsub, redis_streams, kafka)
        REDIS_URL: Redis connection URL
        KAFKA_BOOTSTRAP_SERVERS: Kafka bootstrap servers (future)
    """
    if backend is None:
        backend = os.getenv("EVENTS_BACKEND", EventBackend.REDIS_PUBSUB)
    # Normalize config strings to the enum (enum members are str themselves,
    # and EventBackend(member) is a no-op).
    if isinstance(backend, str):
        backend = EventBackend(backend)

    if backend is EventBackend.REDIS_PUBSUB:
        url = kwargs.get("redis_url") or os.getenv(
            "REDIS_URL", "redis://localhost:6379"
        )
        return RedisPubSubSubscriber(redis_url=url, topics=topics)
    if backend is EventBackend.REDIS_STREAMS:
        raise NotImplementedError("Redis Streams backend not yet implemented")
    if backend is EventBackend.KAFKA:
        raise NotImplementedError("Kafka backend not yet implemented")
    raise ValueError(f"Unknown event backend: {backend}")

View File

@@ -0,0 +1,142 @@
"""Redis Pub/Sub implementation of event publishing/subscribing."""
import asyncio
import json
import logging
from typing import Any, AsyncIterator
import redis.asyncio as redis
from .base import Event, EventPublisher, EventSubscriber
logger = logging.getLogger(__name__)
class RedisPubSubPublisher(EventPublisher):
    """Redis Pub/Sub based event publisher.

    Fire-and-forget semantics: Redis Pub/Sub does not persist messages, so
    events published while no subscriber is listening are lost.
    """

    def __init__(
        self,
        redis_url: str = "redis://localhost:6379",
        source: str = "",
    ):
        # source is stamped onto every published Event envelope.
        self.redis_url = redis_url
        self.source = source
        self._client: redis.Redis | None = None

    async def connect(self) -> None:
        """Open a Redis connection and verify it with a PING."""
        self._client = redis.from_url(self.redis_url, decode_responses=True)
        await self._client.ping()
        logger.info(f"Connected to Redis at {self.redis_url}")

    async def disconnect(self) -> None:
        """Close the Redis connection, if one is open."""
        if self._client:
            # NOTE(review): redis-py >= 5 prefers aclose(); close() kept —
            # confirm against the version this project pins.
            await self._client.close()
            self._client = None
            logger.info("Disconnected from Redis")

    async def publish(self, topic: str, payload: dict[str, Any], **kwargs) -> str:
        """Publish `payload` to `topic` and return the event ID.

        Args:
            topic: Redis channel to publish to.
            payload: JSON-serializable event data.
            **kwargs: Optional `event_id` to reuse; otherwise one is minted.

        Raises:
            RuntimeError: if called before connect().
        """
        if not self._client:
            raise RuntimeError("Publisher not connected")
        # Fix: previously a throwaway Event(topic="", payload={}) was built
        # just to mint a UUID; let Event's default factory generate the id
        # when no (truthy) event_id was supplied.
        explicit_id = kwargs.get("event_id")
        if explicit_id:
            event = Event(
                topic=topic,
                payload=payload,
                event_id=explicit_id,
                source=self.source,
            )
        else:
            event = Event(topic=topic, payload=payload, source=self.source)
        message = json.dumps(event.to_dict())
        await self._client.publish(topic, message)
        logger.debug(f"Published event {event.event_id} to {topic}")
        return event.event_id
class RedisPubSubSubscriber(EventSubscriber):
    """Redis Pub/Sub based event subscriber.

    Topics containing '*' are treated as glob patterns (PSUBSCRIBE); all
    others are exact channel names (SUBSCRIBE).
    """

    def __init__(
        self,
        redis_url: str = "redis://localhost:6379",
        topics: list[str] | None = None,
    ):
        self.redis_url = redis_url
        # Topics already subscribed, or queued until connect() runs.
        self._topics = topics or []
        self._client: redis.Redis | None = None
        self._pubsub: redis.client.PubSub | None = None
        # Flag controlling the consume() loop; cleared by disconnect().
        self._running = False

    async def connect(self) -> None:
        """Connect to Redis and subscribe to any topics given at init."""
        self._client = redis.from_url(self.redis_url, decode_responses=True)
        await self._client.ping()
        self._pubsub = self._client.pubsub()
        logger.info(f"Connected to Redis at {self.redis_url}")
        if self._topics:
            # Pass a copy: subscribe() records topics back into self._topics.
            await self.subscribe(list(self._topics))

    async def disconnect(self) -> None:
        """Stop consuming, unsubscribe, and close the connection."""
        self._running = False
        if self._pubsub:
            await self._pubsub.unsubscribe()
            # NOTE(review): redis-py >= 5 prefers aclose(); close() kept —
            # confirm against the version this project pins.
            await self._pubsub.close()
            self._pubsub = None
        if self._client:
            await self._client.close()
            self._client = None
        logger.info("Disconnected from Redis")

    async def subscribe(self, topics: list[str]) -> None:
        """Subscribe to exact channels and/or '*' glob patterns.

        Raises:
            RuntimeError: if called before connect().
        """
        if not self._pubsub:
            raise RuntimeError("Subscriber not connected")
        # Route patterns through PSUBSCRIBE and exact names through SUBSCRIBE.
        patterns = [t for t in topics if "*" in t]
        channels = [t for t in topics if "*" not in t]
        if channels:
            await self._pubsub.subscribe(*channels)
            logger.info(f"Subscribed to channels: {channels}")
        if patterns:
            await self._pubsub.psubscribe(*patterns)
            logger.info(f"Subscribed to patterns: {patterns}")
        # Fix: a plain extend() duplicated every entry when connect()
        # re-passed self._topics; only record topics not already tracked.
        self._topics.extend(t for t in topics if t not in self._topics)

    async def consume(self) -> AsyncIterator[Event]:
        """Yield Event objects as they arrive until disconnect/cancellation.

        Malformed messages are logged and skipped rather than raised.

        Raises:
            RuntimeError: if called before connect().
        """
        if not self._pubsub:
            raise RuntimeError("Subscriber not connected")
        self._running = True
        while self._running:
            try:
                message = await self._pubsub.get_message(
                    ignore_subscribe_messages=True,
                    timeout=1.0,
                )
                if message is None:
                    # get_message already waited up to `timeout`; this brief
                    # sleep just keeps the loop cooperative.
                    await asyncio.sleep(0.01)
                    continue
                if message["type"] not in ("message", "pmessage"):
                    continue
                try:
                    data = json.loads(message["data"])
                    event = Event.from_dict(data)
                    yield event
                except (json.JSONDecodeError, KeyError) as e:
                    logger.warning(f"Failed to parse event: {e}")
                    continue
            except asyncio.CancelledError:
                self._running = False
                break
            except Exception as e:
                # Broad catch at the consumer boundary: log and back off
                # rather than killing the consume loop.
                logger.error(f"Error consuming events: {e}")
                await asyncio.sleep(1.0)