1.1 changes

This commit is contained in:
buenosairesam
2025-12-29 14:17:53 -03:00
parent 11fde0636f
commit c5546cf7fc
58 changed files with 1048 additions and 496 deletions

117
cfg/amar/.env.example Normal file
View File

@@ -0,0 +1,117 @@
# =============================================================================
# AMAR Room - Configuration Template
# =============================================================================
# Copy this to .env and fill in values for your environment.
# Uses absolute paths to plug soleprint into existing Dockerfiles.
# =============================================================================
# =============================================================================
# DEPLOYMENT
# =============================================================================
# Container name prefix (e.g., amar_backend, amar_db)
DEPLOYMENT_NAME=amar
# =============================================================================
# NETWORK
# =============================================================================
NETWORK_NAME=amar_network
# =============================================================================
# PATHS (absolute paths to external code/dockerfiles - REQUIRED)
# =============================================================================
BACKEND_PATH=/abs/path/to/amar_django_back
FRONTEND_PATH=/abs/path/to/amar_frontend
DOCKERFILE_BACKEND=/abs/path/to/Dockerfile.backend
DOCKERFILE_FRONTEND=/abs/path/to/Dockerfile.frontend
# =============================================================================
# DATABASE
# =============================================================================
# Database dump file (relative to dumps/ directory)
DB_DUMP=test.sql
# =============================================================================
# PORTS
# =============================================================================
BACKEND_PORT=8000
FRONTEND_PORT=3000
# Soleprint ports
SOLEPRINT_PORT=12000
ARTERY_PORT=12001
ATLAS_PORT=12002
STATION_PORT=12003
# =============================================================================
# BACKEND SERVER (Uvicorn)
# =============================================================================
# Dev: 1 worker with hot reload
# Prod: 4 workers without reload
BACKEND_WORKERS=1
BACKEND_RELOAD=--reload
# Database connection
POSTGRES_DB=amarback
POSTGRES_USER=postgres
POSTGRES_PASSWORD=your_secure_password_here
# =============================================================================
# DJANGO
# =============================================================================
SECRET_KEY=your_django_secret_key_here
DEBUG=False
ALLOWED_HOSTS=amar.room.mcrn.ar,localhost
DJANGO_ENV=production
# =============================================================================
# CORS
# =============================================================================
CORS_ALLOW_ALL=false
CORS_ALLOWED_ORIGINS=
# =============================================================================
# GOOGLE SERVICES
# =============================================================================
SUBJECT_CALENDAR=
SHEET_ID=
RANGE_NAME=
GOOGLE_MAPS_API_KEY=
# =============================================================================
# ANALYTICS
# =============================================================================
GA4_MEASUREMENT_ID=
AMPLITUDE_API_KEY=
HOTJAR_API_KEY=
# =============================================================================
# MERCADO PAGO
# =============================================================================
ACCESS_TOKEN_MERCADO_PAGO=
MP_PLATFORM_ACCESS_TOKEN=
USER_ID=
# =============================================================================
# WEB PUSH
# =============================================================================
WEBPUSH_VAPID_PUBLIC_KEY=
WEBPUSH_VAPID_PRIVATE_KEY=
WEBPUSH_VAPID_ADMIN_EMAIL=
# =============================================================================
# INIT
# =============================================================================
USER_PASSWORD=initial_admin_password
# =============================================================================
# FRONTEND
# =============================================================================
NEXT_PUBLIC_APP_API_URL_BACKOFFICE=
NEXT_PUBLIC_APP_API_URL_STAGE=
NEXT_PUBLIC_IS_STAGE=false
NEXT_PUBLIC_FB_PIXEL_ID=
NEXT_PUBLIC_TAG_MANAGER=
NEXT_PUBLIC_WHATSAPP_CONTACT=
NEXT_PUBLIC_API_KEY=
NEXT_PUBLIC_AMPLITUDE_API_KEY=
NEXT_PUBLIC_GMAPS_API_KEY=

202
cfg/amar/docker-compose.yml Normal file
View File

@@ -0,0 +1,202 @@
# Amar Room - Docker Compose
#
# Creates: db, redis, backend, celery, celery-beat, frontend
# Network: Joins external network defined by NETWORK_NAME
#
# Usage:
# cd cfg/amar && docker compose up -d
#
# Required Environment Variables (from .env):
# - DEPLOYMENT_NAME: Prefix for container names
# - NETWORK_NAME: Network to join
# - BACKEND_PATH, FRONTEND_PATH: Source code paths (absolute)
# - DOCKERFILE_BACKEND, DOCKERFILE_FRONTEND: Dockerfile paths (absolute)
# - DB_DUMP: Database dump file (relative to dumps/)
# - Plus all app-specific vars (POSTGRES_*, etc.)
services:
db:
image: postgis/postgis:15-3.4
container_name: ${DEPLOYMENT_NAME}_db
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
volumes:
- pgdata:/var/lib/postgresql/data
# Database dump loaded on init (if volume is empty)
- ./dumps/${DB_DUMP}:/docker-entrypoint-initdb.d/dump.sql:ro
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
interval: 10s
timeout: 5s
retries: 5
networks:
- default
redis:
image: redis:7-alpine
container_name: ${DEPLOYMENT_NAME}_redis
volumes:
- redisdata:/data
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 10s
timeout: 5s
retries: 5
networks:
- default
backend:
build:
context: ${BACKEND_PATH}
dockerfile: ${DOCKERFILE_BACKEND}
target: runtime
container_name: ${DEPLOYMENT_NAME}_backend
environment:
- SECRET_KEY
- DEBUG
- ALLOWED_HOSTS
- DJANGO_ENV
- DB_NAME=${POSTGRES_DB}
- DB_USER=${POSTGRES_USER}
- DB_PASSWORD=${POSTGRES_PASSWORD}
- DB_HOST=db
- DB_PORT=5432
- CELERY_BROKER_URL=redis://redis:6379/0
- CELERY_RESULT_BACKEND=redis://redis:6379/0
- CORS_ALLOW_ALL
- CORS_ALLOWED_ORIGINS
# Uvicorn config (dev: 1 worker + reload, prod: 4 workers)
- WORKERS=${BACKEND_WORKERS:-1}
- RELOAD=${BACKEND_RELOAD:---reload}
# Google
- SUBJECT_CALENDAR
- SHEET_ID
- RANGE_NAME
- GOOGLE_MAPS_API_KEY
# Analytics
- GA4_MEASUREMENT_ID
- AMPLITUDE_API_KEY
- HOTJAR_API_KEY
# Payments
- ACCESS_TOKEN_MERCADO_PAGO
- MP_PLATFORM_ACCESS_TOKEN
- USER_ID
# Push
- WEBPUSH_VAPID_PUBLIC_KEY
- WEBPUSH_VAPID_PRIVATE_KEY
- WEBPUSH_VAPID_ADMIN_EMAIL
# Init
- USER_PASSWORD
volumes:
- ${BACKEND_PATH}:/app
- backend_static:/var/etc/static
- backend_media:/app/media
ports:
- "${BACKEND_PORT}:8000"
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
networks:
- default
command: >
sh -c "python manage.py migrate --noinput &&
uvicorn amar_django_back.asgi:application --host 0.0.0.0 --port 8000 --workers $${WORKERS:-1} $${RELOAD}"
celery:
build:
context: ${BACKEND_PATH}
dockerfile: ${DOCKERFILE_BACKEND}
target: runtime
container_name: ${DEPLOYMENT_NAME}_celery
environment:
- SECRET_KEY
- DEBUG
- DJANGO_ENV
- DB_NAME=${POSTGRES_DB}
- DB_USER=${POSTGRES_USER}
- DB_PASSWORD=${POSTGRES_PASSWORD}
- DB_HOST=db
- DB_PORT=5432
- CELERY_BROKER_URL=redis://redis:6379/0
- CELERY_RESULT_BACKEND=redis://redis:6379/0
volumes:
- ${BACKEND_PATH}:/app
- backend_media:/app/media
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
networks:
- default
command: celery -A amar_django_back worker -l INFO
celery-beat:
build:
context: ${BACKEND_PATH}
dockerfile: ${DOCKERFILE_BACKEND}
target: runtime
container_name: ${DEPLOYMENT_NAME}_celery_beat
environment:
- SECRET_KEY
- DEBUG
- DJANGO_ENV
- DB_NAME=${POSTGRES_DB}
- DB_USER=${POSTGRES_USER}
- DB_PASSWORD=${POSTGRES_PASSWORD}
- DB_HOST=db
- DB_PORT=5432
- CELERY_BROKER_URL=redis://redis:6379/0
- CELERY_RESULT_BACKEND=redis://redis:6379/0
volumes:
- ${BACKEND_PATH}:/app
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
networks:
- default
command: celery -A amar_django_back beat -l INFO
frontend:
build:
context: ${FRONTEND_PATH}
dockerfile: ${DOCKERFILE_FRONTEND}
target: development
container_name: ${DEPLOYMENT_NAME}_frontend
environment:
- NEXT_PUBLIC_APP_API_URL_BACKOFFICE
- NEXT_PUBLIC_APP_API_URL_STAGE
- NEXT_PUBLIC_IS_STAGE
- NEXT_PUBLIC_FB_PIXEL_ID
- NEXT_PUBLIC_TAG_MANAGER
- NEXT_PUBLIC_WHATSAPP_CONTACT
- NEXT_PUBLIC_API_KEY
- NEXT_PUBLIC_AMPLITUDE_API_KEY
- NEXT_PUBLIC_GMAPS_API_KEY
volumes:
- ${FRONTEND_PATH}/src:/app/src
- ${FRONTEND_PATH}/public:/app/public
ports:
- "${FRONTEND_PORT}:3000"
depends_on:
- backend
networks:
- default
# CMD from Dockerfile.frontend development stage: npm run dev
volumes:
pgdata:
redisdata:
backend_static:
backend_media:
networks:
default:
external: true
name: ${NETWORK_NAME}

13
cfg/amar/link/Dockerfile Normal file
View File

@@ -0,0 +1,13 @@
FROM python:3.11-slim
WORKDIR /app
# Install dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Copy application
COPY . .
# Run
CMD ["python", "main.py"]

120
cfg/amar/link/README.md Normal file
View File

@@ -0,0 +1,120 @@
# Link Room - Adapter Layer
Provides framework-agnostic data navigation between managed apps (AMAR) and soleprint.
## Status: Initial Implementation ✅
**Working:**
- ✅ FastAPI service with adapter pattern
- ✅ BaseAdapter interface for pluggable frameworks
- ✅ DjangoAdapter with AMAR database queries
- ✅ Docker build and container starts
- ✅ `/health` endpoint (adapter loads successfully)
- ✅ `/api/queries` endpoint (lists available queries)
**Pending:**
- ⏳ Database connection (needs DB_HOST env var fix)
- ⏳ Complete all entity queries (Pet, Vet, ServiceRequest, etc.)
- ⏳ Ward integration (consume JSON and render graph)
## Architecture
```
Managed App (AMAR) ←─── link_room ───→ Soleprint (Ward)
↓ ↓ ↓
Database Adapter Layer Graph Renderer
(SQL → JSON) (JSON → SVG)
```
**JSON Contract:**
```json
{
"nodes": [
{"id": "User_123", "type": "User", "label": "john", "data": {...}}
],
"edges": [
{"from": "User_123", "to": "PetOwner_456", "label": "has profile"}
],
"summary": {
"title": "User #123",
"credentials": "john | Password: Amar2025!",
"fields": {"Email": "john@example.com"}
}
}
```
## Endpoints
- `GET /health` - Health check with adapter status
- `GET /api/queries` - List available predefined queries
- `GET /api/navigate?query=user_with_pets` - Query mode
- `GET /api/navigate?entity=User&id=123` - Entity navigation mode
## Available Queries
1. `user_with_pets` - User with Pet ownership
2. `user_with_requests` - User with ServiceRequests
## Usage
```bash
# Start (from core_room/ctrl)
./start.sh link_room -d --build
# Test
curl http://localhost:8100/health
curl http://localhost:8100/api/queries
curl "http://localhost:8100/api/navigate?query=user_with_pets"
# Logs
docker logs core_room_link_room
```
## Environment Variables
From `core_room/.env`:
- `NEST_NAME` - Container naming
- `NETWORK_NAME` - Docker network
- `DB_HOST` - Database host (needs fix: should point to db container)
- `DB_PORT` - Database port
- `DB_NAME` - Database name
- `DB_USER` - Database user
- `DB_PASSWORD` - Database password
- `ADAPTER_TYPE` - Adapter to use (default: django)
## Next Steps
1. **Fix DB connection** - Set correct DB_HOST in core_room/.env
2. **Complete queries** - Add remaining entity types
3. **Ward integration** - Create ward consumer for JSON
4. **Add graphviz rendering** - Move from data_browse reference
5. **Test end-to-end** - Query → JSON → SVG → Display
## Files
```
link_room/
├── README.md # This file
├── main.py # FastAPI app with endpoints
├── requirements.txt # Python dependencies
├── Dockerfile # Container build
├── docker-compose.yml # Service definition
└── adapters/
├── __init__.py # BaseAdapter interface
└── django.py # DjangoAdapter implementation
```
## Design Goals
**Framework-agnostic** - Works with Django, Rails, Express, etc.
**Decoupled** - Managed app owns data, link_room translates
**Pluggable** - Adapters for different frameworks
**Authenticated** - Ready for remote deployment
**Incremental** - Build and test each piece
## Reference
Previous approach (databrowse direct DB) saved in:
- Branch: `ref/databrowse-direct-db` (ward repo)
- Problem: Tight coupling, won't work remote
- Solution: This adapter pattern

View File

@@ -0,0 +1,43 @@
"""
Adapters for different managed app frameworks.
"""
from typing import Dict, List, Any, Optional
from abc import ABC, abstractmethod
class BaseAdapter(ABC):
    """Abstract interface that every framework adapter must implement.

    An adapter translates a managed app's data source (database, API, ...)
    into the link-room graph contract consumed by soleprint.
    """

    def __init__(self, config: Dict[str, Any]):
        """Store the connection settings for later use by subclasses.

        Args:
            config: Database connection or API endpoint configuration.
        """
        self.config = config

    @abstractmethod
    def navigate(
        self,
        query: Optional[str] = None,
        entity: Optional[str] = None,
        id: Optional[int] = None
    ) -> Dict[str, Any]:
        """Navigate the data graph.

        Returns:
            {
                "nodes": [{"id": str, "type": str, "label": str, "data": dict}],
                "edges": [{"from": str, "to": str, "label": str}],
                "summary": {"title": str, "credentials": str|None, "fields": dict}
            }
        """

    @abstractmethod
    def get_queries(self) -> List[str]:
        """Return the names of the predefined queries this adapter supports."""

View File

@@ -0,0 +1,235 @@
"""
Django adapter for AMAR.
Queries AMAR's PostgreSQL database directly.
"""
from typing import Dict, List, Any, Optional
from sqlalchemy import create_engine, text
from . import BaseAdapter
class DjangoAdapter(BaseAdapter):
    """Adapter for Django/AMAR.

    Queries AMAR's PostgreSQL database directly with SQLAlchemy and
    converts the rows into the link-room graph contract
    (nodes / edges / summary).
    """

    def __init__(self, config: Dict[str, Any]):
        """
        Args:
            config: dict with ``user``, ``password``, ``host``, ``port``
                and ``name`` keys describing the Postgres connection.
        """
        super().__init__(config)
        self.engine = self._create_engine()

    def _create_engine(self):
        """Create SQLAlchemy engine from config."""
        db_url = (
            f"postgresql://{self.config['user']}:{self.config['password']}"
            f"@{self.config['host']}:{self.config['port']}/{self.config['name']}"
        )
        # pool_pre_ping transparently replaces connections dropped by the server.
        return create_engine(db_url, pool_pre_ping=True)

    def _execute(self, sql: str, params: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]:
        """Execute SQL and return results as a list of dicts.

        Args:
            sql: SQL text; may contain ``:name`` bound-parameter placeholders.
            params: values for the placeholders (optional, defaults to none).
        """
        with self.engine.connect() as conn:
            result = conn.execute(text(sql), params or {})
            rows = result.fetchall()
            columns = result.keys()
            return [dict(zip(columns, row)) for row in rows]

    def get_queries(self) -> List[str]:
        """Available predefined queries."""
        return [
            "user_with_pets",
            "user_with_requests",
        ]

    def navigate(
        self,
        query: Optional[str] = None,
        entity: Optional[str] = None,
        id: Optional[int] = None
    ) -> Dict[str, Any]:
        """Navigate data graph in query mode or entity mode.

        Raises:
            ValueError: if neither a query name nor entity+id is provided.
        """
        if query:
            return self._query_mode(query)
        elif entity and id:
            return self._entity_mode(entity, id)
        else:
            raise ValueError("Must provide either query or entity+id")

    def _query_mode(self, query_name: str) -> Dict[str, Any]:
        """Execute a predefined query and graph its first row.

        Raises:
            ValueError: for an unknown query name.
        """
        if query_name == "user_with_pets":
            sql = """
                SELECT
                    u.id as user_id, u.username, u.email,
                    po.id as petowner_id, po.first_name, po.last_name, po.phone,
                    p.id as pet_id, p.name as pet_name, p.pet_type, p.age
                FROM auth_user u
                JOIN mascotas_petowner po ON po.user_id = u.id
                JOIN mascotas_pet p ON p.owner_id = po.id
                WHERE p.deleted = false
                LIMIT 1
            """
        elif query_name == "user_with_requests":
            sql = """
                SELECT
                    u.id as user_id, u.username, u.email,
                    po.id as petowner_id, po.first_name, po.last_name,
                    sr.id as request_id, sr.state, sr.created_at
                FROM auth_user u
                JOIN mascotas_petowner po ON po.user_id = u.id
                JOIN solicitudes_servicerequest sr ON sr.petowner_id = po.id
                WHERE sr.deleted = false
                ORDER BY sr.created_at DESC
                LIMIT 1
            """
        else:
            raise ValueError(f"Unknown query: {query_name}")
        rows = self._execute(sql)
        if not rows:
            return self._empty_response()
        return self._rows_to_graph(rows[0])

    def _entity_mode(self, entity: str, id: int) -> Dict[str, Any]:
        """Navigate to specific entity.

        Raises:
            ValueError: for an unknown entity type.
        """
        if entity == "User":
            # Use a bound parameter (:id) instead of f-string interpolation:
            # this value arrives from the public /api/navigate endpoint, so
            # interpolating it into the SQL text was an injection vector.
            sql = """
                SELECT
                    u.id as user_id, u.username, u.email,
                    po.id as petowner_id, po.first_name, po.last_name, po.phone
                FROM auth_user u
                LEFT JOIN mascotas_petowner po ON po.user_id = u.id
                WHERE u.id = :id
            """
        else:
            raise ValueError(f"Unknown entity: {entity}")
        rows = self._execute(sql, {"id": id})
        if not rows:
            return self._empty_response()
        return self._rows_to_graph(rows[0])

    def _rows_to_graph(self, row: Dict[str, Any]) -> Dict[str, Any]:
        """Convert a single joined SQL row into the nodes/edges/summary graph.

        Each recognized id column (user_id, petowner_id, pet_id, request_id)
        becomes a node; edges are added between nodes that co-occur in the row.
        """
        nodes = []
        edges = []
        # User node
        if "user_id" in row and row["user_id"]:
            nodes.append({
                "id": f"User_{row['user_id']}",
                "type": "User",
                "label": row.get("username") or row.get("email", ""),
                "data": {
                    "id": row["user_id"],
                    "username": row.get("username"),
                    "email": row.get("email"),
                }
            })
        # PetOwner node
        if "petowner_id" in row and row["petowner_id"]:
            name = f"{row.get('first_name', '')} {row.get('last_name', '')}".strip()
            nodes.append({
                "id": f"PetOwner_{row['petowner_id']}",
                "type": "PetOwner",
                "label": name or "PetOwner",
                "data": {
                    "id": row["petowner_id"],
                    "first_name": row.get("first_name"),
                    "last_name": row.get("last_name"),
                    "phone": row.get("phone"),
                }
            })
            if "user_id" in row and row["user_id"]:
                edges.append({
                    "from": f"User_{row['user_id']}",
                    "to": f"PetOwner_{row['petowner_id']}",
                    "label": "has profile"
                })
        # Pet node
        if "pet_id" in row and row["pet_id"]:
            nodes.append({
                "id": f"Pet_{row['pet_id']}",
                "type": "Pet",
                "label": row.get("pet_name", "Pet"),
                "data": {
                    "id": row["pet_id"],
                    "name": row.get("pet_name"),
                    "pet_type": row.get("pet_type"),
                    "age": row.get("age"),
                }
            })
            if "petowner_id" in row and row["petowner_id"]:
                edges.append({
                    "from": f"PetOwner_{row['petowner_id']}",
                    "to": f"Pet_{row['pet_id']}",
                    "label": "owns"
                })
        # ServiceRequest node
        if "request_id" in row and row["request_id"]:
            nodes.append({
                "id": f"ServiceRequest_{row['request_id']}",
                "type": "ServiceRequest",
                "label": f"Request #{row['request_id']}",
                "data": {
                    "id": row["request_id"],
                    "state": row.get("state"),
                    "created_at": str(row.get("created_at", "")),
                }
            })
            if "petowner_id" in row and row["petowner_id"]:
                edges.append({
                    "from": f"PetOwner_{row['petowner_id']}",
                    "to": f"ServiceRequest_{row['request_id']}",
                    "label": "requested"
                })
        # Build summary from first User node
        summary = self._build_summary(nodes)
        return {
            "nodes": nodes,
            "edges": edges,
            "summary": summary
        }

    def _build_summary(self, nodes: List[Dict]) -> Dict[str, Any]:
        """Build the summary panel from the first User node, if any."""
        user_node = next((n for n in nodes if n["type"] == "User"), None)
        if user_node:
            data = user_node["data"]
            return {
                "title": f"User #{data['id']}",
                "credentials": f"{data.get('username', 'N/A')} | Password: Amar2025!",
                "fields": {
                    "Email": data.get("email", "N/A"),
                    "Username": data.get("username", "N/A"),
                }
            }
        # Fallback when the row produced no User node.
        return {
            "title": "No data",
            "credentials": None,
            "fields": {}
        }

    def _empty_response(self) -> Dict[str, Any]:
        """Return the empty graph structure used when a query matches no rows."""
        return {
            "nodes": [],
            "edges": [],
            "summary": {
                "title": "No data found",
                "credentials": None,
                "fields": {}
            }
        }

View File

@@ -0,0 +1,25 @@
services:
link_nest:
build:
context: .
dockerfile: Dockerfile
container_name: ${NEST_NAME}_link_nest
ports:
- "8100:8000"
environment:
- PORT=8000
- ADAPTER_TYPE=${ADAPTER_TYPE:-django}
- DB_HOST=${DB_HOST}
- DB_PORT=${DB_PORT}
- DB_NAME=${DB_NAME}
- DB_USER=${DB_USER}
- DB_PASSWORD=${DB_PASSWORD}
volumes:
- ./:/app
networks:
- default
networks:
default:
external: true
name: ${NETWORK_NAME}

105
cfg/amar/link/main.py Normal file
View File

@@ -0,0 +1,105 @@
"""
Link Room - Adapter layer between managed apps and soleprint.
Exposes standardized JSON endpoints for data navigation.
Framework-agnostic via pluggable adapters.
"""
import os
from typing import Optional
from fastapi import FastAPI, HTTPException
# FastAPI application instance; served by uvicorn (see the __main__ block).
app = FastAPI(title="Link Room", version="0.1.0")
# Lazy-loaded adapter instance, created on first use by get_adapter().
_adapter = None
def get_adapter():
    """Return the process-wide adapter, creating it on first call.

    Adapter selection and database settings come from environment variables
    (ADAPTER_TYPE, DB_*).

    Raises:
        ValueError: for an unrecognized ADAPTER_TYPE.
    """
    global _adapter
    if _adapter is not None:
        return _adapter
    adapter_type = os.getenv("ADAPTER_TYPE", "django")
    if adapter_type != "django":
        raise ValueError(f"Unknown adapter type: {adapter_type}")
    # Managed app's database connection settings.
    db_config = {
        "host": os.getenv("DB_HOST", "localhost"),
        "port": int(os.getenv("DB_PORT", "5432")),
        "name": os.getenv("DB_NAME", "amarback"),
        "user": os.getenv("DB_USER", "postgres"),
        "password": os.getenv("DB_PASSWORD", ""),
    }
    # Imported lazily so the service can start (and /health can report
    # "degraded") even if the adapter's dependencies are unavailable.
    from adapters.django import DjangoAdapter
    _adapter = DjangoAdapter(db_config)
    return _adapter
@app.get("/health")
def health():
    """Health check.

    Reports whether the configured adapter could be instantiated:
    "ok" when it loads, "degraded" when it fails (the error is printed,
    not raised, so the endpoint itself always responds).
    """
    adapter_type = os.getenv("ADAPTER_TYPE", "django")
    adapter_ok = False
    try:
        # Instantiating the adapter IS the probe; the instance is not needed
        # here (the original bound it to an unused local).
        get_adapter()
        adapter_ok = True
    except Exception as e:
        print(f"Adapter error: {e}")
    return {
        "status": "ok" if adapter_ok else "degraded",
        "service": "link-room",
        "adapter": adapter_type,
        "adapter_loaded": adapter_ok,
    }
@app.get("/api/queries")
def list_queries():
    """Expose the adapter's predefined query names as {"queries": [...]}."""
    return {"queries": get_adapter().get_queries()}
@app.get("/api/navigate")
def navigate(query: Optional[str] = None, entity: Optional[str] = None, id: Optional[int] = None):
    """
    Navigate data graph.

    Two modes:
      * query mode:      ?query=user_with_pets
      * navigation mode: ?entity=User&id=123

    Returns:
        {
            "nodes": [...],
            "edges": [...],
            "summary": {...}
        }
    """
    try:
        return get_adapter().navigate(query=query, entity=entity, id=id)
    except ValueError as e:
        # Invalid parameter combination or unknown query/entity name.
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        print(f"Navigate error: {e}")
        raise HTTPException(status_code=500, detail=str(e))
# Dev entrypoint: run the API with auto-reload when executed directly.
# In the container the server is normally started by the Dockerfile CMD.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=int(os.getenv("PORT", "8000")),
        reload=True,
    )

View File

@@ -0,0 +1,4 @@
fastapi
uvicorn[standard]
psycopg2-binary
sqlalchemy