migrated all pawprint work
This commit is contained in:
4
artery/veins/jira/.env.example
Normal file
4
artery/veins/jira/.env.example
Normal file
@@ -0,0 +1,4 @@
|
||||
JIRA_URL=https://yourcompany.atlassian.net
|
||||
JIRA_EMAIL=your.email@company.com
|
||||
JIRA_API_TOKEN=your_api_token
|
||||
API_PORT=8001
|
||||
37
artery/veins/jira/README.md
Normal file
37
artery/veins/jira/README.md
Normal file
@@ -0,0 +1,37 @@
|
||||
# Jira Vein
|
||||
|
||||
Jira connector for Pawprint Artery.
|
||||
|
||||
## Authentication
|
||||
|
||||
Two ways to provide Jira credentials:
|
||||
|
||||
### 1. Web UI (Headers)
|
||||
Enter credentials in the web form at https://artery.mcrn.ar
|
||||
- Credentials sent as `X-Jira-Email` and `X-Jira-Token` headers
|
||||
- Use for demos, testing, or when credentials change frequently
|
||||
|
||||
### 2. Local .env file (Fallback)
|
||||
Create `.env` (not committed to git):
|
||||
```bash
|
||||
cp .env.example .env
|
||||
# Edit .env with your credentials
|
||||
```
|
||||
|
||||
The system tries headers first, then falls back to `.env`.
|
||||
|
||||
## Getting a Jira API Token
|
||||
|
||||
1. Go to https://id.atlassian.com/manage-profile/security/api-tokens
|
||||
2. Click "Create API token"
|
||||
3. Copy the token (starts with `ATATT3x`)
|
||||
4. Use in UI or add to `.env`
|
||||
|
||||
## Endpoints
|
||||
|
||||
- `GET /jira/health` - Connection test
|
||||
- `GET /jira/mine` - My assigned tickets
|
||||
- `GET /jira/ticket/{key}` - Ticket details
|
||||
- `POST /jira/search` - Raw JQL search
|
||||
|
||||
Add `?text=true` for LLM-friendly output.
|
||||
1
artery/veins/jira/__init__.py
Normal file
1
artery/veins/jira/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Jira Vein
|
||||
0
artery/veins/jira/api/__init__.py
Normal file
0
artery/veins/jira/api/__init__.py
Normal file
299
artery/veins/jira/api/routes.py
Normal file
299
artery/veins/jira/api/routes.py
Normal file
@@ -0,0 +1,299 @@
|
||||
"""
|
||||
API routes for Jira vein.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import logging
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi.responses import PlainTextResponse, StreamingResponse
|
||||
from typing import Optional, Union
|
||||
from io import BytesIO
|
||||
|
||||
from ..core.auth import get_jira_credentials, JiraCredentials
|
||||
from ..core.client import connect_jira, JiraClientError
|
||||
from ..core.config import settings
|
||||
from ..core.query import JQL, Queries
|
||||
from ..models.ticket import Ticket, TicketDetail, TicketList
|
||||
from ..models.formatter import format_ticket_list, format_ticket_detail
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _download_attachments(jira, ticket: TicketDetail) -> TicketDetail:
    """Download attachment content and populate base64 field.

    Mutates each Attachment in ``ticket.attachments`` in place, setting
    ``content_base64`` only when the HTTP fetch returns 200. Failures are
    skipped silently (deliberate best-effort).
    """
    for att in ticket.attachments:
        try:
            # NOTE(review): relies on the jira client's private ``_session``
            # attribute for an authenticated HTTP session — may break across
            # jira-library versions; confirm against the pinned version.
            response = jira._session.get(att.url)
            if response.status_code == 200:
                att.content_base64 = base64.b64encode(response.content).decode("utf-8")
        except Exception:
            pass  # Skip failed downloads
    return ticket
|
||||
|
||||
|
||||
def _search(creds: JiraCredentials, jql: JQL, page: int, page_size: int) -> TicketList:
    """Execute a JQL query and wrap the results in a paginated TicketList."""
    client = connect_jira(creds.email, creds.token)
    offset = (page - 1) * page_size
    results = client.search_issues(jql.build(), startAt=offset, maxResults=page_size)
    return TicketList(
        tickets=[Ticket.from_jira(issue, settings.jira_url) for issue in results],
        total=results.total,
        page=page,
        page_size=page_size,
    )
|
||||
|
||||
|
||||
def _maybe_text(data: Union[TicketList, TicketDetail], text: bool):
    """Return *data* as-is, or as a plain-text rendering when *text* is set."""
    if text:
        render = format_ticket_list if isinstance(data, TicketList) else format_ticket_detail
        return PlainTextResponse(render(data))
    return data
|
||||
|
||||
|
||||
@router.get("/health")
|
||||
def health(creds: JiraCredentials = Depends(get_jira_credentials)):
|
||||
try:
|
||||
jira = connect_jira(creds.email, creds.token)
|
||||
me = jira.myself()
|
||||
return {"status": "ok", "user": me["displayName"]}
|
||||
except Exception as e:
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.get("/mine")
|
||||
def my_tickets(
|
||||
creds: JiraCredentials = Depends(get_jira_credentials),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=100),
|
||||
project: Optional[str] = None,
|
||||
text: bool = False,
|
||||
):
|
||||
"""Get my assigned open tickets."""
|
||||
try:
|
||||
return _maybe_text(_search(creds, Queries.my_tickets(project), page, page_size), text)
|
||||
except Exception as e:
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.get("/backlog")
|
||||
def backlog(
|
||||
creds: JiraCredentials = Depends(get_jira_credentials),
|
||||
project: str = Query(...),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=100),
|
||||
text: bool = False,
|
||||
):
|
||||
"""Get backlog tickets for a project."""
|
||||
try:
|
||||
return _maybe_text(_search(creds, Queries.backlog(project), page, page_size), text)
|
||||
except Exception as e:
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.get("/sprint")
|
||||
def current_sprint(
|
||||
creds: JiraCredentials = Depends(get_jira_credentials),
|
||||
project: str = Query(...),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=100),
|
||||
text: bool = False,
|
||||
):
|
||||
"""Get current sprint tickets for a project."""
|
||||
try:
|
||||
return _maybe_text(_search(creds, Queries.current_sprint(project), page, page_size), text)
|
||||
except Exception as e:
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.get("/ticket/{key}")
|
||||
def get_ticket(
|
||||
key: str,
|
||||
creds: JiraCredentials = Depends(get_jira_credentials),
|
||||
text: bool = False,
|
||||
include_attachments: bool = False,
|
||||
include_children: bool = True,
|
||||
):
|
||||
"""Get ticket details with comments, attachments, and child work items."""
|
||||
try:
|
||||
jira = connect_jira(creds.email, creds.token)
|
||||
issue = jira.issue(key, expand="comments")
|
||||
ticket = TicketDetail.from_jira(issue, settings.jira_url)
|
||||
if include_attachments and ticket.attachments:
|
||||
ticket = _download_attachments(jira, ticket)
|
||||
|
||||
# Fetch child work items if requested and ticket has subtasks
|
||||
children = []
|
||||
if include_children and ticket.subtasks:
|
||||
# Fetch all children in one query
|
||||
child_jql = f"key in ({','.join(ticket.subtasks)})"
|
||||
child_issues = jira.search_issues(child_jql, maxResults=len(ticket.subtasks))
|
||||
children = [Ticket.from_jira(i, settings.jira_url) for i in child_issues]
|
||||
# Sort children by key
|
||||
children.sort(key=lambda t: t.key)
|
||||
|
||||
# Return as special format that includes children
|
||||
if text:
|
||||
from ..models.formatter import format_ticket_with_children
|
||||
return PlainTextResponse(format_ticket_with_children(ticket, children))
|
||||
|
||||
# For JSON, add children to response
|
||||
result = ticket.model_dump()
|
||||
result["children"] = [c.model_dump() for c in children]
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
# Return the actual Jira error for debugging
|
||||
raise HTTPException(404, f"Error fetching {key}: {str(e)}")
|
||||
|
||||
|
||||
@router.post("/search")
|
||||
def search(
|
||||
jql: str,
|
||||
creds: JiraCredentials = Depends(get_jira_credentials),
|
||||
page: int = Query(1, ge=1),
|
||||
page_size: int = Query(50, ge=1, le=100),
|
||||
text: bool = False,
|
||||
):
|
||||
"""Search with raw JQL."""
|
||||
try:
|
||||
return _maybe_text(_search(creds, JQL().raw(jql), page, page_size), text)
|
||||
except Exception as e:
|
||||
raise HTTPException(500, str(e))
|
||||
|
||||
|
||||
@router.post("/epic/{key}/process")
|
||||
def process_epic(
|
||||
key: str,
|
||||
creds: JiraCredentials = Depends(get_jira_credentials),
|
||||
):
|
||||
"""Process epic: fetch epic and all children, save to files."""
|
||||
import time
|
||||
import json
|
||||
from pathlib import Path
|
||||
from fastapi.responses import StreamingResponse
|
||||
|
||||
logger.info(f"EPIC endpoint called: key={key}, email={creds.email}")
|
||||
|
||||
def generate():
|
||||
try:
|
||||
logger.info(f"Starting EPIC process for {key}")
|
||||
jira = connect_jira(creds.email, creds.token)
|
||||
logger.info(f"Connected to Jira for {key}")
|
||||
|
||||
# Fetch epic
|
||||
yield json.dumps({"status": "fetching_epic", "completed": 0, "total": 0}) + "\n"
|
||||
logger.info(f"Sent fetching_epic status for {key}")
|
||||
time.sleep(0.5)
|
||||
|
||||
logger.info(f"Fetching issue {key}")
|
||||
epic_issue = jira.issue(key, expand="comments")
|
||||
logger.info(f"Got issue {key}")
|
||||
epic = TicketDetail.from_jira(epic_issue, settings.jira_url)
|
||||
logger.info(f"Parsed epic: {epic.key} with {len(epic.subtasks)} subtasks")
|
||||
|
||||
# Get children keys from subtasks
|
||||
if not epic.subtasks:
|
||||
yield json.dumps({"status": "no_children", "completed": 0, "total": 0}) + "\n"
|
||||
return
|
||||
|
||||
total = len(epic.subtasks)
|
||||
|
||||
# Create storage folder in larder
|
||||
larder_path = Path(__file__).parent.parent.parent.parent / "larder" / "jira_epics" / key
|
||||
larder_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Save epic
|
||||
epic_file = larder_path / f"{key}.json"
|
||||
with open(epic_file, "w") as f:
|
||||
json.dump(epic.model_dump(), f, indent=2, default=str)
|
||||
|
||||
yield json.dumps({"status": "processing", "completed": 0, "total": total}) + "\n"
|
||||
|
||||
# Fetch each child
|
||||
children = []
|
||||
for idx, child_key in enumerate(epic.subtasks, 1):
|
||||
time.sleep(0.8) # Human speed
|
||||
|
||||
try:
|
||||
child_issue = jira.issue(child_key, expand="comments")
|
||||
child = TicketDetail.from_jira(child_issue, settings.jira_url)
|
||||
|
||||
# Save child
|
||||
child_file = larder_path / f"{child_key}.json"
|
||||
with open(child_file, "w") as f:
|
||||
json.dump(child.model_dump(), f, indent=2, default=str)
|
||||
|
||||
# Collect children for text formatting
|
||||
children.append(Ticket.from_jira(child_issue, settings.jira_url))
|
||||
|
||||
yield json.dumps({"status": "processing", "completed": idx, "total": total}) + "\n"
|
||||
except Exception as e:
|
||||
import traceback
|
||||
yield json.dumps({
|
||||
"status": "error",
|
||||
"completed": idx,
|
||||
"total": total,
|
||||
"error": str(e),
|
||||
"error_type": type(e).__name__,
|
||||
"child_key": child_key,
|
||||
"traceback": traceback.format_exc()
|
||||
}) + "\n"
|
||||
|
||||
# Format as text for display
|
||||
from ..models.formatter import format_ticket_with_children
|
||||
formatted_text = format_ticket_with_children(epic, children)
|
||||
|
||||
yield json.dumps({
|
||||
"status": "complete",
|
||||
"completed": total,
|
||||
"total": total,
|
||||
"path": str(larder_path),
|
||||
"text": formatted_text
|
||||
}) + "\n"
|
||||
|
||||
except Exception as e:
|
||||
import traceback
|
||||
yield json.dumps({
|
||||
"status": "error",
|
||||
"error": str(e),
|
||||
"error_type": type(e).__name__,
|
||||
"traceback": traceback.format_exc()
|
||||
}) + "\n"
|
||||
|
||||
return StreamingResponse(generate(), media_type="application/x-ndjson")
|
||||
|
||||
|
||||
@router.get("/epic/{key}/status")
|
||||
def get_epic_status(key: str):
|
||||
"""Check if epic has been processed and files exist."""
|
||||
from pathlib import Path
|
||||
import json
|
||||
|
||||
larder_path = Path(__file__).parent.parent.parent.parent / "larder" / "jira_epics" / key
|
||||
|
||||
if not larder_path.exists():
|
||||
return {"processed": False}
|
||||
|
||||
files = list(larder_path.glob("*.json"))
|
||||
return {
|
||||
"processed": True,
|
||||
"path": str(larder_path),
|
||||
"files": [f.name for f in files],
|
||||
"count": len(files)
|
||||
}
|
||||
|
||||
|
||||
@router.get("/attachment/{attachment_id}")
|
||||
def get_attachment(
|
||||
attachment_id: str,
|
||||
creds: JiraCredentials = Depends(get_jira_credentials),
|
||||
):
|
||||
"""Stream attachment content directly from Jira."""
|
||||
jira = connect_jira(creds.email, creds.token)
|
||||
att_url = f"{settings.jira_url}/rest/api/2/attachment/content/{attachment_id}"
|
||||
response = jira._session.get(att_url, allow_redirects=True)
|
||||
if response.status_code != 200:
|
||||
raise HTTPException(404, f"Attachment not found: {attachment_id}")
|
||||
content_type = response.headers.get("Content-Type", "application/octet-stream")
|
||||
return StreamingResponse(
|
||||
BytesIO(response.content),
|
||||
media_type=content_type,
|
||||
)
|
||||
0
artery/veins/jira/core/__init__.py
Normal file
0
artery/veins/jira/core/__init__.py
Normal file
37
artery/veins/jira/core/auth.py
Normal file
37
artery/veins/jira/core/auth.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""
|
||||
Jira credentials authentication for Jira vein.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from fastapi import Header, HTTPException
|
||||
from .config import settings
|
||||
|
||||
|
||||
@dataclass
class JiraCredentials:
    """Resolved Jira credentials for a single request."""

    # Atlassian account email used for basic auth
    email: str
    # Jira API token paired with the email
    token: str
|
||||
|
||||
|
||||
async def get_jira_credentials(
    x_jira_email: str | None = Header(None),
    x_jira_token: str | None = Header(None),
) -> JiraCredentials:
    """
    Dependency that extracts Jira credentials from headers or falls back to config.

    - Headers provided → per-request credentials (web demo)
    - No headers → use .env credentials (API/standalone)

    Raises HTTP 401 when neither source provides a usable pair.
    """
    # Normalize headers first; whitespace-only values count as absent.
    header_email = x_jira_email.strip() if x_jira_email else ""
    header_token = x_jira_token.strip() if x_jira_token else ""
    if header_email and header_token:
        return JiraCredentials(email=header_email, token=header_token)

    # Fall back to the .env-backed settings.
    if settings.jira_email and settings.jira_api_token:
        return JiraCredentials(email=settings.jira_email, token=settings.jira_api_token)

    raise HTTPException(
        status_code=401,
        detail="Missing credentials: provide X-Jira-Email and X-Jira-Token headers, or configure in .env",
    )
|
||||
19
artery/veins/jira/core/client.py
Normal file
19
artery/veins/jira/core/client.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""
|
||||
Jira connection client.
|
||||
"""
|
||||
|
||||
from jira import JIRA
|
||||
|
||||
from .config import settings
|
||||
|
||||
|
||||
class JiraClientError(Exception):
    """Base error for Jira client failures.

    NOTE(review): not raised anywhere in this module; it is imported by the
    routes module — confirm intended usage.
    """
    pass
|
||||
|
||||
|
||||
def connect_jira(email: str, token: str) -> JIRA:
    """Create a Jira connection with the given credentials.

    Uses HTTP basic auth against the server URL from settings
    (``settings.jira_url``).
    """
    return JIRA(
        server=settings.jira_url,
        basic_auth=(email, token),
    )
|
||||
23
artery/veins/jira/core/config.py
Normal file
23
artery/veins/jira/core/config.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""
|
||||
Jira credentials loaded from .env file.
|
||||
"""
|
||||
|
||||
from pathlib import Path
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
ENV_FILE = Path(__file__).parent.parent / ".env"
|
||||
|
||||
|
||||
class JiraConfig(BaseSettings):
    """Settings loaded from the vein-local .env file (or the environment)."""

    jira_url: str  # required: base URL of the Jira instance
    jira_email: str | None = None  # Optional: can be provided per-request via headers
    jira_api_token: str | None = None  # Optional: can be provided per-request via headers
    api_port: int = 8001  # port used by main.py / run.py when serving

    model_config = {
        "env_file": ENV_FILE,
        "env_file_encoding": "utf-8",
    }


# Module-level singleton imported across the vein's modules.
settings = JiraConfig()
|
||||
86
artery/veins/jira/core/query.py
Normal file
86
artery/veins/jira/core/query.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""
|
||||
JQL query builder.
|
||||
"""
|
||||
|
||||
from typing import Optional, List
|
||||
|
||||
|
||||
class JQL:
    """Fluent JQL builder.

    Condition methods append one clause each and return ``self`` for
    chaining; ``build()`` joins clauses with AND and appends the optional
    ORDER BY.
    """

    def __init__(self):
        self._parts: List[str] = []
        self._order: Optional[str] = None

    def _q(self, val: str) -> str:
        """Quote *val* for JQL when needed, escaping embedded characters.

        Fix: values containing a double quote previously produced malformed
        JQL (they were emitted unquoted and unescaped). Values with a space
        or a quote are now wrapped in quotes with backslashes and quotes
        escaped; plain values are unchanged.
        """
        if " " in val or '"' in val:
            escaped = val.replace("\\", "\\\\").replace('"', '\\"')
            return f'"{escaped}"'
        return val

    # Conditions
    def assigned_to_me(self) -> "JQL":
        self._parts.append("assignee = currentUser()")
        return self

    def project(self, key: str) -> "JQL":
        self._parts.append(f"project = {self._q(key)}")
        return self

    def sprint_open(self) -> "JQL":
        self._parts.append("sprint in openSprints()")
        return self

    def in_backlog(self) -> "JQL":
        self._parts.append("sprint is EMPTY")
        return self

    def not_done(self) -> "JQL":
        self._parts.append("statusCategory != Done")
        return self

    def status(self, name: str) -> "JQL":
        self._parts.append(f"status = {self._q(name)}")
        return self

    def label(self, name: str) -> "JQL":
        self._parts.append(f"labels = {self._q(name)}")
        return self

    def text(self, search: str) -> "JQL":
        # Fix: user text is now escaped so an embedded quote cannot break
        # out of the quoted string (previously interpolated verbatim).
        escaped = search.replace("\\", "\\\\").replace('"', '\\"')
        self._parts.append(f'text ~ "{escaped}"')
        return self

    def issue_type(self, name: str) -> "JQL":
        self._parts.append(f"issuetype = {self._q(name)}")
        return self

    def raw(self, jql: str) -> "JQL":
        """Append a caller-supplied JQL fragment verbatim (trusted input)."""
        self._parts.append(jql)
        return self

    # Ordering
    def order_by(self, field: str, desc: bool = True) -> "JQL":
        self._order = f"ORDER BY {field} {'DESC' if desc else 'ASC'}"
        return self

    def build(self) -> str:
        """Join clauses with AND and append the ORDER BY, if any."""
        jql = " AND ".join(self._parts)
        if self._order:
            jql = f"{jql} {self._order}"
        return jql.strip()
|
||||
|
||||
|
||||
# Preset queries for main use cases
|
||||
class Queries:
    """Canned JQL presets backing the main endpoints."""

    @staticmethod
    def my_tickets(project: Optional[str] = None) -> JQL:
        """Open tickets assigned to the current user, most recently updated first."""
        query = JQL().assigned_to_me().not_done().order_by("updated")
        return query.project(project) if project else query

    @staticmethod
    def backlog(project: str) -> JQL:
        """Not-done tickets with no sprint assignment, ordered by priority."""
        return JQL().project(project).in_backlog().not_done().order_by("priority")

    @staticmethod
    def current_sprint(project: str) -> JQL:
        """Tickets in the project's open sprints, ordered by priority."""
        return JQL().project(project).sprint_open().order_by("priority")
|
||||
15
artery/veins/jira/main.py
Normal file
15
artery/veins/jira/main.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""
|
||||
Jira Vein - FastAPI app.
|
||||
"""
|
||||
|
||||
from fastapi import FastAPI
|
||||
from .api.routes import router
|
||||
from .core.config import settings
|
||||
|
||||
app = FastAPI(title="Jira Vein", version="0.1.0")
|
||||
app.include_router(router)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
uvicorn.run(app, host="0.0.0.0", port=settings.api_port)
|
||||
0
artery/veins/jira/models/__init__.py
Normal file
0
artery/veins/jira/models/__init__.py
Normal file
182
artery/veins/jira/models/formatter.py
Normal file
182
artery/veins/jira/models/formatter.py
Normal file
@@ -0,0 +1,182 @@
|
||||
"""
|
||||
Text formatters for LLM/human readable output.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .ticket import Attachment, Ticket, TicketDetail, TicketList
|
||||
|
||||
|
||||
def _fmt_size(size: int) -> str:
|
||||
"""Format bytes to human readable."""
|
||||
for unit in ["B", "KB", "MB", "GB"]:
|
||||
if size < 1024:
|
||||
return f"{size:.1f}{unit}" if unit != "B" else f"{size}{unit}"
|
||||
size /= 1024
|
||||
return f"{size:.1f}TB"
|
||||
|
||||
|
||||
def _fmt_dt(dt) -> str:
|
||||
if not dt:
|
||||
return "-"
|
||||
return dt.strftime("%Y-%m-%d %H:%M")
|
||||
|
||||
|
||||
def format_ticket(t: "Ticket") -> str:
|
||||
lines = [
|
||||
f"[{t.key}] {t.summary}",
|
||||
f" Project: {t.project} | Type: {t.issue_type} | Priority: {t.priority or '-'}",
|
||||
f" Status: {t.status} ({t.status_category or '-'})",
|
||||
f" Assignee: {t.assignee or '-'} | Reporter: {t.reporter or '-'}",
|
||||
f" Labels: {', '.join(t.labels) if t.labels else '-'}",
|
||||
f" Created: {_fmt_dt(t.created)} | Updated: {_fmt_dt(t.updated)}",
|
||||
f" URL: {t.url}",
|
||||
]
|
||||
if t.description:
|
||||
lines.append(f" Description: {t.description}")
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def format_ticket_detail(t: "TicketDetail") -> str:
    """Render a full ticket as markdown-ish text.

    Sections, in order: header/metadata, Description, Comments, Attachments.
    """
    lines = [
        f"# {t.key}: {t.summary}",
        "",
        f"Project: {t.project}",
        f"Type: {t.issue_type}",
        f"Status: {t.status} ({t.status_category or '-'})",
        f"Priority: {t.priority or '-'}",
        f"Assignee: {t.assignee or '-'}",
        f"Reporter: {t.reporter or '-'}",
        f"Labels: {', '.join(t.labels) if t.labels else '-'}",
        f"Created: {_fmt_dt(t.created)}",
        f"Updated: {_fmt_dt(t.updated)}",
        f"Parent: {t.parent_key or '-'}",
        f"Subtasks: {', '.join(t.subtasks) if t.subtasks else '-'}",
        f"Linked issues: {', '.join(t.linked_issues) if t.linked_issues else '-'}",
        f"URL: {t.url}",
        "",
        "## Description",
        t.description or "(no description)",
        "",
    ]

    # Comments are plain dicts (author/body/created); created is an ISO
    # string sliced to minute precision for the heading.
    lines.append(f"## Comments ({len(t.comments)})")
    if t.comments:
        for c in t.comments:
            lines.append(f"### {c.get('author', 'Unknown')} ({c.get('created', '')[:16] if c.get('created') else '-'})")
            lines.append(c.get("body", ""))
            lines.append("")
    else:
        lines.append("(no comments)")

    lines.append("")
    lines.append(f"## Attachments ({len(t.attachments)})")
    if t.attachments:
        for a in t.attachments:
            # "[downloaded]" marks attachments whose bytes were fetched.
            has_content = "[downloaded]" if a.content_base64 else ""
            lines.append(f"- {a.filename} ({_fmt_size(a.size)}, {a.mimetype}) {has_content}")
    else:
        lines.append("(no attachments)")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def format_ticket_with_children(parent: "TicketDetail", children: list) -> str:
    """Format a ticket with its children (subtasks/stories).

    Like format_ticket_detail, but inserts a children section between the
    description and the comments; children are rendered as one-line-per-field
    summaries.
    """
    lines = [
        f"# {parent.key}: {parent.summary}",
        "",
        f"Project: {parent.project}",
        f"Type: {parent.issue_type}",
        f"Status: {parent.status} ({parent.status_category or '-'})",
        f"Priority: {parent.priority or '-'}",
        f"Assignee: {parent.assignee or '-'}",
        f"Reporter: {parent.reporter or '-'}",
        f"Labels: {', '.join(parent.labels) if parent.labels else '-'}",
        f"Created: {_fmt_dt(parent.created)}",
        f"Updated: {_fmt_dt(parent.updated)}",
        f"URL: {parent.url}",
        "",
        "## Description",
        parent.description or "(no description)",
        "",
    ]

    # Add children section
    if children:
        # Heading depends on the parent's level: Stories/Tasks own sub-tasks,
        # anything else (e.g. an Epic) owns stories.
        child_type = "Sub-tasks" if parent.issue_type in ("Story", "Task") else "Stories"
        lines.append(f"## {child_type} ({len(children)})")
        lines.append("=" * 60)
        lines.append("")

        for child in children:
            lines.append(f"[{child.key}] {child.summary}")
            lines.append(f" Type: {child.issue_type} | Status: {child.status} | Priority: {child.priority or '-'}")
            lines.append(f" Assignee: {child.assignee or '-'}")
            lines.append(f" URL: {child.url}")
            lines.append("")
        lines.append("-" * 60)
        lines.append("")

    lines.append(f"## Comments ({len(parent.comments)})")
    if parent.comments:
        for c in parent.comments:
            lines.append(f"### {c.get('author', 'Unknown')} ({c.get('created', '')[:16] if c.get('created') else '-'})")
            lines.append(c.get("body", ""))
            lines.append("")
    else:
        lines.append("(no comments)")

    lines.append("")
    lines.append(f"## Attachments ({len(parent.attachments)})")
    if parent.attachments:
        for a in parent.attachments:
            has_content = "[downloaded]" if a.content_base64 else ""
            lines.append(f"- {a.filename} ({_fmt_size(a.size)}, {a.mimetype}) {has_content}")
    else:
        lines.append("(no attachments)")

    return "\n".join(lines)
|
||||
|
||||
|
||||
def format_ticket_list(tl: "TicketList") -> str:
|
||||
# Sort for text output: stories with subtasks, then bugs
|
||||
stories = []
|
||||
bugs = []
|
||||
subtasks = []
|
||||
|
||||
for t in tl.tickets:
|
||||
if t.parent_key:
|
||||
subtasks.append(t)
|
||||
elif t.issue_type in ("Story", "Epic", "Task"):
|
||||
stories.append(t)
|
||||
elif t.issue_type == "Bug":
|
||||
bugs.append(t)
|
||||
else:
|
||||
stories.append(t) # fallback
|
||||
|
||||
# Build sorted list: parent stories, then their subtasks, then bugs
|
||||
sorted_tickets = []
|
||||
for story in sorted(stories, key=lambda t: t.key):
|
||||
sorted_tickets.append(story)
|
||||
# Add subtasks for this story
|
||||
story_subtasks = [st for st in subtasks if st.parent_key == story.key]
|
||||
sorted_tickets.extend(sorted(story_subtasks, key=lambda t: t.key))
|
||||
|
||||
# Add bugs at the end
|
||||
sorted_tickets.extend(sorted(bugs, key=lambda t: t.key))
|
||||
|
||||
lines = [
|
||||
f"Total: {tl.total} | Page: {tl.page} | Page size: {tl.page_size}",
|
||||
f"Showing: {len(tl.tickets)} tickets",
|
||||
"=" * 60,
|
||||
"",
|
||||
]
|
||||
for i, t in enumerate(sorted_tickets):
|
||||
lines.append(format_ticket(t))
|
||||
if i < len(sorted_tickets) - 1:
|
||||
lines.append("")
|
||||
lines.append("-" * 60)
|
||||
lines.append("")
|
||||
return "\n".join(lines)
|
||||
135
artery/veins/jira/models/ticket.py
Normal file
135
artery/veins/jira/models/ticket.py
Normal file
@@ -0,0 +1,135 @@
|
||||
"""
|
||||
Ticket models with self-parsing from Jira objects.
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class Attachment(BaseModel):
    """A Jira attachment; binary content is only fetched on request."""

    id: str
    filename: str
    mimetype: str
    size: int  # bytes
    url: str  # direct content URL on the Jira server
    content_base64: Optional[str] = None  # populated when include_attachments=true

    @classmethod
    def from_jira(cls, att) -> "Attachment":
        """Build from a jira-library attachment resource (content not downloaded)."""
        return cls(
            id=att.id,
            filename=att.filename,
            mimetype=att.mimeType,
            size=att.size,
            url=att.content,
        )
|
||||
|
||||
|
||||
class Ticket(BaseModel):
    """Summary view of a Jira issue, as returned by list endpoints."""

    key: str
    summary: str
    description: Optional[str] = None
    status: str
    status_category: Optional[str] = None  # e.g. the status's category name, when exposed
    issue_type: str
    priority: Optional[str] = None
    project: str
    assignee: Optional[str] = None  # display name
    reporter: Optional[str] = None  # display name
    labels: List[str] = []
    created: Optional[datetime] = None
    updated: Optional[datetime] = None
    url: str  # browse URL built from the server base URL
    parent_key: Optional[str] = None  # For subtasks

    @classmethod
    def from_jira(cls, issue, base_url: str) -> "Ticket":
        """Build from a jira-library Issue; *base_url* forms the browse URL.

        Defensive hasattr checks cover fields Jira omits depending on issue
        type and screen configuration.
        """
        f = issue.fields
        status_cat = None
        if hasattr(f.status, "statusCategory"):
            status_cat = f.status.statusCategory.name

        # Get parent key for subtasks
        parent = None
        if hasattr(f, "parent") and f.parent:
            parent = f.parent.key

        return cls(
            key=issue.key,
            summary=f.summary or "",
            description=f.description,
            status=f.status.name,
            status_category=status_cat,
            issue_type=f.issuetype.name,
            priority=f.priority.name if f.priority else None,
            project=f.project.key,
            assignee=f.assignee.displayName if f.assignee else None,
            reporter=f.reporter.displayName if f.reporter else None,
            labels=f.labels or [],
            created=cls._parse_dt(f.created),
            updated=cls._parse_dt(f.updated),
            url=f"{base_url}/browse/{issue.key}",
            parent_key=parent,
        )

    @staticmethod
    def _parse_dt(val: Optional[str]) -> Optional[datetime]:
        """Parse an ISO timestamp string; None for falsy or unparseable input."""
        if not val:
            return None
        try:
            # Jira uses a trailing Z for UTC; fromisoformat needs an offset.
            return datetime.fromisoformat(val.replace("Z", "+00:00"))
        except ValueError:
            return None
|
||||
|
||||
|
||||
class TicketDetail(Ticket):
    """Full issue view: summary fields plus comments, links, subtasks,
    and attachments."""

    comments: List[dict] = []  # dicts with "author", "body", "created"
    linked_issues: List[str] = []  # keys from both inward and outward links
    subtasks: List[str] = []  # child issue keys
    attachments: List[Attachment] = []

    @classmethod
    def from_jira(cls, issue, base_url: str) -> "TicketDetail":
        """Build from a jira-library Issue.

        Callers in this package fetch the issue with expand="comments" so
        the comment list is populated.
        """
        base = Ticket.from_jira(issue, base_url)
        f = issue.fields

        comments = []
        if hasattr(f, "comment") and f.comment:
            for c in f.comment.comments:
                comments.append({
                    "author": c.author.displayName if hasattr(c, "author") else None,
                    "body": c.body,
                    "created": c.created,
                })

        linked = []
        if hasattr(f, "issuelinks") and f.issuelinks:
            for link in f.issuelinks:
                # A link carries outwardIssue or inwardIssue depending on direction.
                if hasattr(link, "outwardIssue"):
                    linked.append(link.outwardIssue.key)
                if hasattr(link, "inwardIssue"):
                    linked.append(link.inwardIssue.key)

        subtasks = []
        if hasattr(f, "subtasks") and f.subtasks:
            subtasks = [st.key for st in f.subtasks]

        attachments = []
        if hasattr(f, "attachment") and f.attachment:
            attachments = [Attachment.from_jira(a) for a in f.attachment]

        return cls(
            **base.model_dump(),
            comments=comments,
            linked_issues=linked,
            subtasks=subtasks,
            attachments=attachments,
        )
|
||||
|
||||
|
||||
class TicketList(BaseModel):
    """One page of search results."""

    tickets: List[Ticket]
    total: int  # total matches reported by Jira, not just this page
    page: int  # 1-based page number
    page_size: int
|
||||
5
artery/veins/jira/requirements.txt
Normal file
5
artery/veins/jira/requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
fastapi>=0.104.0
|
||||
uvicorn>=0.24.0
|
||||
jira>=3.5.0
|
||||
pydantic>=2.0.0
|
||||
pydantic-settings>=2.0.0
|
||||
19
artery/veins/jira/run.py
Normal file
19
artery/veins/jira/run.py
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env python
"""Run the Jira vein API (development entry point with auto-reload)."""

import sys
from pathlib import Path

# Add parent to path for imports
sys.path.insert(0, str(Path(__file__).parent))

import uvicorn
from core.config import settings

if __name__ == "__main__":
    # The import-string form ("main:app") is required for reload=True.
    uvicorn.run(
        "main:app",
        host="0.0.0.0",
        port=settings.api_port,
        reload=True,
    )
|
||||
Reference in New Issue
Block a user