mirror of
https://github.com/blackboxprogramming/BlackRoad-Operating-System.git
synced 2026-03-17 07:57:19 -05:00
## Domain Architecture - Complete domain-to-service mapping for 16 verified domains - Subdomain architecture for blackroad.systems and blackroad.io - GitHub organization mapping (BlackRoad-OS repos) - Railway service-to-domain configuration - DNS configuration templates for Cloudflare ## Extracted Services ### AIops Service (services/aiops/) - Canary analysis for deployment validation - Config drift detection - Event correlation engine - Auto-remediation with runbook mapping - SLO budget management ### Analytics Service (services/analytics/) - Rule-based anomaly detection with safe expression evaluation - Cohort analysis with multi-metric aggregation - Decision engine with credit budget constraints - Narrative report generation ### Codex Governance (services/codex/) - 82+ governance principles (entries) - Codex Pantheon with 48+ agent archetypes - Manifesto defining ethical framework ## Integration Points - AIops → infra.blackroad.systems (blackroad-os-infra) - Analytics → core.blackroad.systems (blackroad-os-core) - Codex → operator.blackroad.systems (blackroad-os-operator) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
58 lines
2.4 KiB
Python
58 lines
2.4 KiB
Python
import json
from datetime import datetime, timezone
from pathlib import Path
from typing import List

from .utils import increment, log_event, validate
# Repository root: the directory two levels above this module's file.
ROOT = Path(__file__).resolve().parents[1]
# Destination directory for generated report artifacts.
ART = ROOT / "artifacts" / "reports"
def build_report(plan_path: Path, out_prefix: Path) -> None:
    """Build an executive narrative report from a plan and recent artifacts.

    Reads the action plan at *plan_path*, combines it with the latest
    anomaly snapshot and cohort artifacts under ``ROOT/artifacts``,
    validates the assembled narrative against ``narrative.schema.json``,
    then writes:

    - ``<out_prefix>.md``   -- Markdown executive report
    - ``<out_prefix>.pptx`` -- slide deck (when ``python-pptx`` is
      importable), otherwise ``<out_prefix>_slides.md`` as a fallback
    - ``<out_prefix>.json`` -- the raw validated section data

    Parameters
    ----------
    plan_path : Path
        JSON plan file; actions are read from its ``actions`` list
        (each entry is expected to carry an ``action`` key).
    out_prefix : Path
        Output path prefix; output file names are derived from it.

    Raises
    ------
    Whatever ``validate`` raises on schema failure, plus
    ``json.JSONDecodeError`` on malformed input JSON.
    """
    plan = json.loads(plan_path.read_text(encoding="utf-8"))

    anomalies_path = ROOT / "artifacts" / "anomalies" / "latest.json"
    cohorts_dir = ROOT / "artifacts" / "cohorts"

    # Best-effort inputs: a missing anomaly snapshot or empty cohort
    # directory yields empty data rather than an error, so a report can
    # still be produced.
    anomalies = (
        json.loads(anomalies_path.read_text(encoding="utf-8"))
        if anomalies_path.exists()
        else []
    )
    cohort_names = [f.stem for f in cohorts_dir.glob("*.json")]

    sections = [
        {"title": "What happened", "body": f"Anomalies: {len(anomalies)}"},
        {"title": "Why it matters", "body": "Impacting key KPIs"},
        {
            "title": "What we're doing",
            "body": ", ".join(a["action"] for a in plan.get("actions", []))
            or "No actions",
        },
        {
            "title": "Risks & Next Steps",
            "body": "Monitor cohorts: " + ", ".join(cohort_names),
        },
    ]
    data = {"sections": sections}
    validate(data, "narrative.schema.json")

    # datetime.utcnow() is deprecated since Python 3.12; an aware UTC
    # timestamp formats identically here.
    ts = datetime.now(timezone.utc).strftime("%Y%m%d%H%M%S")
    ART.mkdir(parents=True, exist_ok=True)
    out_prefix.parent.mkdir(parents=True, exist_ok=True)

    _write_markdown(out_prefix.with_suffix(".md"), ts, sections)
    _write_slides(out_prefix, sections)

    out_prefix.with_suffix(".json").write_text(
        json.dumps(data, indent=2), encoding="utf-8"
    )
    increment("narrative_built")
    log_event({"type": "narrative_built", "plan": str(plan_path)})


def _write_markdown(md_path: Path, ts: str, sections: List[dict]) -> None:
    """Write the Markdown executive report for *sections* to *md_path*."""
    lines = [f"# Executive Report {ts}", ""]
    for section in sections:
        lines.extend((f"## {section['title']}", section["body"], ""))
    md_path.write_text("\n".join(lines), encoding="utf-8")


def _write_slides(out_prefix: Path, sections: List[dict]) -> None:
    """Write a PPTX deck if python-pptx is installed, else Markdown slides.

    The broad ``except Exception`` is deliberate: slide generation is
    best-effort, and any pptx failure (missing package or save error)
    falls back to ``<out_prefix>_slides.md``.
    """
    try:
        from pptx import Presentation  # type: ignore

        pres = Presentation()
        for section in sections:
            slide = pres.slides.add_slide(pres.slide_layouts[1])
            slide.shapes.title.text = section["title"]
            slide.shapes.placeholders[1].text = section["body"]
        pres.save(out_prefix.with_suffix(".pptx"))
    except Exception:
        slide_lines: List[str] = []
        for section in sections:
            slide_lines.extend((f"# {section['title']}", section["body"], ""))
        out_prefix.with_name(out_prefix.name + "_slides.md").write_text(
            "\n".join(slide_lines), encoding="utf-8"
        )