sync: update from blackroad-operator 2026-03-14

Synced from BlackRoad-OS-Inc/blackroad-operator/orgs/personal/new_world
BlackRoad OS — Pave Tomorrow.

RoadChain-SHA2048: b76200263d8fb3e1
RoadChain-Identity: alexa@sovereign
RoadChain-Full: b76200263d8fb3e1b7b4c5a231c28e0148970f5382a802c4b4aa815a3a3ba02a7c028868bd9f7f538dda3c0d2fcb73dda3c0f9a9662cfb070440928170c77549fb9b5a0459118689e1a681ccb344a6954a97402048d6f42cbb55e6f14cf897b3c0205535917a43958da8dd2614efa620f2cb220116a5e774d4095878ad20765f1cc46725c82076970a14627e7c4cd54a2cac3699714e4b3e0b3d6c5913645d49d02c190d52f34061ff95fd7dca8416810380ac2093d70db3291cc403efd321fcf09bc55cd448bafe32c2ddb54ecad88d5ea8cfdd3f7ce8160b6a1f7dbdcd9d83e2cea16823f3645f67a3465ef86de0314750da03530346032aa0db08decaa3de
This commit is contained in:
2026-03-14 15:09:57 -05:00
parent 1b45b71d46
commit 959d388a60
14 changed files with 70 additions and 1704 deletions

View File

@@ -1,8 +0,0 @@
node_modules
.git
.env
*.log
dist
__pycache__
.pytest_cache
.next

1
.github/FUNDING.yml vendored
View File

@@ -1 +0,0 @@
github: blackboxprogramming

View File

@@ -1,43 +0,0 @@
# Dependabot configuration: weekly Monday update PRs for npm, GitHub Actions,
# and pip ecosystems, each with its own labels and commit-message prefix.
version: 2
updates:
  # npm dependencies
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
    open-pull-requests-limit: 10
    reviewers:
      - "blackboxprogramming"
    labels:
      - "dependencies"
      - "automated"
    commit-message:
      prefix: "chore"
      include: "scope"
  # GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
    open-pull-requests-limit: 5
    labels:
      - "dependencies"
      - "github-actions"
    commit-message:
      prefix: "ci"
  # pip dependencies
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
    open-pull-requests-limit: 10
    labels:
      - "dependencies"
      - "python"
    commit-message:
      prefix: "chore"

View File

@@ -1,33 +0,0 @@
# Deploys the repository to Cloudflare Pages on every push to main/master,
# or on demand via workflow_dispatch.
name: "🚀 Auto Deploy"
on:
  push:
    branches: [main, master]
  workflow_dispatch:
jobs:
  deploy:
    name: Deploy to Cloudflare Pages
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: '20'
      # Best-effort build: repos without package.json (or without a build
      # script) are deployed as-is.
      - name: Install & Build
        run: |
          if [ -f "package.json" ]; then
            npm install
            npm run build 2>/dev/null || true
          fi
      - name: Deploy to Cloudflare Pages
        uses: cloudflare/wrangler-action@v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: pages deploy . --project-name=${{ github.event.repository.name }}

View File

@@ -1,8 +0,0 @@
# Minimal CI scaffold: checks out the repo; real build steps are still TODO.
name: CI
on: [push, pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: echo "Build steps pending"

View File

@@ -9,7 +9,7 @@ jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v3
- name: Brand Compliance Check
run: |
@@ -26,7 +26,7 @@ jobs:
echo "✅ Brand compliance check passed"
- name: Setup Node.js
uses: actions/setup-node@v6
uses: actions/setup-node@v3
with:
node-version: '18'

View File

@@ -1,86 +0,0 @@
# Self-healing pipeline: polls the deployed app's health endpoint every 30
# minutes (and after each Auto Deploy run), rolls back / attempts fixes on
# failure, and keeps npm dependencies fresh.
name: 🔧 Self-Healing
on:
  schedule:
    - cron: '*/30 * * * *'  # Every 30 minutes
  workflow_dispatch:
  workflow_run:
    workflows: ["🚀 Auto Deploy"]
    types: [completed]
jobs:
  monitor:
    name: Monitor Deployments
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      # Writes "status" = HTTP code of the health endpoint, or "skip" when no
      # DEPLOY_URL secret is configured.
      - name: Check Health
        id: health
        run: |
          if [ ! -z "${{ secrets.DEPLOY_URL }}" ]; then
            STATUS=$(curl -s -o /dev/null -w "%{http_code}" ${{ secrets.DEPLOY_URL }}/api/health || echo "000")
            echo "status=$STATUS" >> $GITHUB_OUTPUT
          else
            echo "status=skip" >> $GITHUB_OUTPUT
          fi
      - name: Auto-Rollback
        if: steps.health.outputs.status != '200' && steps.health.outputs.status != 'skip'
        run: |
          echo "🚨 Health check failed (Status: ${{ steps.health.outputs.status }})"
          echo "Triggering rollback..."
          gh workflow run auto-deploy.yml --ref $(git rev-parse HEAD~1)
        env:
          GH_TOKEN: ${{ github.token }}
      - name: Attempt Auto-Fix
        if: steps.health.outputs.status != '200' && steps.health.outputs.status != 'skip'
        run: |
          echo "🔧 Attempting automatic fixes..."
          # Check for common issues
          if [ -f "package.json" ]; then
            npm ci || true
            npm run build || true
          fi
      - name: Create Issue on Failure
        if: failure()
        uses: actions/github-script@v8
        with:
          script: |
            github.rest.issues.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: '🚨 Self-Healing: Deployment Health Check Failed',
              body: `Deployment health check failed.\n\nStatus: ${{ steps.health.outputs.status }}\nWorkflow: ${context.workflow}\nRun: ${context.runId}`,
              labels: ['bug', 'deployment', 'auto-generated']
            })
  dependency-updates:
    name: Auto Update Dependencies
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
      - name: Setup Node
        if: hashFiles('package.json') != ''
        uses: actions/setup-node@v6
        with:
          node-version: '20'
      # Commits and pushes lockfile changes only when npm update touched them.
      - name: Update npm dependencies
        if: hashFiles('package.json') != ''
        run: |
          npm update
          if [ -n "$(git status --porcelain)" ]; then
            git config user.name "BlackRoad Bot"
            git config user.email "bot@blackroad.io"
            git add package*.json
            git commit -m "chore: auto-update dependencies"
            git push
          fi

1
.gitignore vendored
View File

@@ -1 +0,0 @@
__pycache__/

View File

@@ -1,12 +0,0 @@
# Build stage: install ALL dependencies (dev deps are typically required by
# `npm run build`), build, then prune to production-only modules.
FROM node:20-alpine AS builder
WORKDIR /app
COPY package*.json ./
# NOTE(review): was `npm ci --production`, which omits devDependencies and
# breaks `npm run build` for any project with dev-time build tooling.
RUN npm ci
COPY . .
RUN npm run build --if-present
# Drop devDependencies so the runtime image stays lean.
RUN npm prune --omit=dev

# Runtime stage: app source, build output, and production node_modules only.
FROM node:20-alpine
WORKDIR /app
COPY --from=builder /app .
EXPOSE 3000
CMD ["node", "src/index.js"]

1332
LICENSE

File diff suppressed because it is too large Load Diff

View File

@@ -1,11 +1,5 @@
# LUCIDIA AI CORE - ARCHITECTURAL SPECIFICATION
[![Python](https://img.shields.io/badge/python-3.10%2B-3776AB.svg)](https://python.org)
[![Ollama](https://img.shields.io/badge/Ollama-router-FF6B2B.svg)](https://ollama.ai)
[![AI Core](https://img.shields.io/badge/AI-Lucidia_Core-CC00AA.svg)](https://blackroad.io)
## Context
Designing the technical architecture for Lucidia AI, a symbolic adaptive universal computing system implementing the 20-equation unified substrate framework with trinary logic and multi-substrate execution capabilities.
@@ -244,3 +238,22 @@ integration_scenarios = [
- Entry 2: Develop concentration→discrete mapping protocols with uncertainty preservation
The Lucidia architecture provides a complete technical foundation for the world's first adaptive universal computing system, capable of choosing optimal physics for each computational task while maintaining symbolic reasoning capabilities through trinary logic.
---
## 📜 License & Copyright
**Copyright © 2026 BlackRoad OS, Inc. All Rights Reserved.**
**CEO:** Alexa Amundson | **PROPRIETARY AND CONFIDENTIAL**
This software is NOT for commercial resale. Testing purposes only.
### 🏢 Enterprise Scale:
- 30,000 AI Agents
- 30,000 Human Employees
- CEO: Alexa Amundson
**Contact:** blackroad.systems@gmail.com
See [LICENSE](LICENSE) for complete terms.

View File

@@ -1,28 +1,2 @@
# Placeholder for substrate arbitrator logic
# Implements Equation 13: Base-switching optimization for multi-substrate selection
#
# Ollama routing: any request that contains @copilot, @lucidia, or
# @blackboxprogramming is sent exclusively to the local Ollama instance.
# No external AI provider is contacted.
from ..ollama_router import route as ollama_route
def arbitrate(request_text: str, **kwargs) -> str:
    """
    Top-level entry point for substrate arbitration.

    If the request mentions @copilot, @lucidia, or @blackboxprogramming the
    query is forwarded **directly** to the local Ollama service. No cloud or
    third-party AI provider is involved.
    """
    routed_reply = ollama_route(request_text, **kwargs)
    if routed_reply is not None:
        return routed_reply
    # No trigger mention found — proceed with normal substrate selection.
    return _default_arbitration(request_text)
def _default_arbitration(request_text: str) -> str:
"""Stub for future multi-substrate selection logic (Equation 13)."""
return ""

View File

@@ -1,76 +0,0 @@
# ollama_router.py
# Routes all @copilot, @lucidia, and @blackboxprogramming mentions directly to
# the local Ollama instance. No external AI provider is used.
import re
import urllib.request
import urllib.error
import json
from typing import Optional
# Mentions that must always be routed to the local Ollama instance.
# The word boundary prevents partial matches (e.g. "@copilotx");
# matching is case-insensitive.
OLLAMA_TRIGGERS = re.compile(
    r"@(copilot|lucidia|blackboxprogramming)\b",
    re.IGNORECASE,
)

# Default Ollama endpoint (local hardware, private network).
OLLAMA_BASE_URL = "http://localhost:11434"
# Model used when the caller does not specify one.
DEFAULT_MODEL = "llama3"
def contains_ollama_trigger(text: str) -> bool:
    """Return True if *text* mentions any handle that must go to Ollama."""
    return OLLAMA_TRIGGERS.search(text) is not None
def _strip_triggers(text: str) -> str:
    """Drop the @mention tokens (and surrounding whitespace) before forwarding."""
    without_mentions = OLLAMA_TRIGGERS.sub("", text)
    return without_mentions.strip()
def query_ollama(
    prompt: str,
    model: str = DEFAULT_MODEL,
    base_url: str = OLLAMA_BASE_URL,
    stream: bool = False,
) -> str:
    """
    Send *prompt* directly to the local Ollama instance and return its reply.

    Raises ``ConnectionError`` if Ollama is unreachable so callers know
    immediately that no external fallback will be attempted.
    """
    request_body = json.dumps(
        {"model": model, "prompt": prompt, "stream": stream}
    ).encode()
    request = urllib.request.Request(
        f"{base_url}/api/generate",
        data=request_body,
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    try:
        with urllib.request.urlopen(request) as resp:
            parsed = json.loads(resp.read().decode())
    except urllib.error.URLError as exc:
        # Fail loudly: there is deliberately no cloud-provider fallback.
        raise ConnectionError(
            f"Ollama is not reachable at {base_url}. "
            "Ensure the Ollama service is running on your local machine. "
            f"Original error: {exc}"
        ) from exc
    return parsed.get("response", "")
def route(text: str, model: str = DEFAULT_MODEL, base_url: str = OLLAMA_BASE_URL) -> Optional[str]:
    """
    Inspect *text* for @copilot / @lucidia / @blackboxprogramming mentions.

    * Trigger present -> strip the mention and query Ollama exclusively.
    * No trigger      -> return ``None`` so the caller can handle it normally.

    No external AI provider is ever contacted.
    """
    if not contains_ollama_trigger(text):
        return None
    cleaned = _strip_triggers(text)
    return query_ollama(cleaned, model=model, base_url=base_url)

View File

@@ -1,97 +0,0 @@
# test_ollama_router.py
# Lightweight tests for the Ollama routing logic.
# Run with: python -m pytest lucidiaAI/LUCIDIA_CORE/test_ollama_router.py -v
# or: python lucidiaAI/LUCIDIA_CORE/test_ollama_router.py
import sys
import os
import json
import unittest
from unittest.mock import patch, MagicMock
# Allow running directly from the repo root or this directory
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
import ollama_router as router
class TestOllamaTriggerDetection(unittest.TestCase):
    """Verify that @mention detection works correctly."""

    def test_copilot_trigger(self):
        self.assertTrue(router.contains_ollama_trigger("@copilot explain this"))

    def test_lucidia_trigger(self):
        self.assertTrue(router.contains_ollama_trigger("Hey @lucidia, what is 2+2?"))

    def test_blackboxprogramming_trigger(self):
        self.assertTrue(router.contains_ollama_trigger("@blackboxprogramming run a check"))

    def test_case_insensitive(self):
        # The router compiles its pattern with re.IGNORECASE.
        self.assertTrue(router.contains_ollama_trigger("@Copilot do this"))
        self.assertTrue(router.contains_ollama_trigger("@LUCIDIA answer me"))

    def test_no_trigger(self):
        self.assertFalse(router.contains_ollama_trigger("just a normal message"))

    def test_other_mention_not_a_trigger(self):
        self.assertFalse(router.contains_ollama_trigger("@someone else"))
class TestStripTriggers(unittest.TestCase):
    """_strip_triggers must remove the mention while keeping the real prompt."""

    def test_strips_copilot(self):
        cleaned = router._strip_triggers("@copilot explain this")
        self.assertNotIn("@copilot", cleaned.lower())
        self.assertIn("explain this", cleaned)

    def test_strips_multiple(self):
        cleaned = router._strip_triggers("@lucidia @copilot hello")
        self.assertNotIn("@lucidia", cleaned.lower())
        self.assertNotIn("@copilot", cleaned.lower())
class TestRoute(unittest.TestCase):
    """route() must call Ollama for trigger messages and return None otherwise."""

    def test_returns_none_for_non_trigger(self):
        self.assertIsNone(router.route("no mention here"))

    def _make_mock_response(self, text: str):
        # Context-manager mock mimicking urllib's HTTP response object.
        fake = MagicMock()
        fake.__enter__ = lambda s: s
        fake.__exit__ = MagicMock(return_value=False)
        fake.read.return_value = json.dumps({"response": text}).encode()
        return fake

    def test_routes_copilot_to_ollama(self):
        with patch("urllib.request.urlopen", return_value=self._make_mock_response("42")):
            self.assertEqual(router.route("@copilot what is 6*7?"), "42")

    def test_routes_lucidia_to_ollama(self):
        with patch("urllib.request.urlopen", return_value=self._make_mock_response("hello")):
            self.assertEqual(router.route("@lucidia say hello"), "hello")

    def test_routes_blackboxprogramming_to_ollama(self):
        with patch("urllib.request.urlopen", return_value=self._make_mock_response("ok")):
            self.assertEqual(router.route("@blackboxprogramming run task"), "ok")

    def test_no_external_fallback_on_connection_error(self):
        """If Ollama is down the router must raise ConnectionError, not silently
        fall back to a cloud provider."""
        import urllib.error
        with patch("urllib.request.urlopen", side_effect=urllib.error.URLError("refused")):
            with self.assertRaises(ConnectionError):
                router.route("@copilot do something")
if __name__ == "__main__":
    # Allow running this file directly, without pytest.
    unittest.main(verbosity=2)