mirror of
https://github.com/blackboxprogramming/new_world.git
synced 2026-03-17 08:57:23 -05:00
feat: route @copilot/@lucidia/@blackboxprogramming mentions exclusively to local Ollama
Co-authored-by: blackboxprogramming <118287761+blackboxprogramming@users.noreply.github.com>
This commit is contained in:
@@ -1,2 +1,28 @@
|
||||
# Placeholder for substrate arbitrator logic
|
||||
# Implements Equation 13: Base-switching optimization for multi-substrate selection
|
||||
#
|
||||
# Ollama routing: any request that contains @copilot, @lucidia, or
|
||||
# @blackboxprogramming is sent exclusively to the local Ollama instance.
|
||||
# No external AI provider is contacted.
|
||||
|
||||
from ..ollama_router import route as ollama_route
|
||||
|
||||
|
||||
def arbitrate(request_text: str, **kwargs) -> str:
    """Top-level entry point for substrate arbitration.

    Requests mentioning @copilot, @lucidia, or @blackboxprogramming are
    forwarded **directly** to the local Ollama service; no cloud or
    third-party AI provider is involved. Anything else falls through to
    the default substrate-selection stub.
    """
    # The router returns None when no trigger mention is present.
    routed = ollama_route(request_text, **kwargs)
    if routed is None:
        # No trigger found - proceed with normal substrate selection logic
        return _default_arbitration(request_text)
    return routed
|
||||
|
||||
|
||||
def _default_arbitration(request_text: str) -> str:
|
||||
"""Stub for future multi-substrate selection logic (Equation 13)."""
|
||||
return ""
|
||||
|
||||
BIN
lucidiaAI/LUCIDIA_CORE/__pycache__/ollama_router.cpython-312.pyc
Normal file
BIN
lucidiaAI/LUCIDIA_CORE/__pycache__/ollama_router.cpython-312.pyc
Normal file
Binary file not shown.
76
lucidiaAI/LUCIDIA_CORE/ollama_router.py
Normal file
76
lucidiaAI/LUCIDIA_CORE/ollama_router.py
Normal file
@@ -0,0 +1,76 @@
|
||||
# ollama_router.py
|
||||
# Routes all @copilot, @lucidia, and @blackboxprogramming mentions directly to
|
||||
# the local Ollama instance. No external AI provider is used.
|
||||
|
||||
import re
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import json
|
||||
from typing import Optional
|
||||
|
||||
# Trigger mentions that unconditionally route to Ollama.
# Word boundary (\b) prevents partial matches such as "@copilots";
# IGNORECASE lets "@Copilot" / "@LUCIDIA" match as well.
OLLAMA_TRIGGERS = re.compile(
    r"@(copilot|lucidia|blackboxprogramming)\b",
    re.IGNORECASE,
)

# Default Ollama endpoint (local hardware, private network).
# 11434 is the Ollama daemon's standard listen port.
OLLAMA_BASE_URL = "http://localhost:11434"
# Model used when the caller does not specify one explicitly.
DEFAULT_MODEL = "llama3"
|
||||
|
||||
|
||||
def contains_ollama_trigger(text: str) -> bool:
    """Return True if the text contains any mention that must go to Ollama."""
    # re.search yields a Match object (truthy) or None; normalise to bool.
    return OLLAMA_TRIGGERS.search(text) is not None
|
||||
|
||||
|
||||
def _strip_triggers(text: str) -> str:
    """Remove @mention prefixes before forwarding the prompt."""
    without_mentions = OLLAMA_TRIGGERS.sub("", text)
    # Stripping also removes whitespace left behind by a leading mention.
    return without_mentions.strip()
|
||||
|
||||
|
||||
def query_ollama(
    prompt: str,
    model: str = DEFAULT_MODEL,
    base_url: str = OLLAMA_BASE_URL,
    stream: bool = False,
    timeout: float = 60.0,
) -> str:
    """
    Send a prompt directly to the local Ollama instance and return the response.

    Parameters:
        prompt: text forwarded verbatim to the model.
        model: Ollama model name (defaults to ``DEFAULT_MODEL``).
        base_url: root URL of the local Ollama service.
        stream: passed through to the Ollama API. NOTE(review): this
            function decodes the body as a single JSON document, so
            ``stream=True`` (which Ollama answers with NDJSON chunks)
            would likely fail to parse — confirm before enabling.
        timeout: socket timeout in seconds. Without one, ``urlopen`` can
            block forever if the service accepts the connection but never
            replies; a hung local daemon would then hang the caller too.

    Raises ``ConnectionError`` if Ollama is unreachable (or times out) so
    callers know immediately that no external fallback will be attempted.
    """
    url = f"{base_url}/api/generate"
    payload = json.dumps({"model": model, "prompt": prompt, "stream": stream}).encode()
    req = urllib.request.Request(
        url,
        data=payload,
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    try:
        with urllib.request.urlopen(req, timeout=timeout) as resp:
            body = json.loads(resp.read().decode())
            # /api/generate puts the generated text under "response".
            return body.get("response", "")
    # TimeoutError covers a socket timeout (possible now that a timeout is
    # set); URLError covers refused/unreachable connections.
    except (urllib.error.URLError, TimeoutError) as exc:
        raise ConnectionError(
            f"Ollama is not reachable at {base_url}. "
            "Ensure the Ollama service is running on your local machine. "
            f"Original error: {exc}"
        ) from exc
|
||||
|
||||
|
||||
def route(text: str, model: str = DEFAULT_MODEL, base_url: str = OLLAMA_BASE_URL) -> Optional[str]:
    """
    Inspect *text* for @copilot / @lucidia / @blackboxprogramming mentions.

    * Trigger present -> strip the mention and send to Ollama exclusively,
      returning its reply.
    * No trigger      -> return ``None`` (caller may handle normally).

    No external AI provider is ever contacted.
    """
    # Guard clause: ordinary requests bypass Ollama entirely.
    if not contains_ollama_trigger(text):
        return None
    return query_ollama(_strip_triggers(text), model=model, base_url=base_url)
|
||||
97
lucidiaAI/LUCIDIA_CORE/test_ollama_router.py
Normal file
97
lucidiaAI/LUCIDIA_CORE/test_ollama_router.py
Normal file
@@ -0,0 +1,97 @@
|
||||
# test_ollama_router.py
|
||||
# Lightweight tests for the Ollama routing logic.
|
||||
# Run with: python -m pytest lucidiaAI/LUCIDIA_CORE/test_ollama_router.py -v
|
||||
# or: python lucidiaAI/LUCIDIA_CORE/test_ollama_router.py
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
# Allow running directly from the repo root or this directory
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
import ollama_router as router
|
||||
|
||||
|
||||
class TestOllamaTriggerDetection(unittest.TestCase):
    """Verify that @mention detection works correctly."""

    def test_copilot_trigger(self):
        self.assertTrue(router.contains_ollama_trigger("@copilot explain this"))

    def test_lucidia_trigger(self):
        self.assertTrue(router.contains_ollama_trigger("Hey @lucidia, what is 2+2?"))

    def test_blackboxprogramming_trigger(self):
        self.assertTrue(router.contains_ollama_trigger("@blackboxprogramming run a check"))

    def test_case_insensitive(self):
        # Detection must not depend on the mention's capitalisation.
        for sample in ("@Copilot do this", "@LUCIDIA answer me"):
            self.assertTrue(router.contains_ollama_trigger(sample))

    def test_no_trigger(self):
        self.assertFalse(router.contains_ollama_trigger("just a normal message"))

    def test_other_mention_not_a_trigger(self):
        # Arbitrary @mentions of other names must not route to Ollama.
        self.assertFalse(router.contains_ollama_trigger("@someone else"))
|
||||
|
||||
|
||||
class TestStripTriggers(unittest.TestCase):
    """Mention prefixes must be removed before the prompt is forwarded."""

    def test_strips_copilot(self):
        cleaned = router._strip_triggers("@copilot explain this")
        self.assertNotIn("@copilot", cleaned.lower())
        self.assertIn("explain this", cleaned)

    def test_strips_multiple(self):
        # Several triggers in one message must all be removed.
        cleaned = router._strip_triggers("@lucidia @copilot hello")
        for mention in ("@lucidia", "@copilot"):
            self.assertNotIn(mention, cleaned.lower())
|
||||
|
||||
|
||||
class TestRoute(unittest.TestCase):
    """route() must call Ollama for trigger messages and return None otherwise."""

    def test_returns_none_for_non_trigger(self):
        # Without a trigger mention, route() must not touch the network at all.
        result = router.route("no mention here")
        self.assertIsNone(result)

    def _make_mock_response(self, text: str):
        # Build a stand-in for the object urlopen() returns: it is used as a
        # context manager in query_ollama, so __enter__/__exit__ are wired up
        # by hand, and read() yields an Ollama-shaped JSON body.
        mock_resp = MagicMock()
        mock_resp.__enter__ = lambda s: s
        mock_resp.__exit__ = MagicMock(return_value=False)
        mock_resp.read.return_value = json.dumps({"response": text}).encode()
        return mock_resp

    def test_routes_copilot_to_ollama(self):
        # @copilot must be answered via the (mocked) local Ollama endpoint.
        mock_resp = self._make_mock_response("42")
        with patch("urllib.request.urlopen", return_value=mock_resp):
            result = router.route("@copilot what is 6*7?")
        self.assertEqual(result, "42")

    def test_routes_lucidia_to_ollama(self):
        mock_resp = self._make_mock_response("hello")
        with patch("urllib.request.urlopen", return_value=mock_resp):
            result = router.route("@lucidia say hello")
        self.assertEqual(result, "hello")

    def test_routes_blackboxprogramming_to_ollama(self):
        mock_resp = self._make_mock_response("ok")
        with patch("urllib.request.urlopen", return_value=mock_resp):
            result = router.route("@blackboxprogramming run task")
        self.assertEqual(result, "ok")

    def test_no_external_fallback_on_connection_error(self):
        """If Ollama is down the router must raise ConnectionError, not silently
        fall back to a cloud provider."""
        import urllib.error
        # Simulate a refused connection from the local daemon.
        with patch(
            "urllib.request.urlopen",
            side_effect=urllib.error.URLError("refused"),
        ):
            with self.assertRaises(ConnectionError):
                router.route("@copilot do something")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this file directly (python test_ollama_router.py)
    # in addition to pytest discovery.
    unittest.main(verbosity=2)
|
||||
Reference in New Issue
Block a user