feat: add Ollama router — @copilot, @lucidia, @blackboxprogramming → local Ollama

Co-authored-by: blackboxprogramming <118287761+blackboxprogramming@users.noreply.github.com>
This commit is contained in:
copilot-swe-agent[bot]
2026-03-03 04:56:55 +00:00
parent c20e1bf1b9
commit 92e970b37e
11 changed files with 471 additions and 0 deletions

View File

@@ -3,6 +3,10 @@
This package exposes the core simulation classes for quantum circuits and
energy/particle dynamics. Importing from the package will pull in
`QuantumCircuit` and the energy simulation functions directly.
It also exposes the Ollama routing layer, which directs every request
mentioning @copilot, @lucidia, or @blackboxprogramming to the local
Ollama instance without contacting any external AI providers.
""" """
from .quantum_simulator import QuantumCircuit from .quantum_simulator import QuantumCircuit
@@ -11,10 +15,31 @@ from .energy_simulator import (
battery_discharge, battery_discharge,
simulate_particle_collision, simulate_particle_collision,
) )
from .ollama_router import (
dispatch,
route_to_ollama,
contains_ollama_handle,
strip_handles,
OLLAMA_BASE_URL,
DEFAULT_MODEL,
OLLAMA_HANDLES,
OllamaConnectionError,
OllamaRequestError,
)
__all__ = [ __all__ = [
"QuantumCircuit", "QuantumCircuit",
"solar_panel_output", "solar_panel_output",
"battery_discharge", "battery_discharge",
"simulate_particle_collision", "simulate_particle_collision",
# Ollama router
"dispatch",
"route_to_ollama",
"contains_ollama_handle",
"strip_handles",
"OLLAMA_BASE_URL",
"DEFAULT_MODEL",
"OLLAMA_HANDLES",
"OllamaConnectionError",
"OllamaRequestError",
] ]

View File

@@ -0,0 +1,209 @@
"""Ollama router for @copilot, @lucidia, and @blackboxprogramming mentions.
All requests that mention @copilot, @lucidia, or @blackboxprogramming are
routed directly to the local Ollama instance. No external AI providers
(ChatGPT, Copilot, Claude, etc.) are contacted.
Example
-------
```python
from native_ai_quantum_energy.ollama_router import dispatch
# All three handles reach the same local Ollama server
response = dispatch("@copilot explain quantum entanglement")
response = dispatch("@lucidia what is a Hadamard gate?")
response = dispatch("@blackboxprogramming optimise this circuit")
```
"""
from __future__ import annotations
import json
import re
import urllib.error
import urllib.request
from typing import Optional
# ── Public constants ──────────────────────────────────────────────────────────
#: Default base URL for the local Ollama service.
OLLAMA_BASE_URL: str = "http://localhost:11434"
#: Default model name used when none is specified by the caller.
DEFAULT_MODEL: str = "llama3"
#: All @handles that are routed to the local Ollama instance.
#: A frozenset so the public constant cannot be mutated by callers.
OLLAMA_HANDLES: frozenset[str] = frozenset(
    {"@copilot", "@lucidia", "@blackboxprogramming"}
)
# ── Internal helpers ──────────────────────────────────────────────────────────
# Case-insensitive match for any routed handle. The trailing \b prevents
# partial matches such as "@copilotx" from being treated as a handle.
_HANDLE_PATTERN: re.Pattern[str] = re.compile(
    r"@(?:copilot|lucidia|blackboxprogramming)\b", re.IGNORECASE
)
# ── Public API ────────────────────────────────────────────────────────────────
def contains_ollama_handle(text: str) -> bool:
    """Tell whether *text* mentions a handle routed to local Ollama.

    Matching is case-insensitive, so ``@Copilot``, ``@LUCIDIA``, and
    ``@BlackBoxProgramming`` are all recognised.

    Parameters
    ----------
    text : str
        Arbitrary user input to inspect.

    Returns
    -------
    bool
        ``True`` when at least one routing handle occurs in *text*,
        ``False`` otherwise.
    """
    return _HANDLE_PATTERN.search(text) is not None
def strip_handles(text: str) -> str:
    """Return *text* with every recognised routing @handle deleted.

    Surrounding whitespace is removed afterwards so that the prompt
    forwarded to Ollama is clean.

    Parameters
    ----------
    text : str
        User input that may contain @handle prefixes.

    Returns
    -------
    str
        The input with all routing handles removed and leading/trailing
        whitespace stripped.
    """
    without_handles = _HANDLE_PATTERN.sub("", text)
    return without_handles.strip()
def route_to_ollama(
    prompt: str,
    model: str = DEFAULT_MODEL,
    base_url: str = OLLAMA_BASE_URL,
    timeout: float | None = None,
) -> str:
    """Send *prompt* to the local Ollama instance and return its response.

    The @handle prefixes are stripped before the prompt is forwarded so
    that Ollama receives a clean query. All communication goes to the
    local Ollama server; no external AI providers are contacted.

    Parameters
    ----------
    prompt : str
        User prompt (with or without @handle mentions).
    model : str, optional
        Ollama model to query. Defaults to ``DEFAULT_MODEL``.
    base_url : str, optional
        Base URL of the local Ollama service. Defaults to
        ``OLLAMA_BASE_URL`` (``http://localhost:11434``).
    timeout : float or None, optional
        Maximum number of seconds to wait for the HTTP request. ``None``
        (the default) preserves the previous behaviour of waiting
        indefinitely.

    Returns
    -------
    str
        The text response returned by Ollama.

    Raises
    ------
    OllamaConnectionError
        If the Ollama service cannot be reached (network error, service
        not running, wrong URL, timeout, etc.).
    OllamaRequestError
        If Ollama returns an HTTP error or a response that cannot be
        parsed.
    """
    clean_prompt = strip_handles(prompt)
    url = f"{base_url}/api/generate"
    payload = json.dumps(
        {"model": model, "prompt": clean_prompt, "stream": False}
    ).encode("utf-8")
    req = urllib.request.Request(
        url,
        data=payload,
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    # Only forward ``timeout`` when the caller supplied one, so the
    # default call is identical to the original (urlopen's own default
    # timeout handling applies).
    open_kwargs = {} if timeout is None else {"timeout": timeout}
    try:
        with urllib.request.urlopen(req, **open_kwargs) as response:
            body = response.read().decode("utf-8")
    except (urllib.error.URLError, TimeoutError) as exc:
        # TimeoutError covers socket read timeouts that urlopen raises
        # without wrapping them in URLError.
        raise OllamaConnectionError(
            f"Cannot reach Ollama at {base_url}: {exc}"
        ) from exc
    try:
        result = json.loads(body)
        return result["response"]
    except (json.JSONDecodeError, KeyError, TypeError) as exc:
        # TypeError: body parsed to a non-dict (e.g. a bare string or
        # list); previously this escaped as a raw TypeError instead of
        # the documented OllamaRequestError.
        raise OllamaRequestError(
            f"Unexpected response from Ollama: {exc}"
        ) from exc
def dispatch(
    prompt: str,
    model: str = DEFAULT_MODEL,
    base_url: str = OLLAMA_BASE_URL,
) -> str:
    """Route *prompt* to Ollama when it contains a recognised @handle.

    Primary entry point of the routing layer: any prompt mentioning
    ``@copilot``, ``@lucidia``, or ``@blackboxprogramming`` is forwarded
    to the local Ollama instance. No external AI providers are used.

    Parameters
    ----------
    prompt : str
        The user prompt, expected to include at least one @handle.
    model : str, optional
        Ollama model name. Defaults to ``DEFAULT_MODEL``.
    base_url : str, optional
        Ollama server base URL. Defaults to ``OLLAMA_BASE_URL``.

    Returns
    -------
    str
        The response text from Ollama.

    Raises
    ------
    ValueError
        If the prompt does not contain any of the recognised @handles.
    OllamaConnectionError
        If the local Ollama service cannot be reached.
    OllamaRequestError
        If Ollama returns an unexpected or error response.
    """
    if contains_ollama_handle(prompt):
        return route_to_ollama(prompt, model=model, base_url=base_url)
    handle_list = ", ".join(sorted(OLLAMA_HANDLES))
    raise ValueError(
        "Prompt must include one of the following handles to be routed "
        "to the local Ollama instance: " + handle_list
    )
# ── Exceptions ────────────────────────────────────────────────────────────────
class OllamaConnectionError(OSError):
    """Raised when the local Ollama service cannot be reached.

    Subclasses ``OSError`` so handlers that already catch generic I/O
    failures also see connection problems with the Ollama server.
    """
class OllamaRequestError(RuntimeError):
    """Raised when Ollama returns an unexpected or error response.

    Covers bodies that are not valid JSON as well as JSON bodies that
    lack the expected ``"response"`` key.
    """

237
tests/test_ollama_router.py Normal file
View File

@@ -0,0 +1,237 @@
"""Tests for the Ollama router module.
These tests verify routing behaviour without requiring a live Ollama
instance. All network calls are intercepted by monkeypatching
``urllib.request.urlopen``.
"""
from __future__ import annotations
import io
import json
import urllib.error
from typing import Any
from unittest.mock import MagicMock, patch
import pytest
from native_ai_quantum_energy.ollama_router import (
DEFAULT_MODEL,
OLLAMA_BASE_URL,
OLLAMA_HANDLES,
OllamaConnectionError,
OllamaRequestError,
contains_ollama_handle,
dispatch,
route_to_ollama,
strip_handles,
)
# ── Helper ────────────────────────────────────────────────────────────────────
def _mock_urlopen(response_text: str) -> Any:
"""Return a context-manager mock that yields an HTTP-like response."""
body = json.dumps({"response": response_text}).encode("utf-8")
mock_resp = MagicMock()
mock_resp.read.return_value = body
mock_resp.__enter__ = lambda s: s
mock_resp.__exit__ = MagicMock(return_value=False)
return mock_resp
# ── contains_ollama_handle ────────────────────────────────────────────────────
@pytest.mark.parametrize(
    "text",
    (
        "@copilot explain this",
        "@lucidia what is entropy?",
        "@blackboxprogramming optimise the circuit",
        "Hi @Copilot, please help",  # mixed case
        "question for @LUCIDIA here",  # upper case
        "@BlackBoxProgramming do something",  # camel case
        "use @copilot and @lucidia together",  # multiple handles
    ),
)
def test_contains_handle_returns_true(text: str) -> None:
    """Every routed handle is detected regardless of casing."""
    assert contains_ollama_handle(text) is True
@pytest.mark.parametrize(
    "text",
    (
        "just a plain question",
        "@chatgpt help me",
        "@claude answer this",
        "@openai response please",
        "no handle at all",
    ),
)
def test_contains_handle_returns_false(text: str) -> None:
    """Unrelated text and external-provider handles are not matched."""
    assert contains_ollama_handle(text) is False
# ── strip_handles ─────────────────────────────────────────────────────────────
def test_strip_handles_removes_copilot() -> None:
    """The @copilot mention is removed, leaving only the bare prompt."""
    cleaned = strip_handles("@copilot explain this")
    assert cleaned == "explain this"
def test_strip_handles_removes_lucidia() -> None:
    """The @lucidia mention is removed, leaving only the bare prompt."""
    cleaned = strip_handles("@lucidia what is entropy?")
    assert cleaned == "what is entropy?"
def test_strip_handles_removes_blackboxprogramming() -> None:
    """The @blackboxprogramming mention is removed from the prompt."""
    cleaned = strip_handles("@blackboxprogramming run a sim")
    assert cleaned == "run a sim"
def test_strip_handles_case_insensitive() -> None:
    """Handles are stripped no matter how they are capitalised."""
    for variant in ("@COPILOT help", "@Lucidia help", "@BlackBoxProgramming help"):
        assert strip_handles(variant) == "help"
def test_strip_handles_removes_multiple() -> None:
    """Every handle vanishes even when several appear in one prompt."""
    cleaned = strip_handles("@copilot and @lucidia and @blackboxprogramming: hi").lower()
    assert "@copilot" not in cleaned
    assert "@lucidia" not in cleaned
    assert "@blackboxprogramming" not in cleaned
def test_strip_handles_leaves_unrelated_text() -> None:
    """Stripping removes only the handle, not the rest of the prompt."""
    assert "quantum gates" in strip_handles("@copilot tell me about quantum gates")
# ── route_to_ollama ───────────────────────────────────────────────────────────
def test_route_to_ollama_sends_correct_payload() -> None:
    """The request body carries the model, the prompt, and stream=False."""
    with patch(
        "urllib.request.urlopen",
        return_value=_mock_urlopen("hello from ollama"),
    ) as mock_open:
        answer = route_to_ollama("explain superposition", model="llama3")
    assert answer == "hello from ollama"
    # Inspect the Request object that was handed to urlopen.
    request_obj = mock_open.call_args[0][0]
    sent_body = json.loads(request_obj.data.decode("utf-8"))
    assert sent_body["model"] == "llama3"
    assert sent_body["prompt"] == "explain superposition"
    assert sent_body["stream"] is False
def test_route_to_ollama_strips_handles_before_forwarding() -> None:
    """The @handle never reaches Ollama; the question itself does."""
    with patch(
        "urllib.request.urlopen", return_value=_mock_urlopen("answer")
    ) as mock_open:
        route_to_ollama("@copilot explain entanglement")
    request_obj = mock_open.call_args[0][0]
    forwarded = json.loads(request_obj.data.decode("utf-8"))["prompt"]
    assert "@copilot" not in forwarded
    assert "explain entanglement" in forwarded
def test_route_to_ollama_uses_custom_base_url() -> None:
    """A caller-supplied base URL is what the request actually targets."""
    target = "http://my-server:11434"
    with patch(
        "urllib.request.urlopen", return_value=_mock_urlopen("ok")
    ) as mock_open:
        route_to_ollama("hello", base_url=target)
    assert mock_open.call_args[0][0].full_url.startswith(target)
def test_route_to_ollama_raises_connection_error_on_network_failure() -> None:
    """A URLError from urlopen surfaces as OllamaConnectionError."""
    failing_open = patch(
        "urllib.request.urlopen",
        side_effect=urllib.error.URLError("connection refused"),
    )
    with failing_open:
        with pytest.raises(OllamaConnectionError, match="Cannot reach Ollama"):
            route_to_ollama("hello")
def test_route_to_ollama_raises_request_error_on_bad_json() -> None:
    """A body that is not JSON surfaces as OllamaRequestError."""
    broken_resp = MagicMock()
    broken_resp.read.return_value = b"not-json"
    broken_resp.__enter__ = lambda s: s
    broken_resp.__exit__ = MagicMock(return_value=False)
    with patch("urllib.request.urlopen", return_value=broken_resp):
        with pytest.raises(OllamaRequestError, match="Unexpected response"):
            route_to_ollama("hello")
def test_route_to_ollama_raises_request_error_on_missing_key() -> None:
    """JSON without a "response" key surfaces as OllamaRequestError."""
    keyless_resp = MagicMock()
    keyless_resp.read.return_value = json.dumps({"model": "llama3"}).encode()
    keyless_resp.__enter__ = lambda s: s
    keyless_resp.__exit__ = MagicMock(return_value=False)
    with patch("urllib.request.urlopen", return_value=keyless_resp):
        with pytest.raises(OllamaRequestError, match="Unexpected response"):
            route_to_ollama("hello")
# ── dispatch ──────────────────────────────────────────────────────────────────
@pytest.mark.parametrize(
    "handle",
    ("@copilot", "@lucidia", "@blackboxprogramming"),
)
def test_dispatch_routes_all_handles_to_ollama(handle: str) -> None:
    """Every supported handle reaches the (mocked) local Ollama server."""
    with patch(
        "urllib.request.urlopen",
        return_value=_mock_urlopen(f"response for {handle}"),
    ):
        answer = dispatch(f"{handle} what is quantum computing?")
    assert "response for" in answer
def test_dispatch_raises_value_error_without_handle() -> None:
    """Prompts with no routed handle are rejected, not forwarded."""
    with pytest.raises(ValueError, match="@copilot"):
        dispatch("plain question without any handle")
def test_dispatch_does_not_contact_external_providers() -> None:
    """Verify that dispatch only ever calls the specified local Ollama URL."""
    local_base = "http://localhost:11434"
    with patch(
        "urllib.request.urlopen", return_value=_mock_urlopen("ok")
    ) as mock_open:
        dispatch("@copilot hello", base_url=local_base)
    for recorded_call in mock_open.call_args_list:
        request_obj = recorded_call[0][0]
        assert request_obj.full_url.startswith(local_base), (
            "dispatch must only contact the local Ollama instance"
        )
def test_dispatch_accepts_custom_model_and_url() -> None:
    """A custom model name and base URL are honoured end to end."""
    with patch(
        "urllib.request.urlopen", return_value=_mock_urlopen("custom")
    ) as mock_open:
        answer = dispatch(
            "@lucidia summarise this",
            model="mistral",
            base_url="http://127.0.0.1:11434",
        )
    assert answer == "custom"
    sent_body = json.loads(mock_open.call_args[0][0].data.decode("utf-8"))
    assert sent_body["model"] == "mistral"
# ── OLLAMA_HANDLES constant ───────────────────────────────────────────────────
def test_ollama_handles_contains_required_handles() -> None:
    """All three routed handles are present in the public constant."""
    assert {"@copilot", "@lucidia", "@blackboxprogramming"} <= OLLAMA_HANDLES
def test_ollama_handles_does_not_include_external_providers() -> None:
    """No external-provider name appears in the routed handle set."""
    for routed_handle in OLLAMA_HANDLES:
        for provider in ("openai", "claude", "chatgpt"):
            assert provider not in routed_handle