mirror of
https://github.com/blackboxprogramming/lucidia.git
synced 2026-03-17 04:57:15 -05:00
Add workflow_dispatch trigger to CI
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com> RoadChain-SHA2048: 4e589c297428ae0a RoadChain-Identity: alexa@sovereign RoadChain-Full: 4e589c297428ae0a530709b82b8405889371c78bc8e97f0b7122d3de1d34d929bce404546b89a1caaf9327949254806c5c2f78c104c7d7aeb0c3cb3b33d210f9651778fe14136058b56bee6931556b06461449b4da6a9b621258b06de582bc1553a31e4774d55572a3f231ff56002bea307dd0b235bc973f6794c7fba9c749605dbab53ae5982fd18fb51a5248c2391c0b257896a05ecf0d2efa9b4e5641d459ad8408c961b063bef5ce53d797a73ed7376680470e02bb8b0882d4d1c17ac7a5c2beac4f7a7d384e5a05b216f7e8d059e87a187102fca60180b1d0321e680327fceee677e32129b5c1de040cc9e965dc5c51df080d6580206e189bdf8a19d7a3
This commit is contained in:
144
main.py
Normal file → Executable file
144
main.py
Normal file → Executable file
@@ -1,106 +1,48 @@
|
||||
import os, sqlite3
|
||||
from typing import Optional, Dict, Any
|
||||
from fastapi import FastAPI, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from providers import get_enabled, call_tool
|
||||
import requests
|
||||
import os
|
||||
from connectors import route_connector
|
||||
|
||||
# ---- tiny sqlite memory ----
# DB path is overridable via LUCIDIA_DB; falls back to the original Pi location.
DB_PATH = os.environ.get("LUCIDIA_DB", "/home/pi/lucidia/lucidia.db")
# check_same_thread=False lets FastAPI worker threads share this connection.
# NOTE(review): a single sqlite3 connection is not safe for concurrent writes;
# fine for a single-user box, revisit under real load.
conn = sqlite3.connect(DB_PATH, check_same_thread=False)
conn.execute("CREATE TABLE IF NOT EXISTS memory (k TEXT PRIMARY KEY, v TEXT)")

# Local Ollama chat endpoint and optional Anthropic credentials.
OLLAMA = "http://localhost:11434/api/chat"
CLAUDE_KEY = os.environ.get("ANTHROPIC_API_KEY", "")

app = FastAPI(title="Lucidia")
|
||||
def ask_ollama(msg):
    """Send a single user message to the local Ollama model and return its reply text.

    Raises requests.HTTPError on a non-2xx response and requests.Timeout when
    the model exceeds the allotted time.
    """
    r = requests.post(
        OLLAMA,
        json={
            "model": "lucidia",
            "messages": [{"role": "user", "content": msg}],
            "stream": False,  # one JSON body instead of a chunked stream
        },
        timeout=120,  # local models can be slow; don't hang forever
    )
    # Fail loudly on HTTP errors instead of KeyError-ing on an error body.
    r.raise_for_status()
    return r.json()["message"]["content"]
|
||||
|
||||
@app.get("/")
def root():
    """Liveness banner served at the service root."""
    banner = {"lucidia": "online"}
    return banner
|
||||
def ask_claude(msg):
    """Forward *msg* to the Anthropic Messages API and return the reply text.

    Returns a plain explanatory string when no API key is configured so the
    caller can surface it instead of crashing.
    Raises requests.HTTPError on a non-2xx response.
    """
    if not CLAUDE_KEY:
        return "No Claude API key set"
    r = requests.post(
        "https://api.anthropic.com/v1/messages",
        headers={
            "x-api-key": CLAUDE_KEY,
            "content-type": "application/json",
            "anthropic-version": "2023-06-01",
        },
        json={
            "model": "claude-sonnet-4-20250514",
            "max_tokens": 1024,
            "messages": [{"role": "user", "content": msg}],
        },
        timeout=60,  # external network call; avoid hanging the handler
    )
    # Surface API errors (401, 429, ...) instead of a confusing KeyError below.
    r.raise_for_status()
    return r.json()["content"][0]["text"]
|
||||
|
||||
@app.get("/healthz")
def healthz():
    """Health-check endpoint for probes; always reports OK if the app is up."""
    status = {"ok": True}
    return status
|
||||
def route(msg):
    """Dispatch *msg* and return a (handler_name, reply) tuple.

    Order of preference: a matching connector, then Claude (when the local
    model judges the request needs it), then the local model itself.
    """
    # Connectors (github, cloudflare, ...) get first refusal.
    connector, result = route_connector(msg)
    if connector:
        return connector, result
    # Ask the local model whether this request warrants escalation.
    decision = ask_ollama(f"Reply ONLY 'yes' or 'no'. Does this need Claude? {msg}")
    needs_claude = "yes" in decision.lower()
    if needs_claude:
        return "claude", ask_claude(msg)
    return "lucidia", ask_ollama(msg)
|
||||
|
||||
# ---- memory endpoints ----
class MemoryPut(BaseModel):
    """Request body for storing one key/value pair in memory."""

    key: str
    value: str
|
||||
|
||||
@app.post("/memory/put")
def memory_put(payload: MemoryPut):
    """Upsert a key/value pair into the sqlite memory table."""
    upsert_sql = "REPLACE INTO memory(k,v) VALUES (?,?)"
    conn.execute(upsert_sql, (payload.key, payload.value))
    conn.commit()
    return {"ok": True}
|
||||
|
||||
@app.get("/memory/get")
def memory_get(key: str):
    """Look up *key* in the memory table; value is None when absent."""
    row = conn.execute("SELECT v FROM memory WHERE k=?", (key,)).fetchone()
    value = None if row is None else row[0]
    return {"key": key, "value": value}
|
||||
|
||||
# ---- minimal service endpoints (placeholders; real calls later) ----
@app.post("/slack/say")
def slack_say(channel: str = "#general", text: str = "Lucidia says hi"):
    """Post *text* to a Slack channel via the slack.say tool."""
    result = call_tool("slack.say", {"channel": channel, "text": text})
    if "error" in result:
        raise HTTPException(500, result["error"])
    return result
|
||||
|
||||
@app.get("/asana/me")
def asana_me():
    """Return the authenticated Asana identity via the asana.me tool."""
    result = call_tool("asana.me", {})
    if "error" in result:
        raise HTTPException(500, result["error"])
    return result
|
||||
|
||||
@app.get("/linear/me")
def linear_me():
    """Return the authenticated Linear identity via the linear.me tool."""
    result = call_tool("linear.me", {})
    if "error" in result:
        raise HTTPException(500, result["error"])
    return result
|
||||
|
||||
# ---- agent skeleton ----
class AgentMsg(BaseModel):
    """Chat payload: free-text message and/or an explicit tool invocation."""

    message: Optional[str] = None
    tool: Optional[str] = None
    args: Optional[Dict[str, Any]] = None
|
||||
|
||||
@app.get("/agent/capabilities")
def agent_caps():
    """List the names of all currently enabled tools."""
    enabled = get_enabled()
    return {"enabled": list(enabled.keys())}
|
||||
|
||||
@app.post("/agent/chat")
def agent_chat(payload: AgentMsg):
    """Run a named tool when one is given; otherwise echo the message with usage hints."""
    # If a tool is provided, call it; message is optional.
    if payload.tool:
        result = call_tool(payload.tool, payload.args or {})
        if "error" in result:
            raise HTTPException(500, result["error"])
        return {"message": "tool_result", "result": result}
    text = (payload.message or "").strip()
    return {
        "message": text,
        "you_can_call": list(get_enabled().keys()),
        "hint": "POST {'tool':'slack.say','args':{'channel':'#general','text':'hi'}}",
    }
|
||||
|
||||
|
||||
# NOTE(review): json/urllib.request are used by /agent/complete below.
# The duplicate `from pydantic import BaseModel` was dropped — BaseModel is
# already imported at the top of the file.
import json, urllib.request
|
||||
|
||||
class CompleteReq(BaseModel):
    """Request body for raw llama.cpp-style text completions."""

    prompt: str
    max_tokens: int = 128
|
||||
|
||||
@app.post("/agent/complete")
def agent_complete(body: CompleteReq):
    """Proxy a completion request to the local llama.cpp server on :8080.

    Any failure (connection refused, timeout, bad JSON) surfaces as HTTP 500.
    """
    payload = json.dumps({
        "prompt": body.prompt,
        "n_predict": body.max_tokens,
        "temperature": 0.7,
    }).encode()
    request = urllib.request.Request(
        "http://127.0.0.1:8080/completion",
        data=payload,
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    try:
        with urllib.request.urlopen(request, timeout=60) as resp:
            out = json.loads(resp.read().decode())
    except Exception as e:  # service boundary: translate any failure into a 500
        raise HTTPException(status_code=500, detail=str(e))
    return {"text": out.get("content", ""), "raw": out}
|
||||
if __name__ == "__main__":
    # Interactive console loop for running straight on the box (the HTTP API
    # is served separately, e.g. under uvicorn).
    print("Lucidia online. Connectors: github, cloudflare, vercel, claude")
    quit_words = ("q", "quit", "exit")
    while True:
        line = input("\nyou: ")
        if line in quit_words:
            break
        handler, reply = route(line)
        print(f"\n[{handler}]: {reply}")
|
||||
|
||||
Reference in New Issue
Block a user