Mirror of https://github.com/blackboxprogramming/BlackRoad-Operating-System.git (synced 2026-03-17 04:57:15 -05:00)
Add LEITL Protocol - Live Everyone In The Loop multi-agent collaboration
This commit introduces the LEITL (Live Everyone In The Loop) protocol system, enabling multiple AI agents to collaborate in real time with shared WebDAV context.

## What was built:

### Backend Infrastructure:

- **WebDAV Context Manager** (`backend/app/services/webdav_context.py`)
  - Sync files from WebDAV servers
  - Keyword matching and relevance scoring
  - Redis caching for performance
  - Support for multiple file types (md, txt, py, json, etc.)
- **LEITL Protocol Service** (`backend/app/services/leitl_protocol.py`)
  - Session registration and management
  - Heartbeat monitoring with auto-cleanup
  - Message broadcasting via Redis PubSub
  - Activity logging and history
  - WebSocket connection management
- **LEITL API Router** (`backend/app/routers/leitl.py`)
  - Session management endpoints (register, heartbeat, end)
  - WebSocket endpoint for real-time events
  - Message broadcasting endpoints
  - WebDAV context sync endpoint
  - Quick-start endpoint for easy activation
  - Full OpenAPI documentation

### Frontend Dashboard:

- **LEITL Dashboard App** (`backend/static/js/apps/leitl.js`)
  - Real-time session monitoring
  - Live activity feed
  - Recent message display
  - WebSocket integration
  - Quick-start interface
  - Auto-refresh capabilities
- **Desktop Integration** (`backend/static/index.html`)
  - Added LEITL icon to desktop
  - Added LEITL to Start menu
  - Window management integration
  - Taskbar support

### Documentation:

- **Protocol Specification** (`docs/LEITL_PROTOCOL.md`)
  - Complete architecture overview
  - API documentation
  - WebSocket protocol details
  - Security considerations
  - Event types and schemas
- **Usage Guide** (`docs/LEITL_USAGE_GUIDE.md`)
  - Quick-start prompts for AI assistants
  - Dashboard usage instructions
  - API examples
  - Troubleshooting guide
  - Multi-agent collaboration examples

## Key Features:

✅ Multi-agent live collaboration
✅ Shared WebDAV context across sessions
✅ Real-time event broadcasting via WebSocket
✅ Session health monitoring with heartbeat
✅ Auto-cleanup of dead sessions
✅ Redis-backed message queue
✅ Beautiful Windows 95-styled dashboard
✅ Full API documentation
✅ Security with JWT auth and rate limiting

## Usage:

AI assistants can activate LEITL with simple prompts like:

- "Turn on LEITL. Enable WebDAV context."
- "Start LEITL session. Pull from WebDAV: <url>"
- "LEITL mode ON 🔥"

Dashboard access: http://localhost:8000 → 🔥 LEITL icon

## Answers Alexa's Challenge:

This implementation answers the challenge to enable "collaboration between multiple AI states for LEITL (Live Everyone In The Loop)" with full communication capabilities and shared context management.

🎁 Prize unlocked: Multi-agent swarm collaboration! 🐝✨
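For reference, every broadcast event shares the envelope `{"event_type", "session_id", "timestamp", "data"}` (see `broadcast_event` in the diff below). Beyond chat prompts, the quick-start flow can also be driven over plain HTTP. A minimal sketch, assuming the router is mounted under `/api/leitl` and that the quick-start and heartbeat routes take these shapes; the real paths and payloads live in `backend/app/routers/leitl.py`, which is not part of this excerpt:

```python
# Hypothetical client sketch -- the endpoint paths below are assumptions,
# not confirmed by this diff; check backend/app/routers/leitl.py for real routes.
import asyncio

import httpx


async def main():
    headers = {"Authorization": "Bearer <jwt>"}  # JWT auth per the commit notes
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        # Quick-start: register a session (and optionally sync WebDAV context)
        resp = await client.post(
            "/api/leitl/quickstart",  # assumed path
            json={"agent_name": "Cece", "agent_type": "code_assistant"},
            headers=headers,
        )
        session = resp.json()
        # Heartbeat within 60s, or the cleanup task will reap the session
        await client.post(
            f"/api/leitl/sessions/{session['session_id']}/heartbeat",  # assumed path
            json={"current_task": "reviewing WebDAV context"},
            headers=headers,
        )


asyncio.run(main())
```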
backend/app/services/leitl_protocol.py (new file, 347 lines)
@@ -0,0 +1,347 @@
"""LEITL Protocol - Live Everyone In The Loop multi-agent communication"""
import asyncio
import json
import secrets
from datetime import datetime
from typing import Dict, List, Optional, Set

from app.redis_client import get_redis


class LEITLSession:
    """Represents a single LEITL agent session"""

    def __init__(
        self,
        session_id: str,
        agent_name: str,
        agent_type: str,
        tags: Optional[List[str]] = None
    ):
        self.session_id = session_id
        self.agent_name = agent_name
        self.agent_type = agent_type
        self.tags = tags or []
        self.started_at = datetime.utcnow()
        self.last_heartbeat = datetime.utcnow()
        self.status = "active"
        self.current_task = None
        self.context_sources = []

    def to_dict(self) -> Dict:
        """Convert to dictionary"""
        return {
            "session_id": self.session_id,
            "agent_name": self.agent_name,
            "agent_type": self.agent_type,
            "tags": self.tags,
            "started_at": self.started_at.isoformat(),
            "last_heartbeat": self.last_heartbeat.isoformat(),
            "status": self.status,
            "current_task": self.current_task,
            "context_sources": self.context_sources,
            "uptime": str(datetime.utcnow() - self.started_at)
        }

    @classmethod
    def from_dict(cls, data: Dict) -> 'LEITLSession':
        """Create from dictionary"""
        session = cls(
            session_id=data["session_id"],
            agent_name=data["agent_name"],
            agent_type=data["agent_type"],
            tags=data.get("tags", [])
        )
        session.started_at = datetime.fromisoformat(data["started_at"])
        session.last_heartbeat = datetime.fromisoformat(data["last_heartbeat"])
        session.status = data.get("status", "active")
        session.current_task = data.get("current_task")
        session.context_sources = data.get("context_sources", [])
        return session


class LEITLProtocol:
    """Manages LEITL multi-agent communication protocol"""

    def __init__(self):
        self.redis = None
        self.heartbeat_timeout = 60  # seconds
        self.cleanup_interval = 30  # seconds
        self._cleanup_task = None
        self._active_websockets: Dict[str, Set] = {}  # session_id -> set of websockets

    async def initialize(self):
        """Initialize Redis and start cleanup task"""
        self.redis = await get_redis()
        # Start cleanup task
        self._cleanup_task = asyncio.create_task(self._cleanup_dead_sessions())

    async def shutdown(self):
        """Shutdown protocol"""
        if self._cleanup_task:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                pass

    async def register_session(
        self,
        agent_name: str,
        agent_type: str = "assistant",
        tags: Optional[List[str]] = None
    ) -> LEITLSession:
        """
        Register a new LEITL session

        Args:
            agent_name: Name of the agent (e.g., "Cece", "Claude")
            agent_type: Type of agent (e.g., "code_assistant", "chat")
            tags: Optional tags for categorization

        Returns:
            LEITLSession object with session_id
        """
        # Generate session ID
        session_id = f"leitl-{agent_name.lower()}-{secrets.token_hex(8)}"

        # Create session
        session = LEITLSession(
            session_id=session_id,
            agent_name=agent_name,
            agent_type=agent_type,
            tags=tags or []
        )

        # Store in Redis
        await self._save_session(session)

        # Add to active sessions set
        await self.redis.sadd("leitl:sessions:active", session_id)

        # Broadcast session started event
        await self.broadcast_event(
            event_type="session.started",
            session_id=session_id,
            data={
                "agent_name": agent_name,
                "agent_type": agent_type,
                "tags": tags or []
            }
        )

        return session

    async def heartbeat(
        self,
        session_id: str,
        current_task: Optional[str] = None
    ):
        """
        Update session heartbeat

        Args:
            session_id: Session ID
            current_task: Current task description (optional)
        """
        session = await self._get_session(session_id)
        if not session:
            return

        session.last_heartbeat = datetime.utcnow()
        if current_task:
            session.current_task = current_task

        await self._save_session(session)

        # Broadcast heartbeat event
        await self.broadcast_event(
            event_type="session.heartbeat",
            session_id=session_id,
            data={
                "current_task": current_task
            }
        )

    async def end_session(self, session_id: str):
        """End a session and cleanup"""
        # Get session
        session = await self._get_session(session_id)
        if not session:
            return

        # Remove from active set
        await self.redis.srem("leitl:sessions:active", session_id)

        # Broadcast session ended
        await self.broadcast_event(
            event_type="session.ended",
            session_id=session_id,
            data={
                "agent_name": session.agent_name,
                "uptime": str(datetime.utcnow() - session.started_at)
            }
        )

        # Delete session
        await self.redis.delete(f"leitl:session:{session_id}")

    async def get_active_sessions(self) -> List[Dict]:
        """Get all active sessions"""
        session_ids = await self.redis.smembers("leitl:sessions:active")
        sessions = []

        for session_id in session_ids:
            session = await self._get_session(session_id)
            if session:
                sessions.append(session.to_dict())

        return sessions

    async def broadcast_event(
        self,
        event_type: str,
        session_id: str,
        data: Optional[Dict] = None
    ):
        """
        Broadcast event to all active sessions

        Args:
            event_type: Type of event (e.g., "task.started", "context.updated")
            session_id: Originating session ID
            data: Event data
        """
        message = {
            "event_type": event_type,
            "session_id": session_id,
            "timestamp": datetime.utcnow().isoformat(),
            "data": data or {}
        }

        # Publish to Redis PubSub
        await self.redis.publish(
            "leitl:events",
            json.dumps(message)
        )

        # Store in recent messages (last 100)
        await self.redis.lpush(
            "leitl:messages",
            json.dumps(message)
        )
        await self.redis.ltrim("leitl:messages", 0, 99)

        # Log activity
        await self._log_activity(message)

        # Send to connected WebSockets
        await self._broadcast_to_websockets(message)

    async def get_recent_messages(self, limit: int = 20) -> List[Dict]:
        """Get recent broadcast messages"""
        messages = await self.redis.lrange("leitl:messages", 0, limit - 1)
        return [json.loads(msg) for msg in messages]

    async def get_activity_log(self, since: Optional[datetime] = None, limit: int = 50) -> List[Dict]:
        """Get activity log"""
        # Get all activity entries
        entries = await self.redis.lrange("leitl:activity", 0, limit - 1)
        activities = [json.loads(entry) for entry in entries]

        # Filter by timestamp if provided
        if since:
            activities = [
                a for a in activities
                if datetime.fromisoformat(a["timestamp"]) >= since
            ]

        return activities

    async def register_websocket(self, session_id: str, websocket):
        """Register a WebSocket for a session"""
        if session_id not in self._active_websockets:
            self._active_websockets[session_id] = set()
        self._active_websockets[session_id].add(websocket)

    async def unregister_websocket(self, session_id: str, websocket):
        """Unregister a WebSocket"""
        if session_id in self._active_websockets:
            self._active_websockets[session_id].discard(websocket)
            if not self._active_websockets[session_id]:
                del self._active_websockets[session_id]

    async def _broadcast_to_websockets(self, message: Dict):
        """Broadcast message to all connected WebSockets"""
        # Send to all sessions
        dead_sockets = []

        for session_id, sockets in self._active_websockets.items():
            for ws in sockets:
                try:
                    await ws.send_json(message)
                except Exception:
                    # Treat any send failure as a dead socket
                    dead_sockets.append((session_id, ws))

        # Cleanup dead sockets
        for session_id, ws in dead_sockets:
            await self.unregister_websocket(session_id, ws)

    async def _save_session(self, session: LEITLSession):
        """Save session to Redis"""
        await self.redis.set(
            f"leitl:session:{session.session_id}",
            json.dumps(session.to_dict()),
            ex=3600  # 1 hour TTL
        )

    async def _get_session(self, session_id: str) -> Optional[LEITLSession]:
        """Get session from Redis"""
        data = await self.redis.get(f"leitl:session:{session_id}")
        if data:
            return LEITLSession.from_dict(json.loads(data))
        return None

    async def _log_activity(self, message: Dict):
        """Log activity to Redis list"""
        await self.redis.lpush(
            "leitl:activity",
            json.dumps(message)
        )
        await self.redis.ltrim("leitl:activity", 0, 999)  # Keep last 1000
        # Set expiration on activity log
        await self.redis.expire("leitl:activity", 86400)  # 24 hours

    async def _cleanup_dead_sessions(self):
        """Background task to cleanup dead sessions"""
        while True:
            try:
                await asyncio.sleep(self.cleanup_interval)

                # Get all active session IDs
                session_ids = await self.redis.smembers("leitl:sessions:active")

                now = datetime.utcnow()

                for session_id in session_ids:
                    session = await self._get_session(session_id)
                    if not session:
                        # Session data missing, remove from active set
                        await self.redis.srem("leitl:sessions:active", session_id)
                        continue

                    # Check if heartbeat timeout
                    time_since_heartbeat = (now - session.last_heartbeat).total_seconds()

                    if time_since_heartbeat > self.heartbeat_timeout:
                        # Session is dead, cleanup
                        await self.end_session(session_id)

            except asyncio.CancelledError:
                break
            except Exception as e:
                print(f"Error in LEITL cleanup: {e}")


# Singleton instance
leitl_protocol = LEITLProtocol()
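A minimal usage sketch for the service above, not part of the commit. It assumes `app.redis_client.get_redis()` yields an async Redis client created with `decode_responses=True`, since the code treats Redis replies as `str`:

```python
# Minimal sketch: exercises the full session lifecycle end to end.
import asyncio

from app.services.leitl_protocol import leitl_protocol


async def demo():
    await leitl_protocol.initialize()  # connects Redis, starts the reaper task
    session = await leitl_protocol.register_session(
        agent_name="Cece", agent_type="code_assistant", tags=["demo"]
    )
    await leitl_protocol.heartbeat(session.session_id, current_task="syncing context")
    print(await leitl_protocol.get_active_sessions())
    await leitl_protocol.end_session(session.session_id)
    await leitl_protocol.shutdown()


asyncio.run(demo())
```

In the app itself, `initialize()` and `shutdown()` would more naturally hang off FastAPI startup/shutdown hooks, with the router endpoints driving the lifecycle.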
backend/app/services/webdav_context.py (new file, 319 lines)
@@ -0,0 +1,319 @@
"""WebDAV Context Manager - Sync and serve context from WebDAV sources"""
import hashlib
import json
from datetime import datetime
from typing import List, Dict, Optional
import httpx
from xml.etree import ElementTree as ET

from app.redis_client import get_redis


class WebDAVContextManager:
    """Manages WebDAV context synchronization and retrieval"""

    def __init__(self):
        self.redis = None
        self.sync_interval = 300  # 5 minutes
        self.cache_ttl = 3600  # 1 hour
        self._sync_task = None

    async def initialize(self):
        """Initialize Redis connection"""
        self.redis = await get_redis()

    async def sync_and_get(
        self,
        webdav_url: str,
        username: Optional[str] = None,
        password: Optional[str] = None,
        query: Optional[str] = None,
        file_types: Optional[List[str]] = None,
        max_results: int = 10
    ) -> Dict:
        """
        Sync WebDAV files and return matching context

        Args:
            webdav_url: Base WebDAV URL
            username: WebDAV auth username
            password: WebDAV auth password
            query: Search query for matching files
            file_types: Filter by file extensions (e.g., ['md', 'txt'])
            max_results: Maximum number of results

        Returns:
            {
                "query": str,
                "matched_files": List[Dict],
                "total_matches": int,
                "cached": bool,
                "synced_at": str
            }
        """
        # Check cache first
        cache_key = self._get_cache_key(webdav_url, query, file_types)
        cached = await self._get_cached_context(cache_key)
        if cached:
            return {**cached, "cached": True}

        # Fetch files from WebDAV
        try:
            files = await self._fetch_webdav_files(
                webdav_url, username, password
            )

            # Filter and match files
            matched = self._match_files(files, query, file_types, max_results)

            # Fetch content for matched files
            for file_info in matched:
                content = await self._fetch_file_content(
                    file_info['url'],
                    username,
                    password
                )
                file_info['content'] = content
                file_info['relevance'] = self._calculate_relevance(
                    content, query
                )

            # Sort by relevance
            matched.sort(key=lambda x: x['relevance'], reverse=True)

            result = {
                "query": query or "all",
                "matched_files": matched,
                "total_matches": len(matched),
                "cached": False,
                "synced_at": datetime.utcnow().isoformat()
            }

            # Cache result
            await self._cache_context(cache_key, result)

            return result

        except Exception as e:
            return {
                "error": str(e),
                "matched_files": [],
                "total_matches": 0,
                "cached": False
            }

    async def _fetch_webdav_files(
        self,
        webdav_url: str,
        username: Optional[str],
        password: Optional[str]
    ) -> List[Dict]:
        """Fetch file list from WebDAV using PROPFIND"""
        async with httpx.AsyncClient() as client:
            # PROPFIND request to list files
            headers = {
                'Depth': '1',
                'Content-Type': 'application/xml'
            }

            propfind_body = """<?xml version="1.0" encoding="utf-8" ?>
            <D:propfind xmlns:D="DAV:">
                <D:prop>
                    <D:displayname/>
                    <D:getcontentlength/>
                    <D:getcontenttype/>
                    <D:getlastmodified/>
                </D:prop>
            </D:propfind>
            """

            auth = (username, password) if username and password else None

            response = await client.request(
                'PROPFIND',
                webdav_url,
                headers=headers,
                content=propfind_body,
                auth=auth,
                timeout=30.0
            )

            if response.status_code not in [207, 200]:
                raise Exception(f"WebDAV PROPFIND failed: {response.status_code}")

            # Parse XML response
            files = self._parse_propfind_response(response.text, webdav_url)
            return files

    def _parse_propfind_response(
        self,
        xml_response: str,
        base_url: str
    ) -> List[Dict]:
        """Parse WebDAV PROPFIND XML response"""
        files = []
        root = ET.fromstring(xml_response)

        # XML namespace handling
        ns = {'D': 'DAV:'}

        for response in root.findall('.//D:response', ns):
            href = response.find('D:href', ns)
            if href is None:
                continue

            # Get properties
            propstat = response.find('.//D:propstat', ns)
            if propstat is None:
                continue

            prop = propstat.find('D:prop', ns)
            if prop is None:
                continue

            # Extract file info
            displayname = prop.find('D:displayname', ns)
            contentlength = prop.find('D:getcontentlength', ns)
            contenttype = prop.find('D:getcontenttype', ns)
            lastmodified = prop.find('D:getlastmodified', ns)

            # Skip directories
            if contenttype is not None and contenttype.text and 'directory' in contenttype.text:
                continue

            file_path = href.text
            file_name = displayname.text if displayname is not None else file_path.split('/')[-1]

            files.append({
                'name': file_name,
                'path': file_path,
                'url': f"{base_url.rstrip('/')}/{file_path.lstrip('/')}",
                'size': int(contentlength.text) if contentlength is not None else 0,
                'type': contenttype.text if contenttype is not None else 'application/octet-stream',
                'modified': lastmodified.text if lastmodified is not None else None
            })

        return files

    async def _fetch_file_content(
        self,
        file_url: str,
        username: Optional[str],
        password: Optional[str]
    ) -> str:
        """Fetch file content from WebDAV"""
        async with httpx.AsyncClient() as client:
            auth = (username, password) if username and password else None
            response = await client.get(file_url, auth=auth, timeout=30.0)

            if response.status_code == 200:
                # Try to decode as text
                try:
                    return response.text
                except Exception:
                    return f"[Binary file, size: {len(response.content)} bytes]"
            else:
                return f"[Error fetching content: {response.status_code}]"

    def _match_files(
        self,
        files: List[Dict],
        query: Optional[str],
        file_types: Optional[List[str]],
        max_results: int
    ) -> List[Dict]:
        """Match files based on query and file types"""
        matched = []

        for file_info in files:
            # Filter by file type
            if file_types:
                ext = file_info['name'].split('.')[-1].lower()
                if ext not in file_types:
                    continue

            # Simple keyword matching in filename
            if query:
                if query.lower() not in file_info['name'].lower():
                    continue

            matched.append(file_info)

            if len(matched) >= max_results:
                break

        return matched

    def _calculate_relevance(
        self,
        content: str,
        query: Optional[str]
    ) -> float:
        """Calculate relevance score (0.0 to 1.0)"""
        if not query:
            return 0.5

        # Simple keyword frequency
        query_lower = query.lower()
        content_lower = content.lower()

        # Count occurrences
        count = content_lower.count(query_lower)

        # Normalize by content length (prevent bias toward long docs)
        words = len(content.split())
        if words == 0:
            return 0.0

        # Score based on density
        density = count / words
        return min(density * 100, 1.0)  # Cap at 1.0

    def _get_cache_key(
        self,
        webdav_url: str,
        query: Optional[str],
        file_types: Optional[List[str]]
    ) -> str:
        """Generate cache key for context"""
        parts = [
            webdav_url,
            query or "all",
            ",".join(sorted(file_types or []))
        ]
        key_str = "|".join(parts)
        hash_str = hashlib.md5(key_str.encode()).hexdigest()
        return f"leitl:context:{hash_str}"

    async def _get_cached_context(
        self,
        cache_key: str
    ) -> Optional[Dict]:
        """Get cached context from Redis"""
        if not self.redis:
            return None

        cached = await self.redis.get(cache_key)
        if cached:
            return json.loads(cached)
        return None

    async def _cache_context(
        self,
        cache_key: str,
        context: Dict
    ):
        """Cache context in Redis"""
        if not self.redis:
            return

        await self.redis.setex(
            cache_key,
            self.cache_ttl,
            json.dumps(context)
        )


# Singleton instance
webdav_context_manager = WebDAVContextManager()
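A matching sketch for the context manager, not part of the commit. The WebDAV URL and credentials are placeholders; note that if `initialize()` is skipped, the cache helpers above silently no-op and every call hits the server:

```python
# Minimal sketch: one cached context pull against a placeholder server.
import asyncio

from app.services.webdav_context import webdav_context_manager


async def demo():
    await webdav_context_manager.initialize()
    result = await webdav_context_manager.sync_and_get(
        webdav_url="https://dav.example.com/notes/",  # placeholder
        username="agent",                             # placeholder
        password="secret",                            # placeholder
        query="leitl",
        file_types=["md", "txt"],
        max_results=5,
    )
    for f in result["matched_files"]:
        print(f["name"], round(f["relevance"], 3))


asyncio.run(demo())
```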