sync: 2026-03-16 21:00 — 22 files from Alexandria
Some checks failed
Lint & Format / detect (push) Has been cancelled
Lint & Format / js-lint (push) Has been cancelled
Lint & Format / py-lint (push) Has been cancelled
Lint & Format / sh-lint (push) Has been cancelled
Lint & Format / go-lint (push) Has been cancelled
Monorepo Lint / lint-shell (push) Has been cancelled
Monorepo Lint / lint-js (push) Has been cancelled
Some checks failed
Lint & Format / detect (push) Has been cancelled
Lint & Format / js-lint (push) Has been cancelled
Lint & Format / py-lint (push) Has been cancelled
Lint & Format / sh-lint (push) Has been cancelled
Lint & Format / go-lint (push) Has been cancelled
Monorepo Lint / lint-shell (push) Has been cancelled
Monorepo Lint / lint-js (push) Has been cancelled
RoadChain-SHA2048: 53e02304cc601f0a RoadChain-Identity: alexa@sovereign RoadChain-Full: 53e02304cc601f0a1f8f4f417be48bf645bca001ad747ebbba1abf60b5017d9a56b0f6396ecf073034944293d3041338b5ad343c1b5bf5667d3037877483c5872433f5d32945430dd852c92370a14e3018c4318455de08077fcc8c4dc1aa4170d6c60f21921b6674fe387b00eeaba236fbe3f242b01429ca18b7272868907496b0babae84bc4e8842940ae6f19ec7d5160dfa0c7ad33499fef787512fa21657ce7279651f9605cb12fd71dc0d0d1b852eb916f61dc408d5a109e7e632ac610ac25b8cbfae4082f922fa6eac4f2d5635a99513f0f56c855e6ff2f61cf3321446011d6db51621711cf64cfc30cef0dacf4bd9abf971da607acb5b3b7fcff46cb32
This commit is contained in:
228
operator/memory/memory-slack-sync.py
Normal file
228
operator/memory/memory-slack-sync.py
Normal file
@@ -0,0 +1,228 @@
|
||||
#!/usr/bin/env python3
|
||||
"""BlackRoad Memory → Slack D1 Sync
|
||||
Pushes local memory to the Slack Worker's D1 database
|
||||
so /collab /todos /codex /til /memory /search work from Slack.
|
||||
|
||||
Usage: python3 memory-slack-sync.py [full|tils|codex|projects|journal|sessions]
|
||||
"""
|
||||
import json, glob, os, sys, sqlite3, urllib.request
|
||||
|
||||
# Slack Worker endpoint that receives memory sync payloads.
SLACK_API = "https://blackroad-slack.amundsonalexa.workers.dev/memory"
HOME = os.path.expanduser("~")
# Root of the local BlackRoad memory store.
MEMORY = f"{HOME}/.blackroad/memory"
CODEX_DB = f"{MEMORY}/codex/codex.db"  # sqlite: solutions/patterns/best_practices/anti_patterns/lessons_learned
COLLAB_DB = f"{HOME}/.blackroad/collaboration.db"  # sqlite: sessions table
TODO_DIR = f"{MEMORY}/infinite-todos/projects"  # one JSON file per project
TIL_DIR = f"{MEMORY}/til"  # til-*.json files
JOURNAL = f"{MEMORY}/journals/master-journal.jsonl"  # append-only JSONL log

# ANSI escape codes for terminal output.
P = "\033[38;5;205m"  # pink
G = "\033[38;5;82m"   # green
C = "\033[0;36m"      # cyan
R = "\033[0m"         # reset
|
||||
|
||||
def post(data):
    """POST *data* as JSON to the Slack Worker's /memory endpoint.

    Returns the decoded JSON response, or {} on any failure — the sync is
    best-effort, so a failed push is reported but never fatal.
    """
    req = urllib.request.Request(
        SLACK_API,
        data=json.dumps(data).encode(),
        headers={
            "Content-Type": "application/json",
            "User-Agent": "BlackRoad-Memory-Sync/1.0",
        },
        method="POST",
    )
    try:
        # Context manager guarantees the HTTP response is closed even if
        # reading or JSON decoding raises (the original leaked it).
        with urllib.request.urlopen(req, timeout=30) as resp:
            return json.loads(resp.read())
    except Exception as e:
        print(f" {P}WARN{R} Push failed: {e}")
        return {}
|
||||
|
||||
def sync_tils():
    """Read til-*.json files from TIL_DIR and push them to D1 in batches of 50."""
    print(f"{C}Syncing TILs...{R}")
    tils = []
    for f in sorted(glob.glob(f"{TIL_DIR}/til-*.json")):
        try:
            with open(f) as fh:
                d = json.load(fh)
            tils.append({
                # Fall back to the filename when the record has no id.
                "til_id": d.get("til_id", os.path.basename(f).replace(".json", "")),
                "category": d.get("category", "tip"),
                "learning": d.get("learning", "")[:500],
                "broadcaster": d.get("broadcaster", "")[:50],
                "created_at": d.get("timestamp", d.get("created_at", "")),
            })
        except Exception:
            # Best-effort: skip unreadable/malformed TIL files. Narrowed from
            # a bare `except:` so SystemExit/KeyboardInterrupt still propagate.
            pass

    # Batch in groups of 50 to keep request bodies small.
    for i in range(0, len(tils), 50):
        batch = tils[i:i + 50]
        post({"type": "sync", "tils": batch})
        print(f" {G}✓{R} Synced {min(i + 50, len(tils))}/{len(tils)} TILs")

    print(f"{G}✓{R} Synced {len(tils)} TILs total")
|
||||
|
||||
def sync_codex():
    """Push codex solutions, patterns, best practices, anti-patterns and
    lessons learned from the local sqlite DB to D1 (capped per table)."""
    print(f"{C}Syncing Codex...{R}")
    if not os.path.exists(CODEX_DB):
        print(" No codex DB")
        return

    db = sqlite3.connect(CODEX_DB)
    db.row_factory = sqlite3.Row
    entries = []
    try:
        for row in db.execute(
            "SELECT id, name, category, problem, solution, created_at "
            "FROM solutions LIMIT 200"
        ):
            entries.append({
                "codex_id": str(row["id"]),
                "name": row["name"],
                "type": "solution",
                "category": row["category"] or "",
                "description": f"{row['problem'] or ''} → {row['solution'] or ''}"[:300],
                "created_at": row["created_at"] or "",
            })

        for row in db.execute(
            "SELECT id, pattern_name, pattern_type, description, tags, first_seen "
            "FROM patterns LIMIT 100"
        ):
            entries.append({
                "codex_id": f"p-{row['id']}",
                "name": row["pattern_name"],
                "type": "pattern",
                "category": row["tags"] or "",
                "description": (row["description"] or "")[:300],
                "created_at": row["first_seen"] or "",
            })

        for row in db.execute(
            "SELECT id, practice_name, category, description, created_at "
            "FROM best_practices LIMIT 100"
        ):
            entries.append({
                "codex_id": f"bp-{row['id']}",
                "name": row["practice_name"],
                "type": "best_practice",
                "category": row["category"] or "",
                "description": (row["description"] or "")[:300],
                "created_at": row["created_at"] or "",
            })

        for row in db.execute(
            "SELECT id, name, description, severity, first_detected "
            "FROM anti_patterns LIMIT 100"
        ):
            entries.append({
                "codex_id": f"ap-{row['id']}",
                "name": row["name"],
                "type": "anti_pattern",
                # severity is reused as the category bucket.
                "category": row["severity"] or "",
                "description": (row["description"] or "")[:300],
                "created_at": row["first_detected"] or "",
            })

        for row in db.execute(
            "SELECT id, title, what_happened, lessons, timestamp "
            "FROM lessons_learned LIMIT 50"
        ):
            entries.append({
                "codex_id": f"ll-{row['id']}",
                "name": row["title"],
                "type": "lesson",
                "category": "",
                "description": (row["what_happened"] or "")[:300],
                "created_at": row["timestamp"] or "",
            })
    finally:
        # Always release the connection, even if a table is missing and a
        # query raises (the original leaked the handle in that case).
        db.close()

    post({"type": "sync", "codex": entries})
    print(f"{G}✓{R} Synced {len(entries)} codex entries")
|
||||
|
||||
def sync_projects():
    """Push project metadata and each project's todos from TODO_DIR to D1."""
    print(f"{C}Syncing Projects & Todos...{R}")
    if not os.path.isdir(TODO_DIR):
        print(" No projects dir")
        return

    projects = []
    todos = []
    for f in glob.glob(f"{TODO_DIR}/*.json"):
        try:
            with open(f) as fh:
                d = json.load(fh)
            projects.append({
                "project_id": d.get("project_id", ""),
                "title": d.get("title", ""),
                "description": d.get("description", "")[:200],
                "progress": d.get("progress", 0),
                "status": d.get("status", "active"),
                "timescale": d.get("timescale", "forever"),
                "owner": d.get("owner", "")[:50],
            })
            # Todos are nested inside each project file.
            for t in d.get("todos", []):
                todos.append({
                    "todo_id": t.get("id", ""),
                    "project_id": d.get("project_id", ""),
                    "text": t.get("text", "")[:200],
                    "priority": t.get("priority", "medium"),
                    "status": t.get("status", "pending"),
                    "created_at": t.get("created_at", ""),
                })
        except Exception:
            # Best-effort: skip unreadable/malformed project files. Narrowed
            # from a bare `except:` so KeyboardInterrupt still propagates.
            pass

    post({"type": "sync", "projects": projects, "todos": todos})
    print(f"{G}✓{R} Synced {len(projects)} projects, {len(todos)} todos")
|
||||
|
||||
def sync_sessions():
    """Push the 50 most-recent collaboration sessions to D1."""
    print(f"{C}Syncing Sessions...{R}")
    if not os.path.exists(COLLAB_DB):
        print(" No collab DB")
        return

    db = sqlite3.connect(COLLAB_DB)
    db.row_factory = sqlite3.Row
    sessions = []
    try:
        for row in db.execute(
            "SELECT session_id, status, focus, last_seen, agent_id, started_at "
            "FROM sessions ORDER BY last_seen DESC LIMIT 50"
        ):
            sessions.append({
                "session_id": row["session_id"],
                "status": row["status"] or "active",
                "focus": row["focus"] or "",
                "last_seen": row["last_seen"] or "",
                "agent_id": row["agent_id"] or "",
                "created_at": row["started_at"] or "",
            })
    finally:
        # Always release the connection, even if the query raises
        # (the original leaked the handle on error).
        db.close()

    post({"type": "sync", "sessions": sessions})
    print(f"{G}✓{R} Synced {len(sessions)} sessions")
|
||||
|
||||
def sync_journal():
    """Push the last 200 lines of the JSONL master journal to D1."""
    print(f"{C}Syncing Journal (last 200)...{R}")
    if not os.path.exists(JOURNAL):
        print(" No journal")
        return

    entries = []
    with open(JOURNAL) as f:
        lines = f.readlines()
    for line in lines[-200:]:
        try:
            d = json.loads(line.strip())
            entries.append({
                # Prefer the entry's hash prefix; fall back to its position.
                "entry_id": (d.get("sha256", "") or "")[:32] or str(len(entries)),
                "action": d.get("action", "?"),
                "entity": d.get("entity", "?"),
                "details": (d.get("details", "") or "")[:200],
                "source": "mac",
                "created_at": d.get("timestamp", ""),
            })
        except Exception:
            # Best-effort: skip malformed JSONL lines. Narrowed from a bare
            # `except:` so KeyboardInterrupt still propagates.
            pass

    post({"type": "sync", "journal": entries})
    print(f"{G}✓{R} Synced {len(entries)} journal entries")
|
||||
|
||||
def full_sync():
    """Run every sync step, then read back /stats from the Worker to verify."""
    print(f"{P}╔════════════════════════════════════════════════╗{R}")
    print(f"{P}║ Memory → Slack D1 Full Sync ║{R}")
    print(f"{P}╚════════════════════════════════════════════════╝{R}\n")
    sync_sessions()
    sync_journal()
    sync_tils()
    sync_codex()
    sync_projects()
    print(f"\n{G}✓ Full sync complete{R}")

    # Verify: best-effort read-back of D1 row counts.
    try:
        # `with` closes the response (the original leaked it); narrowed from
        # a bare `except:` so KeyboardInterrupt still propagates.
        with urllib.request.urlopen(f"{SLACK_API}/stats", timeout=10) as resp:
            stats = json.loads(resp.read())
        print(f" D1 Stats: journal={stats['journal']} tils={stats['tils']} codex={stats['codex']} projects={stats['projects']}")
    except Exception:
        # Verification is optional; never fail the sync over it.
        pass
|
||||
|
||||
if __name__ == "__main__":
    # Map each CLI sub-command to its sync routine; unknown or absent
    # arguments fall back to a full sync.
    dispatch = {
        "full": full_sync,
        "tils": sync_tils,
        "codex": sync_codex,
        "projects": sync_projects,
        "journal": sync_journal,
        "sessions": sync_sessions,
    }
    requested = sys.argv[1] if len(sys.argv) > 1 else "full"
    dispatch.get(requested, full_sync)()
|
||||
237
operator/memory/memory-slack-sync.sh
Executable file
237
operator/memory/memory-slack-sync.sh
Executable file
@@ -0,0 +1,237 @@
|
||||
#!/bin/bash
|
||||
# BlackRoad Memory → Slack D1 Sync
|
||||
# Pushes local memory system data to the Slack Worker's D1 database
|
||||
# so /collab /todos /codex /til /memory /search work from Slack
|
||||
# Usage: ./memory-slack-sync.sh [full|tils|codex|projects|journal]
|
||||
|
||||
# Abort on any command failure; the per-item steps below rely on
# `|| continue` and `2>/dev/null` to stay best-effort under set -e.
set -e

# Slack Worker endpoint that receives memory sync payloads.
SLACK_API="https://blackroad-slack.amundsonalexa.workers.dev/memory"
MEMORY_DIR="$HOME/.blackroad/memory"
CODEX_DB="$MEMORY_DIR/codex/codex.db"          # sqlite: codex tables
COLLAB_DB="$HOME/.blackroad/collaboration.db"  # sqlite: sessions table
TODO_DIR="$MEMORY_DIR/infinite-todos/projects" # one JSON file per project
TIL_DIR="$MEMORY_DIR/til"                      # til-*.json files
JOURNAL="$MEMORY_DIR/journals/master-journal.jsonl" # append-only JSONL log

# ANSI colors for terminal output.
PINK='\033[38;5;205m'
GREEN='\033[38;5;82m'
CYAN='\033[0;36m'
RESET='\033[0m'
|
||||
|
||||
sync_tils() {
    # Read every til-*.json file and push the records to D1 in batches of 50.
    # The JSON array payload is assembled as a shell string; each record is
    # serialized by an embedded python3 one-liner.
    echo -e "${CYAN}Syncing TILs...${RESET}"
    local tils="["
    local first=true   # true while the current batch is still empty
    local count=0      # total TILs appended so far

    for f in "$TIL_DIR"/til-*.json; do
        [ -f "$f" ] || continue   # the glob may match nothing
        local data
        data=$(cat "$f" 2>/dev/null) || continue
        local til_id category learning broadcaster created_at
        # Extract each field separately; a file that fails to parse is skipped.
        til_id=$(echo "$data" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('til_id',''))" 2>/dev/null) || continue
        category=$(echo "$data" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('category','tip'))" 2>/dev/null)
        learning=$(echo "$data" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('learning','')[:500])" 2>/dev/null)
        broadcaster=$(echo "$data" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('broadcaster','')[:50])" 2>/dev/null)
        created_at=$(echo "$data" | python3 -c "import sys,json; d=json.load(sys.stdin); print(d.get('timestamp',d.get('created_at','')))" 2>/dev/null)

        # Require an id and learning text; otherwise skip the record.
        [ -z "$til_id" ] || [ -z "$learning" ] && continue

        if [ "$first" = true ]; then first=false; else tils+=","; fi
        # NOTE(review): field values are spliced into the python source inside
        # single quotes — a value containing a quote breaks the command and the
        # record is dropped. Looks intentional (best-effort) — TODO confirm.
        tils+=$(python3 -c "import json; print(json.dumps({'til_id':'$til_id','category':'$category','learning':$(python3 -c "import json; print(json.dumps('$learning'))"),'broadcaster':'$broadcaster','created_at':'$created_at'}))")
        count=$((count + 1))

        # Batch in groups of 50
        if [ $((count % 50)) -eq 0 ]; then
            tils+="]"
            curl -s -X POST "$SLACK_API" -H 'Content-Type: application/json' -d "{\"type\":\"sync\",\"tils\":$tils}" > /dev/null
            # Reset the accumulator for the next batch.
            tils="["
            first=true
            echo -e " ${GREEN}✓${RESET} Synced $count TILs..."
        fi
    done

    # Flush any partial final batch (first=false means it holds records).
    tils+="]"
    if [ "$first" = false ]; then
        curl -s -X POST "$SLACK_API" -H 'Content-Type: application/json' -d "{\"type\":\"sync\",\"tils\":$tils}" > /dev/null
    fi
    echo -e "${GREEN}✓${RESET} Synced $count TILs"
}
|
||||
|
||||
sync_codex() {
    # Push codex solutions / patterns / best practices / anti-patterns to D1.
    echo -e "${CYAN}Syncing Codex...${RESET}"
    [ -f "$CODEX_DB" ] || { echo "No codex DB"; return; }

    # Each LIMIT is wrapped in a subselect: applied outside the aggregate,
    # LIMIT would act on the single json_group_array() row and cap nothing.
    local codex_json
    codex_json=$(sqlite3 "$CODEX_DB" "SELECT json_group_array(json_object(
      'codex_id', CAST(id AS TEXT),
      'name', name,
      'type', 'solution',
      'category', COALESCE(category,''),
      'description', COALESCE(problem,'') || ' → ' || COALESCE(solution,''),
      'created_at', COALESCE(created_at,'')
    )) FROM (SELECT * FROM solutions LIMIT 200);" 2>/dev/null)

    local patterns_json
    patterns_json=$(sqlite3 "$CODEX_DB" "SELECT json_group_array(json_object(
      'codex_id', 'p-' || CAST(id AS TEXT),
      'name', pattern_name,
      'type', COALESCE(pattern_type,'pattern'),
      'category', COALESCE(tags,''),
      'description', COALESCE(description,''),
      'created_at', COALESCE(first_seen,'')
    )) FROM (SELECT * FROM patterns LIMIT 100);" 2>/dev/null)

    local practices_json
    practices_json=$(sqlite3 "$CODEX_DB" "SELECT json_group_array(json_object(
      'codex_id', 'bp-' || CAST(id AS TEXT),
      'name', practice_name,
      'type', 'best_practice',
      'category', COALESCE(category,''),
      'description', COALESCE(description,''),
      'created_at', COALESCE(created_at,'')
    )) FROM (SELECT * FROM best_practices LIMIT 100);" 2>/dev/null)

    local anti_json
    anti_json=$(sqlite3 "$CODEX_DB" "SELECT json_group_array(json_object(
      'codex_id', 'ap-' || CAST(id AS TEXT),
      'name', name,
      'type', 'anti_pattern',
      'category', COALESCE(severity,''),
      'description', COALESCE(description,''),
      'created_at', COALESCE(first_detected,'')
    )) FROM (SELECT * FROM anti_patterns LIMIT 100);" 2>/dev/null)

    # Merge the four arrays. The JSON is handed to python via the environment
    # instead of being spliced into the source, so quotes/backslashes inside
    # the data can no longer break the merge.
    local all_codex
    all_codex=$(CODEX_A="$codex_json" CODEX_B="$patterns_json" \
                CODEX_C="$practices_json" CODEX_D="$anti_json" python3 -c "
import json, os
merged = []
for key in ('CODEX_A', 'CODEX_B', 'CODEX_C', 'CODEX_D'):
    raw = os.environ.get(key) or '[]'
    try:
        merged.extend(json.loads(raw))
    except ValueError:
        pass  # missing table -> empty/invalid query output; skip it
print(json.dumps(merged))
")

    local count
    count=$(printf '%s' "$all_codex" | python3 -c "import sys,json; print(len(json.load(sys.stdin)))")

    curl -s -X POST "$SLACK_API" -H 'Content-Type: application/json' -d "{\"type\":\"sync\",\"codex\":$all_codex}" > /dev/null
    echo -e "${GREEN}✓${RESET} Synced $count codex entries"
}
|
||||
|
||||
sync_projects() {
    # Build the projects+todos payload with a single embedded python3 script,
    # then POST it to D1.
    echo -e "${CYAN}Syncing Projects & Todos...${RESET}"
    [ -d "$TODO_DIR" ] || { echo "No projects dir"; return; }

    local projects="[]"
    local todos="[]"
    local count=0

    # NOTE(review): '$TODO_DIR' is spliced into the python source; a path
    # containing a single quote would break it. Presumably safe under $HOME —
    # TODO confirm.
    projects=$(python3 -c "
import json, glob, os
projects = []
todos = []
for f in glob.glob('$TODO_DIR/*.json'):
    try:
        with open(f) as fh:
            d = json.load(fh)
        projects.append({
            'project_id': d.get('project_id',''),
            'title': d.get('title',''),
            'description': d.get('description','')[:200],
            'progress': d.get('progress',0),
            'status': d.get('status','active'),
            'timescale': d.get('timescale','forever'),
            'owner': d.get('owner','')[:50],
        })
        for t in d.get('todos',[]):
            todos.append({
                'todo_id': t.get('id',''),
                'project_id': d.get('project_id',''),
                'text': t.get('text','')[:200],
                'priority': t.get('priority','medium'),
                'status': t.get('status','pending'),
                'created_at': t.get('created_at',''),
            })
    except: pass
print(json.dumps({'projects': projects, 'todos': todos, 'count': len(projects)}))
")

    # The python output bundles projects, todos and the project count.
    count=$(echo "$projects" | python3 -c "import sys,json; print(json.load(sys.stdin)['count'])")
    # Re-serialize just the projects/todos keys and splice them (minus the
    # surrounding braces) into the request body.
    curl -s -X POST "$SLACK_API" -H 'Content-Type: application/json' -d "{\"type\":\"sync\",$(echo "$projects" | python3 -c "import sys,json; d=json.load(sys.stdin); print(json.dumps({'projects':d['projects'],'todos':d['todos']})[1:-1])")}" > /dev/null
    echo -e "${GREEN}✓${RESET} Synced $count projects"
}
|
||||
|
||||
sync_sessions() {
    # Push the 50 most-recent collaboration sessions to D1.
    echo -e "${CYAN}Syncing Sessions...${RESET}"
    [ -f "$COLLAB_DB" ] || { echo "No collab DB"; return; }

    local sessions
    # ORDER BY/LIMIT must run in a subquery: applied outside the aggregate
    # they act on the single json_group_array() row and are effectively
    # ignored (every session would be aggregated, in arbitrary order).
    sessions=$(sqlite3 "$COLLAB_DB" "SELECT json_group_array(json_object(
      'session_id', session_id,
      'status', status,
      'focus', COALESCE(focus,''),
      'last_seen', last_seen,
      'agent_id', COALESCE(agent_id,''),
      'created_at', started_at
    )) FROM (SELECT * FROM sessions ORDER BY last_seen DESC LIMIT 50);" 2>/dev/null) || sessions='[]'
    # Guard against empty output: json.load('') would fail and, under set -e,
    # abort the whole script.
    [ -n "$sessions" ] || sessions='[]'

    local count
    count=$(echo "$sessions" | python3 -c "import sys,json; print(len(json.load(sys.stdin)))")
    curl -s -X POST "$SLACK_API" -H 'Content-Type: application/json' -d "{\"type\":\"sync\",\"sessions\":$sessions}" > /dev/null
    echo -e "${GREEN}✓${RESET} Synced $count sessions"
}
|
||||
|
||||
sync_journal() {
    # Push the last 100 lines of the JSONL master journal to D1.
    echo -e "${CYAN}Syncing Journal (last 100)...${RESET}"
    [ -f "$JOURNAL" ] || { echo "No journal"; return; }

    local entries
    # tail streams the recent lines into an embedded python3 parser that
    # emits one JSON array; malformed lines are skipped.
    entries=$(tail -100 "$JOURNAL" | python3 -c "
import sys, json
entries = []
for line in sys.stdin:
    try:
        d = json.loads(line.strip())
        entries.append({
            'entry_id': d.get('sha256','')[:32] or str(len(entries)),
            'action': d.get('action','?'),
            'entity': d.get('entity','?'),
            'details': d.get('details','')[:200],
            'source': 'mac',
            'created_at': d.get('timestamp',''),
        })
    except: pass
print(json.dumps(entries))
")

    local count
    count=$(echo "$entries" | python3 -c "import sys,json; print(len(json.load(sys.stdin)))")
    curl -s -X POST "$SLACK_API" -H 'Content-Type: application/json' -d "{\"type\":\"sync\",\"journal\":$entries}" > /dev/null
    echo -e "${GREEN}✓${RESET} Synced $count journal entries"
}
|
||||
|
||||
# Dispatch on the requested sync target; no argument means a full sync.
sub="${1:-full}"
case "$sub" in
    tils) sync_tils ;;
    codex) sync_codex ;;
    projects) sync_projects ;;
    journal) sync_journal ;;
    sessions) sync_sessions ;;
    full)
        echo -e "${PINK}╔════════════════════════════════════════════════╗${RESET}"
        echo -e "${PINK}║ Memory → Slack D1 Full Sync ║${RESET}"
        echo -e "${PINK}╚════════════════════════════════════════════════╝${RESET}\n"
        sync_sessions
        sync_journal
        sync_tils
        sync_codex
        sync_projects
        echo -e "\n${GREEN}✓ Full sync complete${RESET}"
        echo -e " Test: curl -s $SLACK_API/stats | python3 -m json.tool"
        ;;
    *)
        echo "Usage: $0 [full|tils|codex|projects|journal|sessions]"
        ;;
esac
|
||||
Reference in New Issue
Block a user