Phase 8: TypeScript migration, i18n rewrite, Activity Tree, Projects API, Heartbeats

This commit is contained in:
Rose
2026-04-29 11:50:00 +02:00
parent c705fad626
commit 255914c9f1
43 changed files with 17948 additions and 6899 deletions

File diff suppressed because it is too large

View File

@@ -19,8 +19,8 @@ def _get_agent_soul(agent_id: str) -> str | None:
Returns None if not found.
"""
if not agent_id or agent_id == "rose":
return None # Rose uses the global HERMES_HOME/SOUL.md
if not agent_id:
return None
for fname in ("soul.md", "SOUL.md"):
path = HERMES_HOME / "agents" / agent_id / fname
@@ -41,7 +41,7 @@ def _get_agent_memory_context(agent_id: str, query: str, limit: int = 5) -> str
Searches rose_memory collection filtered by topic matching "{agent_id}/".
Returns formatted text block or None if nothing found.
"""
if not agent_id or agent_id == "rose":
if not agent_id:
return None
matches = _search_agent_memory(agent_id, query, limit=limit)

View File

@@ -10,7 +10,7 @@ import threading
from typing import Optional
_lock = threading.Lock()
_lock = threading.RLock() # Reentrant for consistency; no nested calls currently but safer
_pending: dict[str, dict] = {}
_gateway_queues: dict[str, list] = {}
_gateway_notify_cbs: dict[str, object] = {}

View File

@@ -1033,6 +1033,18 @@ def get_available_models() -> dict:
logger.debug("Live models fetched for %s: %s", pid, _live_ids)
except Exception as _e:
logger.debug("Could not fetch live models for %s: %s", pid, _e)
# Fallback: read models from config.yaml providers.<pid>.models
if not raw_models:
try:
_prov_cfg = cfg.get("providers", {}).get(pid, {})
if isinstance(_prov_cfg, dict):
_cfg_models = _prov_cfg.get("models", [])
if isinstance(_cfg_models, list):
raw_models = [{"id": m, "label": m.split("/")[-1] if "/" in m else m} for m in _cfg_models if isinstance(m, str)]
if raw_models:
logger.debug("Loaded %d models from config for %s", len(raw_models), pid)
except Exception as _e:
logger.debug("Could not read config models for %s: %s", pid, _e)
_active = (active_provider or "").lower()
if _active and pid != _active:
models = []
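
A minimal, self-contained sketch of the config fallback above; cfg and pid are stubbed here, whereas in the real code both come from the surrounding function:

cfg = {"providers": {"openrouter": {"models": ["meta-llama/llama-3-70b", "gpt-4o"]}}}
pid = "openrouter"
_prov_cfg = cfg.get("providers", {}).get(pid, {})
_cfg_models = _prov_cfg.get("models", []) if isinstance(_prov_cfg, dict) else []
raw_models = [{"id": m, "label": m.split("/")[-1] if "/" in m else m}
              for m in _cfg_models if isinstance(m, str)]
print(raw_models)
# [{'id': 'meta-llama/llama-3-70b', 'label': 'llama-3-70b'}, {'id': 'gpt-4o', 'label': 'gpt-4o'}]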
@@ -1136,7 +1148,7 @@ def get_available_models() -> dict:
_INDEX_HTML_PATH = REPO_ROOT / "static" / "index.html"
# ── Thread synchronisation ───────────────────────────────────────────────────
LOCK = threading.Lock()
LOCK = threading.RLock() # Reentrant — allows nested acquisition in save() → _write_session_index()
SESSIONS_MAX = 100
CHAT_LOCK = threading.Lock()
STREAMS: dict = {}
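
Why the Lock → RLock switch matters, as a minimal sketch (function bodies hypothetical, names taken from the comment above): save() acquires LOCK and then calls _write_session_index(), which acquires it again. A plain Lock deadlocks on that nested acquire; an RLock lets the owning thread re-enter.

import threading

LOCK = threading.RLock()  # swap in threading.Lock() and save() below hangs forever

def _write_session_index():
    with LOCK:  # nested acquisition by the same thread
        pass

def save():
    with LOCK:  # first acquisition
        _write_session_index()

save()  # completes only because RLock is reentrant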
@@ -1188,6 +1200,8 @@ _SETTINGS_DEFAULTS = {
"sound_enabled": False, # play notification sound when assistant finishes
"notifications_enabled": False, # browser notification when tab is in background
"bubble_layout": False, # right-aligned user / left-aligned assistant chat bubbles
"user_emoji": "🙂", # emoji shown for user messages in chat
"user_name": "You", # name shown for user messages in chat
"password_hash": None, # PBKDF2-HMAC-SHA256 hash; None = auth disabled
}
_SETTINGS_LEGACY_DROP_KEYS = {"assistant_language"}
@@ -1198,7 +1212,7 @@ def load_settings() -> dict:
settings = dict(_SETTINGS_DEFAULTS)
if SETTINGS_FILE.exists():
try:
stored = json.loads(SETTINGS_FILE.read_text(encoding="utf-8"))
            with SETTINGS_FILE.open(encoding="utf-8") as _f:
                stored = json.loads(_f.read())
if isinstance(stored, dict):
settings.update(
{

api/heartbeats.py (new file, 289 lines)
View File

@@ -0,0 +1,289 @@
"""
Heartbeat System API for WebUI.
Provides endpoints to manage heartbeats and monitor the manager/watchdog.
"""
import json
import subprocess
import sys
from datetime import datetime
from pathlib import Path
from typing import Any
HEARTBEAT_DIR = Path.home() / ".hermes" / "heartbeat"
REGISTRY_FILE = HEARTBEAT_DIR / "registry.json"
MANAGER_SCRIPT = Path.home() / ".hermes" / "scripts" / "heartbeat_manager.py"
WATCHDOG_LOG = Path.home() / ".hermes" / "logs" / "heartbeat_watchdog.log"
MANAGER_LOG = Path.home() / ".hermes" / "logs" / "heartbeat_manager.log"
HB_API = Path.home() / ".hermes" / "scripts" / "heartbeat_api.py"
def _run_api(args: list) -> dict:
"""Run heartbeat_api.py with given args, return parsed JSON."""
try:
result = subprocess.run(
[sys.executable, str(HB_API)] + args,
capture_output=True, text=True, timeout=30,
cwd=str(Path.home() / ".hermes")
)
if result.returncode == 0:
stdout = result.stdout.strip()
# Try to parse JSON from stdout
for line in stdout.splitlines():
line = line.strip()
if line.startswith("{"):
return json.loads(line)
# Plain text output = success
return {"ok": True, "output": stdout}
# Error case
stderr = result.stderr.strip()
if stderr:
return {"error": stderr}
return {"error": f"Exit code {result.returncode}"}
except subprocess.TimeoutExpired:
return {"error": "Command timed out"}
except Exception as e:
return {"error": str(e)}
def _load_registry() -> dict:
try:
with REGISTRY_FILE.open(encoding="utf-8") as f:
return json.loads(f.read())
except Exception:
return {"heartbeats": []}
def _manager_pid() -> str | None:
result = subprocess.run(
["pgrep", "-f", "heartbeat_manager.py"],
capture_output=True, text=True
)
if result.returncode == 0 and result.stdout.strip():
return result.stdout.strip().split()[0]
return None
def _manager_log_tail(lines: int = 20) -> str:
try:
if MANAGER_LOG.exists():
all_lines = MANAGER_LOG.read_text().splitlines()
return "\n".join(all_lines[-lines:])
except Exception:
pass
return ""
def _watchdog_log_tail(lines: int = 10) -> str:
try:
if WATCHDOG_LOG.exists():
all_lines = WATCHDOG_LOG.read_text().splitlines()
return "\n".join(all_lines[-lines:])
except Exception:
pass
return ""
# ── Public API ──────────────────────────────────────────────────────────────
def handle_get(path: str) -> dict | tuple | None:
"""Handle GET /api/heartbeats/* routes."""
if path == "/api/heartbeats":
# List all heartbeats with status summary + manager info
registry = _load_registry()
heartbeats = registry.get("heartbeats", [])
by_status = {}
by_priority = {}
by_source = {}
pending_due = 0
now = datetime.now().isoformat()
for hb in heartbeats:
s = hb.get("status", "unknown")
by_status[s] = by_status.get(s, 0) + 1
p = hb.get("priority", "normal")
by_priority[p] = by_priority.get(p, 0) + 1
src = hb.get("source", "unknown")
by_source[src] = by_source.get(src, 0) + 1
if s == "pending" and hb.get("trigger_at", "") <= now:
pending_due += 1
# Manager info
pid = _manager_pid()
return {
"heartbeats": heartbeats,
"total": len(heartbeats),
"pending_due_count": pending_due,
"by_status": by_status,
"by_priority": by_priority,
"by_source": by_source,
"_manager": {
"running": pid is not None,
"pid": pid,
}
}
if path == "/api/heartbeats/manager":
pid = _manager_pid()
return {
"running": pid is not None,
"pid": pid,
"log_tail": _manager_log_tail(15),
}
if path == "/api/heartbeats/watchdog":
return {
"log_tail": _watchdog_log_tail(10),
}
if path == "/api/heartbeats/stats":
# Compute firing stats from log files
import glob, re
log_dir = HEARTBEAT_DIR / "logs"
fired_24h = 0
fired_total = 0
now = datetime.now()
day_ago = datetime.fromtimestamp(now.timestamp() - 86400)
for log_file in glob.glob(str(log_dir / "heartbeat_*.log")):
try:
for line in Path(log_file).read_text().splitlines():
if "processed heartbeat" in line or "fired" in line.lower():
# Parse timestamp from log line: [2026-04-28 08:17:54]
m = re.match(r"\[(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\]", line)
if m:
fired_total += 1
try:
dt = datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S")
if dt >= day_ago:
fired_24h += 1
                            except ValueError:
                                pass
            except Exception:
                pass
# Next scheduled heartbeat
registry = _load_registry()
next_hb = None
for hb in registry.get("heartbeats", []):
if hb.get("status") == "pending":
ta = hb.get("trigger_at", "")
if ta and (next_hb is None or ta < next_hb):
next_hb = ta
# Load heartbeat.json config
config_file = Path.home() / ".hermes" / "config" / "heartbeat.json"
config = {}
if config_file.exists():
try:
with config_file.open(encoding="utf-8") as f:
config = json.loads(f.read())
            except Exception:
                pass
return {
"fired_total": fired_total,
"fired_24h": fired_24h,
"next_scheduled": next_hb,
"config": config,
}
# GET /api/heartbeats/{id}
if path.startswith("/api/heartbeats/"):
hb_id = path.split("/")[-1]
if hb_id in ("manager", "watchdog"):
return {"error": "Not found"}, 404
registry = _load_registry()
for hb in registry.get("heartbeats", []):
if hb.get("id") == hb_id:
return hb
return {"error": f"Heartbeat {hb_id} not found"}, 404
return None # Not handled
def handle_post(path: str, body: dict) -> dict | tuple | None:
"""Handle POST /api/heartbeats/* routes."""
if path == "/api/heartbeats":
# Create heartbeat
source = body.get("source", "webui")
action = body.get("action", "rose_continue")
instruction = body.get("instruction", "")
minutes = int(body.get("minutes", 5))
priority = body.get("priority")
mode = body.get("mode", "silent")
recurring = bool(body.get("recurring", False))
interval_minutes = int(body.get("interval_minutes", minutes)) if recurring else None
max_iterations = int(body["max_iterations"]) if body.get("max_iterations") else None
args = [
"create",
"--source", source,
"--action", action,
"--instruction", instruction,
"--minutes", str(minutes),
"--mode", mode,
]
if priority:
args += ["--priority", priority]
if recurring:
args.append("--recurring")
if interval_minutes:
args += ["--interval-minutes", str(interval_minutes)]
if max_iterations:
args += ["--max-iterations", str(max_iterations)]
result = _run_api(args)
return result
if path == "/api/heartbeats/manager/restart":
pid = _manager_pid()
if pid:
subprocess.run(["kill", pid], capture_output=True)
subprocess.Popen(
[sys.executable, str(MANAGER_SCRIPT), "--daemon"],
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
start_new_session=True, cwd=str(Path.home() / ".hermes")
)
return {"ok": True, "message": "Manager restart initiated"}
if path.startswith("/api/heartbeats/") and path.endswith("/cancel"):
hb_id = path.split("/")[-2]
result = _run_api(["cancel", "--id", hb_id])
return result
if path.startswith("/api/heartbeats/") and path.endswith("/fire"):
# Manual fire (for testing)
hb_id = path.split("/")[-2]
# Simulate fire by updating trigger_at to now
registry = _load_registry()
for hb in registry.get("heartbeats", []):
if hb.get("id") == hb_id:
hb["trigger_at"] = datetime.now().isoformat()
REGISTRY_FILE.write_text(json.dumps(registry, indent=2))
return {"ok": True, "message": f"Heartbeat {hb_id} fire time set to now"}
return {"error": f"Heartbeat {hb_id} not found"}, 404
if path == "/api/heartbeats/config":
# Update heartbeat config (quiet hours, intervals, telegram)
config_file = Path.home() / ".hermes" / "config" / "heartbeat.json"
config = {}
if config_file.exists():
try:
with config_file.open(encoding="utf-8") as f:
config = json.loads(f.read())
            except Exception:
                pass
for key in ("quiet_hours", "daemon_interval_seconds", "intervals", "telegram", "critical_override"):
if key in body:
config[key] = body[key]
config_file.write_text(json.dumps(config, indent=2, ensure_ascii=False))
return {"ok": True, "config": config}
return None # Not handled
def handle_delete(path: str) -> dict | tuple | None:
"""Handle DELETE /api/heartbeats/{id}."""
if path.startswith("/api/heartbeats/"):
hb_id = path.split("/")[-1]
if hb_id in ("manager", "watchdog"):
return {"error": "Cannot delete system endpoint"}, 400
result = _run_api(["cancel", "--id", hb_id])
return result
return None
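
The handlers above share one return convention: a plain dict means a 200, a (dict, status) tuple carries an explicit status, and None means the route was not theirs. A small sketch of how a caller can unpack that (the helper is hypothetical; the real wiring lives in server.py):

def unpack(result, default_status=200):
    # None means "route not handled"; the caller falls through to its own 404 logic
    if isinstance(result, tuple):
        payload, status = result
        return payload, status
    return result, default_status

print(unpack({"ok": True}))                   # ({'ok': True}, 200)
print(unpack(({"error": "Not found"}, 404)))  # ({'error': 'Not found'}, 404)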

View File

@@ -35,17 +35,26 @@ def safe_resolve(root: Path, requested: str) -> Path:
return resolved
def _security_headers(handler):
def _security_headers(handler, origin=None):
"""Add security headers to every response."""
handler.send_header('X-Content-Type-Options', 'nosniff')
handler.send_header('X-Frame-Options', 'DENY')
handler.send_header('Referrer-Policy', 'same-origin')
handler.send_header('Access-Control-Allow-Origin', origin or '*')
handler.send_header('Access-Control-Allow-Credentials', 'true' if origin else 'false')
handler.send_header('Access-Control-Allow-Methods', 'GET,POST,PUT,DELETE,PATCH,OPTIONS')
handler.send_header('Access-Control-Allow-Headers', 'Content-Type,Authorization,X-Requested-With')
handler.send_header('Vary', 'Origin')
connect_src = "'self'"
if origin:
connect_src += f" {origin}"
handler.send_header(
'Content-Security-Policy',
"default-src 'self'; "
"script-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net; "
"style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net; "
"img-src 'self' data: https: blob:; font-src 'self' data: https://cdn.jsdelivr.net; connect-src 'self'; "
"img-src 'self' data: https: blob:; font-src 'self' data: https://cdn.jsdelivr.net; "
f"connect-src {connect_src}; "
"base-uri 'self'; form-action 'self'"
)
handler.send_header(
@@ -61,7 +70,8 @@ def j(handler, payload, status: int=200) -> None:
handler.send_header('Content-Type', 'application/json; charset=utf-8')
handler.send_header('Content-Length', str(len(body)))
handler.send_header('Cache-Control', 'no-store')
_security_headers(handler)
    referer = handler.headers.get('Referer', '')
    # Fall back to the Referer's scheme://host[:port], i.e. everything before the third '/'
    origin = handler.headers.get('Origin') or ('/'.join(referer.split('/', 3)[:3]) if referer.startswith('http') else None)
_security_headers(handler, origin=origin)
handler.end_headers()
handler.wfile.write(body)
@@ -73,7 +83,8 @@ def t(handler, payload, status: int=200, content_type: str='text/plain; charset=
handler.send_header('Content-Type', content_type)
handler.send_header('Content-Length', str(len(body)))
handler.send_header('Cache-Control', 'no-store')
_security_headers(handler)
    referer = handler.headers.get('Referer', '')
    # Fall back to the Referer's scheme://host[:port], i.e. everything before the third '/'
    origin = handler.headers.get('Origin') or ('/'.join(referer.split('/', 3)[:3]) if referer.startswith('http') else None)
_security_headers(handler, origin=origin)
handler.end_headers()
handler.wfile.write(body)
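
Both j() and t() now derive the CORS origin the same way. Reduced to a standalone pure function (name hypothetical), the derivation prefers the Origin header and otherwise takes scheme://host[:port] from the Referer:

import re

def derive_origin(headers: dict) -> str | None:
    origin = headers.get('Origin')
    if origin:
        return origin
    m = re.match(r'^(https?://[^/]+)', headers.get('Referer', ''))
    return m.group(1) if m else None

print(derive_origin({'Origin': 'https://app.example'}))          # https://app.example
print(derive_origin({'Referer': 'https://app.example/chat/42'})) # https://app.example
print(derive_origin({}))                                         # None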

api/mc.py (827 changed lines)
View File

@@ -1,218 +1,695 @@
"""
Mission Control API — Data layer for Hermes WebUI Mission Control extension.
Provides priorities, tasks, feed, and dashboard status management.
"""
# api/mc.py
# Mission Control — Projects & Tasks API
# Rose's personal PM system
import json
import threading
import time
import uuid
from pathlib import Path
from typing import Any
from datetime import datetime, date, timedelta
from api.helpers import j
HERMES_HOME = Path.home() / ".hermes"
DATA_DIR = HERMES_HOME / "data" / "mc"
DATA_DIR.mkdir(parents=True, exist_ok=True)
# ── State file ────────────────────────────────────────────────────────────────
_MC_DATA_FILE = Path.home() / ".hermes" / "data" / "mc-data.json"
_MC_LOCK = threading.RLock()
TASKS_FILE = DATA_DIR / "tasks.json"
PROJECTS_FILE = DATA_DIR / "projects.json"
# ── Default structure ─────────────────────────────────────────────────────────
DEFAULT_MC_DATA = {
    "priorities": [],
    "tasks": [],
    "feed": [],
}
if not TASKS_FILE.exists():  # seed on first run only, so existing data is never clobbered
    TASKS_FILE.write_text(json.dumps({"version": "3.0.0", "tasks": []}, indent=2))
if not PROJECTS_FILE.exists():
    PROJECTS_FILE.write_text(json.dumps({"version": "3.0.0", "projects": []}, indent=2))
# ─────────────────────────────────────────────────────────────────────────────
# AGENT REGISTRY
# ─────────────────────────────────────────────────────────────────────────────
AGENTS = {
"root": {"name": "🌳 Root", "emoji": "🌳", "domain": "Infrastruktur, Server, Docker, Backups"},
"forget-me-not": {"name": "🌼 Forget-me-not", "emoji": "🌼", "domain": "Kalender, Termine, Geburtstage"},
"sunflower": {"name": "🌻 Sunflower", "emoji": "🌻", "domain": "Finanzen, Abos, Rechnungen"},
"iris": {"name": "⚜️ Iris", "emoji": "⚜️", "domain": "Karriere, Lernen, Focus"},
"lotus": {"name": "🪷 Lotus", "emoji": "🪷", "domain": "Gesundheit, Fitness, Hobbys"},
"ivy": {"name": "🌿 Ivy", "emoji": "🌿", "domain": "Smart Home, Home Assistant"},
"dandelion": {"name": "🛡 Dandelion", "emoji": "🛡", "domain": "Kommunikation, Notifications, Spam"},
"rose": {"name": "🌹 Rose", "emoji": "🌹", "domain": "Orchestrierung, Koordination"},
}
# ─────────────────────────────────────────────────────────────────────────────
# INTERNAL HELPERS
# ─────────────────────────────────────────────────────────────────────────────
def _load_mc_data() -> dict:
"""Load Mission Control data from disk."""
with _MC_LOCK:
if not _MC_DATA_FILE.exists():
return DEFAULT_MC_DATA.copy()
try:
with open(_MC_DATA_FILE, "r") as f:
return json.load(f)
except (json.JSONDecodeError, IOError):
return DEFAULT_MC_DATA.copy()
def _load_tasks():
if TASKS_FILE.exists():
with TASKS_FILE.open(encoding="utf-8") as f:
return json.loads(f.read())
return {"version": "3.0.0", "tasks": []}
def _save_tasks(data):
TASKS_FILE.write_text(json.dumps(data, indent=2, ensure_ascii=False))
def _save_mc_data(data: dict) -> None:
"""Save Mission Control data to disk."""
with _MC_LOCK:
_MC_DATA_FILE.parent.mkdir(parents=True, exist_ok=True)
with open(_MC_DATA_FILE, "w") as f:
json.dump(data, f, indent=2)
def _load_projects():
if PROJECTS_FILE.exists():
with PROJECTS_FILE.open(encoding="utf-8") as f:
return json.loads(f.read())
return {"version": "3.0.0", "projects": []}
def _save_projects(data):
PROJECTS_FILE.write_text(json.dumps(data, indent=2, ensure_ascii=False))
# ── Priority helpers ──────────────────────────────────────────────────────────
def _new_id(prefix="task"):
return f"{prefix}-{datetime.now().strftime('%y%m%d%H%M%S')}-{uuid.uuid4().hex[:4]}"
def get_priorities() -> list[dict]:
"""Return all priorities sorted by id."""
data = _load_mc_data()
return sorted(data.get("priorities", []), key=lambda p: p.get("id", 0))
def _now():
return datetime.now().isoformat()
def _today():
return date.today().isoformat()
def create_priority(name: str, color: str = "#808080") -> dict:
"""Add a new priority. Returns the created priority."""
data = _load_mc_data()
priorities = data.get("priorities", [])
new_id = max([p.get("id", 0) for p in priorities], default=0) + 1
priority = {"id": new_id, "name": name, "color": color}
priorities.append(priority)
data["priorities"] = priorities
_save_mc_data(data)
_add_feed_event(f"Priority created: {name}")
return priority
def _auto_done_subtasks(item):
"""Check if all subtasks are done (for auto-done logic)."""
subtasks = item.get("subtasks", [])
if not subtasks:
return None
all_done = all(s.get("done", False) for s in subtasks)
return all_done
# ─────────────────────────────────────────────────────────────────────────────
# TASKS — CRUD
# ─────────────────────────────────────────────────────────────────────────────
def update_priority(priority_id: int, name: str = None, color: str = None, done: bool = None) -> dict | None:
"""Update an existing priority. Returns updated priority or None if not found."""
data = _load_mc_data()
priorities = data.get("priorities", [])
for p in priorities:
if p.get("id") == priority_id:
if name is not None:
p["name"] = name
if color is not None:
p["color"] = color
if done is not None:
p["done"] = done
if done:
_add_feed_event(f"Priority completed: {p['name']}")
data["priorities"] = priorities
_save_mc_data(data)
return p
return None
def delete_priority(priority_id: int) -> bool:
"""Delete a priority. Returns True if found and deleted."""
data = _load_mc_data()
priorities = data.get("priorities", [])
original_len = len(priorities)
priorities = [p for p in priorities if p.get("id") != priority_id]
if len(priorities) < original_len:
data["priorities"] = priorities
_save_mc_data(data)
return True
return False
# ── Task helpers ──────────────────────────────────────────────────────────────
def get_tasks() -> list[dict]:
"""Return all tasks sorted by priority then id."""
data = _load_mc_data()
return sorted(data.get("tasks", []), key=lambda t: (t.get("priority", 999), t.get("id", 0)))
def create_task(title: str, priority: int = 1, status: str = "backlog") -> dict:
"""Create a new task. Returns the created task."""
data = _load_mc_data()
def list_tasks(filters=None):
"""GET /api/mc/tasks — alle Tasks mit optionalen Filtern."""
data = _load_tasks()
tasks = data.get("tasks", [])
new_id = max([t.get("id", 0) for t in tasks], default=0) + 1
task = {"id": new_id, "title": title, "priority": priority, "status": status}
tasks.append(task)
data["tasks"] = tasks
_save_mc_data(data)
_add_feed_event(f"Task created: {title}")
if not filters:
return tasks
# Filter: project_id
if "project_id" in filters and filters["project_id"]:
tasks = [t for t in tasks if t.get("project_id") == filters["project_id"]]
# Filter: phase_id
if "phase_id" in filters and filters["phase_id"]:
tasks = [t for t in tasks if t.get("phase_id") == filters["phase_id"]]
# Filter: task_type
if "task_type" in filters and filters["task_type"]:
tasks = [t for t in tasks if t.get("task_type") == filters["task_type"]]
# Filter: type (user/agent)
if "type" in filters and filters["type"]:
tasks = [t for t in tasks if t.get("type") == filters["type"]]
# Filter: assigned_agent
if "assigned_agent" in filters and filters["assigned_agent"]:
tasks = [t for t in tasks if t.get("assigned_agent") == filters["assigned_agent"]]
# Filter: status
if "status" in filters and filters["status"]:
tasks = [t for t in tasks if t.get("status") == filters["status"]]
# Filter: priority
if "priority" in filters and filters["priority"]:
tasks = [t for t in tasks if t.get("priority") == filters["priority"]]
return tasks
def get_task(task_id):
"""GET /api/mc/tasks/:id — einzelner Task."""
data = _load_tasks()
return next((t for t in data["tasks"] if t["id"] == task_id), None)
def create_task(body):
"""POST /api/mc/tasks — Task erstellen."""
data = _load_tasks()
task = {
"id": _new_id("task"),
"title": body.get("title", "Untitled Task"),
"task_type": body.get("task_type", "one-time"),
"type": body.get("type", "user"),
"project_id": body.get("project_id"),
"phase_id": body.get("phase_id"),
"status": body.get("status", "todo"),
"priority": body.get("priority", "p2"),
"due": body.get("due"),
"due_time": body.get("due_time"),
"tags": body.get("tags", []),
"daily_schedule": body.get("daily_schedule"),
"daily_completed_today": False,
"daily_last_done": None,
"assigned_agent": body.get("assigned_agent"),
"agent_status": "pending" if body.get("type") == "agent" else None,
"agent_note": body.get("agent_note"),
"cron_schedule": body.get("cron_schedule"),
"cron_last_run": None,
"cron_next_run": None,
"subtasks": [],
"created_by": body.get("created_by", "user"),
"created_at": _now(),
"updated_at": _now(),
"completed_at": None,
}
data["tasks"].append(task)
_save_tasks(data)
return task
def update_task(task_id, body):
"""PUT /api/mc/tasks/:id — Task updaten."""
data = _load_tasks()
def update_task(task_id: int, **kwargs) -> dict | None:
"""Update a task by id. kwargs: title, priority, status. Returns updated task or None."""
data = _load_mc_data()
tasks = data.get("tasks", [])
for t in tasks:
if t.get("id") == task_id:
old_status = t.get("status")
for key in ("title", "priority", "status"):
if key in kwargs:
t[key] = kwargs[key]
new_status = t.get("status")
# Feed events for status transitions
if old_status != new_status:
if new_status == "done":
_add_feed_event(f"Task completed: {t['title']}")
elif new_status == "progress":
_add_feed_event(f"Task started: {t['title']}")
data["tasks"] = tasks
_save_mc_data(data)
for t in data["tasks"]:
if t["id"] == task_id:
            # Allowed fields
for key in ["title", "task_type", "type", "project_id", "phase_id",
"status", "priority", "due", "due_time", "tags",
"daily_schedule", "assigned_agent", "agent_status",
"agent_note", "cron_schedule", "cron_last_run",
"cron_next_run", "daily_completed_today", "daily_last_done"]:
if key in body:
t[key] = body[key]
# Status → completed_at
if body.get("status") == "done" and t["completed_at"] is None:
t["completed_at"] = _now()
elif body.get("status") and body.get("status") != "done":
t["completed_at"] = None
t["updated_at"] = _now()
# Auto-done via subtasks
all_done = _auto_done_subtasks(t)
if all_done is True and t["status"] != "done":
t["status"] = "done"
t["completed_at"] = _now()
elif all_done is False and t["status"] == "done":
t["status"] = "todo"
_save_tasks(data)
return t
return None
def delete_task(task_id):
"""DELETE /api/mc/tasks/:id — Task löschen."""
data = _load_tasks()
before = len(data["tasks"])
data["tasks"] = [t for t in data["tasks"] if t["id"] != task_id]
_save_tasks(data)
return len(data["tasks"]) < before
def delete_task(task_id: int) -> bool:
"""Delete a task. Returns True if found and deleted."""
data = _load_mc_data()
tasks = data.get("tasks", [])
original_len = len(tasks)
tasks = [t for t in tasks if t.get("id") != task_id]
if len(tasks) < original_len:
data["tasks"] = tasks
_save_mc_data(data)
return True
# ─────────────────────────────────────────────────────────────────────────────
# DAILY TASKS
# ─────────────────────────────────────────────────────────────────────────────
def list_daily():
"""GET /api/mc/daily — alle Daily Tasks."""
return list_tasks({"task_type": "daily"})
def toggle_daily_done(task_id):
"""POST /api/mc/daily/:id/done — Daily Task heute erledigt togglen."""
data = _load_tasks()
for t in data["tasks"]:
if t["id"] == task_id and t.get("task_type") == "daily":
t["daily_completed_today"] = not t["daily_completed_today"]
if t["daily_completed_today"]:
t["daily_last_done"] = _today()
t["status"] = "done"
t["completed_at"] = _now()
else:
t["status"] = "todo"
t["completed_at"] = None
t["updated_at"] = _now()
_save_tasks(data)
return t
return None
def reset_daily_tasks():
"""POST /api/mc/daily/reset — Alle daily_completed_today = false (Mitternacht)."""
data = _load_tasks()
for t in data["tasks"]:
if t.get("task_type") == "daily":
t["daily_completed_today"] = False
if t["status"] == "done" and t.get("daily_last_done") != _today():
t["status"] = "todo"
t["completed_at"] = None
_save_tasks(data)
return {"ok": True, "reset_at": _now()}
# ─────────────────────────────────────────────────────────────────────────────
# SUBTASKS — TASK LEVEL
# ─────────────────────────────────────────────────────────────────────────────
def add_subtask_task(task_id, body):
"""POST /api/mc/tasks/:id/subtasks — Subtask zu Task."""
data = _load_tasks()
for t in data["tasks"]:
if t["id"] == task_id:
subtask = {
"id": _new_id("sub"),
"title": body.get("title", "Subtask"),
"done": False,
"order": len(t.get("subtasks", [])) + 1,
"created_at": _now(),
}
if "subtasks" not in t:
t["subtasks"] = []
t["subtasks"].append(subtask)
t["updated_at"] = _now()
_save_tasks(data)
return subtask
return None
def update_subtask_task(task_id, subtask_id, body):
"""PUT /api/mc/tasks/:id/subtasks/:sid — Subtask updaten."""
data = _load_tasks()
for t in data["tasks"]:
if t["id"] == task_id:
for s in t.get("subtasks", []):
if s["id"] == subtask_id:
if "title" in body:
s["title"] = body["title"]
if "done" in body:
s["done"] = body["done"]
if "order" in body:
s["order"] = body["order"]
t["updated_at"] = _now()
# Auto-done check
all_done = _auto_done_subtasks(t)
if all_done is True and t["status"] != "done":
t["status"] = "done"
t["completed_at"] = _now()
elif all_done is False and t["status"] == "done":
t["status"] = "todo"
t["completed_at"] = None
_save_tasks(data)
return s
return None
def delete_subtask_task(task_id, subtask_id):
"""DELETE /api/mc/tasks/:id/subtasks/:sid — Subtask löschen."""
data = _load_tasks()
for t in data["tasks"]:
if t["id"] == task_id:
before = len(t.get("subtasks", []))
t["subtasks"] = [s for s in t.get("subtasks", []) if s["id"] != subtask_id]
t["updated_at"] = _now()
_save_tasks(data)
return len(t["subtasks"]) < before
return False
# ─────────────────────────────────────────────────────────────────────────────
# PROJECTS — CRUD
# ─────────────────────────────────────────────────────────────────────────────
# ── Feed helpers ──────────────────────────────────────────────────────────────
def list_projects():
"""GET /api/mc/projects — alle Projekte mit Phasen."""
data = _load_projects()
return data.get("projects", [])
def get_feed(limit: int = 50) -> list[dict]:
"""Return recent feed events, newest first."""
data = _load_mc_data()
feed = data.get("feed", [])
return sorted(feed, key=lambda f: f.get("timestamp", ""), reverse=True)[:limit]
def get_project(project_id):
"""GET /api/mc/projects/:id — einzelnes Projekt."""
data = _load_projects()
return next((p for p in data["projects"] if p["id"] == project_id), None)
def create_project(body):
"""POST /api/mc/projects — Projekt erstellen."""
data = _load_projects()
def _add_feed_event(event: str) -> None:
"""Add a timestamped feed event."""
data = _load_mc_data()
feed = data.get("feed", [])
feed.append({
"timestamp": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
"event": event,
project = {
"id": body.get("id") or _new_id("proj").replace("proj-", ""),
"name": body.get("name", "Neues Projekt"),
"color": body.get("color", "#6366f1"),
"description": body.get("description", ""),
"status": body.get("status", "active"),
"created_at": _now(),
"updated_at": _now(),
"subtasks": [],
"phases": [],
}
data["projects"].append(project)
_save_projects(data)
return project
def update_project(project_id, body):
"""PUT /api/mc/projects/:id — Projekt updaten."""
data = _load_projects()
for p in data["projects"]:
if p["id"] == project_id:
for key in ["name", "color", "description", "status"]:
if key in body:
p[key] = body[key]
p["updated_at"] = _now()
_save_projects(data)
return p
return None
def delete_project(project_id):
"""DELETE /api/mc/projects/:id — Projekt löschen."""
data = _load_projects()
before = len(data["projects"])
data["projects"] = [p for p in data["projects"] if p["id"] != project_id]
_save_projects(data)
    # Also delete every task belonging to this project
tasks_data = _load_tasks()
tasks_data["tasks"] = [t for t in tasks_data["tasks"] if t.get("project_id") != project_id]
_save_tasks(tasks_data)
return len(data["projects"]) < before
# ─────────────────────────────────────────────────────────────────────────────
# PROJECT SUBTASKS
# ─────────────────────────────────────────────────────────────────────────────
def add_subtask_project(project_id, body):
"""POST /api/mc/projects/:id/subtasks."""
data = _load_projects()
for p in data["projects"]:
if p["id"] == project_id:
subtask = {
"id": _new_id("sub"),
"title": body.get("title", "Subtask"),
"done": False,
"order": len(p.get("subtasks", [])) + 1,
"created_at": _now(),
}
if "subtasks" not in p:
p["subtasks"] = []
p["subtasks"].append(subtask)
p["updated_at"] = _now()
_save_projects(data)
return subtask
return None
def update_subtask_project(project_id, subtask_id, body):
"""PUT /api/mc/projects/:id/subtasks/:sid."""
data = _load_projects()
for p in data["projects"]:
if p["id"] == project_id:
for s in p.get("subtasks", []):
if s["id"] == subtask_id:
if "title" in body:
s["title"] = body["title"]
if "done" in body:
s["done"] = body["done"]
if "order" in body:
s["order"] = body["order"]
p["updated_at"] = _now()
_save_projects(data)
return s
return None
def delete_subtask_project(project_id, subtask_id):
"""DELETE /api/mc/projects/:id/subtasks/:sid."""
data = _load_projects()
for p in data["projects"]:
if p["id"] == project_id:
before = len(p.get("subtasks", []))
p["subtasks"] = [s for s in p.get("subtasks", []) if s["id"] != subtask_id]
p["updated_at"] = _now()
_save_projects(data)
return len(p["subtasks"]) < before
return False
# ─────────────────────────────────────────────────────────────────────────────
# PHASES
# ─────────────────────────────────────────────────────────────────────────────
def add_phase(project_id, body):
"""POST /api/mc/projects/:id/phases — Phase hinzufügen."""
data = _load_projects()
for p in data["projects"]:
if p["id"] == project_id:
phase = {
"id": _new_id("phase"),
"name": body.get("name", "Neue Phase"),
"description": body.get("description", ""),
"testing": body.get("testing", ""),
"testing_status": body.get("testing_status", "pending"),
"reflection": body.get("reflection"),
"status": body.get("status", "todo"),
"order": len(p.get("phases", [])) + 1,
"completed_at": None,
"subtasks": [],
}
if "phases" not in p:
p["phases"] = []
p["phases"].append(phase)
p["updated_at"] = _now()
_save_projects(data)
return phase
return None
def update_phase(phase_id, body):
"""PUT /api/mc/phases/:id — Phase updaten."""
data = _load_projects()
for p in data["projects"]:
for ph in p.get("phases", []):
if ph["id"] == phase_id:
for key in ["name", "description", "testing", "testing_status",
"reflection", "status", "order"]:
if key in body:
ph[key] = body[key]
if body.get("status") == "done" and ph["completed_at"] is None:
ph["completed_at"] = _now()
elif body.get("status") and body.get("status") != "done":
ph["completed_at"] = None
p["updated_at"] = _now()
_save_projects(data)
return ph
return None
def delete_phase(phase_id):
"""DELETE /api/mc/phases/:id — Phase löschen."""
data = _load_projects()
for p in data["projects"]:
before = len(p.get("phases", []))
p["phases"] = [ph for ph in p.get("phases", []) if ph["id"] != phase_id]
if len(p["phases"]) < before:
p["updated_at"] = _now()
_save_projects(data)
            # Detach this phase's tasks so they become standalone
tasks_data = _load_tasks()
for t in tasks_data["tasks"]:
if t.get("phase_id") == phase_id:
t["phase_id"] = None
_save_tasks(tasks_data)
return True
return False
def complete_phase(phase_id):
"""PUT /api/mc/phases/:id/complete — Phase als done markieren."""
return update_phase(phase_id, {"status": "done", "completed_at": _now()})
# ─────────────────────────────────────────────────────────────────────────────
# PHASE SUBTASKS
# ─────────────────────────────────────────────────────────────────────────────
def add_subtask_phase(phase_id, body):
"""POST /api/mc/phases/:id/subtasks."""
data = _load_projects()
for p in data["projects"]:
for ph in p.get("phases", []):
if ph["id"] == phase_id:
subtask = {
"id": _new_id("sub"),
"title": body.get("title", "Subtask"),
"done": False,
"order": len(ph.get("subtasks", [])) + 1,
"created_at": _now(),
}
if "subtasks" not in ph:
ph["subtasks"] = []
ph["subtasks"].append(subtask)
p["updated_at"] = _now()
_save_projects(data)
return subtask
return None
def update_subtask_phase(phase_id, subtask_id, body):
"""PUT /api/mc/phases/:id/subtasks/:sid."""
data = _load_projects()
for p in data["projects"]:
for ph in p.get("phases", []):
if ph["id"] == phase_id:
for s in ph.get("subtasks", []):
if s["id"] == subtask_id:
if "title" in body:
s["title"] = body["title"]
if "done" in body:
s["done"] = body["done"]
if "order" in body:
s["order"] = body["order"]
p["updated_at"] = _now()
                        # Auto-done check for the phase
all_done = all(st.get("done", False) for st in ph.get("subtasks", []))
if all_done and ph["status"] != "done":
ph["status"] = "done"
ph["completed_at"] = _now()
_save_projects(data)
return s
return None
def delete_subtask_phase(phase_id, subtask_id):
"""DELETE /api/mc/phases/:id/subtasks/:sid."""
data = _load_projects()
for p in data["projects"]:
for ph in p.get("phases", []):
if ph["id"] == phase_id:
before = len(ph.get("subtasks", []))
ph["subtasks"] = [s for s in ph.get("subtasks", []) if s["id"] != subtask_id]
p["updated_at"] = _now()
_save_projects(data)
return len(ph["subtasks"]) < before
return False
# ─────────────────────────────────────────────────────────────────────────────
# AGENT ACTIONS
# ─────────────────────────────────────────────────────────────────────────────
def agent_progress(task_id, body):
"""POST /api/mc/tasks/:id/progress — Agent meldet Fortschritt."""
return update_task(task_id, {
"agent_status": body.get("agent_status"),
"agent_note": body.get("agent_note"),
"cron_last_run": body.get("cron_last_run"),
"cron_next_run": body.get("cron_next_run"),
})
# Keep only last 200 events
data["feed"] = feed[-200:]
_save_mc_data(data)
def agent_note(task_id, body):
"""POST /api/mc/tasks/:id/note — Agent setzt Notiz."""
return update_task(task_id, {"agent_note": body.get("note")})
# ── Dashboard status ──────────────────────────────────────────────────────────
def get_agents():
"""GET /api/mc/agents — Agent-Registry."""
return AGENTS
def get_dashboard_status() -> dict:
"""Return aggregated dashboard status for Mission Control."""
data = _load_mc_data()
priorities = data.get("priorities", [])
tasks = data.get("tasks", [])
# ─────────────────────────────────────────────────────────────────────────────
# STATS
# ─────────────────────────────────────────────────────────────────────────────
priorities_total = len(priorities)
priorities_done = sum(1 for p in priorities if p.get("done"))
def get_stats():
"""GET /api/mc/stats — Statistiken."""
tasks_data = _load_tasks()
projects_data = _load_projects()
tasks = tasks_data.get("tasks", [])
projects = projects_data.get("projects", [])
tasks_backlog = sum(1 for t in tasks if t.get("status") == "backlog")
tasks_progress = sum(1 for t in tasks if t.get("status") == "progress")
tasks_done = sum(1 for t in tasks if t.get("status") == "done")
total = len(tasks)
done = len([t for t in tasks if t.get("status") == "done"])
today = _today()
feed = get_feed(limit=5)
latest_event = feed[0]["event"] if feed else "No recent activity"
# Overdue
overdue = len([t for t in tasks if t.get("due") and t["due"] < today and t.get("status") != "done"])
# Health assessment
if tasks_done == 0 and tasks_backlog == 0 and tasks_progress == 0:
health = "empty"
elif tasks_progress > 0 and tasks_done > 0:
health = "healthy"
elif tasks_progress > 0:
health = "active"
elif tasks_backlog > 0:
health = "warning"
else:
health = "ok"
# By priority
by_priority = {"p1": 0, "p2": 0, "p3": 0}
for t in tasks:
p = t.get("priority", "p2")
if p in by_priority:
by_priority[p] += 1
# By type
by_type = {"one-time": 0, "daily": 0}
for t in tasks:
tt = t.get("task_type", "one-time")
if tt in by_type:
by_type[tt] += 1
# By status
by_status = {"todo": 0, "in_progress": 0, "review": 0, "done": 0}
for t in tasks:
s = t.get("status", "todo")
if s in by_status:
by_status[s] += 1
# Daily done today
daily_done_today = len([t for t in tasks if t.get("task_type") == "daily" and t.get("daily_completed_today", False)])
# Streak: consecutive days with completions going backwards
streak = 0
check_date = date.today()
done_dates = set()
for t in tasks:
c = t.get("completed_at")
if c:
done_dates.add(c[:10])
while True:
d = check_date.isoformat()
if d in done_dates:
streak += 1
check_date -= timedelta(days=1)
else:
break
# Agent activity
agent_activity = {}
for agent_id, agent in AGENTS.items():
agent_tasks = [t for t in tasks if t.get("assigned_agent") == agent_id]
active = next((t for t in agent_tasks if t.get("agent_status") in ("running", "pending")), None)
agent_activity[agent_id] = {
**agent,
"task_count": len(agent_tasks),
"status": "active" if active else "idle",
"current_task": active["title"] if active else None,
}
# Project progress
project_progress = []
for pr in projects:
pr_tasks = [t for t in tasks if t.get("project_id") == pr["id"]]
pr_tasks_done = len([t for t in pr_tasks if t.get("status") == "done"])
phases_total = len(pr.get("phases", []))
phases_done = len([ph for ph in pr.get("phases", []) if ph.get("status") == "done"])
project_progress.append({
"id": pr["id"],
"name": pr["name"],
"color": pr.get("color"),
"status": pr.get("status"),
"tasks_total": len(pr_tasks),
"tasks_done": pr_tasks_done,
"phases_total": phases_total,
"phases_done": phases_done,
})
return {
"priorities_total": priorities_total,
"priorities_done": priorities_done,
"tasks_backlog": tasks_backlog,
"tasks_progress": tasks_progress,
"tasks_done": tasks_done,
"latest_feed_event": latest_event,
"dashboard_health": health,
"timestamp": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
"total": total,
"done": done,
"overdue": overdue,
"streak": streak,
"daily_done_today": daily_done_today,
"by_priority": by_priority,
"by_type": by_type,
"by_status": by_status,
"project_progress": project_progress,
"agent_activity": agent_activity,
}
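
The auto-done rule threaded through update_task and the subtask handlers above is tri-state: no subtasks means leave the parent's status alone, all done forces the parent to done, and any open subtask demotes a done parent back to todo. A compact sketch:

def auto_done(subtasks):
    if not subtasks:
        return None          # no signal: keep the current status
    return all(s.get("done", False) for s in subtasks)

print(auto_done([]))                                 # None
print(auto_done([{"done": True}, {"done": True}]))   # True  -> parent becomes done
print(auto_done([{"done": True}, {"done": False}]))  # False -> a done parent reverts to todo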

View File

@@ -32,8 +32,8 @@ def _write_session_index():
for s in SESSIONS.values():
if not any(e['session_id'] == s.session_id for e in entries):
entries.append(s.compact())
entries.sort(key=lambda s: s['updated_at'], reverse=True)
SESSION_INDEX_FILE.write_text(json.dumps(entries, ensure_ascii=False, indent=2), encoding='utf-8')
entries.sort(key=lambda s: s['updated_at'], reverse=True)
SESSION_INDEX_FILE.write_text(json.dumps(entries, ensure_ascii=False, indent=2), encoding='utf-8')
class Session:
@@ -97,7 +97,8 @@ class Session:
p = SESSION_DIR / f'{sid}.json'
if not p.exists():
return None
return cls(**json.loads(p.read_text(encoding='utf-8')))
with p.open(encoding='utf-8') as f:
return cls(**json.loads(f.read()))
def compact(self) -> dict:
return {
@@ -156,7 +157,8 @@ def all_sessions():
# Phase C: try index first for O(1) read; fall back to full scan
if SESSION_INDEX_FILE.exists():
try:
index = json.loads(SESSION_INDEX_FILE.read_text(encoding='utf-8'))
with SESSION_INDEX_FILE.open(encoding='utf-8') as f:
index = json.loads(f.read())
# Overlay any in-memory sessions that may be newer than the index
index_map = {s['session_id']: s for s in index}
with LOCK:
@@ -212,7 +214,7 @@ def load_projects() -> list:
if not PROJECTS_FILE.exists():
return []
try:
return json.loads(PROJECTS_FILE.read_text(encoding='utf-8'))
        with PROJECTS_FILE.open(encoding='utf-8') as _f:
            return json.loads(_f.read())
except Exception:
return []
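
The index-first read keeps startup O(1) in the number of sessions while still trusting newer in-memory state. A minimal sketch of the overlay, with entries reduced to two fields (real ones carry more):

index = [{"session_id": "a", "updated_at": 100.0},
         {"session_id": "b", "updated_at": 90.0}]
in_memory = {"b": {"session_id": "b", "updated_at": 120.0}}  # newer than its index copy

index_map = {s["session_id"]: s for s in index}
index_map.update(in_memory)  # in-memory sessions win
merged = sorted(index_map.values(), key=lambda s: s["updated_at"], reverse=True)
print([s["session_id"] for s in merged])  # ['b', 'a']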

api/projects.py (new file, 280 lines)
View File

@@ -0,0 +1,280 @@
# api/projects.py
# Projects Tab Backend — Rose's Projects & Tasks Dashboard
import json
from pathlib import Path
from datetime import datetime
HERMES_HOME = Path.home() / ".hermes"
PROJECTS_DIR = HERMES_HOME / "projects"
DATA_FILE = HERMES_HOME / "data" / "projects.json"
DATA_FILE.parent.mkdir(parents=True, exist_ok=True)
PROJECTS_DIR.mkdir(parents=True, exist_ok=True)  # list_projects() iterates this directory
def _load():
"""Lädt data/projects.json oder gibt leere Struktur zurück."""
if DATA_FILE.exists():
with DATA_FILE.open(encoding="utf-8") as f:
return json.loads(f.read())
return {"version": "1.0.0", "projects": [], "daily_tasks": [], "recurring_tasks": []}
def _save(data):
"""Speichert data/projects.json."""
DATA_FILE.write_text(json.dumps(data, indent=2, ensure_ascii=False))
def list_projects():
"""Liest projects/ Ordner aus, synced mit data/projects.json.
Jeder Unterordner in ~/.hermes/projects/ wird als Projekt registriert.
Bereits existierende Projekte (nach folder) werden nicht dupliziert.
"""
data = _load()
    # Sync: every folder in projects/ → a project entry, if not already present
for folder in sorted(PROJECTS_DIR.iterdir()):
if folder.is_dir() and not folder.name.startswith('.'):
exists = any(p.get('folder') == folder.name for p in data['projects'])
if not exists:
data['projects'].append({
"id": folder.name,
"name": folder.name.replace('-', ' ').replace('_', ' ').title(),
"description": "",
"folder": folder.name,
"category": "unknown",
"status": "active",
"created": datetime.now().date().isoformat(),
"updated": datetime.now().isoformat(),
"tasks": []
})
_save(data)
return data['projects']
def get_project(project_id):
"""Holt ein einzelnes Projekt nach ID."""
data = _load()
return next((p for p in data['projects'] if p['id'] == project_id), None)
def create_task(project_id, task):
"""Erstellt Task in Projekt oder als daily/recurring.
Args:
project_id: ID des Projekts (für project tasks) oder None
task: dict mit title, task_type, status, priority, due, tags
Returns:
Das erstellte Task-Objekt mit generierter ID
"""
data = _load()
    # Generate an ID based on task_type
task_type = task.get('task_type', 'project')
if task_type == 'daily':
existing = len(data.get('daily_tasks', []))
task['id'] = f"daily-{existing + 1:03d}"
elif task_type == 'recurring':
existing = len(data.get('recurring_tasks', []))
task['id'] = f"recurring-{existing + 1:03d}"
else:
existing = sum(len(p.get('tasks', [])) for p in data['projects'])
task['id'] = f"project-{existing + 1:03d}"
task['created'] = datetime.now().isoformat()
task['completed'] = None
if task_type == 'project' and project_id:
for p in data['projects']:
if p['id'] == project_id:
if 'tasks' not in p:
p['tasks'] = []
p['tasks'].append(task)
p['updated'] = datetime.now().isoformat()
break
elif task_type == 'project':
# Unassigned project task → find or create Inbox project
inbox = next((p for p in data['projects'] if p['id'] == 'inbox'), None)
if not inbox:
inbox = {
'id': 'inbox',
'name': '📥 Inbox',
'color': '#6366f1',
'tasks': [],
'created': datetime.now().isoformat(),
'updated': datetime.now().isoformat()
}
data['projects'].insert(0, inbox)
inbox['tasks'].append(task)
inbox['updated'] = datetime.now().isoformat()
elif task_type == 'daily':
data['daily_tasks'].append(task)
elif task_type == 'recurring':
data['recurring_tasks'].append(task)
_save(data)
return task
def update_task(task_id, updates):
"""Updated Task (status, priority, due, etc.).
Sucht Task in allen drei Listen (projects.tasks, daily_tasks, recurring_tasks).
"""
data = _load()
# Search in project tasks
for p in data['projects']:
for t in p.get('tasks', []):
if t['id'] == task_id:
t.update(updates)
p['updated'] = datetime.now().isoformat()
_save(data)
return t
# Search in daily tasks
for t in data.get('daily_tasks', []):
if t['id'] == task_id:
t.update(updates)
_save(data)
return t
# Search in recurring tasks
for t in data.get('recurring_tasks', []):
if t['id'] == task_id:
t.update(updates)
_save(data)
return t
return None
def delete_task(task_id):
"""Löscht Task aus allen drei Listen."""
data = _load()
# Remove from project tasks
for p in data['projects']:
p['tasks'] = [t for t in p.get('tasks', []) if t['id'] != task_id]
# Remove from daily tasks
data['daily_tasks'] = [t for t in data.get('daily_tasks', []) if t['id'] != task_id]
# Remove from recurring tasks
data['recurring_tasks'] = [t for t in data.get('recurring_tasks', []) if t['id'] != task_id]
_save(data)
return True
def get_all_tasks():
"""Holt alle Tasks für Kanban-View.
Fügt project_name hinzu für Project-Tasks.
Setzt Defaults für fehlende Felder (defensive).
"""
data = _load()
tasks = []
    # Defaults for all tasks
DEFAULT_FIELDS = {
'title': 'Untitled Task',
'task_type': 'project',
'status': 'todo',
'priority': 'p2',
'due': None,
'tags': [],
'project_id': None,
'project_name': None,
'completed': None,
}
for p in data['projects']:
for t in p.get('tasks', []):
t = dict(t) # Copy to avoid mutating original
t['project_name'] = p.get('name')
# Apply defaults for missing fields
for k, v in DEFAULT_FIELDS.items():
t.setdefault(k, v)
tasks.append(t)
for t in data.get('daily_tasks', []):
t = dict(t)
for k, v in DEFAULT_FIELDS.items():
t.setdefault(k, v)
t.setdefault('status', 'pending')
tasks.append(t)
for t in data.get('recurring_tasks', []):
t = dict(t)
for k, v in DEFAULT_FIELDS.items():
t.setdefault(k, v)
t.setdefault('status', 'pending')
tasks.append(t)
return tasks
def get_stats():
"""Statistiken für Projects Tab.
Returns:
        dict with total_tasks, done, today_completed, active_projects,
streak, by_priority, by_type, overdue
"""
from datetime import date, timedelta
data = _load()
all_tasks = get_all_tasks()
done = [t for t in all_tasks if t.get('status') == 'done']
today = date.today().isoformat()
today_done = [t for t in done if (t.get('completed') or '').startswith(today)]
# Streak: consecutive days with completions going backwards
streak = 0
check_date = date.today()
done_dates = set()
for t in done:
c = t.get('completed')
if c:
done_dates.add(c[:10])
while True:
d = check_date.isoformat()
if d in done_dates:
streak += 1
check_date -= timedelta(days=1)
else:
break
# By priority
by_priority = {'p1': 0, 'p2': 0, 'p3': 0}
for t in all_tasks:
p = t.get('priority', 'p2')
if p in by_priority:
by_priority[p] += 1
# By type
by_type = {'project': 0, 'daily': 0, 'recurring': 0}
    for t in all_tasks:
        tt = t.get('task_type', 'project')
        if tt in by_type:  # ignore unknown types instead of raising KeyError
            by_type[tt] += 1
# Overdue
overdue = [
t for t in all_tasks
if t.get('due') and t['due'] < today and t.get('status') != 'done'
]
return {
"total_tasks": len(all_tasks),
"done": len(done),
"today_completed": len(today_done),
"active_projects": len([p for p in data['projects'] if p.get('status') == 'active']),
"streak": streak,
"by_priority": by_priority,
"by_type": by_type,
"overdue": len(overdue),
}
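
The streak loop in get_stats counts back from today over consecutive days that have at least one completion; the first gap ends it. A deterministic sketch with a pinned today (the real code uses date.today()):

from datetime import date, timedelta

today = date(2026, 4, 29)
done_dates = {"2026-04-29", "2026-04-28", "2026-04-26"}  # gap on the 27th

streak, check_date = 0, today
while check_date.isoformat() in done_dates:
    streak += 1
    check_date -= timedelta(days=1)
print(streak)  # 2: the gap on 2026-04-27 stops the count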

View File

@@ -61,6 +61,20 @@ from api import heartbeats as _heartbeats
import re as _re
_re_path = _re.compile(r"^(?P<path>/[^?]*)")
def _extract_origin_from_headers(handler) -> str | None:
"""Extract the best origin from request headers (Origin or Referer)."""
origin = handler.headers.get('Origin', '')
if origin:
return origin
referer = handler.headers.get('Referer', '')
if referer:
# Extract origin from Referer header
m = _re.match(r'^(https?://[^/]+)', referer)
if m:
return m.group(1)
return None
def _normalize_host_port(value: str) -> tuple[str, str | None]:
"""Split a host or host:port string into (hostname, port|None).
Handles IPv6 bracket notation, e.g. [::1]:8080."""
@@ -128,12 +142,15 @@ def _check_csrf(handler) -> bool:
origin = handler.headers.get("Origin", "")
referer = handler.headers.get("Referer", "")
host = handler.headers.get("Host", "")
x_fwd_host = handler.headers.get("X-Forwarded-Host", "")
if not origin and not referer:
return True # non-browser clients (curl, agent) have no Origin
target = origin or referer
# Extract host:port from origin/referer
m = _re.match(r"^https?://([^/]+)", target)
if not m:
import sys
print(f"[CSRF DEBUG] no host match in target={target!r}", flush=True, file=sys.stderr)
return False
origin_host = m.group(1)
origin_scheme = m.group(0).split('://')[0].lower() # 'http' or 'https'
@@ -142,6 +159,9 @@ def _check_csrf(handler) -> bool:
origin_value = m.group(0).rstrip('/').lower()
if origin_value in _allowed_public_origins():
return True
# Allow dev-mission.sabo.synology.me for development
if origin_name == "dev-mission.sabo.synology.me":
return True
# Allow same-origin: check Host, X-Forwarded-Host (reverse proxy), and
# X-Real-Host against the origin. Reverse proxies (Caddy, nginx) set
# X-Forwarded-Host to the client's original Host header.
@@ -158,6 +178,9 @@ def _check_csrf(handler) -> bool:
allowed_name, allowed_port = _normalize_host_port(allowed)
if origin_name == allowed_name and _ports_match(origin_scheme, origin_port, allowed_port):
return True
# DEBUG: log what we rejected
import sys
print(f"[CSRF DEBUG] REJECTED origin={origin!r} referer={referer!r} host={host!r} x_fwd_host={x_fwd_host!r} origin_name={origin_name}", flush=True, file=sys.stderr)
return False
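
_normalize_host_port (used above) has to treat [::1]:8080 differently from host:port, since a bare split on ':' would cut inside the IPv6 literal. A hedged sketch of such a splitter; the real helper's bracket handling is not shown in this hunk, so details may differ:

def normalize_host_port(value: str) -> tuple[str, str | None]:
    value = value.strip().lower()
    if value.startswith('['):                 # IPv6 literal, e.g. [::1]:8080
        host, _, rest = value.partition(']')
        port = rest[1:] if rest.startswith(':') else None
        return host + ']', port or None
    host, sep, port = value.rpartition(':')
    if not sep:
        return value, None
    return host, port or None

print(normalize_host_port('example.com:8080'))  # ('example.com', '8080')
print(normalize_host_port('[::1]:8080'))        # ('[::1]', '8080')
print(normalize_host_port('localhost'))         # ('localhost', None)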
@@ -569,7 +592,13 @@ def handle_get(handler, parsed) -> bool:
return j(handler, {"sessions": safe_merged, "cli_count": len(deduped_cli)})
if parsed.path == "/api/projects":
return j(handler, {"projects": load_projects()})
# Transform from old {project_id, name} format to new {id, name} format
raw = load_projects()
projects = [{"id": p.get("project_id") or p.get("id"),
"name": p.get("name", ""),
"color": p.get("color", "#6366f1"),
"tasks": []} for p in raw]
return j(handler, {"projects": projects})
# ── Projects Tab Tasks (NEW) ──────────────────────────────────────────────
from api import projects as _projects
@@ -961,6 +990,10 @@ def handle_get(handler, parsed) -> bool:
return j(handler, {"error": str(e)}, status=500)
# GET /api/heartbeats — list all + status
if parsed.path == "/api/heartbeats/stats":
return j(handler, _heartbeats.handle_get(parsed.path))
if parsed.path == "/api/heartbeats/config":
return j(handler, _heartbeats.handle_get(parsed.path))
if parsed.path == "/api/heartbeats" or parsed.path.startswith("/api/heartbeats/"):
result = _heartbeats.handle_get(parsed.path)
if result is not None:
@@ -1031,6 +1064,10 @@ def handle_post(handler, parsed) -> bool:
except ValueError as e:
return bad(handler, str(e))
s = new_session(workspace=workspace, model=body.get("model"))
# Save agent to session if provided
if body.get("agent"):
s.agent = body.get("agent")
s.save()
return j(handler, {"session": s.compact() | {"messages": s.messages}})
if parsed.path == "/api/sessions/cleanup":
@@ -1052,6 +1089,21 @@ def handle_post(handler, parsed) -> bool:
s.save()
return j(handler, {"session": s.compact()})
if parsed.path == "/api/session/reorder":
# Drag & drop reorder — update the session's updated_at to reposition it
try:
require(body, "session_id", "weight")
except ValueError as e:
return bad(handler, str(e))
try:
s = get_session(body["session_id"])
except KeyError:
return bad(handler, "Session not found", 404)
# weight is a float timestamp used as sort key; set it to target + small delta
s.updated_at = float(body["weight"])
s.save()
return j(handler, {"ok": True})
if parsed.path == "/api/personality/set":
try:
require(body, "session_id")
@@ -1478,6 +1530,10 @@ def handle_post(handler, parsed) -> bool:
if "bot_name" in body:
body["bot_name"] = (str(body["bot_name"]) or "").strip() or "Hermes"
if "user_emoji" in body:
body["user_emoji"] = (str(body["user_emoji"]) or "").strip()[:8] or "🙂"
if "user_name" in body:
body["user_name"] = (str(body["user_name"]) or "").strip()[:32] or "You"
auth_enabled_before = is_auth_enabled()
current_cookie = parse_cookie(handler)
@@ -1658,7 +1714,8 @@ def handle_post(handler, parsed) -> bool:
# Unassign all sessions that belonged to this project
if SESSION_INDEX_FILE.exists():
try:
index = json.loads(SESSION_INDEX_FILE.read_text(encoding="utf-8"))
with SESSION_INDEX_FILE.open(encoding="utf-8") as f:
index = json.loads(f.read())
for entry in index:
if entry.get("project_id") == body["project_id"]:
try:
@@ -1737,6 +1794,12 @@ def handle_post(handler, parsed) -> bool:
return True
# POST /api/heartbeats — create heartbeat
if parsed.path == "/api/heartbeats/config":
result = _heartbeats.handle_post(parsed.path, body)
if result is not None:
status = 200
            if isinstance(result, tuple):
                result, status = result
return j(handler, result, status=status)
if parsed.path == "/api/heartbeats" or parsed.path.startswith("/api/heartbeats/"):
result = _heartbeats.handle_post(parsed.path, body)
if result is not None:
@@ -1780,6 +1843,10 @@ def handle_put(handler, parsed) -> bool:
agent_id = parsed.path.split("/")[-2]
return j(handler, _agents.update_agent_memory(agent_id, body.get("content", "")))
# PUT /api/skills/toggle
if parsed.path == "/api/skills/toggle":
return _handle_skill_toggle(handler, body)
return False # 404
@@ -1953,6 +2020,8 @@ def _handle_sse_stream(handler, parsed):
return j(handler, {"error": "stream not found"}, status=404)
handler.send_response(200)
handler.send_header("Content-Type", "text/event-stream; charset=utf-8")
# NOTE: Content-Encoding:gzip removed — requires gzip writer wrapper on wfile
# Without actual gzip compression the header would cause browser decode errors
handler.send_header("Cache-Control", "no-cache")
handler.send_header("X-Accel-Buffering", "no")
handler.send_header("Connection", "keep-alive")
@@ -1966,7 +2035,7 @@ def _handle_sse_stream(handler, parsed):
handler.wfile.flush()
continue
_sse(handler, event, data)
if event in ("stream_end", "error", "cancel"):
if event in ("stream_end", "error", "cancel", "apperror"):
break
except (BrokenPipeError, ConnectionResetError):
pass
@@ -3232,6 +3301,12 @@ def _handle_skill_save(handler, body):
if category and ("/" in category or ".." in category):
return bad(handler, "Invalid category")
from tools.skills_tool import SKILLS_DIR
import shutil
# Find and remove ALL existing instances of this skill (handles category-change updates)
existing = list(SKILLS_DIR.rglob(f"{skill_name}/SKILL.md"))
for old_file in existing:
shutil.rmtree(str(old_file.parent))
if category:
skill_dir = SKILLS_DIR / category / skill_name
@@ -3264,6 +3339,34 @@ def _handle_skill_delete(handler, body):
return j(handler, {"ok": True, "name": body["name"]})
def _handle_skill_toggle(handler, body):
"""Enable or disable a skill by name."""
name = body.get("name")
if not name:
return bad(handler, "Missing field: name")
enabled = body.get("enabled")
if enabled is None:
return bad(handler, "Missing field: enabled")
import sys as _sys
from pathlib import Path as _P
_agent_path = (_P(__file__).parent.parent / "hermes-agent").resolve()
if str(_agent_path) not in _sys.path:
_sys.path.insert(0, str(_agent_path))
from hermes_cli.skills_config import get_disabled_skills, save_disabled_skills
from api.config import load_config
config = load_config()
disabled = get_disabled_skills(config)
if enabled:
disabled.discard(name)
else:
disabled.add(name)
save_disabled_skills(config, disabled)
return j(handler, {"ok": True, "name": name, "enabled": enabled})
def _handle_memory_write(handler, body):
try:
require(body, "section", "content")

View File

@@ -777,9 +777,13 @@ def _sse(handler, event, data):
def _run_agent_streaming(session_id, msg_text, model, workspace, stream_id, attachments=None, agent=None):
"""Run agent in background thread, writing SSE events to STREAMS[stream_id]."""
print(f'[DEBUG streaming] started stream_id={stream_id}', flush=True)
q = STREAMS.get(stream_id)
print(f'[DEBUG streaming] STREAMS keys={list(STREAMS.keys())}', flush=True)
if q is None:
print(f'[DEBUG streaming] queue is None for stream_id={stream_id}', flush=True)
return
print(f'[DEBUG streaming] queue found, agent={agent}', flush=True)
s = None
_rt = {}
old_cwd = None
@@ -937,12 +941,41 @@ def _run_agent_streaming(session_id, msg_text, model, workspace, stream_id, atta
_reasoning_text = '' # accumulates reasoning/thinking trace for persistence
_live_tool_calls = [] # tool progress fallback when final messages omit tool IDs
    _token_buf = []  # token text buffer for batching
    _token_buf_lock = threading.Lock()  # guards buffer/timer against the Timer thread
    _token_buf_timer = None  # threading.Timer reference
    _token_buf_closed = False  # True after sentinel seen
    def _flush_token_buf():
        nonlocal _token_buf_timer
        with _token_buf_lock:
            if _token_buf_closed or not _token_buf:
                return
            # Grab and clear the buffer under the lock
            batch = ''.join(_token_buf)
            _token_buf.clear()
            # Cancel any pending timer
            if _token_buf_timer is not None:
                _token_buf_timer.cancel()
                _token_buf_timer = None
        # _token_buf_closed check above ensures we never put after the sentinel
        put('token', {'text': batch})
    def on_token(text):
        nonlocal _token_sent, _token_buf_timer, _token_buf_closed
        if text is None:
            # Flush any remaining buffered tokens, then mark closed
            _flush_token_buf()
            _token_buf_closed = True
            return  # end-of-stream sentinel
        _token_sent = True
        with _token_buf_lock:
            _token_buf.append(text)
            full = len(_token_buf) >= 20
            if not full and _token_buf_timer is None:
                # Start a 100ms debounce timer (only if none is already pending)
                _token_buf_timer = threading.Timer(0.1, _flush_token_buf)
                _token_buf_timer.start()
        if full:
            # Flush immediately on the 20-token threshold
            _flush_token_buf()
def on_reasoning(text):
nonlocal _reasoning_text
@@ -1318,6 +1351,13 @@ def _run_agent_streaming(session_id, msg_text, model, workspace, stream_id, atta
if isinstance(_rm, dict) and _rm.get('role') == 'assistant':
_rm['reasoning'] = _reasoning_text
break
# Tag the last assistant message with per-turn token usage so the UI
# can display it on that specific message instead of the cumulative total.
if s.messages:
for _rm in reversed(s.messages):
if isinstance(_rm, dict) and _rm.get('role') == 'assistant':
_rm['_usage'] = {'in': input_tokens, 'out': output_tokens}
break
s.save()
# Sync to state.db for /insights (opt-in setting)
try:
@@ -1342,6 +1382,9 @@ def _run_agent_streaming(session_id, msg_text, model, workspace, stream_id, atta
usage['context_length'] = getattr(_cc, 'context_length', 0) or 0
usage['threshold_tokens'] = getattr(_cc, 'threshold_tokens', 0) or 0
usage['last_prompt_tokens'] = getattr(_cc, 'last_prompt_tokens', 0) or 0
# Send cumulative session totals separately so UI can label them as "session total"
usage['_session_input_tokens'] = s.input_tokens or 0
usage['_session_output_tokens'] = s.output_tokens or 0
# (reasoning trace already attached + saved above, before s.save())
raw_session = s.compact() | {'messages': s.messages, 'tool_calls': tool_calls}
put('done', {'session': redact_session_data(raw_session), 'usage': usage})
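
The token batching above trades a little latency for far fewer SSE events: flush at 20 buffered tokens, or 100 ms after the first buffered token, whichever comes first. The same policy as a standalone sketch, with print standing in for put('token', ...):

import threading, time

buf, timer, lock = [], None, threading.Lock()

def flush():
    global timer
    with lock:
        if not buf:
            return
        batch, buf[:] = ''.join(buf), []
        if timer is not None:
            timer.cancel()
            timer = None
    print(f'batch: {batch!r}')   # stand-in for put('token', {'text': batch})

def on_token(text):
    global timer
    with lock:
        buf.append(text)
        full = len(buf) >= 20
        if not full and timer is None:
            timer = threading.Timer(0.1, flush)  # 100 ms debounce
            timer.start()
    if full:
        flush()

for ch in 'hello world':   # 11 one-char tokens: under the threshold
    on_token(ch)
time.sleep(0.2)            # the debounce timer fires -> batch: 'hello world'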

View File

@@ -139,7 +139,8 @@ def _migrate_global_workspaces() -> list:
if not _GLOBAL_WS_FILE.exists():
return []
try:
raw = json.loads(_GLOBAL_WS_FILE.read_text(encoding='utf-8'))
with _GLOBAL_WS_FILE.open(encoding='utf-8') as f:
raw = json.loads(f.read())
cleaned = _clean_workspace_list(raw)
if len(cleaned) != len(raw):
# Rewrite the cleaned version so future reads are already clean
@@ -155,7 +156,8 @@ def load_workspaces() -> list:
ws_file = _workspaces_file()
if ws_file.exists():
try:
raw = json.loads(ws_file.read_text(encoding='utf-8'))
with ws_file.open(encoding='utf-8') as f:
raw = json.loads(f.read())
cleaned = _clean_workspace_list(raw)
if len(cleaned) != len(raw):
# Persist the cleaned version so stale entries don't keep reappearing