Add load-aware cockpit queue dispatcher
- New CockpitQueueDispatcher: per-project serialized task queues - LoadMonitor: checks system load/memory before dispatching - Parallel execution across projects with round-robin fairness - CLI commands: cockpit queue, cockpit dispatch Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
114
lib/cockpit.py
114
lib/cockpit.py
@@ -695,6 +695,71 @@ def cockpit_attach_cmd(project: str) -> str:
|
||||
return f"docker exec -it {container_name} tmux attach-session -t agent"
|
||||
|
||||
|
||||
def cockpit_queue_task(project: str, task: str, context: str = "",
                       priority: str = "normal") -> Dict:
    """
    Queue a task for background dispatch.

    Tasks are queued per-project and dispatched serially within each project,
    but in parallel across projects (with load awareness).

    Args:
        project: Target project name
        task: Task description
        context: Project context
        priority: "high" or "normal"

    Returns: {"success": bool, "task_id": str, "message": str} plus
        "queue_position" (number of pending tasks for the project) on success.
    """
    try:
        from cockpit_queue_dispatcher import CockpitQueueDispatcher
        import yaml

        config_path = Path("/opt/server-agents/orchestrator/config/luzia.yaml")
        if config_path.exists():
            # yaml.safe_load() returns None for an empty/whitespace-only file;
            # fall back to a minimal config so the dispatcher never gets None.
            config = yaml.safe_load(config_path.read_text()) or {"projects": {}}
        else:
            config = {"projects": {}}

        dispatcher = CockpitQueueDispatcher(config)
        task_id = dispatcher.enqueue_task(project, task, context, priority)

        return {
            "success": True,
            "task_id": task_id,
            "message": f"Task queued for {project}",
            "queue_position": len(dispatcher.get_pending_tasks(project)),
        }
    except ImportError:
        # Optional component: the dispatcher module may not be installed.
        return {"success": False, "message": "Queue dispatcher not available"}
    except Exception as e:
        # CLI boundary: surface any failure as a structured error result
        # rather than a traceback.
        return {"success": False, "message": str(e)}
|
||||
|
||||
|
||||
def cockpit_queue_status() -> Dict:
    """
    Get status of the task queue and dispatcher.

    Returns: {"success": bool, "status": dict} on success, or
        {"success": False, "message": str} when the dispatcher is
        unavailable or fails.
    """
    try:
        from cockpit_queue_dispatcher import CockpitQueueDispatcher
        import yaml

        config_path = Path("/opt/server-agents/orchestrator/config/luzia.yaml")
        if config_path.exists():
            # yaml.safe_load() returns None for an empty/whitespace-only file;
            # fall back to a minimal config so the dispatcher never gets None.
            config = yaml.safe_load(config_path.read_text()) or {"projects": {}}
        else:
            config = {"projects": {}}

        dispatcher = CockpitQueueDispatcher(config)
        return {"success": True, "status": dispatcher.get_status()}
    except ImportError:
        # Optional component: the dispatcher module may not be installed.
        return {"success": False, "message": "Queue dispatcher not available"}
    except Exception as e:
        # CLI boundary: surface any failure as a structured error result
        # rather than a traceback.
        return {"success": False, "message": str(e)}
|
||||
|
||||
|
||||
def cockpit_dispatch_task(project: str, task: str, context: str, config: dict,
|
||||
show_output: bool = True, timeout: int = 600) -> Dict:
|
||||
"""
|
||||
@@ -1057,6 +1122,11 @@ def route_cockpit(config: dict, args: list, kwargs: dict) -> int:
|
||||
print(" output <project> Get recent output")
|
||||
print(" status [project] Show cockpit status")
|
||||
print(" attach <project> Show attach command")
|
||||
print("")
|
||||
print("Queue commands (per-project serialized, parallel across projects):")
|
||||
print(" queue <project> <task> Queue task for background dispatch")
|
||||
print(" queue --status Show dispatcher status")
|
||||
print(" dispatch Run one dispatch cycle")
|
||||
return 0
|
||||
|
||||
subcommand = args[0]
|
||||
@@ -1161,5 +1231,49 @@ def route_cockpit(config: dict, args: list, kwargs: dict) -> int:
|
||||
print(f" {cmd}")
|
||||
return 0
|
||||
|
||||
if subcommand == "queue":
|
||||
if len(subargs) < 2:
|
||||
print("Usage: luzia cockpit queue <project> <task>")
|
||||
print(" luzia cockpit queue --status")
|
||||
return 1
|
||||
if subargs[0] == "--status":
|
||||
result = cockpit_queue_status()
|
||||
if result["success"]:
|
||||
print(json.dumps(result["status"], indent=2))
|
||||
return 0
|
||||
print(f"Error: {result['message']}")
|
||||
return 1
|
||||
|
||||
project = subargs[0]
|
||||
task = " ".join(subargs[1:])
|
||||
result = cockpit_queue_task(project, task)
|
||||
if result["success"]:
|
||||
print(f"OK: {result['message']}")
|
||||
print(f" Task ID: {result['task_id']}")
|
||||
print(f" Queue position: {result.get('queue_position', 'unknown')}")
|
||||
return 0
|
||||
print(f"Error: {result['message']}")
|
||||
return 1
|
||||
|
||||
if subcommand == "dispatch":
|
||||
# Run one dispatch cycle
|
||||
try:
|
||||
from cockpit_queue_dispatcher import CockpitQueueDispatcher
|
||||
import yaml
|
||||
|
||||
config_path = Path("/opt/server-agents/orchestrator/config/luzia.yaml")
|
||||
if config_path.exists():
|
||||
cfg = yaml.safe_load(config_path.read_text())
|
||||
else:
|
||||
cfg = config
|
||||
|
||||
dispatcher = CockpitQueueDispatcher(cfg)
|
||||
result = dispatcher.run_dispatch_cycle()
|
||||
print(json.dumps(result, indent=2))
|
||||
return 0
|
||||
except Exception as e:
|
||||
print(f"Error: {e}")
|
||||
return 1
|
||||
|
||||
print(f"Unknown subcommand: {subcommand}")
|
||||
return 1
|
||||
|
||||
Reference in New Issue
Block a user