Refactor cockpit to use DockerTmuxController pattern
Based on claude-code-tools TmuxCLIController, this refactor:

- Adds a DockerTmuxController class for robust tmux session management
- Implements send_keys() with configurable delay_enter
- Implements capture_pane() for output retrieval
- Implements wait_for_prompt() for pattern-based completion detection
- Implements wait_for_idle() for content-hash-based idle detection
- Implements wait_for_shell_prompt() for shell prompt detection

Also includes workflow improvements:

- Pre-task git snapshot before agent execution
- Post-task commit protocol in agent guidelines

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
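The DockerTmuxController implementation itself does not appear in the file shown below, so the following is only a minimal sketch of the pattern the message describes. The method names (send_keys, capture_pane, wait_for_prompt, wait_for_idle, wait_for_shell_prompt) and the delay_enter / content-hash ideas come from the commit message; the docker exec plumbing, parameter names, and prompt regex are assumptions.

import hashlib
import re
import subprocess
import time


class DockerTmuxController:
    """Illustrative controller for a tmux session inside a Docker container (sketch only)."""

    def __init__(self, container: str, session: str = "main"):
        self.container = container
        self.session = session

    def _tmux(self, *args: str) -> str:
        # Run a tmux subcommand inside the container and return its stdout.
        cmd = ["docker", "exec", self.container, "tmux", *args]
        return subprocess.run(cmd, capture_output=True, text=True, check=True).stdout

    def send_keys(self, keys: str, delay_enter: float = 0.0) -> None:
        # Type the keys, optionally pause (configurable delay_enter), then press Enter.
        self._tmux("send-keys", "-t", self.session, keys)
        if delay_enter:
            time.sleep(delay_enter)
        self._tmux("send-keys", "-t", self.session, "Enter")

    def capture_pane(self) -> str:
        # Retrieve the currently visible pane content.
        return self._tmux("capture-pane", "-p", "-t", self.session)

    def wait_for_prompt(self, pattern: str, timeout: float = 60.0, poll: float = 1.0) -> bool:
        # Pattern-based completion detection: poll until the pane matches the pattern.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if re.search(pattern, self.capture_pane(), re.MULTILINE):
                return True
            time.sleep(poll)
        return False

    def wait_for_idle(self, quiet_seconds: float = 5.0, timeout: float = 300.0) -> bool:
        # Content-hash-based idle detection: idle once the captured pane content
        # has stopped changing for quiet_seconds.
        deadline = time.time() + timeout
        last_hash, last_change = None, time.time()
        while time.time() < deadline:
            digest = hashlib.sha256(self.capture_pane().encode()).hexdigest()
            if digest != last_hash:
                last_hash, last_change = digest, time.time()
            elif time.time() - last_change >= quiet_seconds:
                return True
            time.sleep(1.0)
        return False

    def wait_for_shell_prompt(self, timeout: float = 60.0) -> bool:
        # Shell prompt detection; the regex here is an assumed default.
        return self.wait_for_prompt(r"[$#]\s*$", timeout=timeout)

Hash-based idle detection of this kind is useful when no reliable prompt pattern exists, for example while a long-running TUI agent is still producing output.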
lib/qa_validator.py (new file, 392 lines)
@@ -0,0 +1,392 @@
#!/usr/bin/env python3
"""
Luzia QA Validator - Ensures code and documentation stay in sync

Validates:
1. All route_* functions have corresponding KG entities
2. All projects in config.json are documented
3. Cross-references resolve
4. No stale/orphaned documentation

Integrates with luzia qa command.
"""

import ast
import json
import re
from pathlib import Path
from typing import Dict, List, Tuple, Any
from datetime import datetime

# Import our KG module
import sys
sys.path.insert(0, str(Path(__file__).parent))
from knowledge_graph import KnowledgeGraph, KG_PATHS

LUZIA_PATH = Path("/opt/server-agents/orchestrator/bin/luzia")
CONFIG_PATH = Path("/opt/server-agents/orchestrator/config.json")

class QAValidator:
    """Validates code-documentation synchronization."""

    def __init__(self):
        self.issues: List[Dict] = []
        self.warnings: List[Dict] = []
        self.info: List[Dict] = []

    def _add_issue(self, category: str, message: str, details: str = None):
        self.issues.append({
            "category": category,
            "message": message,
            "details": details,
            "severity": "error"
        })

    def _add_warning(self, category: str, message: str, details: str = None):
        self.warnings.append({
            "category": category,
            "message": message,
            "details": details,
            "severity": "warning"
        })

    def _add_info(self, category: str, message: str, details: str = None):
        self.info.append({
            "category": category,
            "message": message,
            "details": details,
            "severity": "info"
        })

    # --- Code Analysis ---

    def extract_routes(self) -> List[Dict]:
        """Extract all route_* functions from luzia script."""
        routes = []

        if not LUZIA_PATH.exists():
            self._add_issue("code", "Luzia script not found", str(LUZIA_PATH))
            return routes

        content = LUZIA_PATH.read_text()

        # Find all route_* function definitions with their docstrings
        # Pattern: def route_name(...): followed by optional newline and """docstring"""
        pattern = r'def (route_\w+)\([^)]*\)[^:]*:\s*\n?\s*"""(.*?)"""'
        matches = re.findall(pattern, content, re.DOTALL)

        for name, docstring in matches:
            # Extract command pattern from docstring (Handler: luzia xxx)
            cmd_match = re.search(r'Handler:\s*(.+?)(?:\n|$)', docstring)
            command = cmd_match.group(1).strip() if cmd_match else ""

            routes.append({
                "function": name,
                "command": command,
                "docstring": docstring.strip()[:200],
            })

        return routes

    def extract_router_patterns(self) -> List[str]:
        """Extract registered routes from Router class."""
        patterns = []

        if not LUZIA_PATH.exists():
            return patterns

        content = LUZIA_PATH.read_text()

        # Find self.routes list
        pattern = r'self\.routes\s*=\s*\[(.*?)\]'
        match = re.search(pattern, content, re.DOTALL)

        if match:
            routes_block = match.group(1)
            # Extract route handler names
            handler_pattern = r'\(self\._match_\w+,\s*(route_\w+|self\._route_\w+),'
            patterns = re.findall(handler_pattern, routes_block)

        return patterns

    def validate_routes(self) -> bool:
        """Validate all route functions are registered."""
        routes = self.extract_routes()
        registered = self.extract_router_patterns()

        # Check each route function is registered
        for route in routes:
            func = route["function"]
            if func not in registered and f"self._{func}" not in registered:
                # Internal routes start with _route_ instead of route_
                if not func.startswith("_"):
                    self._add_warning(
                        "routes",
                        f"Route function '{func}' not registered in Router",
                        route["docstring"][:100]
                    )

        self._add_info("routes", f"Found {len(routes)} route functions, {len(registered)} registered")
        return len(self.issues) == 0

    # --- Documentation Validation ---

    def validate_command_docs(self) -> bool:
        """Validate all commands are documented in KG."""
        try:
            kg = KnowledgeGraph("sysadmin")
        except Exception as e:
            self._add_warning("kg", f"Could not open sysadmin KG: {e}")
            return True  # Not an error if KG doesn't exist yet

        routes = self.extract_routes()
        documented = {e["name"] for e in kg.list_entities("command")}

        for route in routes:
            cmd_name = route["function"].replace("route_", "luzia_")
            if cmd_name not in documented:
                self._add_warning(
                    "docs",
                    f"Command '{route['function']}' not documented in KG",
                    f"Add with: luzia docs add sysadmin {cmd_name} command ..."
                )

        return len(self.issues) == 0

    def validate_project_docs(self) -> bool:
        """Validate all projects in config are documented."""
        if not CONFIG_PATH.exists():
            self._add_issue("config", "Config file not found", str(CONFIG_PATH))
            return False

        try:
            config = json.loads(CONFIG_PATH.read_text())
        except Exception as e:
            self._add_issue("config", f"Could not parse config: {e}")
            return False

        try:
            kg = KnowledgeGraph("projects")
        except Exception as e:
            self._add_warning("kg", f"Could not open projects KG: {e}")
            return True

        projects = config.get("projects", {}).keys()
        documented = {e["name"] for e in kg.list_entities("project")}

        for project in projects:
            if project not in documented:
                self._add_warning(
                    "docs",
                    f"Project '{project}' not documented in KG",
                    f"Add with: luzia docs add projects {project} project ..."
                )

        self._add_info("projects", f"Found {len(projects)} projects in config")
        return len(self.issues) == 0

    # --- Syntax Validation ---

    def validate_python_syntax(self) -> bool:
        """Validate Python syntax of luzia script."""
        if not LUZIA_PATH.exists():
            self._add_issue("syntax", "Luzia script not found")
            return False

        try:
            content = LUZIA_PATH.read_text()
            ast.parse(content)
            self._add_info("syntax", "Python syntax valid")
            return True
        except SyntaxError as e:
            self._add_issue("syntax", f"Syntax error: {e.msg}", f"Line {e.lineno}: {e.text}")
            return False

    # --- Full Validation ---

    def validate_all(self) -> Dict:
        """Run all validations."""
        self.issues = []
        self.warnings = []
        self.info = []

        results = {
            "syntax": self.validate_python_syntax(),
            "routes": self.validate_routes(),
            "command_docs": self.validate_command_docs(),
            "project_docs": self.validate_project_docs(),
        }

        return {
            "passed": len(self.issues) == 0,
            "results": results,
            "issues": self.issues,
            "warnings": self.warnings,
            "info": self.info,
            "summary": {
                "errors": len(self.issues),
                "warnings": len(self.warnings),
                "info": len(self.info),
            },
            "timestamp": datetime.now().isoformat(),
        }

    # --- Auto-sync ---

    def sync_routes_to_kg(self) -> Dict:
        """Sync route functions to sysadmin KG."""
        try:
            kg = KnowledgeGraph("sysadmin")
        except Exception as e:
            return {"error": str(e)}

        routes = self.extract_routes()
        added = 0
        updated = 0

        for route in routes:
            name = route["function"].replace("route_", "luzia_")
            existing = kg.get_entity(name)

            # Build content from docstring
            content = f"Command: {route['command']}\n\n{route['docstring']}"

            kg.add_entity(
                name=name,
                entity_type="command",
                content=content,
                metadata={"function": route["function"], "auto_synced": True},
                source="luzia_script"
            )

            if existing:
                updated += 1
            else:
                added += 1

        return {
            "added": added,
            "updated": updated,
            "total": len(routes),
        }

    def sync_projects_to_kg(self) -> Dict:
        """Sync projects from config to KG."""
        if not CONFIG_PATH.exists():
            return {"error": "Config not found"}

        try:
            config = json.loads(CONFIG_PATH.read_text())
            kg = KnowledgeGraph("projects")
        except Exception as e:
            return {"error": str(e)}

        projects = config.get("projects", {})
        added = 0
        updated = 0

        for name, info in projects.items():
            existing = kg.get_entity(name)

            content = f"Description: {info.get('description', 'N/A')}\n"
            content += f"Focus: {info.get('focus', 'N/A')}\n"
            content += f"Path: {info.get('path', f'/home/{name}')}"

            kg.add_entity(
                name=name,
                entity_type="project",
                content=content,
                metadata=info,
                source="config.json"
            )

            if existing:
                updated += 1
            else:
                added += 1

        return {
            "added": added,
            "updated": updated,
            "total": len(projects),
        }


def run_qa(sync: bool = False, verbose: bool = False) -> int:
    """Run QA validation and optionally sync."""
    validator = QAValidator()

    print("\n=== Luzia QA Validation ===\n")

    results = validator.validate_all()

    # Show results
    for category, passed in results["results"].items():
        status = "[OK]" if passed else "[FAIL]"
        print(f" {status} {category}")

    # Show issues
    if results["issues"]:
        print("\nErrors:")
        for issue in results["issues"]:
            print(f" [!] {issue['category']}: {issue['message']}")
            if verbose and issue.get("details"):
                print(f" {issue['details']}")

    if results["warnings"]:
        print("\nWarnings:")
        for warn in results["warnings"]:
            print(f" [?] {warn['category']}: {warn['message']}")
            if verbose and warn.get("details"):
                print(f" {warn['details']}")

    if verbose and results["info"]:
        print("\nInfo:")
        for info in results["info"]:
            print(f" [i] {info['category']}: {info['message']}")

    print(f"\nSummary: {results['summary']['errors']} errors, {results['summary']['warnings']} warnings")

    # Sync if requested
    if sync:
        print("\n--- Syncing to Knowledge Graph ---")

        route_result = validator.sync_routes_to_kg()
        if "error" in route_result:
            print(f" Routes: Error - {route_result['error']}")
        else:
            print(f" Routes: {route_result['added']} added, {route_result['updated']} updated")

        project_result = validator.sync_projects_to_kg()
        if "error" in project_result:
            print(f" Projects: Error - {project_result['error']}")
        else:
            print(f" Projects: {project_result['added']} added, {project_result['updated']} updated")

    return 0 if results["passed"] else 1


# --- CLI ---

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Luzia QA Validator")
    parser.add_argument("--sync", action="store_true", help="Sync code to KG")
    parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")
    parser.add_argument("--json", action="store_true", help="Output as JSON")
    parser.add_argument("--learn", action="store_true", help="Extract learnings on QA pass")

    args = parser.parse_args()

    if args.learn:
        # Use learning integration for QA + learning extraction
        from qa_learning_integration import run_integrated_qa
        exit(run_integrated_qa(verbose=args.verbose, sync=args.sync))
    elif args.json:
        validator = QAValidator()
        results = validator.validate_all()
        print(json.dumps(results, indent=2))
    else:
        exit(run_qa(sync=args.sync, verbose=args.verbose))
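
For reference, a minimal sketch of driving the validator programmatically, using only names defined in lib/qa_validator.py above (the luzia qa command presumably wraps the same entry points):

# Hypothetical driver script; QAValidator and run_qa come from lib/qa_validator.py.
from qa_validator import QAValidator, run_qa

validator = QAValidator()
report = validator.validate_all()   # dict with "passed", "results", "issues", "warnings", "summary", ...
print(report["summary"])            # counts of errors, warnings and info entries

# CLI-equivalent call (same as: python3 lib/qa_validator.py --verbose)
raise SystemExit(run_qa(sync=False, verbose=True))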