feat(engineering): add tc-tracker skill
Self-contained skill for tracking technical changes with structured JSON records, an enforced state machine, and a session handoff format that lets a new AI session resume work cleanly when a previous one expires. Includes: - 5 stdlib-only Python scripts (init, create, update, status, validator) all supporting --help and --json - 3 reference docs (lifecycle state machine, JSON schema, handoff format) - /tc dispatcher in commands/tc.md Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
277
engineering/tc-tracker/scripts/tc_create.py
Normal file
277
engineering/tc-tracker/scripts/tc_create.py
Normal file
@@ -0,0 +1,277 @@
|
||||
#!/usr/bin/env python3
|
||||
"""TC Create — Create a new Technical Change record.
|
||||
|
||||
Generates the next sequential TC ID, scaffolds the record directory, writes a
|
||||
fully populated tc_record.json (status=planned, R1 creation revision), and
|
||||
appends a registry entry with recomputed statistics.
|
||||
|
||||
Usage:
|
||||
python3 tc_create.py --root . --name user-auth \\
|
||||
--title "Add JWT authentication" --scope feature --priority high \\
|
||||
--summary "Adds JWT login + middleware" \\
|
||||
--motivation "Required for protected endpoints"
|
||||
|
||||
Exit codes:
|
||||
0 = created
|
||||
1 = warnings (e.g. validation soft warnings)
|
||||
2 = critical error (registry missing, bad args, schema invalid)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# Lifecycle states a TC record can be in (transitions between them are
# enforced by tc_update.py's state machine).
VALID_STATUSES = ("planned", "in_progress", "blocked", "implemented", "tested", "deployed")
# Change categories accepted for --scope.
VALID_SCOPES = ("feature", "bugfix", "refactor", "infrastructure", "documentation", "hotfix", "enhancement")
# Priority levels accepted for --priority.
VALID_PRIORITIES = ("critical", "high", "medium", "low")
|
||||
|
||||
|
||||
def now_iso() -> str:
    """Current UTC time as an ISO-8601 string with seconds precision."""
    utc_now = datetime.now(timezone.utc)
    return utc_now.isoformat(timespec="seconds")
|
||||
|
||||
|
||||
def slugify(text: str) -> str:
    """Normalize *text* into a kebab-case slug.

    Lowercases and trims, drops characters other than letters, digits,
    whitespace, underscores, and hyphens, collapses whitespace/underscore
    runs and repeated hyphens into single hyphens, then strips edge hyphens.

    Bug fix: underscores are now kept by the character filter so the
    [\\s_]+ pass can map them to hyphens ("user_auth" -> "user-auth");
    previously the first substitution deleted them outright.
    """
    text = text.lower().strip()
    # Keep "_" here so the next substitution can convert it to "-".
    text = re.sub(r"[^a-z0-9\s_-]", "", text)
    text = re.sub(r"[\s_]+", "-", text)
    text = re.sub(r"-+", "-", text)
    return text.strip("-")
|
||||
|
||||
|
||||
def date_slug(dt: datetime) -> str:
    """Format *dt* as the MM-DD-YY fragment embedded in TC IDs."""
    return "{:%m-%d-%y}".format(dt)
|
||||
|
||||
|
||||
def write_json_atomic(path: Path, data: dict) -> None:
    """Serialize *data* as pretty JSON and atomically replace *path*.

    Writes to a sibling ".tmp" file first and renames it over the target so
    readers never observe a partially written file.
    """
    scratch = path.with_suffix(path.suffix + ".tmp")
    payload = json.dumps(data, indent=2) + "\n"
    scratch.write_text(payload, encoding="utf-8")
    scratch.replace(path)
|
||||
|
||||
|
||||
def compute_stats(records: list) -> dict:
    """Recompute the registry's roll-up counts from *records*.

    Values outside the known status/scope/priority vocabularies are
    skipped rather than counted, so the buckets never grow new keys.
    """
    buckets = {
        "by_status": {name: 0 for name in VALID_STATUSES},
        "by_scope": {name: 0 for name in VALID_SCOPES},
        "by_priority": {name: 0 for name in VALID_PRIORITIES},
    }
    field_to_bucket = (("status", "by_status"), ("scope", "by_scope"), ("priority", "by_priority"))
    for entry in records:
        for field, bucket_name in field_to_bucket:
            counts = buckets[bucket_name]
            value = entry.get(field, "")
            if value in counts:
                counts[value] += 1
    stats = {"total": len(records)}
    stats.update(buckets)
    return stats
|
||||
|
||||
|
||||
def build_record(tc_id: str, title: str, scope: str, priority: str, summary: str,
                 motivation: str, project_name: str, author: str, session_id: str,
                 platform: str, model: str) -> dict:
    """Assemble a complete, freshly created TC record.

    The record starts in status "planned" with a single creation revision
    (R1); `created`, `updated`, and all session timestamps share one value.
    """
    ts = now_iso()

    description = {
        "summary": summary,
        "motivation": motivation,
        "scope": scope,
        "detailed_design": None,
        "breaking_changes": [],
        "dependencies": [],
    }

    creation_revision = {
        "revision_id": "R1",
        "timestamp": ts,
        "author": author,
        "summary": "TC record created",
        "field_changes": [
            {"field": "status", "action": "set", "new_value": "planned", "reason": "initial creation"},
        ],
    }

    approval = {
        "approved": False,
        "approved_by": None,
        "approved_date": None,
        "approval_notes": "",
        "test_coverage_status": "none",
    }

    session_context = {
        "current_session": {
            "session_id": session_id,
            "platform": platform,
            "model": model,
            "started": ts,
            "last_active": ts,
        },
        "handoff": {
            "progress_summary": "",
            "next_steps": [],
            "blockers": [],
            "key_context": [],
            "files_in_progress": [],
            "decisions_made": [],
        },
        "session_history": [],
    }

    metadata = {
        "project": project_name,
        "created_by": author,
        "last_modified_by": author,
        "last_modified": ts,
        "estimated_effort": None,
    }

    return {
        "tc_id": tc_id,
        "parent_tc": None,
        "title": title,
        "status": "planned",
        "priority": priority,
        "created": ts,
        "updated": ts,
        "created_by": author,
        "project": project_name,
        "description": description,
        "files_affected": [],
        "revision_history": [creation_revision],
        "sub_tcs": [],
        "test_cases": [],
        "approval": approval,
        "session_context": session_context,
        "tags": [],
        "related_tcs": [],
        "notes": "",
        "metadata": metadata,
    }
|
||||
|
||||
|
||||
def _fail(msg: str, as_json: bool) -> int:
    """Print an error in the requested output format and return exit code 2."""
    print(json.dumps({"status": "error", "error": msg}) if as_json else f"ERROR: {msg}")
    return 2


def main() -> int:
    """CLI entry point: create a TC record and register it.

    Returns a process exit code (0 = created, 2 = critical error); see the
    module docstring for the contract.
    """
    parser = argparse.ArgumentParser(description="Create a new TC record.")
    parser.add_argument("--root", default=".", help="Project root (default: current directory)")
    parser.add_argument("--name", required=True, help="Functionality slug (kebab-case, e.g. user-auth)")
    parser.add_argument("--title", required=True, help="Human-readable title (5-120 chars)")
    parser.add_argument("--scope", required=True, choices=VALID_SCOPES, help="Change category")
    parser.add_argument("--priority", default="medium", choices=VALID_PRIORITIES, help="Priority level")
    parser.add_argument("--summary", required=True, help="Concise summary (10+ chars)")
    parser.add_argument("--motivation", required=True, help="Why this change is needed")
    parser.add_argument("--author", default=None, help="Author identifier (defaults to config default_author)")
    parser.add_argument("--session-id", default=None, help="Session identifier (default: auto)")
    parser.add_argument("--platform", default="claude_code", choices=("claude_code", "claude_web", "api", "other"))
    parser.add_argument("--model", default="unknown", help="AI model identifier")
    parser.add_argument("--json", action="store_true", help="Output as JSON")
    args = parser.parse_args()

    root = Path(args.root).resolve()
    tc_dir = root / "docs" / "TC"
    config_path = tc_dir / "tc_config.json"
    registry_path = tc_dir / "tc_registry.json"

    if not config_path.exists() or not registry_path.exists():
        return _fail(f"TC tracking not initialized at {tc_dir}. Run tc_init.py first.", args.json)

    try:
        config = json.loads(config_path.read_text(encoding="utf-8"))
        registry = json.loads(registry_path.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError) as e:
        return _fail(f"Failed to read config/registry: {e}", args.json)

    project_name = config.get("project_name", "Unknown Project")
    author = args.author or config.get("default_author", "Claude")
    # Local wall-clock time is only used for ID uniqueness here; record
    # timestamps themselves come from now_iso() (UTC).
    session_id = args.session_id or f"session-{int(datetime.now().timestamp())}-{os.getpid()}"

    # Input validation: hard failures, exit code 2.
    if len(args.title) < 5 or len(args.title) > 120:
        return _fail("Title must be 5-120 characters.", args.json)
    if len(args.summary) < 10:
        return _fail("Summary must be at least 10 characters.", args.json)

    name_slug = slugify(args.name)
    if not name_slug:
        return _fail("Invalid name slug.", args.json)

    next_num = registry.get("next_tc_number", 1)
    today = datetime.now()
    tc_id = f"TC-{next_num:03d}-{date_slug(today)}-{name_slug}"

    record_dir = tc_dir / "records" / tc_id
    if record_dir.exists():
        return _fail(f"Record directory already exists: {record_dir}", args.json)

    record = build_record(
        tc_id=tc_id,
        title=args.title,
        scope=args.scope,
        priority=args.priority,
        summary=args.summary,
        motivation=args.motivation,
        project_name=project_name,
        author=author,
        session_id=session_id,
        platform=args.platform,
        model=args.model,
    )

    try:
        record_dir.mkdir(parents=True, exist_ok=False)
        (tc_dir / "evidence" / tc_id).mkdir(parents=True, exist_ok=True)
        write_json_atomic(record_dir / "tc_record.json", record)
    except OSError as e:
        return _fail(f"Failed to write record: {e}", args.json)

    # NOTE(review): record and registry are written separately; if the
    # registry write below fails, the record file already exists on disk
    # unregistered, and a retry the same day will hit the collision check.
    registry_entry = {
        "tc_id": tc_id,
        "title": args.title,
        "status": "planned",
        "scope": args.scope,
        "priority": args.priority,
        "created": record["created"],
        "updated": record["updated"],
        "path": f"records/{tc_id}/tc_record.json",
    }
    registry["records"].append(registry_entry)
    registry["next_tc_number"] = next_num + 1
    registry["updated"] = now_iso()
    registry["statistics"] = compute_stats(registry["records"])

    try:
        write_json_atomic(registry_path, registry)
    except OSError as e:
        return _fail(f"Failed to update registry: {e}", args.json)

    result = {
        "status": "created",
        "tc_id": tc_id,
        "title": args.title,
        "scope": args.scope,
        "priority": args.priority,
        "record_path": str(record_dir / "tc_record.json"),
    }
    if args.json:
        print(json.dumps(result, indent=2))
    else:
        print(f"Created {tc_id}")
        print(f" Title: {args.title}")
        print(f" Scope: {args.scope}")
        print(f" Priority: {args.priority}")
        print(f" Record: {record_dir / 'tc_record.json'}")
        print()
        print(f"Next: tc_update.py --root {args.root} --tc-id {tc_id} --set-status in_progress")

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s exit code to the shell.
if __name__ == "__main__":
    sys.exit(main())
|
||||
196
engineering/tc-tracker/scripts/tc_init.py
Normal file
196
engineering/tc-tracker/scripts/tc_init.py
Normal file
@@ -0,0 +1,196 @@
|
||||
#!/usr/bin/env python3
|
||||
"""TC Init — Initialize TC tracking inside a project.
|
||||
|
||||
Creates docs/TC/ with tc_config.json, tc_registry.json, records/, and evidence/.
|
||||
Idempotent: re-running on an already-initialized project reports current stats
|
||||
and exits cleanly.
|
||||
|
||||
Usage:
|
||||
python3 tc_init.py --project "My Project" --root .
|
||||
python3 tc_init.py --project "My Project" --root /path/to/project --json
|
||||
|
||||
Exit codes:
|
||||
0 = initialized OR already initialized
|
||||
1 = warnings (e.g. partial state)
|
||||
2 = bad CLI args / I/O error
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# Lifecycle states a TC record can be in (transitions enforced by tc_update.py).
VALID_STATUSES = ("planned", "in_progress", "blocked", "implemented", "tested", "deployed")
# Change categories; also seeded into tc_config.json as "categories".
VALID_SCOPES = ("feature", "bugfix", "refactor", "infrastructure", "documentation", "hotfix", "enhancement")
# Priority levels used for the registry statistics buckets.
VALID_PRIORITIES = ("critical", "high", "medium", "low")
|
||||
|
||||
|
||||
def now_iso() -> str:
    """Return the current UTC timestamp in ISO-8601 form (seconds precision)."""
    return datetime.now(tz=timezone.utc).isoformat(timespec="seconds")
|
||||
|
||||
|
||||
def detect_project_name(root: Path) -> str:
    """Best-effort project-name detection.

    Tries, in order: the first "# " heading in CLAUDE.md, the "name" field
    in package.json, a `name = ...` assignment in pyproject.toml, and
    finally the directory basename.

    Bug fix: the pyproject scan previously used startswith("name"), which
    also matched keys like `namespace =` or `names =`; it now requires the
    key to be exactly "name".

    NOTE(review): the pyproject scan is line-based and does not track TOML
    tables, so it picks up the first `name =` assignment in any section.
    """
    claude_md = root / "CLAUDE.md"
    if claude_md.exists():
        try:
            for line in claude_md.read_text(encoding="utf-8").splitlines():
                line = line.strip()
                if line.startswith("# "):
                    return line[2:].strip()
        except OSError:
            pass

    pkg = root / "package.json"
    if pkg.exists():
        try:
            data = json.loads(pkg.read_text(encoding="utf-8"))
            name = data.get("name")
            if isinstance(name, str) and name.strip():
                return name.strip()
        except (OSError, json.JSONDecodeError):
            pass

    pyproject = root / "pyproject.toml"
    if pyproject.exists():
        try:
            for line in pyproject.read_text(encoding="utf-8").splitlines():
                stripped = line.strip()
                if "=" not in stripped:
                    continue
                key, _, rhs = stripped.partition("=")
                # Exact key match: don't confuse "namespace"/"names" with "name".
                if key.strip() == "name":
                    value = rhs.strip().strip('"').strip("'")
                    if value:
                        return value
        except OSError:
            pass

    return root.resolve().name
|
||||
|
||||
|
||||
def build_config(project_name: str) -> dict:
    """Default tc_config.json contents for a fresh initialization."""
    config = {
        "project_name": project_name,
        "tc_root": "docs/TC",
        "created": now_iso(),
        "auto_track": True,
        "default_author": "Claude",
    }
    config["categories"] = list(VALID_SCOPES)
    return config
|
||||
|
||||
|
||||
def build_registry(project_name: str) -> dict:
    """Empty tc_registry.json contents for a fresh initialization.

    Uses a single timestamp for both `created` and `updated` so a brand-new
    registry never shows them differing (the previous two separate
    now_iso() calls could straddle a second boundary).
    """
    ts = now_iso()
    return {
        "project_name": project_name,
        "created": ts,
        "updated": ts,
        "next_tc_number": 1,
        "records": [],
        "statistics": {
            "total": 0,
            "by_status": {s: 0 for s in VALID_STATUSES},
            "by_scope": {s: 0 for s in VALID_SCOPES},
            "by_priority": {p: 0 for p in VALID_PRIORITIES},
        },
    }
|
||||
|
||||
|
||||
def write_json_atomic(path: Path, data: dict) -> None:
    """Write JSON via a temporary sibling file plus rename, avoiding partial writes."""
    staging = path.with_suffix(path.suffix + ".tmp")
    text = json.dumps(data, indent=2) + "\n"
    staging.write_text(text, encoding="utf-8")
    staging.replace(path)
|
||||
|
||||
|
||||
def _fail(msg: str, as_json: bool) -> int:
    """Print an error in the requested output format and return exit code 2."""
    print(json.dumps({"status": "error", "error": msg}) if as_json else f"ERROR: {msg}")
    return 2


def main() -> int:
    """CLI entry point: initialize (or report on) TC tracking under docs/TC.

    Idempotent: an already-initialized project reports its state and exits 0.
    Returns a process exit code; see the module docstring for the contract.
    """
    parser = argparse.ArgumentParser(description="Initialize TC tracking in a project.")
    parser.add_argument("--root", default=".", help="Project root directory (default: current directory)")
    parser.add_argument("--project", help="Project name (auto-detected if omitted)")
    parser.add_argument("--force", action="store_true", help="Re-initialize even if config exists (preserves registry)")
    parser.add_argument("--json", action="store_true", help="Output as JSON")
    args = parser.parse_args()

    root = Path(args.root).resolve()
    if not root.exists() or not root.is_dir():
        return _fail(f"Project root does not exist or is not a directory: {root}", args.json)

    tc_dir = root / "docs" / "TC"
    config_path = tc_dir / "tc_config.json"
    registry_path = tc_dir / "tc_registry.json"

    # Idempotent path: config already present and no --force -> report
    # current state and exit 0 without touching anything.
    if config_path.exists() and not args.force:
        try:
            cfg = json.loads(config_path.read_text(encoding="utf-8"))
        except (OSError, json.JSONDecodeError) as e:
            return _fail(f"Existing tc_config.json is unreadable: {e}", args.json)

        stats = {}
        if registry_path.exists():
            try:
                reg = json.loads(registry_path.read_text(encoding="utf-8"))
                stats = reg.get("statistics", {})
            except (OSError, json.JSONDecodeError):
                stats = {}  # best-effort: stats are informational here

        result = {
            "status": "already_initialized",
            "project_name": cfg.get("project_name"),
            "tc_root": str(tc_dir),
            "statistics": stats,
        }
        if args.json:
            print(json.dumps(result, indent=2))
        else:
            print(f"TC tracking already initialized for project '{cfg.get('project_name')}'.")
            print(f" TC root: {tc_dir}")
            if stats:
                print(f" Total TCs: {stats.get('total', 0)}")
        return 0

    project_name = args.project or detect_project_name(root)

    try:
        tc_dir.mkdir(parents=True, exist_ok=True)
        (tc_dir / "records").mkdir(exist_ok=True)
        (tc_dir / "evidence").mkdir(exist_ok=True)
        write_json_atomic(config_path, build_config(project_name))
        # Never clobber an existing registry unless --force: it holds history.
        if not registry_path.exists() or args.force:
            write_json_atomic(registry_path, build_registry(project_name))
    except OSError as e:
        return _fail(f"Failed to create TC directories or files: {e}", args.json)

    result = {
        "status": "initialized",
        "project_name": project_name,
        "tc_root": str(tc_dir),
        "files_created": [
            str(config_path),
            str(registry_path),
            str(tc_dir / "records"),
            str(tc_dir / "evidence"),
        ],
    }
    if args.json:
        print(json.dumps(result, indent=2))
    else:
        print(f"Initialized TC tracking for project '{project_name}'")
        print(f" TC root: {tc_dir}")
        print(f" Config: {config_path}")
        print(f" Registry: {registry_path}")
        print(f" Records: {tc_dir / 'records'}")
        print(f" Evidence: {tc_dir / 'evidence'}")
        print()
        print("Next: python3 tc_create.py --root . --name <slug> --title <title> --scope <scope> ...")

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s exit code to the shell.
if __name__ == "__main__":
    sys.exit(main())
|
||||
200
engineering/tc-tracker/scripts/tc_status.py
Normal file
200
engineering/tc-tracker/scripts/tc_status.py
Normal file
@@ -0,0 +1,200 @@
|
||||
#!/usr/bin/env python3
|
||||
"""TC Status — Show TC status for one record or the entire registry.
|
||||
|
||||
Usage:
|
||||
# Single TC
|
||||
python3 tc_status.py --root . --tc-id <TC-ID>
|
||||
python3 tc_status.py --root . --tc-id <TC-ID> --json
|
||||
|
||||
# All TCs (registry summary)
|
||||
python3 tc_status.py --root . --all
|
||||
python3 tc_status.py --root . --all --json
|
||||
|
||||
Exit codes:
|
||||
0 = ok
|
||||
1 = warnings (e.g. validation issues found while reading)
|
||||
2 = critical error (file missing, parse error, bad args)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def find_record_path(tc_dir: Path, tc_id: str) -> Path | None:
    """Resolve *tc_id* (exact or prefix) to its tc_record.json, or None.

    An exact directory match wins; otherwise the first records/ entry whose
    name starts with *tc_id* and contains a tc_record.json is used.
    """
    exact = tc_dir / "records" / tc_id / "tc_record.json"
    if exact.exists():
        return exact
    for child in (tc_dir / "records").glob("*"):
        if not child.is_dir() or not child.name.startswith(tc_id):
            continue
        candidate = child / "tc_record.json"
        if candidate.exists():
            return candidate
    return None
|
||||
|
||||
|
||||
def render_single(record: dict) -> str:
    """Render one TC record as human-readable multi-line text."""
    out: list[str] = []
    emit = out.append

    # Header block: identity and core fields.
    emit(f"TC: {record.get('tc_id')}")
    emit(f" Title: {record.get('title')}")
    emit(f" Status: {record.get('status')}")
    emit(f" Priority: {record.get('priority')}")
    desc = record.get("description", {}) or {}
    emit(f" Scope: {desc.get('scope')}")
    emit(f" Created: {record.get('created')}")
    emit(f" Updated: {record.get('updated')}")
    emit(f" Author: {record.get('created_by')}")
    emit("")

    # Description: only non-empty fields are shown.
    summary = desc.get("summary") or ""
    if summary:
        emit(f" Summary: {summary}")
    motivation = desc.get("motivation") or ""
    if motivation:
        emit(f" Motivation: {motivation}")
    emit("")

    # Files: first 10, then an elision marker.
    files = record.get("files_affected", []) or []
    emit(f" Files affected: {len(files)}")
    for changed in files[:10]:
        emit(f" - {changed.get('path')} ({changed.get('action')})")
    if len(files) > 10:
        emit(f" ... and {len(files) - 10} more")
    emit("")

    tests = record.get("test_cases", []) or []
    passed = sum(1 for t in tests if t.get("status") == "pass")
    failed = sum(1 for t in tests if t.get("status") == "fail")
    emit(f" Tests: {passed} pass / {failed} fail / {len(tests)} total")
    emit("")

    revs = record.get("revision_history", []) or []
    emit(f" Revisions: {len(revs)}")
    if revs:
        latest = revs[-1]
        emit(f" Latest: {latest.get('revision_id')} {latest.get('timestamp')}")
        emit(f" {latest.get('author')}: {latest.get('summary')}")
    emit("")

    # Handoff: only rendered when at least one section has content.
    handoff = (record.get("session_context", {}) or {}).get("handoff", {}) or {}
    if any(handoff.get(k) for k in ("progress_summary", "next_steps", "blockers", "key_context")):
        emit(" Handoff:")
        if handoff.get("progress_summary"):
            emit(f" Progress: {handoff['progress_summary']}")
        if handoff.get("next_steps"):
            emit(" Next steps:")
            for step in handoff["next_steps"]:
                emit(f" - {step}")
        if handoff.get("blockers"):
            emit(" Blockers:")
            for blocker in handoff["blockers"]:
                emit(f" ! {blocker}")
        if handoff.get("key_context"):
            emit(" Key context:")
            for ctx in handoff["key_context"]:
                emit(f" * {ctx}")

    appr = record.get("approval", {}) or {}
    emit("")
    emit(f" Approved: {appr.get('approved')} ({appr.get('test_coverage_status')} coverage)")
    if appr.get("approved"):
        emit(f" By: {appr.get('approved_by')} on {appr.get('approved_date')}")

    return "\n".join(out)
|
||||
|
||||
|
||||
def render_registry(registry: dict) -> str:
    """Render the whole registry as a human-readable summary table."""
    out: list[str] = []
    emit = out.append

    emit(f"Project: {registry.get('project_name')}")
    emit(f"Updated: {registry.get('updated')}")
    stats = registry.get("statistics", {}) or {}
    emit(f"Total TCs: {stats.get('total', 0)}")
    by_status = stats.get("by_status", {}) or {}
    emit("By status:")
    # Only statuses with a non-zero count are listed.
    for status, count in by_status.items():
        if count:
            emit(f" {status:12} {count}")
    emit("")

    records = registry.get("records", []) or []
    if not records:
        emit("No TC records yet. Run tc_create.py to add one.")
    else:
        emit(f"{'TC ID':40} {'Status':14} {'Scope':14} {'Priority':10} Title")
        emit("-" * 100)
        for rec in records:
            row = "{:40} {:14} {:14} {:10} {}".format(
                rec.get("tc_id", "")[:40],
                rec.get("status", "")[:14],
                rec.get("scope", "")[:14],
                rec.get("priority", "")[:10],
                rec.get("title", ""),
            )
            emit(row)

    return "\n".join(out)
|
||||
|
||||
|
||||
def _fail(msg: str, as_json: bool) -> int:
    """Print an error in the requested output format and return exit code 2."""
    print(json.dumps({"status": "error", "error": msg}) if as_json else f"ERROR: {msg}")
    return 2


def main() -> int:
    """CLI entry point: show one TC record (--tc-id) or the registry (--all).

    Returns a process exit code; see the module docstring for the contract.
    """
    parser = argparse.ArgumentParser(description="Show TC status.")
    parser.add_argument("--root", default=".", help="Project root (default: current directory)")
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("--tc-id", help="Show this single TC")
    group.add_argument("--all", action="store_true", help="Show registry summary for all TCs")
    parser.add_argument("--json", action="store_true", help="Output as JSON")
    args = parser.parse_args()

    root = Path(args.root).resolve()
    tc_dir = root / "docs" / "TC"
    registry_path = tc_dir / "tc_registry.json"

    if not registry_path.exists():
        return _fail(f"TC tracking not initialized at {tc_dir}. Run tc_init.py first.", args.json)

    try:
        registry = json.loads(registry_path.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError) as e:
        return _fail(f"Failed to read registry: {e}", args.json)

    if args.all:
        if args.json:
            print(json.dumps({
                "status": "ok",
                "project_name": registry.get("project_name"),
                "updated": registry.get("updated"),
                "statistics": registry.get("statistics", {}),
                "records": registry.get("records", []),
            }, indent=2))
        else:
            print(render_registry(registry))
        return 0

    # Single-TC mode: --tc-id may be a full ID or a unique prefix.
    record_path = find_record_path(tc_dir, args.tc_id)
    if record_path is None:
        return _fail(f"TC not found: {args.tc_id}", args.json)

    try:
        record = json.loads(record_path.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError) as e:
        return _fail(f"Failed to read record: {e}", args.json)

    if args.json:
        print(json.dumps({"status": "ok", "record": record}, indent=2))
    else:
        print(render_single(record))

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s exit code to the shell.
if __name__ == "__main__":
    sys.exit(main())
|
||||
361
engineering/tc-tracker/scripts/tc_update.py
Normal file
361
engineering/tc-tracker/scripts/tc_update.py
Normal file
@@ -0,0 +1,361 @@
|
||||
#!/usr/bin/env python3
|
||||
"""TC Update — Update an existing TC record.
|
||||
|
||||
Each invocation appends a sequential R<n> revision entry, refreshes the
|
||||
`updated` timestamp, validates the resulting record, and writes atomically.
|
||||
|
||||
Usage:
|
||||
# Status transition (validated against state machine)
|
||||
python3 tc_update.py --root . --tc-id <TC-ID> \\
|
||||
--set-status in_progress --reason "Starting implementation"
|
||||
|
||||
# Add files
|
||||
python3 tc_update.py --root . --tc-id <TC-ID> \\
|
||||
--add-file src/auth.py:created \\
|
||||
--add-file src/middleware.py:modified
|
||||
|
||||
# Add a test case
|
||||
python3 tc_update.py --root . --tc-id <TC-ID> \\
|
||||
--add-test "Login returns JWT" \\
|
||||
--test-procedure "POST /login with valid creds" \\
|
||||
--test-expected "200 + token in body"
|
||||
|
||||
# Append handoff data
|
||||
python3 tc_update.py --root . --tc-id <TC-ID> \\
|
||||
--handoff-progress "JWT middleware wired up" \\
|
||||
--handoff-next "Write integration tests" \\
|
||||
--handoff-next "Update README" \\
|
||||
--handoff-blocker "Waiting on test fixtures"
|
||||
|
||||
# Append a freeform note
|
||||
python3 tc_update.py --root . --tc-id <TC-ID> --note "Decision: use HS256"
|
||||
|
||||
Exit codes:
|
||||
0 = updated
|
||||
1 = warnings (e.g. validation produced errors but write skipped)
|
||||
2 = critical error (file missing, invalid transition, parse error)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# Lifecycle states a TC record can move through.
VALID_STATUSES = ("planned", "in_progress", "blocked", "implemented", "tested", "deployed")
# State machine: each status maps to the statuses it may transition to.
# Self-transitions are permitted separately by validate_transition().
VALID_TRANSITIONS = {
    "planned": ["in_progress", "blocked"],
    "in_progress": ["blocked", "implemented"],
    "blocked": ["in_progress", "planned"],
    "implemented": ["tested", "in_progress"],
    "tested": ["deployed", "in_progress"],
    "deployed": ["in_progress"],
}
# Actions accepted in --add-file path[:action] specs.
VALID_FILE_ACTIONS = ("created", "modified", "deleted", "renamed")
# Statuses a test case entry may carry.
VALID_TEST_STATUSES = ("pending", "pass", "fail", "skip", "blocked")
# Change categories and priority levels (mirrors tc_create.py).
VALID_SCOPES = ("feature", "bugfix", "refactor", "infrastructure", "documentation", "hotfix", "enhancement")
VALID_PRIORITIES = ("critical", "high", "medium", "low")
|
||||
|
||||
|
||||
def now_iso() -> str:
    """UTC 'now' rendered as an ISO-8601 string, truncated to seconds."""
    moment = datetime.now(timezone.utc)
    return moment.isoformat(timespec="seconds")
|
||||
|
||||
|
||||
def write_json_atomic(path: Path, data: dict) -> None:
    """Atomically replace *path* with *data* rendered as indented JSON."""
    rendered = json.dumps(data, indent=2) + "\n"
    staging = path.with_suffix(path.suffix + ".tmp")
    staging.write_text(rendered, encoding="utf-8")
    # Rename over the target so readers never see a half-written file.
    staging.replace(path)
|
||||
|
||||
|
||||
def find_record_path(tc_dir: Path, tc_id: str) -> Path | None:
    """Locate the tc_record.json for *tc_id* (exact match first, then prefix)."""
    records_dir = tc_dir / "records"
    direct = records_dir / tc_id / "tc_record.json"
    if direct.exists():
        return direct
    # Fall back to the first directory whose name begins with the given ID.
    for entry in records_dir.glob("*"):
        if entry.is_dir() and entry.name.startswith(tc_id):
            rec = entry / "tc_record.json"
            if rec.exists():
                return rec
    return None
|
||||
|
||||
|
||||
def validate_transition(current: str, new: str) -> str | None:
    """Return an error message if current->new is not allowed, else None.

    A no-op transition (current == new) is always permitted.
    """
    if current == new:
        return None
    allowed = VALID_TRANSITIONS.get(current, [])
    if new in allowed:
        return None
    options = ", ".join(allowed) or "none"
    return f"Invalid transition '{current}' -> '{new}'. Allowed: {options}"
|
||||
|
||||
|
||||
def next_revision_id(record: dict) -> str:
    """Sequential revision ID: R<n> where n = existing revisions + 1."""
    count = len(record.get("revision_history", []))
    return f"R{count + 1}"
|
||||
|
||||
|
||||
def next_test_id(record: dict) -> str:
    """Sequential test-case ID: T<n> where n = existing test cases + 1."""
    existing = record.get("test_cases", [])
    return f"T{len(existing) + 1}"
|
||||
|
||||
|
||||
def compute_stats(records: list) -> dict:
    """Recompute registry roll-up statistics from *records*.

    Values outside the known vocabularies are skipped, never counted.
    """
    by_status = dict.fromkeys(VALID_STATUSES, 0)
    by_scope = dict.fromkeys(VALID_SCOPES, 0)
    by_priority = dict.fromkeys(VALID_PRIORITIES, 0)
    tallies = {"status": by_status, "scope": by_scope, "priority": by_priority}
    for rec in records:
        for field, counter in tallies.items():
            value = rec.get(field, "")
            if value in counter:
                counter[value] += 1
    return {
        "total": len(records),
        "by_status": by_status,
        "by_scope": by_scope,
        "by_priority": by_priority,
    }
|
||||
|
||||
|
||||
def parse_file_arg(spec: str) -> tuple[str, str]:
    """Parse 'path:action' or bare 'path' (action defaults to 'modified').

    Splits on the LAST colon so path prefixes containing ':' survive.
    Raises ValueError for an action outside VALID_FILE_ACTIONS.
    """
    if ":" not in spec:
        return spec.strip(), "modified"
    path, _, action = spec.rpartition(":")
    action = action.strip()
    if action not in VALID_FILE_ACTIONS:
        raise ValueError(f"Invalid file action '{action}'. Must be one of {VALID_FILE_ACTIONS}")
    return path.strip(), action
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: apply one or more updates to an existing TC record.

    Each flag that applies appends to ``field_changes``; all applied
    changes are recorded as one new sequential R<n> revision, the record
    is written atomically, and the registry entry and statistics are
    re-synced.

    Returns:
        0 on success or no-op, 2 on any error (missing files, invalid
        state transition, bad file spec, or I/O failure).
    """
    parser = argparse.ArgumentParser(description="Update an existing TC record.")
    parser.add_argument("--root", default=".", help="Project root (default: current directory)")
    parser.add_argument("--tc-id", required=True, help="Target TC ID (full or prefix)")
    parser.add_argument("--author", default=None, help="Author for this revision (defaults to config)")
    parser.add_argument("--reason", default="", help="Reason for the change (recorded in revision)")

    # Mutation flags: status transition, affected files, test cases.
    parser.add_argument("--set-status", choices=VALID_STATUSES, help="Transition status (state machine enforced)")
    parser.add_argument("--add-file", action="append", default=[], metavar="path[:action]",
                        help="Add a file. Action defaults to 'modified'. Repeatable.")
    parser.add_argument("--add-test", help="Add a test case with this title")
    parser.add_argument("--test-procedure", action="append", default=[],
                        help="Procedure step for the test being added. Repeatable.")
    parser.add_argument("--test-expected", help="Expected result for the test being added")

    # Session-handoff fields (let a fresh session resume work cleanly).
    parser.add_argument("--handoff-progress", help="Set progress_summary in handoff")
    parser.add_argument("--handoff-next", action="append", default=[], help="Append to next_steps. Repeatable.")
    parser.add_argument("--handoff-blocker", action="append", default=[], help="Append to blockers. Repeatable.")
    parser.add_argument("--handoff-context", action="append", default=[], help="Append to key_context. Repeatable.")

    # Freeform annotations.
    parser.add_argument("--note", help="Append a freeform note (with timestamp)")
    parser.add_argument("--tag", action="append", default=[], help="Add a tag. Repeatable.")

    parser.add_argument("--json", action="store_true", help="Output as JSON")
    args = parser.parse_args()

    # Resolve the tracking layout under <root>/docs/TC.
    root = Path(args.root).resolve()
    tc_dir = root / "docs" / "TC"
    config_path = tc_dir / "tc_config.json"
    registry_path = tc_dir / "tc_registry.json"

    # Both config and registry are created by tc_init.py.
    if not config_path.exists() or not registry_path.exists():
        msg = f"TC tracking not initialized at {tc_dir}. Run tc_init.py first."
        print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
        return 2

    # --tc-id may be a prefix; find_record_path resolves it to a record file.
    record_path = find_record_path(tc_dir, args.tc_id)
    if record_path is None:
        msg = f"TC not found: {args.tc_id}"
        print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
        return 2

    try:
        config = json.loads(config_path.read_text(encoding="utf-8"))
        registry = json.loads(registry_path.read_text(encoding="utf-8"))
        record = json.loads(record_path.read_text(encoding="utf-8"))
    except (OSError, json.JSONDecodeError) as e:
        msg = f"Failed to read JSON: {e}"
        print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
        return 2

    author = args.author or config.get("default_author", "Claude")
    ts = now_iso()  # one timestamp shared by every change in this run

    # Every applied flag appends a structured entry to field_changes and a
    # human-readable one-liner to summary_parts (joined into the revision summary).
    field_changes = []
    summary_parts = []

    # --set-status: enforced by the state machine; same-status is a silent no-op.
    if args.set_status:
        current = record.get("status")
        new = args.set_status
        err = validate_transition(current, new)
        if err:
            print(json.dumps({"status": "error", "error": err}) if args.json else f"ERROR: {err}")
            return 2
        if current != new:
            record["status"] = new
            field_changes.append({
                "field": "status", "action": "changed",
                "old_value": current, "new_value": new, "reason": args.reason or None,
            })
            summary_parts.append(f"status: {current} -> {new}")

    # --add-file: each spec is 'path[:action]'; action defaults to 'modified'.
    for spec in args.add_file:
        try:
            path, action = parse_file_arg(spec)
        except ValueError as e:
            print(json.dumps({"status": "error", "error": str(e)}) if args.json else f"ERROR: {e}")
            return 2
        record.setdefault("files_affected", []).append({
            "path": path, "action": action, "description": None,
            "lines_added": None, "lines_removed": None,
        })
        field_changes.append({
            "field": "files_affected", "action": "added",
            "new_value": {"path": path, "action": action},
            "reason": args.reason or None,
        })
        summary_parts.append(f"+file {path} ({action})")

    # --add-test: requires at least one procedure step and an expected result.
    if args.add_test:
        if not args.test_procedure or not args.test_expected:
            msg = "--add-test requires at least one --test-procedure and --test-expected"
            print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
            return 2
        test_id = next_test_id(record)  # sequential T<n>
        new_test = {
            "test_id": test_id,
            "title": args.add_test,
            "procedure": list(args.test_procedure),
            "expected_result": args.test_expected,
            "actual_result": None,
            "status": "pending",
            "evidence": [],
            "tested_by": None,
            "tested_date": None,
        }
        record.setdefault("test_cases", []).append(new_test)
        field_changes.append({
            "field": "test_cases", "action": "added",
            "new_value": test_id, "reason": args.reason or None,
        })
        summary_parts.append(f"+test {test_id}: {args.add_test}")

    # Ensure the handoff structure exists before any --handoff-* edits.
    handoff = record.setdefault("session_context", {}).setdefault("handoff", {
        "progress_summary": "", "next_steps": [], "blockers": [],
        "key_context": [], "files_in_progress": [], "decisions_made": [],
    })

    # 'is not None' (not truthiness) so an explicit empty string clears the summary.
    if args.handoff_progress is not None:
        old = handoff.get("progress_summary", "")
        handoff["progress_summary"] = args.handoff_progress
        field_changes.append({
            "field": "session_context.handoff.progress_summary",
            "action": "changed", "old_value": old, "new_value": args.handoff_progress,
            "reason": args.reason or None,
        })
        summary_parts.append("handoff: updated progress_summary")

    for step in args.handoff_next:
        handoff.setdefault("next_steps", []).append(step)
        field_changes.append({
            "field": "session_context.handoff.next_steps",
            "action": "added", "new_value": step, "reason": args.reason or None,
        })
        summary_parts.append(f"handoff: +next_step '{step}'")

    for blk in args.handoff_blocker:
        handoff.setdefault("blockers", []).append(blk)
        field_changes.append({
            "field": "session_context.handoff.blockers",
            "action": "added", "new_value": blk, "reason": args.reason or None,
        })
        summary_parts.append(f"handoff: +blocker '{blk}'")

    for ctx in args.handoff_context:
        handoff.setdefault("key_context", []).append(ctx)
        field_changes.append({
            "field": "session_context.handoff.key_context",
            "action": "added", "new_value": ctx, "reason": args.reason or None,
        })
        summary_parts.append(f"handoff: +context")

    # --note: timestamped freeform text appended to the notes field.
    if args.note:
        existing = record.get("notes", "") or ""
        addition = f"[{ts}] {args.note}"
        record["notes"] = (existing + "\n" + addition).strip() if existing else addition
        field_changes.append({
            "field": "notes", "action": "added",
            "new_value": args.note, "reason": args.reason or None,
        })
        summary_parts.append("note appended")

    # --tag: deduplicated; re-adding an existing tag is a silent no-op.
    for tag in args.tag:
        if tag not in record.setdefault("tags", []):
            record["tags"].append(tag)
            field_changes.append({
                "field": "tags", "action": "added",
                "new_value": tag, "reason": args.reason or None,
            })
            summary_parts.append(f"+tag {tag}")

    # Nothing applied: exit 0 without creating a revision or touching disk.
    if not field_changes:
        msg = "No changes specified. Use --set-status, --add-file, --add-test, --handoff-*, --note, or --tag."
        print(json.dumps({"status": "noop", "message": msg}) if args.json else msg)
        return 0

    # Record all applied changes as one new sequential R<n> revision.
    revision = {
        "revision_id": next_revision_id(record),
        "timestamp": ts,
        "author": author,
        "summary": "; ".join(summary_parts) if summary_parts else "TC updated",
        "field_changes": field_changes,
    }
    record.setdefault("revision_history", []).append(revision)

    # Bump bookkeeping timestamps and authorship.
    record["updated"] = ts
    meta = record.setdefault("metadata", {})
    meta["last_modified"] = ts
    meta["last_modified_by"] = author

    cs = record.setdefault("session_context", {}).setdefault("current_session", {})
    cs["last_active"] = ts

    try:
        write_json_atomic(record_path, record)
    except OSError as e:
        msg = f"Failed to write record: {e}"
        print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
        return 2

    # Sync the registry entry for this TC, then recompute statistics.
    # NOTE(review): if the tc_id is absent from the registry, no entry is
    # added here — confirm tc_create.py always registers new records.
    for entry in registry.get("records", []):
        if entry.get("tc_id") == record["tc_id"]:
            entry["status"] = record["status"]
            entry["updated"] = ts
            break
    registry["updated"] = ts
    registry["statistics"] = compute_stats(registry.get("records", []))

    try:
        write_json_atomic(registry_path, registry)
    except OSError as e:
        msg = f"Failed to update registry: {e}"
        print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
        return 2

    result = {
        "status": "updated",
        "tc_id": record["tc_id"],
        "revision": revision["revision_id"],
        "summary": revision["summary"],
        "current_status": record["status"],
    }
    if args.json:
        print(json.dumps(result, indent=2))
    else:
        print(f"Updated {record['tc_id']} ({revision['revision_id']})")
        print(f" {revision['summary']}")
        print(f" Status: {record['status']}")

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: exit with main()'s status code (0/1/2).
    sys.exit(main())
|
||||
347
engineering/tc-tracker/scripts/tc_validator.py
Normal file
347
engineering/tc-tracker/scripts/tc_validator.py
Normal file
@@ -0,0 +1,347 @@
|
||||
#!/usr/bin/env python3
|
||||
"""TC Validator — Validate a TC record or registry against the schema and state machine.
|
||||
|
||||
Enforces:
|
||||
* Schema shape (required fields, types, enum values)
|
||||
* State machine transitions (planned -> in_progress -> implemented -> tested -> deployed)
|
||||
* Sequential R<n> revision IDs and T<n> test IDs
|
||||
* TC ID format (TC-NNN-MM-DD-YY-slug)
|
||||
* Sub-TC ID format (TC-NNN.A or TC-NNN.A.N)
|
||||
* Approval consistency (approved=true requires approved_by + approved_date)
|
||||
|
||||
Usage:
|
||||
python3 tc_validator.py --record path/to/tc_record.json
|
||||
python3 tc_validator.py --registry path/to/tc_registry.json
|
||||
python3 tc_validator.py --record path/to/tc_record.json --json
|
||||
|
||||
Exit codes:
|
||||
0 = valid
|
||||
1 = validation errors
|
||||
2 = file not found / JSON parse error / bad CLI args
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# Lifecycle statuses a TC record may hold.
VALID_STATUSES = ("planned", "in_progress", "blocked", "implemented", "tested", "deployed")

# State machine: maps each status to the statuses it may transition to.
# Same-status transitions are handled as no-ops by the validators below.
VALID_TRANSITIONS = {
    "planned": ["in_progress", "blocked"],
    "in_progress": ["blocked", "implemented"],
    "blocked": ["in_progress", "planned"],
    "implemented": ["tested", "in_progress"],
    "tested": ["deployed", "in_progress"],
    "deployed": ["in_progress"],
}

# Enumerations referenced by the schema checks in this module.
VALID_SCOPES = ("feature", "bugfix", "refactor", "infrastructure", "documentation", "hotfix", "enhancement")
VALID_PRIORITIES = ("critical", "high", "medium", "low")
VALID_FILE_ACTIONS = ("created", "modified", "deleted", "renamed")
VALID_TEST_STATUSES = ("pending", "pass", "fail", "skip", "blocked")
# NOTE(review): the next three enums are declared but not referenced by the
# validators visible in this module — confirm whether deeper field checks
# (evidence, field_changes, files_in_progress) were intended.
VALID_EVIDENCE_TYPES = ("log_snippet", "screenshot", "file_reference", "command_output")
VALID_FIELD_CHANGE_ACTIONS = ("set", "changed", "added", "removed")
VALID_PLATFORMS = ("claude_code", "claude_web", "api", "other")
VALID_COVERAGE = ("none", "partial", "full")
VALID_FILE_IN_PROGRESS_STATES = ("editing", "needs_review", "partially_done", "ready")

# ID formats: TC-NNN-MM-DD-YY-slug for full IDs; R<n>/T<n> must be sequential.
TC_ID_PATTERN = re.compile(r"^TC-\d{3}-\d{2}-\d{2}-\d{2}-[a-z0-9]+(-[a-z0-9]+)*$")
# NOTE(review): SUB_TC_PATTERN is declared (and advertised in the module
# docstring) but never applied by the visible validators — verify intent.
SUB_TC_PATTERN = re.compile(r"^TC-\d{3}\.[A-Z](\.\d+)?$")
REVISION_ID_PATTERN = re.compile(r"^R(\d+)$")
TEST_ID_PATTERN = re.compile(r"^T(\d+)$")
|
||||
|
||||
|
||||
def _enum(value, valid, name):
|
||||
if value not in valid:
|
||||
return [f"Field '{name}' has invalid value '{value}'. Must be one of: {', '.join(str(v) for v in valid)}"]
|
||||
return []
|
||||
|
||||
|
||||
def _string(value, name, min_length=0, max_length=None):
|
||||
errors = []
|
||||
if not isinstance(value, str):
|
||||
return [f"Field '{name}' must be a string, got {type(value).__name__}"]
|
||||
if len(value) < min_length:
|
||||
errors.append(f"Field '{name}' must be at least {min_length} characters, got {len(value)}")
|
||||
if max_length is not None and len(value) > max_length:
|
||||
errors.append(f"Field '{name}' must be at most {max_length} characters, got {len(value)}")
|
||||
return errors
|
||||
|
||||
|
||||
def _iso(value, name):
|
||||
if value is None:
|
||||
return []
|
||||
if not isinstance(value, str):
|
||||
return [f"Field '{name}' must be an ISO 8601 datetime string"]
|
||||
try:
|
||||
datetime.fromisoformat(value)
|
||||
except ValueError:
|
||||
return [f"Field '{name}' is not a valid ISO 8601 datetime: '{value}'"]
|
||||
return []
|
||||
|
||||
|
||||
def _required(record, fields, prefix=""):
|
||||
errors = []
|
||||
for f in fields:
|
||||
if f not in record:
|
||||
path = f"{prefix}.{f}" if prefix else f
|
||||
errors.append(f"Missing required field: '{path}'")
|
||||
return errors
|
||||
|
||||
|
||||
def validate_tc_id(tc_id):
    """Check that *tc_id* is a string matching TC-NNN-MM-DD-YY-slug."""
    if not isinstance(tc_id, str):
        return [f"tc_id must be a string, got {type(tc_id).__name__}"]
    if TC_ID_PATTERN.match(tc_id):
        return []
    return [f"tc_id '{tc_id}' does not match pattern TC-NNN-MM-DD-YY-slug"]
|
||||
|
||||
|
||||
def validate_state_transition(current, new):
    """Validate a state-machine move; same-status transitions are no-ops."""
    problems = []
    if current not in VALID_STATUSES:
        problems.append(f"Current status '{current}' is invalid")
    if new not in VALID_STATUSES:
        problems.append(f"New status '{new}' is invalid")
    if problems:
        return problems
    if current == new:
        # No-op: staying in the same status is always allowed.
        return []
    allowed = VALID_TRANSITIONS.get(current, [])
    if new in allowed:
        return []
    return [f"Invalid transition '{current}' -> '{new}'. Allowed from '{current}': {', '.join(allowed) or 'none'}"]
|
||||
|
||||
|
||||
def validate_tc_record(record):
    """Validate a TC record dict against the schema.

    Checks required top-level fields, enum values, ISO timestamps,
    sequential R<n>/T<n> identifiers, and approval consistency.

    Args:
        record: Parsed tc_record.json content.

    Returns:
        A (possibly empty) list of human-readable error strings.
    """
    errors = []
    if not isinstance(record, dict):
        return [f"TC record must be a JSON object, got {type(record).__name__}"]

    top_required = [
        "tc_id", "title", "status", "priority", "created", "updated",
        "created_by", "project", "description", "files_affected",
        "revision_history", "test_cases", "approval", "session_context",
        "tags", "related_tcs", "notes", "metadata",
    ]
    errors.extend(_required(record, top_required))

    # Scalar fields: format / enum / length / timestamp checks.
    if "tc_id" in record:
        errors.extend(validate_tc_id(record["tc_id"]))
    if "title" in record:
        errors.extend(_string(record["title"], "title", 5, 120))
    if "status" in record:
        errors.extend(_enum(record["status"], VALID_STATUSES, "status"))
    if "priority" in record:
        errors.extend(_enum(record["priority"], VALID_PRIORITIES, "priority"))
    for ts in ("created", "updated"):
        if ts in record:
            errors.extend(_iso(record[ts], ts))
    if "created_by" in record:
        errors.extend(_string(record["created_by"], "created_by", 1))
    if "project" in record:
        errors.extend(_string(record["project"], "project", 1))

    # description: object with summary/motivation/scope.
    desc = record.get("description")
    if isinstance(desc, dict):
        errors.extend(_required(desc, ["summary", "motivation", "scope"], "description"))
        if "summary" in desc:
            errors.extend(_string(desc["summary"], "description.summary", 10))
        if "motivation" in desc:
            errors.extend(_string(desc["motivation"], "description.motivation", 1))
        if "scope" in desc:
            errors.extend(_enum(desc["scope"], VALID_SCOPES, "description.scope"))
    elif "description" in record:
        errors.append("Field 'description' must be an object")

    # files_affected: array of {path, action} objects.
    files = record.get("files_affected")
    if isinstance(files, list):
        for i, f in enumerate(files):
            prefix = f"files_affected[{i}]"
            if not isinstance(f, dict):
                errors.append(f"{prefix} must be an object")
                continue
            errors.extend(_required(f, ["path", "action"], prefix))
            if "action" in f:
                errors.extend(_enum(f["action"], VALID_FILE_ACTIONS, f"{prefix}.action"))
    elif "files_affected" in record:
        errors.append("Field 'files_affected' must be an array")

    # revision_history: non-empty array with sequential R<n> IDs.
    revs = record.get("revision_history")
    if isinstance(revs, list):
        if len(revs) < 1:
            errors.append("revision_history must have at least 1 entry")
        for i, rev in enumerate(revs):
            prefix = f"revision_history[{i}]"
            if not isinstance(rev, dict):
                errors.append(f"{prefix} must be an object")
                continue
            errors.extend(_required(rev, ["revision_id", "timestamp", "author", "summary"], prefix))
            rid = rev.get("revision_id")
            if isinstance(rid, str):
                m = REVISION_ID_PATTERN.match(rid)
                if not m:
                    errors.append(f"{prefix}.revision_id '{rid}' must match R<n>")
                elif int(m.group(1)) != i + 1:
                    errors.append(f"{prefix}.revision_id is '{rid}' but expected 'R{i + 1}' (must be sequential)")
            if "timestamp" in rev:
                errors.extend(_iso(rev["timestamp"], f"{prefix}.timestamp"))
    elif "revision_history" in record:
        errors.append("Field 'revision_history' must be an array")

    # test_cases: array with sequential T<n> IDs.
    tests = record.get("test_cases")
    if isinstance(tests, list):
        for i, tc in enumerate(tests):
            prefix = f"test_cases[{i}]"
            if not isinstance(tc, dict):
                errors.append(f"{prefix} must be an object")
                continue
            errors.extend(_required(tc, ["test_id", "title", "procedure", "expected_result", "status"], prefix))
            tid = tc.get("test_id")
            if isinstance(tid, str):
                m = TEST_ID_PATTERN.match(tid)
                if not m:
                    errors.append(f"{prefix}.test_id '{tid}' must match T<n>")
                elif int(m.group(1)) != i + 1:
                    errors.append(f"{prefix}.test_id is '{tid}' but expected 'T{i + 1}' (must be sequential)")
            if "status" in tc:
                errors.extend(_enum(tc["status"], VALID_TEST_STATUSES, f"{prefix}.status"))
    elif "test_cases" in record:
        # Fix: mirror the array-type check that files_affected and
        # revision_history already have; previously a non-list value
        # passed silently.
        errors.append("Field 'test_cases' must be an array")

    # approval: approved=true requires approver identity + date.
    appr = record.get("approval")
    if isinstance(appr, dict):
        errors.extend(_required(appr, ["approved", "test_coverage_status"], "approval"))
        if appr.get("approved") is True:
            if not appr.get("approved_by"):
                errors.append("approval.approved_by is required when approval.approved is true")
            if not appr.get("approved_date"):
                errors.append("approval.approved_date is required when approval.approved is true")
        if "test_coverage_status" in appr:
            errors.extend(_enum(appr["test_coverage_status"], VALID_COVERAGE, "approval.test_coverage_status"))
    elif "approval" in record:
        errors.append("Field 'approval' must be an object")

    # session_context.current_session: required session identity fields.
    ctx = record.get("session_context")
    if isinstance(ctx, dict):
        errors.extend(_required(ctx, ["current_session"], "session_context"))
        cs = ctx.get("current_session")
        if isinstance(cs, dict):
            errors.extend(_required(cs, ["session_id", "platform", "model", "started"], "session_context.current_session"))
            if "platform" in cs:
                errors.extend(_enum(cs["platform"], VALID_PLATFORMS, "session_context.current_session.platform"))
            if "started" in cs:
                errors.extend(_iso(cs["started"], "session_context.current_session.started"))
    elif "session_context" in record:
        # Fix: a non-object session_context previously passed silently.
        errors.append("Field 'session_context' must be an object")

    # metadata: provenance fields.
    meta = record.get("metadata")
    if isinstance(meta, dict):
        errors.extend(_required(meta, ["project", "created_by", "last_modified_by", "last_modified"], "metadata"))
        if "last_modified" in meta:
            errors.extend(_iso(meta["last_modified"], "metadata.last_modified"))
    elif "metadata" in record:
        # Fix: a non-object metadata previously passed silently.
        errors.append("Field 'metadata' must be an object")

    return errors
|
||||
|
||||
|
||||
def validate_registry(registry):
    """Validate a TC registry dict.

    Args:
        registry: Parsed tc_registry.json content.

    Returns:
        A (possibly empty) list of human-readable error strings.
    """
    errors = []
    if not isinstance(registry, dict):
        return [f"Registry must be an object, got {type(registry).__name__}"]
    errors.extend(_required(registry, ["project_name", "created", "updated", "next_tc_number", "records", "statistics"]))
    if "next_tc_number" in registry:
        v = registry["next_tc_number"]
        # bool is a subclass of int, but True/1 would be a schema bug anyway;
        # the original accepted it and we preserve that tolerance.
        if not isinstance(v, int) or v < 1:
            errors.append(f"next_tc_number must be a positive integer, got {v}")
    if isinstance(registry.get("records"), list):
        for i, rec in enumerate(registry["records"]):
            prefix = f"records[{i}]"
            if not isinstance(rec, dict):
                errors.append(f"{prefix} must be an object")
                continue
            errors.extend(_required(rec, ["tc_id", "title", "status", "scope", "priority", "created", "updated", "path"], prefix))
            if "status" in rec:
                errors.extend(_enum(rec["status"], VALID_STATUSES, f"{prefix}.status"))
            if "scope" in rec:
                errors.extend(_enum(rec["scope"], VALID_SCOPES, f"{prefix}.scope"))
            if "priority" in rec:
                errors.extend(_enum(rec["priority"], VALID_PRIORITIES, f"{prefix}.priority"))
    elif "records" in registry:
        # Fix: a present-but-non-list 'records' value previously passed
        # silently; report the type error like the record validator does.
        errors.append("Field 'records' must be an array")
    return errors
|
||||
|
||||
|
||||
def slugify(text):
    """Convert arbitrary text into a kebab-case slug."""
    lowered = text.lower().strip()
    # Drop everything except lowercase alphanumerics, whitespace, and dashes.
    cleaned = re.sub(r"[^a-z0-9\s-]", "", lowered)
    # Collapse whitespace (and any underscores) into single dashes.
    dashed = re.sub(r"[\s_]+", "-", cleaned)
    collapsed = re.sub(r"-+", "-", dashed)
    return collapsed.strip("-")
|
||||
|
||||
|
||||
def compute_registry_statistics(records):
    """Recompute registry statistics from the records array.

    Unknown enum values are not counted in any bucket, though they still
    contribute to 'total'.
    """
    by_status = {s: 0 for s in VALID_STATUSES}
    by_scope = {s: 0 for s in VALID_SCOPES}
    by_priority = {p: 0 for p in VALID_PRIORITIES}
    for rec in records:
        status = rec.get("status", "")
        if status in by_status:
            by_status[status] += 1
        scope = rec.get("scope", "")
        if scope in by_scope:
            by_scope[scope] += 1
        priority = rec.get("priority", "")
        if priority in by_priority:
            by_priority[priority] += 1
    return {
        "total": len(records),
        "by_status": by_status,
        "by_scope": by_scope,
        "by_priority": by_priority,
    }
|
||||
|
||||
|
||||
def main():
    """CLI entry point: validate one record or one registry file.

    Returns 0 when valid, 1 on validation errors, 2 on missing file or
    unparseable JSON.
    """
    parser = argparse.ArgumentParser(description="Validate a TC record or registry.")
    target_group = parser.add_mutually_exclusive_group(required=True)
    target_group.add_argument("--record", help="Path to tc_record.json")
    target_group.add_argument("--registry", help="Path to tc_registry.json")
    parser.add_argument("--json", action="store_true", help="Output results as JSON")
    args = parser.parse_args()

    path = Path(args.record or args.registry)
    if not path.exists():
        msg = f"File not found: {path}"
        print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
        return 2

    try:
        data = json.loads(path.read_text(encoding="utf-8"))
    except json.JSONDecodeError as e:
        msg = f"Invalid JSON in {path}: {e}"
        print(json.dumps({"status": "error", "error": msg}) if args.json else f"ERROR: {msg}")
        return 2

    # Dispatch to the validator matching the flag that was supplied.
    if args.registry:
        errors = validate_registry(data)
    else:
        errors = validate_tc_record(data)

    if args.json:
        payload = {
            "status": "invalid" if errors else "valid",
            "file": str(path),
            "kind": "registry" if args.registry else "record",
            "error_count": len(errors),
            "errors": errors,
        }
        print(json.dumps(payload, indent=2))
    elif errors:
        print(f"VALIDATION ERRORS ({len(errors)}):")
        for idx, problem in enumerate(errors, 1):
            print(f"  {idx}. {problem}")
    else:
        print("VALID")

    return 1 if errors else 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: exit with main()'s status code (0/1/2).
    sys.exit(main())
|
||||
Reference in New Issue
Block a user