feat: add enhancement workflow preset system with multi-target CLI
- Add YAML-based enhancement workflow presets shipped inside the package (default, minimal, security-focus, architecture-comprehensive, api-documentation) - Add `skill-seekers workflows` subcommand: list, show, copy, add, remove, validate - copy/add/remove all accept multiple names/files in one invocation with partial-failure behaviour - `add --name` override restricted to single-file operations - Add 5 MCP tools: list_workflows, get_workflow, create_workflow, update_workflow, delete_workflow - Fix: create command _add_common_args() now correctly forwards each --enhance-workflow as a separate flag instead of passing the whole list as a single argument - Update README: reposition as "data layer for AI systems" with AI Skills front and centre - Update CHANGELOG, QUICK_REFERENCE, CLAUDE.md with workflow preset details - 1,880+ tests passing Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -376,7 +376,8 @@ class CreateCommand:
|
||||
|
||||
# Enhancement Workflow arguments (NEW - Phase 2)
|
||||
if getattr(self.args, "enhance_workflow", None):
|
||||
argv.extend(["--enhance-workflow", self.args.enhance_workflow])
|
||||
for wf in self.args.enhance_workflow:
|
||||
argv.extend(["--enhance-workflow", wf])
|
||||
if getattr(self.args, "enhance_stage", None):
|
||||
for stage in self.args.enhance_stage:
|
||||
argv.extend(["--enhance-stage", stage])
|
||||
|
||||
@@ -27,6 +27,7 @@ import logging
|
||||
import os
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from importlib.resources import files as importlib_files
|
||||
from pathlib import Path
|
||||
from typing import Any, Literal
|
||||
|
||||
@@ -99,25 +100,63 @@ class WorkflowEngine:
|
||||
self.history: list[dict[str, Any]] = []
|
||||
self.enhancer = None # Lazy load UnifiedEnhancer
|
||||
|
||||
def _load_workflow(self, workflow_path: str | Path) -> EnhancementWorkflow:
|
||||
"""Load workflow from YAML file."""
|
||||
workflow_path = Path(workflow_path)
|
||||
def _load_workflow(self, workflow_ref: str | Path) -> EnhancementWorkflow:
|
||||
"""Load workflow from YAML file using 3-level search order.
|
||||
|
||||
# Resolve path (support both absolute and relative)
|
||||
if not workflow_path.is_absolute():
|
||||
# Try relative to CWD first
|
||||
if not workflow_path.exists():
|
||||
# Try in config directory
|
||||
config_dir = Path.home() / ".config" / "skill-seekers" / "workflows"
|
||||
workflow_path = config_dir / workflow_path
|
||||
Search order:
|
||||
1. Raw file path (absolute or relative) — existing behaviour
|
||||
2. ~/.config/skill-seekers/workflows/{name}.yaml — user overrides/custom
|
||||
3. skill_seekers/workflows/{name}.yaml via importlib.resources — bundled defaults
|
||||
"""
|
||||
workflow_ref = Path(workflow_ref)
|
||||
|
||||
if not workflow_path.exists():
|
||||
raise FileNotFoundError(f"Workflow not found: {workflow_path}")
|
||||
# Add .yaml extension for bare names
|
||||
name_str = str(workflow_ref)
|
||||
if not name_str.endswith((".yaml", ".yml")):
|
||||
yaml_ref = Path(name_str + ".yaml")
|
||||
else:
|
||||
yaml_ref = workflow_ref
|
||||
|
||||
logger.info(f"📋 Loading workflow: {workflow_path}")
|
||||
resolved_path: Path | None = None
|
||||
yaml_text: str | None = None
|
||||
|
||||
with open(workflow_path, encoding="utf-8") as f:
|
||||
data = yaml.safe_load(f)
|
||||
# Level 1: absolute path or relative-to-CWD
|
||||
if yaml_ref.is_absolute():
|
||||
if yaml_ref.exists():
|
||||
resolved_path = yaml_ref
|
||||
else:
|
||||
cwd_path = Path.cwd() / yaml_ref
|
||||
if cwd_path.exists():
|
||||
resolved_path = cwd_path
|
||||
elif yaml_ref.exists():
|
||||
resolved_path = yaml_ref
|
||||
|
||||
# Level 2: user config directory
|
||||
if resolved_path is None:
|
||||
user_dir = Path.home() / ".config" / "skill-seekers" / "workflows"
|
||||
user_path = user_dir / yaml_ref.name
|
||||
if user_path.exists():
|
||||
resolved_path = user_path
|
||||
|
||||
# Level 3: bundled package workflows via importlib.resources
|
||||
if resolved_path is None:
|
||||
bare_name = yaml_ref.name # e.g. "security-focus.yaml"
|
||||
try:
|
||||
pkg_ref = importlib_files("skill_seekers.workflows").joinpath(bare_name)
|
||||
yaml_text = pkg_ref.read_text(encoding="utf-8")
|
||||
logger.info(f"📋 Loading bundled workflow: {bare_name}")
|
||||
except (FileNotFoundError, TypeError, ModuleNotFoundError):
|
||||
raise FileNotFoundError(
|
||||
f"Workflow '{yaml_ref.stem}' not found. "
|
||||
"Use 'skill-seekers workflows list' to see available workflows."
|
||||
)
|
||||
|
||||
if resolved_path is not None:
|
||||
logger.info(f"📋 Loading workflow: {resolved_path}")
|
||||
with open(resolved_path, encoding="utf-8") as f:
|
||||
data = yaml.safe_load(f)
|
||||
else:
|
||||
data = yaml.safe_load(yaml_text)
|
||||
|
||||
# Handle inheritance (extends)
|
||||
if "extends" in data and data["extends"]:
|
||||
@@ -430,103 +469,27 @@ class WorkflowEngine:
|
||||
logger.info(f"💾 Saved workflow history: {output_path}")
|
||||
|
||||
|
||||
def create_default_workflows():
    """Create default workflow templates in the user config directory.

    Writes ``default.yaml`` and ``security-focus.yaml`` to
    ``~/.config/skill-seekers/workflows/`` (the directory is created if
    missing). Existing files are never overwritten, so user edits survive
    repeated invocations.

    Returns:
        Path: the workflow config directory.
    """
    config_dir = Path.home() / ".config" / "skill-seekers" / "workflows"
    config_dir.mkdir(parents=True, exist_ok=True)

    # Default workflow: the standard two-stage builtin enhancement pipeline.
    default_workflow = {
        "name": "Default Enhancement",
        "description": "Standard AI enhancement with all features",
        "version": "1.0",
        "applies_to": ["codebase_analysis", "doc_scraping", "github_analysis"],
        "stages": [
            {
                "name": "base_analysis",
                "type": "builtin",
                "target": "patterns",
                "enabled": True,
            },
            {
                "name": "test_examples",
                "type": "builtin",
                "target": "examples",
                "enabled": True,
            },
        ],
        "post_process": {
            "add_metadata": {"enhanced": True, "workflow": "default"}
        },
    }

    # Security-focused workflow: builtin pattern stage feeding a custom
    # LLM-prompt stage (uses_history=True so {previous_results} is filled in).
    security_workflow = {
        "name": "Security-Focused Analysis",
        "description": "Emphasize security patterns and vulnerabilities",
        "version": "1.0",
        "applies_to": ["codebase_analysis"],
        "variables": {"focus_area": "security"},
        "stages": [
            {
                "name": "base_patterns",
                "type": "builtin",
                "target": "patterns",
            },
            {
                "name": "security_analysis",
                "type": "custom",
                "target": "security",
                "uses_history": True,
                "prompt": """Based on the patterns detected: {previous_results}

Perform deep security analysis:

1. **Authentication/Authorization**:
   - Auth bypass risks?
   - Token handling secure?
   - Session management issues?

2. **Input Validation**:
   - User input sanitized?
   - SQL injection risks?
   - XSS vulnerabilities?

3. **Data Exposure**:
   - Sensitive data in logs?
   - Secrets in config?
   - PII handling?

4. **Cryptography**:
   - Weak algorithms?
   - Hardcoded keys?
   - Insecure RNG?

Output as JSON with 'findings' array.""",
            },
        ],
        "post_process": {
            "add_metadata": {"security_reviewed": True},
        },
    }

    # Save workflows — only if absent, so user customizations are preserved.
    workflows = {
        "default.yaml": default_workflow,
        "security-focus.yaml": security_workflow,
    }

    for filename, workflow_data in workflows.items():
        workflow_file = config_dir / filename
        if not workflow_file.exists():
            with open(workflow_file, "w", encoding="utf-8") as f:
                yaml.dump(workflow_data, f, default_flow_style=False, sort_keys=False)
            logger.info(f"✅ Created workflow: {workflow_file}")

    return config_dir
|
||||
def list_bundled_workflows() -> list[str]:
    """Return names of all bundled default workflows (without .yaml extension).

    Scans the packaged ``skill_seekers.workflows`` resource directory and
    returns the sorted name of every ``*.yaml`` / ``*.yml`` entry, with the
    extension removed. Returns an empty list when the package or its
    resources are unavailable, so callers never need to guard against
    import/packaging errors.
    """
    try:
        pkg = importlib_files("skill_seekers.workflows")
        names = []
        for item in pkg.iterdir():
            name = str(item.name)
            # Strip only the extension that actually matched; the previous
            # chained removesuffix(".yaml").removesuffix(".yml") could strip
            # two extensions from a name like "foo.yml.yaml".
            for suffix in (".yaml", ".yml"):
                if name.endswith(suffix):
                    names.append(name.removesuffix(suffix))
                    break
        return sorted(names)
    except Exception:
        # Best-effort: a missing/non-importable package means "no bundled
        # workflows", not a crash.
        return []
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual invocation: seed the user config directory with the default
    # workflow templates.
    create_default_workflows()
    print("✅ Default workflows created!")
|
||||
def list_user_workflows() -> list[str]:
    """Return names of all user-defined workflows (without .yaml extension)."""
    user_dir = Path.home() / ".config" / "skill-seekers" / "workflows"
    if not user_dir.exists():
        return []
    # Collect the stem of every YAML file in the user workflow directory.
    return sorted(
        entry.stem
        for entry in user_dir.iterdir()
        if entry.suffix in (".yaml", ".yml")
    )
|
||||
|
||||
@@ -62,6 +62,7 @@ COMMAND_MODULES = {
|
||||
"update": "skill_seekers.cli.incremental_updater",
|
||||
"multilang": "skill_seekers.cli.multilang_support",
|
||||
"quality": "skill_seekers.cli.quality_metrics",
|
||||
"workflows": "skill_seekers.cli.workflows_command",
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -27,6 +27,7 @@ from .stream_parser import StreamParser
|
||||
from .update_parser import UpdateParser
|
||||
from .multilang_parser import MultilangParser
|
||||
from .quality_parser import QualityParser
|
||||
from .workflows_parser import WorkflowsParser
|
||||
|
||||
# Registry of all parsers (in order of usage frequency)
|
||||
PARSERS = [
|
||||
@@ -50,6 +51,7 @@ PARSERS = [
|
||||
UpdateParser(),
|
||||
MultilangParser(),
|
||||
QualityParser(),
|
||||
WorkflowsParser(),
|
||||
]
|
||||
|
||||
|
||||
|
||||
85
src/skill_seekers/cli/parsers/workflows_parser.py
Normal file
85
src/skill_seekers/cli/parsers/workflows_parser.py
Normal file
@@ -0,0 +1,85 @@
|
||||
"""Workflows subcommand parser."""
|
||||
|
||||
from .base import SubcommandParser
|
||||
|
||||
|
||||
class WorkflowsParser(SubcommandParser):
    """Parser for the workflows subcommand."""

    @property
    def name(self) -> str:
        return "workflows"

    @property
    def help(self) -> str:
        return "Manage enhancement workflow presets"

    @property
    def description(self) -> str:
        return (
            "List, inspect, copy, add, remove, and validate enhancement workflow "
            "presets. Bundled presets ship with the package; user presets live in "
            "~/.config/skill-seekers/workflows/."
        )

    def add_arguments(self, parser) -> None:
        """Register the workflows action sub-parsers on *parser*."""
        actions = parser.add_subparsers(dest="workflows_action", metavar="ACTION")

        # list — takes no extra arguments
        actions.add_parser(
            "list",
            help="List all available workflows (bundled + user)",
        )

        # show <workflow_name>
        sub = actions.add_parser(
            "show",
            help="Print YAML content of a workflow",
        )
        sub.add_argument("workflow_name", help="Workflow name (e.g. security-focus)")

        # copy <workflow_names...> — multi-target
        sub = actions.add_parser(
            "copy",
            help="Copy bundled workflow(s) to user dir for editing",
        )
        sub.add_argument(
            "workflow_names",
            nargs="+",
            help="Bundled workflow name(s) to copy",
        )

        # add <files...> [--name NAME] — multi-target; --name is single-file only
        sub = actions.add_parser(
            "add",
            help="Install a custom YAML file into the user workflow directory",
        )
        sub.add_argument(
            "files",
            nargs="+",
            help="Path(s) to YAML workflow file(s) to install",
        )
        sub.add_argument(
            "--name",
            help="Override the workflow filename (stem); only valid when adding a single file",
        )

        # remove <workflow_names...> — multi-target
        sub = actions.add_parser(
            "remove",
            help="Delete workflow(s) from the user directory (bundled workflows cannot be removed)",
        )
        sub.add_argument(
            "workflow_names",
            nargs="+",
            help="User workflow name(s) to remove",
        )

        # validate <workflow_name>
        sub = actions.add_parser(
            "validate",
            help="Parse and validate a workflow by name or file path",
        )
        sub.add_argument("workflow_name", help="Workflow name or path to YAML file")
|
||||
311
src/skill_seekers/cli/workflows_command.py
Normal file
311
src/skill_seekers/cli/workflows_command.py
Normal file
@@ -0,0 +1,311 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Workflows CLI Command
|
||||
|
||||
Manage enhancement workflow presets:
|
||||
list List all workflows (bundled + user)
|
||||
show Print YAML content of a workflow
|
||||
copy Copy a bundled workflow to user dir for editing
|
||||
add Install a custom YAML into user dir
|
||||
remove Delete a user workflow (bundled ones cannot be removed)
|
||||
validate Parse and validate a workflow YAML
|
||||
|
||||
Usage:
|
||||
skill-seekers workflows list
|
||||
skill-seekers workflows show security-focus
|
||||
skill-seekers workflows copy security-focus
|
||||
skill-seekers workflows add ./my-workflow.yaml
|
||||
skill-seekers workflows remove my-workflow
|
||||
skill-seekers workflows validate security-focus
|
||||
"""
|
||||
|
||||
import shutil
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
from skill_seekers.cli.enhancement_workflow import (
|
||||
WorkflowEngine,
|
||||
list_bundled_workflows,
|
||||
)
|
||||
|
||||
USER_WORKFLOWS_DIR = Path.home() / ".config" / "skill-seekers" / "workflows"
|
||||
|
||||
|
||||
def _ensure_user_dir() -> Path:
    """Create the user workflow directory if needed and return it."""
    target = USER_WORKFLOWS_DIR
    target.mkdir(parents=True, exist_ok=True)
    return target
|
||||
|
||||
|
||||
def _bundled_yaml_text(name: str) -> str | None:
|
||||
"""Return raw YAML text of a bundled workflow, or None if not found."""
|
||||
from importlib.resources import files as importlib_files
|
||||
|
||||
for suffix in (".yaml", ".yml"):
|
||||
try:
|
||||
pkg_ref = importlib_files("skill_seekers.workflows").joinpath(name + suffix)
|
||||
return pkg_ref.read_text(encoding="utf-8")
|
||||
except (FileNotFoundError, TypeError, ModuleNotFoundError):
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
def _workflow_yaml_text(name_or_path: str) -> str | None:
    """Resolve a workflow by name or path and return its raw YAML text."""
    # 1) Explicit YAML file path (absolute or relative).
    direct = Path(name_or_path)
    if direct.suffix in (".yaml", ".yml") and direct.exists():
        return direct.read_text(encoding="utf-8")

    # 2) Bare name resolved relative to the CWD.
    for ext in (".yaml", ".yml"):
        local = Path(name_or_path + ext)
        if local.exists():
            return local.read_text(encoding="utf-8")

    # 3) User workflow directory (.yaml preferred over .yml).
    for ext in (".yaml", ".yml"):
        user_candidate = USER_WORKFLOWS_DIR / (name_or_path + ext)
        if user_candidate.exists():
            return user_candidate.read_text(encoding="utf-8")

    # 4) Bundled package workflows.
    return _bundled_yaml_text(name_or_path)
|
||||
|
||||
|
||||
def _list_user_workflow_names() -> list[str]:
    """Return names of user workflows (without extension) from USER_WORKFLOWS_DIR."""
    if not USER_WORKFLOWS_DIR.exists():
        return []
    stems = [
        entry.stem
        for entry in USER_WORKFLOWS_DIR.iterdir()
        if entry.suffix in (".yaml", ".yml")
    ]
    return sorted(stems)
|
||||
|
||||
|
||||
def cmd_list() -> int:
    """List all available workflows.

    Prints bundled workflows first, then user workflows, each with the
    one-line ``description`` field read from its YAML. Always returns 0.
    """

    def _description(yaml_text: str | None) -> str:
        # Best-effort description lookup: a missing or unparseable YAML
        # simply yields an empty description — listing must never fail.
        if not yaml_text:
            return ""
        try:
            data = yaml.safe_load(yaml_text)
            return data.get("description", "")
        except Exception:
            return ""

    bundled = list_bundled_workflows()
    user = _list_user_workflow_names()

    if not bundled and not user:
        print("No workflows found.")
        return 0

    if bundled:
        print("Bundled workflows (read-only):")
        for name in bundled:
            # Previously this description-extraction logic was duplicated
            # verbatim for the bundled and user branches.
            print(f"  {name:<32} {_description(_bundled_yaml_text(name))}")

    if user:
        print("\nUser workflows (~/.config/skill-seekers/workflows/):")
        for name in user:
            user_file = USER_WORKFLOWS_DIR / (name + ".yaml")
            if not user_file.exists():
                user_file = USER_WORKFLOWS_DIR / (name + ".yml")
            try:
                text = user_file.read_text(encoding="utf-8")
            except Exception:
                text = None
            print(f"  {name:<32} {_description(text)}")

    return 0
|
||||
|
||||
|
||||
def cmd_show(name: str) -> int:
    """Print YAML content of a workflow."""
    text = _workflow_yaml_text(name)
    if text is not None:
        # YAML already carries its own trailing newline; avoid adding one.
        print(text, end="")
        return 0
    print(f"Error: Workflow '{name}' not found.", file=sys.stderr)
    print("Use 'skill-seekers workflows list' to see available workflows.", file=sys.stderr)
    return 1
|
||||
|
||||
|
||||
def cmd_copy(names: list[str]) -> int:
    """Copy one or more bundled workflows to user dir."""
    exit_code = 0
    for name in names:
        text = _bundled_yaml_text(name)

        # Unknown bundled workflow: report, remember the failure, keep going.
        if text is None:
            print(f"Error: Bundled workflow '{name}' not found.", file=sys.stderr)
            bundled = list_bundled_workflows()
            if bundled:
                print(f"Available bundled workflows: {', '.join(bundled)}", file=sys.stderr)
            exit_code = 1
            continue

        destination = _ensure_user_dir() / (name + ".yaml")
        if destination.exists():
            print(f"Warning: '{destination}' already exists. Overwriting.")

        destination.write_text(text, encoding="utf-8")
        print(f"Copied '{name}' to: {destination}")
        print(f"Edit it with your favourite editor, then reference it as '--enhance-workflow {name}'")

    return exit_code
|
||||
|
||||
|
||||
def cmd_add(file_paths: list[str], override_name: str | None = None) -> int:
|
||||
"""Install one or more custom YAML workflows into user dir."""
|
||||
if override_name and len(file_paths) > 1:
|
||||
print("Error: --name cannot be used when adding multiple files.", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
rc = 0
|
||||
for file_path in file_paths:
|
||||
src = Path(file_path)
|
||||
if not src.exists():
|
||||
print(f"Error: File '{file_path}' does not exist.", file=sys.stderr)
|
||||
rc = 1
|
||||
continue
|
||||
if src.suffix not in (".yaml", ".yml"):
|
||||
print(f"Error: '{file_path}' must have a .yaml or .yml extension.", file=sys.stderr)
|
||||
rc = 1
|
||||
continue
|
||||
|
||||
# Validate before installing
|
||||
try:
|
||||
text = src.read_text(encoding="utf-8")
|
||||
data = yaml.safe_load(text)
|
||||
if not isinstance(data, dict):
|
||||
raise ValueError("YAML root must be a mapping")
|
||||
if "stages" not in data:
|
||||
raise ValueError("Workflow must contain a 'stages' key")
|
||||
except Exception as exc:
|
||||
print(f"Error: Invalid workflow YAML '{file_path}' – {exc}", file=sys.stderr)
|
||||
rc = 1
|
||||
continue
|
||||
|
||||
dest_name = override_name if override_name else src.stem
|
||||
dest = _ensure_user_dir() / (dest_name + ".yaml")
|
||||
|
||||
if dest.exists():
|
||||
print(f"Warning: '{dest}' already exists. Overwriting.")
|
||||
|
||||
shutil.copy2(src, dest)
|
||||
print(f"Installed workflow '{dest_name}' to: {dest}")
|
||||
|
||||
return rc
|
||||
|
||||
|
||||
def cmd_remove(names: list[str]) -> int:
    """Delete one or more user workflows."""
    exit_code = 0
    bundled = list_bundled_workflows()
    for name in names:
        # Bundled presets are read-only; point the user at 'copy' instead.
        if name in bundled:
            print(
                f"Error: '{name}' is a bundled workflow and cannot be removed.",
                file=sys.stderr,
            )
            print("Use 'skill-seekers workflows copy' to create an editable copy.", file=sys.stderr)
            exit_code = 1
            continue

        # Delete the first matching extension only (.yaml preferred).
        target = None
        for ext in (".yaml", ".yml"):
            candidate = USER_WORKFLOWS_DIR / (name + ext)
            if candidate.exists():
                target = candidate
                break

        if target is None:
            print(f"Error: User workflow '{name}' not found.", file=sys.stderr)
            exit_code = 1
        else:
            target.unlink()
            print(f"Removed workflow: {target}")

    return exit_code
|
||||
|
||||
|
||||
def cmd_validate(name_or_path: str) -> int:
    """Parse and validate a workflow."""
    try:
        workflow = WorkflowEngine(name_or_path).workflow
        print(f"✅ Workflow '{workflow.name}' is valid.")
        print(f"   Description : {workflow.description}")
        print(f"   Version     : {workflow.version}")
        print(f"   Stages      : {len(workflow.stages)}")
        for stage in workflow.stages:
            state = "enabled" if stage.enabled else "disabled"
            print(f"     - {stage.name} ({stage.type}, {state})")
        return 0
    except FileNotFoundError as exc:
        # Workflow name/path did not resolve anywhere.
        print(f"Error: {exc}", file=sys.stderr)
        return 1
    except Exception as exc:
        # Parse/validation failure inside the engine.
        print(f"Error: Invalid workflow – {exc}", file=sys.stderr)
        return 1
|
||||
|
||||
|
||||
def main(argv=None) -> None:
    """Entry point for skill-seekers-workflows."""
    import argparse

    parser = argparse.ArgumentParser(
        prog="skill-seekers-workflows",
        description="Manage enhancement workflow presets",
    )
    subparsers = parser.add_subparsers(dest="action", metavar="ACTION")

    subparsers.add_parser("list", help="List all workflows (bundled + user)")

    sub = subparsers.add_parser("show", help="Print YAML content of a workflow")
    sub.add_argument("workflow_name")

    sub = subparsers.add_parser("copy", help="Copy bundled workflow(s) to user dir")
    sub.add_argument("workflow_names", nargs="+")

    sub = subparsers.add_parser("add", help="Install custom YAML file(s) into user dir")
    sub.add_argument("files", nargs="+")
    sub.add_argument("--name")

    sub = subparsers.add_parser("remove", help="Delete user workflow(s)")
    sub.add_argument("workflow_names", nargs="+")

    sub = subparsers.add_parser("validate", help="Validate a workflow by name or file")
    sub.add_argument("workflow_name")

    args = parser.parse_args(argv)

    if args.action is None:
        parser.print_help()
        sys.exit(0)

    # Dispatch table: each handler returns the process exit code.
    handlers = {
        "list": lambda: cmd_list(),
        "show": lambda: cmd_show(args.workflow_name),
        "copy": lambda: cmd_copy(args.workflow_names),
        "add": lambda: cmd_add(args.files, getattr(args, "name", None)),
        "remove": lambda: cmd_remove(args.workflow_names),
        "validate": lambda: cmd_validate(args.workflow_name),
    }
    handler = handlers.get(args.action)
    if handler is None:
        parser.print_help()
        sys.exit(0)
    sys.exit(handler())


if __name__ == "__main__":
    main()
|
||||
@@ -103,6 +103,12 @@ try:
|
||||
submit_config_impl,
|
||||
upload_skill_impl,
|
||||
validate_config_impl,
|
||||
# Workflow tools
|
||||
list_workflows_impl,
|
||||
get_workflow_impl,
|
||||
create_workflow_impl,
|
||||
update_workflow_impl,
|
||||
delete_workflow_impl,
|
||||
)
|
||||
except ImportError:
|
||||
# Fallback for direct script execution
|
||||
@@ -137,6 +143,11 @@ except ImportError:
|
||||
submit_config_impl,
|
||||
upload_skill_impl,
|
||||
validate_config_impl,
|
||||
list_workflows_impl,
|
||||
get_workflow_impl,
|
||||
create_workflow_impl,
|
||||
update_workflow_impl,
|
||||
delete_workflow_impl,
|
||||
)
|
||||
|
||||
# Initialize FastMCP server
|
||||
@@ -1178,6 +1189,100 @@ async def export_to_qdrant(
|
||||
return str(result)
|
||||
|
||||
|
||||
# ============================================================================
# WORKFLOW TOOLS (5 tools)
# ============================================================================


def _workflow_result_text(result) -> str:
    """Unwrap a workflow tool-impl result into plain text.

    The *_impl functions return MCP content lists (items with a ``.text``
    attribute); the async tool wrappers below all need the first item's
    text, falling back to ``str()``. Factored out of the five wrappers,
    which previously each duplicated this unwrapping verbatim.
    """
    if isinstance(result, list) and result:
        first = result[0]
        return first.text if hasattr(first, "text") else str(first)
    return str(result)


@safe_tool_decorator(
    description="List all available enhancement workflows (bundled defaults + user-created). Returns name, description, and source (bundled/user) for each."
)
async def list_workflows() -> str:
    """List all available enhancement workflow presets."""
    return _workflow_result_text(list_workflows_impl({}))


@safe_tool_decorator(
    description="Get the full YAML content of a named enhancement workflow. Searches user dir first, then bundled defaults."
)
async def get_workflow(name: str) -> str:
    """
    Get full YAML content of a workflow.

    Args:
        name: Workflow name (e.g. 'security-focus', 'default')

    Returns:
        YAML content of the workflow, or error message if not found.
    """
    return _workflow_result_text(get_workflow_impl({"name": name}))


@safe_tool_decorator(
    description="Create a new user workflow from YAML content. The workflow is saved to ~/.config/skill-seekers/workflows/."
)
async def create_workflow(name: str, content: str) -> str:
    """
    Create a new user workflow.

    Args:
        name: Workflow name (becomes the filename stem, e.g. 'my-custom')
        content: Full YAML content of the workflow

    Returns:
        Success message with file path, or error message.
    """
    return _workflow_result_text(create_workflow_impl({"name": name, "content": content}))


@safe_tool_decorator(
    description="Update (overwrite) an existing user workflow. Cannot update bundled workflows."
)
async def update_workflow(name: str, content: str) -> str:
    """
    Update an existing user workflow.

    Args:
        name: Workflow name to update
        content: New YAML content

    Returns:
        Success message, or error if workflow is bundled or invalid.
    """
    return _workflow_result_text(update_workflow_impl({"name": name, "content": content}))


@safe_tool_decorator(
    description="Delete a user workflow by name. Bundled workflows cannot be deleted."
)
async def delete_workflow(name: str) -> str:
    """
    Delete a user workflow.

    Args:
        name: Workflow name to delete

    Returns:
        Success message, or error if workflow is bundled or not found.
    """
    return _workflow_result_text(delete_workflow_impl({"name": name}))
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# MAIN ENTRY POINT
|
||||
# ============================================================================
|
||||
|
||||
@@ -96,6 +96,21 @@ from .vector_db_tools import (
|
||||
from .vector_db_tools import (
|
||||
export_to_weaviate_impl,
|
||||
)
|
||||
from .workflow_tools import (
|
||||
create_workflow_tool as create_workflow_impl,
|
||||
)
|
||||
from .workflow_tools import (
|
||||
delete_workflow_tool as delete_workflow_impl,
|
||||
)
|
||||
from .workflow_tools import (
|
||||
get_workflow_tool as get_workflow_impl,
|
||||
)
|
||||
from .workflow_tools import (
|
||||
list_workflows_tool as list_workflows_impl,
|
||||
)
|
||||
from .workflow_tools import (
|
||||
update_workflow_tool as update_workflow_impl,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"__version__",
|
||||
@@ -132,4 +147,10 @@ __all__ = [
|
||||
"export_to_chroma_impl",
|
||||
"export_to_faiss_impl",
|
||||
"export_to_qdrant_impl",
|
||||
# Workflow tools
|
||||
"list_workflows_impl",
|
||||
"get_workflow_impl",
|
||||
"create_workflow_impl",
|
||||
"update_workflow_impl",
|
||||
"delete_workflow_impl",
|
||||
]
|
||||
|
||||
226
src/skill_seekers/mcp/tools/workflow_tools.py
Normal file
226
src/skill_seekers/mcp/tools/workflow_tools.py
Normal file
@@ -0,0 +1,226 @@
|
||||
"""
|
||||
MCP Tool Implementations for Workflow Management
|
||||
|
||||
5 tools:
|
||||
list_workflows – list all workflows (bundled + user) with source info
|
||||
get_workflow – return full YAML of a named workflow
|
||||
create_workflow – write a new YAML to user dir
|
||||
update_workflow – overwrite an existing user workflow
|
||||
delete_workflow – remove a user workflow by name
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
try:
    from mcp.types import TextContent
except ImportError:
    # Graceful degradation for testing without mcp installed
    class TextContent:  # type: ignore[no-redef]
        # Minimal stand-in matching mcp.types.TextContent's constructor;
        # `type` intentionally shadows the builtin to mirror the upstream
        # keyword argument (callers pass type="text").
        def __init__(self, type: str, text: str):
            self.type = type
            self.text = text
|
||||
USER_WORKFLOWS_DIR = Path.home() / ".config" / "skill-seekers" / "workflows"
|
||||
|
||||
|
||||
def _ensure_user_dir() -> Path:
    """Return USER_WORKFLOWS_DIR, creating it (and parents) if absent."""
    directory = USER_WORKFLOWS_DIR
    directory.mkdir(parents=True, exist_ok=True)
    return directory
|
||||
|
||||
|
||||
def _bundled_names() -> list[str]:
|
||||
from importlib.resources import files as importlib_files
|
||||
|
||||
try:
|
||||
pkg = importlib_files("skill_seekers.workflows")
|
||||
names = []
|
||||
for item in pkg.iterdir():
|
||||
name = str(item.name)
|
||||
if name.endswith((".yaml", ".yml")):
|
||||
names.append(name.removesuffix(".yaml").removesuffix(".yml"))
|
||||
return sorted(names)
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
|
||||
def _user_names() -> list[str]:
    """Sorted stems of *.yaml / *.yml files in the user workflow directory."""
    if not USER_WORKFLOWS_DIR.exists():
        return []
    stems = [
        entry.stem
        for entry in USER_WORKFLOWS_DIR.iterdir()
        if entry.suffix in (".yaml", ".yml")
    ]
    return sorted(stems)
|
||||
|
||||
|
||||
def _read_bundled(name: str) -> str | None:
|
||||
from importlib.resources import files as importlib_files
|
||||
|
||||
for suffix in (".yaml", ".yml"):
|
||||
try:
|
||||
pkg_ref = importlib_files("skill_seekers.workflows").joinpath(name + suffix)
|
||||
return pkg_ref.read_text(encoding="utf-8")
|
||||
except (FileNotFoundError, TypeError, ModuleNotFoundError):
|
||||
continue
|
||||
return None
|
||||
|
||||
|
||||
def _read_workflow(name: str) -> str | None:
    """Read YAML text: user dir first, then bundled."""
    for ext in (".yaml", ".yml"):
        candidate = USER_WORKFLOWS_DIR / (name + ext)
        if candidate.exists():
            return candidate.read_text(encoding="utf-8")
    # No user override: fall back to the packaged defaults.
    return _read_bundled(name)
|
||||
|
||||
|
||||
def _validate_yaml(text: str) -> dict:
    """Parse and basic-validate workflow YAML; returns parsed dict.

    Raises:
        ValueError: when the YAML root is not a mapping or 'stages' is missing.
    """
    parsed = yaml.safe_load(text)
    if not isinstance(parsed, dict):
        raise ValueError("Workflow YAML root must be a mapping")
    if "stages" not in parsed:
        raise ValueError("Workflow must contain a 'stages' key")
    return parsed
|
||||
|
||||
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
# Tool implementations
|
||||
# ──────────────────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
def list_workflows_tool(args: dict) -> list:
    """Return all workflows with name, description, and source.

    Bundled presets are listed first, then user-directory workflows.
    The listing is rendered as YAML text in a single TextContent item.
    """

    def _description(text: str | None) -> str:
        # Best-effort extraction: a missing or malformed preset is still
        # listed, just with an empty description.
        if not text:
            return ""
        try:
            data = yaml.safe_load(text)
            return data.get("description", "") if isinstance(data, dict) else ""
        except Exception:
            return ""

    result: list[dict[str, str]] = []

    for name in _bundled_names():
        result.append(
            {"name": name, "description": _description(_read_bundled(name)), "source": "bundled"}
        )

    for name in _user_names():
        # _read_workflow prefers the user copy, which is the one being listed here.
        result.append(
            {"name": name, "description": _description(_read_workflow(name)), "source": "user"}
        )

    output = yaml.dump(result, default_flow_style=False, sort_keys=False)
    return [TextContent(type="text", text=output)]
|
||||
|
||||
|
||||
def get_workflow_tool(args: dict) -> list:
    """Return full YAML content of a named workflow."""
    name = args.get("name", "").strip()
    if not name:
        return [TextContent(type="text", text="Error: 'name' parameter is required.")]

    text = _read_workflow(name)
    if text is not None:
        return [TextContent(type="text", text=text)]

    # Not found: build a helpful message listing everything that is available.
    available = _bundled_names() + [f"{n} (user)" for n in _user_names()]
    listing = ", ".join(available) if available else "none"
    msg = f"Error: Workflow '{name}' not found.\nAvailable workflows: {listing}"
    return [TextContent(type="text", text=msg)]
|
||||
|
||||
|
||||
def create_workflow_tool(args: dict) -> list:
    """Write a new workflow YAML to the user directory.

    The content is validated first, an existing user workflow is never
    overwritten (use update_workflow for that), and the name is checked so
    the written file cannot escape the user workflows directory.
    """
    name = args.get("name", "").strip()
    content = args.get("content", "")

    if not name:
        return [TextContent(type="text", text="Error: 'name' parameter is required.")]
    if not content:
        return [TextContent(type="text", text="Error: 'content' parameter is required.")]

    # Security: reject path separators / traversal so a name like "../x" cannot
    # write outside the user workflows directory.
    if "/" in name or "\\" in name or ".." in name or name.startswith("."):
        return [TextContent(type="text", text=f"Error: Invalid workflow name '{name}'.")]

    # Validate
    try:
        _validate_yaml(content)
    except Exception as exc:
        return [TextContent(type="text", text=f"Error: Invalid workflow YAML – {exc}")]

    dest = _ensure_user_dir() / (name + ".yaml")
    if dest.exists():
        return [
            TextContent(
                type="text",
                text=f"Error: Workflow '{name}' already exists in user dir. Use update_workflow to overwrite.",
            )
        ]

    dest.write_text(content, encoding="utf-8")
    return [TextContent(type="text", text=f"Created workflow '{name}' at: {dest}")]
|
||||
|
||||
|
||||
def update_workflow_tool(args: dict) -> list:
    """Overwrite an existing user workflow. Cannot update bundled workflows.

    The content is validated before writing. If the user copy was saved with
    a '.yml' extension, that file is overwritten in place instead of creating
    a duplicate '.yaml' file that would shadow it.
    """
    name = args.get("name", "").strip()
    content = args.get("content", "")

    if not name:
        return [TextContent(type="text", text="Error: 'name' parameter is required.")]
    if not content:
        return [TextContent(type="text", text="Error: 'content' parameter is required.")]

    # Security: reject path separators / traversal so writes stay inside the
    # user workflows directory.
    if "/" in name or "\\" in name or ".." in name or name.startswith("."):
        return [TextContent(type="text", text=f"Error: Invalid workflow name '{name}'.")]

    if name in _bundled_names() and name not in _user_names():
        return [
            TextContent(
                type="text",
                text=(
                    f"Error: '{name}' is a bundled workflow and cannot be updated. "
                    "Use create_workflow with a different name, or copy it first with "
                    "'skill-seekers workflows copy'."
                ),
            )
        ]

    # Validate
    try:
        _validate_yaml(content)
    except Exception as exc:
        return [TextContent(type="text", text=f"Error: Invalid workflow YAML – {exc}")]

    user_dir = _ensure_user_dir()
    dest = user_dir / (name + ".yaml")
    # Bug fix: if only a '.yml' copy exists, overwrite it rather than writing a
    # second '.yaml' file alongside it (readers prefer '.yaml' and would shadow it).
    yml_copy = user_dir / (name + ".yml")
    if not dest.exists() and yml_copy.exists():
        dest = yml_copy
    dest.write_text(content, encoding="utf-8")
    return [TextContent(type="text", text=f"Updated workflow '{name}' at: {dest}")]
|
||||
|
||||
|
||||
def delete_workflow_tool(args: dict) -> list:
    """Remove a user workflow by name. Bundled workflows cannot be deleted."""
    name = args.get("name", "").strip()
    if not name:
        return [TextContent(type="text", text="Error: 'name' parameter is required.")]

    # Security: reject path separators / traversal so only files directly in
    # the user workflows directory can be unlinked.
    if "/" in name or "\\" in name or ".." in name or name.startswith("."):
        return [TextContent(type="text", text=f"Error: Invalid workflow name '{name}'.")]

    if name in _bundled_names():
        return [
            TextContent(
                type="text",
                text=f"Error: '{name}' is a bundled workflow and cannot be deleted.",
            )
        ]

    for suffix in (".yaml", ".yml"):
        candidate = USER_WORKFLOWS_DIR / (name + suffix)
        if candidate.exists():
            candidate.unlink()
            return [TextContent(type="text", text=f"Deleted user workflow: {candidate}")]

    return [TextContent(type="text", text=f"Error: User workflow '{name}' not found.")]
|
||||
1
src/skill_seekers/workflows/__init__.py
Normal file
1
src/skill_seekers/workflows/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Bundled default enhancement workflow presets."""
|
||||
71
src/skill_seekers/workflows/api-documentation.yaml
Normal file
71
src/skill_seekers/workflows/api-documentation.yaml
Normal file
@@ -0,0 +1,71 @@
|
||||
# Bundled enhancement workflow preset: API documentation generation.
# Stages run in order; uses_history: true appears to give a stage access to
# earlier stage output (prompts reference prior results) — confirm against engine.
name: api-documentation
description: Generate comprehensive API documentation from code analysis
version: "1.0"
applies_to:
  - codebase_analysis
  - github_analysis
variables:
  depth: comprehensive
stages:
  # Built-in pattern extraction runs first as a foundation for custom stages.
  - name: base_patterns
    type: builtin
    target: patterns
    enabled: true
    uses_history: false
  - name: api_extraction
    type: custom
    target: api
    uses_history: false
    enabled: true
    prompt: >
      Extract and document all public API endpoints, functions, and interfaces
      from this codebase.

      For each API element include:
      1. Name and signature
      2. Purpose and description
      3. Parameters (name, type, required/optional, description)
      4. Return value (type, description)
      5. Exceptions that may be raised
      6. Usage example

      Output JSON with an "api_reference" array of API elements.
  - name: usage_examples
    type: custom
    target: examples
    uses_history: true
    enabled: true
    prompt: >
      Based on the API reference, create practical usage examples that demonstrate
      common integration patterns.

      Create examples for:
      1. Basic getting-started scenario
      2. Common use case (most frequently used APIs)
      3. Advanced integration pattern
      4. Error handling example

      Output JSON with a "usage_examples" array where each item has
      "title", "description", and "code" fields.
  - name: integration_guide
    type: custom
    target: integration
    uses_history: true
    enabled: true
    prompt: >
      Create a concise integration guide based on the API documentation and examples.

      Include:
      1. Prerequisites and installation
      2. Authentication setup (if applicable)
      3. Quick start in 5 minutes
      4. Common gotchas and how to avoid them
      5. Links to further resources

      Output JSON with an "integration_guide" object.
post_process:
  reorder_sections: []
  add_metadata:
    enhanced: true
    workflow: api-documentation
    has_api_docs: true
||||
72
src/skill_seekers/workflows/architecture-comprehensive.yaml
Normal file
72
src/skill_seekers/workflows/architecture-comprehensive.yaml
Normal file
@@ -0,0 +1,72 @@
|
||||
# Bundled enhancement workflow preset: deep architectural analysis.
# Later stages set uses_history: true and their prompts build on earlier
# stage output — presumably the engine threads prior results through; verify.
name: architecture-comprehensive
description: Deep architectural analysis including patterns, dependencies, and design quality
version: "1.0"
applies_to:
  - codebase_analysis
  - github_analysis
variables:
  depth: comprehensive
stages:
  # Built-in pattern extraction as the foundation stage.
  - name: base_patterns
    type: builtin
    target: patterns
    enabled: true
    uses_history: false
  - name: architecture_overview
    type: custom
    target: architecture
    uses_history: false
    enabled: true
    prompt: >
      Analyse the architectural patterns and design decisions in this codebase.

      Identify:
      1. Overall architectural style (MVC, microservices, layered, hexagonal, etc.)
      2. Key design patterns used and their purpose
      3. Component boundaries and responsibilities
      4. Data flow between components
      5. External dependencies and integration points

      Output JSON with an "architecture" object containing:
      - "style": primary architectural style
      - "patterns": list of design patterns detected
      - "components": list of key components with descriptions
      - "data_flow": description of data flow
      - "quality_score": 1-10 rating with justification
  - name: dependency_analysis
    type: custom
    target: dependencies
    uses_history: true
    enabled: true
    prompt: >
      Based on the architectural overview, analyse the dependency structure.

      Identify:
      1. Circular dependencies (red flags)
      2. High coupling between modules
      3. Opportunities for dependency injection
      4. Third-party dependency risks (outdated, unmaintained)
      5. Suggested refactoring priorities

      Output JSON with a "dependency_analysis" object.
  - name: improvement_roadmap
    type: custom
    target: roadmap
    uses_history: true
    enabled: true
    prompt: >
      Based on the full architectural analysis, create an improvement roadmap.

      Provide:
      1. Top 3 quick wins (low effort, high impact)
      2. Medium-term improvements (1-3 months)
      3. Long-term architectural goals
      4. Technical debt prioritisation

      Output JSON with a "roadmap" object containing "quick_wins", "medium_term", and "long_term" arrays.
post_process:
  reorder_sections: []
  add_metadata:
    enhanced: true
    workflow: architecture-comprehensive
    deep_analysis: true
|
||||
24
src/skill_seekers/workflows/default.yaml
Normal file
24
src/skill_seekers/workflows/default.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
# Bundled enhancement workflow preset: the default pipeline.
# Two builtin stages only — pattern analysis and test-example extraction.
name: default
description: Standard AI enhancement with all features enabled
version: "1.0"
applies_to:
  - codebase_analysis
  - doc_scraping
  - github_analysis
variables: {}
stages:
  - name: base_analysis
    type: builtin
    target: patterns
    enabled: true
    uses_history: false
  - name: test_examples
    type: builtin
    target: examples
    enabled: true
    uses_history: false
post_process:
  reorder_sections: []
  add_metadata:
    enhanced: true
    workflow: default
|
||||
27
src/skill_seekers/workflows/minimal.yaml
Normal file
27
src/skill_seekers/workflows/minimal.yaml
Normal file
@@ -0,0 +1,27 @@
|
||||
# Bundled enhancement workflow preset: minimal/lightweight pipeline.
# Single custom stage that polishes SKILL.md text; no heavyweight analysis.
name: minimal
description: Lightweight enhancement - SKILL.md only, no heavy analysis
version: "1.0"
applies_to:
  - codebase_analysis
  - doc_scraping
  - github_analysis
variables:
  depth: surface
stages:
  - name: skill_md_polish
    type: custom
    target: skill_md
    uses_history: false
    enabled: true
    prompt: >
      Review the following SKILL.md content and make minimal improvements:
      - Fix obvious formatting issues
      - Ensure the overview section is clear and concise
      - Remove duplicate or redundant information

      Return the improved content as plain text without extra commentary.
post_process:
  reorder_sections: []
  add_metadata:
    enhanced: true
    workflow: minimal
|
||||
59
src/skill_seekers/workflows/security-focus.yaml
Normal file
59
src/skill_seekers/workflows/security-focus.yaml
Normal file
@@ -0,0 +1,59 @@
|
||||
# Bundled enhancement workflow preset: security review.
# auth_review sets uses_history: true and its prompt builds on the earlier
# vulnerability findings — presumably via the engine's stage history; verify.
name: security-focus
description: "Security-focused review: vulnerabilities, auth, data handling"
version: "1.0"
applies_to:
  - codebase_analysis
  - python
  - javascript
  - typescript
variables:
  depth: comprehensive
stages:
  # Built-in pattern extraction as the foundation stage.
  - name: base_patterns
    type: builtin
    target: patterns
    enabled: true
    uses_history: false
  - name: vulnerabilities
    type: custom
    target: security
    uses_history: false
    enabled: true
    prompt: >
      Analyze this codebase for OWASP Top 10 and common security vulnerabilities.

      Focus on:
      1. Injection flaws (SQL, command, LDAP injection)
      2. Broken authentication and session management
      3. Sensitive data exposure (secrets in code, logging PII)
      4. Security misconfigurations
      5. Cross-site scripting (XSS) risks
      6. Insecure direct object references

      Output JSON with a "findings" array where each item has:
      - "category": vulnerability category
      - "severity": "critical" | "high" | "medium" | "low"
      - "description": what the issue is
      - "recommendation": how to fix it
  - name: auth_review
    type: custom
    target: auth
    uses_history: true
    enabled: true
    prompt: >
      Examine authentication and authorisation patterns in this codebase.

      Review:
      1. Token handling and storage
      2. Password hashing mechanisms
      3. Session expiry and invalidation
      4. Role-based access control implementation
      5. OAuth/JWT usage correctness

      Output JSON with an "auth_analysis" object containing "strengths" and "weaknesses" arrays.
post_process:
  reorder_sections: []
  add_metadata:
    enhanced: true
    workflow: security-focus
    security_reviewed: true
|
||||
Reference in New Issue
Block a user