feat(plugins): add plugin-safe compatibility filtering
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
import os
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Mapping
|
||||
@@ -7,6 +8,8 @@ from datetime import date, datetime
|
||||
|
||||
import yaml
|
||||
from _project_paths import find_repo_root
|
||||
from plugin_compatibility import build_report as build_plugin_compatibility_report
|
||||
from plugin_compatibility import compatibility_by_path as plugin_compatibility_by_path
|
||||
|
||||
# Ensure UTF-8 output for Windows compatibility
|
||||
if sys.platform == 'win32':
|
||||
@@ -846,9 +849,12 @@ def parse_frontmatter(content):
|
||||
print(f"⚠️ YAML parsing error: {e}")
|
||||
return {}
|
||||
|
||||
def generate_index(skills_dir, output_file):
|
||||
def generate_index(skills_dir, output_file, compatibility_report=None):
|
||||
print(f"🏗️ Generating index from: {skills_dir}")
|
||||
skills = []
|
||||
if compatibility_report is None:
|
||||
compatibility_report = build_plugin_compatibility_report(pathlib.Path(skills_dir))
|
||||
compatibility_lookup = plugin_compatibility_by_path(compatibility_report)
|
||||
|
||||
for root, dirs, files in os.walk(skills_dir):
|
||||
# Skip .disabled or hidden directories
|
||||
@@ -873,7 +879,19 @@ def generate_index(skills_dir, output_file):
|
||||
"description": "",
|
||||
"risk": "unknown",
|
||||
"source": "unknown",
|
||||
"date_added": None
|
||||
"date_added": None,
|
||||
"plugin": {
|
||||
"targets": {
|
||||
"codex": "supported",
|
||||
"claude": "supported",
|
||||
},
|
||||
"setup": {
|
||||
"type": "none",
|
||||
"summary": "",
|
||||
"docs": None,
|
||||
},
|
||||
"reasons": [],
|
||||
},
|
||||
}
|
||||
|
||||
try:
|
||||
@@ -906,6 +924,14 @@ def generate_index(skills_dir, output_file):
|
||||
if skill_info["id"] in CURATED_CATEGORY_OVERRIDES:
|
||||
skill_info["category"] = CURATED_CATEGORY_OVERRIDES[skill_info["id"]]
|
||||
skill_info["category"] = normalize_category(skill_info["category"])
|
||||
|
||||
plugin_info = compatibility_lookup.get(skill_info["path"])
|
||||
if plugin_info:
|
||||
skill_info["plugin"] = {
|
||||
"targets": dict(plugin_info["targets"]),
|
||||
"setup": dict(plugin_info["setup"]),
|
||||
"reasons": list(plugin_info["reasons"]),
|
||||
}
|
||||
|
||||
# Fallback for description if missing in frontmatter (legacy support)
|
||||
if not skill_info["description"]:
|
||||
|
||||
352
tools/scripts/plugin_compatibility.py
Normal file
352
tools/scripts/plugin_compatibility.py
Normal file
@@ -0,0 +1,352 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Mapping
|
||||
from datetime import date, datetime
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
|
||||
from _project_paths import find_repo_root
|
||||
|
||||
|
||||
# Repository-relative location of the generated compatibility report.
PLUGIN_COMPATIBILITY_PATH = Path("data") / "plugin-compatibility.json"
# Dependency manifests whose presence signals a skill needs a runtime set up.
SKILL_RUNTIME_FILES = (
    "package.json",
    "requirements.txt",
    "pyproject.toml",
    "Cargo.toml",
    "go.mod",
)
# Markdown inline links: captures the (target) part of [text](target).
LOCAL_LINK_RE = re.compile(r"\[[^\]]*\]\(([^)]+)\)")
# Absolute per-user host paths (macOS/Linux "/Users/..." or "C:/Users/...").
ABSOLUTE_HOST_PATH_RE = re.compile(r"(/Users/|[A-Za-z]:/Users/)")
# Agent-specific home directories. The dots are escaped so that e.g.
# "~/.claude" does not also match unrelated text like "~/xclaude".
AGENT_HOME_PATTERNS = {
    "claude": re.compile(r"~/\.claude(?:/|$)"),
    "codex": re.compile(r"~/\.codex(?:/|$)"),
    "cursor": re.compile(r"~/\.cursor(?:/|$)"),
    "gemini": re.compile(r"~/\.gemini(?:/|$)"),
}
# Plugin targets this tool reports on, and the states a skill may declare.
SUPPORTED_TARGETS = ("codex", "claude")
SUPPORTED_PLUGIN_STATES = {"supported", "blocked"}
|
||||
|
||||
|
||||
def configure_utf8_output() -> None:
    """Force UTF-8 stdout/stderr on Windows so emoji output does not crash.

    No-op on other platforms. Tries ``reconfigure`` first; for streams that
    do not support it, falls back to wrapping the raw byte buffer.
    """
    if sys.platform != "win32":
        return

    for name in ("stdout", "stderr"):
        stream = getattr(sys, name)
        try:
            stream.reconfigure(encoding="utf-8", errors="backslashreplace")
        except Exception:
            # reconfigure() is unavailable (or failed); re-wrap the buffer.
            raw = getattr(stream, "buffer", None)
            if raw is not None:
                wrapped = io.TextIOWrapper(
                    raw, encoding="utf-8", errors="backslashreplace"
                )
                setattr(sys, name, wrapped)
|
||||
|
||||
|
||||
def _normalize_yaml_value(value: Any) -> Any:
    """Recursively convert YAML-parsed values into JSON-friendly ones.

    Mappings and lists are rebuilt with normalized members, and
    ``date``/``datetime`` objects become ISO-8601 strings; every other
    value passes through unchanged.
    """
    if isinstance(value, Mapping):
        normalized: dict[Any, Any] = {}
        for key, member in value.items():
            normalized[key] = _normalize_yaml_value(member)
        return normalized
    if isinstance(value, list):
        return [_normalize_yaml_value(entry) for entry in value]
    if isinstance(value, (date, datetime)):
        return value.isoformat()
    return value
|
||||
|
||||
|
||||
def parse_frontmatter(content: str) -> dict[str, Any]:
    """Extract and normalize the YAML frontmatter block from *content*.

    Returns an empty dict when no leading ``---``-delimited block exists,
    when the YAML is invalid, or when it parses to a non-mapping value.
    """
    # Frontmatter is the first "---\n...\n---" block at the start of the string.
    match = re.search(r"^---\s*\n(.*?)\n---", content, re.DOTALL)
    if not match:
        return {}

    try:
        # safe_load guards against arbitrary object construction from YAML.
        parsed = yaml.safe_load(match.group(1)) or {}
    except yaml.YAMLError:
        return {}

    # A scalar frontmatter body (e.g. just a string) is not usable metadata.
    if not isinstance(parsed, Mapping):
        return {}

    # Normalize dates etc. so the result is JSON-serializable downstream.
    return dict(_normalize_yaml_value(parsed))
|
||||
|
||||
|
||||
def _iter_skill_dirs(skills_root: Path) -> list[Path]:
    """Return every directory under *skills_root* that contains a SKILL.md.

    Hidden directories (names starting with ".") are pruned from the walk,
    and the result is sorted for deterministic ordering.
    """
    found: list[Path] = []

    for current, subdirs, file_names in os.walk(skills_root):
        # Prune hidden dirs in place so os.walk never descends into them.
        subdirs[:] = [name for name in subdirs if not name.startswith(".")]
        if "SKILL.md" in file_names:
            found.append(Path(current))

    return sorted(found)
|
||||
|
||||
|
||||
def _skill_id_from_dir(skills_root: Path, skill_dir: Path) -> str:
    """Return the skill id: the dir path relative to the skills root, using "/" separators."""
    relative = skill_dir.relative_to(skills_root)
    return str(relative).replace(os.sep, "/")
|
||||
|
||||
|
||||
def _skill_repo_path(skills_root: Path, skill_dir: Path) -> str:
    """Return the repo-relative path of *skill_dir* (includes the skills dir name)."""
    repo_root = skills_root.parent
    relative = skill_dir.relative_to(repo_root)
    return str(relative).replace(os.sep, "/")
|
||||
|
||||
|
||||
def _runtime_dependency_files(skill_dir: Path) -> list[str]:
    """List the known runtime manifests present directly in *skill_dir*, sorted."""
    present = [name for name in SKILL_RUNTIME_FILES if (skill_dir / name).is_file()]
    return sorted(present)
|
||||
|
||||
|
||||
def _local_link_reasons(content: str, skill_dir: Path) -> set[str]:
    """Flag markdown links in *content* whose local targets are missing.

    Returns ``{"broken_local_reference"}`` when at least one relative link
    target does not exist on disk; empty, external (http/mailto/angle-
    bracket), and absolute targets are ignored.
    """
    flagged: set[str] = set()

    for raw_target in LOCAL_LINK_RE.findall(content):
        # Drop any fragment ("#section") and surrounding whitespace.
        target = raw_target.split("#", 1)[0].strip()
        if not target or os.path.isabs(target):
            continue
        if target.startswith(("http://", "https://", "mailto:", "<", ">")):
            continue

        resolved = (skill_dir / target).resolve(strict=False)
        if not resolved.exists():
            flagged.add("broken_local_reference")

    return flagged
|
||||
|
||||
|
||||
def _setup_from_metadata(metadata: dict[str, Any]) -> dict[str, Any]:
    """Extract a normalized ``plugin.setup`` record from skill frontmatter.

    Returns a dict with keys ``type`` ("none" or "manual"), ``summary``
    (stripped string), and ``docs`` (non-empty string or None). Missing or
    malformed metadata yields the default "no setup" record.
    """
    # Single source of truth for the default record (was duplicated before);
    # copied on return so callers can't mutate the shared literal.
    default = {"type": "none", "summary": "", "docs": None}

    plugin_data = metadata.get("plugin")
    if not isinstance(plugin_data, Mapping):
        return dict(default)

    setup = plugin_data.get("setup")
    if not isinstance(setup, Mapping):
        return dict(default)

    # Unknown setup types are coerced to "none" rather than rejected.
    setup_type = str(setup.get("type", "none")).strip().lower() or "none"
    if setup_type not in {"none", "manual"}:
        setup_type = "none"

    # A bare "summary:" key parses to None in YAML; treat it as empty rather
    # than stringifying it to the literal text "None".
    raw_summary = setup.get("summary", "")
    summary = str(raw_summary).strip() if raw_summary is not None else ""

    docs = setup.get("docs")
    docs_value = str(docs).strip() if isinstance(docs, str) and docs.strip() else None

    return {
        "type": setup_type,
        "summary": summary,
        "docs": docs_value,
    }
|
||||
|
||||
|
||||
def _explicit_target_restrictions(metadata: dict[str, Any]) -> dict[str, str | None]:
    """Read explicit per-target plugin states from ``plugin.targets`` frontmatter.

    Each supported target maps to "supported", "blocked", or None when the
    skill declares nothing (or declares an unrecognized state).
    """
    declared: dict[str, str | None] = {target: None for target in SUPPORTED_TARGETS}

    plugin_data = metadata.get("plugin")
    targets = plugin_data.get("targets") if isinstance(plugin_data, Mapping) else None
    if not isinstance(targets, Mapping):
        return declared

    for target in SUPPORTED_TARGETS:
        raw = targets.get(target)
        if raw is None:
            continue
        state = str(raw).strip().lower()
        # Silently ignore states outside the known vocabulary.
        if state in SUPPORTED_PLUGIN_STATES:
            declared[target] = state

    return declared
|
||||
|
||||
|
||||
def _setup_is_valid(setup: dict[str, Any], skill_dir: Path) -> bool:
    """Return True when *setup* declares a usable manual-setup document.

    Requires type "manual", a non-empty summary, and a docs path that stays
    inside *skill_dir* and points at an existing file.
    """
    if setup["type"] != "manual" or not setup["summary"]:
        return False

    docs = setup.get("docs")
    if not docs:
        return False

    candidate = (skill_dir / docs).resolve(strict=False)
    resolved_root = skill_dir.resolve()
    try:
        # Reject docs paths that escape the skill directory (e.g. "../other").
        candidate.relative_to(resolved_root)
    except ValueError:
        return False

    return candidate.is_file()
|
||||
|
||||
|
||||
def _initial_target_reasons() -> dict[str, set[str]]:
    """Return an empty blocking-reason set for every supported plugin target."""
    reasons: dict[str, set[str]] = {}
    for target in SUPPORTED_TARGETS:
        reasons[target] = set()
    return reasons
|
||||
|
||||
|
||||
def analyze_skill(skill_dir: Path, skills_root: Path) -> dict[str, Any]:
    """Build the plugin-compatibility record for a single skill directory.

    Scans SKILL.md for portability hazards (absolute host paths, broken
    local links, agent-specific home paths, runtime manifests without a
    valid declared setup) and combines them with explicit frontmatter
    restrictions into per-target "supported"/"blocked" statuses.
    """
    content = (skill_dir / "SKILL.md").read_text(encoding="utf-8")
    metadata = parse_frontmatter(content)
    setup = _setup_from_metadata(metadata)
    restrictions = _explicit_target_restrictions(metadata)
    target_reasons = _initial_target_reasons()

    # Hard-coded /Users/... paths break on any other machine: block all targets.
    if ABSOLUTE_HOST_PATH_RE.search(content):
        for target in SUPPORTED_TARGETS:
            target_reasons[target].add("absolute_host_path")

    # Broken relative links affect every target equally.
    local_link_reasons = _local_link_reasons(content, skill_dir)
    for reason in local_link_reasons:
        for target in SUPPORTED_TARGETS:
            target_reasons[target].add(reason)

    # A reference to one agent's home directory blocks the *other* targets:
    # ~/.claude blocks codex, ~/.codex blocks claude, and any other agent's
    # home dir (cursor, gemini) blocks both supported targets.
    for agent_name, pattern in AGENT_HOME_PATTERNS.items():
        if not pattern.search(content):
            continue

        if agent_name == "claude":
            target_reasons["codex"].add("target_specific_home_path")
        elif agent_name == "codex":
            target_reasons["claude"].add("target_specific_home_path")
        else:
            for target in SUPPORTED_TARGETS:
                target_reasons[target].add("target_specific_home_path")

    # Runtime manifests without a valid documented manual setup block all targets.
    runtime_files = _runtime_dependency_files(skill_dir)
    if runtime_files and not _setup_is_valid(setup, skill_dir):
        for target in SUPPORTED_TARGETS:
            target_reasons[target].add("undeclared_runtime_dependency")

    # Explicit frontmatter "blocked" declarations always apply.
    for target, explicit_state in restrictions.items():
        if explicit_state == "blocked":
            target_reasons[target].add("explicit_target_restriction")

    # Any recorded reason flips a target from "supported" to "blocked".
    statuses = {
        target: "blocked" if target_reasons[target] else "supported"
        for target in SUPPORTED_TARGETS
    }

    # De-duplicated, sorted union of all per-target reasons for the report.
    union_reasons = sorted({reason for reasons in target_reasons.values() for reason in reasons})

    return {
        "id": _skill_id_from_dir(skills_root, skill_dir),
        "path": _skill_repo_path(skills_root, skill_dir),
        "targets": statuses,
        "setup": setup,
        "reasons": union_reasons,
        "blocked_reasons": {
            target: sorted(target_reasons[target])
            for target in SUPPORTED_TARGETS
        },
        "runtime_files": runtime_files,
    }
|
||||
|
||||
|
||||
def build_report(skills_root: Path) -> dict[str, Any]:
    """Analyze every skill under *skills_root* and return the full report.

    The report contains one record per skill ("skills") plus summary counts
    of supported/blocked skills per target and of manual-setup skills.
    """
    skills_root = Path(skills_root)
    skills = [analyze_skill(skill_dir, skills_root) for skill_dir in _iter_skill_dirs(skills_root)]

    summary = {
        "total_skills": len(skills),
        "supported": {
            target: sum(1 for skill in skills if skill["targets"][target] == "supported")
            for target in SUPPORTED_TARGETS
        },
        "blocked": {
            target: sum(1 for skill in skills if skill["targets"][target] == "blocked")
            for target in SUPPORTED_TARGETS
        },
        "manual_setup": sum(1 for skill in skills if skill["setup"]["type"] == "manual"),
    }

    return {
        "skills": skills,
        "summary": summary,
    }
|
||||
|
||||
|
||||
def sync_plugin_compatibility(root: Path) -> dict[str, Any]:
    """Rebuild the compatibility report and persist it under *root*.

    Writes data/plugin-compatibility.json (creating parent dirs as needed)
    and returns the freshly generated report.
    """
    root = Path(root)
    skills_root = root / "skills"
    report = build_report(skills_root)
    output_path = root / PLUGIN_COMPATIBILITY_PATH
    output_path.parent.mkdir(parents=True, exist_ok=True)
    # Trailing newline keeps the JSON file POSIX-friendly and diff-stable.
    output_path.write_text(json.dumps(report, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")
    return report
|
||||
|
||||
|
||||
def load_plugin_compatibility(root: Path) -> dict[str, Any]:
    """Return the on-disk compatibility report, rebuilding it when missing."""
    root = Path(root)
    report_path = root / PLUGIN_COMPATIBILITY_PATH
    if not report_path.is_file():
        # No cached report yet: generate and persist one.
        return sync_plugin_compatibility(root)
    return json.loads(report_path.read_text(encoding="utf-8"))
|
||||
|
||||
|
||||
def compatibility_by_skill_id(report: dict[str, Any]) -> dict[str, dict[str, Any]]:
    """Index the report's skill records by their skill id."""
    index: dict[str, dict[str, Any]] = {}
    for skill in report.get("skills", []):
        index[skill["id"]] = skill
    return index
|
||||
|
||||
|
||||
def compatibility_by_path(report: dict[str, Any]) -> dict[str, dict[str, Any]]:
    """Index the report's skill records by their repo-relative path."""
    index: dict[str, dict[str, Any]] = {}
    for skill in report.get("skills", []):
        index[skill["path"]] = skill
    return index
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse CLI options; ``--check`` switches to validate-only mode."""
    parser = argparse.ArgumentParser(description="Generate plugin compatibility status for skills.")
    parser.add_argument(
        "--check",
        action="store_true",
        help="Validate that data/plugin-compatibility.json is in sync without writing files.",
    )
    return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: sync the report, or verify it with ``--check``.

    Returns 0 on success; check failures exit via SystemExit with a message.
    """
    args = parse_args()
    root = find_repo_root(__file__)
    # The expected report is always rebuilt from the live skills tree.
    expected_report = build_report(root / "skills")
    report_path = root / PLUGIN_COMPATIBILITY_PATH

    if args.check:
        # Check mode: fail loudly instead of writing anything.
        if not report_path.is_file():
            raise SystemExit("data/plugin-compatibility.json is missing")

        current_report = json.loads(report_path.read_text(encoding="utf-8"))
        if current_report != expected_report:
            raise SystemExit("data/plugin-compatibility.json is out of sync")
        print("✅ Plugin compatibility report is in sync.")
        return 0

    # Write mode: persist the freshly built report (newline-terminated JSON).
    report_path.parent.mkdir(parents=True, exist_ok=True)
    report_path.write_text(json.dumps(expected_report, indent=2, ensure_ascii=False) + "\n", encoding="utf-8")
    print("✅ Plugin compatibility report synced.")
    return 0
|
||||
|
||||
|
||||
# Script entry point: make console output UTF-8-safe before any emoji prints.
if __name__ == "__main__":
    configure_utf8_output()
    raise SystemExit(main())
|
||||
@@ -9,6 +9,8 @@ from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from _project_paths import find_repo_root
|
||||
from plugin_compatibility import build_report as build_plugin_compatibility_report
|
||||
from plugin_compatibility import compatibility_by_skill_id, sync_plugin_compatibility
|
||||
from update_readme import configure_utf8_output, load_metadata
|
||||
|
||||
|
||||
@@ -22,12 +24,14 @@ AUTHOR = {
|
||||
}
|
||||
ROOT_CLAUDE_PLUGIN_NAME = "antigravity-awesome-skills"
|
||||
ROOT_CODEX_PLUGIN_NAME = "antigravity-awesome-skills"
|
||||
ROOT_CLAUDE_PLUGIN_DIRNAME = "antigravity-awesome-skills-claude"
|
||||
EDITORIAL_BUNDLES_PATH = Path("data") / "editorial-bundles.json"
|
||||
EDITORIAL_TEMPLATE_PATH = Path("tools") / "templates" / "editorial-bundles.md.tmpl"
|
||||
CLAUDE_MARKETPLACE_PATH = Path(".claude-plugin") / "marketplace.json"
|
||||
CLAUDE_PLUGIN_PATH = Path(".claude-plugin") / "plugin.json"
|
||||
CODEX_MARKETPLACE_PATH = Path(".agents") / "plugins" / "marketplace.json"
|
||||
CODEX_ROOT_PLUGIN_PATH = Path("plugins") / ROOT_CODEX_PLUGIN_NAME / ".codex-plugin" / "plugin.json"
|
||||
CLAUDE_ROOT_PLUGIN_PATH = Path("plugins") / ROOT_CLAUDE_PLUGIN_DIRNAME / ".claude-plugin" / "plugin.json"
|
||||
ACRONYM_TOKENS = {
|
||||
"ab": "A/B",
|
||||
"adb": "ADB",
|
||||
@@ -164,6 +168,10 @@ def _bundle_codex_long_description(bundle: dict[str, Any]) -> str:
|
||||
return f"{audience} Covers {' and '.join(highlights)}."
|
||||
|
||||
|
||||
def _format_count_label(count: int) -> str:
|
||||
return f"{count:,}"
|
||||
|
||||
|
||||
def _validate_bundle_skill_id(skill_id: str) -> None:
|
||||
if not SAFE_SKILL_ID_RE.fullmatch(skill_id):
|
||||
raise ValueError(f"Invalid skill id in editorial bundles manifest: {skill_id!r}")
|
||||
@@ -225,7 +233,28 @@ def _validate_editorial_bundles(root: Path, payload: dict[str, Any]) -> list[dic
|
||||
return bundles
|
||||
|
||||
|
||||
def _render_bundle_sections(bundles: list[dict[str, Any]]) -> str:
|
||||
def _bundle_target_status(bundle: dict[str, Any], compatibility: dict[str, dict[str, Any]]) -> dict[str, Any]:
    """Aggregate per-target plugin support for one editorial bundle.

    A target counts as supported only when the bundle resolves to at least
    one known skill and every resolved skill supports that target;
    ``manual_setup`` is True when any resolved skill needs manual setup.
    """
    resolved: list[dict[str, Any]] = []
    for entry in bundle["skills"]:
        record = compatibility.get(entry["id"])
        if record is not None:
            resolved.append(record)

    def _all_supported(target: str) -> bool:
        return bool(resolved) and all(r["targets"][target] == "supported" for r in resolved)

    return {
        "codex": _all_supported("codex"),
        "claude": _all_supported("claude"),
        "manual_setup": any(r["setup"]["type"] == "manual" for r in resolved),
    }
|
||||
|
||||
|
||||
def _render_bundle_plugin_status(bundle_status: dict[str, Any]) -> str:
    """Format a bundle's plugin-support flags as a human-readable status line."""
    labels = [
        "Codex plugin-safe" if bundle_status["codex"] else "Codex pending hardening",
        "Claude plugin-safe" if bundle_status["claude"] else "Claude pending hardening",
    ]
    if bundle_status["manual_setup"]:
        labels.append("Requires manual setup")
    return " · ".join(labels)
|
||||
|
||||
|
||||
def _render_bundle_sections(
|
||||
bundles: list[dict[str, Any]],
|
||||
compatibility: dict[str, dict[str, Any]],
|
||||
) -> str:
|
||||
lines: list[str] = []
|
||||
current_group: str | None = None
|
||||
|
||||
@@ -238,23 +267,34 @@ def _render_bundle_sections(bundles: list[dict[str, Any]]) -> str:
|
||||
lines.append("")
|
||||
current_group = group
|
||||
|
||||
bundle_status = _bundle_target_status(bundle, compatibility)
|
||||
lines.append(f'### {bundle["emoji"]} {bundle["tagline"]}')
|
||||
lines.append("")
|
||||
lines.append(f'_{bundle["audience"]}_')
|
||||
lines.append("")
|
||||
lines.append(f'**Plugin status:** {_render_bundle_plugin_status(bundle_status)}')
|
||||
lines.append("")
|
||||
for skill in bundle["skills"]:
|
||||
skill_status = compatibility.get(skill["id"], {})
|
||||
plugin_info = skill_status.get("setup", {}) if isinstance(skill_status, dict) else {}
|
||||
suffix = " _(manual setup)_" if plugin_info.get("type") == "manual" else ""
|
||||
lines.append(
|
||||
f'- [`{skill["id"]}`](../../skills/{skill["id"]}/): {skill["summary"]}'
|
||||
f'- [`{skill["id"]}`](../../skills/{skill["id"]}/): {skill["summary"]}{suffix}'
|
||||
)
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines).strip() + "\n"
|
||||
|
||||
|
||||
def render_bundles_doc(root: Path, metadata: dict[str, Any], bundles: list[dict[str, Any]]) -> str:
|
||||
def render_bundles_doc(
|
||||
root: Path,
|
||||
metadata: dict[str, Any],
|
||||
bundles: list[dict[str, Any]],
|
||||
compatibility: dict[str, dict[str, Any]],
|
||||
) -> str:
|
||||
template = (root / EDITORIAL_TEMPLATE_PATH).read_text(encoding="utf-8")
|
||||
return (
|
||||
template.replace("{{bundle_sections}}", _render_bundle_sections(bundles).rstrip())
|
||||
template.replace("{{bundle_sections}}", _render_bundle_sections(bundles, compatibility).rstrip())
|
||||
.replace("{{total_skills_label}}", metadata["total_skills_label"])
|
||||
.replace("{{bundle_count}}", str(len(bundles)))
|
||||
)
|
||||
@@ -292,14 +332,14 @@ def _copy_skill_directory(root: Path, skill_id: str, destination_root: Path) ->
|
||||
raise ValueError(f"Copied bundle skill '{skill_id}' is missing SKILL.md in {skill_dest}")
|
||||
|
||||
|
||||
def _root_claude_plugin_manifest(metadata: dict[str, Any]) -> dict[str, Any]:
|
||||
def _root_claude_plugin_manifest(metadata: dict[str, Any], supported_skill_count: int) -> dict[str, Any]:
|
||||
supported_label = _format_count_label(supported_skill_count)
|
||||
return {
|
||||
"name": ROOT_CLAUDE_PLUGIN_NAME,
|
||||
"version": metadata["version"],
|
||||
"description": (
|
||||
f"Universal agentic skill library for Claude Code with "
|
||||
f"{metadata['total_skills_label']} reusable skills across coding, security, "
|
||||
"design, product, and operations workflows."
|
||||
f"Plugin-safe Claude Code distribution of Antigravity Awesome Skills with "
|
||||
f"{supported_label} supported skills."
|
||||
),
|
||||
"author": AUTHOR,
|
||||
"homepage": REPO_URL,
|
||||
@@ -309,17 +349,18 @@ def _root_claude_plugin_manifest(metadata: dict[str, Any]) -> dict[str, Any]:
|
||||
"claude-code",
|
||||
"skills",
|
||||
"agentic-skills",
|
||||
"ai-coding",
|
||||
"plugin-safe",
|
||||
"productivity",
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def _root_codex_plugin_manifest(metadata: dict[str, Any]) -> dict[str, Any]:
|
||||
def _root_codex_plugin_manifest(metadata: dict[str, Any], supported_skill_count: int) -> dict[str, Any]:
|
||||
supported_label = _format_count_label(supported_skill_count)
|
||||
return {
|
||||
"name": ROOT_CODEX_PLUGIN_NAME,
|
||||
"version": metadata["version"],
|
||||
"description": "Repository-backed Codex plugin for the Antigravity Awesome Skills library.",
|
||||
"description": "Plugin-safe Codex plugin for the Antigravity Awesome Skills library.",
|
||||
"author": AUTHOR,
|
||||
"homepage": REPO_URL,
|
||||
"repository": REPO_URL,
|
||||
@@ -329,18 +370,18 @@ def _root_codex_plugin_manifest(metadata: dict[str, Any]) -> dict[str, Any]:
|
||||
"skills",
|
||||
"agentic-skills",
|
||||
"developer-tools",
|
||||
"productivity",
|
||||
"plugin-safe",
|
||||
],
|
||||
"skills": "./skills/",
|
||||
"interface": {
|
||||
"displayName": "Antigravity Awesome Skills",
|
||||
"shortDescription": (
|
||||
f'{metadata["total_skills_label"]} reusable skills for coding, security, '
|
||||
"product, and ops workflows."
|
||||
f"{supported_label} plugin-safe skills for coding, security, product, and ops workflows."
|
||||
),
|
||||
"longDescription": (
|
||||
"Install the Antigravity Awesome Skills catalog as a Codex plugin and expose "
|
||||
"the repository's curated skills library through a single marketplace entry."
|
||||
"Install a plugin-safe Codex distribution of Antigravity Awesome Skills. "
|
||||
"Skills that still need hardening or target-specific setup remain available in the repo "
|
||||
"but are excluded from this plugin."
|
||||
),
|
||||
"developerName": AUTHOR["name"],
|
||||
"category": "Productivity",
|
||||
@@ -412,46 +453,6 @@ def _bundle_codex_plugin_manifest(metadata: dict[str, Any], bundle: dict[str, An
|
||||
}
|
||||
|
||||
|
||||
def _root_codex_plugin_manifest(metadata: dict[str, Any]) -> dict[str, Any]:
|
||||
return {
|
||||
"name": ROOT_CODEX_PLUGIN_NAME,
|
||||
"version": metadata["version"],
|
||||
"description": "Repository-backed Codex plugin for the Antigravity Awesome Skills library.",
|
||||
"author": AUTHOR,
|
||||
"homepage": REPO_URL,
|
||||
"repository": REPO_URL,
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"codex",
|
||||
"skills",
|
||||
"agentic-skills",
|
||||
"developer-tools",
|
||||
"productivity",
|
||||
],
|
||||
"skills": "./skills/",
|
||||
"interface": {
|
||||
"displayName": "Antigravity Awesome Skills",
|
||||
"shortDescription": (
|
||||
f'{metadata["total_skills_label"]} reusable skills for coding, security, product, and ops workflows.'
|
||||
),
|
||||
"longDescription": (
|
||||
"Install the Antigravity Awesome Skills catalog as a Codex plugin and expose "
|
||||
"the repository's curated skills library through a single marketplace entry."
|
||||
),
|
||||
"developerName": AUTHOR["name"],
|
||||
"category": "Productivity",
|
||||
"capabilities": ["Interactive", "Write"],
|
||||
"websiteURL": REPO_URL,
|
||||
"defaultPrompt": [
|
||||
"Use @brainstorming to plan a new feature.",
|
||||
"Use @test-driven-development to fix a bug safely.",
|
||||
"Use @lint-and-validate to verify this branch.",
|
||||
],
|
||||
"brandColor": "#111827",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _bundle_claude_marketplace_entry(metadata: dict[str, Any], bundle: dict[str, Any]) -> dict[str, Any]:
|
||||
plugin_name = _bundle_plugin_name(bundle["id"])
|
||||
return {
|
||||
@@ -475,14 +476,18 @@ def _bundle_claude_marketplace_entry(metadata: dict[str, Any], bundle: dict[str,
|
||||
}
|
||||
|
||||
|
||||
def _render_claude_marketplace(metadata: dict[str, Any], bundles: list[dict[str, Any]]) -> dict[str, Any]:
|
||||
def _render_claude_marketplace(
|
||||
metadata: dict[str, Any],
|
||||
bundles: list[dict[str, Any]],
|
||||
bundle_support: dict[str, dict[str, Any]],
|
||||
) -> dict[str, Any]:
|
||||
plugins = [
|
||||
{
|
||||
"name": ROOT_CLAUDE_PLUGIN_NAME,
|
||||
"version": metadata["version"],
|
||||
"description": (
|
||||
"Expose the full repository `skills/` tree to Claude Code through a "
|
||||
"single marketplace entry."
|
||||
"Expose the plugin-safe Claude Code subset of Antigravity Awesome Skills "
|
||||
"through a single marketplace entry."
|
||||
),
|
||||
"author": AUTHOR,
|
||||
"homepage": REPO_URL,
|
||||
@@ -495,17 +500,21 @@ def _render_claude_marketplace(metadata: dict[str, Any], bundles: list[dict[str,
|
||||
"plugin",
|
||||
"marketplace",
|
||||
],
|
||||
"source": "./",
|
||||
"source": f"./plugins/{ROOT_CLAUDE_PLUGIN_DIRNAME}",
|
||||
}
|
||||
]
|
||||
plugins.extend(_bundle_claude_marketplace_entry(metadata, bundle) for bundle in bundles)
|
||||
plugins.extend(
|
||||
_bundle_claude_marketplace_entry(metadata, bundle)
|
||||
for bundle in bundles
|
||||
if bundle_support[bundle["id"]]["claude"]
|
||||
)
|
||||
return {
|
||||
"name": ROOT_CLAUDE_PLUGIN_NAME,
|
||||
"owner": AUTHOR,
|
||||
"metadata": {
|
||||
"description": (
|
||||
"Claude Code marketplace entries for the full Antigravity Awesome Skills "
|
||||
"library and its editorial bundles."
|
||||
"Claude Code marketplace entries for the plugin-safe Antigravity Awesome Skills "
|
||||
"library and its compatible editorial bundles."
|
||||
),
|
||||
"version": metadata["version"],
|
||||
},
|
||||
@@ -513,7 +522,10 @@ def _render_claude_marketplace(metadata: dict[str, Any], bundles: list[dict[str,
|
||||
}
|
||||
|
||||
|
||||
def _render_codex_marketplace(bundles: list[dict[str, Any]]) -> dict[str, Any]:
|
||||
def _render_codex_marketplace(
|
||||
bundles: list[dict[str, Any]],
|
||||
bundle_support: dict[str, dict[str, Any]],
|
||||
) -> dict[str, Any]:
|
||||
plugins: list[dict[str, Any]] = [
|
||||
{
|
||||
"name": ROOT_CODEX_PLUGIN_NAME,
|
||||
@@ -530,6 +542,8 @@ def _render_codex_marketplace(bundles: list[dict[str, Any]]) -> dict[str, Any]:
|
||||
]
|
||||
|
||||
for bundle in bundles:
|
||||
if not bundle_support[bundle["id"]]["codex"]:
|
||||
continue
|
||||
plugins.append(
|
||||
{
|
||||
"name": _bundle_plugin_name(bundle["id"]),
|
||||
@@ -554,7 +568,63 @@ def _render_codex_marketplace(bundles: list[dict[str, Any]]) -> dict[str, Any]:
|
||||
}
|
||||
|
||||
|
||||
def _sync_bundle_plugin_directory(root: Path, metadata: dict[str, Any], bundle: dict[str, Any]) -> None:
|
||||
def _materialize_plugin_skills(root: Path, destination_root: Path, skill_ids: list[str]) -> None:
    """Rebuild *destination_root* with fresh copies of the given skills.

    Any existing symlink, file, or directory at the destination is removed
    first so stale skills from a previous sync cannot linger.
    """
    # A symlink or plain file must be unlinked, not rmtree'd.
    if destination_root.is_symlink() or destination_root.is_file():
        destination_root.unlink()
    elif destination_root.exists():
        shutil.rmtree(destination_root)
    destination_root.mkdir(parents=True, exist_ok=True)

    for skill_id in skill_ids:
        _copy_skill_directory(root, skill_id, destination_root)
|
||||
|
||||
|
||||
def _supported_skill_ids(
    compatibility: dict[str, dict[str, Any]],
    target: str,
) -> list[str]:
    """Return the sorted ids of skills marked "supported" for *target*."""
    supported = [
        skill_id
        for skill_id, record in compatibility.items()
        if record["targets"][target] == "supported"
    ]
    supported.sort()
    return supported
|
||||
|
||||
|
||||
def _sync_root_plugins(
    root: Path,
    metadata: dict[str, Any],
    compatibility: dict[str, dict[str, Any]],
) -> None:
    """Materialize the root Codex/Claude plugin trees from supported skills.

    Copies each target's supported skills into its plugin directory and
    writes the corresponding plugin.json manifests (the Claude manifest is
    also mirrored to the repo-root CLAUDE_PLUGIN_PATH).
    """
    codex_skill_ids = _supported_skill_ids(compatibility, "codex")
    claude_skill_ids = _supported_skill_ids(compatibility, "claude")

    codex_root = root / "plugins" / ROOT_CODEX_PLUGIN_NAME
    claude_root = root / "plugins" / ROOT_CLAUDE_PLUGIN_DIRNAME

    # Rebuild both skills trees from scratch so nothing stale remains.
    _materialize_plugin_skills(root, codex_root / "skills", codex_skill_ids)
    _materialize_plugin_skills(root, claude_root / "skills", claude_skill_ids)

    _write_json(
        codex_root / ".codex-plugin" / "plugin.json",
        _root_codex_plugin_manifest(metadata, len(codex_skill_ids)),
    )
    claude_manifest = _root_claude_plugin_manifest(metadata, len(claude_skill_ids))
    _write_json(
        claude_root / ".claude-plugin" / "plugin.json",
        claude_manifest,
    )
    # Keep the repo-root Claude plugin manifest identical to the plugin dir's.
    _write_json(root / CLAUDE_PLUGIN_PATH, claude_manifest)
|
||||
|
||||
|
||||
def _sync_bundle_plugin_directory(
|
||||
root: Path,
|
||||
metadata: dict[str, Any],
|
||||
bundle: dict[str, Any],
|
||||
support: dict[str, Any],
|
||||
) -> None:
|
||||
if not support["codex"] and not support["claude"]:
|
||||
return
|
||||
|
||||
plugin_name = _bundle_plugin_name(bundle["id"])
|
||||
plugin_root = root / "plugins" / plugin_name
|
||||
if plugin_root.exists():
|
||||
@@ -566,24 +636,31 @@ def _sync_bundle_plugin_directory(root: Path, metadata: dict[str, Any], bundle:
|
||||
for skill in bundle["skills"]:
|
||||
_copy_skill_directory(root, skill["id"], bundle_skills_root)
|
||||
|
||||
_write_json(
|
||||
plugin_root / ".claude-plugin" / "plugin.json",
|
||||
_bundle_claude_plugin_manifest(metadata, bundle),
|
||||
)
|
||||
_write_json(
|
||||
plugin_root / ".codex-plugin" / "plugin.json",
|
||||
_bundle_codex_plugin_manifest(metadata, bundle),
|
||||
)
|
||||
if support["claude"]:
|
||||
_write_json(
|
||||
plugin_root / ".claude-plugin" / "plugin.json",
|
||||
_bundle_claude_plugin_manifest(metadata, bundle),
|
||||
)
|
||||
if support["codex"]:
|
||||
_write_json(
|
||||
plugin_root / ".codex-plugin" / "plugin.json",
|
||||
_bundle_codex_plugin_manifest(metadata, bundle),
|
||||
)
|
||||
|
||||
|
||||
def sync_editorial_bundle_plugins(root: Path, metadata: dict[str, Any], bundles: list[dict[str, Any]]) -> None:
|
||||
def sync_editorial_bundle_plugins(
|
||||
root: Path,
|
||||
metadata: dict[str, Any],
|
||||
bundles: list[dict[str, Any]],
|
||||
bundle_support: dict[str, dict[str, Any]],
|
||||
) -> None:
|
||||
plugins_root = root / "plugins"
|
||||
for candidate in plugins_root.glob("antigravity-bundle-*"):
|
||||
if candidate.is_dir():
|
||||
shutil.rmtree(candidate)
|
||||
|
||||
for bundle in bundles:
|
||||
_sync_bundle_plugin_directory(root, metadata, bundle)
|
||||
_sync_bundle_plugin_directory(root, metadata, bundle, bundle_support[bundle["id"]])
|
||||
|
||||
|
||||
def load_editorial_bundles(root: Path) -> list[dict[str, Any]]:
|
||||
@@ -594,14 +671,35 @@ def load_editorial_bundles(root: Path) -> list[dict[str, Any]]:
|
||||
|
||||
def sync_editorial_bundles(root: Path) -> None:
|
||||
metadata = load_metadata(str(root))
|
||||
compatibility_report = sync_plugin_compatibility(root)
|
||||
compatibility = compatibility_by_skill_id(compatibility_report)
|
||||
bundles = load_editorial_bundles(root)
|
||||
bundle_support = {
|
||||
bundle["id"]: _bundle_target_status(bundle, compatibility)
|
||||
for bundle in bundles
|
||||
}
|
||||
|
||||
_write_text(root / "docs" / "users" / "bundles.md", render_bundles_doc(root, metadata, bundles))
|
||||
_write_json(root / CLAUDE_PLUGIN_PATH, _root_claude_plugin_manifest(metadata))
|
||||
_write_json(root / CLAUDE_MARKETPLACE_PATH, _render_claude_marketplace(metadata, bundles))
|
||||
_write_json(root / CODEX_ROOT_PLUGIN_PATH, _root_codex_plugin_manifest(metadata))
|
||||
_write_json(root / CODEX_MARKETPLACE_PATH, _render_codex_marketplace(bundles))
|
||||
sync_editorial_bundle_plugins(root, metadata, bundles)
|
||||
_write_text(
|
||||
root / "docs" / "users" / "bundles.md",
|
||||
render_bundles_doc(root, metadata, bundles, compatibility),
|
||||
)
|
||||
_sync_root_plugins(root, metadata, compatibility)
|
||||
_write_json(
|
||||
root / CLAUDE_MARKETPLACE_PATH,
|
||||
_render_claude_marketplace(metadata, bundles, bundle_support),
|
||||
)
|
||||
_write_json(
|
||||
root / CODEX_MARKETPLACE_PATH,
|
||||
_render_codex_marketplace(bundles, bundle_support),
|
||||
)
|
||||
_write_json(
|
||||
root / CODEX_ROOT_PLUGIN_PATH,
|
||||
_root_codex_plugin_manifest(
|
||||
metadata,
|
||||
len(_supported_skill_ids(compatibility, "codex")),
|
||||
),
|
||||
)
|
||||
sync_editorial_bundle_plugins(root, metadata, bundles, bundle_support)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
@@ -619,8 +717,10 @@ def main() -> int:
|
||||
root = find_repo_root(__file__)
|
||||
if args.check:
|
||||
metadata = load_metadata(str(root))
|
||||
compatibility_report = build_plugin_compatibility_report(root / "skills")
|
||||
compatibility = compatibility_by_skill_id(compatibility_report)
|
||||
bundles = load_editorial_bundles(root)
|
||||
expected_doc = render_bundles_doc(root, metadata, bundles)
|
||||
expected_doc = render_bundles_doc(root, metadata, bundles, compatibility)
|
||||
current_doc = (root / "docs" / "users" / "bundles.md").read_text(encoding="utf-8")
|
||||
if current_doc != expected_doc:
|
||||
raise SystemExit("docs/users/bundles.md is out of sync with data/editorial-bundles.json")
|
||||
|
||||
@@ -7,6 +7,7 @@ import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from plugin_compatibility import compatibility_by_skill_id, load_plugin_compatibility
|
||||
from sync_editorial_bundles import load_editorial_bundles, render_bundles_doc
|
||||
from update_readme import configure_utf8_output, find_repo_root, load_metadata, update_readme
|
||||
|
||||
@@ -188,7 +189,8 @@ def sync_bundles_doc(content: str, metadata: dict, base_dir: str | Path | None =
|
||||
template_path = root / "tools" / "templates" / "editorial-bundles.md.tmpl"
|
||||
if manifest_path.is_file() and template_path.is_file():
|
||||
bundles = load_editorial_bundles(root)
|
||||
return render_bundles_doc(root, metadata, bundles)
|
||||
compatibility = compatibility_by_skill_id(load_plugin_compatibility(root))
|
||||
return render_bundles_doc(root, metadata, bundles, compatibility)
|
||||
|
||||
bundle_count = count_documented_bundles(content)
|
||||
if bundle_count == 0:
|
||||
|
||||
@@ -6,8 +6,11 @@ const { findProjectRoot } = require("../../lib/project-root");
|
||||
const projectRoot = findProjectRoot(__dirname);
|
||||
const marketplacePath = path.join(projectRoot, ".claude-plugin", "marketplace.json");
|
||||
const editorialBundlesPath = path.join(projectRoot, "data", "editorial-bundles.json");
|
||||
const compatibilityPath = path.join(projectRoot, "data", "plugin-compatibility.json");
|
||||
const marketplace = JSON.parse(fs.readFileSync(marketplacePath, "utf8"));
|
||||
const editorialBundles = JSON.parse(fs.readFileSync(editorialBundlesPath, "utf8")).bundles || [];
|
||||
const compatibility = JSON.parse(fs.readFileSync(compatibilityPath, "utf8")).skills || [];
|
||||
const compatibilityById = new Map(compatibility.map((skill) => [skill.id, skill]));
|
||||
|
||||
assert.ok(Array.isArray(marketplace.plugins), "marketplace.json must define a plugins array");
|
||||
assert.ok(marketplace.plugins.length > 0, "marketplace.json must contain at least one plugin");
|
||||
@@ -16,8 +19,15 @@ assert.strictEqual(
|
||||
"antigravity-awesome-skills",
|
||||
"full library Claude plugin should remain the first marketplace entry",
|
||||
);
|
||||
assert.strictEqual(
|
||||
marketplace.plugins[0]?.source,
|
||||
"./plugins/antigravity-awesome-skills-claude",
|
||||
"full library Claude plugin should resolve to the filtered plugin directory",
|
||||
);
|
||||
|
||||
const expectedBundlePluginNames = editorialBundles.map((bundle) => `antigravity-bundle-${bundle.id}`);
|
||||
const expectedBundlePluginNames = editorialBundles
|
||||
.filter((bundle) => bundle.skills.every((skill) => compatibilityById.get(skill.id)?.targets?.claude === "supported"))
|
||||
.map((bundle) => `antigravity-bundle-${bundle.id}`);
|
||||
for (const pluginName of expectedBundlePluginNames) {
|
||||
assert.ok(
|
||||
marketplace.plugins.some((plugin) => plugin.name === pluginName),
|
||||
@@ -25,6 +35,29 @@ for (const pluginName of expectedBundlePluginNames) {
|
||||
);
|
||||
}
|
||||
|
||||
for (const bundle of editorialBundles) {
|
||||
const pluginName = `antigravity-bundle-${bundle.id}`;
|
||||
const included = marketplace.plugins.some((plugin) => plugin.name === pluginName);
|
||||
const claudeSupported = bundle.skills.every(
|
||||
(skill) => compatibilityById.get(skill.id)?.targets?.claude === "supported",
|
||||
);
|
||||
assert.strictEqual(
|
||||
included,
|
||||
claudeSupported,
|
||||
`bundle plugin ${pluginName} inclusion should match Claude compatibility`,
|
||||
);
|
||||
}
|
||||
|
||||
const pluginRoot = path.join(projectRoot, "plugins", "antigravity-awesome-skills-claude", "skills");
|
||||
for (const skill of compatibility) {
|
||||
const copiedPath = path.join(pluginRoot, ...skill.id.split("/"));
|
||||
if (skill.targets.claude === "supported") {
|
||||
assert.ok(fs.existsSync(copiedPath), `Claude root plugin should include supported skill ${skill.id}`);
|
||||
} else {
|
||||
assert.ok(!fs.existsSync(copiedPath), `Claude root plugin should exclude blocked skill ${skill.id}`);
|
||||
}
|
||||
}
|
||||
|
||||
for (const plugin of marketplace.plugins) {
|
||||
assert.strictEqual(
|
||||
typeof plugin.source,
|
||||
|
||||
@@ -6,8 +6,11 @@ const { findProjectRoot } = require("../../lib/project-root");
|
||||
const projectRoot = findProjectRoot(__dirname);
|
||||
const marketplacePath = path.join(projectRoot, ".agents", "plugins", "marketplace.json");
|
||||
const editorialBundlesPath = path.join(projectRoot, "data", "editorial-bundles.json");
|
||||
const compatibilityPath = path.join(projectRoot, "data", "plugin-compatibility.json");
|
||||
const marketplace = JSON.parse(fs.readFileSync(marketplacePath, "utf8"));
|
||||
const editorialBundles = JSON.parse(fs.readFileSync(editorialBundlesPath, "utf8")).bundles || [];
|
||||
const compatibility = JSON.parse(fs.readFileSync(compatibilityPath, "utf8")).skills || [];
|
||||
const compatibilityById = new Map(compatibility.map((skill) => [skill.id, skill]));
|
||||
|
||||
assert.strictEqual(
|
||||
marketplace.name,
|
||||
@@ -64,10 +67,27 @@ assert.strictEqual(pluginManifest.skills, "./skills/");
|
||||
const pluginSkillsPath = path.join(pluginRoot, "skills");
|
||||
assert.ok(fs.existsSync(pluginSkillsPath), "Codex plugin skills path must exist");
|
||||
assert.ok(fs.statSync(pluginSkillsPath).isDirectory(), "Codex plugin skills path must be a directory");
|
||||
for (const skill of compatibility) {
|
||||
const copiedPath = path.join(pluginSkillsPath, ...skill.id.split("/"));
|
||||
if (skill.targets.codex === "supported") {
|
||||
assert.ok(fs.existsSync(copiedPath), `Codex root plugin should include supported skill ${skill.id}`);
|
||||
} else {
|
||||
assert.ok(!fs.existsSync(copiedPath), `Codex root plugin should exclude blocked skill ${skill.id}`);
|
||||
}
|
||||
}
|
||||
|
||||
for (const bundle of editorialBundles) {
|
||||
const bundlePluginName = `antigravity-bundle-${bundle.id}`;
|
||||
const bundleEntry = marketplace.plugins.find((plugin) => plugin.name === bundlePluginName);
|
||||
const codexSupported = bundle.skills.every(
|
||||
(skill) => compatibilityById.get(skill.id)?.targets?.codex === "supported",
|
||||
);
|
||||
|
||||
if (!codexSupported) {
|
||||
assert.ok(!bundleEntry, `marketplace.json must exclude incompatible bundle plugin ${bundlePluginName}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
assert.ok(bundleEntry, `marketplace.json must include bundle plugin ${bundlePluginName}`);
|
||||
assert.deepStrictEqual(
|
||||
bundleEntry.source,
|
||||
|
||||
81
tools/scripts/tests/plugin_directories.test.js
Normal file
81
tools/scripts/tests/plugin_directories.test.js
Normal file
@@ -0,0 +1,81 @@
|
||||
const assert = require("assert");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const { findProjectRoot } = require("../../lib/project-root");
|
||||
|
||||
const projectRoot = findProjectRoot(__dirname);
|
||||
const pluginsRoot = path.join(projectRoot, "plugins");
|
||||
const claudeMarketplace = JSON.parse(
|
||||
fs.readFileSync(path.join(projectRoot, ".claude-plugin", "marketplace.json"), "utf8"),
|
||||
);
|
||||
const codexMarketplace = JSON.parse(
|
||||
fs.readFileSync(path.join(projectRoot, ".agents", "plugins", "marketplace.json"), "utf8"),
|
||||
);
|
||||
|
||||
const claudePluginPaths = new Set(
|
||||
claudeMarketplace.plugins.map((plugin) => plugin.source.replace(/^\.\//, "")),
|
||||
);
|
||||
const codexPluginPaths = new Set(
|
||||
codexMarketplace.plugins.map((plugin) => plugin.source.path.replace(/^\.\//, "")),
|
||||
);
|
||||
const knownPluginPaths = new Set([...claudePluginPaths, ...codexPluginPaths]);
|
||||
|
||||
for (const relativePluginPath of knownPluginPaths) {
|
||||
const pluginDir = path.join(projectRoot, relativePluginPath);
|
||||
assert.ok(fs.existsSync(pluginDir), `plugin directory must exist: ${relativePluginPath}`);
|
||||
assert.ok(fs.statSync(pluginDir).isDirectory(), `plugin path must be a directory: ${relativePluginPath}`);
|
||||
|
||||
const skillsDir = path.join(pluginDir, "skills");
|
||||
assert.ok(fs.existsSync(skillsDir), `plugin skills dir must exist: ${relativePluginPath}`);
|
||||
assert.ok(fs.statSync(skillsDir).isDirectory(), `plugin skills dir must be a directory: ${relativePluginPath}`);
|
||||
|
||||
const skillMarkdownFiles = [];
|
||||
const stack = [skillsDir];
|
||||
while (stack.length > 0) {
|
||||
const currentDir = stack.pop();
|
||||
for (const child of fs.readdirSync(currentDir, { withFileTypes: true })) {
|
||||
const childPath = path.join(currentDir, child.name);
|
||||
if (child.isDirectory()) {
|
||||
stack.push(childPath);
|
||||
} else if (child.isFile() && child.name === "SKILL.md") {
|
||||
skillMarkdownFiles.push(childPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
assert.ok(skillMarkdownFiles.length > 0, `plugin must contain at least one skill: ${relativePluginPath}`);
|
||||
|
||||
const codexManifestPath = path.join(pluginDir, ".codex-plugin", "plugin.json");
|
||||
if (fs.existsSync(codexManifestPath)) {
|
||||
const codexManifest = JSON.parse(fs.readFileSync(codexManifestPath, "utf8"));
|
||||
assert.strictEqual(codexManifest.skills, "./skills/");
|
||||
assert.ok(
|
||||
codexMarketplace.plugins.some((plugin) => plugin.name === codexManifest.name),
|
||||
`Codex marketplace should expose ${codexManifest.name}`,
|
||||
);
|
||||
}
|
||||
|
||||
const claudeManifestPath = path.join(pluginDir, ".claude-plugin", "plugin.json");
|
||||
if (fs.existsSync(claudeManifestPath)) {
|
||||
const claudeManifest = JSON.parse(fs.readFileSync(claudeManifestPath, "utf8"));
|
||||
assert.ok(
|
||||
claudeMarketplace.plugins.some((plugin) => plugin.name === claudeManifest.name),
|
||||
`Claude marketplace should expose ${claudeManifest.name}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for (const entry of fs.readdirSync(pluginsRoot, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const relativePluginPath = path.join("plugins", entry.name);
|
||||
if (entry.name.startsWith("antigravity-bundle-") || entry.name === "antigravity-awesome-skills" || entry.name === "antigravity-awesome-skills-claude") {
|
||||
assert.ok(
|
||||
knownPluginPaths.has(relativePluginPath),
|
||||
`generated plugin directory should be represented in a marketplace: ${relativePluginPath}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
console.log("ok");
|
||||
@@ -15,7 +15,9 @@ const LOCAL_TEST_COMMANDS = [
|
||||
[path.join(TOOL_TESTS, "build_catalog_bundles.test.js")],
|
||||
[path.join(TOOL_TESTS, "claude_plugin_marketplace.test.js")],
|
||||
[path.join(TOOL_TESTS, "codex_plugin_marketplace.test.js")],
|
||||
[path.join(TOOL_TESTS, "plugin_directories.test.js")],
|
||||
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_editorial_bundles.py")],
|
||||
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_plugin_compatibility.py")],
|
||||
[path.join(TOOL_TESTS, "installer_antigravity_guidance.test.js")],
|
||||
[path.join(TOOL_TESTS, "jetski_gemini_loader.test.cjs")],
|
||||
[path.join(TOOL_TESTS, "npm_package_contents.test.js")],
|
||||
|
||||
@@ -20,6 +20,10 @@ editorial_bundles = load_module(
|
||||
TOOLS_SCRIPTS / "sync_editorial_bundles.py",
|
||||
"sync_editorial_bundles",
|
||||
)
|
||||
plugin_compatibility = load_module(
|
||||
TOOLS_SCRIPTS / "plugin_compatibility.py",
|
||||
"plugin_compatibility_json",
|
||||
)
|
||||
get_bundle_skills = load_module(
|
||||
TOOLS_SCRIPTS / "get-bundle-skills.py",
|
||||
"get_bundle_skills_json",
|
||||
@@ -29,6 +33,8 @@ get_bundle_skills = load_module(
|
||||
class EditorialBundlesTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.manifest_bundles = editorial_bundles.load_editorial_bundles(REPO_ROOT)
|
||||
self.compatibility_report = plugin_compatibility.load_plugin_compatibility(REPO_ROOT)
|
||||
self.compatibility_by_id = plugin_compatibility.compatibility_by_skill_id(self.compatibility_report)
|
||||
|
||||
def test_manifest_has_unique_ids_and_existing_skills(self):
|
||||
bundle_ids = [bundle["id"] for bundle in self.manifest_bundles]
|
||||
@@ -42,7 +48,12 @@ class EditorialBundlesTests(unittest.TestCase):
|
||||
|
||||
def test_bundles_doc_matches_renderer(self):
|
||||
metadata = editorial_bundles.load_metadata(str(REPO_ROOT))
|
||||
expected = editorial_bundles.render_bundles_doc(REPO_ROOT, metadata, self.manifest_bundles)
|
||||
expected = editorial_bundles.render_bundles_doc(
|
||||
REPO_ROOT,
|
||||
metadata,
|
||||
self.manifest_bundles,
|
||||
self.compatibility_by_id,
|
||||
)
|
||||
actual = (REPO_ROOT / "docs" / "users" / "bundles.md").read_text(encoding="utf-8")
|
||||
self.assertEqual(actual, expected)
|
||||
|
||||
@@ -74,8 +85,22 @@ class EditorialBundlesTests(unittest.TestCase):
|
||||
self.assertTrue((sample_skill_dir / "SKILL.md").is_file())
|
||||
|
||||
def test_generated_plugin_count_matches_manifest(self):
|
||||
generated_plugins = sorted(path.name for path in (REPO_ROOT / "plugins").iterdir() if path.is_dir() and path.name.startswith("antigravity-bundle-"))
|
||||
expected_plugins = sorted(f'antigravity-bundle-{bundle["id"]}' for bundle in self.manifest_bundles)
|
||||
generated_plugins = sorted(
|
||||
path.name
|
||||
for path in (REPO_ROOT / "plugins").iterdir()
|
||||
if path.is_dir() and path.name.startswith("antigravity-bundle-")
|
||||
)
|
||||
expected_plugins = sorted(
|
||||
f'antigravity-bundle-{bundle["id"]}'
|
||||
for bundle in self.manifest_bundles
|
||||
if any(
|
||||
all(
|
||||
self.compatibility_by_id[skill["id"]]["targets"][target] == "supported"
|
||||
for skill in bundle["skills"]
|
||||
)
|
||||
for target in ("codex", "claude")
|
||||
)
|
||||
)
|
||||
self.assertEqual(generated_plugins, expected_plugins)
|
||||
|
||||
def test_sample_bundle_copy_matches_source_file_inventory(self):
|
||||
@@ -99,6 +124,24 @@ class EditorialBundlesTests(unittest.TestCase):
|
||||
)
|
||||
self.assertEqual(copied_files, source_files, f'copied bundle skill should match source inventory for {skill["id"]}')
|
||||
|
||||
def test_root_plugins_only_include_supported_skills_for_target(self):
|
||||
codex_root = REPO_ROOT / "plugins" / "antigravity-awesome-skills" / "skills"
|
||||
claude_root = REPO_ROOT / "plugins" / "antigravity-awesome-skills-claude" / "skills"
|
||||
|
||||
for skill_id, compatibility in self.compatibility_by_id.items():
|
||||
codex_path = codex_root / skill_id
|
||||
claude_path = claude_root / skill_id
|
||||
self.assertEqual(
|
||||
codex_path.exists(),
|
||||
compatibility["targets"]["codex"] == "supported",
|
||||
f"Codex root plugin inclusion mismatch for {skill_id}",
|
||||
)
|
||||
self.assertEqual(
|
||||
claude_path.exists(),
|
||||
compatibility["targets"]["claude"] == "supported",
|
||||
f"Claude root plugin inclusion mismatch for {skill_id}",
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
@@ -45,6 +45,8 @@ class GenerateIndexSecurityTests(unittest.TestCase):
|
||||
skills = generate_index.generate_index(str(skills_dir), str(output_file))
|
||||
|
||||
self.assertEqual([skill["id"] for skill in skills], ["safe-skill"])
|
||||
self.assertIn("plugin", skills[0])
|
||||
self.assertEqual(skills[0]["plugin"]["targets"]["codex"], "supported")
|
||||
written = json.loads(output_file.read_text(encoding="utf-8"))
|
||||
self.assertEqual([skill["id"] for skill in written], ["safe-skill"])
|
||||
|
||||
|
||||
123
tools/scripts/tests/test_plugin_compatibility.py
Normal file
123
tools/scripts/tests/test_plugin_compatibility.py
Normal file
@@ -0,0 +1,123 @@
|
||||
import importlib.util
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
|
||||
REPO_ROOT = pathlib.Path(__file__).resolve().parents[3]
|
||||
TOOLS_SCRIPTS = REPO_ROOT / "tools" / "scripts"
|
||||
if str(TOOLS_SCRIPTS) not in sys.path:
|
||||
sys.path.insert(0, str(TOOLS_SCRIPTS))
|
||||
|
||||
|
||||
def load_module(module_path: pathlib.Path, module_name: str):
|
||||
spec = importlib.util.spec_from_file_location(module_name, module_path)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
return module
|
||||
|
||||
|
||||
plugin_compatibility = load_module(
|
||||
TOOLS_SCRIPTS / "plugin_compatibility.py",
|
||||
"plugin_compatibility_test",
|
||||
)
|
||||
|
||||
|
||||
class PluginCompatibilityTests(unittest.TestCase):
|
||||
def _write_skill(self, skills_dir: pathlib.Path, skill_id: str, content: str) -> pathlib.Path:
|
||||
skill_dir = skills_dir / skill_id
|
||||
skill_dir.mkdir(parents=True, exist_ok=True)
|
||||
(skill_dir / "SKILL.md").write_text(content, encoding="utf-8")
|
||||
return skill_dir
|
||||
|
||||
def test_absolute_host_paths_block_both_targets(self):
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
skills_dir = pathlib.Path(temp_dir) / "skills"
|
||||
self._write_skill(
|
||||
skills_dir,
|
||||
"absolute-path-skill",
|
||||
"---\nname: absolute-path-skill\ndescription: Example\n---\nUse /Users/tester/private/file\n",
|
||||
)
|
||||
|
||||
report = plugin_compatibility.build_report(skills_dir)
|
||||
entry = report["skills"][0]
|
||||
self.assertEqual(entry["targets"]["codex"], "blocked")
|
||||
self.assertEqual(entry["targets"]["claude"], "blocked")
|
||||
self.assertIn("absolute_host_path", entry["reasons"])
|
||||
|
||||
def test_claude_home_paths_only_block_codex(self):
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
skills_dir = pathlib.Path(temp_dir) / "skills"
|
||||
self._write_skill(
|
||||
skills_dir,
|
||||
"claude-home-skill",
|
||||
"---\nname: claude-home-skill\ndescription: Example\n---\nRead ~/.claude/projects/cache\n",
|
||||
)
|
||||
|
||||
report = plugin_compatibility.build_report(skills_dir)
|
||||
entry = report["skills"][0]
|
||||
self.assertEqual(entry["targets"]["codex"], "blocked")
|
||||
self.assertEqual(entry["targets"]["claude"], "supported")
|
||||
self.assertIn("target_specific_home_path", entry["blocked_reasons"]["codex"])
|
||||
|
||||
def test_runtime_dependency_requires_explicit_manual_setup(self):
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
skills_dir = pathlib.Path(temp_dir) / "skills"
|
||||
skill_dir = self._write_skill(
|
||||
skills_dir,
|
||||
"dependency-skill",
|
||||
"---\nname: dependency-skill\ndescription: Example\n---\nbody\n",
|
||||
)
|
||||
(skill_dir / "requirements.txt").write_text("requests\n", encoding="utf-8")
|
||||
|
||||
report = plugin_compatibility.build_report(skills_dir)
|
||||
entry = report["skills"][0]
|
||||
self.assertEqual(entry["targets"]["codex"], "blocked")
|
||||
self.assertEqual(entry["targets"]["claude"], "blocked")
|
||||
self.assertIn("undeclared_runtime_dependency", entry["reasons"])
|
||||
|
||||
def test_manual_setup_metadata_can_make_runtime_skill_supported(self):
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
skills_dir = pathlib.Path(temp_dir) / "skills"
|
||||
skill_dir = self._write_skill(
|
||||
skills_dir,
|
||||
"manual-setup-skill",
|
||||
(
|
||||
"---\n"
|
||||
"name: manual-setup-skill\n"
|
||||
"description: Example\n"
|
||||
"plugin:\n"
|
||||
" setup:\n"
|
||||
" type: manual\n"
|
||||
" summary: Run the setup command once.\n"
|
||||
" docs: SKILL.md\n"
|
||||
"---\n"
|
||||
"body\n"
|
||||
),
|
||||
)
|
||||
(skill_dir / "package.json").write_text(json.dumps({"name": "manual-setup-skill"}), encoding="utf-8")
|
||||
|
||||
report = plugin_compatibility.build_report(skills_dir)
|
||||
entry = report["skills"][0]
|
||||
self.assertEqual(entry["targets"]["codex"], "supported")
|
||||
self.assertEqual(entry["targets"]["claude"], "supported")
|
||||
self.assertEqual(entry["setup"]["type"], "manual")
|
||||
|
||||
def test_repo_sample_skills_have_expected_status(self):
|
||||
report = plugin_compatibility.build_report(REPO_ROOT / "skills")
|
||||
entries = plugin_compatibility.compatibility_by_skill_id(report)
|
||||
|
||||
for skill_id in ("project-skill-audit", "molykit", "claude-code-expert"):
|
||||
self.assertEqual(entries[skill_id]["targets"]["codex"], "blocked")
|
||||
self.assertEqual(entries[skill_id]["targets"]["claude"], "blocked")
|
||||
self.assertIn("absolute_host_path", entries[skill_id]["reasons"])
|
||||
|
||||
self.assertEqual(entries["playwright-skill"]["targets"]["codex"], "supported")
|
||||
self.assertEqual(entries["playwright-skill"]["targets"]["claude"], "supported")
|
||||
self.assertEqual(entries["playwright-skill"]["setup"]["type"], "manual")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -6,6 +6,8 @@
|
||||
|
||||
> **Important:** bundles are installable plugin subsets and activation presets, not invokable mega-skills such as `@web-wizard` or `/essentials-bundle`. Use the individual skills listed in the pack, install the bundle as a dedicated marketplace plugin, or use the activation scripts if you want only that bundle's skills active in your live Antigravity directory.
|
||||
|
||||
> **Plugin compatibility:** root plugins and bundle plugins only publish plugin-safe skills. If a bundle shows `pending hardening`, the skills still exist in the repository, but that bundle is not yet published for that target. `Requires manual setup` means the bundle is installable, but one or more included skills need an explicit setup step before first use.
|
||||
|
||||
## Quick Start
|
||||
|
||||
1. **Install the repository or bundle plugin:**
|
||||
|
||||
Reference in New Issue
Block a user