feat(repo): Add contributor sync and consistency audits

Add maintainer automation for repo-state hygiene so contributor acknowledgements, count-sensitive docs, and GitHub About metadata stay aligned through a single shared workflow.

Cover the new scripts with regression tests and wire them into the local test suite to keep future maintenance changes from drifting silently.
This commit is contained in:
sickn33
2026-03-21 10:48:00 +01:00
parent 4a8e52276a
commit 694721223c
11 changed files with 646 additions and 31 deletions

View File

@@ -0,0 +1,214 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import json
import subprocess
import sys
from pathlib import Path
from _project_paths import find_repo_root
import sync_repo_metadata
from update_readme import configure_utf8_output, load_metadata, apply_metadata
def _read_text(path: Path) -> str:
return path.read_text(encoding="utf-8")
def _package_expected_description(metadata: dict) -> str:
return (
f"{metadata['total_skills_label']} agentic skills for Claude Code, Gemini CLI, "
"Cursor, Antigravity & more. Installer CLI."
)
def _expected_readme(content: str, metadata: dict) -> str:
    """Return the README text as the sync tooling would fully rewrite it."""
    with_metadata = apply_metadata(content, metadata)
    return sync_repo_metadata.sync_readme_copy(with_metadata, metadata)
def _expected_getting_started(content: str, metadata: dict) -> str:
    """Return the getting-started doc as the sync tooling would rewrite it."""
    synced = sync_repo_metadata.sync_getting_started(content, metadata)
    return synced
def _expected_bundles(content: str, metadata: dict) -> str:
    """Return the bundles doc as the sync tooling would rewrite it."""
    synced = sync_repo_metadata.sync_bundles_doc(content, metadata)
    return synced
def _expected_regex_sync(content: str, replacements: list[tuple[str, str]]) -> str:
    """Return *content* with each (pattern, replacement) pair applied by the sync tooling."""
    synced = sync_repo_metadata.sync_regex_text(content, replacements)
    return synced
def _expected_jetski_cortex(content: str, metadata: dict) -> str:
    """Return the jetski-cortex doc as the sync tooling would rewrite it."""
    synced = sync_repo_metadata.sync_jetski_cortex(content, metadata)
    return synced
# Count-sensitive docs whose checks are pure regex rewrites.
# Each entry: (repo-relative path, [(regex pattern, replacement template)]);
# "{label}" in a template is filled with metadata["total_skills_label"].
_REGEX_DOC_CHECKS: list[tuple[str, list[tuple[str, str]]]] = [
    ("docs/users/claude-code-skills.md", [(r"\d[\d,]*\+ skills", "{label} skills")]),
    ("docs/users/gemini-cli-skills.md", [(r"\d[\d,]*\+ files", "{label} files")]),
    (
        "docs/users/usage.md",
        [
            (r"\d[\d,]*\+ skill files", "{label} skill files"),
            (r"\d[\d,]*\+ tools", "{label} tools"),
            (r"all \d[\d,]*\+ skills", "all {label} skills"),
            (r"have \d[\d,]*\+ skills installed locally", "have {label} skills installed locally"),
        ],
    ),
    (
        "docs/users/visual-guide.md",
        [
            (r"\d[\d,]*\+ skills live here", "{label} skills live here"),
            (r"\d[\d,]*\+ total", "{label} total"),
            (r"\d[\d,]*\+ SKILLS", "{label} SKILLS"),
        ],
    ),
    ("docs/users/kiro-integration.md", [(r"\d[\d,]*\+ specialized areas", "{label} specialized areas")]),
    (
        "docs/maintainers/repo-growth-seo.md",
        [
            (r"\d[\d,]*\+ agentic skills", "{label} agentic skills"),
            (r"\d[\d,]*\+ Agentic Skills", "{label} Agentic Skills"),
        ],
    ),
    (
        "docs/maintainers/skills-update-guide.md",
        [(r"All \d[\d,]*\+ skills from the skills directory", "All {label} skills from the skills directory")],
    ),
    ("docs/integrations/jetski-gemini-loader/README.md", [(r"\d[\d,]*\+ skills", "{label} skills")]),
]


def _regex_label_transform(patterns: list[tuple[str, str]]):
    """Build a transform that applies *patterns* with the live skills label filled in."""

    def transform(content: str, metadata: dict) -> str:
        label = metadata["total_skills_label"]
        replacements = [(pattern, template.format(label=label)) for pattern, template in patterns]
        return _expected_regex_sync(content, replacements)

    return transform


def find_local_consistency_issues(base_dir: str | Path) -> list[str]:
    """Audit the checked-out tree for stale count-sensitive claims.

    Compares package.json and every count-sensitive doc against the text the
    sync tooling would generate, returning one human-readable issue per
    mismatch or missing file. An empty list means everything is consistent.
    """
    root = Path(base_dir)
    metadata = load_metadata(str(root))
    issues: list[str] = []
    package_path = root / "package.json"
    if not package_path.is_file():
        # Report a missing manifest as an issue instead of crashing with
        # FileNotFoundError, matching how the doc checks below behave.
        issues.append("package.json is missing")
    elif json.loads(_read_text(package_path)).get("description") != _package_expected_description(metadata):
        issues.append("package.json description is out of sync with the live skills count")
    # Docs needing bespoke sync logic, followed by the regex-only table above.
    file_checks = [
        ("README.md", _expected_readme),
        ("docs/users/getting-started.md", _expected_getting_started),
        ("docs/users/bundles.md", _expected_bundles),
        ("docs/integrations/jetski-cortex.md", _expected_jetski_cortex),
    ]
    file_checks.extend(
        (relative_path, _regex_label_transform(patterns))
        for relative_path, patterns in _REGEX_DOC_CHECKS
    )
    for relative_path, transform in file_checks:
        path = root / relative_path
        if not path.is_file():
            issues.append(f"{relative_path} is missing")
            continue
        original = _read_text(path)
        # The doc is clean only if the sync transform would leave it unchanged.
        if original != transform(original, metadata):
            issues.append(f"{relative_path} contains stale or inconsistent generated claims")
    return issues
def find_github_about_issues(base_dir: str | Path) -> list[str]:
    """Compare the live GitHub About metadata with the expected values.

    Queries the ``gh`` CLI for the repository description, homepage URL, and
    topics, and reports one issue per field that has drifted. Raises
    ``subprocess.CalledProcessError`` when the ``gh`` invocation fails.
    """
    metadata = load_metadata(str(Path(base_dir)))
    completed = subprocess.run(
        ["gh", "repo", "view", metadata["repo"], "--json", "description,homepageUrl,repositoryTopics"],
        check=True,
        capture_output=True,
        text=True,
    )
    about = json.loads(completed.stdout)
    issues: list[str] = []
    if about.get("description") != sync_repo_metadata.build_about_description(metadata):
        issues.append("GitHub About description is out of sync")
    if about.get("homepageUrl") != sync_repo_metadata.GITHUB_HOMEPAGE_URL:
        issues.append("GitHub About homepage is out of sync")
    live_topics = sorted(
        topic["name"]
        for topic in about.get("repositoryTopics", [])
        if isinstance(topic, dict) and "name" in topic
    )
    if live_topics != sorted(sync_repo_metadata.build_about_topics()):
        issues.append("GitHub About topics are out of sync")
    return issues
def parse_args() -> argparse.Namespace:
    """Parse command-line options for the consistency audit."""
    parser = argparse.ArgumentParser(
        description="Audit repository consistency for generated claims.",
    )
    parser.add_argument(
        "--check-github-about",
        action="store_true",
        help="Also verify the live GitHub About description, homepage, and topics via gh CLI.",
    )
    options = parser.parse_args()
    return options
def main() -> int:
    """Run the local audit (plus the GitHub About audit when requested).

    Prints each detected issue and returns 1 on any problem, 0 when clean.
    """
    options = parse_args()
    repo_root = find_repo_root(__file__)
    problems = find_local_consistency_issues(repo_root)
    if options.check_github_about:
        problems += find_github_about_issues(repo_root)
    if not problems:
        print("✅ Repository consistency audit passed.")
        return 0
    for problem in problems:
        print(problem)
    return 1
if __name__ == "__main__":
    # configure_utf8_output (from update_readme) presumably prepares stdout so
    # the ✅ status marker prints cleanly — shared with the sibling scripts.
    configure_utf8_output()
    sys.exit(main())

View File

@@ -0,0 +1,25 @@
#!/usr/bin/env python3
from __future__ import annotations
import sys
from _project_paths import find_repo_root
from audit_consistency import find_local_consistency_issues
from update_readme import configure_utf8_output
def main() -> int:
    """Exit 0 when no stale doc claims are detected, 1 otherwise."""
    repo_root = find_repo_root(__file__)
    problems = find_local_consistency_issues(repo_root)
    if not problems:
        print("✅ No stale claims detected in active docs.")
        return 0
    for problem in problems:
        print(problem)
    return 1
if __name__ == "__main__":
    # configure_utf8_output (from update_readme) presumably prepares stdout so
    # the ✅ status marker prints cleanly — shared with the sibling scripts.
    configure_utf8_output()
    sys.exit(main())

View File

@@ -0,0 +1,133 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import json
import re
import subprocess
import sys
from pathlib import Path
from _project_paths import find_repo_root
from update_readme import configure_utf8_output, load_metadata
# Sentinel paragraph (including its trailing blank line) that marks where the
# acknowledgement bullet list starts inside README.md.
CONTRIBUTOR_SECTION_START = "We officially thank the following contributors for their help in making this repository awesome!\n\n"
# Logins whose profile URL cannot be derived mechanically from the login
# (GitHub Apps and their bot accounts).
SPECIAL_LINK_OVERRIDES = {
    "Copilot": "https://github.com/apps/copilot-swe-agent",
    "github-actions[bot]": "https://github.com/apps/github-actions",
    "copilot-swe-agent[bot]": "https://github.com/apps/copilot-swe-agent",
}
def parse_existing_contributor_links(content: str) -> dict[str, str]:
    """Map contributor labels to their linked URLs found in *content*.

    Scans every line for markdown bullets of the form
    ``- [@login](https://github.com/...)`` and returns ``{login: url}``.
    Lines that do not match the bullet shape are ignored.
    """
    bullet = re.compile(r"^- \[@(?P<label>.+?)\]\((?P<url>https://github\.com/.+?)\)$")
    return {
        found.group("label"): found.group("url")
        for found in (bullet.match(line.strip()) for line in content.splitlines())
        if found
    }
def parse_contributors_response(payload: list[dict]) -> list[str]:
    """Extract unique contributor logins from a GitHub contributors payload.

    Keeps only non-empty string ``login`` values, deduplicating while
    preserving the order of first appearance.
    """
    ordered: dict[str, None] = {}
    for entry in payload:
        login = entry.get("login")
        if isinstance(login, str) and login:
            # dict preserves insertion order, so this dedupes in place.
            ordered.setdefault(login, None)
    return list(ordered)
def infer_contributor_url(login: str, existing_links: dict[str, str]) -> str:
    """Resolve the profile URL for *login*.

    Preference order: a URL already present in the README, a hard-coded
    app/bot override, the GitHub Apps URL for ``[bot]`` accounts, and
    finally the plain ``github.com/<login>`` profile.
    """
    if login in existing_links:
        return existing_links[login]
    override = SPECIAL_LINK_OVERRIDES.get(login)
    if override is not None:
        return override
    if login.endswith("[bot]"):
        stripped = login[: -len("[bot]")]
        return f"https://github.com/apps/{stripped}"
    return f"https://github.com/{login}"
def render_contributor_lines(contributors: list[str], existing_links: dict[str, str]) -> str:
    """Render the markdown bullet list for *contributors*, one per line."""
    return "\n".join(
        f"- [@{login}]({infer_contributor_url(login, existing_links)})"
        for login in contributors
    )
def update_repo_contributors_section(content: str, contributors: list[str]) -> str:
    """Replace the README acknowledgement list with *contributors*.

    The list is the span between CONTRIBUTOR_SECTION_START and the next
    ``## `` heading after it; link URLs already in *content* are preserved
    for known logins.

    Raises:
        ValueError: if the sentinel paragraph, or a terminating heading
            after it, cannot be found.
    """
    existing_links = parse_existing_contributor_links(content)
    rendered_list = render_contributor_lines(contributors, existing_links)
    sentinel_index = content.find(CONTRIBUTOR_SECTION_START)
    if sentinel_index == -1:
        raise ValueError("README.md does not contain the expected Repo Contributors section structure.")
    start_index = sentinel_index + len(CONTRIBUTOR_SECTION_START)
    # The terminating heading must come *after* the sentinel. The previous
    # global `"\n## " in content` check accepted a heading located before the
    # sentinel and then crashed with an undescriptive ValueError from index().
    end_index = content.find("\n## ", start_index)
    if end_index == -1:
        raise ValueError("README.md does not contain the expected Repo Contributors section structure.")
    return f"{content[:start_index]}{rendered_list}\n{content[end_index:]}"
def fetch_contributors(repo: str) -> list[str]:
    """Fetch contributor logins for *repo* via the gh CLI.

    ``gh api --paginate --slurp`` emits a JSON array of result pages; the
    pages are flattened before parsing. Raises
    ``subprocess.CalledProcessError`` when the gh invocation fails.
    """
    command = [
        "gh",
        "api",
        f"repos/{repo}/contributors?per_page=100",
        "--paginate",
        "--slurp",
    ]
    completed = subprocess.run(command, check=True, capture_output=True, text=True)
    pages = json.loads(completed.stdout)
    # Keep only dict entries from list-shaped pages; skip anything malformed.
    flattened = [
        entry
        for page in pages
        if isinstance(page, list)
        for entry in page
        if isinstance(entry, dict)
    ]
    return parse_contributors_response(flattened)
def sync_contributors(base_dir: str | Path, dry_run: bool = False) -> bool:
    """Synchronize README.md's contributor list with live GitHub data.

    Returns True when the README needed (or, with *dry_run*, would need) an
    update, False when it is already current. Writes with LF newlines.
    """
    root = Path(base_dir)
    metadata = load_metadata(str(root))
    readme_path = root / "README.md"
    current = readme_path.read_text(encoding="utf-8")
    refreshed = update_repo_contributors_section(current, fetch_contributors(metadata["repo"]))
    if refreshed == current:
        return False
    if dry_run:
        print(f"[dry-run] Would update contributors in {readme_path}")
        return True
    readme_path.write_text(refreshed, encoding="utf-8", newline="\n")
    print(f"✅ Updated contributors in {readme_path}")
    return True
def parse_args() -> argparse.Namespace:
    """Parse command-line options for the contributor sync."""
    parser = argparse.ArgumentParser(description="Synchronize the README Repo Contributors section.")
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Preview contributor changes without writing files.",
    )
    options = parser.parse_args()
    return options
def main() -> int:
    """Entry point: sync the contributor list under the repository root."""
    options = parse_args()
    sync_contributors(find_repo_root(__file__), dry_run=options.dry_run)
    return 0
if __name__ == "__main__":
    # configure_utf8_output (from update_readme) presumably prepares stdout so
    # the ✅ status marker prints cleanly — shared with the sibling scripts.
    configure_utf8_output()
    sys.exit(main())

View File

@@ -20,6 +20,7 @@ const LOCAL_TEST_COMMANDS = [
[path.join(TOOL_TESTS, "docs_security_content.test.js")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_bundle_activation_security.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_audit_skills.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_audit_consistency.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_cleanup_synthetic_skill_sections.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_fix_missing_skill_metadata.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_fix_missing_skill_sections.py")],
@@ -28,6 +29,7 @@ const LOCAL_TEST_COMMANDS = [
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_repair_description_usage_summaries.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_microsoft_skills_security.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_repo_metadata.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_contributors.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_validate_skills_headings.py")],
];
const NETWORK_TEST_COMMANDS = [

View File

@@ -0,0 +1,133 @@
import importlib.util
import json
import sys
import tempfile
import unittest
from pathlib import Path
# Resolve the repository root three directory levels above this test file and
# make tools/scripts importable so the modules under test can be loaded.
REPO_ROOT = Path(__file__).resolve().parents[3]
TOOLS_SCRIPTS_DIR = REPO_ROOT / "tools" / "scripts"
if str(TOOLS_SCRIPTS_DIR) not in sys.path:
    sys.path.insert(0, str(TOOLS_SCRIPTS_DIR))
def load_module(relative_path: str, module_name: str):
    """Import the file at repo-relative *relative_path* as *module_name*."""
    spec = importlib.util.spec_from_file_location(module_name, REPO_ROOT / relative_path)
    loaded = importlib.util.module_from_spec(spec)
    assert spec.loader is not None
    # Register before executing (standard importlib recipe) so imports
    # performed during module execution can resolve the name.
    sys.modules[module_name] = loaded
    spec.loader.exec_module(loaded)
    return loaded
# Load the script under test by file path; the distinct module name avoids
# clobbering any real "audit_consistency" entry in sys.modules.
audit_consistency = load_module(
    "tools/scripts/audit_consistency.py",
    "audit_consistency_test",
)
class AuditConsistencyTests(unittest.TestCase):
    """Regression tests for audit_consistency.find_local_consistency_issues."""

    def write_repo_state(self, root: Path, total_skills: int = 1304, count_label: str = "1,304+"):
        """Materialize a minimal repository tree whose docs agree with *count_label*.

        Writes package.json, skills_index.json, README.md, and every doc the
        auditor inspects, all using the same skills-count label so the audit
        passes unless a test mutates a file afterwards.
        """
        (root / "docs" / "users").mkdir(parents=True)
        (root / "docs" / "maintainers").mkdir(parents=True)
        (root / "docs" / "integrations" / "jetski-gemini-loader").mkdir(parents=True)
        (root / "package.json").write_text(
            json.dumps(
                {
                    "name": "antigravity-awesome-skills",
                    "version": "8.4.0",
                    "description": f"{count_label} agentic skills for Claude Code, Gemini CLI, Cursor, Antigravity & more. Installer CLI.",
                }
            ),
            encoding="utf-8",
        )
        # skills_index.json drives the live count: one (empty) entry per skill.
        (root / "skills_index.json").write_text(json.dumps([{}] * total_skills), encoding="utf-8")
        (root / "README.md").write_text(
            f"""<!-- registry-sync: version=8.4.0; skills={total_skills}; stars=26132; updated_at=2026-03-21T00:00:00+00:00 -->
# 🌌 Antigravity Awesome Skills: {count_label} Agentic Skills for Claude Code, Gemini CLI, Cursor, Copilot & More
> **Installable GitHub library of {count_label} agentic skills for Claude Code, Cursor, Codex CLI, Gemini CLI, Antigravity, and other AI coding assistants.**
[![GitHub stars](https://img.shields.io/badge/⭐%2026%2C000%2B%20Stars-gold?style=for-the-badge)](https://github.com/sickn33/antigravity-awesome-skills/stargazers)
**Current release: V8.4.0.** Trusted by 26k+ GitHub stargazers, this repository combines official and community skill collections with bundles, workflows, installation paths, and docs that help you go from first install to daily use quickly.
- **Broad coverage with real utility**: {count_label} skills across development, testing, security, infrastructure, product, and marketing.
If you want a faster answer than "browse all {count_label} skills", start with a tool-specific guide:
""",
            encoding="utf-8",
        )
        (root / "docs" / "users" / "getting-started.md").write_text(
            "# Getting Started with Antigravity Awesome Skills (V8.4.0)\n",
            encoding="utf-8",
        )
        (root / "docs" / "users" / "claude-code-skills.md").write_text(
            f"- It includes {count_label} skills instead of a narrow single-domain starter pack.\n",
            encoding="utf-8",
        )
        (root / "docs" / "users" / "gemini-cli-skills.md").write_text(
            f"- It helps new users get started with bundles and workflows rather than forcing a cold start from {count_label} files.\n",
            encoding="utf-8",
        )
        # usage.md exercises all four usage-doc regex patterns at once.
        (root / "docs" / "users" / "usage.md").write_text(
            f"✅ **Downloaded {count_label} skill files**\n- You installed a toolbox with {count_label} tools\nDon't try to use all {count_label} skills at once.\nNo. Even though you have {count_label} skills installed locally\n",
            encoding="utf-8",
        )
        (root / "docs" / "users" / "visual-guide.md").write_text(
            f"{count_label} skills live here\n{count_label} total\n{count_label} SKILLS\n",
            encoding="utf-8",
        )
        (root / "docs" / "users" / "bundles.md").write_text(
            f'### 🚀 The "Essentials" Pack\n_Last updated: March 2026 | Total Skills: {count_label} | Total Bundles: 1_\n',
            encoding="utf-8",
        )
        (root / "docs" / "users" / "kiro-integration.md").write_text(
            f"- **Domain expertise** across {count_label} specialized areas\n",
            encoding="utf-8",
        )
        (root / "docs" / "maintainers" / "repo-growth-seo.md").write_text(
            f"> Installable GitHub library of {count_label} agentic skills for Claude Code, Cursor, Codex CLI, Gemini CLI, Antigravity, and other AI coding assistants.\n> Installable GitHub library of {count_label} agentic skills for Claude Code, Cursor, Codex CLI, Gemini CLI, Antigravity, and more. Includes installer CLI, bundles, workflows, and official/community skill collections.\n- use a clean preview image that says `{count_label} Agentic Skills`;\n",
            encoding="utf-8",
        )
        (root / "docs" / "maintainers" / "skills-update-guide.md").write_text(
            f"- All {count_label} skills from the skills directory\n",
            encoding="utf-8",
        )
        # NOTE(review): the Italian jetski-cortex doc uses dot-separated
        # thousands ("1.304+") and is not keyed to count_label — presumably
        # its dedicated sync function handles that locale; confirm.
        (root / "docs" / "integrations" / "jetski-cortex.md").write_text(
            "1.304+ skill\nCon oltre 1.304 skill, questo approccio\n",
            encoding="utf-8",
        )
        (root / "docs" / "integrations" / "jetski-gemini-loader" / "README.md").write_text(
            f"This pattern avoids context overflow when you have {count_label} skills installed.\n",
            encoding="utf-8",
        )

    def test_local_consistency_passes_for_aligned_docs(self):
        """A fully aligned tree yields no issues."""
        with tempfile.TemporaryDirectory() as temp_dir:
            root = Path(temp_dir)
            self.write_repo_state(root)
            issues = audit_consistency.find_local_consistency_issues(root)
            self.assertEqual(issues, [])

    def test_local_consistency_flags_stale_claims(self):
        """Regressing one doc to an outdated count is reported as an issue."""
        with tempfile.TemporaryDirectory() as temp_dir:
            root = Path(temp_dir)
            self.write_repo_state(root, count_label="1,304+")
            # Overwrite usage.md with a stale count; the audit must flag it.
            (root / "docs" / "users" / "usage.md").write_text(
                "✅ **Downloaded 1,273+ skill files**\n",
                encoding="utf-8",
            )
            issues = audit_consistency.find_local_consistency_issues(root)
            self.assertTrue(any("docs/users/usage.md" in issue for issue in issues))
if __name__ == "__main__":
    # Allow running this test module directly, outside the suite runner.
    unittest.main()

View File

@@ -0,0 +1,86 @@
import importlib.util
import sys
import unittest
from pathlib import Path
# Resolve the repository root three directory levels above this test file and
# make tools/scripts importable so the modules under test can be loaded.
REPO_ROOT = Path(__file__).resolve().parents[3]
TOOLS_SCRIPTS_DIR = REPO_ROOT / "tools" / "scripts"
if str(TOOLS_SCRIPTS_DIR) not in sys.path:
    sys.path.insert(0, str(TOOLS_SCRIPTS_DIR))
def load_module(relative_path: str, module_name: str):
    """Import the file at repo-relative *relative_path* as *module_name*."""
    spec = importlib.util.spec_from_file_location(module_name, REPO_ROOT / relative_path)
    loaded = importlib.util.module_from_spec(spec)
    assert spec.loader is not None
    # Register before executing (standard importlib recipe) so imports
    # performed during module execution can resolve the name.
    sys.modules[module_name] = loaded
    spec.loader.exec_module(loaded)
    return loaded
# Load the script under test by file path; the distinct module name avoids
# clobbering any real "sync_contributors" entry in sys.modules.
sync_contributors = load_module(
    "tools/scripts/sync_contributors.py",
    "sync_contributors_test",
)
class SyncContributorsTests(unittest.TestCase):
    """Regression tests for the README contributor-sync helpers."""

    def test_parse_existing_contributor_links_preserves_custom_urls(self):
        """Existing bullet links, including app/bot overrides, are captured."""
        content = """## Repo Contributors
- [@alice](https://github.com/alice)
- [@github-actions[bot]](https://github.com/apps/github-actions)
- [@Copilot](https://github.com/apps/copilot-swe-agent)
"""
        links = sync_contributors.parse_existing_contributor_links(content)
        self.assertEqual(links["alice"], "https://github.com/alice")
        self.assertEqual(links["github-actions[bot]"], "https://github.com/apps/github-actions")
        self.assertEqual(links["Copilot"], "https://github.com/apps/copilot-swe-agent")

    def test_update_repo_contributors_section_renders_latest_contributors(self):
        """The list between the sentinel paragraph and the next heading is replaced."""
        # The blank line after the acknowledgement sentence is required:
        # CONTRIBUTOR_SECTION_START ends with "\n\n", so without it the
        # updater cannot locate the section and raises ValueError.
        content = """## Repo Contributors
<a href="https://github.com/sickn33/antigravity-awesome-skills/graphs/contributors">
<img src="https://contrib.rocks/image?repo=sickn33/antigravity-awesome-skills" alt="Repository contributors" />
</a>
Made with [contrib.rocks](https://contrib.rocks).
We officially thank the following contributors for their help in making this repository awesome!

- [@alice](https://github.com/alice)
- [@Copilot](https://github.com/apps/copilot-swe-agent)
## License
"""
        updated = sync_contributors.update_repo_contributors_section(
            content,
            ["alice", "github-actions[bot]", "Copilot", "new-user"],
        )
        self.assertIn("- [@alice](https://github.com/alice)", updated)
        self.assertIn("- [@github-actions[bot]](https://github.com/apps/github-actions)", updated)
        self.assertIn("- [@Copilot](https://github.com/apps/copilot-swe-agent)", updated)
        self.assertIn("- [@new-user](https://github.com/new-user)", updated)
        # Surrounding structure must survive the in-place replacement.
        self.assertEqual(updated.count("## Repo Contributors"), 1)
        self.assertEqual(updated.count("## License"), 1)

    def test_parse_contributors_response_dedupes_and_sorts_order(self):
        """Logins are deduplicated while first-seen API order is preserved."""
        payload = [
            {"login": "alice"},
            {"login": "bob"},
            {"login": "alice"},
            {"login": "github-actions[bot]"},
        ]
        contributors = sync_contributors.parse_contributors_response(payload)
        self.assertEqual(contributors, ["alice", "bob", "github-actions[bot]"])
if __name__ == "__main__":
    # Allow running this test module directly, outside the suite runner.
    unittest.main()