feat(repo): Add merge-batch and README credit gates

This commit is contained in:
sickn33
2026-04-05 10:36:47 +02:00
parent 3c546a9dbe
commit 51061dfcf1
18 changed files with 1730 additions and 9 deletions

View File

@@ -0,0 +1,255 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import json
import re
import subprocess
import sys
from collections.abc import Mapping
from datetime import date, datetime
from pathlib import Path
import yaml
from _project_paths import find_repo_root
GITHUB_REPO_PATTERN = re.compile(r"https://github\.com/([A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+)")
SOURCE_REPO_PATTERN = re.compile(r"^[A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+$")
VALID_SOURCE_TYPES = {"official", "community", "self"}
def normalize_yaml_value(value):
    """Recursively convert parsed YAML into JSON-friendly primitives.

    date/datetime values become ISO-8601 strings; mappings and lists are
    rebuilt with normalized members; everything else passes through.
    """
    if isinstance(value, (date, datetime)):
        return value.isoformat()
    if isinstance(value, Mapping):
        return {key: normalize_yaml_value(item) for key, item in value.items()}
    if isinstance(value, list):
        return list(map(normalize_yaml_value, value))
    return value
def parse_frontmatter(content: str) -> dict[str, object]:
    """Extract the leading ``---`` YAML frontmatter block as a plain dict.

    Returns an empty dict when no frontmatter is present, when the YAML is
    malformed, or when the document root is not a mapping.
    """
    frontmatter = re.search(r"^---\s*\n(.*?)\n?---(?:\s*\n|$)", content, re.DOTALL)
    if frontmatter is None:
        return {}
    try:
        loaded = yaml.safe_load(frontmatter.group(1)) or {}
    except yaml.YAMLError:
        return {}
    loaded = normalize_yaml_value(loaded)
    return dict(loaded) if isinstance(loaded, Mapping) else {}
def normalize_repo_slug(value: str | None) -> str | None:
if not isinstance(value, str):
return None
candidate = value.strip().strip('"').strip("'")
if candidate.startswith("https://github.com/"):
candidate = candidate[len("https://github.com/") :]
candidate = candidate.rstrip("/")
candidate = candidate.removesuffix(".git")
candidate = candidate.split("#", 1)[0]
candidate = candidate.split("?", 1)[0]
match = re.match(r"^([A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+)", candidate)
if not match:
return None
return match.group(1).lower()
def run_git(args: list[str], cwd: str | Path, capture: bool = True) -> str:
    """Run a git subcommand and return its stripped stdout ("" when not captured).

    check=False so failures can be reshaped: on a non-zero exit, raises
    RuntimeError carrying git's stderr, or a synthesized message when stderr
    is empty or output was not captured.
    """
    result = subprocess.run(
        ["git", *args],
        cwd=str(cwd),
        check=False,
        capture_output=capture,
        text=True,
    )
    if result.returncode != 0:
        stderr = result.stderr.strip() if capture and result.stderr else ""
        raise RuntimeError(stderr or f"git {' '.join(args)} failed with exit code {result.returncode}")
    return result.stdout.strip() if capture else ""
def get_changed_files(base_dir: str | Path, base_ref: str, head_ref: str) -> list[str]:
    """Return unique, slash-normalized paths changed between two refs.

    Uses the three-dot diff (merge-base comparison) and preserves git's
    output order while dropping blanks and duplicates.
    """
    diff_output = run_git(["diff", "--name-only", f"{base_ref}...{head_ref}"], cwd=base_dir)
    unique_paths: list[str] = []
    seen: set[str] = set()
    for line in diff_output.splitlines():
        path = line.replace("\\", "/").strip()
        if path and path not in seen:
            seen.add(path)
            unique_paths.append(path)
    return unique_paths
def is_skill_file(file_path: str) -> bool:
    """True when the path points at a SKILL.md inside the skills/ tree."""
    path = file_path.replace("\\", "/")
    return path.startswith("skills/") and path.endswith("/SKILL.md")
def extract_credit_repos(readme_text: str) -> dict[str, set[str]]:
    """Collect lowercase owner/repo slugs credited in the README.

    Only lines under an "Official Sources" or "Community Contributors"
    heading are scanned; any other heading ends the active section.
    """
    repo_link = re.compile(r"https://github\.com/([A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+)")
    section_by_title = {"Official Sources": "official", "Community Contributors": "community"}
    credits: dict[str, set[str]] = {"official": set(), "community": set()}
    active: str | None = None
    for line in readme_text.splitlines():
        heading = re.match(r"^(#{2,6})\s+(.*)$", line.strip())
        if heading:
            # Unknown headings reset the active section to None.
            active = section_by_title.get(heading.group(2).strip())
            continue
        if active is not None:
            for found in repo_link.finditer(line):
                credits[active].add(found.group(1).lower())
    return credits
def classify_source(metadata: dict[str, object]) -> str | None:
raw_source_type = metadata.get("source_type")
if isinstance(raw_source_type, str) and raw_source_type.strip():
source_type = raw_source_type.strip().lower()
return source_type if source_type in VALID_SOURCE_TYPES else None
raw_source = metadata.get("source")
if isinstance(raw_source, str) and raw_source.strip().lower() == "self":
return "self"
if metadata.get("source_repo"):
return "community"
return None
def collect_reports(base_dir: str | Path, base_ref: str, head_ref: str) -> dict[str, object]:
    """Validate README credits for every skill changed between two git refs.

    Returns a report dict with the changed files, the skill files checked,
    per-skill metadata, warnings, errors, and the credited slugs found in
    README.md. Raises RuntimeError (via run_git) when git fails.
    """
    root = Path(base_dir)
    changed_files = get_changed_files(root, base_ref, head_ref)
    skill_files = [file_path for file_path in changed_files if is_skill_file(file_path)]
    readme_path = root / "README.md"
    readme_text = readme_path.read_text(encoding="utf-8")
    readme_credit_sets = extract_credit_repos(readme_text)
    warnings: list[str] = []
    errors: list[str] = []
    checked_skills: list[dict[str, object]] = []
    for rel_path in skill_files:
        skill_path = root / rel_path
        content = skill_path.read_text(encoding="utf-8")
        metadata = parse_frontmatter(content)
        source_type = classify_source(metadata)
        raw_source_repo = metadata.get("source_repo")
        source_repo = normalize_repo_slug(raw_source_repo)
        source_value = metadata.get("source")
        checked_skills.append(
            {
                "path": rel_path,
                "source": source_value,
                "source_type": source_type,
                "source_repo": source_repo,
            }
        )
        # An explicitly declared but unrecognized source_type is a hard error.
        if source_type is None and metadata.get("source_type") is not None:
            errors.append(f"{rel_path}: invalid source_type {metadata.get('source_type')!r}")
            continue
        # source_repo was declared but could not be normalized to OWNER/REPO.
        if raw_source_repo is not None and source_repo is None:
            errors.append(f"{rel_path}: invalid source_repo {raw_source_repo!r}; expected OWNER/REPO")
            continue
        # Self-authored skills need no README credit.
        if source_type == "self":
            continue
        if source_repo is None:
            # External source with no repo slug: credits cannot be verified,
            # so downgrade to a warning instead of failing the gate.
            if isinstance(source_value, str) and source_value.strip().lower() != "self":
                warnings.append(
                    f"{rel_path}: external source declared without source_repo; README credit check skipped"
                )
            continue
        if not SOURCE_REPO_PATTERN.match(source_repo):
            errors.append(f"{rel_path}: invalid source_repo {source_repo!r}; expected OWNER/REPO")
            continue
        # Official skills must be credited under Official Sources; everything
        # else (community or unclassified) under Community Contributors.
        bucket = "official" if source_type == "official" else "community"
        if source_repo not in readme_credit_sets[bucket]:
            location_hint = "### Official Sources" if bucket == "official" else "### Community Contributors"
            errors.append(
                f"{rel_path}: source_repo {source_repo} is missing from {location_hint} in README.md"
            )
        # If the source repo only exists in the wrong bucket, keep the failure focused on the missing
        # required attribution instead of reporting duplicate noise.
    return {
        "changed_files": changed_files,
        "skill_files": skill_files,
        "checked_skills": checked_skills,
        "warnings": warnings,
        "errors": errors,
        "readme_credits": {
            bucket: sorted(repos)
            for bucket, repos in readme_credit_sets.items()
        },
    }
def check_readme_credits(base_dir: str | Path, base_ref: str, head_ref: str) -> dict[str, object]:
    """Public entry point; currently a thin alias for collect_reports."""
    return collect_reports(base_dir, base_ref, head_ref)
def parse_args() -> argparse.Namespace:
    """Build and parse the CLI options for the README credit checker."""
    parser = argparse.ArgumentParser(description="Validate README credits for changed skills.")
    parser.add_argument(
        "--base",
        default="origin/main",
        help="Base ref for git diff (default: origin/main)",
    )
    parser.add_argument(
        "--head",
        default="HEAD",
        help="Head ref for git diff (default: HEAD)",
    )
    parser.add_argument("--json", action="store_true", help="Print the report as JSON.")
    return parser.parse_args()
def main() -> int:
    """CLI entry point: print the credit report; exit 1 when errors exist.

    Warnings are printed but do not affect the exit code.
    """
    args = parse_args()
    root = find_repo_root(__file__)
    report = check_readme_credits(root, args.base, args.head)
    if args.json:
        print(json.dumps(report, indent=2))
    else:
        if report["skill_files"]:
            print(f"[readme-credits] Changed skill files: {len(report['skill_files'])}")
        else:
            print("[readme-credits] No changed skill files detected.")
        for warning in report["warnings"]:
            print(f"⚠️ {warning}")
        for error in report["errors"]:
            print(f"{error}")
    return 0 if not report["errors"] else 1
if __name__ == "__main__":
    # RuntimeError (raised by run_git) is reported on stderr without a traceback.
    try:
        sys.exit(main())
    except RuntimeError as exc:
        print(f"{exc}", file=sys.stderr)
        sys.exit(1)

View File

@@ -0,0 +1,646 @@
#!/usr/bin/env node
const fs = require("fs");
const path = require("path");
const { spawnSync } = require("child_process");
const { findProjectRoot } = require("../lib/project-root");
const {
hasQualityChecklist,
normalizeRepoPath,
} = require("../lib/workflow-contract");
// Comment posted when a PR is closed/reopened to retrigger pull_request checks.
const REOPEN_COMMENT =
  "Maintainer workflow refresh: closing and reopening to retrigger pull_request checks against the updated PR body.";
// Default interval between check-status polls.
const DEFAULT_POLL_SECONDS = 20;
// Error-message variants seen when main moved under the PR; such merges are retried.
const BASE_BRANCH_MODIFIED_PATTERNS = [
  /base branch was modified/i,
  /base branch has been modified/i,
  /branch was modified/i,
];
// [label, aliases] pairs for the checks every PR must pass.
const REQUIRED_CHECKS = [
  ["pr-policy", ["pr-policy"]],
  ["source-validation", ["source-validation"]],
  ["artifact-preview", ["artifact-preview"]],
];
// Accepted names for the skill-review gate (job, workflow, and namespaced forms).
const SKILL_REVIEW_REQUIRED = ["review", "Skill Review & Optimize", "Skill Review & Optimize / review"];
function parseArgs(argv) {
  // Parse CLI flags: --prs <list>, --poll-seconds <n>, --dry-run.
  // An invalid or non-positive poll interval falls back to the default.
  const parsed = { prs: null, pollSeconds: DEFAULT_POLL_SECONDS, dryRun: false };
  let cursor = 0;
  while (cursor < argv.length) {
    const token = argv[cursor];
    if (token === "--prs") {
      parsed.prs = argv[cursor + 1] || null;
      cursor += 2;
    } else if (token === "--poll-seconds") {
      parsed.pollSeconds = Number(argv[cursor + 1]);
      cursor += 2;
    } else {
      if (token === "--dry-run") {
        parsed.dryRun = true;
      }
      cursor += 1;
    }
  }
  if (typeof parsed.pollSeconds !== "number" || Number.isNaN(parsed.pollSeconds) || parsed.pollSeconds <= 0) {
    parsed.pollSeconds = DEFAULT_POLL_SECONDS;
  }
  return parsed;
}
function readJson(filePath) {
  // Read and parse a UTF-8 JSON file from disk.
  const raw = fs.readFileSync(filePath, "utf8");
  return JSON.parse(raw);
}
function readRepositorySlug(projectRoot) {
  // Derive OWNER/REPO from package.json's repository field, which may be a
  // bare string or an object with a url property.
  const manifest = readJson(path.join(projectRoot, "package.json"));
  const repoField = manifest.repository;
  let rawUrl = null;
  if (typeof repoField === "string") {
    rawUrl = repoField;
  } else if (repoField && typeof repoField.url === "string") {
    rawUrl = repoField.url;
  }
  if (!rawUrl) {
    throw new Error("package.json repository.url is required to resolve the GitHub slug.");
  }
  const match = rawUrl.match(/github\.com[:/](?<slug>[^/]+\/[^/]+?)(?:\.git)?$/i);
  if (!match?.groups?.slug) {
    throw new Error(`Could not derive a GitHub repo slug from repository url: ${rawUrl}`);
  }
  return match.groups.slug;
}
function runCommand(command, args, cwd, options = {}) {
  // Run a child process synchronously. options.capture returns trimmed
  // stdout; otherwise output is inherited. options.input feeds stdin
  // (forcing a piped stdin even in inherit mode). shell:true on Windows so
  // .cmd shims like gh/npm resolve.
  const result = spawnSync(command, args, {
    cwd,
    encoding: "utf8",
    input: options.input,
    stdio: options.capture
      ? ["pipe", "pipe", "pipe"]
      : options.input !== undefined
      ? ["pipe", "inherit", "inherit"]
      : ["inherit", "inherit", "inherit"],
    shell: process.platform === "win32",
  });
  if (result.error) {
    // Spawn-level failure (e.g. binary not found) — rethrow as-is.
    throw result.error;
  }
  if (typeof result.status !== "number" || result.status !== 0) {
    // Prefer captured stderr; fall back to a synthesized message.
    const stderr = options.capture ? result.stderr.trim() : "";
    throw new Error(stderr || `${command} ${args.join(" ")} failed with status ${result.status}`);
  }
  return options.capture ? result.stdout.trim() : "";
}
function runGhJson(projectRoot, args, options = {}) {
  // Run a gh command with --json output and parse the result (null on empty).
  // Only append the --json flag when fields were actually requested:
  // previously an empty options.jsonFields left a dangling "--json" flag,
  // because only the empty field string was filtered out, not the flag.
  const ghArgs = [...args];
  if (options.jsonFields) {
    ghArgs.push("--json", options.jsonFields);
  }
  const stdout = runCommand("gh", ghArgs, projectRoot, { capture: true, input: options.input });
  return JSON.parse(stdout || "null");
}
function runGhApiJson(projectRoot, args, options = {}) {
  // Invoke `gh api`, optionally paginating/slurping, and parse the JSON body.
  const apiArgs = ["api", ...args];
  if (options.paginate) {
    apiArgs.push("--paginate");
  }
  if (options.slurp) {
    apiArgs.push("--slurp");
  }
  const raw = runCommand("gh", apiArgs, projectRoot, { capture: true, input: options.input });
  return JSON.parse(raw || "null");
}
function flattenGhSlurpPayload(payload) {
  // gh --slurp wraps each page in an array; merge pages into one flat list.
  // Non-array payloads yield []; scalar page entries are dropped.
  if (!Array.isArray(payload)) {
    return [];
  }
  return payload.reduce((acc, page) => {
    if (Array.isArray(page)) {
      return acc.concat(page);
    }
    if (page && typeof page === "object") {
      acc.push(page);
    }
    return acc;
  }, []);
}
function ensureOnMainAndClean(projectRoot) {
  // Guard: merge-batch mutates main directly, so require being on main with
  // no uncommitted changes to tracked files (untracked files are tolerated).
  const branch = runCommand("git", ["rev-parse", "--abbrev-ref", "HEAD"], projectRoot, {
    capture: true,
  });
  if (branch !== "main") {
    throw new Error(`merge-batch must run from main. Current branch: ${branch}`);
  }
  const status = runCommand(
    "git",
    ["status", "--porcelain", "--untracked-files=no"],
    projectRoot,
    { capture: true },
  );
  if (status) {
    throw new Error("merge-batch requires a clean tracked working tree before starting.");
  }
}
function parsePrList(prs) {
  // Turn "450, 449 446" style input into unique positive PR numbers,
  // preserving first-seen order. Throws on empty or all-invalid input.
  if (!prs) {
    throw new Error("Usage: merge_batch.cjs --prs 450,449,446,451");
  }
  const numbers = prs
    .split(/[\s,]+/)
    .map((token) => Number.parseInt(token, 10))
    .filter((num) => Number.isInteger(num) && num > 0);
  if (numbers.length === 0) {
    throw new Error("No valid PR numbers were provided.");
  }
  return Array.from(new Set(numbers));
}
function extractSummaryBlock(body) {
  // Keep only the free-form text before the first "## " section heading.
  const normalized = String(body || "").replace(/\r\n/g, "\n").trim();
  if (!normalized) {
    return "";
  }
  const firstSection = normalized.match(/^\s*##\s+/m);
  return firstSection ? normalized.slice(0, firstSection.index).trimEnd() : normalized;
}
function extractTemplateSections(templateContent) {
  // Drop any preamble before the first "## " heading; keep headings onward.
  const normalized = String(templateContent || "").replace(/\r\n/g, "\n").trim();
  const firstSection = normalized.match(/^\s*##\s+/m);
  if (firstSection === null) {
    return normalized;
  }
  return normalized.slice(firstSection.index).trim();
}
function normalizePrBody(body, templateContent) {
  // Preserve the author's summary, then append the canonical template sections.
  const summary = extractSummaryBlock(body);
  const sections = extractTemplateSections(templateContent);
  return summary ? `${summary}\n\n${sections}`.trim() : sections;
}
function loadPullRequestTemplate(projectRoot) {
  // Read the canonical PR template used to rebuild stale PR bodies.
  return fs.readFileSync(path.join(projectRoot, ".github", "PULL_REQUEST_TEMPLATE.md"), "utf8");
}
function loadPullRequestDetails(projectRoot, repoSlug, prNumber) {
  // Combine `gh pr view` metadata with the PR's full changed-file list and
  // flag whether any SKILL.md was touched (drives the skill-review gate).
  const details = runGhJson(projectRoot, ["pr", "view", String(prNumber)], {
    jsonFields: [
      "body",
      "mergeStateStatus",
      "mergeable",
      "number",
      "title",
      "headRefOid",
      "url",
    ].join(","),
  });
  // Paginate the files endpoint so PRs with >100 files are fully listed.
  const filesPayload = runGhApiJson(projectRoot, [
    `repos/${repoSlug}/pulls/${prNumber}/files?per_page=100`,
  ], {
    paginate: true,
    slurp: true,
  });
  const files = flattenGhSlurpPayload(filesPayload)
    .map((entry) => normalizeRepoPath(entry?.filename))
    .filter(Boolean);
  return {
    ...details,
    files,
    hasSkillChanges: files.some((filePath) => filePath.endsWith("/SKILL.md") || filePath === "SKILL.md"),
  };
}
function needsBodyRefresh(prDetails) {
  // A PR body missing the quality checklist predates the current template.
  return !hasQualityChecklist(prDetails.body);
}
function getRequiredCheckAliases(prDetails) {
  // Every PR needs the baseline checks; skill edits also need the review gate.
  const aliases = REQUIRED_CHECKS.map(([, aliasList]) => aliasList);
  return prDetails.hasSkillChanges ? [...aliases, SKILL_REVIEW_REQUIRED] : aliases;
}
function mergeableIsConflict(prDetails) {
  // GitHub signals conflicts via mergeable=CONFLICTING or mergeStateStatus=DIRTY.
  const upper = (value) => String(value || "").toUpperCase();
  return upper(prDetails.mergeable) === "CONFLICTING" || upper(prDetails.mergeStateStatus) === "DIRTY";
}
function selectLatestCheckRuns(checkRuns) {
  // Keep only the most recent run per check name; timestamp ties break by id.
  const timestampOf = (run) => run.completed_at || run.started_at || run.created_at || "";
  const latest = new Map();
  for (const candidate of checkRuns) {
    const name = String(candidate?.name || "");
    if (!name) {
      continue;
    }
    const incumbent = latest.get(name);
    if (!incumbent) {
      latest.set(name, candidate);
      continue;
    }
    const candidateKey = timestampOf(candidate);
    const incumbentKey = timestampOf(incumbent);
    if (
      candidateKey > incumbentKey ||
      (candidateKey === incumbentKey && Number(candidate.id || 0) > Number(incumbent.id || 0))
    ) {
      latest.set(name, candidate);
    }
  }
  return latest;
}
function checkRunMatchesAliases(checkRun, aliases) {
  // A run matches when its name equals an alias or is namespaced as "Workflow / alias".
  const runName = String(checkRun?.name || "");
  for (const alias of aliases) {
    if (runName === alias || runName.endsWith(` / ${alias}`)) {
      return true;
    }
  }
  return false;
}
function summarizeRequiredCheckRuns(checkRuns, requiredAliases) {
  // Reduce raw check runs to one {label, state, conclusion, run} entry per
  // required check: missing, pending, success (incl. neutral/skipped), or failed.
  const latestRuns = [...selectLatestCheckRuns(checkRuns).values()];
  return requiredAliases.map((aliases) => {
    const label = aliases[0];
    const run = latestRuns.find((candidate) => checkRunMatchesAliases(candidate, aliases));
    if (!run) {
      return { label, state: "missing", conclusion: null, run: null };
    }
    const status = String(run.status || "").toLowerCase();
    const conclusion = String(run.conclusion || "").toLowerCase();
    if (status !== "completed") {
      return { label, state: "pending", conclusion, run };
    }
    const passing = ["success", "neutral", "skipped"].includes(conclusion);
    return { label, state: passing ? "success" : "failed", conclusion, run };
  });
}
function formatCheckSummary(summaries) {
  // Render "label: state" fragments joined by commas for log output.
  const describe = (summary) => {
    switch (summary.state) {
      case "success":
        return `${summary.label}: ${summary.conclusion || "success"}`;
      case "pending":
        return `${summary.label}: pending (${summary.conclusion || "in progress"})`;
      case "failed":
        return `${summary.label}: failed (${summary.conclusion || "unknown"})`;
      default:
        return `${summary.label}: missing`;
    }
  };
  return summaries.map(describe).join(", ");
}
function getHeadSha(projectRoot, repoSlug, prNumber) {
  // Re-fetch the PR head SHA (it changes after a body refresh / reopen).
  // NOTE(review): repoSlug is accepted but unused here; kept for call-site symmetry.
  const details = runGhJson(projectRoot, ["pr", "view", String(prNumber)], {
    jsonFields: "headRefOid",
  });
  return details.headRefOid;
}
function listActionRequiredRuns(projectRoot, repoSlug, headSha) {
  // Fetch workflow runs stuck on action_required for a commit, deduplicated
  // by numeric run id; entries without an integer id are dropped.
  const payload = runGhApiJson(
    projectRoot,
    [`repos/${repoSlug}/actions/runs?head_sha=${headSha}&status=action_required&per_page=100`],
    { paginate: true, slurp: true },
  );
  const deduped = [];
  const seenIds = new Set();
  for (const run of flattenGhSlurpPayload(payload)) {
    const id = Number(run?.id);
    if (!Number.isInteger(id) || seenIds.has(id)) {
      continue;
    }
    seenIds.add(id);
    deduped.push(run);
  }
  return deduped;
}
function approveActionRequiredRuns(projectRoot, repoSlug, headSha) {
  // Approve fork workflow runs blocked on maintainer approval for this commit.
  // Returns the approved runs so callers can log how many were unblocked.
  const runs = listActionRequiredRuns(projectRoot, repoSlug, headSha);
  for (const run of runs) {
    runCommand(
      "gh",
      ["api", "-X", "POST", `repos/${repoSlug}/actions/runs/${run.id}/approve`],
      projectRoot,
    );
  }
  return runs;
}
function listCheckRuns(projectRoot, repoSlug, headSha) {
  // Return the check runs reported for a commit ([] when the payload is malformed).
  const payload = runGhApiJson(projectRoot, [
    `repos/${repoSlug}/commits/${headSha}/check-runs?per_page=100`,
  ]);
  if (Array.isArray(payload?.check_runs)) {
    return payload.check_runs;
  }
  return [];
}
async function waitForRequiredChecks(
  projectRoot,
  repoSlug,
  headSha,
  requiredAliases,
  pollSeconds,
  maxAttempts = 180,
) {
  // Poll the commit's check runs until every required check succeeds.
  // Throws immediately on any failed check; throws after maxAttempts polls
  // (default 180 * pollSeconds) while checks remain pending or missing.
  for (let attempt = 1; attempt <= maxAttempts; attempt += 1) {
    const checkRuns = listCheckRuns(projectRoot, repoSlug, headSha);
    const summaries = summarizeRequiredCheckRuns(checkRuns, requiredAliases);
    const pending = summaries.filter((summary) => summary.state === "pending" || summary.state === "missing");
    const failed = summaries.filter((summary) => summary.state === "failed");
    console.log(`[merge-batch] Checks for ${headSha}: ${formatCheckSummary(summaries)}`);
    if (failed.length) {
      throw new Error(
        `Required checks failed for ${headSha}: ${failed.map((item) => `${item.label} (${item.conclusion || "failed"})`).join(", ")}`,
      );
    }
    if (!pending.length) {
      return summaries;
    }
    await new Promise((resolve) => setTimeout(resolve, pollSeconds * 1000));
  }
  throw new Error(`Timed out waiting for required checks on ${headSha}.`);
}
function patchPrBody(projectRoot, repoSlug, prNumber, body) {
  // PATCH the PR body via the REST API; the payload is piped as JSON on stdin.
  const payload = JSON.stringify({ body });
  runCommand(
    "gh",
    ["api", `repos/${repoSlug}/pulls/${prNumber}`, "-X", "PATCH", "--input", "-"],
    projectRoot,
    { input: payload },
  );
}
function closeAndReopenPr(projectRoot, prNumber) {
  // Close + reopen forces GitHub to re-run pull_request-triggered checks
  // against the updated PR body.
  runCommand("gh", ["pr", "close", String(prNumber), "--comment", REOPEN_COMMENT], projectRoot);
  runCommand("gh", ["pr", "reopen", String(prNumber)], projectRoot);
}
function isRetryableMergeError(error) {
  // Merge failures caused by main moving underneath the PR are safe to retry.
  const text = String(error?.message || error || "");
  for (const pattern of BASE_BRANCH_MODIFIED_PATTERNS) {
    if (pattern.test(text)) {
      return true;
    }
  }
  return false;
}
function gitCheckoutMain(projectRoot) {
  // Switch the local checkout back to main.
  runCommand("git", ["checkout", "main"], projectRoot);
}
function gitPullMain(projectRoot) {
  // Fast-forward main from origin; fails rather than creating a merge commit.
  runCommand("git", ["pull", "--ff-only", "origin", "main"], projectRoot);
}
function syncContributors(projectRoot) {
  // Regenerate README contributor credits via the npm script.
  runCommand("npm", ["run", "sync:contributors"], projectRoot);
}
function commitAndPushReadmeIfChanged(projectRoot) {
  // After sync:contributors, commit and push README.md when (and only when)
  // it changed. Any other tracked-file drift aborts, since the sync script
  // is expected to touch README.md alone. Returns { changed: boolean }.
  const status = runCommand("git", ["status", "--porcelain", "--untracked-files=no"], projectRoot, {
    capture: true,
  });
  if (!status) {
    return { changed: false };
  }
  const lines = status.split(/\r?\n/).filter(Boolean);
  // NOTE(review): includes("README.md") also tolerates nested paths like
  // docs/README.md — confirm the sync script can only touch the root README.
  const unexpected = lines.filter((line) => !line.includes("README.md"));
  if (unexpected.length) {
    throw new Error(`merge-batch expected sync:contributors to touch README.md only. Unexpected drift: ${unexpected.join(", ")}`);
  }
  runCommand("git", ["add", "README.md"], projectRoot);
  // Re-check staged content: the worktree change may have been a no-op.
  const staged = runCommand("git", ["diff", "--cached", "--name-only"], projectRoot, { capture: true });
  if (!staged.includes("README.md")) {
    return { changed: false };
  }
  runCommand("git", ["commit", "-m", "chore: sync contributor credits after merge batch"], projectRoot);
  runCommand("git", ["push", "origin", "main"], projectRoot);
  return { changed: true };
}
async function mergePullRequest(projectRoot, repoSlug, prNumber, options) {
  // End-to-end merge of one PR: refresh a stale body (close/reopen retriggers
  // checks), approve blocked fork runs, wait for required checks, squash-merge
  // with retries when main moves, then sync contributor credits on main.
  // options: { dryRun, pollSeconds }. Dry runs return before any mutation.
  const template = loadPullRequestTemplate(projectRoot);
  let prDetails = loadPullRequestDetails(projectRoot, repoSlug, prNumber);
  console.log(`[merge-batch] PR #${prNumber}: ${prDetails.title}`);
  if (mergeableIsConflict(prDetails)) {
    throw new Error(`PR #${prNumber} is in conflict state; resolve conflicts on the PR branch before merging.`);
  }
  let bodyRefreshed = false;
  if (needsBodyRefresh(prDetails)) {
    const normalizedBody = normalizePrBody(prDetails.body, template);
    if (!options.dryRun) {
      patchPrBody(projectRoot, repoSlug, prNumber, normalizedBody);
      closeAndReopenPr(projectRoot, prNumber);
    }
    bodyRefreshed = true;
    console.log(`[merge-batch] PR #${prNumber}: refreshed PR body and retriggered checks.`);
    // Reload: the reopen may have changed merge state and the head SHA.
    prDetails = loadPullRequestDetails(projectRoot, repoSlug, prNumber);
  }
  const headSha = getHeadSha(projectRoot, repoSlug, prNumber);
  const approvedRuns = options.dryRun ? [] : approveActionRequiredRuns(projectRoot, repoSlug, headSha);
  if (approvedRuns.length) {
    console.log(
      `[merge-batch] PR #${prNumber}: approved ${approvedRuns.length} fork run(s) waiting on action_required.`,
    );
  }
  const requiredCheckAliases = getRequiredCheckAliases(prDetails);
  if (!options.dryRun) {
    await waitForRequiredChecks(projectRoot, repoSlug, headSha, requiredCheckAliases, options.pollSeconds);
  }
  if (options.dryRun) {
    console.log(`[merge-batch] PR #${prNumber}: dry run complete, skipping merge and post-merge sync.`);
    return {
      prNumber,
      bodyRefreshed,
      merged: false,
      approvedRuns: [],
      followUp: { changed: false },
    };
  }
  // Up to three merge attempts; "base branch was modified" errors trigger a
  // main refresh + re-check before retrying. (The dryRun guards inside this
  // loop are unreachable — dry runs returned above — but kept as written.)
  let merged = false;
  for (let attempt = 1; attempt <= 3; attempt += 1) {
    try {
      if (!options.dryRun) {
        runCommand("gh", ["pr", "merge", String(prNumber), "--squash"], projectRoot);
      }
      merged = true;
      break;
    } catch (error) {
      if (!isRetryableMergeError(error) || attempt === 3) {
        throw error;
      }
      console.log(`[merge-batch] PR #${prNumber}: base branch changed, refreshing main and retrying merge.`);
      gitCheckoutMain(projectRoot);
      gitPullMain(projectRoot);
      prDetails = loadPullRequestDetails(projectRoot, repoSlug, prNumber);
      const refreshedSha = prDetails.headRefOid || headSha;
      if (!options.dryRun) {
        await waitForRequiredChecks(projectRoot, repoSlug, refreshedSha, requiredCheckAliases, options.pollSeconds);
      }
    }
  }
  if (!merged) {
    throw new Error(`Failed to merge PR #${prNumber}.`);
  }
  console.log(`[merge-batch] PR #${prNumber}: merged.`);
  // Post-merge: sync local main and regenerate contributor credits.
  gitCheckoutMain(projectRoot);
  gitPullMain(projectRoot);
  syncContributors(projectRoot);
  const followUp = commitAndPushReadmeIfChanged(projectRoot);
  if (followUp.changed) {
    console.log(`[merge-batch] PR #${prNumber}: README follow-up committed and pushed.`);
  }
  return {
    prNumber,
    bodyRefreshed,
    merged,
    approvedRuns: approvedRuns.map((run) => run.id),
    followUp,
  };
}
async function runBatch(projectRoot, prNumbers, options = {}) {
  // Merge the given PRs strictly in order, failing fast on the first error.
  const repoSlug = readRepositorySlug(projectRoot);
  ensureOnMainAndClean(projectRoot);
  const outcomes = [];
  for (const prNumber of prNumbers) {
    outcomes.push(await mergePullRequest(projectRoot, repoSlug, prNumber, options));
  }
  return outcomes;
}
async function main() {
  // CLI entry: parse flags, resolve the repo root, and merge each PR in order.
  const args = parseArgs(process.argv.slice(2));
  const projectRoot = findProjectRoot(__dirname);
  const prNumbers = parsePrList(args.prs);
  if (args.dryRun) {
    console.log(`[merge-batch] Dry run for PRs: ${prNumbers.join(", ")}`);
  }
  const results = await runBatch(projectRoot, prNumbers, {
    dryRun: args.dryRun,
    pollSeconds: args.pollSeconds,
  });
  console.log(
    `[merge-batch] Completed ${results.length} PR(s): ${results.map((result) => `#${result.prNumber}`).join(", ")}`,
  );
}
// Run as a CLI when executed directly; otherwise only export the API below.
if (require.main === module) {
  main().catch((error) => {
    console.error(`[merge-batch] ${error.message}`);
    process.exit(1);
  });
}
// Public surface consumed by merge_batch.test.js.
module.exports = {
  approveActionRequiredRuns,
  baseBranchModifiedPatterns: BASE_BRANCH_MODIFIED_PATTERNS,
  checkRunMatchesAliases,
  closeAndReopenPr,
  commitAndPushReadmeIfChanged,
  ensureOnMainAndClean,
  extractSummaryBlock,
  extractTemplateSections,
  formatCheckSummary,
  getRequiredCheckAliases,
  gitCheckoutMain,
  gitPullMain,
  isRetryableMergeError,
  listActionRequiredRuns,
  listCheckRuns,
  loadPullRequestDetails,
  loadPullRequestTemplate,
  mergePullRequest,
  mergeableIsConflict,
  normalizePrBody,
  parseArgs,
  parsePrList,
  readRepositorySlug,
  runBatch,
  selectLatestCheckRuns,
  summarizeRequiredCheckRuns,
  waitForRequiredChecks,
};

View File

@@ -31,6 +31,22 @@ def parse_existing_contributor_links(content: str) -> dict[str, str]:
return links
def parse_existing_contributor_order(content: str) -> list[str]:
    """Return contributor labels in the order they currently appear in the README.

    Matches ``- [@label](https://github.com/...)`` bullet lines, keeping only
    the first occurrence of each label.
    """
    bullet = re.compile(r"^- \[@(?P<label>.+?)\]\((?P<url>https://github\.com/.+?)\)$")
    ordered: list[str] = []
    seen: set[str] = set()
    for raw_line in content.splitlines():
        match = bullet.match(raw_line.strip())
        if match is None:
            continue
        label = match.group("label")
        if label not in seen:
            seen.add(label)
            ordered.append(label)
    return ordered
def parse_contributors_response(payload: list[dict]) -> list[str]:
contributors: list[str] = []
seen: set[str] = set()
@@ -43,6 +59,16 @@ def parse_contributors_response(payload: list[dict]) -> list[str]:
return contributors
def order_contributors_for_render(contributors: list[str], existing_order: list[str]) -> list[str]:
    """Keep previously-listed contributors in their README order, then append
    newcomers sorted case-insensitively."""
    known = set(contributors)
    retained = [login for login in existing_order if login in known]
    additions = sorted(
        (login for login in contributors if login not in existing_order),
        key=str.casefold,
    )
    return retained + additions
def infer_contributor_url(login: str, existing_links: dict[str, str]) -> str:
if login in existing_links:
return existing_links[login]
@@ -64,7 +90,11 @@ def render_contributor_lines(contributors: list[str], existing_links: dict[str,
def update_repo_contributors_section(content: str, contributors: list[str]) -> str:
existing_links = parse_existing_contributor_links(content)
rendered_list = render_contributor_lines(contributors, existing_links)
ordered_contributors = order_contributors_for_render(
contributors,
parse_existing_contributor_order(content),
)
rendered_list = render_contributor_lines(ordered_contributors, existing_links)
if CONTRIBUTOR_SECTION_START not in content or "\n## " not in content:
raise ValueError("README.md does not contain the expected Repo Contributors section structure.")

View File

@@ -23,6 +23,14 @@ assert.ok(
packageJson.scripts["check:warning-budget"],
"package.json should expose a warning-budget guardrail command",
);
assert.ok(
packageJson.scripts["check:readme-credits"],
"package.json should expose a README credit validation command",
);
assert.ok(
packageJson.scripts["merge:batch"],
"package.json should expose a maintainer merge-batch command",
);
assert.ok(
packageJson.scripts["audit:maintainer"],
"package.json should expose a maintainer audit command",
@@ -125,6 +133,11 @@ assert.match(
/- name: Audit npm dependencies[\s\S]*?run: npm audit --audit-level=high/,
"CI should run npm audit at high severity",
);
assert.match(
ciWorkflow,
/- name: Verify README source credits for changed skills[\s\S]*?run: npm run check:readme-credits -- --base "origin\/\$\{\{ github\.base_ref \}\}" --head HEAD/,
"PR CI should verify README source credits for changed skills",
);
assert.match(
ciWorkflow,
/main-validation-and-sync:[\s\S]*?- name: Audit npm dependencies[\s\S]*?run: npm audit --audit-level=high/,

View File

@@ -0,0 +1,74 @@
const assert = require("assert");
const path = require("path");
const mergeBatch = require(path.join(__dirname, "..", "merge_batch.cjs"));
function makeCheckRun(name, status, conclusion, startedAt, id) {
  // Build a minimal check-run fixture; all three timestamps share startedAt.
  return {
    name,
    status,
    conclusion,
    id,
    started_at: startedAt,
    completed_at: startedAt,
    created_at: startedAt,
  };
}
// parsePrList: splits on commas/whitespace and dedupes while keeping order.
{
  const parsed = mergeBatch.parsePrList("450, 449 446");
  assert.deepStrictEqual(parsed, [450, 449, 446]);
}
// extractSummaryBlock: keeps only the prose before the first "## " heading.
{
  const summary = mergeBatch.extractSummaryBlock(`Summary line 1\nSummary line 2\n\n## Change Classification\n- [ ] Skill PR`);
  assert.strictEqual(summary, "Summary line 1\nSummary line 2");
}
// normalizePrBody: the author summary survives; sections come from the template.
{
  const template = `# Pull Request Description\n\nIntro\n\n## Change Classification\n- [ ] Skill PR\n\n## Quality Bar Checklist ✅\n- [ ] Standards`;
  const body = mergeBatch.normalizePrBody(
    `Short summary\n\n## Change Classification\n- [ ] Old item`,
    template,
  );
  assert.ok(body.startsWith("Short summary"));
  assert.ok(body.includes("## Change Classification"));
  assert.ok(body.includes("## Quality Bar Checklist ✅"));
  assert.ok(!body.includes("Old item"));
}
// getRequiredCheckAliases: skill changes add the review gate to the baseline checks.
{
  const aliases = mergeBatch.getRequiredCheckAliases({ hasSkillChanges: true });
  assert.ok(aliases.some((entry) => Array.isArray(entry) && entry.includes("review")));
  assert.ok(aliases.some((entry) => Array.isArray(entry) && entry.includes("pr-policy")));
}
// summarizeRequiredCheckRuns / selectLatestCheckRuns: latest run per name wins.
{
  const runs = [
    makeCheckRun("pr-policy", "completed", "failure", "2026-04-01T10:00:00Z", 1),
    makeCheckRun("pr-policy", "completed", "success", "2026-04-01T10:10:00Z", 2),
    makeCheckRun("source-validation", "in_progress", null, "2026-04-01T10:11:00Z", 3),
    makeCheckRun("review", "completed", "success", "2026-04-01T10:12:00Z", 4),
  ];
  const summaries = mergeBatch.summarizeRequiredCheckRuns(runs, [
    ["pr-policy"],
    ["source-validation"],
    ["review", "Skill Review & Optimize"],
  ]);
  assert.deepStrictEqual(
    summaries.map((entry) => entry.state),
    ["success", "pending", "success"],
  );
  const latest = mergeBatch.selectLatestCheckRuns(runs);
  assert.strictEqual(latest.get("pr-policy").conclusion, "success");
}
// isRetryableMergeError: only "base branch modified" variants are retryable.
{
  assert.strictEqual(mergeBatch.isRetryableMergeError(new Error("Base branch was modified")), true);
  assert.strictEqual(mergeBatch.isRetryableMergeError(new Error("Something else")), false);
}
console.log("ok");

View File

@@ -22,10 +22,12 @@ const LOCAL_TEST_COMMANDS = [
[path.join(TOOL_TESTS, "installer_filters.test.js")],
[path.join(TOOL_TESTS, "installer_update_sync.test.js")],
[path.join(TOOL_TESTS, "jetski_gemini_loader.test.cjs")],
[path.join(TOOL_TESTS, "merge_batch.test.js")],
[path.join(TOOL_TESTS, "npm_package_contents.test.js")],
[path.join(TOOL_TESTS, "setup_web_sync.test.js")],
[path.join(TOOL_TESTS, "skill_filter.test.js")],
[path.join(TOOL_TESTS, "validate_skills_headings.test.js")],
[path.join(TOOL_TESTS, "validate_skills_metadata.test.js")],
[path.join(TOOL_TESTS, "workflow_contracts.test.js")],
[path.join(TOOL_TESTS, "docs_security_content.test.js")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_bundle_activation_security.py")],
@@ -36,11 +38,13 @@ const LOCAL_TEST_COMMANDS = [
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_fix_missing_skill_sections.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_fix_truncated_descriptions.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_generate_index_categories.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_repair_description_usage_summaries.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_microsoft_skills_security.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_repair_description_usage_summaries.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_readme_credits.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_microsoft_skills_security.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_repo_metadata.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_contributors.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_contributors.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_risk_labels.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_skill_source_metadata.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_validation_warning_budget.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_whatsapp_config_logging_security.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_maintainer_audit.py")],

View File

@@ -0,0 +1,383 @@
import importlib.util
import os
import subprocess
import sys
import tempfile
import unittest
from pathlib import Path
# Repository root: this test file sits three directory levels below it.
REPO_ROOT = Path(__file__).resolve().parents[3]
TOOLS_SCRIPTS_DIR = REPO_ROOT / "tools" / "scripts"
# Make tools/scripts importable so the script under test can resolve its own
# sibling imports (it imports _project_paths from that directory).
if str(TOOLS_SCRIPTS_DIR) not in sys.path:
    sys.path.insert(0, str(TOOLS_SCRIPTS_DIR))
# Holds TemporaryDirectory handles so the throwaway git repos are not
# garbage-collected (and deleted) while tests still use them.
TEMP_DIRS: list[tempfile.TemporaryDirectory] = []
def load_module(relative_path: str, module_name: str):
    """Import the file at *relative_path* (relative to REPO_ROOT) under
    *module_name*, register it in sys.modules, and return the module."""
    target = REPO_ROOT / relative_path
    spec = importlib.util.spec_from_file_location(module_name, target)
    loaded = importlib.util.module_from_spec(spec)
    assert spec.loader is not None
    # Register before executing so the module can be looked up by name while
    # its top-level code runs.
    sys.modules[module_name] = loaded
    spec.loader.exec_module(loaded)
    return loaded
# Script under test, loaded by file path under a distinct module name so it
# cannot collide with any installed module of the same name.
check_readme_credits = load_module(
    "tools/scripts/check_readme_credits.py",
    "check_readme_credits_test",
)
def git(root: Path, *args: str) -> None:
    """Run a git command inside *root*; raises CalledProcessError on failure.

    stdout/stderr are captured so passing runs keep the test output quiet.
    """
    command = ["git", *args]
    subprocess.run(
        command,
        cwd=root,
        check=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
def write_skill(root: Path, slug: str, frontmatter: str, body: str = "# Skill\n") -> Path:
    """Create or overwrite skills/<slug>/SKILL.md under *root*.

    The file gets a YAML frontmatter fence around *frontmatter* followed by a
    blank line and the markdown *body*; the written path is returned.
    """
    target_dir = root / "skills" / slug
    target_dir.mkdir(parents=True, exist_ok=True)
    target = target_dir / "SKILL.md"
    content = f"---\n{frontmatter}\n---\n\n{body}"
    target.write_text(content, encoding="utf-8")
    return target
def init_repo(readme: str, skill_files: dict[str, str]) -> Path:
    """Build a throwaway git repository and return its root path.

    The repo is created on branch ``main`` with a README.md containing
    *readme* and one skill per (slug -> frontmatter) entry in *skill_files*,
    all committed as a single "base" commit.
    """
    holder = tempfile.TemporaryDirectory()
    TEMP_DIRS.append(holder)  # keep the directory alive for the test run
    root = Path(holder.name)
    git(root, "init", "-b", "main")
    git(root, "config", "user.email", "tests@example.com")
    git(root, "config", "user.name", "Tests")
    (root / "README.md").write_text(readme, encoding="utf-8")
    for slug, frontmatter in skill_files.items():
        write_skill(root, slug, frontmatter)
    git(root, "add", ".")
    git(root, "commit", "-m", "base")
    return root
class ReadmeCreditsTests(unittest.TestCase):
    """End-to-end tests for check_readme_credits.check_readme_credits.

    Each scenario builds a throwaway git repository whose README credits
    owner/tool under "Official Sources" and other/tool under "Community
    Contributors", changes one skill on a feature branch, and inspects the
    report produced when diffing that branch against main.
    """

    # README fixture shared by every test.
    README = """# Repo
## Credits & Sources
### Official Sources
- [owner/tool](https://github.com/owner/tool)
### Community Contributors
- [other/tool](https://github.com/other/tool)
"""

    # Default frontmatter for the initial committed skill.
    SELF_SKILL = """name: example
description: Example
source: self
"""

    @staticmethod
    def _rev_parse(root: Path, ref: str) -> str:
        """Return the commit SHA that *ref* resolves to inside *root*."""
        result = subprocess.run(
            ["git", "rev-parse", ref],
            cwd=root,
            check=True,
            capture_output=True,
            text=True,
        )
        return result.stdout.strip()

    def _report_after_update(self, initial_frontmatter: str, updated_frontmatter: str):
        """Common choreography for the branch-update scenarios.

        Creates a repo whose 'example' skill starts at *initial_frontmatter*,
        rewrites it to *updated_frontmatter* on a feature branch, and returns
        the credit report for HEAD relative to main.
        """
        root = init_repo(self.README, {"example": initial_frontmatter})
        git(root, "checkout", "-b", "feature")
        write_skill(root, "example", updated_frontmatter)
        git(root, "add", "skills/example/SKILL.md")
        git(root, "commit", "-m", "update skill")
        base = self._rev_parse(root, "main")
        return check_readme_credits.check_readme_credits(root, base, "HEAD")

    def test_no_skill_changes_is_noop(self):
        """Diffing HEAD against itself touches no skill files and reports nothing."""
        root = init_repo(self.README, {"example": self.SELF_SKILL})
        base = self._rev_parse(root, "HEAD")
        report = check_readme_credits.check_readme_credits(root, base, "HEAD")
        self.assertEqual(report["skill_files"], [])
        self.assertEqual(report["warnings"], [])
        self.assertEqual(report["errors"], [])

    def test_external_source_without_source_repo_warns_only(self):
        """A community skill lacking source_repo yields a warning, not an error."""
        report = self._report_after_update(
            self.SELF_SKILL,
            """name: example
description: Example
source: community
""",
        )
        self.assertEqual(report["errors"], [])
        self.assertTrue(any("without source_repo" in warning for warning in report["warnings"]))

    def test_source_repo_must_exist_in_community_bucket_when_defaulted(self):
        """Without source_type, the repo is looked up in the community bucket,
        where other/tool is credited, so the check passes cleanly."""
        report = self._report_after_update(
            self.SELF_SKILL,
            """name: example
description: Example
source: community
source_repo: other/tool
""",
        )
        self.assertEqual(report["warnings"], [])
        self.assertEqual(report["errors"], [])

    def test_source_repo_passes_in_official_bucket(self):
        """source_type: official routes the lookup to the official bucket."""
        report = self._report_after_update(
            self.SELF_SKILL,
            """name: example
description: Example
source: community
source_type: official
source_repo: owner/tool
""",
        )
        self.assertEqual(report["warnings"], [])
        self.assertEqual(report["errors"], [])

    def test_source_repo_missing_from_required_bucket_fails(self):
        """owner/tool is only credited as official, so the defaulted community
        lookup must produce an error naming the missing bucket."""
        report = self._report_after_update(
            self.SELF_SKILL,
            """name: example
description: Example
source: community
source_repo: owner/tool
""",
        )
        self.assertEqual(report["warnings"], [])
        self.assertTrue(
            any("missing from ### Community Contributors" in error for error in report["errors"])
        )

    def test_self_source_skips_readme_lookup(self):
        """Switching a skill to source: self requires no README credit at all."""
        report = self._report_after_update(
            """name: example
description: Example
source: community
source_repo: other/tool
""",
            """name: example
description: Example
source: self
source_type: self
""",
        )
        self.assertEqual(report["warnings"], [])
        self.assertEqual(report["errors"], [])

    def test_invalid_source_type_is_rejected(self):
        """An unknown source_type value is reported as an error."""
        report = self._report_after_update(
            self.SELF_SKILL,
            """name: example
description: Example
source: community
source_type: moon
source_repo: other/tool
""",
        )
        self.assertTrue(any("invalid source_type" in error for error in report["errors"]))
# Allow running this test file directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,102 @@
import importlib.util
import sys
import tempfile
import unittest
from pathlib import Path
# Repository root: this test file sits three directory levels below it.
REPO_ROOT = Path(__file__).resolve().parents[3]
TOOLS_SCRIPTS_DIR = REPO_ROOT / "tools" / "scripts"
# Make tools/scripts importable so the script under test can resolve its own
# sibling imports.
if str(TOOLS_SCRIPTS_DIR) not in sys.path:
    sys.path.insert(0, str(TOOLS_SCRIPTS_DIR))
def load_module(relative_path: str, module_name: str):
    """Load the file at *relative_path* (relative to REPO_ROOT) as *module_name*.

    The module is registered in sys.modules before execution — consistent with
    the loader in test_readme_credits.py — so code in the loaded module that
    looks itself up by name keeps working.
    """
    module_path = REPO_ROOT / relative_path
    spec = importlib.util.spec_from_file_location(module_name, module_path)
    module = importlib.util.module_from_spec(spec)
    assert spec.loader is not None
    sys.modules[module_name] = module
    spec.loader.exec_module(module)
    return module
# Script under test, loaded by file path under a unique module name.
validate_skills = load_module("tools/scripts/validate_skills.py", "validate_skills_source_metadata")
class SkillSourceMetadataTests(unittest.TestCase):
    """Validation of the optional source_repo / source_type frontmatter fields."""

    def _write_skill(self, skills_dir: Path, name: str, frontmatter_lines: list[str]) -> None:
        """Write skills_dir/<name>/SKILL.md with *frontmatter_lines* and a
        minimal body that satisfies the 'When to Use' section requirement."""
        skill_dir = skills_dir / name
        skill_dir.mkdir(parents=True)
        content = "\n".join(
            [
                "---",
                *frontmatter_lines,
                "---",
                "",
                "# Demo",
                "",
                "## When to Use",
                "- Test scenario",
            ]
        )
        (skill_dir / "SKILL.md").write_text(content, encoding="utf-8")

    def _results_for(self, frontmatter_lines: list[str]):
        """Run the validator over a fresh skills tree holding one 'demo' skill
        with *frontmatter_lines*; return the validation results."""
        with tempfile.TemporaryDirectory() as temp_dir:
            skills_dir = Path(temp_dir) / "skills"
            self._write_skill(skills_dir, "demo", frontmatter_lines)
            return validate_skills.collect_validation_results(str(skills_dir))

    def test_valid_source_repo_and_source_type_pass(self):
        """An OWNER/REPO slug plus a known source_type produces no errors."""
        results = self._results_for(
            [
                "name: demo",
                "description: ok",
                "risk: safe",
                "source: community",
                "source_repo: openai/skills",
                "source_type: official",
            ]
        )
        self.assertEqual(results["errors"], [])

    def test_invalid_source_repo_fails_validation(self):
        """A source_repo without the OWNER/REPO shape is reported as an error."""
        results = self._results_for(
            [
                "name: demo",
                "description: ok",
                "risk: safe",
                "source: community",
                "source_repo: not-a-repo",
            ]
        )
        self.assertTrue(any("Invalid 'source_repo' format" in error for error in results["errors"]))

    def test_invalid_source_type_is_rejected(self):
        """A source_type outside the accepted set is reported as an error."""
        results = self._results_for(
            [
                "name: demo",
                "description: ok",
                "risk: safe",
                "source: community",
                "source_repo: openai/skills",
                "source_type: partner",
            ]
        )
        self.assertTrue(any("Invalid 'source_type' value" in error for error in results["errors"]))
# Allow running this test file directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -69,6 +69,55 @@ We officially thank the following contributors for their help in making this rep
self.assertEqual(updated.count("## Repo Contributors"), 1)
self.assertEqual(updated.count("## License"), 1)
def test_order_contributors_for_render_preserves_existing_order_and_appends_new(self):
    """Logins already rendered keep their README order; new logins are
    appended after them (here new-a before new-z)."""
    current_logins = ["new-z", "bob", "alice", "new-a", "github-actions[bot]"]
    previously_rendered = ["alice", "github-actions[bot]", "bob", "removed-user"]
    ordered = sync_contributors.order_contributors_for_render(
        current_logins,
        previously_rendered,
    )
    self.assertEqual(ordered, ["alice", "github-actions[bot]", "bob", "new-a", "new-z"])
def test_update_repo_contributors_section_avoids_reordering_existing_entries(self):
    """Existing contributor bullets keep their order; new logins are appended."""
    # README fixture: the "Repo Contributors" section before syncing.
    # NOTE(review): the split below expects a blank line after the thank-you
    # sentence ("...awesome!\n\n") — confirm the fixture literal preserves it.
    content = """## Repo Contributors
<a href="https://github.com/sickn33/antigravity-awesome-skills/graphs/contributors">
<img src="https://contrib.rocks/image?repo=sickn33/antigravity-awesome-skills" alt="Repository contributors" />
</a>
Made with [contrib.rocks](https://contrib.rocks).
We officially thank the following contributors for their help in making this repository awesome!
- [@alice](https://github.com/alice)
- [@github-actions[bot]](https://github.com/apps/github-actions)
- [@bob](https://github.com/bob)
## License
"""
    # The incoming contributor list is deliberately unordered.
    updated = sync_contributors.update_repo_contributors_section(
        content,
        ["bob", "new-user", "alice", "github-actions[bot]"],
    )
    # Slice out just the bullet list between the thank-you sentence and the
    # "## License" heading.
    contributor_block = updated.split(
        "We officially thank the following contributors for their help in making this repository awesome!\n\n",
        1,
    )[1].split("\n## License", 1)[0]
    self.assertEqual(
        contributor_block.strip().splitlines(),
        [
            "- [@alice](https://github.com/alice)",
            "- [@github-actions[bot]](https://github.com/apps/github-actions)",
            "- [@bob](https://github.com/bob)",
            "- [@new-user](https://github.com/new-user)",
        ],
    )
def test_parse_contributors_response_dedupes_and_sorts_order(self):
payload = [
{"login": "alice"},

View File

@@ -0,0 +1,40 @@
const assert = require("assert");

const {
  ALLOWED_FIELDS,
  SOURCE_REPO_PATTERN,
  VALID_SOURCE_TYPES,
  validateSourceMetadata,
} = require("../validate-skills.js");

// The frontmatter schema must admit both provenance fields.
assert.ok(ALLOWED_FIELDS.has("source_repo"), "source_repo should be an allowed frontmatter field");
assert.ok(ALLOWED_FIELDS.has("source_type"), "source_type should be an allowed frontmatter field");

// The slug pattern accepts OWNER/REPO and rejects anything else.
assert.match("openai/skills", SOURCE_REPO_PATTERN, "OWNER/REPO should be accepted");
assert.doesNotMatch("not-a-repo", SOURCE_REPO_PATTERN, "source_repo must require OWNER/REPO");

// All three known source types are recognized.
assert.ok(VALID_SOURCE_TYPES.has("official"));
assert.ok(VALID_SOURCE_TYPES.has("community"));
assert.ok(VALID_SOURCE_TYPES.has("self"));

// Well-formed metadata produces no errors.
const validErrors = validateSourceMetadata(
  { source_repo: "openai/skills", source_type: "official" },
  "demo-skill",
);
assert.deepStrictEqual(validErrors, [], "valid source metadata should pass");

// A malformed source_repo is reported.
const badRepoErrors = validateSourceMetadata(
  { source_repo: "invalid", source_type: "official" },
  "demo-skill",
);
assert.ok(
  badRepoErrors.some((error) => error.includes("source_repo must match OWNER/REPO")),
  "invalid source_repo should fail",
);

// An unknown source_type is reported.
const badTypeErrors = validateSourceMetadata(
  { source_repo: "openai/skills", source_type: "partner" },
  "demo-skill",
);
assert.ok(
  badTypeErrors.some((error) => error.includes("source_type must be one of")),
  "invalid source_type should fail",
);

console.log("ok");

View File

@@ -32,11 +32,15 @@ const MAX_NAME_LENGTH = 64;
const MAX_DESCRIPTION_LENGTH = 1024;
const MAX_COMPATIBILITY_LENGTH = 500;
const MAX_SKILL_LINES = 500;
// source_repo must be a GitHub-style OWNER/REPO slug (anchored whole-string).
const SOURCE_REPO_PATTERN = /^[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+$/;
// Closed set of accepted source_type values.
const VALID_SOURCE_TYPES = new Set(["official", "community", "self"]);
const ALLOWED_FIELDS = new Set([
"name",
"description",
"risk",
"source",
"source_repo",
"source_type",
"license",
"compatibility",
"metadata",
@@ -133,6 +137,36 @@ function addStrictSectionErrors(label, missing, baselineSet) {
}
}
// Validate the optional source_repo / source_type frontmatter fields for the
// skill identified by skillId. Both fields may be absent; when present, each
// is checked for string length first, then for its specific shape/value.
// Returns an array of error strings (empty when everything is valid).
function validateSourceMetadata(data, skillId) {
  const errors = [];

  if (data.source_repo !== undefined) {
    const repoFieldError = validateStringField("source_repo", data.source_repo, {
      min: 3,
      max: 256,
    });
    if (repoFieldError) {
      errors.push(`${repoFieldError} (${skillId})`);
    } else if (!SOURCE_REPO_PATTERN.test(String(data.source_repo).trim())) {
      errors.push(`source_repo must match OWNER/REPO. (${skillId})`);
    }
  }

  if (data.source_type !== undefined) {
    const typeFieldError = validateStringField("source_type", data.source_type, {
      min: 4,
      max: 16,
    });
    if (typeFieldError) {
      errors.push(`${typeFieldError} (${skillId})`);
    } else if (!VALID_SOURCE_TYPES.has(String(data.source_type).trim())) {
      errors.push(`source_type must be one of official, community, self. (${skillId})`);
    }
  }

  return errors;
}
function run() {
const skillIds = listSkillIds(SKILLS_DIR);
const baseline = loadBaseline();
@@ -221,6 +255,8 @@ function run() {
}
}
validateSourceMetadata(data, skillId).forEach(addError);
if (data["allowed-tools"] !== undefined) {
if (typeof data["allowed-tools"] !== "string") {
addError(
@@ -354,6 +390,10 @@ if (require.main === module) {
}
// Public surface consumed by the tool tests (constants plus helpers).
module.exports = {
  ALLOWED_FIELDS,
  SOURCE_REPO_PATTERN,
  VALID_SOURCE_TYPES,
  hasUseSection,
  run,
  validateSourceMetadata,
};

View File

@@ -35,6 +35,8 @@ WHEN_TO_USE_PATTERNS = [
re.compile(r"^##\s+Use\s+this\s+skill\s+when", re.MULTILINE | re.IGNORECASE),
re.compile(r"^##\s+When\s+to\s+Use\s+This\s+Skill", re.MULTILINE | re.IGNORECASE),
]
# GitHub-style OWNER/REPO slug, anchored to the whole string.
SOURCE_REPO_PATTERN = re.compile(r"^[A-Za-z0-9_.-]+/[A-Za-z0-9_.-]+$")
# Closed set of accepted values for the optional source_type field.
VALID_SOURCE_TYPES = {"official", "community", "self"}
def has_when_to_use_section(content):
    """Return True if *content* matches any recognized 'when to use' heading
    pattern from WHEN_TO_USE_PATTERNS."""
    for heading_pattern in WHEN_TO_USE_PATTERNS:
        if heading_pattern.search(content):
            return True
    return False
@@ -147,6 +149,20 @@ def collect_validation_results(skills_dir, strict_mode=False):
if strict_mode: errors.append(msg.replace("⚠️", ""))
else: warnings.append(msg)
source_repo = metadata.get("source_repo")
if source_repo is not None:
if not isinstance(source_repo, str) or not SOURCE_REPO_PATTERN.fullmatch(source_repo.strip()):
errors.append(
f"{rel_path}: Invalid 'source_repo' format. Must be OWNER/REPO, got '{source_repo}'"
)
source_type = metadata.get("source_type")
if source_type is not None:
if not isinstance(source_type, str) or source_type not in VALID_SOURCE_TYPES:
errors.append(
f"{rel_path}: Invalid 'source_type' value. Must be one of {sorted(VALID_SOURCE_TYPES)}"
)
# Date Added Validation (optional field)
if "date_added" in metadata:
if not date_pattern.match(metadata["date_added"]):