fix(ci): Track canonical plugin drift
Treat generated plugin mirrors and marketplace outputs as managed canonical artifacts so the main-branch sync bot can stage and commit them instead of failing on unmanaged drift. Ignore web-app coverage output during maintainer runs and update the mirrored Office unpack scripts so plugin copies stay aligned with the hardened source implementations.
This commit is contained in:
1
apps/web-app/.gitignore
vendored
1
apps/web-app/.gitignore
vendored
@@ -10,6 +10,7 @@ lerna-debug.log*
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
coverage
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
|
||||
@@ -2,22 +2,47 @@
|
||||
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
||||
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
||||
return stat.S_ISLNK(member.external_attr >> 16)
|
||||
|
||||
|
||||
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
||||
destination = output_root / member_name
|
||||
return destination.resolve().is_relative_to(output_root.resolve())
|
||||
|
||||
|
||||
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
||||
destination = output_root / member.filename
|
||||
if member.is_dir():
|
||||
destination.mkdir(parents=True, exist_ok=True)
|
||||
return
|
||||
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
||||
shutil.copyfileobj(source, target)
|
||||
|
||||
|
||||
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
    """Extract a zip-based Office archive into *output_dir*, rejecting unsafe entries.

    Every member is validated before anything is written: symlink members and
    members whose resolved destination would escape the output directory
    (zip-slip) raise ``ValueError``. Vetted members are then extracted exactly
    once via ``_extract_member``.

    Raises:
        ValueError: if any archive entry is a symlink or resolves outside
            the output directory.
    """
    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)
    output_root = output_path.resolve()

    with zipfile.ZipFile(input_file) as archive:
        # BUG FIX: the safety checks were nested under a leftover path guard,
        # so symlinks with in-tree names were never rejected. Validate every
        # member unconditionally, and validate all of them before writing
        # anything so a malicious archive leaves no partial extraction.
        for member in archive.infolist():
            if _is_zip_symlink(member):
                raise ValueError(f"Unsafe archive entry: {member.filename}")
            if not _is_safe_destination(output_root, member.filename):
                raise ValueError(f"Unsafe archive entry: {member.filename}")

        # BUG FIX: a stale `archive.extractall(output_path)` ran alongside this
        # loop, extracting every member twice. Extract manually only, so only
        # vetted members are written, each exactly once.
        for member in archive.infolist():
            _extract_member(archive, member, output_path)
|
||||
|
||||
|
||||
def pretty_print_xml(output_path: Path):
|
||||
|
||||
@@ -2,22 +2,47 @@
|
||||
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
||||
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
||||
return stat.S_ISLNK(member.external_attr >> 16)
|
||||
|
||||
|
||||
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
||||
destination = output_root / member_name
|
||||
return destination.resolve().is_relative_to(output_root.resolve())
|
||||
|
||||
|
||||
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
||||
destination = output_root / member.filename
|
||||
if member.is_dir():
|
||||
destination.mkdir(parents=True, exist_ok=True)
|
||||
return
|
||||
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
||||
shutil.copyfileobj(source, target)
|
||||
|
||||
|
||||
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
    """Extract a zip-based Office archive into *output_dir*, rejecting unsafe entries.

    Every member is validated before anything is written: symlink members and
    members whose resolved destination would escape the output directory
    (zip-slip) raise ``ValueError``. Vetted members are then extracted exactly
    once via ``_extract_member``.

    Raises:
        ValueError: if any archive entry is a symlink or resolves outside
            the output directory.
    """
    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)
    output_root = output_path.resolve()

    with zipfile.ZipFile(input_file) as archive:
        # BUG FIX: the safety checks were nested under a leftover path guard,
        # so symlinks with in-tree names were never rejected. Validate every
        # member unconditionally, and validate all of them before writing
        # anything so a malicious archive leaves no partial extraction.
        for member in archive.infolist():
            if _is_zip_symlink(member):
                raise ValueError(f"Unsafe archive entry: {member.filename}")
            if not _is_safe_destination(output_root, member.filename):
                raise ValueError(f"Unsafe archive entry: {member.filename}")

        # BUG FIX: a stale `archive.extractall(output_path)` ran alongside this
        # loop, extracting every member twice. Extract manually only, so only
        # vetted members are written, each exactly once.
        for member in archive.infolist():
            _extract_member(archive, member, output_path)
|
||||
|
||||
|
||||
def pretty_print_xml(output_path: Path):
|
||||
|
||||
@@ -2,22 +2,47 @@
|
||||
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
||||
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
||||
return stat.S_ISLNK(member.external_attr >> 16)
|
||||
|
||||
|
||||
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
||||
destination = output_root / member_name
|
||||
return destination.resolve().is_relative_to(output_root.resolve())
|
||||
|
||||
|
||||
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
||||
destination = output_root / member.filename
|
||||
if member.is_dir():
|
||||
destination.mkdir(parents=True, exist_ok=True)
|
||||
return
|
||||
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
||||
shutil.copyfileobj(source, target)
|
||||
|
||||
|
||||
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
    """Extract a zip-based Office archive into *output_dir*, rejecting unsafe entries.

    Every member is validated before anything is written: symlink members and
    members whose resolved destination would escape the output directory
    (zip-slip) raise ``ValueError``. Vetted members are then extracted exactly
    once via ``_extract_member``.

    Raises:
        ValueError: if any archive entry is a symlink or resolves outside
            the output directory.
    """
    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)
    output_root = output_path.resolve()

    with zipfile.ZipFile(input_file) as archive:
        # BUG FIX: the safety checks were nested under a leftover path guard,
        # so symlinks with in-tree names were never rejected. Validate every
        # member unconditionally, and validate all of them before writing
        # anything so a malicious archive leaves no partial extraction.
        for member in archive.infolist():
            if _is_zip_symlink(member):
                raise ValueError(f"Unsafe archive entry: {member.filename}")
            if not _is_safe_destination(output_root, member.filename):
                raise ValueError(f"Unsafe archive entry: {member.filename}")

        # BUG FIX: a stale `archive.extractall(output_path)` ran alongside this
        # loop, extracting every member twice. Extract manually only, so only
        # vetted members are written, each exactly once.
        for member in archive.infolist():
            _extract_member(archive, member, output_path)
|
||||
|
||||
|
||||
def pretty_print_xml(output_path: Path):
|
||||
|
||||
@@ -2,22 +2,47 @@
|
||||
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
||||
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
||||
return stat.S_ISLNK(member.external_attr >> 16)
|
||||
|
||||
|
||||
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
||||
destination = output_root / member_name
|
||||
return destination.resolve().is_relative_to(output_root.resolve())
|
||||
|
||||
|
||||
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
||||
destination = output_root / member.filename
|
||||
if member.is_dir():
|
||||
destination.mkdir(parents=True, exist_ok=True)
|
||||
return
|
||||
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
||||
shutil.copyfileobj(source, target)
|
||||
|
||||
|
||||
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
    """Extract a zip-based Office archive into *output_dir*, rejecting unsafe entries.

    Every member is validated before anything is written: symlink members and
    members whose resolved destination would escape the output directory
    (zip-slip) raise ``ValueError``. Vetted members are then extracted exactly
    once via ``_extract_member``.

    Raises:
        ValueError: if any archive entry is a symlink or resolves outside
            the output directory.
    """
    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)
    output_root = output_path.resolve()

    with zipfile.ZipFile(input_file) as archive:
        # BUG FIX: the safety checks were nested under a leftover path guard,
        # so symlinks with in-tree names were never rejected. Validate every
        # member unconditionally, and validate all of them before writing
        # anything so a malicious archive leaves no partial extraction.
        for member in archive.infolist():
            if _is_zip_symlink(member):
                raise ValueError(f"Unsafe archive entry: {member.filename}")
            if not _is_safe_destination(output_root, member.filename):
                raise ValueError(f"Unsafe archive entry: {member.filename}")

        # BUG FIX: a stale `archive.extractall(output_path)` ran alongside this
        # loop, extracting every member twice. Extract manually only, so only
        # vetted members are written, each exactly once.
        for member in archive.infolist():
            _extract_member(archive, member, output_path)
|
||||
|
||||
|
||||
def pretty_print_xml(output_path: Path):
|
||||
|
||||
@@ -2,22 +2,47 @@
|
||||
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
||||
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
||||
return stat.S_ISLNK(member.external_attr >> 16)
|
||||
|
||||
|
||||
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
||||
destination = output_root / member_name
|
||||
return destination.resolve().is_relative_to(output_root.resolve())
|
||||
|
||||
|
||||
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
||||
destination = output_root / member.filename
|
||||
if member.is_dir():
|
||||
destination.mkdir(parents=True, exist_ok=True)
|
||||
return
|
||||
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
||||
shutil.copyfileobj(source, target)
|
||||
|
||||
|
||||
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
    """Extract a zip-based Office archive into *output_dir*, rejecting unsafe entries.

    Every member is validated before anything is written: symlink members and
    members whose resolved destination would escape the output directory
    (zip-slip) raise ``ValueError``. Vetted members are then extracted exactly
    once via ``_extract_member``.

    Raises:
        ValueError: if any archive entry is a symlink or resolves outside
            the output directory.
    """
    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)
    output_root = output_path.resolve()

    with zipfile.ZipFile(input_file) as archive:
        # BUG FIX: the safety checks were nested under a leftover path guard,
        # so symlinks with in-tree names were never rejected. Validate every
        # member unconditionally, and validate all of them before writing
        # anything so a malicious archive leaves no partial extraction.
        for member in archive.infolist():
            if _is_zip_symlink(member):
                raise ValueError(f"Unsafe archive entry: {member.filename}")
            if not _is_safe_destination(output_root, member.filename):
                raise ValueError(f"Unsafe archive entry: {member.filename}")

        # BUG FIX: a stale `archive.extractall(output_path)` ran alongside this
        # loop, extracting every member twice. Extract manually only, so only
        # vetted members are written, each exactly once.
        for member in archive.infolist():
            _extract_member(archive, member, output_path)
|
||||
|
||||
|
||||
def pretty_print_xml(output_path: Path):
|
||||
|
||||
@@ -2,22 +2,47 @@
|
||||
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
||||
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
||||
return stat.S_ISLNK(member.external_attr >> 16)
|
||||
|
||||
|
||||
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
||||
destination = output_root / member_name
|
||||
return destination.resolve().is_relative_to(output_root.resolve())
|
||||
|
||||
|
||||
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
||||
destination = output_root / member.filename
|
||||
if member.is_dir():
|
||||
destination.mkdir(parents=True, exist_ok=True)
|
||||
return
|
||||
|
||||
destination.parent.mkdir(parents=True, exist_ok=True)
|
||||
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
||||
shutil.copyfileobj(source, target)
|
||||
|
||||
|
||||
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
    """Extract a zip-based Office archive into *output_dir*, rejecting unsafe entries.

    Every member is validated before anything is written: symlink members and
    members whose resolved destination would escape the output directory
    (zip-slip) raise ``ValueError``. Vetted members are then extracted exactly
    once via ``_extract_member``.

    Raises:
        ValueError: if any archive entry is a symlink or resolves outside
            the output directory.
    """
    output_path = Path(output_dir)
    output_path.mkdir(parents=True, exist_ok=True)
    output_root = output_path.resolve()

    with zipfile.ZipFile(input_file) as archive:
        # BUG FIX: the safety checks were nested under a leftover path guard,
        # so symlinks with in-tree names were never rejected. Validate every
        # member unconditionally, and validate all of them before writing
        # anything so a malicious archive leaves no partial extraction.
        for member in archive.infolist():
            if _is_zip_symlink(member):
                raise ValueError(f"Unsafe archive entry: {member.filename}")
            if not _is_safe_destination(output_root, member.filename):
                raise ValueError(f"Unsafe archive entry: {member.filename}")

        # BUG FIX: a stale `archive.extractall(output_path)` ran alongside this
        # loop, extracting every member twice. Extract manually only, so only
        # vetted members are written, each exactly once.
        for member in archive.infolist():
            _extract_member(archive, member, output_path)
|
||||
|
||||
|
||||
def pretty_print_xml(output_path: Path):
|
||||
|
||||
@@ -5,9 +5,13 @@
|
||||
"data/skills_index.json",
|
||||
"data/catalog.json",
|
||||
"data/bundles.json",
|
||||
"data/plugin-compatibility.json",
|
||||
"data/aliases.json",
|
||||
"apps/web-app/public/sitemap.xml",
|
||||
"apps/web-app/public/skills.json.backup"
|
||||
"apps/web-app/public/skills.json.backup",
|
||||
".agents/plugins/",
|
||||
".claude-plugin/",
|
||||
"plugins/"
|
||||
],
|
||||
"mixedFiles": [
|
||||
"README.md",
|
||||
|
||||
@@ -21,6 +21,21 @@ function normalizeRepoPath(filePath) {
|
||||
return String(filePath || "").replace(/\\/g, "/").replace(/^\.\//, "");
|
||||
}
|
||||
|
||||
function matchesContractEntry(filePath, entry) {
  // Compare a changed file against one contract entry. Entries ending in
  // "/" act as directory prefixes; every other entry must match exactly.
  const candidate = normalizeRepoPath(filePath);
  const pattern = normalizeRepoPath(entry);

  if (!pattern) {
    return false;
  }

  return pattern.endsWith("/")
    ? candidate.startsWith(pattern)
    : candidate === pattern;
}
|
||||
|
||||
function escapeRegExp(value) {
  // Prefix each RegExp metacharacter with a backslash so the result can be
  // embedded in a pattern as a literal string.
  const metachars = /[.*+?^${}()|[\]\\]/g;
  return value.replace(metachars, "\\$&");
}
|
||||
@@ -57,11 +72,11 @@ function getManagedFiles(contract, options = {}) {
|
||||
}
|
||||
|
||||
function isDerivedFile(filePath, contract) {
  // BUG FIX: a stale `return contract.derivedFiles.includes(...)` line from
  // the previous implementation preceded this one, making the prefix-aware
  // check unreachable. Keep only the entry matcher, which handles both exact
  // paths and "dir/" prefixes via matchesContractEntry.
  return contract.derivedFiles.some((entry) => matchesContractEntry(filePath, entry));
}
|
||||
|
||||
function isMixedFile(filePath, contract) {
  // BUG FIX: a stale `return contract.mixedFiles.includes(...)` line from the
  // previous implementation preceded this one, making the prefix-aware check
  // unreachable. Keep only the entry matcher, which handles both exact paths
  // and "dir/" prefixes via matchesContractEntry.
  return contract.mixedFiles.some((entry) => matchesContractEntry(filePath, entry));
}
|
||||
|
||||
function isDocLikeFile(filePath) {
|
||||
@@ -177,5 +192,6 @@ module.exports = {
|
||||
isMixedFile,
|
||||
loadWorkflowContract,
|
||||
normalizeRepoPath,
|
||||
matchesContractEntry,
|
||||
requiresReferencesValidation,
|
||||
};
|
||||
|
||||
@@ -60,6 +60,10 @@ assert.strictEqual(
|
||||
for (const filePath of [
|
||||
"apps/web-app/public/sitemap.xml",
|
||||
"apps/web-app/public/skills.json.backup",
|
||||
"data/plugin-compatibility.json",
|
||||
".agents/plugins/",
|
||||
".claude-plugin/",
|
||||
"plugins/",
|
||||
]) {
|
||||
assert.ok(
|
||||
generatedFiles.derivedFiles.includes(filePath),
|
||||
@@ -67,6 +71,13 @@ for (const filePath of [
|
||||
);
|
||||
}
|
||||
|
||||
const webAppGitignore = readText("apps/web-app/.gitignore");
|
||||
assert.match(
|
||||
webAppGitignore,
|
||||
/^coverage$/m,
|
||||
"web-app coverage output should be ignored so maintainer sync jobs stay clean",
|
||||
);
|
||||
|
||||
for (const filePath of [
|
||||
"README.md",
|
||||
"package.json",
|
||||
|
||||
@@ -16,7 +16,11 @@ const contract = {
|
||||
"data/skills_index.json",
|
||||
"data/catalog.json",
|
||||
"data/bundles.json",
|
||||
"data/plugin-compatibility.json",
|
||||
"data/aliases.json",
|
||||
".agents/plugins/",
|
||||
".claude-plugin/",
|
||||
"plugins/",
|
||||
],
|
||||
mixedFiles: ["README.md"],
|
||||
releaseManagedFiles: ["CHANGELOG.md", "package.json", "package-lock.json", "README.md"],
|
||||
@@ -45,6 +49,20 @@ assert.deepStrictEqual(
|
||||
getDirectDerivedChanges(["skills/example/SKILL.md", "data/catalog.json"], contract),
|
||||
["data/catalog.json"],
|
||||
);
|
||||
assert.deepStrictEqual(
|
||||
getDirectDerivedChanges(
|
||||
[
|
||||
"plugins/antigravity-awesome-skills/skills/docx-official/ooxml/scripts/unpack.py",
|
||||
".agents/plugins/marketplace.json",
|
||||
"skills/example/SKILL.md",
|
||||
],
|
||||
contract,
|
||||
),
|
||||
[
|
||||
"plugins/antigravity-awesome-skills/skills/docx-official/ooxml/scripts/unpack.py",
|
||||
".agents/plugins/marketplace.json",
|
||||
],
|
||||
);
|
||||
|
||||
const changelog = [
|
||||
"## [7.7.0] - 2026-03-13 - \"Merge Friction Reduction\"",
|
||||
|
||||
Reference in New Issue
Block a user