fix(security): Harden skill activation and loading flows

Harden batch activation, dev refresh gating, Microsoft sync path
handling, and Jetski skill loading against command injection,
symlink traversal, and client-side star tampering.

Add regression coverage for the security-sensitive paths and
update the internal triage addendum for the Jetski loader fix.
This commit is contained in:
sickn33
2026-03-18 18:49:15 +01:00
parent 55033462ff
commit 4883b0dbb4
21 changed files with 410 additions and 96 deletions

View File

@@ -56,6 +56,13 @@ function isLoopbackHost(hostname) {
|| host.startsWith('127.');
}
/**
 * True when the peer's socket address is a loopback address:
 * IPv6 `::1`, any IPv4 127.0.0.0/8 address, or the IPv4-mapped
 * IPv6 form `::ffff:127.x.x.x`.
 */
function isLoopbackRemoteAddress(remoteAddress) {
    const normalized = normalizeHost(remoteAddress);
    if (normalized === '::1') {
        return true;
    }
    const loopbackPrefixes = ['127.', '::ffff:127.'];
    return loopbackPrefixes.some((prefix) => normalized.startsWith(prefix));
}
function getRequestHost(req) {
const hostHeader = req.headers?.host || '';
@@ -70,8 +77,12 @@ function getRequestHost(req) {
}
}
/**
 * Best-effort peer address for an incoming request.
 * Prefers the modern `req.socket` property, falls back to the legacy
 * `req.connection` alias, and returns '' when neither yields an address.
 */
function getRequestRemoteAddress(req) {
    const fromSocket = req.socket?.remoteAddress;
    if (fromSocket) {
        return fromSocket;
    }
    return req.connection?.remoteAddress || '';
}
function isDevLoopbackRequest(req) {
return isLoopbackHost(getRequestHost(req));
return isLoopbackRemoteAddress(getRequestRemoteAddress(req));
}
function isTokenAuthorized(req) {

View File

@@ -105,6 +105,29 @@ describe('refresh-skills plugin security', () => {
host: '192.168.1.1:5173',
origin: 'http://192.168.1.1:5173',
},
socket: {
remoteAddress: '192.168.1.1',
},
};
const res = createResponse();
await handler(req, res);
expect(res.statusCode).toBe(403);
expect(JSON.parse(res.body).error).toMatch('loopback');
});
it('rejects requests from a non-loopback remote address even when host headers look local', async () => {
const handler = await loadRefreshHandler();
const req = {
method: 'POST',
headers: {
host: 'localhost:5173',
origin: 'http://localhost:5173',
},
socket: {
remoteAddress: '203.0.113.7',
},
};
const res = createResponse();
@@ -123,6 +146,9 @@ describe('refresh-skills plugin security', () => {
host: 'localhost:5173',
origin: 'http://localhost:5173',
},
socket: {
remoteAddress: '127.0.0.1',
},
};
const res = createResponse();
@@ -139,6 +165,9 @@ describe('refresh-skills plugin security', () => {
host: 'localhost:5173',
origin: 'http://localhost:5173',
},
socket: {
remoteAddress: '127.0.0.1',
},
};
const res = createResponse();
@@ -156,6 +185,9 @@ describe('refresh-skills plugin security', () => {
host: '[::1]:5173',
origin: 'http://[::1]:5173',
},
socket: {
remoteAddress: '::1',
},
};
const res = createResponse();

View File

@@ -4,11 +4,30 @@ import { useSkillStars } from '../useSkillStars';
const STORAGE_KEY = 'user_stars';
const supabaseMocks = vi.hoisted(() => {
const maybeSingle = vi.fn();
const upsert = vi.fn();
const select = vi.fn(() => ({ eq: vi.fn(() => ({ maybeSingle })) }));
const from = vi.fn(() => ({ select, upsert }));
return { maybeSingle, upsert, select, from };
});
vi.mock('../../lib/supabase', () => ({
supabase: {
from: supabaseMocks.from,
},
}));
describe('useSkillStars', () => {
beforeEach(() => {
// Clear localStorage mock before each test
localStorage.clear();
vi.clearAllMocks();
supabaseMocks.from.mockReturnValue({ select: supabaseMocks.select, upsert: supabaseMocks.upsert });
supabaseMocks.select.mockReturnValue({ eq: vi.fn(() => ({ maybeSingle: supabaseMocks.maybeSingle })) });
supabaseMocks.maybeSingle.mockResolvedValue({ data: null, error: null });
supabaseMocks.upsert.mockResolvedValue({ error: null });
});
describe('Initialization', () => {
@@ -27,6 +46,19 @@ describe('useSkillStars', () => {
expect(result.current.hasStarred).toBe(false);
});
it('should overlay a local star on top of the shared count', async () => {
localStorage.setItem(STORAGE_KEY, JSON.stringify({ 'test-skill': true }));
supabaseMocks.maybeSingle.mockResolvedValue({ data: { star_count: 7 }, error: null });
const { result } = renderHook(() => useSkillStars('test-skill'));
await waitFor(() => {
expect(result.current.starCount).toBe(8);
});
expect(result.current.hasStarred).toBe(true);
});
it('should read starred status from localStorage on init', () => {
localStorage.setItem(STORAGE_KEY, JSON.stringify({ 'test-skill': true }));
@@ -94,6 +126,7 @@ describe('useSkillStars', () => {
expect(result.current.starCount).toBe(1);
expect(result.current.hasStarred).toBe(true);
});
expect(supabaseMocks.upsert).not.toHaveBeenCalled();
});
it('should persist starred status to localStorage', async () => {
@@ -107,6 +140,7 @@ describe('useSkillStars', () => {
STORAGE_KEY,
JSON.stringify({ 'persist-skill': true })
);
expect(supabaseMocks.upsert).not.toHaveBeenCalled();
});
it('should set loading state during operation', async () => {

View File

@@ -10,7 +10,6 @@ vi.mock('../../lib/supabase', () => ({
supabase: {
from,
},
sharedStarWritesEnabled: false,
}));
describe('useSkillStars shared writes', () => {

View File

@@ -1,5 +1,5 @@
import { useState, useEffect, useCallback } from 'react';
import { sharedStarWritesEnabled, supabase } from '../lib/supabase';
import { supabase } from '../lib/supabase';
const STORAGE_KEY = 'user_stars';
@@ -68,7 +68,7 @@ export function useSkillStars(skillId: string | undefined): UseSkillStarsReturn
.maybeSingle();
if (!error && data) {
setStarCount(data.star_count);
setStarCount(data.star_count + (userStars[skillId] ? 1 : 0));
}
} catch (err) {
console.warn('Failed to fetch star count:', err);
@@ -81,7 +81,7 @@ export function useSkillStars(skillId: string | undefined): UseSkillStarsReturn
/**
* Handle star button click
* Prevents double-starring, updates optimistically, syncs to Supabase
* Prevents double-starring, updates optimistically, persists local state
*/
const handleStarClick = useCallback(async () => {
if (!skillId || isLoading) return;
@@ -100,48 +100,8 @@ export function useSkillStars(skillId: string | undefined): UseSkillStarsReturn
// Persist to localStorage
const updatedStars = { ...userStars, [skillId]: true };
saveUserStarsToStorage(updatedStars);
// Sync to Supabase if available
if (supabase && sharedStarWritesEnabled) {
try {
// Fetch current count first
const { data: current } = await supabase
.from('skill_stars')
.select('star_count')
.eq('skill_id', skillId)
.maybeSingle();
const newCount = (current?.star_count || 0) + 1;
// Upsert: insert or update in one call
const { error: upsertError } = await supabase
.from('skill_stars')
.upsert(
{ skill_id: skillId, star_count: newCount },
{ onConflict: 'skill_id' }
);
if (upsertError) {
console.warn('Failed to upsert star count:', upsertError);
} else {
setStarCount(newCount);
}
} catch (err) {
console.warn('Failed to sync star to Supabase:', err);
}
}
} catch (error) {
// Rollback optimistic update on error
console.error('Failed to star skill:', error);
setStarCount(prev => Math.max(0, prev - 1));
setHasStarred(false);
// Remove from localStorage on error
const userStars = getUserStarsFromStorage();
if (userStars[skillId]) {
const { [skillId]: _, ...rest } = userStars;
saveUserStarsToStorage(rest);
}
} finally {
setIsLoading(false);
}

View File

@@ -11,10 +11,6 @@ const supabaseAnonKey =
(import.meta as ImportMeta & { env: Record<string, string> }).env.VITE_SUPABASE_ANON_KEY
|| 'sb_publishable_CyVwHGbtT80AuDFmXNkc9Q_YNcamTGg'
export const sharedStarWritesEnabled =
((import.meta as ImportMeta & { env: Record<string, string> }).env.VITE_ENABLE_SHARED_STAR_WRITES ?? '')
.toLowerCase() === 'true'
// Create a single supabase client for interacting with the database
export const supabase: SupabaseClient = createClient(supabaseUrl, supabaseAnonKey)

View File

@@ -83,16 +83,34 @@ export async function loadSkillBodies(
): Promise<string[]> {
const bodies: string[] = [];
const rootPath = path.resolve(skillsRoot);
const rootRealPath = await fs.promises.realpath(rootPath);
for (const meta of metas) {
const fullPath = path.resolve(rootPath, meta.path, "SKILL.md");
const relativePath = path.relative(rootPath, fullPath);
const skillDirPath = path.resolve(rootPath, meta.path);
const relativePath = path.relative(rootPath, skillDirPath);
if (relativePath.startsWith("..") || path.isAbsolute(relativePath)) {
throw new Error(`Skill path escapes skills root: ${meta.id}`);
}
const text = await fs.promises.readFile(fullPath, "utf8");
const skillDirStat = await fs.promises.lstat(skillDirPath);
if (!skillDirStat.isDirectory() || skillDirStat.isSymbolicLink()) {
throw new Error(`Skill directory must be a regular directory inside the skills root: ${meta.id}`);
}
const fullPath = path.join(skillDirPath, "SKILL.md");
const skillFileStat = await fs.promises.lstat(fullPath);
if (!skillFileStat.isFile() || skillFileStat.isSymbolicLink()) {
throw new Error(`SKILL.md must be a regular file inside the skills root: ${meta.id}`);
}
const realPath = await fs.promises.realpath(fullPath);
const realRelativePath = path.relative(rootRealPath, realPath);
if (realRelativePath.startsWith("..") || path.isAbsolute(realRelativePath)) {
throw new Error(`SKILL.md resolves outside the skills root: ${meta.id}`);
}
const text = await fs.promises.readFile(realPath, "utf8");
bodies.push(text);
}

View File

@@ -0,0 +1,22 @@
# Security Findings Triage Addendum (2026-03-18)
This addendum supersedes the previous Jetski loader assessment in
`security-findings-triage-2026-03-15.md`.
## Correction
- Finding: `Example loader trusts manifest paths, enabling file read`
- Path: `docs/integrations/jetski-gemini-loader/loader.ts`
- Previous triage status on 2026-03-15: `obsolete/not reproducible on current HEAD`
- Corrected assessment: the loader was still reproducible via a symlinked
`SKILL.md` that resolved outside `skillsRoot`. A local proof read the linked
file contents successfully.
## Current Status
- The loader now rejects symlinked skill directories and symlinked `SKILL.md`
files.
- The loader now resolves the real path for `SKILL.md` and rejects any target
outside the configured `skillsRoot`.
- Regression coverage lives in
`tools/scripts/tests/jetski_gemini_loader.test.js`.

View File

@@ -13,6 +13,7 @@ echo Activating Antigravity skills...
:: --- ARGUMENT HANDLING ---
set "DO_CLEAR=0"
set "EXTRA_ARGS="
set "SKILLS_LIST_FILE=%TEMP%\skills_list_%RANDOM%_%RANDOM%.txt"
for %%a in (%*) do (
if /I "%%a"=="--clear" (
@@ -63,37 +64,76 @@ mkdir "%SKILLS_DIR%" 2>nul
:: --- BUNDLE EXPANSION ---
set "ESSENTIALS="
echo Expanding bundles...
if exist "%SKILLS_LIST_FILE%" del "%SKILLS_LIST_FILE%" 2>nul
python --version >nul 2>&1
if not errorlevel 1 (
:: Safely pass all arguments to Python (filtering out --clear)
python "%~dp0..\tools\scripts\get-bundle-skills.py" !EXTRA_ARGS! > "%TEMP%\skills_list.txt" 2>nul
python "%~dp0..\tools\scripts\get-bundle-skills.py" !EXTRA_ARGS! > "%SKILLS_LIST_FILE%" 2>nul
:: If no other arguments, expand Essentials
if "!EXTRA_ARGS!"=="" python "%~dp0..\tools\scripts\get-bundle-skills.py" Essentials > "%TEMP%\skills_list.txt" 2>nul
if exist "%TEMP%\skills_list.txt" (
set /p ESSENTIALS=<"%TEMP%\skills_list.txt"
del "%TEMP%\skills_list.txt"
)
if "!EXTRA_ARGS!"=="" python "%~dp0..\tools\scripts\get-bundle-skills.py" Essentials > "%SKILLS_LIST_FILE%" 2>nul
)
:: Fallback if Python fails or returned empty
if "!ESSENTIALS!"=="" (
if not exist "%SKILLS_LIST_FILE%" (
if "!EXTRA_ARGS!"=="" (
echo Using default essentials...
set "ESSENTIALS=api-security-best-practices auth-implementation-patterns backend-security-coder frontend-security-coder cc-skill-security-review pci-compliance frontend-design react-best-practices react-patterns nextjs-best-practices tailwind-patterns form-cro seo-audit ui-ux-pro-max 3d-web-experience canvas-design mobile-design scroll-experience senior-fullstack frontend-developer backend-dev-guidelines api-patterns database-design stripe-integration agent-evaluation langgraph mcp-builder prompt-engineering ai-agents-architect rag-engineer llm-app-patterns rag-implementation prompt-caching context-window-management langfuse"
> "%SKILLS_LIST_FILE%" (
echo api-security-best-practices
echo auth-implementation-patterns
echo backend-security-coder
echo frontend-security-coder
echo cc-skill-security-review
echo pci-compliance
echo frontend-design
echo react-best-practices
echo react-patterns
echo nextjs-best-practices
echo tailwind-patterns
echo form-cro
echo seo-audit
echo ui-ux-pro-max
echo 3d-web-experience
echo canvas-design
echo mobile-design
echo scroll-experience
echo senior-fullstack
echo frontend-developer
echo backend-dev-guidelines
echo api-patterns
echo database-design
echo stripe-integration
echo agent-evaluation
echo langgraph
echo mcp-builder
echo prompt-engineering
echo ai-agents-architect
echo rag-engineer
echo llm-app-patterns
echo rag-implementation
echo prompt-caching
echo context-window-management
echo langfuse
)
) else (
:: Just use the literal arguments
set "ESSENTIALS=!EXTRA_ARGS!"
:: Use only literal arguments that match the safe skill-id allowlist
> "%SKILLS_LIST_FILE%" (
for %%a in (%*) do (
if /I not "%%a"=="--clear" (
echo(%%a| findstr /r /x "[A-Za-z0-9._-][A-Za-z0-9._-]*" >nul && echo %%a
)
)
)
)
)
:: --- RESTORATION ---
echo Restoring selected skills...
for %%s in (!ESSENTIALS!) do (
if exist "%SKILLS_LIST_FILE%" (
for /f "usebackq delims=" %%s in ("%SKILLS_LIST_FILE%") do (
if exist "%SKILLS_DIR%\%%s" (
echo . %%s ^(already active^)
) else if exist "%LIBRARY_DIR%\%%s" (
@@ -103,6 +143,8 @@ for %%s in (!ESSENTIALS!) do (
echo - %%s ^(not found in library^)
)
)
)
if exist "%SKILLS_LIST_FILE%" del "%SKILLS_LIST_FILE%" 2>nul
echo.
echo Done! Antigravity skills are now activated.

View File

@@ -45,10 +45,9 @@ Verify CLI is logged in:
apify info # Should return your username
```
If not logged in, check if `APIFY_TOKEN` environment variable is defined. If not, ask the user to generate one at https://console.apify.com/settings/integrations, then:
If not logged in, check if `APIFY_TOKEN` environment variable is defined. If not, ask the user to generate one at https://console.apify.com/settings/integrations, add it to their shell or secret manager without putting the literal token in command history, then run:
```bash
export APIFY_TOKEN="your_token_here"
apify login
```

View File

@@ -33,12 +33,12 @@ Reference the [cli-start template Dockerfile](https://github.com/apify/actor-tem
```dockerfile
FROM apify/actor-node:20
# Install ubi for easy GitHub release installation
RUN curl --silent --location \
https://raw.githubusercontent.com/houseabsolute/ubi/master/bootstrap/bootstrap-ubi.sh | sh
# Install ubi from a package source or a verified release artifact
# Example: use your base image package manager or vendor a pinned binary in the build context
# RUN apt-get update && apt-get install -y ubi
# Install your CLI tool from GitHub releases (example)
# RUN ubi --project your-org/your-tool --in /usr/local/bin
# RUN install -m 0755 ./vendor/your-tool /usr/local/bin/your-tool
# Or install apify-cli and jq manually
RUN npm install -g apify-cli

View File

@@ -3,8 +3,29 @@ import sys
import re
from pathlib import Path
def get_bundle_skills(bundle_queries):
# Allow-list for skill identifiers: letters, digits, dot, underscore, hyphen.
# Anything else (shell metacharacters, path separators, whitespace) is rejected
# before ids are handed to the batch activation script.
SAFE_SKILL_ID_PATTERN = re.compile(r"^[A-Za-z0-9._-]+$")


def is_safe_skill_id(skill_id):
    """Return True when skill_id is a non-empty string matching the allow-list."""
    return SAFE_SKILL_ID_PATTERN.fullmatch(skill_id or "") is not None


def filter_safe_skill_ids(skill_ids):
    """Drop every id that fails the allow-list check, preserving input order."""
    return [candidate for candidate in skill_ids if is_safe_skill_id(candidate)]


def format_skills_for_batch(skill_ids):
    """Render validated ids one per line (trailing newline); '' if none survive."""
    safe_ids = filter_safe_skill_ids(skill_ids)
    return "".join(f"{skill_id}\n" for skill_id in safe_ids)
def get_bundle_skills(bundle_queries, bundles_path=None):
if bundles_path is None:
bundles_path = Path(__file__).parent.parent.parent / "docs" / "users" / "bundles.md"
else:
bundles_path = Path(bundles_path)
if not bundles_path.exists():
print(f"Error: {bundles_path} not found", file=sys.stderr)
return []
@@ -25,11 +46,12 @@ def get_bundle_skills(bundle_queries):
found = True
# Extract skill names from bullet points: - [`skill-name`](../../skills/skill-name/)
skills = re.findall(r'- \[`([^`]+)`\]', section)
selected_skills.update(skills)
selected_skills.update(filter_safe_skill_ids(skills))
if not found:
# If query not found in any header, check if it's a skill name itself
# (Just in case the user passed a skill name instead of a bundle)
if is_safe_skill_id(query):
selected_skills.add(query)
return sorted(list(selected_skills))
@@ -43,4 +65,4 @@ if __name__ == "__main__":
skills = get_bundle_skills(queries)
if skills:
print(" ".join(skills))
sys.stdout.write(format_skills_for_batch(skills))

View File

@@ -79,6 +79,17 @@ def is_path_within(base_dir: Path, target_path: Path) -> bool:
return False
def is_safe_regular_file(file_path: Path, source_root: Path) -> bool:
    """Return True only for a non-symlink regular file whose resolved path
    stays inside source_root.

    Order matters: the symlink test runs before is_file() because
    Path.is_file() follows symlinks and would report True for a link that
    points at a regular file outside the root.
    """
    try:
        if file_path.is_symlink():
            return False
        if not file_path.is_file():
            return False
        # resolve() canonicalizes any remaining indirection (e.g. a symlinked
        # parent directory) before the containment check.
        return is_path_within(source_root, file_path.resolve())
    except OSError:
        # Unreadable or vanished paths are treated as unsafe, not raised.
        return False
def sanitize_flat_name(candidate: str | None, fallback: str) -> str:
"""Accept only flat skill directory names; fall back on unsafe values."""
if not candidate:
@@ -102,14 +113,9 @@ def sanitize_flat_name(candidate: str | None, fallback: str) -> str:
def copy_safe_skill_files(source_dir: Path, target_dir: Path, source_root: Path):
"""Copy regular files only when their resolved path stays inside source_root."""
for file_item in source_dir.iterdir():
if file_item.name == "SKILL.md" or file_item.is_symlink() or not file_item.is_file():
if file_item.name == "SKILL.md" or not is_safe_regular_file(file_item, source_root):
continue
resolved = file_item.resolve()
if not is_path_within(source_root, resolved):
continue
shutil.copy2(resolved, target_dir / file_item.name)
shutil.copy2(file_item.resolve(), target_dir / file_item.name)
def extract_skill_name(skill_md_path: Path) -> str | None:
"""Extract the 'name' field from SKILL.md YAML frontmatter using PyYAML."""
@@ -220,13 +226,17 @@ def find_github_skills(source_dir: Path, already_synced_names: set):
return results
for skill_dir in github_skills.iterdir():
if not skill_dir.is_dir() or not (skill_dir / "SKILL.md").exists():
if skill_dir.is_symlink() or not skill_dir.is_dir():
continue
skill_md = skill_dir / "SKILL.md"
if not is_safe_regular_file(skill_md, source_dir):
continue
if skill_dir.name not in already_synced_names:
results.append({
"relative_path": Path(".github/skills") / skill_dir.name,
"skill_md": skill_dir / "SKILL.md",
"skill_md": skill_md,
"source_dir": skill_dir,
})

View File

@@ -9,6 +9,16 @@ GITHUB_REPO="/Users/nicco/Antigravity Projects/antigravity-awesome-skills/skills
LOCAL_LIBRARY="/Users/nicco/.gemini/antigravity/scratch/.agent/skills"
BACKUP_DIR="/Users/nicco/.gemini/antigravity/scratch/.agent/skills_backup_$(date +%Y%m%d_%H%M%S)"
# Delete the immediate child directories of the directory passed as $1.
# NOTE(review): without -L, `find -type d` evaluates each entry itself, so
# symlinks (type l) are never emitted and the -L guard below should be
# unreachable; it is kept as defense in depth in case the find invocation is
# ever changed to follow links — confirm that reading matches intent.
# Caveat: the pipe into `while read` runs the loop in a subshell and splits on
# newlines, so a filename containing a newline would be mishandled; presumably
# acceptable for a curated skills library, but worth verifying.
remove_local_skill_dirs() {
    find "$1" -mindepth 1 -maxdepth 1 -type d | while IFS= read -r item; do
        if [ -L "$item" ]; then
            echo "  ⚠️  Skipping symlinked directory: $(basename "$item")"
            continue
        fi
        # -- stops option parsing so names beginning with '-' cannot become flags.
        rm -rf -- "$item"
    done
}
# 35 Recommended Skills
RECOMMENDED_SKILLS=(
# Tier S - Core Development (13)
@@ -76,10 +86,7 @@ echo ""
# Clear local library (keep README.md if exists)
echo "🗑️ Clearing local library..."
cd "$LOCAL_LIBRARY"
for item in */; do
rm -rf "$item"
done
remove_local_skill_dirs "$LOCAL_LIBRARY"
echo "✅ Local library cleared"
echo ""

View File

@@ -0,0 +1,20 @@
// Regression guard: verify activate-skills.bat no longer expands untrusted
// skill ids through cmd.exe's tokenizing FOR loop (a command-injection vector)
// and instead reads validated ids line-by-line from a temp file.
const assert = require("assert");
const fs = require("fs");
const path = require("path");

// This test file lives three directories below the repository root.
const repoRoot = path.resolve(__dirname, "../..", "..");
const batchScript = fs.readFileSync(
  path.join(repoRoot, "scripts", "activate-skills.bat"),
  "utf8",
);

// The old pattern tokenized !ESSENTIALS! on cmd delimiters, letting a crafted
// skill id smuggle extra tokens into the loop body.
assert.doesNotMatch(
  batchScript,
  /for %%s in \(!ESSENTIALS!\) do \(/,
  "activate-skills.bat must not iterate untrusted skills with tokenized FOR syntax",
);

// The hardened script must consume one validated id per line via FOR /F.
assert.match(
  batchScript,
  /for \/f .*%%s in \("%SKILLS_LIST_FILE%"\) do \(/i,
  "activate-skills.bat should read one validated skill id per line from the temp file",
);

View File

@@ -8,6 +8,10 @@ const apifySkill = fs.readFileSync(
path.join(repoRoot, 'skills', 'apify-actorization', 'SKILL.md'),
'utf8',
);
const apifyCliReference = fs.readFileSync(
path.join(repoRoot, 'skills', 'apify-actorization', 'references', 'cli-actorization.md'),
'utf8',
);
const audioExample = fs.readFileSync(
path.join(repoRoot, 'skills', 'audio-transcriber', 'examples', 'basic-transcription.sh'),
'utf8',
@@ -165,6 +169,7 @@ assert.match(audioExample, /AUDIO_FILE_ENV/, 'audio example should pass shell va
assert.strictEqual(/\|\s*(bash|sh)\b/.test(apifySkill), false, 'SKILL.md must not recommend pipe-to-shell installs');
assert.strictEqual(/\|\s*iex\b/i.test(apifySkill), false, 'SKILL.md must not recommend PowerShell pipe-to-iex installs');
assert.strictEqual(/apify login -t\b/.test(apifySkill), false, 'SKILL.md must not put tokens on the command line');
assert.strictEqual(/\bcurl\b[\s\S]*?\|\s*(?:bash|sh)\b/i.test(apifyCliReference), false, 'cli reference must not recommend pipe-to-shell installs');
function violationCount(list) {
return list.length;

View File

@@ -105,6 +105,24 @@ async function main() {
]),
/Skill path escapes skills root/,
);
const symlinkedDir = path.join(fixtureRoot, "skills", "symlinked");
const outsideDir = path.join(fixtureRoot, "outside-symlink");
fs.mkdirSync(symlinkedDir, { recursive: true });
fs.mkdirSync(outsideDir, { recursive: true });
fs.writeFileSync(path.join(outsideDir, "secret.md"), "# secret\n", "utf8");
fs.symlinkSync(
path.join(outsideDir, "secret.md"),
path.join(symlinkedDir, "SKILL.md"),
);
await assert.rejects(
() =>
loadSkillBodies(fixtureRoot, [
{ id: "symlinked", path: "skills/symlinked", name: "symlinked" },
]),
/symlink|outside the skills root|regular file/i,
);
} finally {
fs.rmSync(fixtureRoot, { recursive: true, force: true });
}

View File

@@ -19,4 +19,6 @@ assert.strictEqual(
"tracked Python bytecode should not ship in skill directories",
);
assert.match(syncRecommended, /cp -RP/, "recommended skills sync should preserve symlinks instead of dereferencing them");
assert.doesNotMatch(syncRecommended, /for item in \*\/; do\s+rm -rf "\$item"/, "recommended skills sync must not delete matched paths via naive glob iteration");
assert.match(syncRecommended, /readlink|test -L|find .* -type d/, "recommended skills sync should explicitly avoid following directory symlinks during cleanup");
assert.doesNotMatch(alphaVantage, /--- Unknown/, "alpha-vantage frontmatter should not contain malformed delimiters");

View File

@@ -8,12 +8,15 @@ const ENABLED_VALUES = new Set(["1", "true", "yes", "on"]);
const TOOL_SCRIPTS = path.join("tools", "scripts");
const TOOL_TESTS = path.join(TOOL_SCRIPTS, "tests");
const LOCAL_TEST_COMMANDS = [
[path.join(TOOL_TESTS, "activate_skills_batch_security.test.js")],
[path.join(TOOL_TESTS, "claude_plugin_marketplace.test.js")],
[path.join(TOOL_TESTS, "jetski_gemini_loader.test.js")],
[path.join(TOOL_TESTS, "npm_package_contents.test.js")],
[path.join(TOOL_TESTS, "validate_skills_headings.test.js")],
[path.join(TOOL_TESTS, "workflow_contracts.test.js")],
[path.join(TOOL_TESTS, "docs_security_content.test.js")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_bundle_activation_security.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_sync_microsoft_skills_security.py")],
[path.join(TOOL_SCRIPTS, "run-python.js"), path.join(TOOL_TESTS, "test_validate_skills_headings.py")],
];
const NETWORK_TEST_COMMANDS = [

View File

@@ -0,0 +1,60 @@
import importlib.util
import pathlib
import sys
import tempfile
import unittest
REPO_ROOT = pathlib.Path(__file__).resolve().parents[3]
TOOLS_SCRIPTS = REPO_ROOT / "tools" / "scripts"
def load_module(module_path: pathlib.Path, module_name: str):
    """Import a module from an explicit file path without touching sys.path.

    Used to load hyphenated script files (e.g. get-bundle-skills.py) that
    cannot be imported by name.
    """
    module_spec = importlib.util.spec_from_file_location(module_name, module_path)
    loaded = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    return loaded
get_bundle_skills = load_module(
TOOLS_SCRIPTS / "get-bundle-skills.py",
"get_bundle_skills",
)
class BundleActivationSecurityTests(unittest.TestCase):
    """Regression tests for the skill-id allow-listing in get-bundle-skills.py."""

    def test_format_skills_for_batch_emits_newline_delimited_safe_ids(self):
        # Ids containing shell metacharacters (&, |) must be dropped; the
        # survivors are emitted one per line for the batch activation script.
        formatted = get_bundle_skills.format_skills_for_batch([
            "safe-skill",
            "nested.skill_2",
            "unsafe&calc",
            "another|bad",
        ])
        self.assertEqual(formatted, "safe-skill\nnested.skill_2\n")

    def test_get_bundle_skills_rejects_unsafe_bundle_entries(self):
        # Even ids sourced from the bundles document are filtered, so a
        # poisoned bundles.md cannot inject tokens into activation.
        with tempfile.TemporaryDirectory() as temp_dir:
            bundles_path = pathlib.Path(temp_dir) / "bundles.md"
            bundles_path.write_text(
                "\n".join(
                    [
                        "### Essentials",
                        "- [`safe-skill`](../../skills/safe-skill/)",
                        "- [`unsafe&calc`](../../skills/unsafe/)",
                        "- [`safe_two`](../../skills/safe_two/)",
                    ]
                ),
                encoding="utf-8",
            )
            skills = get_bundle_skills.get_bundle_skills(
                ["Essentials"],
                bundles_path=bundles_path,
            )
            # Unsafe entry is silently dropped; output is sorted.
            self.assertEqual(skills, ["safe-skill", "safe_two"])
if __name__ == "__main__":
unittest.main()

View File

@@ -41,6 +41,60 @@ class SyncMicrosoftSkillsSecurityTests(unittest.TestCase):
child.unlink()
outside.rmdir()
def test_find_github_skills_ignores_symlinked_directories(self):
with tempfile.TemporaryDirectory() as temp_dir:
root = Path(temp_dir)
github_skills = root / ".github" / "skills"
github_skills.mkdir(parents=True)
safe_skill = github_skills / "safe-skill"
safe_skill.mkdir()
(safe_skill / "SKILL.md").write_text("---\nname: safe-skill\n---\n", encoding="utf-8")
outside = Path(tempfile.mkdtemp())
try:
escaped = outside / "escaped-skill"
escaped.mkdir()
(escaped / "SKILL.md").write_text("---\nname: escaped\n---\n", encoding="utf-8")
(github_skills / "escape").symlink_to(escaped, target_is_directory=True)
entries = sms.find_github_skills(root, set())
relative_paths = {str(entry["relative_path"]) for entry in entries}
self.assertEqual(relative_paths, {".github/skills/safe-skill"})
finally:
for child in escaped.iterdir():
child.unlink()
escaped.rmdir()
outside.rmdir()
def test_find_github_skills_ignores_symlinked_skill_markdown(self):
with tempfile.TemporaryDirectory() as temp_dir:
root = Path(temp_dir)
github_skills = root / ".github" / "skills"
github_skills.mkdir(parents=True)
safe_skill = github_skills / "safe-skill"
safe_skill.mkdir()
(safe_skill / "SKILL.md").write_text("---\nname: safe-skill\n---\n", encoding="utf-8")
linked_skill = github_skills / "linked-skill"
linked_skill.mkdir()
outside = Path(tempfile.mkdtemp())
try:
target = outside / "SKILL.md"
target.write_text("---\nname: escaped\n---\n", encoding="utf-8")
(linked_skill / "SKILL.md").symlink_to(target)
entries = sms.find_github_skills(root, set())
relative_paths = {str(entry["relative_path"]) for entry in entries}
self.assertEqual(relative_paths, {".github/skills/safe-skill"})
finally:
target.unlink()
outside.rmdir()
if __name__ == "__main__":
unittest.main()