antigravity-skills-reference/skills/skill-sentinel/scripts/analyzers/dependencies.py
Commit 61ec71c5c7 by ProgramadorBrasil: feat: add 52 specialized AI agent skills (#217)
New skills covering 10 categories:

**Security & Audit**: 007 (STRIDE/PASTA/OWASP), cred-omega (secrets management)
**AI Personas**: Karpathy, Hinton, Sutskever, LeCun (4 sub-skills), Altman, Musk, Gates, Jobs, Buffett
**Multi-agent Orchestration**: agent-orchestrator, task-intelligence, multi-advisor
**Code Analysis**: matematico-tao (Terence Tao-inspired mathematical code analysis)
**Social & Messaging**: Instagram Graph API, Telegram Bot, WhatsApp Cloud API, social-orchestrator
**Image Generation**: AI Studio (Gemini), Stability AI, ComfyUI Gateway, image-studio router
**Brazilian Domain**: 6 auction specialist modules, 2 legal advisors, auctioneers data scraper
**Product & Growth**: design, invention, monetization, analytics, growth engine
**DevOps & LLM Ops**: Docker/CI-CD/AWS, RAG/embeddings/fine-tuning
**Skill Governance**: installer, sentinel auditor, context management

Each skill includes:
- Standardized YAML frontmatter (name, description, risk, source, tags, tools); see the sketch after this list
- Structured sections (Overview, When to Use, How it Works, Best Practices)
- Python scripts and reference documentation where applicable
- Cross-platform compatibility (Claude Code, Antigravity, Cursor, Gemini CLI, Codex CLI)
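
A minimal sketch of that frontmatter contract, shown as the parsed mapping a governance analyzer might receive (the field names come from the list above; the concrete values, and the dict representation itself, are illustrative assumptions rather than the repository's actual schema):

```python
# Hypothetical parsed frontmatter for one skill; every value is invented.
frontmatter = {
    "name": "skill-sentinel",
    "description": "Audits installed skills for quality and risk",
    "risk": "low",
    "source": "community",
    "tags": ["governance", "audit"],
    "tools": ["python"],
}
```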

Co-authored-by: ProgramadorBrasil <214873561+ProgramadorBrasil@users.noreply.github.com>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>

"""
Analyzer de dependencias.
Verifica: requirements.txt existe, versoes pinadas, dependencias nao usadas,
dependencias importadas mas nao listadas.
"""
from __future__ import annotations
import re
from pathlib import Path
from typing import Any, Dict, List, Set, Tuple


def _extract_imports(source: str) -> Set[str]:
    """Extract the names of the packages imported by a Python source file."""
    imports = set()
    for match in re.finditer(r'^(?:import|from)\s+(\w+)', source, re.MULTILINE):
        pkg = match.group(1)
        # Skip stdlib modules and the project's internal modules
        if pkg not in {
            "os", "sys", "re", "json", "ast", "pathlib", "datetime", "typing",
            "uuid", "hashlib", "sqlite3", "argparse", "collections", "functools",
            "time", "math", "io", "csv", "logging", "traceback", "textwrap",
            "urllib", "http", "shutil", "subprocess", "tempfile", "threading",
            "concurrent", "asyncio", "dataclasses", "enum", "abc", "copy",
            "config", "db", "governance", "scanner", "analyzers",  # internal modules
        }:
            imports.add(pkg)
    return imports
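

# Example (illustrative, not executed): for a source containing "import requests",
# "from pandas import DataFrame", and "from . import util", this returns
# {"requests", "pandas"}: the relative import never matches the regex, since
# "." is not a word character, and stdlib names are filtered by the set above.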


def _normalize_pkg_name(name: str) -> str:
    """Normalize a package name for comparison ("-" and "_" are equivalent)."""
    return name.lower().replace("-", "_")
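

# Example: _normalize_pkg_name("opencv-python") == "opencv_python", so a
# requirement spelled either way compares equal to the same normalized key.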


def analyze(skill_data: Dict[str, Any]) -> Tuple[float, List[Dict[str, Any]]]:
    """Analyze a skill's dependencies. Returns (score, findings)."""
    score = 100.0
    findings: List[Dict[str, Any]] = []
    skill_name = skill_data["name"]
    skill_path = Path(skill_data["path"])
    requirements = skill_data.get("requirements", [])
    reqs_path = skill_path / "scripts" / "requirements.txt"

    # No requirements.txt at all
    if not reqs_path.exists():
        # A skill that ships no files does not need one
        if skill_data.get("file_count", 0) > 0:
            findings.append({
                "skill_name": skill_name,
                "dimension": "dependencies",
                "severity": "medium",
                "category": "missing_requirements",
                "title": "requirements.txt not found",
                "recommendation": "Create scripts/requirements.txt listing every dependency",
                "effort": "low",
                "impact": "medium",
            })
            score -= 15
        return max(0.0, score), findings

    # Check for pinned versions
    unpinned = [r for r in requirements if not r.get("pinned")]
    if unpinned and len(requirements) > 1:
        names = ", ".join(r["name"] for r in unpinned[:5])
        findings.append({
            "skill_name": skill_name,
            "dimension": "dependencies",
            "severity": "low",
            "category": "unpinned_versions",
            "title": f"{len(unpinned)} dependency(ies) without a pinned version",
            "description": f"Packages without ==: {names}",
            "recommendation": "Pin versions with == for reproducibility (e.g. requests==2.31.0)",
            "effort": "low",
            "impact": "medium",
        })
        score -= min(10, len(unpinned) * 2)
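
    # Example (assumption: the scanner that builds skill_data sets "pinned"
    # only for exact "==" specifiers, matching the recommendation above):
    # "requests==2.31.0" passes, while "requests" or "requests>=2.0" would be
    # reported here as unpinned.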

    # Compare imported dependencies against the listed ones
    all_imports: Set[str] = set()
    for rel_path in skill_data.get("python_files", []):
        filepath = skill_path / rel_path
        if not filepath.exists():
            continue
        try:
            source = filepath.read_text(encoding="utf-8", errors="replace")
        except OSError:
            continue
        all_imports.update(_extract_imports(source))

    listed_names = {_normalize_pkg_name(r["name"]) for r in requirements}

    # Imported but not listed (a dependency is probably missing).
    # Map import names to package names, since some differ.
    import_to_pkg = {
        "PIL": "pillow",
        "cv2": "opencv_python",
        "bs4": "beautifulsoup4",
        "yaml": "pyyaml",
        "dotenv": "python_dotenv",
        "playwright": "playwright",
    }
    for imp in all_imports:
        pkg_name = _normalize_pkg_name(import_to_pkg.get(imp, imp))
        if pkg_name not in listed_names:
            findings.append({
                "skill_name": skill_name,
                "dimension": "dependencies",
                "severity": "low",
                "category": "unlisted_dependency",
                "title": f"Package '{imp}' is imported but missing from requirements.txt",
                "recommendation": f"Add {imp} to requirements.txt",
                "effort": "low",
                "impact": "low",
            })
            score -= 2

    return max(0.0, min(100.0, score)), findings
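

# Usage sketch (illustrative): the shape of skill_data below is inferred from
# the reads in analyze() above ("name", "path", "file_count", "python_files",
# "requirements"); the concrete values are invented for demonstration.
if __name__ == "__main__":
    sample = {
        "name": "demo-skill",
        "path": "/tmp/demo-skill",  # hypothetical skill directory
        "file_count": 1,
        "python_files": ["scripts/main.py"],
        "requirements": [
            {"name": "requests", "pinned": True},
            {"name": "pyyaml", "pinned": False},
        ],
    }
    demo_score, demo_findings = analyze(sample)
    print(f"score={demo_score:.1f}, findings={len(demo_findings)}")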