fix: restore auth and transport integrity defaults

This commit is contained in:
sck_0
2026-03-15 08:40:53 +01:00
parent fe07e07215
commit a8b1e88f11
6 changed files with 111 additions and 11 deletions

View File

@@ -0,0 +1,42 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { renderHook, act, waitFor } from '@testing-library/react';
// Spy handles for the Supabase query-builder chain (from -> select -> eq ->
// maybeSingle, plus upsert). Declared at module scope so the vi.mock factory
// below and the per-test assertions share the same mock instances.
const maybeSingle = vi.fn().mockResolvedValue({ data: null, error: null });
const upsert = vi.fn().mockResolvedValue({ error: null });
const select = vi.fn(() => ({ eq: vi.fn(() => ({ maybeSingle })) }));
const from = vi.fn(() => ({ select, upsert }));
// Replace the real supabase module for the hook under test. The flag
// sharedStarWritesEnabled is forced to false to exercise the
// "frontend writes disabled" branch of useSkillStars.
// NOTE: vi.mock is hoisted by vitest, which is why the hook itself is loaded
// with a dynamic import() inside the test body, after the mock is in place.
vi.mock('../../lib/supabase', () => ({
supabase: {
from,
},
sharedStarWritesEnabled: false,
}));
describe('useSkillStars shared writes', () => {
beforeEach(() => {
// Clear persisted star state and all spy call history between tests.
localStorage.clear();
vi.clearAllMocks();
// clearAllMocks also wipes mock implementations, so re-prime the whole
// query-builder chain with its default (no data, no error) responses.
from.mockReturnValue({ select, upsert });
select.mockReturnValue({ eq: vi.fn(() => ({ maybeSingle })) });
maybeSingle.mockResolvedValue({ data: null, error: null });
upsert.mockResolvedValue({ error: null });
});
it('does not upsert shared star counts when frontend writes are disabled', async () => {
// Dynamic import so the mocked '../../lib/supabase' module is resolved.
const { useSkillStars } = await import('../useSkillStars');
const { result } = renderHook(() => useSkillStars('shared-stars-disabled'));
// Wait for the hook's initial load (reads local storage / shared count).
await waitFor(() => {
expect(result.current.isLoading).toBe(false);
});
await act(async () => {
await result.current.handleStarClick();
});
// With sharedStarWritesEnabled=false the shared table must not be written,
// but the local (per-user) star state still updates optimistically.
expect(upsert).not.toHaveBeenCalled();
expect(result.current.hasStarred).toBe(true);
expect(result.current.starCount).toBe(1);
});
});

View File

@@ -1,5 +1,5 @@
import { useState, useEffect, useCallback } from 'react'; import { useState, useEffect, useCallback } from 'react';
import { supabase } from '../lib/supabase'; import { sharedStarWritesEnabled, supabase } from '../lib/supabase';
const STORAGE_KEY = 'user_stars'; const STORAGE_KEY = 'user_stars';
@@ -102,7 +102,7 @@ export function useSkillStars(skillId: string | undefined): UseSkillStarsReturn
saveUserStarsToStorage(updatedStars); saveUserStarsToStorage(updatedStars);
// Sync to Supabase if available // Sync to Supabase if available
if (supabase) { if (supabase && sharedStarWritesEnabled) {
try { try {
// Fetch current count first // Fetch current count first
const { data: current } = await supabase const { data: current } = await supabase

View File

@@ -11,6 +11,10 @@ const supabaseAnonKey =
(import.meta as ImportMeta & { env: Record<string, string> }).env.VITE_SUPABASE_ANON_KEY (import.meta as ImportMeta & { env: Record<string, string> }).env.VITE_SUPABASE_ANON_KEY
|| 'sb_publishable_CyVwHGbtT80AuDFmXNkc9Q_YNcamTGg' || 'sb_publishable_CyVwHGbtT80AuDFmXNkc9Q_YNcamTGg'
export const sharedStarWritesEnabled =
((import.meta as ImportMeta & { env: Record<string, string> }).env.VITE_ENABLE_SHARED_STAR_WRITES ?? '')
.toLowerCase() === 'true'
// Create a single supabase client for interacting with the database // Create a single supabase client for interacting with the database
export const supabase: SupabaseClient = createClient(supabaseUrl, supabaseAnonKey) export const supabase: SupabaseClient = createClient(supabaseUrl, supabaseAnonKey)

View File

@@ -7,17 +7,19 @@ from __future__ import annotations
import asyncio import asyncio
import logging import logging
import os
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from dataclasses import dataclass, field from dataclasses import dataclass, field
from datetime import datetime, timezone from datetime import datetime, timezone
from typing import List, Optional from typing import Any, List, Optional
import httpx
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def should_verify_tls() -> bool:
return os.getenv("JUNTA_INSECURE_TLS", "").lower() not in {"1", "true", "yes", "on"}
@dataclass @dataclass
class Leiloeiro: class Leiloeiro:
estado: str estado: str
@@ -80,16 +82,20 @@ class AbstractJuntaScraper(ABC):
params: Optional[dict] = None, params: Optional[dict] = None,
data: Optional[dict] = None, data: Optional[dict] = None,
method: str = "GET", method: str = "GET",
) -> Optional[BeautifulSoup]: ) -> Optional[Any]:
"""Faz o request HTTP com retry e retorna BeautifulSoup ou None.""" """Faz o request HTTP com retry e retorna BeautifulSoup ou None."""
import httpx
from bs4 import BeautifulSoup
target = url or self.url target = url or self.url
verify_tls = should_verify_tls()
for attempt in range(1, self.max_retries + 1): for attempt in range(1, self.max_retries + 1):
try: try:
async with httpx.AsyncClient( async with httpx.AsyncClient(
headers=self.HEADERS, headers=self.HEADERS,
timeout=self.timeout, timeout=self.timeout,
follow_redirects=True, follow_redirects=True,
verify=False, # alguns sites gov têm cert self-signed verify=verify_tls,
) as client: ) as client:
if method.upper() == "POST": if method.upper() == "POST":
resp = await client.post(target, data=data, params=params) resp = await client.post(target, data=data, params=params)
@@ -173,9 +179,12 @@ class AbstractJuntaScraper(ABC):
url: Optional[str] = None, url: Optional[str] = None,
wait_selector: Optional[str] = None, wait_selector: Optional[str] = None,
wait_ms: int = 3000, wait_ms: int = 3000,
) -> Optional[BeautifulSoup]: ) -> Optional[Any]:
"""Renderiza página com JavaScript usando Playwright. Retorna BeautifulSoup ou None.""" """Renderiza página com JavaScript usando Playwright. Retorna BeautifulSoup ou None."""
from bs4 import BeautifulSoup
target = url or self.url target = url or self.url
verify_tls = should_verify_tls()
try: try:
from playwright.async_api import async_playwright from playwright.async_api import async_playwright
except ImportError: except ImportError:
@@ -188,7 +197,7 @@ class AbstractJuntaScraper(ABC):
ctx = await browser.new_context( ctx = await browser.new_context(
user_agent=self.HEADERS["User-Agent"], user_agent=self.HEADERS["User-Agent"],
locale="pt-BR", locale="pt-BR",
ignore_https_errors=True, ignore_https_errors=not verify_tls,
) )
page = await ctx.new_page() page = await ctx.new_page()
await page.goto(target, timeout=60000, wait_until="networkidle") await page.goto(target, timeout=60000, wait_until="networkidle")

View File

@@ -24,6 +24,7 @@ from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent)) sys.path.insert(0, str(Path(__file__).parent))
from db import Database from db import Database
from scraper.base_scraper import should_verify_tls
from scraper.states import SCRAPERS from scraper.states import SCRAPERS
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -114,7 +115,10 @@ async def _direct_extract(estado: str, url: str) -> list[dict]:
results = [] results = []
try: try:
async with httpx.AsyncClient( async with httpx.AsyncClient(
headers=headers, timeout=30.0, follow_redirects=True, verify=False headers=headers,
timeout=30.0,
follow_redirects=True,
verify=should_verify_tls(),
) as client: ) as client:
resp = await client.get(url) resp = await client.get(url)
if resp.status_code >= 400: if resp.status_code >= 400:

View File

@@ -0,0 +1,41 @@
import importlib.util
import os
import sys
import unittest
from pathlib import Path
REPO_ROOT = Path(__file__).resolve().parents[3]
def load_module(relative_path: str, module_name: str):
    """Load a repo-relative Python file as a module without package imports.

    Registers the module in ``sys.modules`` under ``module_name`` so that
    any relative machinery inside the loaded file can resolve it.
    """
    full_path = REPO_ROOT / relative_path
    spec = importlib.util.spec_from_file_location(module_name, full_path)
    loaded = importlib.util.module_from_spec(spec)
    assert spec.loader is not None
    sys.modules[module_name] = loaded
    spec.loader.exec_module(loaded)
    return loaded
# Load the scraper module directly from its file path; the skills directory
# is not an importable package, so a normal "import" would not resolve it.
base_scraper = load_module(
"skills/junta-leiloeiros/scripts/scraper/base_scraper.py",
"junta_base_scraper",
)
class JuntaTlsSecurityTests(unittest.TestCase):
    """TLS certificate verification must be ON by default and only
    disabled via the explicit JUNTA_INSECURE_TLS opt-out."""

    def setUp(self):
        # Preserve any pre-existing value: the original test popped the
        # variable permanently, clobbering the caller's environment.
        self._saved_insecure_tls = os.environ.pop("JUNTA_INSECURE_TLS", None)

    def tearDown(self):
        # Restore the environment exactly as we found it.
        if self._saved_insecure_tls is not None:
            os.environ["JUNTA_INSECURE_TLS"] = self._saved_insecure_tls
        else:
            os.environ.pop("JUNTA_INSECURE_TLS", None)

    def test_tls_verification_is_enabled_by_default(self):
        # setUp guarantees the opt-out is unset here.
        self.assertTrue(base_scraper.should_verify_tls())

    def test_tls_verification_can_be_disabled_explicitly(self):
        # "1" is one of the accepted truthy markers ({"1","true","yes","on"}).
        os.environ["JUNTA_INSECURE_TLS"] = "1"
        self.assertFalse(base_scraper.should_verify_tls())
if __name__ == "__main__":
unittest.main()