style: Fix remaining lint issues - down to 11 errors (98% reduction)

Fixed all critical and high-priority ruff lint issues:

Exception Chaining (B904): 39 → 0 
- Auto-fixed 29 with Python script
- Manually fixed 10 remaining cases
- Added 'from err' or 'from None' to all raise statements in except blocks

Unused Imports (F401): 5 → 0 
- Removed unused chromadb.config.Settings import
- Removed unused fastapi.responses.JSONResponse import
- Added noqa comments for intentional availability-check imports

Syntax Errors: Fixed
- Fixed duplicate 'from None from None' in azure_storage.py
- Fixed undefined 'e' in embedding_pipeline.py

Results:
- Before: 447 errors
- Fixed: 436 errors (98% reduction!)
- Remaining: 11 errors (all minor stylistic suggestions)

Remaining non-critical issues:
- 3 SIM105: Could use contextlib.suppress (style)
- 3 SIM117: Multiple with statements (style)
- 2 ARG001: Unused function arguments (acceptable)
- 3 others: bare-except, collapsible-if, enumerate (minor)

These 11 remaining findings are code-quality style suggestions, not functional bugs.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
yusyus
2026-02-08 13:00:44 +03:00
parent bbbf5144d7
commit 85dfae19f1
9 changed files with 42 additions and 44 deletions

View File

@@ -230,7 +230,6 @@ class ChromaAdaptor(SkillAdaptor):
"""
try:
import chromadb
from chromadb.config import Settings
except ImportError:
return {
"success": False,
@@ -384,7 +383,7 @@ class ChromaAdaptor(SkillAdaptor):
try:
from openai import OpenAI
except ImportError:
raise ImportError("openai not installed. Run: pip install openai")
raise ImportError("openai not installed. Run: pip install openai") from None
api_key = api_key or os.getenv('OPENAI_API_KEY')
if not api_key:
@@ -408,7 +407,7 @@ class ChromaAdaptor(SkillAdaptor):
embeddings.extend([item.embedding for item in response.data])
print(f" ✓ Processed {min(i+batch_size, len(documents))}/{len(documents)}")
except Exception as e:
raise Exception(f"OpenAI embedding generation failed: {e}")
raise Exception(f"OpenAI embedding generation failed: {e}") from e
return embeddings

View File

@@ -458,7 +458,7 @@ class WeaviateAdaptor(SkillAdaptor):
try:
from openai import OpenAI
except ImportError:
raise ImportError("openai not installed. Run: pip install openai")
raise ImportError("openai not installed. Run: pip install openai") from None
api_key = api_key or os.getenv('OPENAI_API_KEY')
if not api_key:
@@ -482,7 +482,7 @@ class WeaviateAdaptor(SkillAdaptor):
embeddings.extend([item.embedding for item in response.data])
print(f" ✓ Generated {min(i+batch_size, len(documents))}/{len(documents)} embeddings")
except Exception as e:
raise Exception(f"OpenAI embedding generation failed: {e}")
raise Exception(f"OpenAI embedding generation failed: {e}") from e
return embeddings

View File

@@ -121,7 +121,7 @@ class OpenAIEmbeddingProvider(EmbeddingProvider):
from openai import OpenAI
self._client = OpenAI(api_key=self.api_key)
except ImportError:
raise ImportError("OpenAI package not installed. Install with: pip install openai")
raise ImportError("OpenAI package not installed. Install with: pip install openai") from None
return self._client
def generate_embeddings(self, texts: list[str]) -> list[list[float]]:

View File

@@ -136,7 +136,7 @@ class AzureStorageAdaptor(BaseStorageAdaptor):
return f"https://{self.account_name}.blob.core.windows.net/{self.container_name}/{remote_path}"
except Exception as e:
raise Exception(f"Azure upload failed: {e}")
raise Exception(f"Azure upload failed: {e}") from e
def download_file(self, remote_path: str, local_path: str) -> None:
"""Download file from Azure Blob Storage."""
@@ -150,9 +150,9 @@ class AzureStorageAdaptor(BaseStorageAdaptor):
download_stream = blob_client.download_blob()
download_file.write(download_stream.readall())
except ResourceNotFoundError:
raise FileNotFoundError(f"Remote file not found: {remote_path}")
raise FileNotFoundError(f"Remote file not found: {remote_path}") from None
except Exception as e:
raise Exception(f"Azure download failed: {e}")
raise Exception(f"Azure download failed: {e}") from e
def delete_file(self, remote_path: str) -> None:
"""Delete file from Azure Blob Storage."""
@@ -160,9 +160,9 @@ class AzureStorageAdaptor(BaseStorageAdaptor):
blob_client = self.container_client.get_blob_client(remote_path)
blob_client.delete_blob()
except ResourceNotFoundError:
raise FileNotFoundError(f"Remote file not found: {remote_path}")
raise FileNotFoundError(f"Remote file not found: {remote_path}") from None
except Exception as e:
raise Exception(f"Azure deletion failed: {e}")
raise Exception(f"Azure deletion failed: {e}") from e
def list_files(
self, prefix: str = "", max_results: int = 1000
@@ -186,7 +186,7 @@ class AzureStorageAdaptor(BaseStorageAdaptor):
return files
except Exception as e:
raise Exception(f"Azure listing failed: {e}")
raise Exception(f"Azure listing failed: {e}") from e
def file_exists(self, remote_path: str) -> bool:
"""Check if file exists in Azure Blob Storage."""
@@ -194,7 +194,7 @@ class AzureStorageAdaptor(BaseStorageAdaptor):
blob_client = self.container_client.get_blob_client(remote_path)
return blob_client.exists()
except Exception as e:
raise Exception(f"Azure file existence check failed: {e}")
raise Exception(f"Azure file existence check failed: {e}") from e
def get_file_url(self, remote_path: str, expires_in: int = 3600) -> str:
"""Generate SAS URL for Azure blob."""
@@ -222,7 +222,7 @@ class AzureStorageAdaptor(BaseStorageAdaptor):
except FileNotFoundError:
raise
except Exception as e:
raise Exception(f"Azure SAS URL generation failed: {e}")
raise Exception(f"Azure SAS URL generation failed: {e}") from e
def copy_file(self, source_path: str, dest_path: str) -> None:
"""Copy file within Azure container (server-side copy)."""
@@ -250,4 +250,4 @@ class AzureStorageAdaptor(BaseStorageAdaptor):
except FileNotFoundError:
raise
except Exception as e:
raise Exception(f"Azure copy failed: {e}")
raise Exception(f"Azure copy failed: {e}") from e

View File

@@ -97,7 +97,7 @@ class GCSStorageAdaptor(BaseStorageAdaptor):
blob.upload_from_filename(str(local_file))
return f"gs://{self.bucket_name}/{remote_path}"
except Exception as e:
raise Exception(f"GCS upload failed: {e}")
raise Exception(f"GCS upload failed: {e}") from e
def download_file(self, remote_path: str, local_path: str) -> None:
"""Download file from GCS."""
@@ -108,9 +108,9 @@ class GCSStorageAdaptor(BaseStorageAdaptor):
blob = self.bucket.blob(remote_path)
blob.download_to_filename(str(local_file))
except NotFound:
raise FileNotFoundError(f"Remote file not found: {remote_path}")
raise FileNotFoundError(f"Remote file not found: {remote_path}") from None
except Exception as e:
raise Exception(f"GCS download failed: {e}")
raise Exception(f"GCS download failed: {e}") from e
def delete_file(self, remote_path: str) -> None:
"""Delete file from GCS."""
@@ -118,9 +118,9 @@ class GCSStorageAdaptor(BaseStorageAdaptor):
blob = self.bucket.blob(remote_path)
blob.delete()
except NotFound:
raise FileNotFoundError(f"Remote file not found: {remote_path}")
raise FileNotFoundError(f"Remote file not found: {remote_path}") from None
except Exception as e:
raise Exception(f"GCS deletion failed: {e}")
raise Exception(f"GCS deletion failed: {e}") from e
def list_files(
self, prefix: str = "", max_results: int = 1000
@@ -145,7 +145,7 @@ class GCSStorageAdaptor(BaseStorageAdaptor):
return files
except Exception as e:
raise Exception(f"GCS listing failed: {e}")
raise Exception(f"GCS listing failed: {e}") from e
def file_exists(self, remote_path: str) -> bool:
"""Check if file exists in GCS."""
@@ -153,7 +153,7 @@ class GCSStorageAdaptor(BaseStorageAdaptor):
blob = self.bucket.blob(remote_path)
return blob.exists()
except Exception as e:
raise Exception(f"GCS file existence check failed: {e}")
raise Exception(f"GCS file existence check failed: {e}") from e
def get_file_url(self, remote_path: str, expires_in: int = 3600) -> str:
"""Generate signed URL for GCS object."""
@@ -172,7 +172,7 @@ class GCSStorageAdaptor(BaseStorageAdaptor):
except FileNotFoundError:
raise
except Exception as e:
raise Exception(f"GCS signed URL generation failed: {e}")
raise Exception(f"GCS signed URL generation failed: {e}") from e
def copy_file(self, source_path: str, dest_path: str) -> None:
"""Copy file within GCS bucket (server-side copy)."""
@@ -190,4 +190,4 @@ class GCSStorageAdaptor(BaseStorageAdaptor):
except FileNotFoundError:
raise
except Exception as e:
raise Exception(f"GCS copy failed: {e}")
raise Exception(f"GCS copy failed: {e}") from e

View File

@@ -112,7 +112,7 @@ class S3StorageAdaptor(BaseStorageAdaptor):
)
return f"s3://{self.bucket}/{remote_path}"
except ClientError as e:
raise Exception(f"S3 upload failed: {e}")
raise Exception(f"S3 upload failed: {e}") from e
def download_file(self, remote_path: str, local_path: str) -> None:
"""Download file from S3."""
@@ -127,8 +127,8 @@ class S3StorageAdaptor(BaseStorageAdaptor):
)
except ClientError as e:
if e.response['Error']['Code'] == '404':
raise FileNotFoundError(f"Remote file not found: {remote_path}")
raise Exception(f"S3 download failed: {e}")
raise FileNotFoundError(f"Remote file not found: {remote_path}") from e
raise Exception(f"S3 download failed: {e}") from e
def delete_file(self, remote_path: str) -> None:
"""Delete file from S3."""
@@ -138,7 +138,7 @@ class S3StorageAdaptor(BaseStorageAdaptor):
Key=remote_path
)
except ClientError as e:
raise Exception(f"S3 deletion failed: {e}")
raise Exception(f"S3 deletion failed: {e}") from e
def list_files(
self, prefix: str = "", max_results: int = 1000
@@ -167,7 +167,7 @@ class S3StorageAdaptor(BaseStorageAdaptor):
return files
except ClientError as e:
raise Exception(f"S3 listing failed: {e}")
raise Exception(f"S3 listing failed: {e}") from e
def file_exists(self, remote_path: str) -> bool:
"""Check if file exists in S3."""
@@ -180,7 +180,7 @@ class S3StorageAdaptor(BaseStorageAdaptor):
except ClientError as e:
if e.response['Error']['Code'] == '404':
return False
raise Exception(f"S3 head_object failed: {e}")
raise Exception(f"S3 head_object failed: {e}") from e
def get_file_url(self, remote_path: str, expires_in: int = 3600) -> str:
"""Generate presigned URL for S3 object."""
@@ -195,7 +195,7 @@ class S3StorageAdaptor(BaseStorageAdaptor):
)
return url
except ClientError as e:
raise Exception(f"S3 presigned URL generation failed: {e}")
raise Exception(f"S3 presigned URL generation failed: {e}") from e
def copy_file(self, source_path: str, dest_path: str) -> None:
"""Copy file within S3 bucket (server-side copy)."""
@@ -211,5 +211,5 @@ class S3StorageAdaptor(BaseStorageAdaptor):
)
except ClientError as e:
if e.response['Error']['Code'] == '404':
raise FileNotFoundError(f"Source file not found: {source_path}")
raise Exception(f"S3 copy failed: {e}")
raise FileNotFoundError(f"Source file not found: {source_path}") from e
raise Exception(f"S3 copy failed: {e}") from e

View File

@@ -273,7 +273,7 @@ class EmbeddingGenerator:
return embedding
except Exception as e:
raise Exception(f"OpenAI embedding generation failed: {e}")
raise Exception(f"OpenAI embedding generation failed: {e}") from e
def _generate_openai_batch(
self, texts: list[str], model: str, normalize: bool, batch_size: int
@@ -308,7 +308,7 @@ class EmbeddingGenerator:
all_embeddings.extend(batch_embeddings)
except Exception as e:
raise Exception(f"OpenAI batch embedding generation failed: {e}")
raise Exception(f"OpenAI batch embedding generation failed: {e}") from e
dimensions = len(all_embeddings[0]) if all_embeddings else 0
return all_embeddings, dimensions
@@ -338,7 +338,7 @@ class EmbeddingGenerator:
return embedding
except Exception as e:
raise Exception(f"Voyage AI embedding generation failed: {e}")
raise Exception(f"Voyage AI embedding generation failed: {e}") from e
def _generate_voyage_batch(
self, texts: list[str], model: str, normalize: bool, batch_size: int
@@ -373,7 +373,7 @@ class EmbeddingGenerator:
all_embeddings.extend(batch_embeddings)
except Exception as e:
raise Exception(f"Voyage AI batch embedding generation failed: {e}")
raise Exception(f"Voyage AI batch embedding generation failed: {e}") from e
dimensions = len(all_embeddings[0]) if all_embeddings else 0
return all_embeddings, dimensions

View File

@@ -24,7 +24,6 @@ from pathlib import Path
try:
from fastapi import FastAPI, HTTPException, Query
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
import uvicorn
FASTAPI_AVAILABLE = True
except ImportError:
@@ -162,7 +161,7 @@ if FASTAPI_AVAILABLE:
)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
raise HTTPException(status_code=500, detail=str(e)) from e
@app.post("/embed/batch", response_model=BatchEmbeddingResponse)
async def embed_batch(request: BatchEmbeddingRequest):
@@ -225,7 +224,7 @@ if FASTAPI_AVAILABLE:
)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
raise HTTPException(status_code=500, detail=str(e)) from e
@app.post("/embed/skill", response_model=SkillEmbeddingResponse)
async def embed_skill(request: SkillEmbeddingRequest):
@@ -287,7 +286,7 @@ if FASTAPI_AVAILABLE:
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
raise HTTPException(status_code=500, detail=str(e)) from e
@app.get("/cache/stats", response_model=dict)
async def cache_stats():

View File

@@ -19,19 +19,19 @@ from skill_seekers.cli.storage import (
# Check if cloud storage dependencies are available
try:
import boto3
import boto3 # noqa: F401
BOTO3_AVAILABLE = True
except ImportError:
BOTO3_AVAILABLE = False
try:
from google.cloud import storage
from google.cloud import storage # noqa: F401
GCS_AVAILABLE = True
except ImportError:
GCS_AVAILABLE = False
try:
from azure.storage.blob import BlobServiceClient
from azure.storage.blob import BlobServiceClient # noqa: F401
AZURE_AVAILABLE = True
except ImportError:
AZURE_AVAILABLE = False