Release v1.21.0: Add macos-cleaner skill
- Add macos-cleaner v1.0.0 - Intelligent macOS disk space recovery - Safety-first philosophy with risk categorization (Safe/Caution/Keep) - Smart analysis: caches, app remnants, large files, dev environments - Interactive cleanup with explicit user confirmation - Bundled scripts: analyze_caches, analyze_dev_env, analyze_large_files, find_app_remnants, safe_delete, cleanup_report - Comprehensive references: cleanup_targets, mole_integration, safety_rules - Update marketplace to v1.21.0 - Update all documentation (README.md, README.zh-CN.md, CHANGELOG.md, CLAUDE.md) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
211
macos-cleaner/scripts/analyze_caches.py
Executable file
211
macos-cleaner/scripts/analyze_caches.py
Executable file
@@ -0,0 +1,211 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Analyze macOS cache directories and categorize them by size and safety.
|
||||
|
||||
Usage:
|
||||
python3 analyze_caches.py [--user-only] [--min-size SIZE]
|
||||
|
||||
Options:
|
||||
--user-only Only scan user caches (~/Library/Caches), skip system caches
|
||||
--min-size Minimum size in MB to report (default: 10)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def get_dir_size(path):
    """
    Get directory size using du command.

    Args:
        path: Directory path

    Returns:
        Size in bytes, or 0 if error (missing path, timeout,
        unparseable output, or du not being runnable)
    """
    try:
        result = subprocess.run(
            ['du', '-sk', path],
            capture_output=True,
            text=True,
            timeout=30
        )
        if result.returncode == 0:
            # du -sk returns size in KB
            size_kb = int(result.stdout.split()[0])
            return size_kb * 1024  # Convert to bytes
        return 0
    except (subprocess.TimeoutExpired, ValueError, IndexError, OSError):
        # OSError also covers du itself failing to launch
        # (FileNotFoundError / PermissionError), which the original
        # clause did not catch and would have crashed the scan.
        return 0
|
||||
|
||||
|
||||
def format_size(bytes_size):
    """Render a byte count as a human-readable string (e.g. '1.5 MB')."""
    amount = bytes_size
    for suffix in ('B', 'KB', 'MB', 'GB', 'TB'):
        if amount < 1024.0:
            return f"{amount:.1f} {suffix}"
        amount = amount / 1024.0
    # Anything that survives all divisions is petabyte-scale.
    return f"{amount:.1f} PB"
|
||||
|
||||
|
||||
def analyze_cache_dir(base_path, min_size_bytes):
    """
    Analyze a cache directory and list subdirectories by size.

    Args:
        base_path: Path to cache directory
        min_size_bytes: Minimum size to report

    Returns:
        List of (name, path, size_bytes) tuples, largest first;
        empty list when the path is missing or unreadable.
    """
    if not os.path.exists(base_path):
        return []

    results = []
    try:
        # One pass: measure each immediate subdirectory, keep the big ones.
        results = [
            (entry.name, entry.path, size)
            for entry in os.scandir(base_path)
            if entry.is_dir()
            for size in (get_dir_size(entry.path),)
            if size >= min_size_bytes
        ]
    except PermissionError:
        print(f"⚠️ Permission denied: {base_path}", file=sys.stderr)
        return []

    # Largest directories first.
    return sorted(results, key=lambda item: item[2], reverse=True)
|
||||
|
||||
|
||||
def categorize_safety(name):
    """
    Categorize cache safety based on name patterns.

    Returns:
        ('safe'|'check'|'keep', reason)
    """
    lowered = name.lower()

    # Caches that applications rebuild on demand — safe to remove.
    safe_markers = (
        'chrome', 'firefox', 'safari', 'edge',   # Browsers
        'spotify', 'slack', 'discord',           # Communication
        'pip', 'npm', 'homebrew',                # Package managers
        'temp', 'tmp', 'cache',                  # Generic temp
    )
    for marker in safe_markers:
        if marker in lowered:
            return ('safe', 'Application regenerates cache automatically')

    # Caches whose removal may cost time on the next launch.
    check_markers = ('xcode', 'android', 'jetbrains', 'vscode', 'docker')
    for marker in check_markers:
        if marker in lowered:
            return ('check', 'May slow down next application launch')

    # Unrecognized names need a manual look before deletion.
    return ('check', 'Unknown application, verify before deleting')
|
||||
|
||||
|
||||
def main():
    """Scan user (and optionally system) cache directories and print a
    size/safety report.

    Returns:
        0 always (errors in individual directories are reported inline).
    """
    parser = argparse.ArgumentParser(
        description='Analyze macOS cache directories'
    )
    parser.add_argument(
        '--user-only',
        action='store_true',
        help='Only scan user caches (skip system caches)'
    )
    parser.add_argument(
        '--min-size',
        type=int,
        default=10,
        help='Minimum size in MB to report (default: 10)'
    )
    args = parser.parse_args()

    min_size_bytes = args.min_size * 1024 * 1024  # Convert MB to bytes

    print("🔍 Analyzing macOS Cache Directories")
    print("=" * 50)

    # User caches
    user_cache_path = os.path.expanduser('~/Library/Caches')
    print(f"\n📂 User Caches: {user_cache_path}")
    print("-" * 50)

    user_caches = analyze_cache_dir(user_cache_path, min_size_bytes)
    total_user = 0

    if user_caches:
        print(f"{'Application':<40} {'Size':<12} {'Safety'}")
        print("-" * 70)
        for name, path, size in user_caches:
            # `reason` is computed but only the icon is shown in the table.
            safety, reason = categorize_safety(name)
            safety_icon = {'safe': '🟢', 'check': '🟡', 'keep': '🔴'}[safety]
            print(f"{name:<40} {format_size(size):<12} {safety_icon}")
            total_user += size
        print("-" * 70)
        print(f"{'Total':<40} {format_size(total_user):<12}")
    else:
        print("No cache directories found above minimum size.")

    # User logs: reported as one lump sum, not per-subdirectory.
    user_log_path = os.path.expanduser('~/Library/Logs')
    if os.path.exists(user_log_path):
        log_size = get_dir_size(user_log_path)
        if log_size >= min_size_bytes:
            print(f"\n📝 User Logs: {user_log_path}")
            print(f" Size: {format_size(log_size)} 🟢 Safe to delete")
            total_user += log_size

    # System caches (if not --user-only)
    if not args.user_only:
        print(f"\n\n📂 System Caches: /Library/Caches")
        print("-" * 50)
        print("⚠️ Requires administrator privileges to delete")

        system_cache_path = '/Library/Caches'
        system_caches = analyze_cache_dir(system_cache_path, min_size_bytes)
        total_system = 0

        if system_caches:
            print(f"{'Application':<40} {'Size':<12}")
            print("-" * 70)
            for name, path, size in system_caches[:10]:  # Top 10 only
                print(f"{name:<40} {format_size(size):<12}")
                total_system += size
            if len(system_caches) > 10:
                print(f"... and {len(system_caches) - 10} more")
            print("-" * 70)
            # Note: total covers only the top-10 shown, by construction above.
            print(f"{'Total':<40} {format_size(total_system):<12}")
        else:
            print("No cache directories found above minimum size.")

    # Summary — total_system is only referenced under the same
    # `not args.user_only` guard that defines it, so it is always bound here.
    print("\n" + "=" * 50)
    print("📊 Summary")
    print("=" * 50)
    print(f"Total User Caches: {format_size(total_user)}")
    if not args.user_only:
        print(f"Total System Caches: {format_size(total_system)}")
        print(f"Combined Total: {format_size(total_user + total_system)}")

    print("\n💡 Next Steps:")
    print(" 1. Review the list above")
    print(" 2. Identify caches marked 🟢 (safe to delete)")
    print(" 3. For 🟡 items, verify the application is not running")
    print(" 4. Use safe_delete.py for interactive cleanup")

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the process exit status.
if __name__ == '__main__':
    sys.exit(main())
|
||||
307
macos-cleaner/scripts/analyze_dev_env.py
Executable file
307
macos-cleaner/scripts/analyze_dev_env.py
Executable file
@@ -0,0 +1,307 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Analyze development environment and find cleanable resources.
|
||||
|
||||
Checks:
|
||||
- Docker (images, containers, volumes, build cache)
|
||||
- Homebrew cache
|
||||
- npm cache
|
||||
- pip cache
|
||||
- Old .git directories in archived projects
|
||||
|
||||
Usage:
|
||||
python3 analyze_dev_env.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def format_size(bytes_size):
    """Render a byte count as human-readable text; None maps to 'Unknown'."""
    if bytes_size is None:
        return "Unknown"
    units = ('B', 'KB', 'MB', 'GB', 'TB')
    value = bytes_size
    index = 0
    while index < len(units):
        if value < 1024.0:
            return f"{value:.1f} {units[index]}"
        value /= 1024.0
        index += 1
    # Exhausted all named units — report in petabytes.
    return f"{value:.1f} PB"
|
||||
|
||||
|
||||
def run_command(cmd):
    """Run *cmd* (an argv list) and return its stripped stdout.

    Args:
        cmd: Command and arguments as a list (no shell involved).

    Returns:
        Stripped stdout string on exit code 0, otherwise None
        (also None on timeout or when the binary cannot be executed).
    """
    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=30
        )
        if result.returncode == 0:
            return result.stdout.strip()
        return None
    except (subprocess.TimeoutExpired, OSError):
        # OSError subsumes FileNotFoundError and additionally covers
        # PermissionError / other exec failures the original missed.
        return None
|
||||
|
||||
|
||||
def get_dir_size(path):
    """Return the size of *path* in bytes via `du -sk`, or 0 on any failure."""
    output = run_command(['du', '-sk', path])
    if not output:
        return 0
    try:
        kilobytes = int(output.split()[0])
    except (ValueError, IndexError):
        return 0
    return kilobytes * 1024  # du reports KB; callers expect bytes
|
||||
|
||||
|
||||
def check_docker():
    """Report Docker images, containers, volumes and build cache.

    Returns:
        Estimated image size in bytes (parsed from `docker system df`);
        0 when Docker is absent or the daemon is not running. Container,
        volume and build-cache figures are printed but not included in
        the returned total.
    """
    print("\n🐳 Docker Resources")
    print("=" * 50)

    # Check if Docker is installed
    if not run_command(['which', 'docker']):
        print(" Docker not installed or not in PATH")
        return 0

    # Check if Docker daemon is running
    if not run_command(['docker', 'info']):
        print(" Docker daemon not running")
        return 0

    total_size = 0

    # Images
    images_output = run_command(['docker', 'images', '-q'])
    if images_output:
        image_count = len(images_output.split('\n'))
        print(f"\n📦 Images: {image_count}")

        # Get size estimate — one JSON object per line from `docker system df`.
        system_output = run_command(['docker', 'system', 'df', '--format', '{{json .}}'])
        if system_output:
            for line in system_output.split('\n'):
                try:
                    data = json.loads(line)
                    if data.get('Type') == 'Images':
                        size_str = data.get('Size', '')
                        # Parse size (format like "1.2GB"); other suffixes
                        # (kB/B) intentionally count as 0.
                        if 'GB' in size_str:
                            size = float(size_str.replace('GB', '')) * 1024 * 1024 * 1024
                        elif 'MB' in size_str:
                            size = float(size_str.replace('MB', '')) * 1024 * 1024
                        else:
                            size = 0
                        print(f" Total size: {format_size(size)}")
                        total_size += size
                except (json.JSONDecodeError, ValueError):
                    pass

    # Containers
    containers_output = run_command(['docker', 'ps', '-a', '-q'])
    if containers_output:
        container_count = len(containers_output.split('\n'))
        stopped = run_command(['docker', 'ps', '-a', '-f', 'status=exited', '-q'])
        stopped_count = len(stopped.split('\n')) if stopped else 0
        print(f"\n📦 Containers: {container_count} total, {stopped_count} stopped")

    # Volumes
    volumes_output = run_command(['docker', 'volume', 'ls', '-q'])
    if volumes_output:
        volume_count = len(volumes_output.split('\n'))
        print(f"\n📦 Volumes: {volume_count}")

        # List volumes (first 5 only)
        for volume in volumes_output.split('\n')[:5]:
            # NOTE(review): `inspect` result is fetched but never used —
            # presumably intended to show mountpoint/size; confirm intent.
            inspect = run_command(['docker', 'volume', 'inspect', volume])
            print(f" - {volume}")
        if volume_count > 5:
            print(f" ... and {volume_count - 5} more")

    # Build cache
    buildx_output = run_command(['docker', 'buildx', 'du'])
    if buildx_output and 'Total:' in buildx_output:
        print(f"\n📦 Build Cache:")
        for line in buildx_output.split('\n'):
            if 'Total:' in line:
                print(f" {line}")

    print(f"\n💡 Cleanup command: docker system prune -a --volumes")
    print(f" ⚠️ Warning: This will remove ALL unused Docker resources")

    return total_size
|
||||
|
||||
|
||||
def check_homebrew():
    """Report the Homebrew download-cache location and size.

    Returns:
        Cache size in bytes, or 0 when brew is not installed or its
        cache directory does not exist.
    """
    print("\n🍺 Homebrew")
    print("=" * 50)

    if not run_command(['which', 'brew']):
        print(" Homebrew not installed")
        return 0

    # `brew --cache` prints the cache directory path.
    cache_path = run_command(['brew', '--cache'])
    if cache_path and os.path.exists(cache_path):
        size = get_dir_size(cache_path)
        print(f" Cache location: {cache_path}")
        print(f" Cache size: {format_size(size)}")
        print(f"\n💡 Cleanup command: brew cleanup -s")
        return size

    return 0
|
||||
|
||||
|
||||
def check_npm():
    """Report the npm cache location and size.

    Returns:
        Cache size in bytes, or 0 when npm is not installed or the
        cache path is missing/unset.
    """
    print("\n📦 npm")
    print("=" * 50)

    if not run_command(['which', 'npm']):
        print(" npm not installed")
        return 0

    # npm prints the literal string 'undefined' when the key is unset.
    cache_path = run_command(['npm', 'config', 'get', 'cache'])
    if cache_path and cache_path != 'undefined' and os.path.exists(cache_path):
        size = get_dir_size(cache_path)
        print(f" Cache location: {cache_path}")
        print(f" Cache size: {format_size(size)}")
        print(f"\n💡 Cleanup command: npm cache clean --force")
        return size

    return 0
|
||||
|
||||
|
||||
def check_pip():
    """Report the pip cache location and size.

    Returns:
        Cache size in bytes, or 0 when pip is not installed or the
        cache directory does not exist.
    """
    print("\n🐍 pip")
    print("=" * 50)

    # Prefer pip3; fall back to pip for older setups.
    pip_cmd = 'pip3' if run_command(['which', 'pip3']) else 'pip'

    if not run_command(['which', pip_cmd]):
        print(" pip not installed")
        return 0

    cache_dir = run_command([pip_cmd, 'cache', 'dir'])
    if cache_dir and os.path.exists(cache_dir):
        size = get_dir_size(cache_dir)
        print(f" Cache location: {cache_dir}")
        print(f" Cache size: {format_size(size)}")
        print(f"\n💡 Cleanup command: {pip_cmd} cache purge")
        return size

    return 0
|
||||
|
||||
|
||||
def check_old_git_repos():
    """Find large .git directories in common project locations.

    Scans ~/Projects, ~/workspace, ~/dev, ~/src and ~/code (depth <= 3)
    and reports .git directories larger than 10 MB.

    Returns:
        Combined size in bytes of all qualifying .git directories.
    """
    print("\n📁 Old Git Repositories")
    print("=" * 50)

    home = Path.home()
    common_project_dirs = [
        home / 'Projects',
        home / 'workspace',
        home / 'dev',
        home / 'src',
        home / 'code'
    ]

    git_repos = []
    total_size = 0

    for project_dir in common_project_dirs:
        if not project_dir.exists():
            continue

        # Find .git directories (maxdepth keeps the search cheap).
        try:
            result = subprocess.run(
                ['find', str(project_dir), '-name', '.git', '-type', 'd', '-maxdepth', 3],
                capture_output=True,
                text=True,
                timeout=30
            )
            if result.returncode == 0:
                for git_path in result.stdout.strip().split('\n'):
                    if git_path:
                        size = get_dir_size(git_path)
                        if size > 10 * 1024 * 1024:  # > 10 MB
                            git_repos.append((git_path, size))
                            total_size += size
        except subprocess.TimeoutExpired:
            # A slow directory should not abort the remaining scans.
            continue

    if git_repos:
        # Sort by size, largest first
        git_repos.sort(key=lambda x: x[1], reverse=True)

        print(f" Found {len(git_repos)} .git directories > 10 MB")
        print(f"\n Top 10 largest:")
        for path, size in git_repos[:10]:
            # Get parent directory name (project name)
            project_name = Path(path).parent.name
            print(f" - {project_name:<30} {format_size(size)}")

        print(f"\n Total: {format_size(total_size)}")
        print(f"\n💡 If these are archived projects, consider:")
        print(f" 1. Delete .git history: rm -rf <project>/.git")
        print(f" 2. Or compress entire project: tar -czf archive.tar.gz <project>")
    else:
        print(" No large .git directories found in common project locations")

    return total_size
|
||||
|
||||
|
||||
def main():
    """Run every dev-environment check and print a combined savings summary.

    Returns:
        0 always.
    """
    print("🔍 Development Environment Analysis")
    print("=" * 50)

    total_savings = 0

    # Check each component (each prints its own section).
    docker_size = check_docker()
    brew_size = check_homebrew()
    npm_size = check_npm()
    pip_size = check_pip()
    git_size = check_old_git_repos()

    # Summary — zero-sized components are omitted from the list.
    print("\n\n📊 Summary")
    print("=" * 50)
    if docker_size:
        print(f"Docker: {format_size(docker_size)}")
        total_savings += docker_size
    if brew_size:
        print(f"Homebrew cache: {format_size(brew_size)}")
        total_savings += brew_size
    if npm_size:
        print(f"npm cache: {format_size(npm_size)}")
        total_savings += npm_size
    if pip_size:
        print(f"pip cache: {format_size(pip_size)}")
        total_savings += pip_size
    if git_size:
        print(f"Old .git repos: {format_size(git_size)}")
        total_savings += git_size

    print("-" * 50)
    print(f"Potential savings: {format_size(total_savings)}")

    print("\n💡 Next Steps:")
    print(" 1. Review Docker volumes before cleanup (may contain data)")
    print(" 2. Package manager caches are safe to delete")
    print(" 3. For .git directories, ensure project is truly archived")

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the process exit status.
if __name__ == '__main__':
    sys.exit(main())
|
||||
241
macos-cleaner/scripts/analyze_large_files.py
Executable file
241
macos-cleaner/scripts/analyze_large_files.py
Executable file
@@ -0,0 +1,241 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Find large files on macOS and categorize them.
|
||||
|
||||
Usage:
|
||||
python3 analyze_large_files.py [--threshold SIZE] [--path PATH] [--limit N]
|
||||
|
||||
Options:
|
||||
--threshold Minimum file size in MB (default: 100)
|
||||
--path Path to search (default: ~)
|
||||
--limit Maximum number of results (default: 50)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def format_size(bytes_size):
    """Convert a byte count into a readable '<value> <unit>' string."""
    magnitude = bytes_size
    for label in ['B', 'KB', 'MB', 'GB', 'TB']:
        if not magnitude < 1024.0:
            magnitude /= 1024.0
            continue
        return f"{magnitude:.1f} {label}"
    # Beyond terabytes, fall back to petabytes.
    return f"{magnitude:.1f} PB"
|
||||
|
||||
|
||||
def categorize_file(path):
    """
    Categorize a file by extension and suggest deletion safety.

    Args:
        path: pathlib.Path of the file.

    Returns:
        (category, icon, safety_note) tuple.
    """
    ext = path.suffix.lower()

    # Checked in order; the first matching extension set wins.
    extension_rules = [
        ({'.mp4', '.mov', '.avi', '.mkv', '.m4v', '.flv', '.wmv'},
         ('Video', '🎬', 'Review and archive to external storage')),
        ({'.zip', '.tar', '.gz', '.bz2', '.7z', '.rar', '.dmg'},
         ('Archive', '📦', 'Extract if needed, then delete archive')),
        ({'.iso', '.img', '.toast'},
         ('Disk Image', '💿', 'Delete after installation/use')),
        ({'.db', '.sqlite', '.sqlite3', '.sql'},
         ('Database', '🗄️', '⚠️ Verify not in use before deleting')),
        ({'.csv', '.json', '.xml', '.parquet', '.arrow'},
         ('Data File', '📊', 'Archive or compress if historical data')),
    ]
    for extensions, verdict in extension_rules:
        if ext in extensions:
            return verdict

    # Logs match on extension OR on 'log' anywhere in the filename.
    if ext == '.log' or 'log' in path.name.lower():
        return ('Log File', '📝', 'Safe to delete old logs')

    if ext in ('.o', '.a', '.so', '.dylib', '.framework'):
        return ('Build Artifact', '🔨', 'Safe to delete, rebuild will regenerate')

    if ext in ('.vmdk', '.vdi', '.qcow2', '.vhd'):
        return ('VM Image', '💻', '⚠️ Contains VM data, verify before deleting')

    # Everything else needs a human decision.
    return ('Other', '📄', 'Review before deleting')
|
||||
|
||||
|
||||
def find_large_files(search_path, threshold_bytes, limit):
    """
    Find files larger than threshold using find command.

    Args:
        search_path: Path to search
        threshold_bytes: Minimum size in bytes
        limit: Maximum results

    Returns:
        List of (Path, size_bytes) tuples, largest first, capped at
        *limit*; empty list when the search times out.
    """
    # Convert bytes to 512-byte blocks (find -size uses 512-byte blocks)
    threshold_blocks = threshold_bytes // 512

    # Exclude common directories to avoid (trash, caches, VCS/build churn)
    exclude_dirs = [
        '.Trash',
        'Library/Caches',
        'Library/Application Support/MobileSync',  # iOS backups
        '.git',
        'node_modules',
        '__pycache__'
    ]

    # Build find command
    cmd = ['find', search_path, '-type', 'f', '-size', f'+{threshold_blocks}']

    # Add exclusions
    for exclude in exclude_dirs:
        cmd.extend(['-not', '-path', f'*/{exclude}/*'])

    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=120
        )

        # find exits non-zero on permission errors but still prints
        # what it could reach, so we keep processing stdout.
        if result.returncode != 0:
            print(f"⚠️ Warning: find command had errors", file=sys.stderr)

        files = []
        for line in result.stdout.strip().split('\n'):
            if not line:
                continue
            try:
                path = Path(line)
                # Re-check existence/stat: the file may vanish between
                # find listing it and us stat-ing it.
                if path.exists():
                    size = path.stat().st_size
                    files.append((path, size))
            except (OSError, PermissionError):
                continue

        # Sort by size descending
        files.sort(key=lambda x: x[1], reverse=True)
        return files[:limit]

    except subprocess.TimeoutExpired:
        # NOTE(review): the message promises partial results, but an
        # empty list is returned — confirm whether partial stdout should
        # be salvaged here.
        print("⚠️ Search timed out, showing partial results", file=sys.stderr)
        return []
|
||||
|
||||
|
||||
def main():
    """Search for large files, print a ranked table and a per-category
    breakdown with cleanup suggestions.

    Returns:
        0 always.
    """
    parser = argparse.ArgumentParser(
        description='Find large files on macOS'
    )
    parser.add_argument(
        '--threshold',
        type=int,
        default=100,
        help='Minimum file size in MB (default: 100)'
    )
    parser.add_argument(
        '--path',
        default=os.path.expanduser('~'),
        help='Path to search (default: ~)'
    )
    parser.add_argument(
        '--limit',
        type=int,
        default=50,
        help='Maximum number of results (default: 50)'
    )
    args = parser.parse_args()

    threshold_bytes = args.threshold * 1024 * 1024
    search_path = os.path.expanduser(args.path)

    print(f"🔍 Searching for files larger than {args.threshold} MB")
    print(f"📂 Search path: {search_path}")
    print("=" * 80)
    print("This may take a few minutes...\n")

    large_files = find_large_files(search_path, threshold_bytes, args.limit)

    if not large_files:
        print("✅ No large files found above the threshold.")
        return 0

    print(f"\n📦 Found {len(large_files)} large files")
    print("=" * 80)
    print(f"{'#':<4} {'Size':<12} {'Type':<12} {'Location'}")
    print("-" * 80)

    # Group by category while printing the ranked table.
    by_category = {}
    total_size = 0

    for i, (path, size) in enumerate(large_files, 1):
        category, icon, note = categorize_file(path)

        # Shorten path for display: show home-relative paths as ~/...
        try:
            rel_path = path.relative_to(Path.home())
            display_path = f"~/{rel_path}"
        except ValueError:
            # Path is outside the home directory — show it as-is.
            display_path = str(path)

        # Truncate long paths
        if len(display_path) > 45:
            display_path = display_path[:42] + "..."

        print(f"{i:<4} {format_size(size):<12} {icon} {category:<10} {display_path}")

        # Track by category (note kept from the first file seen per category).
        if category not in by_category:
            by_category[category] = {'count': 0, 'size': 0, 'note': note}
        by_category[category]['count'] += 1
        by_category[category]['size'] += size
        total_size += size

    print("-" * 80)
    print(f"{'Total':<4} {format_size(total_size):<12}")

    # Category summary, largest category first.
    print("\n\n📊 Breakdown by Category")
    print("=" * 80)
    for category, data in sorted(
        by_category.items(),
        key=lambda x: x[1]['size'],
        reverse=True
    ):
        print(f"\n{category}")
        print(f" Files: {data['count']}")
        print(f" Total: {format_size(data['size'])}")
        print(f" 💡 {data['note']}")

    print("\n\n💡 Next Steps:")
    print(" 1. Review the list and identify files you no longer need")
    print(" 2. For videos/archives: consider moving to external storage")
    print(" 3. For databases/VMs: verify they're not in use")
    print(" 4. Use safe_delete.py for interactive cleanup")

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the process exit status.
if __name__ == '__main__':
    sys.exit(main())
|
||||
234
macos-cleaner/scripts/cleanup_report.py
Executable file
234
macos-cleaner/scripts/cleanup_report.py
Executable file
@@ -0,0 +1,234 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate before/after cleanup reports.
|
||||
|
||||
Usage:
|
||||
# Capture before snapshot
|
||||
python3 cleanup_report.py --snapshot before
|
||||
|
||||
# Capture after snapshot and generate report
|
||||
python3 cleanup_report.py --snapshot after --compare
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def format_size(bytes_size):
    """Pretty-print a byte count using binary (1024-based) units."""
    remaining = bytes_size
    units = ['B', 'KB', 'MB', 'GB', 'TB']
    idx = 0
    while idx < len(units) and not remaining < 1024.0:
        remaining /= 1024.0
        idx += 1
    if idx < len(units):
        return f"{remaining:.1f} {units[idx]}"
    # All named units exhausted: report petabytes.
    return f"{remaining:.1f} PB"
|
||||
|
||||
|
||||
def get_disk_usage():
    """
    Get current disk usage for the root filesystem via `df -k /`.

    Returns:
        dict with 'total', 'used', 'available' (bytes), 'percent' (int)
        and 'timestamp' (ISO-8601 string), or None if df fails or its
        output cannot be parsed.
    """
    try:
        result = subprocess.run(
            ['df', '-k', '/'],
            capture_output=True,
            text=True
        )

        if result.returncode == 0:
            lines = result.stdout.strip().split('\n')
            if len(lines) >= 2:
                # Parse df output: line 0 is the header, line 1 the root
                # filesystem row (Filesystem, 1K-blocks, Used, Available, Capacity).
                parts = lines[1].split()
                total_kb = int(parts[1])
                used_kb = int(parts[2])
                available_kb = int(parts[3])
                percent = int(parts[4].rstrip('%'))

                return {
                    'total': total_kb * 1024,
                    'used': used_kb * 1024,
                    'available': available_kb * 1024,
                    'percent': percent,
                    'timestamp': datetime.now().isoformat()
                }
    except (OSError, subprocess.SubprocessError, ValueError, IndexError):
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; these are the failures df
        # invocation and parsing can actually produce.
        pass

    return None
|
||||
|
||||
|
||||
def save_snapshot(name):
    """Save the current disk-usage reading to ~/.macos-cleaner/<name>.json.

    Args:
        name: Snapshot name ('before' or 'after' by convention).

    Returns:
        True on success, False when disk usage could not be read.
    """
    snapshot_dir = Path.home() / '.macos-cleaner'
    snapshot_dir.mkdir(exist_ok=True)  # idempotent; existing dir is fine

    snapshot_file = snapshot_dir / f'{name}.json'

    usage = get_disk_usage()
    if usage:
        with snapshot_file.open('w') as f:
            json.dump(usage, f, indent=2)
        print(f"✅ Snapshot saved: {snapshot_file}")
        return True
    else:
        print("❌ Failed to get disk usage")
        return False
|
||||
|
||||
|
||||
def load_snapshot(name):
    """Load a previously saved disk-usage snapshot; None when absent."""
    snapshot_file = Path.home() / '.macos-cleaner' / f'{name}.json'

    if not snapshot_file.exists():
        print(f"❌ Snapshot not found: {snapshot_file}")
        return None

    with snapshot_file.open('r') as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def generate_report(before, after):
    """Print a before/after cleanup comparison.

    Args:
        before: Snapshot dict from get_disk_usage() taken before cleanup.
        after: Snapshot dict taken after cleanup.
    """
    print("\n" + "=" * 60)
    print("📊 Cleanup Report")
    print("=" * 60)

    # Time elapsed between the two snapshots.
    before_time = datetime.fromisoformat(before['timestamp'])
    after_time = datetime.fromisoformat(after['timestamp'])
    duration = after_time - before_time

    print(f"\nCleanup Duration: {duration}")
    print(f"Before: {before_time.strftime('%Y-%m-%d %H:%M:%S')}")
    print(f"After: {after_time.strftime('%Y-%m-%d %H:%M:%S')}")

    # Disk usage comparison
    print("\n" + "-" * 60)
    print("Disk Usage")
    print("-" * 60)

    before_used = before['used']
    after_used = after['used']
    recovered = before_used - after_used  # positive = space freed

    print(f"Before: {format_size(before_used):>12} ({before['percent']}%)")
    print(f"After: {format_size(after_used):>12} ({after['percent']}%)")
    print("-" * 60)

    if recovered > 0:
        print(f"✅ Recovered: {format_size(recovered):>12}")
        percent_recovered = (recovered / before_used) * 100
        print(f" ({percent_recovered:.1f}% of used space)")
    elif recovered < 0:
        # Usage grew — background writes can outpace the cleanup.
        print(f"⚠️ Space increased: {format_size(abs(recovered)):>12}")
        print(" (This may be due to system activity during cleanup)")
    else:
        print("No change in disk usage")

    # Available space (can differ from 'used' delta due to reserved blocks).
    print("\n" + "-" * 60)
    print("Available Space")
    print("-" * 60)

    before_avail = before['available']
    after_avail = after['available']
    gained = after_avail - before_avail

    print(f"Before: {format_size(before_avail):>12}")
    print(f"After: {format_size(after_avail):>12}")
    print("-" * 60)

    if gained > 0:
        print(f"✅ Gained: {format_size(gained):>12}")
    elif gained < 0:
        print(f"⚠️ Lost: {format_size(abs(gained)):>12}")
    else:
        print("No change")

    # Recommendations keyed off the post-cleanup fill percentage.
    print("\n" + "=" * 60)

    if after['percent'] > 90:
        print("⚠️ Warning: Disk is still >90% full")
        print("\n💡 Recommendations:")
        print(" - Consider moving large files to external storage")
        print(" - Review and delete old projects")
        print(" - Check for large application data")
    elif after['percent'] > 80:
        print("⚠️ Disk usage is still high (>80%)")
        print("\n💡 Recommendations:")
        print(" - Run cleanup again in 1-2 weeks")
        print(" - Monitor large file creation")
    else:
        print("✅ Disk usage is healthy!")
        print("\n💡 Maintenance Tips:")
        print(" - Run cleanup monthly")
        print(" - Empty Trash regularly")
        print(" - Clear browser caches weekly")

    print("=" * 60)
|
||||
|
||||
|
||||
def main():
    """Capture a 'before' or 'after' disk-usage snapshot; with --compare,
    print the before/after report.

    Returns:
        0 on success, 1 when a snapshot cannot be saved or compared.
    """
    parser = argparse.ArgumentParser(
        description='Generate cleanup reports'
    )
    parser.add_argument(
        '--snapshot',
        choices=['before', 'after'],
        required=True,
        help='Snapshot type (before or after cleanup)'
    )
    parser.add_argument(
        '--compare',
        action='store_true',
        help='Compare with before snapshot (use with --snapshot after)'
    )
    args = parser.parse_args()

    if args.snapshot == 'before':
        # Save before snapshot
        print("📸 Capturing disk usage before cleanup...")
        if save_snapshot('before'):
            # NOTE(review): get_disk_usage() can return None (df failure);
            # the subscripts below would then raise TypeError — confirm
            # whether that is acceptable given save_snapshot just succeeded.
            usage = get_disk_usage()
            print(f"\nCurrent Usage: {format_size(usage['used'])} ({usage['percent']}%)")
            print(f"Available: {format_size(usage['available'])}")
            print("\n💡 Run cleanup operations, then:")
            print(" python3 cleanup_report.py --snapshot after --compare")
        return 0

    elif args.snapshot == 'after':
        # Save after snapshot
        print("📸 Capturing disk usage after cleanup...")
        if not save_snapshot('after'):
            return 1

        if args.compare:
            # Load before snapshot and compare
            before = load_snapshot('before')
            after = load_snapshot('after')

            if before and after:
                generate_report(before, after)
            else:
                print("❌ Cannot compare: missing snapshots")
                return 1
        else:
            # Snapshot-only mode: just echo the current numbers.
            usage = get_disk_usage()
            print(f"\nCurrent Usage: {format_size(usage['used'])} ({usage['percent']}%)")
            print(f"Available: {format_size(usage['available'])}")

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the process exit status.
if __name__ == '__main__':
    sys.exit(main())
|
||||
246
macos-cleaner/scripts/find_app_remnants.py
Executable file
246
macos-cleaner/scripts/find_app_remnants.py
Executable file
@@ -0,0 +1,246 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Find orphaned application support files and preferences.
|
||||
|
||||
This script identifies directories in ~/Library that may belong to
|
||||
uninstalled applications.
|
||||
|
||||
Usage:
|
||||
python3 find_app_remnants.py [--min-size SIZE]
|
||||
|
||||
Options:
|
||||
--min-size Minimum size in MB to report (default: 10)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def format_size(bytes_size):
    """Render a byte count as a human-readable string, e.g. '1.5 KB'."""
    value = bytes_size
    for suffix in ('B', 'KB', 'MB', 'GB', 'TB'):
        if value < 1024.0:
            return f"{value:.1f} {suffix}"
        value /= 1024.0
    # Anything that survives all divisions is petabyte-scale.
    return f"{value:.1f} PB"
|
||||
|
||||
|
||||
def get_dir_size(path):
    """Return the size of *path* in bytes using `du -sk`, or 0 on any failure."""
    try:
        proc = subprocess.run(
            ['du', '-sk', path],
            capture_output=True,
            text=True,
            timeout=30
        )
        if proc.returncode != 0:
            return 0
        # du -sk reports kibibytes; convert to bytes.
        return int(proc.stdout.split()[0]) * 1024
    except (subprocess.TimeoutExpired, ValueError, IndexError):
        return 0
|
||||
|
||||
|
||||
def get_installed_apps():
    """Return the set of installed app names (``.app`` suffix stripped)."""
    apps = set()

    # Check both the system-wide and per-user application folders.
    for app_dir in (Path('/Applications'), Path.home() / 'Applications'):
        if not app_dir.exists():
            continue
        for entry in app_dir.iterdir():
            if entry.suffix == '.app':
                apps.add(entry.stem)  # .stem drops the '.app' extension

    return apps
|
||||
|
||||
|
||||
def normalize_name(name):
    """
    Normalize an app/bundle name for fuzzy matching.

    Strips leading reverse-DNS prefixes ('com.', 'org.', 'net.', 'io.'),
    removes every non-alphanumeric character, and lowercases the result.

    Examples:
        'Google Chrome' -> 'googlechrome'
        'com.apple.Safari' -> 'applesafari'

    NOTE: only the leading prefix component is stripped, so the vendor
    segment ('apple' above) is kept.  The original docstring claimed
    'com.apple.Safari' -> 'safari', which the code never produced; matching
    downstream is substring-based, so keeping the vendor segment is fine.
    """
    # Strip reverse-DNS prefixes. Checked in order, so 'com.org.Foo'
    # loses both 'com.' and then 'org.' (preserves original behavior).
    for prefix in ('com.', 'org.', 'net.', 'io.'):
        if name.startswith(prefix):
            name = name[len(prefix):]

    # Keep alphanumerics only, then lowercase for case-insensitive matching.
    return ''.join(ch for ch in name if ch.isalnum()).lower()
|
||||
|
||||
|
||||
def is_likely_orphaned(dir_name, installed_apps):
    """
    Decide whether a ~/Library subdirectory looks orphaned.

    Args:
        dir_name: Name of the directory under ~/Library.
        installed_apps: Set of installed application names.

    Returns:
        (is_orphaned, confidence, reason) where confidence is
        'high' | 'medium' | 'low', or None when not orphaned.
    """
    normalized = normalize_name(dir_name)

    # Keep anything whose normalized name overlaps an installed app's name.
    for app_name in installed_apps:
        candidate = normalize_name(app_name)
        if candidate in normalized or normalized in candidate:
            return (False, None, f"Matches installed app: {app_name}")

    # Names tied to macOS itself / built-in apps must never be flagged.
    protected = {
        'apple', 'safari', 'finder', 'mail', 'messages', 'notes',
        'photos', 'music', 'calendar', 'contacts', 'reminders',
        'preferences', 'cookies', 'webkit', 'coredata',
        'cloudkit', 'icloud', 'appstore', 'systemmigration'
    }
    if any(keyword in normalized for keyword in protected):
        return (False, None, "System/built-in application")

    # No installed app and no system keyword claims this directory.
    return (True, 'medium', "No matching application found")
|
||||
|
||||
|
||||
def analyze_library_dir(library_path, min_size_bytes, installed_apps):
    """
    Scan one Library subdirectory for likely-orphaned application data.

    Args:
        library_path: Path to scan (e.g., ~/Library/Application Support)
        min_size_bytes: Entries smaller than this are ignored.
        installed_apps: Set of installed app names.

    Returns:
        List of (name, path, size, confidence, reason) tuples,
        sorted largest-first.
    """
    if not os.path.exists(library_path):
        return []

    found = []

    try:
        for child in os.scandir(library_path):
            if not child.is_dir():
                continue
            dir_size = get_dir_size(child.path)
            if dir_size < min_size_bytes:
                continue
            orphaned, confidence, reason = is_likely_orphaned(
                child.name,
                installed_apps
            )
            if orphaned:
                found.append((child.name, child.path, dir_size, confidence, reason))
    except PermissionError:
        # Bail out entirely on an unreadable directory, reporting to stderr.
        print(f"⚠️ Permission denied: {library_path}", file=sys.stderr)
        return []

    # Largest candidates first so the biggest wins surface at the top.
    found.sort(key=lambda entry: entry[2], reverse=True)
    return found
|
||||
|
||||
|
||||
def main():
    """Scan common ~/Library locations and report likely-orphaned app data."""
    parser = argparse.ArgumentParser(
        description='Find orphaned application data'
    )
    parser.add_argument(
        '--min-size',
        type=int,
        default=10,
        help='Minimum size in MB to report (default: 10)'
    )
    args = parser.parse_args()

    size_threshold = args.min_size * 1024 * 1024  # MB -> bytes

    print("🔍 Searching for Orphaned Application Data")
    print("=" * 70)

    # Build the baseline set of installed apps to match against.
    print("Scanning installed applications...")
    installed = get_installed_apps()
    print(f"Found {len(installed)} installed applications\n")

    # The ~/Library locations where uninstalled apps leave data behind.
    home = Path.home()
    scan_targets = {
        'Application Support': home / 'Library' / 'Application Support',
        'Containers': home / 'Library' / 'Containers',
        'Preferences': home / 'Library' / 'Preferences',
        'Saved Application State': home / 'Library' / 'Saved Application State'
    }

    confidence_icons = {'high': '🔴', 'medium': '🟡', 'low': '🟢'}
    collected = []
    grand_total = 0

    for category, target in scan_targets.items():
        print(f"\n📂 {category}")
        print("-" * 70)

        findings = analyze_library_dir(target, size_threshold, installed)

        if not findings:
            print("No orphaned data found above minimum size")
            continue

        print(f"{'Name':<40} {'Size':<12} {'Confidence'}")
        print("-" * 70)

        for name, full_path, size, confidence, reason in findings:
            icon = confidence_icons[confidence]
            # Truncate long names so the table columns stay aligned.
            display_name = name if len(name) <= 37 else name[:34] + "..."
            print(f"{display_name:<40} {format_size(size):<12} {icon} {confidence}")

            collected.append((category, name, full_path, size, confidence, reason))
            grand_total += size

    # Summary
    print("\n\n📊 Summary")
    print("=" * 70)
    print(f"Total orphaned data found: {len(collected)} items")
    print(f"Total size: {format_size(grand_total)}")

    if collected:
        print("\n\n🗑️ Recommended Deletions (Medium/High Confidence)")
        print("=" * 70)

        for category, name, path, size, confidence, reason in collected:
            if confidence in ('medium', 'high'):
                print(f"\n{name}")
                print(f" Location: {path}")
                print(f" Size: {format_size(size)}")
                print(f" Reason: {reason}")
                print(f" ⚠️ Verify this app is truly uninstalled before deleting")

        print("\n\n💡 Next Steps:")
        print(" 1. Double-check each item in /Applications and ~/Applications")
        print(" 2. Search Spotlight for the application name")
        print(" 3. If truly uninstalled, safe to delete with:")
        print(" rm -rf '<path>'")
        print(" 4. Or use safe_delete.py for interactive cleanup")

    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
    sys.exit(main())
|
||||
305
macos-cleaner/scripts/safe_delete.py
Executable file
305
macos-cleaner/scripts/safe_delete.py
Executable file
@@ -0,0 +1,305 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Interactive safe file/directory deletion with confirmation.
|
||||
|
||||
Usage:
|
||||
python3 safe_delete.py <path1> [path2] [path3] ...
|
||||
python3 safe_delete.py --batch <file_with_paths>
|
||||
|
||||
Options:
|
||||
--batch FILE Read paths from a file (one per line)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import argparse
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def format_size(bytes_size):
    """Convert a byte count to a human-readable string such as '2.0 GB'."""
    size = float(bytes_size)
    units = ['B', 'KB', 'MB', 'GB', 'TB']
    for unit in units:
        if size < 1024.0:
            return f"{size:.1f} {unit}"
        size /= 1024.0
    # Beyond TB everything is reported in petabytes.
    return f"{size:.1f} PB"
|
||||
|
||||
|
||||
def get_size(path):
    """Return the size in bytes of a file or directory; 0 if missing or on error."""
    target = Path(path)

    if not target.exists():
        return 0

    if target.is_file():
        return target.stat().st_size

    if target.is_dir():
        try:
            proc = subprocess.run(
                ['du', '-sk', path],
                capture_output=True,
                text=True,
                timeout=30
            )
            if proc.returncode == 0:
                # du -sk reports kibibytes; convert to bytes.
                return int(proc.stdout.split()[0]) * 1024
        except (subprocess.TimeoutExpired, ValueError, IndexError):
            pass

    return 0
|
||||
|
||||
|
||||
def get_description(path):
    """Return a short human-readable description of *path*."""
    target = Path(path)

    if not target.exists():
        return "Path does not exist"

    if target.is_file():
        # Fall back to the literal word 'file' for extension-less files.
        return f"File ({target.suffix or 'file'})"

    if target.is_dir():
        try:
            entry_count = sum(1 for _ in target.iterdir())
            return f"Directory ({entry_count} items)"
        except PermissionError:
            return "Directory (permission denied to list)"

    return "Unknown"
|
||||
|
||||
|
||||
def confirm_delete(path, size, description):
    """
    Show one item's details and ask the user to confirm its deletion.

    Args:
        path: File/directory path.
        size: Size in bytes.
        description: Human-readable description of the item.

    Returns:
        True only when the user answers 'y'; anything else cancels.
    """
    print(f"\n🗑️ Confirm Deletion")
    print("━" * 50)
    print(f"Path: {path}")
    print(f"Size: {format_size(size)}")
    print(f"Description: {description}")

    # Warn extra loudly when the path looks like it holds personal data.
    lowered = str(path).lower()
    sensitive_markers = (
        'documents', 'desktop', 'pictures', 'movies',
        'downloads', 'music', '.ssh', 'credentials'
    )
    if any(marker in lowered for marker in sensitive_markers):
        print("\n⚠️ WARNING: This path may contain important personal data!")
        print(" Consider backing up before deletion.")

    # Default answer is No: only an explicit 'y' proceeds.
    answer = input("\nDelete this item? [y/N]: ").strip().lower()
    return answer == 'y'
|
||||
|
||||
|
||||
def batch_confirm(items):
    """
    Display all candidate items and ask which ones to delete.

    Args:
        items: List of (path, size, description) tuples.

    Returns:
        The subset of *items* the user approved (possibly empty).
    """
    print("\n📋 Items to Delete:")
    print("━" * 70)
    print(f"{'#':<4} {'Size':<12} {'Path'}")
    print("-" * 70)

    for number, (path, size, description) in enumerate(items, 1):
        shown = str(path)
        if len(shown) > 48:
            shown = shown[:45] + "..."  # keep the table columns aligned
        print(f"{number:<4} {format_size(size):<12} {shown}")

    combined = sum(entry[1] for entry in items)
    print("-" * 70)
    print(f"{'Total':<4} {format_size(combined):<12}")

    print("\nOptions:")
    print(" 'all' - Delete all items")
    print(" '1,3,5' - Delete specific items by number")
    print(" '1-5' - Delete range of items")
    print(" 'none' - Cancel (default)")

    choice = input("\nYour choice: ").strip().lower()

    if choice in ('', 'none'):
        return []
    if choice == 'all':
        return items

    # Parse a comma-separated mix of single indices and lo-hi ranges.
    approved = []
    for token in choice.replace(' ', '').split(','):
        try:
            if '-' in token:
                lo, hi = token.split('-')
                # User-facing numbering is 1-based; clamp to valid indices.
                for idx in range(int(lo) - 1, int(hi)):
                    if 0 <= idx < len(items):
                        approved.append(items[idx])
            else:
                idx = int(token) - 1
                if 0 <= idx < len(items):
                    approved.append(items[idx])
        except ValueError:
            print(f"⚠️ Ignoring invalid selection: {token}")
            continue

    return approved
|
||||
|
||||
|
||||
def delete_path(path):
    """
    Delete a file or a directory tree.

    Returns:
        (success, message) tuple describing the outcome.
    """
    try:
        target = Path(path)

        if not target.exists():
            return (False, "Path does not exist")

        if target.is_file():
            target.unlink()
        elif target.is_dir():
            shutil.rmtree(path)
        else:
            # Sockets, fifos, etc. are deliberately left alone.
            return (False, "Unknown path type")

        return (True, "Deleted successfully")

    except PermissionError:
        return (False, "Permission denied")
    except Exception as e:
        return (False, f"Error: {str(e)}")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: gather paths, confirm interactively, then delete."""
    parser = argparse.ArgumentParser(
        description='Interactive safe deletion'
    )
    parser.add_argument(
        'paths',
        nargs='*',
        help='Paths to delete'
    )
    parser.add_argument(
        '--batch',
        metavar='FILE',
        help='Read paths from file (one per line)'
    )
    args = parser.parse_args()

    # Collect candidate paths from the batch file or positional arguments.
    if args.batch:
        batch_file = Path(args.batch)
        if not batch_file.exists():
            print(f"❌ Batch file not found: {args.batch}")
            return 1

        candidates = []
        with batch_file.open('r') as f:
            for raw in f:
                stripped = raw.strip()
                # Skip blanks and '#' comment lines.
                if stripped and not stripped.startswith('#'):
                    candidates.append(stripped)
    else:
        candidates = args.paths

    if not candidates:
        parser.print_help()
        return 1

    # Pair each existing path with its size and description.
    items = [
        (p, get_size(p), get_description(p))
        for p in candidates
        if Path(p).exists()
    ]

    if not items:
        print("❌ No valid paths to delete")
        return 1

    if len(items) == 1:
        # Single item: simple yes/no confirmation.
        path, size, description = items[0]
        if not confirm_delete(path, size, description):
            print("\n✅ Deletion cancelled")
            return 0

        success, message = delete_path(path)
        if not success:
            print(f"\n❌ {message}")
            return 1
        print(f"\n✅ {message}")
        print(f" Freed: {format_size(size)}")
        return 0

    # Multiple items: batch selection flow.
    selected = batch_confirm(items)
    if not selected:
        print("\n✅ Deletion cancelled")
        return 0

    print(f"\n🗑️ Deleting {len(selected)} items...")
    print("━" * 50)

    deleted = 0
    freed = 0
    for path, size, description in selected:
        success, message = delete_path(path)
        print(f"{'✅' if success else '❌'} {path}: {message}")
        if success:
            deleted += 1
            freed += size

    print("━" * 50)
    print(f"\n📊 Results:")
    print(f" Successfully deleted: {deleted}/{len(selected)}")
    print(f" Total freed: {format_size(freed)}")

    # Non-zero exit when any individual deletion failed.
    return 0 if deleted == len(selected) else 1
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
    sys.exit(main())
|
||||
Reference in New Issue
Block a user