feat: add engineering team skills with fullstack-engineer package
Add comprehensive fullstack engineering skill package: Fullstack Engineer: - Code quality analyzer (Python tool) - Fullstack scaffolder for rapid project setup (Python tool) - Project scaffolder with best practices (Python tool) - Architecture patterns reference (MVC, microservices, event-driven) - Development workflows (Git, CI/CD, testing) - Tech stack guide (frontend, backend, database, DevOps) Includes packaged .zip archive for easy distribution and comprehensive roadmap for future engineering skills. This expands the library to 9 production-ready skills across 4 domains: Marketing, C-Level, Product Team, and Engineering. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,466 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Code Quality Analyzer - Analyzes code for quality metrics, patterns, and best practices
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import ast
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Tuple
|
||||
import subprocess
|
||||
|
||||
class CodeQualityAnalyzer:
    """Static analyzer that scores a project on code quality.

    Walks the project tree (skipping generated output such as node_modules
    and dist), collects size metrics, complexity/security/performance
    heuristics, test and documentation coverage, then folds everything into
    a 0-100 score plus prioritized recommendations.
    """

    # Directory name fragments that mark generated/vendored output.
    _SKIP_DIRS = ('node_modules', 'dist')

    # TS/JS source extensions scanned by the heuristic passes.
    _JS_EXTENSIONS = ('.ts', '.tsx', '.js', '.jsx')

    # Matches both classic declarations and arrow-function assignments.
    # Hoisted so findall() and split() in _analyze_complexity are guaranteed
    # to use the identical pattern.
    _FUNC_PATTERN = re.compile(
        r'function\s+\w+|(?:const|let|var)\s+\w+\s*=\s*(?:async\s*)?\([^)]*\)\s*=>'
    )

    def __init__(self, project_path: str):
        """Prepare an analyzer rooted at *project_path* (not validated here)."""
        self.project_path = Path(project_path)
        # Every analysis pass accumulates into this dict; analyze() returns it.
        self.results: Dict = {
            'metrics': {},
            'issues': [],
            'suggestions': [],
            'score': 0
        }

    def analyze(self) -> Dict:
        """Run complete code quality analysis and return the results dict."""
        print("🔍 Analyzing code quality...")

        # Order matters: _analyze_documentation reads the metrics collected
        # by _analyze_code_metrics, and _calculate_score reads everything.
        self._analyze_code_metrics()
        self._analyze_complexity()
        self._analyze_dependencies()
        self._analyze_security()
        self._analyze_performance()
        self._analyze_tests()
        self._analyze_documentation()
        self._calculate_score()

        return self.results

    def _is_generated(self, path: Path) -> bool:
        """True when *path* lies inside a generated/vendored directory."""
        text = str(path)
        return any(fragment in text for fragment in self._SKIP_DIRS)

    def _read_text(self, path: Path) -> str:
        """Read a source file leniently (UTF-8, undecodable bytes ignored)."""
        with open(path, 'r', encoding='utf-8', errors='ignore') as f:
            return f.read()

    def _analyze_code_metrics(self):
        """Count files and lines per language and derive the comment ratio."""
        metrics = {
            'total_files': 0,
            'total_lines': 0,
            'code_lines': 0,
            'comment_lines': 0,
            'blank_lines': 0,
            'avg_file_length': 0,
            'languages': {}
        }

        extensions = {
            '.ts': 'TypeScript',
            '.tsx': 'TypeScript React',
            '.js': 'JavaScript',
            '.jsx': 'JavaScript React',
            '.py': 'Python',
            '.go': 'Go',
            '.sql': 'SQL',
            '.graphql': 'GraphQL'
        }

        for file_path in self.project_path.rglob('*'):
            if not (file_path.is_file() and file_path.suffix in extensions):
                continue
            if self._is_generated(file_path):
                continue

            metrics['total_files'] += 1
            lang = extensions[file_path.suffix]
            metrics['languages'][lang] = metrics['languages'].get(lang, 0) + 1

            lines = self._read_text(file_path).splitlines(keepends=True)
            metrics['total_lines'] += len(lines)

            for line in lines:
                stripped = line.strip()
                if not stripped:
                    metrics['blank_lines'] += 1
                # Heuristic: lines starting with a comment token count as
                # comments, including '*' continuations of block comments.
                elif stripped.startswith(('#', '//', '/*', '*')):
                    metrics['comment_lines'] += 1
                else:
                    metrics['code_lines'] += 1

        if metrics['total_files'] > 0:
            metrics['avg_file_length'] = metrics['total_lines'] / metrics['total_files']
            metrics['comment_ratio'] = (
                metrics['comment_lines'] / metrics['code_lines']
                if metrics['code_lines'] > 0 else 0
            )

        self.results['metrics'] = metrics

    def _analyze_complexity(self):
        """Flag files with too many functions, deep nesting, long functions.

        Scans all TS/JS sources (the original intent per the pass name); the
        checks are regex heuristics, not a real parser.
        """
        complexity_issues = []

        for file_path in self.project_path.rglob('*'):
            if not (file_path.is_file() and file_path.suffix in self._JS_EXTENSIONS):
                continue
            if self._is_generated(file_path):
                continue

            content = self._read_text(file_path)
            rel = str(file_path.relative_to(self.project_path))

            functions = self._FUNC_PATTERN.findall(content)
            if len(functions) > 20:
                complexity_issues.append({
                    'file': rel,
                    'issue': 'Too many functions in file',
                    'severity': 'warning',
                    'count': len(functions)
                })

            # More than 4 indentation levels (assuming 4-space indents)
            # usually means logic that should be extracted into helpers.
            max_indent = max(
                (len(line) - len(line.lstrip()) for line in content.split('\n')),
                default=0
            )
            if max_indent > 16:
                complexity_issues.append({
                    'file': rel,
                    'issue': 'Deep nesting detected',
                    'severity': 'warning',
                    'max_indent': max_indent
                })

            # Splitting on the same pattern used above: every chunk after the
            # first is the text following one function header.
            for block in self._FUNC_PATTERN.split(content)[1:]:
                body_lines = block.split('\n')
                if len(body_lines) > 50:
                    complexity_issues.append({
                        'file': rel,
                        'issue': 'Long function detected',
                        'severity': 'warning',
                        'lines': len(body_lines)
                    })

        self.results['complexity'] = complexity_issues

    def _analyze_dependencies(self):
        """Inspect package.json manifests for risky version specifiers."""
        dependency_analysis = {
            'outdated': [],
            'security_issues': [],
            'unused': [],
            'missing': []
        }

        for package_json in self.project_path.rglob('package.json'):
            if self._is_generated(package_json):
                continue

            try:
                with open(package_json, 'r', encoding='utf-8') as f:
                    data = json.load(f)
            except (json.JSONDecodeError, OSError):
                # One malformed/unreadable manifest must not abort the run.
                continue

            deps = {**data.get('dependencies', {}), **data.get('devDependencies', {})}

            for dep, version in deps.items():
                # Workspace/object entries are skipped; only plain version
                # strings are classified.
                if not isinstance(version, str):
                    continue
                # '^'/'~' semver ranges are acceptable and need no flag;
                # wildcard pins are a supply-chain risk.
                if version in ('*', 'latest'):
                    dependency_analysis['security_issues'].append({
                        'package': dep,
                        'issue': 'Using wildcard version',
                        'recommendation': 'Pin to specific version'
                    })

        self.results['dependencies'] = dependency_analysis

    def _analyze_security(self):
        """Grep TS/JS sources (excluding tests) for insecure patterns."""
        security_issues = []

        # Compiled once per call instead of once per file per pattern.
        checks = [
            (re.compile(raw, re.IGNORECASE), issue) for raw, issue in [
                (r'console\.log\(.*password.*\)', 'Password logged to console'),
                (r'eval\(', 'Use of eval() detected'),
                (r'innerHTML\s*=', 'Direct innerHTML manipulation'),
                (r'document\.write\(', 'Use of document.write()'),
                (r'api[kK]ey\s*=\s*[\'"][^\'"]+[\'"]', 'Hardcoded API key'),
                (r'password\s*=\s*[\'"][^\'"]+[\'"]', 'Hardcoded password'),
                (r'secret\s*=\s*[\'"][^\'"]+[\'"]', 'Hardcoded secret'),
                (r'TODO:?\s+security', 'Security TODO found'),
                (r'FIXME:?\s+security', 'Security FIXME found'),
            ]
        ]

        for file_path in self.project_path.rglob('*'):
            if not (file_path.is_file() and file_path.suffix in self._JS_EXTENSIONS):
                continue
            if self._is_generated(file_path) or '.test.' in str(file_path):
                continue

            content = self._read_text(file_path)
            rel = str(file_path.relative_to(self.project_path))

            for pattern, issue in checks:
                for match in pattern.finditer(content):
                    # 1-based line number of the match start.
                    line_num = content.count('\n', 0, match.start()) + 1
                    is_credential = (
                        'password' in issue.lower() or 'key' in issue.lower()
                    )
                    security_issues.append({
                        'file': rel,
                        'line': line_num,
                        'issue': issue,
                        'severity': 'high' if is_credential else 'medium'
                    })

        self.results['security'] = security_issues

    def _analyze_performance(self):
        """Grep TS/JS sources for likely performance smells."""
        performance_issues = []

        patterns = [
            (r'for\s*\(.*\)\s*{[^}]*for\s*\(', 'Nested loops detected'),
            (r'async\s+\w+\s*\([^)]*\)\s*{[^}]*await[^}]*await',
             'Multiple sequential awaits'),
            (r'\.map\([^)]+\)\.filter\([^)]+\)\.map\(', 'Inefficient chaining'),
            (r'document\.querySelector.*inside.*loop', 'DOM query in loop'),
            (r'useState\([^)]*\).*useState\([^)]*\).*useState\([^)]*\).*'
             r'useState\([^)]*\).*useState\(', 'Too many useState hooks'),
        ]

        for file_path in self.project_path.rglob('*'):
            if not (file_path.is_file() and file_path.suffix in self._JS_EXTENSIONS):
                continue
            if self._is_generated(file_path):
                continue

            content = self._read_text(file_path)
            rel = str(file_path.relative_to(self.project_path))

            for pattern, issue in patterns:
                # DOTALL lets multi-line constructs (loop bodies) match.
                if re.search(pattern, content, re.IGNORECASE | re.DOTALL):
                    performance_issues.append({
                        'file': rel,
                        'issue': issue,
                        'severity': 'medium'
                    })

        self.results['performance'] = performance_issues

    def _analyze_tests(self):
        """Count test files/suites/cases and detect coverage and E2E setup."""
        test_analysis = {
            'test_files': 0,
            'test_suites': 0,
            'test_cases': 0,
            'coverage_configured': False,
            'e2e_tests': False,
            'unit_tests': False,
            'integration_tests': False
        }

        for test_file in self.project_path.rglob('*.test.*'):
            if 'node_modules' in str(test_file):
                continue
            test_analysis['test_files'] += 1
            test_analysis['unit_tests'] = True

            content = self._read_text(test_file)
            test_analysis['test_suites'] += len(re.findall(r'describe\(', content))
            test_analysis['test_cases'] += len(re.findall(r'(?:it|test)\(', content))

        # Conventional E2E locations for Cypress/Playwright-style suites.
        if (self.project_path / 'cypress').exists() or \
                (self.project_path / 'tests' / 'e2e').exists():
            test_analysis['e2e_tests'] = True

        # A 'coverage' mention in a jest config (or package.json) is taken
        # as evidence that coverage reporting is wired up.
        for config_file in ('jest.config.js', 'jest.config.ts', 'package.json'):
            config_path = self.project_path / config_file
            if not config_path.exists():
                continue
            if 'coverage' in self._read_text(config_path):
                test_analysis['coverage_configured'] = True
                break

        self.results['tests'] = test_analysis

    def _analyze_documentation(self):
        """Assess README, API docs, JSDoc coverage and the comment ratio.

        Must run after _analyze_code_metrics, whose comment_ratio it reuses.
        """
        doc_analysis = {
            'readme_exists': False,
            'api_docs': False,
            'inline_comments_ratio': 0,
            'jsdoc_coverage': 0,
            'missing_docs': []
        }

        readme_path = self.project_path / 'README.md'
        if readme_path.exists():
            doc_analysis['readme_exists'] = True
            readme_content = self._read_text(readme_path)
            lowered = readme_content.lower()
            doc_analysis['readme_quality'] = {
                'has_installation': 'installation' in lowered,
                'has_usage': 'usage' in lowered,
                'has_api': 'api' in lowered,
                'has_contributing': 'contributing' in lowered,
                'length': len(readme_content)
            }

        if (self.project_path / 'docs').exists() or \
                (self.project_path / 'api-docs').exists():
            doc_analysis['api_docs'] = True

        total_functions = 0
        documented_functions = 0

        for file_path in self.project_path.rglob('*.ts'):
            if self._is_generated(file_path) or '.test.' in str(file_path):
                continue

            content = self._read_text(file_path)
            total_functions += len(
                re.findall(r'(?:export\s+)?(?:async\s+)?function\s+(\w+)', content))
            # JSDoc blocks are a proxy for documented functions; they may
            # also document classes or constants, hence the cap below.
            documented_functions += len(re.findall(r'/\*\*[\s\S]*?\*/', content))

        if total_functions > 0:
            # Capped at 100 so extra JSDoc blocks cannot report >100%.
            doc_analysis['jsdoc_coverage'] = min(
                100, (documented_functions / total_functions) * 100)

        doc_analysis['inline_comments_ratio'] = \
            self.results['metrics'].get('comment_ratio', 0)

        self.results['documentation'] = doc_analysis

    def _calculate_score(self):
        """Fold all findings into a 0-100 score, then build recommendations."""
        score = 100

        # Penalties scale with issue count; security weighs heaviest.
        score -= len(self.results.get('security', [])) * 5
        score -= len(self.results.get('complexity', [])) * 2
        score -= len(self.results.get('performance', [])) * 3

        tests = self.results.get('tests', {})
        docs = self.results.get('documentation', {})

        # Bonuses for demonstrated good practice.
        if tests.get('test_files', 0) > 0:
            score += 10
        if tests.get('coverage_configured'):
            score += 5
        if tests.get('e2e_tests'):
            score += 5
        if docs.get('readme_exists'):
            score += 5
        if docs.get('jsdoc_coverage', 0) > 50:
            score += 5

        # Clamp to the advertised 0-100 range.
        self.results['score'] = max(0, min(100, score))

        self._generate_recommendations()

    def _generate_recommendations(self):
        """Translate the findings into a prioritized recommendation list."""
        recommendations = []

        if self.results['score'] < 70:
            recommendations.append({
                'priority': 'high',
                'category': 'overall',
                'recommendation': 'Code quality needs significant improvement'
            })

        if len(self.results.get('security', [])) > 0:
            recommendations.append({
                'priority': 'critical',
                'category': 'security',
                'recommendation':
                    f"Fix {len(self.results['security'])} security issues immediately"
            })

        if self.results.get('tests', {}).get('test_files', 0) < 5:
            recommendations.append({
                'priority': 'high',
                'category': 'testing',
                'recommendation': 'Increase test coverage - aim for 80%+ coverage'
            })

        if not self.results.get('documentation', {}).get('readme_exists'):
            recommendations.append({
                'priority': 'medium',
                'category': 'documentation',
                'recommendation': 'Add comprehensive README.md'
            })

        if self.results.get('documentation', {}).get('jsdoc_coverage', 0) < 30:
            recommendations.append({
                'priority': 'medium',
                'category': 'documentation',
                'recommendation': 'Add JSDoc comments to public functions'
            })

        self.results['recommendations'] = recommendations

    def generate_report(self) -> str:
        """Render the collected results as a human-readable text report."""
        report = []
        report.append("=" * 60)
        report.append("CODE QUALITY ANALYSIS REPORT")
        report.append("=" * 60)
        report.append(f"\nOverall Score: {self.results['score']}/100")
        report.append("-" * 60)

        # Metrics
        metrics = self.results.get('metrics', {})
        report.append("\n📊 Code Metrics:")
        report.append(f"  Total Files: {metrics.get('total_files', 0)}")
        report.append(f"  Total Lines: {metrics.get('total_lines', 0)}")
        report.append(f"  Code Lines: {metrics.get('code_lines', 0)}")
        report.append(f"  Comment Ratio: {metrics.get('comment_ratio', 0):.1%}")

        # Security — only the first 5 issues are itemized.
        security = self.results.get('security', [])
        report.append(f"\n🔒 Security Issues: {len(security)}")
        for issue in security[:5]:
            report.append(f"  ⚠️ {issue['file']}:{issue['line']} - {issue['issue']}")

        # Tests
        tests = self.results.get('tests', {})
        report.append("\n🧪 Test Coverage:")
        report.append(f"  Test Files: {tests.get('test_files', 0)}")
        report.append(f"  Test Cases: {tests.get('test_cases', 0)}")
        report.append(f"  E2E Tests: {'✅' if tests.get('e2e_tests') else '❌'}")
        report.append(
            f"  Coverage Config: {'✅' if tests.get('coverage_configured') else '❌'}")

        # Documentation
        docs = self.results.get('documentation', {})
        report.append("\n📚 Documentation:")
        report.append(f"  README: {'✅' if docs.get('readme_exists') else '❌'}")
        report.append(f"  JSDoc Coverage: {docs.get('jsdoc_coverage', 0):.1f}%")

        # Recommendations — top 5 only.
        report.append("\n💡 Top Recommendations:")
        for rec in self.results.get('recommendations', [])[:5]:
            emoji = '🔴' if rec['priority'] == 'critical' \
                else '🟡' if rec['priority'] == 'high' else '🟢'
            report.append(f"  {emoji} [{rec['category']}] {rec['recommendation']}")

        return "\n".join(report)
|
||||
|
||||
def main():
    """CLI entry point: analyze <project_path> and print the text report.

    Pass --json after the path to additionally dump the raw results to
    code_quality_report.json in the current directory.
    """
    import sys

    if len(sys.argv) < 2:
        # Usage now documents the --json flag, which was previously
        # supported but undiscoverable.
        print("Usage: python code_quality_analyzer.py <project_path> [--json]")
        sys.exit(1)

    project_path = sys.argv[1]
    analyzer = CodeQualityAnalyzer(project_path)
    analyzer.analyze()
    print(analyzer.generate_report())

    # Optionally save the machine-readable report. Accept --json anywhere
    # after the path instead of only at argv[2] (backward compatible).
    if '--json' in sys.argv[2:]:
        with open('code_quality_report.json', 'w', encoding='utf-8') as f:
            json.dump(analyzer.results, f, indent=2)
        print("\n📄 JSON report saved to code_quality_report.json")


if __name__ == '__main__':
    main()
|
||||
1531
engineering-team/fullstack-engineer/scripts/fullstack_scaffolder.py
Normal file
1531
engineering-team/fullstack-engineer/scripts/fullstack_scaffolder.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,849 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Project Scaffolder - Quickly scaffold fullstack projects with best practices
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
import argparse
|
||||
|
||||
class ProjectScaffolder:
|
||||
def __init__(self, project_name: str, project_type: str):
    """Record the target project's name/type and anchor it under cwd."""
    self.project_type = project_type
    self.project_name = project_name
    # Everything the scaffolder writes lands below <cwd>/<project_name>.
    self.root_path = Path.cwd() / project_name
|
||||
|
||||
def create_nextjs_graphql_project(self):
    """Create a Next.js + GraphQL + PostgreSQL project"""
    print(f"🚀 Creating Next.js + GraphQL project: {self.project_name}")

    # Directory skeleton, grouped by area of the stack.
    frontend_dirs = [
        "frontend/src/components",
        "frontend/src/pages/api",
        "frontend/src/lib",
        "frontend/src/hooks",
        "frontend/src/styles",
        "frontend/src/types",
        "frontend/src/utils",
        "frontend/public",
    ]
    backend_dirs = [
        "backend/src/resolvers",
        "backend/src/schema",
        "backend/src/models",
        "backend/src/services",
        "backend/src/middleware",
        "backend/src/utils",
        "backend/src/types",
    ]
    infra_dirs = [
        "database/migrations",
        "database/seeds",
        "docker",
        "tests/unit",
        "tests/integration",
        "tests/e2e",
        ".github/workflows",
    ]

    for rel_dir in frontend_dirs + backend_dirs + infra_dirs:
        self.root_path.joinpath(rel_dir).mkdir(parents=True, exist_ok=True)

    # Emit every top-level config file the stack expects; each generator
    # writes independently, order kept for readability.
    generators = (
        self._create_package_json,
        self._create_docker_compose,
        self._create_env_example,
        self._create_typescript_config,
        self._create_eslint_config,
        self._create_prettier_config,
        self._create_github_workflows,
        self._create_readme,
    )
    for generate in generators:
        generate()

    print("✅ Project structure created successfully!")
|
||||
|
||||
def _create_package_json(self):
    """Create package.json files for frontend and backend"""

    # Next.js app manifest: Apollo + react-query + zustand client stack,
    # jest/cypress for testing.
    frontend_package = {
        "name": f"{self.project_name}-frontend",
        "version": "1.0.0",
        "private": True,
        "scripts": {
            "dev": "next dev",
            "build": "next build",
            "start": "next start",
            "lint": "next lint",
            "test": "jest --watch",
            "test:ci": "jest --ci --coverage",
            "type-check": "tsc --noEmit",
            "format": "prettier --write ."
        },
        "dependencies": {
            "next": "^14.0.0",
            "react": "^18.2.0",
            "react-dom": "^18.2.0",
            "@apollo/client": "^3.8.0",
            "graphql": "^16.8.0",
            "axios": "^1.6.0",
            "@tanstack/react-query": "^5.0.0",
            "zustand": "^4.4.0",
            "zod": "^3.22.0"
        },
        "devDependencies": {
            "@types/node": "^20.0.0",
            "@types/react": "^18.2.0",
            "@types/react-dom": "^18.2.0",
            "typescript": "^5.3.0",
            "eslint": "^8.50.0",
            "eslint-config-next": "^14.0.0",
            "prettier": "^3.1.0",
            "jest": "^29.7.0",
            "@testing-library/react": "^14.1.0",
            "@testing-library/jest-dom": "^6.1.0",
            "cypress": "^13.6.0"
        }
    }

    # Apollo Server + Express API manifest with knex/pg persistence and
    # the usual auth/logging middleware.
    backend_package = {
        "name": f"{self.project_name}-backend",
        "version": "1.0.0",
        "private": True,
        "scripts": {
            "dev": "nodemon --exec ts-node src/index.ts",
            "build": "tsc",
            "start": "node dist/index.js",
            "test": "jest --watch",
            "test:ci": "jest --ci --coverage",
            "lint": "eslint src --ext .ts",
            "format": "prettier --write src",
            "migrate": "knex migrate:latest",
            "seed": "knex seed:run"
        },
        "dependencies": {
            "apollo-server-express": "^3.13.0",
            "express": "^4.18.0",
            "graphql": "^16.8.0",
            "pg": "^8.11.0",
            "knex": "^3.1.0",
            "bcryptjs": "^2.4.3",
            "jsonwebtoken": "^9.0.0",
            "dotenv": "^16.3.0",
            "cors": "^2.8.5",
            "helmet": "^7.1.0",
            "winston": "^3.11.0",
            "joi": "^17.11.0"
        },
        "devDependencies": {
            "@types/node": "^20.0.0",
            "@types/express": "^4.17.0",
            "@types/bcryptjs": "^2.4.0",
            "@types/jsonwebtoken": "^9.0.0",
            "typescript": "^5.3.0",
            "ts-node": "^10.9.0",
            "nodemon": "^3.0.0",
            "eslint": "^8.50.0",
            "@typescript-eslint/parser": "^6.10.0",
            "@typescript-eslint/eslint-plugin": "^6.10.0",
            "prettier": "^3.1.0",
            "jest": "^29.7.0",
            "@types/jest": "^29.5.0",
            "supertest": "^6.3.0"
        }
    }

    # Drop each manifest into its workspace directory.
    for workspace, manifest in (("frontend", frontend_package),
                                ("backend", backend_package)):
        with open(self.root_path / workspace / "package.json", "w") as f:
            json.dump(manifest, f, indent=2)
|
||||
|
||||
def _create_docker_compose(self):
    """Create docker-compose.yml for local development"""
    # Full local stack: Postgres and Redis (with healthchecks the app
    # containers wait on), plus hot-reloading backend/frontend services
    # built from the Dockerfiles written below. ${VAR:-default} forms let
    # .env override credentials without editing this file.
    docker_compose = """version: '3.8'

services:
  postgres:
    image: postgres:15-alpine
    environment:
      POSTGRES_USER: ${DB_USER:-developer}
      POSTGRES_PASSWORD: ${DB_PASSWORD:-password}
      POSTGRES_DB: ${DB_NAME:-projectdb}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
      - ./database/init.sql:/docker-entrypoint-initdb.d/init.sql
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-developer}"]
      interval: 10s
      timeout: 5s
      retries: 5

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  backend:
    build:
      context: ./backend
      dockerfile: ../docker/backend.Dockerfile
    ports:
      - "4000:4000"
    environment:
      NODE_ENV: development
      DATABASE_URL: postgresql://${DB_USER:-developer}:${DB_PASSWORD:-password}@postgres:5432/${DB_NAME:-projectdb}
      REDIS_URL: redis://redis:6379
      JWT_SECRET: ${JWT_SECRET:-your-secret-key}
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    volumes:
      - ./backend:/app
      - /app/node_modules
    command: npm run dev

  frontend:
    build:
      context: ./frontend
      dockerfile: ../docker/frontend.Dockerfile
    ports:
      - "3000:3000"
    environment:
      NEXT_PUBLIC_API_URL: http://backend:4000/graphql
    depends_on:
      - backend
    volumes:
      - ./frontend:/app
      - /app/node_modules
      - /app/.next
    command: npm run dev

volumes:
  postgres_data:
  redis_data:
"""
    with open(self.root_path / "docker-compose.yml", "w") as f:
        f.write(docker_compose)

    # Create Dockerfiles
    # Both images follow the same install -> copy -> build -> serve flow;
    # the anonymous /app/node_modules volumes in compose keep host mounts
    # from shadowing the installed dependencies.
    backend_dockerfile = """FROM node:18-alpine

WORKDIR /app

COPY package*.json ./
RUN npm ci

COPY . .

RUN npm run build

EXPOSE 4000

CMD ["npm", "start"]
"""

    frontend_dockerfile = """FROM node:18-alpine

WORKDIR /app

COPY package*.json ./
RUN npm ci

COPY . .

RUN npm run build

EXPOSE 3000

CMD ["npm", "start"]
"""

    with open(self.root_path / "docker" / "backend.Dockerfile", "w") as f:
        f.write(backend_dockerfile)

    with open(self.root_path / "docker" / "frontend.Dockerfile", "w") as f:
        f.write(frontend_dockerfile)
|
||||
|
||||
def _create_env_example(self):
    """Create .env.example file"""
    # Template of every environment variable the stack reads; committed as
    # .env.example so real secrets stay out of version control. The '#'
    # section headers below are part of the generated file's content.
    env_content = """# Database
DB_HOST=localhost
DB_PORT=5432
DB_USER=developer
DB_PASSWORD=password
DB_NAME=projectdb
DATABASE_URL=postgresql://developer:password@localhost:5432/projectdb

# Redis
REDIS_URL=redis://localhost:6379

# JWT
JWT_SECRET=your-secret-key-change-this-in-production
JWT_EXPIRY=7d

# API
API_PORT=4000
NEXT_PUBLIC_API_URL=http://localhost:4000/graphql

# Frontend
NEXT_PUBLIC_APP_URL=http://localhost:3000

# Environment
NODE_ENV=development

# Monitoring (optional)
SENTRY_DSN=
NEW_RELIC_LICENSE_KEY=

# AWS (optional)
AWS_REGION=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
S3_BUCKET_NAME=
"""
    with open(self.root_path / ".env.example", "w") as f:
        f.write(env_content)
|
||||
|
||||
def _create_typescript_config(self):
    """Create TypeScript configuration files"""

    # Frontend tsconfig.json
    # Next.js-oriented: noEmit (Next's compiler does the emit), preserve
    # JSX, and '@'-prefixed path aliases into src/.
    frontend_tsconfig = {
        "compilerOptions": {
            "target": "ES2022",
            "lib": ["dom", "dom.iterable", "esnext"],
            "allowJs": True,
            "skipLibCheck": True,
            "strict": True,
            "forceConsistentCasingInFileNames": True,
            "noEmit": True,
            "esModuleInterop": True,
            "module": "esnext",
            "moduleResolution": "node",
            "resolveJsonModule": True,
            "isolatedModules": True,
            "jsx": "preserve",
            "incremental": True,
            "paths": {
                "@/*": ["./src/*"],
                "@components/*": ["./src/components/*"],
                "@hooks/*": ["./src/hooks/*"],
                "@lib/*": ["./src/lib/*"],
                "@types/*": ["./src/types/*"],
                "@utils/*": ["./src/utils/*"]
            }
        },
        "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
        "exclude": ["node_modules"]
    }

    # Backend tsconfig.json
    # Node-oriented: commonjs output compiled from src/ into dist/, with
    # stricter unused/fallthrough checks and declaration output enabled.
    backend_tsconfig = {
        "compilerOptions": {
            "target": "ES2022",
            "module": "commonjs",
            "lib": ["ES2022"],
            "outDir": "./dist",
            "rootDir": "./src",
            "strict": True,
            "esModuleInterop": True,
            "skipLibCheck": True,
            "forceConsistentCasingInFileNames": True,
            "resolveJsonModule": True,
            "declaration": True,
            "declarationMap": True,
            "sourceMap": True,
            "noUnusedLocals": True,
            "noUnusedParameters": True,
            "noImplicitReturns": True,
            "noFallthroughCasesInSwitch": True,
            "paths": {
                "@/*": ["./src/*"],
                "@models/*": ["./src/models/*"],
                "@services/*": ["./src/services/*"],
                "@resolvers/*": ["./src/resolvers/*"],
                "@middleware/*": ["./src/middleware/*"],
                "@utils/*": ["./src/utils/*"],
                "@types/*": ["./src/types/*"]
            }
        },
        "include": ["src/**/*"],
        "exclude": ["node_modules", "dist", "**/*.test.ts"]
    }

    with open(self.root_path / "frontend" / "tsconfig.json", "w") as f:
        json.dump(frontend_tsconfig, f, indent=2)

    with open(self.root_path / "backend" / "tsconfig.json", "w") as f:
        json.dump(backend_tsconfig, f, indent=2)
|
||||
|
||||
def _create_eslint_config(self):
    """Create ESLint configuration"""
    # One shared root config: TS + React + hooks + Next rules, with
    # Prettier last so it disables conflicting formatting rules.
    eslintrc = {
        "extends": [
            "eslint:recommended",
            "plugin:@typescript-eslint/recommended",
            "plugin:react/recommended",
            "plugin:react-hooks/recommended",
            "next/core-web-vitals",
            "prettier"
        ],
        "parser": "@typescript-eslint/parser",
        "plugins": ["@typescript-eslint", "react", "react-hooks"],
        "rules": {
            "@typescript-eslint/no-unused-vars": ["error", {"argsIgnorePattern": "^_"}],
            "@typescript-eslint/no-explicit-any": "error",
            "react/react-in-jsx-scope": "off",
            "react/prop-types": "off",
            "no-console": ["warn", {"allow": ["warn", "error"]}]
        }
    }

    config_path = self.root_path / ".eslintrc.json"
    with open(config_path, "w") as f:
        json.dump(eslintrc, f, indent=2)
|
||||
|
||||
def _create_prettier_config(self):
    """Create Prettier configuration"""
    # Formatting contract shared by both workspaces.
    prettierrc = {
        "semi": True,
        "singleQuote": True,
        "tabWidth": 2,
        "trailingComma": "es5",
        "printWidth": 100,
        "bracketSpacing": True,
        "arrowParens": "always",
        "endOfLine": "lf"
    }

    with open(self.root_path / ".prettierrc", "w") as f:
        json.dump(prettierrc, f, indent=2)

    # .prettierignore
    # Generated output and secrets are never formatted.
    prettierignore = """node_modules
dist
.next
coverage
*.log
.env
.env.local
"""
    (self.root_path / ".prettierignore").write_text(prettierignore)
|
||||
|
||||
def _create_github_workflows(self):
|
||||
"""Create GitHub Actions workflows"""
|
||||
ci_workflow = """name: CI/CD Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, develop]
|
||||
pull_request:
|
||||
branches: [main, develop]
|
||||
|
||||
env:
|
||||
NODE_VERSION: '18'
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd frontend && npm ci
|
||||
cd ../backend && npm ci
|
||||
|
||||
- name: Run ESLint
|
||||
run: |
|
||||
cd frontend && npm run lint
|
||||
cd ../backend && npm run lint
|
||||
|
||||
- name: Run Type Check
|
||||
run: |
|
||||
cd frontend && npm run type-check
|
||||
cd ../backend && npx tsc --noEmit
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15
|
||||
env:
|
||||
POSTGRES_USER: test
|
||||
POSTGRES_PASSWORD: test
|
||||
POSTGRES_DB: testdb
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
redis:
|
||||
image: redis:7
|
||||
options: >-
|
||||
--health-cmd "redis-cli ping"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
ports:
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd frontend && npm ci
|
||||
cd ../backend && npm ci
|
||||
|
||||
- name: Run backend tests
|
||||
env:
|
||||
DATABASE_URL: postgresql://test:test@localhost:5432/testdb
|
||||
REDIS_URL: redis://localhost:6379
|
||||
JWT_SECRET: test-secret
|
||||
run: |
|
||||
cd backend
|
||||
npm run test:ci
|
||||
|
||||
- name: Run frontend tests
|
||||
run: |
|
||||
cd frontend
|
||||
npm run test:ci
|
||||
|
||||
- name: Upload coverage
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
files: ./frontend/coverage/lcov.info,./backend/coverage/lcov.info
|
||||
|
||||
security:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Run security audit
|
||||
run: |
|
||||
cd frontend && npm audit --audit-level=moderate
|
||||
cd ../backend && npm audit --audit-level=moderate
|
||||
|
||||
- name: Run Snyk security scan
|
||||
uses: snyk/actions/node@master
|
||||
env:
|
||||
SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
|
||||
with:
|
||||
args: --severity-threshold=high
|
||||
|
||||
build:
|
||||
needs: [lint, test]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Build frontend
|
||||
run: |
|
||||
cd frontend
|
||||
npm ci
|
||||
npm run build
|
||||
|
||||
- name: Build backend
|
||||
run: |
|
||||
cd backend
|
||||
npm ci
|
||||
npm run build
|
||||
|
||||
- name: Build Docker images
|
||||
run: |
|
||||
docker build -f docker/frontend.Dockerfile -t frontend:latest ./frontend
|
||||
docker build -f docker/backend.Dockerfile -t backend:latest ./backend
|
||||
"""
|
||||
with open(self.root_path / ".github" / "workflows" / "ci.yml", "w") as f:
|
||||
f.write(ci_workflow)
|
||||
|
||||
def _create_readme(self):
    """Create comprehensive README.md

    Renders the project README from one f-string template (tech stack,
    directory layout, setup steps, deployment/monitoring notes) and writes
    it to the project root. Reads only ``self.project_name`` and
    ``self.root_path``.
    """
    # NOTE(review): template body reconstructed from a mangled diff view —
    # whitespace inside the ASCII tree and code fences is conventional;
    # confirm against the original file.
    readme = f"""# {self.project_name}

## 🚀 Tech Stack

### Frontend
- **Framework**: Next.js 14 with TypeScript
- **State Management**: Zustand
- **Data Fetching**: Apollo Client (GraphQL) & TanStack Query
- **Styling**: Tailwind CSS / CSS Modules
- **Testing**: Jest, React Testing Library, Cypress

### Backend
- **Runtime**: Node.js with TypeScript
- **Framework**: Express + Apollo Server
- **Database**: PostgreSQL with Knex.js
- **Caching**: Redis
- **Authentication**: JWT
- **Testing**: Jest, Supertest

### DevOps
- **Containerization**: Docker & Docker Compose
- **CI/CD**: GitHub Actions
- **Monitoring**: Sentry, New Relic (optional)
- **Cloud**: AWS/GCP/Azure ready

## 📦 Project Structure

```
{self.project_name}/
├── frontend/              # Next.js application
│   ├── src/
│   │   ├── components/    # React components
│   │   ├── pages/         # Next.js pages
│   │   ├── hooks/         # Custom React hooks
│   │   ├── lib/           # Libraries and configs
│   │   ├── styles/        # Global styles
│   │   ├── types/         # TypeScript types
│   │   └── utils/         # Utility functions
│   └── public/            # Static assets
├── backend/               # Node.js GraphQL API
│   └── src/
│       ├── resolvers/     # GraphQL resolvers
│       ├── schema/        # GraphQL schema
│       ├── models/        # Database models
│       ├── services/      # Business logic
│       ├── middleware/    # Express middleware
│       └── utils/         # Utilities
├── database/              # Database files
│   ├── migrations/        # Database migrations
│   └── seeds/             # Seed data
├── tests/                 # Test files
│   ├── unit/              # Unit tests
│   ├── integration/       # Integration tests
│   └── e2e/               # End-to-end tests
├── docker/                # Docker configurations
└── .github/               # GitHub Actions workflows
```

## 🛠️ Getting Started

### Prerequisites
- Node.js 18+
- Docker & Docker Compose
- PostgreSQL 15+ (or use Docker)
- Redis 7+ (or use Docker)

### Installation

1. Clone the repository
```bash
git clone <repository-url>
cd {self.project_name}
```

2. Copy environment variables
```bash
cp .env.example .env
# Edit .env with your values
```

3. Start services with Docker Compose
```bash
docker-compose up -d
```

4. Install dependencies
```bash
# Frontend
cd frontend && npm install

# Backend
cd ../backend && npm install
```

5. Run database migrations
```bash
cd backend
npm run migrate
npm run seed  # Optional: seed data
```

6. Start development servers
```bash
# Terminal 1 - Backend
cd backend && npm run dev

# Terminal 2 - Frontend
cd frontend && npm run dev
```

Visit:
- Frontend: http://localhost:3000
- GraphQL Playground: http://localhost:4000/graphql
- PostgreSQL: localhost:5432
- Redis: localhost:6379

## 📝 Development

### Commands

#### Frontend
```bash
npm run dev         # Start development server
npm run build       # Build for production
npm run start       # Start production server
npm run test        # Run tests
npm run lint        # Lint code
npm run type-check  # TypeScript check
```

#### Backend
```bash
npm run dev      # Start development server
npm run build    # Build TypeScript
npm run start    # Start production server
npm run test     # Run tests
npm run lint     # Lint code
npm run migrate  # Run migrations
npm run seed     # Run seeders
```

### Code Style
- ESLint for linting
- Prettier for formatting
- Husky for pre-commit hooks
- Conventional Commits

### Testing Strategy
- Unit Tests: Jest
- Integration Tests: Supertest
- E2E Tests: Cypress
- Coverage Goal: 80%+

## 🚀 Deployment

### Using Docker
```bash
# Build images
docker build -f docker/frontend.Dockerfile -t {self.project_name}-frontend:latest ./frontend
docker build -f docker/backend.Dockerfile -t {self.project_name}-backend:latest ./backend

# Run containers
docker-compose -f docker-compose.production.yml up -d
```

### Environment Variables
See `.env.example` for all required environment variables.

## 📊 Monitoring

- **Error Tracking**: Sentry
- **APM**: New Relic / DataDog
- **Logs**: Winston + CloudWatch
- **Metrics**: Prometheus + Grafana

## 🔒 Security

- JWT authentication
- Input validation with Joi/Zod
- SQL injection prevention (Knex.js)
- XSS protection (React)
- CORS configuration
- Rate limiting
- Security headers (Helmet)

## 📚 Documentation

- API Documentation: `/graphql` (GraphQL Playground)
- Component Storybook: `npm run storybook`
- Database Schema: `/database/schema.md`

## 🤝 Contributing

1. Fork the repository
2. Create feature branch (`git checkout -b feature/amazing-feature`)
3. Commit changes (`git commit -m 'feat: add amazing feature'`)
4. Push to branch (`git push origin feature/amazing-feature`)
5. Open Pull Request

## 📄 License

This project is licensed under the MIT License.
"""
    # Assumes self.root_path already exists (created by earlier scaffolding
    # steps) — TODO confirm.
    with open(self.root_path / "README.md", "w") as f:
        f.write(readme)
|
||||
|
||||
def scaffold(self):
    """Main scaffolding method.

    Dispatches on ``self.project_type`` to the matching builder method.
    Returns True on success; prints a message and returns False for an
    unsupported project type.
    """
    # Name-based dispatch table; getattr keeps the lookup lazy so only the
    # selected builder is ever resolved on self.
    builders = {
        "nextjs-graphql": "create_nextjs_graphql_project",
        "react-native": "create_react_native_project",
        "microservices": "create_microservices_project",
    }
    builder_name = builders.get(self.project_type)
    if builder_name is None:
        print(f"Project type '{self.project_type}' not supported")
        return False
    getattr(self, builder_name)()
    return True
|
||||
|
||||
def create_react_native_project(self):
    """Create React Native project structure (placeholder).

    Mobile scaffolding has not been written yet; this stub only reports
    the gap so callers get visible feedback instead of silence.
    """
    print("React Native scaffolding - to be implemented")
|
||||
|
||||
def create_microservices_project(self):
    """Create Microservices architecture (placeholder).

    The microservices template is not implemented yet; this stub only
    reports the gap so callers get visible feedback instead of silence.
    """
    print("Microservices scaffolding - to be implemented")
|
||||
|
||||
def main():
    """CLI entry point: parse arguments, scaffold the project, report next steps.

    Exits with status 1 when scaffolding fails (previously a failed scaffold
    still exited 0, which misled shells and CI pipelines).
    """
    parser = argparse.ArgumentParser(description='Scaffold a fullstack project')
    parser.add_argument('project_name', help='Name of the project')
    parser.add_argument('--type',
                        choices=['nextjs-graphql', 'react-native', 'microservices'],
                        default='nextjs-graphql',
                        help='Type of project to scaffold')

    args = parser.parse_args()

    scaffolder = ProjectScaffolder(args.project_name, args.type)
    if not scaffolder.scaffold():
        # Fix: surface failure to the caller instead of silently exiting 0.
        raise SystemExit(1)

    print(f"\n✨ Project '{args.project_name}' created successfully!")
    print(f"📁 Location: {scaffolder.root_path}")
    print("\n🎯 Next steps:")
    print(" 1. cd " + args.project_name)
    print(" 2. docker-compose up -d")
    print(" 3. cd frontend && npm install")
    print(" 4. cd ../backend && npm install")
    print(" 5. npm run dev (in both directories)")
|
||||
|
||||
# Run the CLI only when executed directly; importing this module stays side-effect free.
if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user