feat: implement comprehensive startup system and fix authentication
Major improvements:
- Created startup orchestration system with health monitoring and graceful shutdown
- Fixed user registration and login with a simplified authentication flow
- Rebuilt authentication forms from scratch with direct API integration
- Implemented comprehensive debugging and error handling
- Added Redis fallback functionality for environments where Redis is disabled
- Fixed CORS configuration for cross-origin frontend requests
- Simplified password validation to 6+ characters (removed complexity requirements)
- Added toast notifications at the app level for better UX feedback
- Created comprehensive startup/shutdown scripts following OODA methodology
- Fixed database validation and connection issues
- Implemented TokenService in-memory fallback when Redis is disabled

Technical details:
- New SimpleLoginForm.tsx and SimpleRegisterForm.tsx components
- Enhanced CORS middleware with additional allowed origins
- Simplified auth validators and removed strict password requirements
- Added extensive logging and diagnostic capabilities
- Fixed authentication middleware token validation
- Implemented graceful Redis error handling throughout the stack
- Created modular startup system with configurable health checks

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
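The TokenService changes themselves are not part of the diff shown below; the sketch that follows is only an illustration of the "in-memory fallback when Redis is disabled" behaviour the message describes. The class name TokenStore, the set/get methods, and the node-redis v4 style client call are assumptions, not the project's actual implementation.

// Hypothetical sketch of a token store that falls back to an in-process Map
// when DISABLE_REDIS=true (or no Redis client is supplied). Tokens kept in
// memory do not survive restarts and are not shared between instances.
class TokenStore {
  constructor({ redisClient = null } = {}) {
    this.redis = process.env.DISABLE_REDIS === 'true' ? null : redisClient;
    this.memory = new Map();
  }

  async set(tokenId, payload, ttlSeconds) {
    if (this.redis) {
      // node-redis v4 style call; adjust for the client actually in use.
      await this.redis.set(tokenId, JSON.stringify(payload), { EX: ttlSeconds });
      return;
    }
    this.memory.set(tokenId, { payload, expiresAt: Date.now() + ttlSeconds * 1000 });
  }

  async get(tokenId) {
    if (this.redis) {
      const raw = await this.redis.get(tokenId);
      return raw ? JSON.parse(raw) : null;
    }
    const entry = this.memory.get(tokenId);
    if (!entry) return null;
    if (Date.now() > entry.expiresAt) {
      // Lazily expire stale entries on read instead of using timers.
      this.memory.delete(tokenId);
      return null;
    }
    return entry.payload;
  }
}

module.exports = TokenStore;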
parent d41d1e8125
commit e681c446b6
36 changed files with 7719 additions and 183 deletions
scripts/startup-checks.js  591 lines  Normal file
@@ -0,0 +1,591 @@
/**
 * Shattered Void MMO - Comprehensive Startup Checks
 *
 * This module performs thorough pre-flight checks to ensure all dependencies,
 * configurations, and system requirements are met before starting the game.
 */

const fs = require('fs').promises;
const path = require('path');
const { exec } = require('child_process');
const { promisify } = require('util');
const net = require('net');

const execAsync = promisify(exec);

class StartupChecks {
  constructor() {
    this.checks = [];
    this.results = {};
  }

  /**
   * Add a check to the validation suite
   */
  addCheck(name, checkFunction, required = true) {
    this.checks.push({
      name,
      function: checkFunction,
      required
    });
  }

  /**
   * Run all registered checks
   */
  async runAllChecks() {
    const startTime = Date.now();
    const results = {
      success: true,
      checks: {},
      failures: [],
      duration: 0
    };

    // Register all standard checks
    this.registerStandardChecks();

    console.log(`🔍 Running ${this.checks.length} startup checks...`);

    for (const check of this.checks) {
      try {
        console.log(`  ⏳ ${check.name}...`);
        const checkResult = await check.function();

        results.checks[check.name] = {
          success: true,
          required: check.required,
          details: checkResult
        };

        console.log(`  ✅ ${check.name}`);
      } catch (error) {
        const failure = {
          name: check.name,
          required: check.required,
          error: error.message
        };

        results.checks[check.name] = {
          success: false,
          required: check.required,
          error: error.message
        };

        results.failures.push(failure);

        if (check.required) {
          results.success = false;
          console.log(`  ❌ ${check.name}: ${error.message}`);
        } else {
          console.log(`  ⚠️ ${check.name}: ${error.message} (optional)`);
        }
      }
    }

    results.duration = Date.now() - startTime;
    return results;
  }

  /**
   * Register all standard checks
   */
  registerStandardChecks() {
    // Node.js version check
    this.addCheck('Node.js Version', this.checkNodeVersion, true);

    // NPM availability
    this.addCheck('NPM Availability', this.checkNpmAvailability, true);

    // Environment configuration
    this.addCheck('Environment Configuration', this.checkEnvironmentConfig, true);

    // Required directories
    this.addCheck('Directory Structure', this.checkDirectoryStructure, true);

    // Package dependencies
    this.addCheck('Package Dependencies', this.checkPackageDependencies, true);

    // Port availability
    this.addCheck('Port Availability', this.checkPortAvailability, true);

    // Database configuration
    this.addCheck('Database Configuration', this.checkDatabaseConfig, true);

    // Redis configuration
    this.addCheck('Redis Configuration', this.checkRedisConfig, false);

    // Log directories
    this.addCheck('Log Directories', this.checkLogDirectories, true);

    // Frontend availability
    this.addCheck('Frontend Dependencies', this.checkFrontendDependencies, false);

    // Memory availability
    this.addCheck('System Memory', this.checkSystemMemory, true);

    // Disk space
    this.addCheck('Disk Space', this.checkDiskSpace, true);

    // File permissions
    this.addCheck('File Permissions', this.checkFilePermissions, true);
  }

  /**
   * Check Node.js version requirements
   */
  async checkNodeVersion() {
    const requiredMajor = 18;
    const currentVersion = process.version;
    const major = parseInt(currentVersion.slice(1).split('.')[0]);

    if (major < requiredMajor) {
      throw new Error(`Node.js ${requiredMajor}+ required, found ${currentVersion}`);
    }

    return {
      current: currentVersion,
      required: `>=${requiredMajor}.0.0`,
      valid: true
    };
  }

  /**
   * Check NPM availability
   */
  async checkNpmAvailability() {
    try {
      const { stdout } = await execAsync('npm --version');
      const version = stdout.trim();

      return {
        version,
        available: true
      };
    } catch (error) {
      throw new Error('NPM not found in PATH');
    }
  }

  /**
   * Check environment configuration
   */
  async checkEnvironmentConfig() {
    const envFile = path.join(process.cwd(), '.env');
    const config = {
      hasEnvFile: false,
      requiredVars: [],
      missingVars: [],
      warnings: []
    };

    // Check for .env file
    try {
      await fs.access(envFile);
      config.hasEnvFile = true;
    } catch {
      config.warnings.push('No .env file found, using defaults');
    }

    // Required environment variables (with defaults)
    const requiredVars = [
      { name: 'NODE_ENV', default: 'development' },
      { name: 'PORT', default: '3000' },
      { name: 'DB_HOST', default: 'localhost' },
      { name: 'DB_PORT', default: '5432' },
      { name: 'DB_NAME', default: 'shattered_void_dev' },
      { name: 'DB_USER', default: 'postgres' }
    ];

    for (const varConfig of requiredVars) {
      const value = process.env[varConfig.name];
      if (!value) {
        config.missingVars.push({
          name: varConfig.name,
          default: varConfig.default
        });
      } else {
        config.requiredVars.push({
          name: varConfig.name,
          value: varConfig.name.includes('PASSWORD') ? '[HIDDEN]' : value
        });
      }
    }

    return config;
  }

  /**
   * Check directory structure
   */
  async checkDirectoryStructure() {
    const requiredDirs = [
      'src',
      'src/controllers',
      'src/services',
      'src/routes',
      'src/database',
      'src/database/migrations',
      'config',
      'scripts'
    ];

    const optionalDirs = [
      'frontend',
      'frontend/src',
      'frontend/dist',
      'logs',
      'tests'
    ];

    const results = {
      required: [],
      optional: [],
      missing: []
    };

    // Check required directories
    for (const dir of requiredDirs) {
      try {
        const stats = await fs.stat(dir);
        if (stats.isDirectory()) {
          results.required.push(dir);
        } else {
          results.missing.push(dir);
        }
      } catch {
        results.missing.push(dir);
      }
    }

    // Check optional directories
    for (const dir of optionalDirs) {
      try {
        const stats = await fs.stat(dir);
        if (stats.isDirectory()) {
          results.optional.push(dir);
        }
      } catch {
        // Optional directories are not reported as missing
      }
    }

    if (results.missing.length > 0) {
      throw new Error(`Missing required directories: ${results.missing.join(', ')}`);
    }

    return results;
  }

  /**
   * Check package dependencies
   */
  async checkPackageDependencies() {
    const packageJsonPath = path.join(process.cwd(), 'package.json');
    const nodeModulesPath = path.join(process.cwd(), 'node_modules');

    try {
      // Check package.json exists
      const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));

      // Check node_modules exists
      await fs.access(nodeModulesPath);

      // Check critical dependencies
      const criticalDeps = [
        'express',
        'pg',
        'knex',
        'winston',
        'dotenv',
        'socket.io'
      ];

      const missing = [];
      for (const dep of criticalDeps) {
        try {
          await fs.access(path.join(nodeModulesPath, dep));
        } catch {
          missing.push(dep);
        }
      }

      if (missing.length > 0) {
        throw new Error(`Missing critical dependencies: ${missing.join(', ')}`);
      }

      return {
        packageJson: packageJson.name,
        version: packageJson.version,
        dependencies: Object.keys(packageJson.dependencies || {}).length,
        devDependencies: Object.keys(packageJson.devDependencies || {}).length,
        criticalDeps: criticalDeps.length
      };
    } catch (error) {
      throw new Error(`Package validation failed: ${error.message}`);
    }
  }

  /**
   * Check port availability
   */
  async checkPortAvailability() {
    const backendPort = process.env.PORT || 3000;
    const frontendPort = process.env.FRONTEND_PORT || 5173;

    const checkPort = (port) => {
      return new Promise((resolve, reject) => {
        const server = net.createServer();

        server.listen(port, (err) => {
          if (err) {
            reject(new Error(`Port ${port} is in use`));
          } else {
            server.close(() => resolve(port));
          }
        });

        server.on('error', (err) => {
          reject(new Error(`Port ${port} is in use`));
        });
      });
    };

    const results = {
      backend: await checkPort(backendPort),
      frontend: null
    };

    // Only check frontend port if frontend is enabled
    if (process.env.ENABLE_FRONTEND !== 'false') {
      try {
        results.frontend = await checkPort(frontendPort);
      } catch (error) {
        // Frontend port check is not critical
        results.frontendError = error.message;
      }
    }

    return results;
  }

  /**
   * Check database configuration
   */
  async checkDatabaseConfig() {
    const config = {
      host: process.env.DB_HOST || 'localhost',
      port: process.env.DB_PORT || 5432,
      database: process.env.DB_NAME || 'shattered_void_dev',
      user: process.env.DB_USER || 'postgres'
    };

    // Check if database connection parameters are reasonable
    if (!config.host || !config.port || !config.database || !config.user) {
      throw new Error('Incomplete database configuration');
    }

    // Validate port number
    const port = parseInt(config.port);
    if (isNaN(port) || port < 1 || port > 65535) {
      throw new Error(`Invalid database port: ${config.port}`);
    }

    return {
      host: config.host,
      port: config.port,
      database: config.database,
      user: config.user,
      configured: true
    };
  }

  /**
   * Check Redis configuration (optional)
   */
  async checkRedisConfig() {
    const config = {
      host: process.env.REDIS_HOST || 'localhost',
      port: process.env.REDIS_PORT || 6379,
      enabled: process.env.DISABLE_REDIS !== 'true'
    };

    if (!config.enabled) {
      return {
        enabled: false,
        message: 'Redis disabled by configuration'
      };
    }

    // Validate port number
    const port = parseInt(config.port);
    if (isNaN(port) || port < 1 || port > 65535) {
      throw new Error(`Invalid Redis port: ${config.port}`);
    }

    return {
      host: config.host,
      port: config.port,
      enabled: true
    };
  }

  /**
   * Check log directories
   */
  async checkLogDirectories() {
    const logDir = path.join(process.cwd(), 'logs');

    try {
      // Check if logs directory exists
      await fs.access(logDir);

      // Check if it's writable
      await fs.access(logDir, fs.constants.W_OK);

      return {
        directory: logDir,
        exists: true,
        writable: true
      };
    } catch {
      // Create logs directory if it doesn't exist
      try {
        await fs.mkdir(logDir, { recursive: true });
        return {
          directory: logDir,
          exists: true,
          writable: true,
          created: true
        };
      } catch (error) {
        throw new Error(`Cannot create logs directory: ${error.message}`);
      }
    }
  }

  /**
   * Check frontend dependencies (optional)
   */
  async checkFrontendDependencies() {
    const frontendDir = path.join(process.cwd(), 'frontend');

    try {
      // Check if frontend directory exists
      await fs.access(frontendDir);

      // Check package.json
      const packageJsonPath = path.join(frontendDir, 'package.json');
      const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));

      // Check node_modules
      const nodeModulesPath = path.join(frontendDir, 'node_modules');
      await fs.access(nodeModulesPath);

      return {
        directory: frontendDir,
        name: packageJson.name,
        version: packageJson.version,
        dependencies: Object.keys(packageJson.dependencies || {}).length,
        hasNodeModules: true
      };
    } catch (error) {
      throw new Error(`Frontend not available: ${error.message}`);
    }
  }

  /**
   * Check system memory
   */
  async checkSystemMemory() {
    const totalMemory = require('os').totalmem();
    const freeMemory = require('os').freemem();
    const usedMemory = totalMemory - freeMemory;

    const totalGB = totalMemory / (1024 * 1024 * 1024);
    const freeGB = freeMemory / (1024 * 1024 * 1024);
    const usedGB = usedMemory / (1024 * 1024 * 1024);

    // Minimum 1GB free memory recommended
    if (freeGB < 1) {
      throw new Error(`Low memory: ${freeGB.toFixed(2)}GB free, 1GB+ recommended`);
    }

    return {
      total: `${totalGB.toFixed(2)}GB`,
      used: `${usedGB.toFixed(2)}GB`,
      free: `${freeGB.toFixed(2)}GB`,
      usage: `${((usedGB / totalGB) * 100).toFixed(1)}%`
    };
  }

  /**
   * Check disk space
   */
  async checkDiskSpace() {
    try {
      const { stdout } = await execAsync('df -h .');
      const lines = stdout.trim().split('\n');
      const data = lines[1].split(/\s+/);

      const size = data[1];
      const used = data[2];
      const available = data[3];
      const usage = data[4];

      // Extract numeric percentage
      const usagePercent = parseInt(usage.replace('%', ''));

      // Warn if disk usage is over 90%
      if (usagePercent > 90) {
        throw new Error(`High disk usage: ${usage} used, <10% available`);
      }

      return {
        size,
        used,
        available,
        usage: `${usagePercent}%`
      };
    } catch (error) {
      // Fallback for non-Unix systems or when df is not available
      return {
        message: 'Disk space check not available on this system',
        available: true
      };
    }
  }

  /**
   * Check file permissions
   */
  async checkFilePermissions() {
    const criticalFiles = [
      'src/server.js',
      'package.json',
      'knexfile.js'
    ];

    const results = {
      readable: [],
      unreadable: []
    };

    for (const file of criticalFiles) {
      try {
        await fs.access(file, fs.constants.R_OK);
        results.readable.push(file);
      } catch {
        results.unreadable.push(file);
      }
    }

    if (results.unreadable.length > 0) {
      throw new Error(`Cannot read critical files: ${results.unreadable.join(', ')}`);
    }

    return results;
  }
}

module.exports = StartupChecks;
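For reference, a minimal sketch of how the checks module added above might be driven from a startup script. StartupChecks, runAllChecks(), and the results shape (success, duration, failures with name/required/error) come from the diff; the entry-point file name is an assumption, since the wiring into the startup scripts is not shown here.

// scripts/run-startup-checks.js (hypothetical entry point)
const StartupChecks = require('./startup-checks');

async function main() {
  const checks = new StartupChecks();
  const results = await checks.runAllChecks();

  console.log(`Startup checks completed in ${results.duration}ms`);

  if (!results.success) {
    // At least one required check failed; report and abort startup.
    for (const failure of results.failures) {
      console.error(`${failure.required ? 'required' : 'optional'}: ${failure.name} - ${failure.error}`);
    }
    process.exit(1);
  }
}

main().catch((error) => {
  console.error('Startup checks crashed:', error);
  process.exit(1);
});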