const { Pool } = require('pg');
const fs = require('fs');
const path = require('path');

let pool;

/**
 * Create the shared PostgreSQL connection pool, verify connectivity,
 * and apply any pending SQL migrations.
 *
 * SSL is enabled (with `rejectUnauthorized: false`, as required by many
 * hosted Postgres providers) only in production and only when the
 * connection string does not point at a local instance.
 *
 * @returns {Promise<Pool>} the initialized pool (also reachable via getPool()).
 * @throws if the connection fails or a migration errors.
 */
async function connectDB() {
  pool = new Pool({
    connectionString: process.env.DATABASE_URL,
    ssl:
      process.env.NODE_ENV === 'production' &&
      process.env.DATABASE_URL &&
      !process.env.DATABASE_URL.includes('localhost') &&
      !process.env.DATABASE_URL.includes('postgres:')
        ? { rejectUnauthorized: false }
        : false,
  });

  // Test connection and run migrations on a single checked-out client.
  const client = await pool.connect();
  try {
    console.log('✅ PostgreSQL connected');
    await runMigrations(client);
  } finally {
    // Always return the client to the pool, even when a migration throws;
    // the original code leaked the client on failure.
    client.release();
  }
  return pool;
}

/**
 * Apply every not-yet-applied `.sql` file in ./migrations, in sorted
 * (lexicographic filename) order. Applied filenames are tracked in the
 * `schema_migrations` table so each file runs exactly once.
 *
 * Each migration's SQL and its tracking-row insert run inside a single
 * transaction, so a failed migration leaves no partial state behind.
 *
 * @param {import('pg').PoolClient} client - a checked-out pool client.
 * @throws rethrows any migration failure after rolling back.
 */
async function runMigrations(client) {
  // Create migrations tracking table (idempotent).
  await client.query(`
    CREATE TABLE IF NOT EXISTS schema_migrations (
      id SERIAL PRIMARY KEY,
      filename VARCHAR(255) UNIQUE NOT NULL,
      applied_at TIMESTAMP DEFAULT NOW()
    )
  `);

  const migrationsDir = path.join(__dirname, 'migrations');
  if (!fs.existsSync(migrationsDir)) return;

  const files = fs.readdirSync(migrationsDir).sort();
  for (const file of files) {
    if (!file.endsWith('.sql')) continue;

    // Skip migrations that were already applied.
    const { rows } = await client.query(
      'SELECT id FROM schema_migrations WHERE filename = $1',
      [file]
    );
    if (rows.length > 0) continue;

    const sql = fs.readFileSync(path.join(migrationsDir, file), 'utf8');

    // Run the migration and record it atomically: either both the SQL
    // and the tracking row land, or neither does.
    await client.query('BEGIN');
    try {
      await client.query(sql);
      await client.query(
        'INSERT INTO schema_migrations (filename) VALUES ($1)',
        [file]
      );
      await client.query('COMMIT');
    } catch (err) {
      await client.query('ROLLBACK');
      throw err;
    }
    console.log(`✅ Migration applied: ${file}`);
  }
}

/**
 * Accessor for the shared pool created by connectDB().
 * @returns {Pool|undefined} undefined until connectDB() has run.
 */
function getPool() {
  return pool;
}

module.exports = { connectDB, getPool };