Files
shokuninmarche/scripts/migrate.js
airewit-developer c8909befb1 feat: Foundation — auth system, 9 migrations, React frontend
Backend:
- Express server with JWT httpOnly cookie auth
- POST /api/auth/register, /api/auth/login, /api/auth/logout, GET /api/auth/me
- bcrypt 12 rounds, generic 401 errors (no email/password field disclosure)
- Auth middleware protects all /api/* routes except register/login
- pg Pool database connection

Frontend (React + Vite + TailwindCSS + shadcn/ui):
- AuthContext with session restore on page load via /api/auth/me
- ProtectedRoute redirects unauthenticated users to /login
- LoginPage, RegisterPage — Hebrew RTL layout (dir=rtl), inline validation
- DashboardPage placeholder
- shadcn/ui components: Button, Input, Label, Card

Database:
- 9 migrations (001-009): extensions, users, events, vendors, guests,
  bookings, invitations, vendor_ratings, organizer_preferences
- pg_trgm for fuzzy Hebrew search, GIN indexes on style_tags
- Phase 2+3 fields included: source, payment_status, contract_value,
  6-dimension vendor ratings, organizer preferences
- Idempotent migration runner with schema_migrations tracking table

Infrastructure:
- Dockerfile (multi-stage: build React → production node:20-alpine)
- docker-compose.yml with PostgreSQL healthcheck; DB uses `expose` instead of `ports` (not published to the host)
- Migrations run automatically on container start

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-02-21 18:22:42 +00:00

52 lines
1.5 KiB
JavaScript

// Load DATABASE_URL (and any other settings) from .env into process.env.
require('dotenv').config();
const fs = require('fs');
const path = require('path');
const { Pool } = require('pg');
// Shared connection pool for the whole run; connection string comes from the environment.
const pool = new Pool({ connectionString: process.env.DATABASE_URL });
/**
 * Apply all pending .sql migrations from ../migrations in lexicographic order.
 *
 * Idempotent: applied filenames are tracked in the schema_migrations table and
 * skipped on subsequent runs. Each migration's UP section (text before the
 * optional "-- DOWN" marker) and its bookkeeping INSERT run inside a single
 * transaction, so a crash can never apply SQL without recording it — or
 * record a migration that did not fully apply. (PostgreSQL DDL is
 * transactional, so this is safe for schema changes; note that statements
 * like CREATE INDEX CONCURRENTLY cannot run inside a transaction.)
 *
 * On any failure the process exits with code 1 after closing the pool.
 */
async function migrate() {
  const migrationsDir = path.join(__dirname, '..', 'migrations');
  // Lexicographic sort relies on zero-padded filename prefixes (001-..., 002-...).
  const files = fs
    .readdirSync(migrationsDir)
    .filter((f) => f.endsWith('.sql'))
    .sort();

  // Ensure the tracking table exists before consulting it.
  await pool.query(`
    CREATE TABLE IF NOT EXISTS schema_migrations (
      filename VARCHAR(255) PRIMARY KEY,
      applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
    )
  `);

  for (const file of files) {
    const { rows } = await pool.query(
      'SELECT filename FROM schema_migrations WHERE filename = $1',
      [file]
    );
    if (rows.length > 0) {
      console.log(` ✓ already applied: ${file}`);
      continue;
    }

    const sql = fs.readFileSync(path.join(migrationsDir, file), 'utf8');
    // Run only the UP section (everything before the optional "-- DOWN" comment).
    const upSection = sql.split('-- DOWN')[0].trim();
    if (upSection === '') {
      // Nothing to run; don't record it so an author filling it in later gets a run.
      console.log(` ⚠ empty UP section, skipping: ${file}`);
      continue;
    }

    // One transaction per migration: SQL + bookkeeping INSERT commit atomically.
    const client = await pool.connect();
    try {
      await client.query('BEGIN');
      await client.query(upSection);
      await client.query(
        'INSERT INTO schema_migrations (filename) VALUES ($1)',
        [file]
      );
      await client.query('COMMIT');
      console.log(` ✅ applied: ${file}`);
    } catch (err) {
      // Best-effort rollback — the connection may already be unusable.
      await client.query('ROLLBACK').catch(() => {});
      console.error(` ❌ failed: ${file}`, err.message);
      await pool.end().catch(() => {});
      process.exit(1);
    } finally {
      client.release();
    }
  }

  console.log('Migrations complete.');
  await pool.end();
}
// Entry point: run the migrator and exit non-zero on any unhandled failure.
(async () => {
  try {
    await migrate();
  } catch (err) {
    console.error('Migration error:', err);
    process.exit(1);
  }
})();