import 'dotenv/config'
import { readFileSync, readdirSync } from 'fs'
import { join, dirname } from 'path'
import { fileURLToPath } from 'url'
import pg from 'pg'

// Simple SQL migration runner: applies db/migrations/*.sql in order and
// records each applied file in a _migrations tracking table.

// Resolve this script's directory (ESM has no built-in __dirname).
const __dirname = dirname(fileURLToPath(import.meta.url))

const pool = new pg.Pool({
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT || '5432', 10),
  database: process.env.DB_NAME || 'guidly',
  user: process.env.DB_USER || 'guidly',
  password: process.env.DB_PASSWORD || '',
  // Explicitly disable SSL when DATABASE_SSL=false; otherwise defer to pg's default.
  ssl: process.env.DATABASE_SSL === 'false' ? false : undefined,
})

async function migrate() {
  console.log('Running migrations...')

  // Create migrations tracking table
  await pool.query(`
    CREATE TABLE IF NOT EXISTS _migrations (
      id SERIAL PRIMARY KEY,
      name VARCHAR(255) NOT NULL UNIQUE,
      executed_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
    )
  `)

  // Get already executed migrations
  const { rows: executed } = await pool.query('SELECT name FROM _migrations ORDER BY name')
  const executedNames = new Set(executed.map((r: { name: string }) => r.name))

  // Read migration files; lexicographic sort assumes ordered filename prefixes
  const migrationsDir = join(__dirname, '..', 'db', 'migrations')
  const files = readdirSync(migrationsDir)
    .filter(f => f.endsWith('.sql'))
    .sort()

  for (const file of files) {
    if (executedNames.has(file)) {
      console.log(`  [skip] ${file} (already executed)`)
      continue
    }

    const sql = readFileSync(join(migrationsDir, file), 'utf-8')
    console.log(`  [run] ${file}`)
    try {
      await pool.query(sql)
      await pool.query('INSERT INTO _migrations (name) VALUES ($1)', [file])
      console.log(`  [ok] ${file}`)
    } catch (error: any) {
      console.error(`  [fail] ${file}:`, error.message)
      process.exit(1)
    }
  }

  console.log('Migrations complete!')
  await pool.end()
}

migrate().catch((error) => {
  console.error('Migration failed:', error)
  process.exit(1)
})