Skip to content

Instantly share code, notes, and snippets.

@niquola
Created March 1, 2026 12:48
Show Gist options
  • Select an option

  • Save niquola/d87d285e914c94ee31d995282bf9f7c5 to your computer and use it in GitHub Desktop.

Select an option

Save niquola/d87d285e914c94ee31d995282bf9f7c5 to your computer and use it in GitHub Desktop.
PostgreSQL database tool — migrations, CRUD, type generation, psql wrapper. Single file, zero deps, Bun runtime.
// PostgreSQL database tool — migrations & SQL CLI, single file, zero dependencies, Bun runtime.
//
// Env:
// DATABASE_URL (required) — PostgreSQL connection string.
// Loaded automatically by Bun from .env file in project root.
// Example .env:
// DATABASE_URL=postgres://user:pass@localhost:5432/mydb
//
// CLI (default = dev, --test = test database):
// bun src/db.ts [--test] up [count] — apply pending migrations
// bun src/db.ts [--test] down [count] — roll back recent (default: 1)
// bun src/db.ts [--test] status — show applied/pending
// bun src/db.ts [--test] sql "<query>" — execute arbitrary SQL, print results as JSON
// bun src/db.ts [--test] types [table…] — generate TS types from table metadata
// bun src/db.ts [--test] psql [psql-args…] — open psql with connection pre-configured
// bun src/db.ts create-migration <name> — create .up.sql + .down.sql templates
//
// Library:
// import { connect, create, save, remove } from "./src/db"
// const db = connect("dev") — connect to dev database (DATABASE_URL)
// const db = connect("test") — connect to test database (<db>_test)
//
// create(db, table, obj) — INSERT, returns the new row.
// save(db, table, obj) — upsert: INSERT or ON CONFLICT (id) DO UPDATE. Returns the row.
// remove(db, table, id) — DELETE by id, returns the deleted row or null.
// generateTypes(db, table) — returns TS type string from PG metadata. Defaults/nullable = optional.
// up(db, count?) — apply pending migrations
// down(db, count?) — roll back recent migrations
// status(db) — list applied/pending migrations
// ensureTestDb() — creates <db>_test database if it doesn't exist.
// createMigration(name) — create migration .up.sql + .down.sql templates
//
// Inline usage with bun -e:
// bun -e "$(cat <<'EOF'
// import { connect, create } from "./src/db"
// const db = connect()
// console.log(await create(db, "users", { name: "alice" }))
// db.close()
// EOF
// )"
//
// Tests (in same file):
// bun test ./src/db.ts
//
// Migration format:
// migrations/YYYYMMDDHHMMSS_name.up.sql
// migrations/YYYYMMDDHHMMSS_name.down.sql
// Both files required. Sorted lexicographically. Plain SQL only.
//
// Design decisions:
// - Paired SQL files over single-file markers: just readFile, no parsing.
// - Both up+down required: forces rollback planning at write time.
// - pg_try_advisory_lock (fail-fast): no silent waits, clear error on contention.
// - One transaction per migration: rollback on failure, never records failed migration.
// - Checksum (wyhash) on .up.sql content: detects drift after apply.
// - Integrity check before every command: applied migrations must exist on disk
// with matching checksums.
import { sql as defaultDb, SQL } from "bun";
import { mkdir } from "node:fs/promises";
import { join, basename } from "node:path";
const LOCK_ID = 791730412;
const DIR = join(process.cwd(), "migrations");
const RE = /^(\d{14})_(.+)\.(up|down)\.sql$/;
type Migration = { version: string; name: string; upPath: string; downPath: string; checksum: string };
type Applied = { version: string; checksum: string; applied_at: Date };
function hash(data: Uint8Array): string {
  // wyhash digest of the file bytes, rendered as lowercase hex.
  // Used as the migration checksum stored in schema_migrations.
  const digest = Bun.hash(data);
  return digest.toString(16);
}
async function discoverMigrations(): Promise<Migration[]> {
  // Scan migrations/ and pair every <version>_<name>.up.sql with its .down.sql.
  // Throws on malformed filenames or unpaired files. Result is sorted by
  // "<version>_<name>" so lexicographic order equals application order.
  const pairs = new Map<string, { version: string; name: string; up?: string; down?: string }>();
  for (const file of new Bun.Glob("*.sql").scanSync(DIR)) {
    const match = RE.exec(file);
    if (!match) throw new Error(`Invalid migration filename: ${file}`);
    const [, version, name, direction] = match;
    const key = `${version}_${name}`;
    let entry = pairs.get(key);
    if (!entry) {
      entry = { version, name };
      pairs.set(key, entry);
    }
    entry[direction as "up" | "down"] = join(DIR, file);
  }
  const sorted = [...pairs.entries()].sort(([a], [b]) => a.localeCompare(b));
  const migrations: Migration[] = [];
  for (const [key, entry] of sorted) {
    if (!entry.up || !entry.down) throw new Error(`Both .up.sql and .down.sql required for ${key}`);
    // Checksum only the .up.sql: that is what was applied and what drift is measured against.
    const upBytes = await Bun.file(entry.up).bytes();
    migrations.push({ version: entry.version, name: entry.name, upPath: entry.up, downPath: entry.down, checksum: hash(upBytes) });
  }
  return migrations;
}
function validate(applied: Applied[], fileMap: Map<string, Migration>): void {
  // Integrity check: every migration recorded in the database must still
  // exist on disk, and its .up.sql checksum must match what was applied.
  applied.forEach((entry) => {
    const onDisk = fileMap.get(entry.version);
    if (onDisk === undefined) throw new Error(`Applied migration ${entry.version} missing from disk`);
    if (onDisk.checksum !== entry.checksum) throw new Error(`Checksum mismatch for ${entry.version}_${onDisk.name}`);
  });
}
async function withContext<T>(db: SQL, fn: (files: Migration[], applied: Applied[], fileMap: Map<string, Migration>) => Promise<T>): Promise<T> {
  // Shared preamble for every migration command: ensure the bookkeeping table
  // exists, take the advisory lock (fail fast on contention), load disk + DB
  // state, run the integrity check, then hand everything to fn.
  await db`CREATE TABLE IF NOT EXISTS schema_migrations (
version TEXT PRIMARY KEY, checksum TEXT NOT NULL, applied_at TIMESTAMPTZ NOT NULL DEFAULT now())`;
  const lockRows = await db`SELECT pg_try_advisory_lock(${LOCK_ID}) as locked`;
  if (!lockRows[0].locked) throw new Error("Another migration is running");
  try {
    const files = await discoverMigrations();
    const applied: Applied[] = await db`SELECT version, checksum, applied_at FROM schema_migrations ORDER BY version` as any;
    const byVersion = new Map(files.map((f) => [f.version, f] as const));
    validate(applied, byVersion);
    return await fn(files, applied, byVersion);
  } finally {
    // Always release the advisory lock, even when fn or validate throws.
    await db`SELECT pg_advisory_unlock(${LOCK_ID})`;
  }
}
function slugify(name: string): string {
  // Lowercase, collapse every run of non-alphanumerics to a single "_",
  // then trim leading/trailing underscores. May return "" for all-symbol input.
  const collapsed = name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "_");
  return collapsed.replace(/^_+|_+$/g, "");
}
async function createMigration(name: string): Promise<string> {
  // Create a timestamped pair of .up.sql/.down.sql templates under migrations/.
  // Returns the "<version>_<slug>" base name. Throws if the slug is empty or
  // either target file already exists.
  const slug = slugify(name);
  if (!slug) throw new Error("Migration name must contain at least one alphanumeric character");
  await mkdir(DIR, { recursive: true });
  // Version = UTC timestamp YYYYMMDDHHMMSS — matches RE and sorts lexicographically.
  const version = new Date().toISOString().replace(/[-T:]/g, "").slice(0, 14);
  const base = `${version}_${slug}`;
  const exts = [".up.sql", ".down.sql"];
  // Check BOTH paths before writing anything: the original checked inside the
  // write loop, so a .down.sql collision left a freshly written .up.sql behind.
  for (const ext of exts) {
    if (await Bun.file(join(DIR, base + ext)).exists()) throw new Error(`Already exists: ${base}${ext}`);
  }
  for (const ext of exts) {
    const path = join(DIR, base + ext);
    // ext.slice(1, -4) turns ".up.sql"/".down.sql" into "up"/"down".
    await Bun.write(path, `-- ${slug} ${ext.slice(1, -4)}\n`);
    console.log(`Created ${base}${ext}`);
  }
  return base;
}
async function up(db: SQL, count?: number): Promise<string[]> {
  // Apply pending migrations in version order; at most `count` when given.
  // Returns the versions that were applied (empty if already up to date).
  return withContext(db, async (files, applied) => {
    const done = new Set(applied.map((a) => a.version));
    const pending = files.filter((f) => !done.has(f.version));
    const batch = count === undefined ? pending : pending.slice(0, count);
    if (batch.length === 0) {
      console.log("Database is up to date.");
      return [];
    }
    for (const migration of batch) {
      // One transaction per migration: the .up.sql and its bookkeeping row
      // commit together, so a failed migration is never recorded.
      await db.begin(async (tx) => {
        await tx.file(migration.upPath);
        await tx`INSERT INTO schema_migrations (version, checksum) VALUES (${migration.version}, ${migration.checksum})`;
      });
      console.log(`Applied ${migration.version}_${migration.name}`);
    }
    console.log(`\n${batch.length} migration(s) applied.`);
    return batch.map((m) => m.version);
  });
}
async function down(db: SQL, count = 1): Promise<string[]> {
  // Roll back the `count` most recently applied migrations (default 1).
  // Returns the rolled-back versions, newest first.
  return withContext(db, async (_, applied, fileMap) => {
    // Copy before reversing: Array.prototype.reverse mutates in place, and
    // the original `applied.reverse()` silently mutated the shared context array.
    const toRollback = [...applied].reverse().slice(0, count);
    if (!toRollback.length) { console.log("Nothing to roll back."); return []; }
    for (const entry of toRollback) {
      // Non-null is safe: validate() already guaranteed every applied version exists on disk.
      const m = fileMap.get(entry.version)!;
      // Transactional like `up`: .down.sql and the row deletion commit together.
      await db.begin(async (tx) => {
        await tx.file(m.downPath);
        await tx`DELETE FROM schema_migrations WHERE version = ${entry.version}`;
      });
      console.log(`Rolled back ${m.version}_${m.name}`);
    }
    console.log(`\n${toRollback.length} migration(s) rolled back.`);
    return toRollback.map(e => e.version);
  });
}
type StatusResult = { applied: string[]; pending: string[] };
async function status(db: SQL): Promise<StatusResult> {
return withContext(db, async (files, applied) => {
const appliedSet = new Set(applied.map(a => a.version));
console.log("Migration Status\n");
for (const f of files) {
const a = applied.find(a => a.version === f.version);
const date = a ? ` applied ${a.applied_at.toISOString().replace("T", " ").slice(0, 19)}` : "";
console.log(` [${appliedSet.has(f.version) ? "x" : " "}] ${f.version}_${f.name}${date}`);
}
if (!files.length) console.log(" No migrations found.");
return {
applied: applied.map(a => a.version),
pending: files.filter(f => !appliedSet.has(f.version)).map(f => f.version),
};
});
}
async function save(db: SQL, table: string, obj: Record<string, unknown>): Promise<Record<string, unknown>> {
  // Upsert keyed on `id`: INSERT, or ON CONFLICT (id) UPDATE every non-id column.
  // Returns the resulting row. Values always go through $n placeholders.
  // Escape embedded double quotes in identifiers (PostgreSQL doubles them inside
  // quoted identifiers) so a hostile table/column name cannot break out —
  // the original interpolated identifiers unescaped into db.unsafe().
  const q = (ident: string) => `"${ident.replace(/"/g, '""')}"`;
  const keys = Object.keys(obj);
  if (!keys.length) throw new Error("save: object must have at least one key");
  const cols = keys.map(q).join(", ");
  const vals = keys.map((_, i) => `$${i + 1}`).join(", ");
  const set = keys.filter(k => k !== "id").map(k => `${q(k)} = EXCLUDED.${q(k)}`).join(", ");
  const values = keys.map(k => obj[k]);
  // Object with only an `id` key: nothing to update, plain INSERT instead.
  const query = set
    ? `INSERT INTO ${q(table)} (${cols}) VALUES (${vals}) ON CONFLICT (id) DO UPDATE SET ${set} RETURNING *`
    : `INSERT INTO ${q(table)} (${cols}) VALUES (${vals}) RETURNING *`;
  const [row] = await db.unsafe(query, values);
  return row;
}
async function create(db: SQL, table: string, obj: Record<string, unknown>): Promise<Record<string, unknown>> {
  // Plain INSERT of obj's keys/values; returns the inserted row.
  // Escape embedded double quotes in identifiers (same fix as save/remove):
  // the original interpolated table/column names unescaped into db.unsafe().
  // Values are parameterized via $n placeholders.
  const q = (ident: string) => `"${ident.replace(/"/g, '""')}"`;
  const keys = Object.keys(obj);
  if (!keys.length) throw new Error("create: object must have at least one key");
  const cols = keys.map(q).join(", ");
  const vals = keys.map((_, i) => `$${i + 1}`).join(", ");
  const values = keys.map(k => obj[k]);
  const [row] = await db.unsafe(`INSERT INTO ${q(table)} (${cols}) VALUES (${vals}) RETURNING *`, values);
  return row;
}
async function remove(db: SQL, table: string, id: unknown): Promise<Record<string, unknown> | null> {
  // DELETE by primary key `id`; returns the deleted row, or null when nothing matched.
  // Double embedded quotes in the table identifier (same fix as create/save).
  const ident = `"${table.replace(/"/g, '""')}"`;
  const [row] = await db.unsafe(`DELETE FROM ${ident} WHERE id = $1 RETURNING *`, [id]);
  return row ?? null;
}
function pgToTs(pgType: string): string {
  // Map a PostgreSQL data type (information_schema spelling) to a TS type.
  // bigint/numeric/money map to string: they can exceed JS number precision.
  // Unrecognized types fall through to "unknown".
  const mapping: Record<string, string> = {
    integer: "number", smallint: "number", real: "number", "double precision": "number",
    bigint: "string", numeric: "string", money: "string",
    boolean: "boolean",
    text: "string", "character varying": "string", character: "string", uuid: "string",
    "timestamp without time zone": "Date", "timestamp with time zone": "Date", date: "Date",
    json: "unknown", jsonb: "unknown",
    bytea: "Uint8Array",
    ARRAY: "unknown[]",
  };
  return mapping[pgType] ?? "unknown";
}
function toPascalCase(name: string): string {
  // "user_accounts" / "user-accounts" -> "UserAccounts".
  const words = name.split(/[_-]/);
  const capitalized = words.map((word) => word.charAt(0).toUpperCase() + word.slice(1));
  return capitalized.join("");
}
async function generateTypes(db: SQL, table: string): Promise<string> {
  // Build an exported TS type for `table` from information_schema metadata.
  // Columns that are nullable or have a default become optional; a column
  // comment on a json/jsonb column is used verbatim as its TS type.
  const cols = await db.unsafe(`
SELECT c.column_name as name, c.data_type as type, c.is_nullable as nullable,
c.column_default as default_value, col_description(cls.oid, c.ordinal_position) as comment
FROM information_schema.columns c
JOIN pg_class cls ON cls.relname = c.table_name
JOIN pg_namespace ns ON ns.oid = cls.relnamespace AND ns.nspname = c.table_schema
WHERE c.table_schema = 'public' AND c.table_name = $1
ORDER BY c.ordinal_position`, [table]);
  if (!cols.length) throw new Error(`Table "${table}" not found`);
  const lines: string[] = [`export type ${toPascalCase(table)} = {`];
  for (const c of cols as any[]) {
    const optional = c.nullable === "YES" || c.default_value !== null;
    const useComment = (c.type === "json" || c.type === "jsonb") && c.comment;
    const tsType = useComment ? c.comment : pgToTs(c.type);
    lines.push(` ${c.name}${optional ? "?" : ""}: ${tsType};`);
  }
  lines.push("};", "");
  return lines.join("\n");
}
type Env = "dev" | "test";
function parseDbUrl(): { base: string; name: string } {
  // Split DATABASE_URL at the last "/" into { base, name } (name = database).
  // NOTE(review): a query string (e.g. ?sslmode=require) would end up inside
  // `name` — assumes DATABASE_URL carries no query params; confirm in deployments.
  const url = process.env.DATABASE_URL ?? "";
  const slash = url.lastIndexOf("/");
  if (slash < 0) throw new Error("Cannot parse DATABASE_URL");
  return { base: url.substring(0, slash), name: url.substring(slash + 1) };
}
function connect(env: Env = "dev"): SQL {
  // "dev" -> the database named in DATABASE_URL; "test" -> same name + "_test".
  const parsed = parseDbUrl();
  const dbName = env === "test" ? `${parsed.name}_test` : parsed.name;
  return new SQL({ url: `${parsed.base}/${dbName}` });
}
async function ensureTestDb(): Promise<void> {
  // Best-effort creation of <db>_test; errors (typically "already exists")
  // are deliberately swallowed.
  const dev = connect("dev");
  try {
    // Moved inside try: in the original this ran outside the try, so a throw
    // here leaked the just-opened dev connection (close() was never reached).
    const { name } = parseDbUrl();
    // CREATE DATABASE cannot be parameterized; the name comes from our own DATABASE_URL.
    await dev.unsafe(`CREATE DATABASE "${name}_test"`);
  } catch {
    // Deliberate best-effort: the database usually exists already.
  } finally {
    dev.close();
  }
}
export { createMigration, up, down, status, save, create, remove, generateTypes, connect, ensureTestDb, type Env, type StatusResult };
// --- CLI (bun src/db.ts <command>) ---
// Runs only when this file is executed directly (not imported) and not under
// `bun test` (which sets NODE_ENV=test and takes the test branch below).
if (import.meta.main && process.env.NODE_ENV !== "test") {
  const args = process.argv.slice(2);
  // --test may appear anywhere on the command line; everything else is <command> [args...].
  const env: Env = args.includes("--test") ? "test" : "dev";
  const rest = args.filter(a => a !== "--test");
  const [command, ...cmdArgs] = rest;
  if (env === "test") await ensureTestDb();
  const db = connect(env);
  // Parse an optional positive-integer count argument; undefined when absent.
  function parseCount(arg?: string): number | undefined {
    if (!arg) return undefined;
    const n = parseInt(arg, 10);
    if (isNaN(n) || n < 1) throw new Error(`Invalid count: ${arg}`);
    return n;
  }
  try {
    switch (command) {
      case "create-migration":
        if (!cmdArgs[0]) { console.error("Usage: bun src/db.ts create-migration <name>"); process.exit(1); }
        await createMigration(cmdArgs[0]); break;
      case "up": await up(db, parseCount(cmdArgs[0])); break;
      case "down": await down(db, parseCount(cmdArgs[0]) ?? 1); break;
      case "status": await status(db); break;
      case "sql": {
        // Join args so unquoted multi-word queries still form one statement.
        const query = cmdArgs.join(" ");
        if (!query) { console.error("Usage: bun src/db.ts sql <query>"); process.exit(1); }
        const rows = await db.unsafe(query);
        if (!rows.length) { console.log("No rows returned."); break; }
        console.log(JSON.stringify(rows, null, 2));
        break;
      }
      case "psql": {
        // Decompose DATABASE_URL and hand the pieces to psql; password goes
        // via PGPASSWORD so it never appears in the process argument list.
        const url = new URL(process.env.DATABASE_URL || "");
        const dbName = env === "test" ? url.pathname.slice(1) + "_test" : url.pathname.slice(1);
        // Fallback path covers Homebrew libpq installs where psql is not on PATH.
        const psqlBin = Bun.which("psql") ?? "/opt/homebrew/opt/libpq/bin/psql";
        const proc = Bun.spawn([psqlBin, "-h", url.hostname, "-p", url.port || "5432", "-U", url.username, dbName, ...cmdArgs], {
          stdin: "inherit", stdout: "inherit", stderr: "inherit",
          env: { ...process.env, PGPASSWORD: url.password },
        });
        // NOTE(review): process.exit here skips the finally db.close() below — confirm intended.
        process.exit(await proc.exited);
      }
      case "types": {
        const tables = [...cmdArgs];
        // No tables given: generate for every public base table except schema_migrations.
        if (!tables.length) {
          const all = await db.unsafe(`SELECT table_name FROM information_schema.tables
WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
AND table_name != 'schema_migrations' ORDER BY table_name`);
          tables.push(...all.map((r: any) => r.table_name));
        }
        if (!tables.length) { console.log("No tables found."); break; }
        for (const t of tables) console.log(await generateTypes(db, t));
        break;
      }
      default:
        // No/unknown command: print usage. Exit 1 only when a command was given.
        console.log(`Usage: bun src/db.ts [--test] <command>\n\nCommands:\n create-migration <name> Create .up.sql + .down.sql templates\n up [count] Apply pending migrations\n down [count] Roll back recent migrations (default: 1)\n status Show migration status\n sql "<query>" Execute arbitrary SQL, print results as JSON\n types [table ...] Generate TypeScript types from table metadata\n psql [args ...] Open psql with connection pre-configured\n\nFlags:\n --test Run against test database (<db>_test)`);
        process.exit(command ? 1 : 0);
    }
  } catch (err) {
    console.error(err instanceof Error ? err.message : String(err));
    process.exit(1);
  } finally {
    db.close();
  }
}
// --- Tests (bun test ./src/db.ts) ---
// Self-contained suite: only loaded when `bun test` sets NODE_ENV=test.
if (process.env.NODE_ENV === "test") {
  const { test, expect, beforeAll, beforeEach, afterAll } = await import("bun:test");
  const { mkdir, rm, writeFile, mkdtemp } = await import("node:fs/promises");
  const { tmpdir } = await import("node:os");
  const SELF = import.meta.path;
  const tempDirs: string[] = [];
  let db: SQL;
  // Run this file's CLI in a subprocess against the test database.
  // NODE_ENV is cleared so the child takes the CLI branch, not this one.
  async function cli(cwd: string, ...args: string[]): Promise<{ stdout: string; stderr: string; exitCode: number }> {
    const proc = Bun.spawn(["bun", SELF, "--test", ...args], {
      cwd,
      env: { ...process.env, NODE_ENV: "" },
      stdout: "pipe",
      stderr: "pipe",
    });
    const [stdout, stderr] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
    ]);
    return { stdout, stderr, exitCode: await proc.exited };
  }
  // Drop and recreate the public schema for a clean slate before each test.
  async function resetDb(): Promise<void> {
    await db.unsafe(`DROP SCHEMA IF EXISTS public CASCADE; CREATE SCHEMA public`);
  }
  // Fresh temp dir with a migrations/ subdir; tracked for cleanup in afterAll.
  async function workspace(): Promise<string> {
    const dir = await mkdtemp(join(tmpdir(), "migrate-test-"));
    tempDirs.push(dir);
    await mkdir(join(dir, "migrations"), { recursive: true });
    return dir;
  }
  // Write a paired .up.sql/.down.sql migration into dir/migrations.
  async function writeMigration(dir: string, version: string, name: string, upSql: string, downSql: string): Promise<void> {
    await writeFile(join(dir, "migrations", `${version}_${name}.up.sql`), upSql);
    await writeFile(join(dir, "migrations", `${version}_${name}.down.sql`), downSql);
  }
  beforeAll(async () => {
    await ensureTestDb();
    db = connect("test");
  });
  beforeEach(async () => { await resetDb(); });
  afterAll(async () => {
    db.close();
    await Promise.all(tempDirs.map(d => rm(d, { recursive: true, force: true })));
  });
  test("migration lifecycle: up → status → idempotent up → down", async () => {
    const dir = await workspace();
    await writeMigration(dir, "20260101000000", "create_users",
      "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT NOT NULL);", "DROP TABLE users;");
    await writeMigration(dir, "20260101000100", "create_posts",
      "CREATE TABLE posts (id SERIAL PRIMARY KEY, user_id INT REFERENCES users(id), title TEXT);", "DROP TABLE posts;");
    // apply both migrations
    const up1 = await cli(dir, "up");
    expect(up1.exitCode).toBe(0);
    expect(up1.stdout).toContain("20260101000000_create_users");
    expect(up1.stdout).toContain("20260101000100_create_posts");
    // status shows both applied
    const st = await cli(dir, "status");
    expect(st.stdout).toContain("[x] 20260101000000_create_users");
    expect(st.stdout).toContain("[x] 20260101000100_create_posts");
    // generate types from migrated tables
    const usersType = await generateTypes(db, "users");
    expect(usersType).toContain("export type Users = {");
    expect(usersType).toContain(" name: string;"); // NOT NULL, no default → required
    expect(usersType).toContain(" id?: number;"); // has default → optional
    const postsType = await generateTypes(db, "posts");
    expect(postsType).toContain("export type Posts = {");
    expect(postsType).toContain(" title?: string;"); // nullable → optional
    // second up is idempotent
    const up2 = await cli(dir, "up");
    expect(up2.exitCode).toBe(0);
    expect(up2.stdout).toContain("up to date");
    // roll back last migration only
    const dn = await cli(dir, "down");
    expect(dn.exitCode).toBe(0);
    expect(dn.stdout).toContain("20260101000100_create_posts");
    // posts gone, users still there
    const [{ exists: postsExist }] = await db`SELECT to_regclass('public.posts') IS NOT NULL as exists`;
    expect(postsExist).toBe(false);
    const [{ exists: usersExist }] = await db`SELECT to_regclass('public.users') IS NOT NULL as exists`;
    expect(usersExist).toBe(true);
  });
  test("migration integrity: checksum drift and missing files", async () => {
    const dir = await workspace();
    await writeMigration(dir, "20260101000000", "create_users",
      "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT NOT NULL);", "DROP TABLE users;");
    await cli(dir, "up");
    // modify applied migration file → checksum drift
    await writeFile(join(dir, "migrations", "20260101000000_create_users.up.sql"),
      "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT, email TEXT);");
    const drift = await cli(dir, "up");
    expect(drift.exitCode).toBe(1);
    expect(drift.stderr).toContain("Checksum mismatch");
    // delete migration files → missing from disk
    await rm(join(dir, "migrations", "20260101000000_create_users.up.sql"));
    await rm(join(dir, "migrations", "20260101000000_create_users.down.sql"));
    const missing = await cli(dir, "status");
    expect(missing.exitCode).toBe(1);
    expect(missing.stderr).toContain("missing from disk");
  });
  test("migration lock contention", async () => {
    const dir = await workspace();
    await writeMigration(dir, "20260101000000", "create_users",
      "CREATE TABLE users (id SERIAL PRIMARY KEY, name TEXT NOT NULL);", "DROP TABLE users;");
    // Hold the advisory lock so the subprocess's pg_try_advisory_lock fails fast.
    await db`SELECT pg_advisory_lock(${LOCK_ID})`;
    try {
      const result = await cli(dir, "up");
      expect(result.exitCode).toBe(1);
      expect(result.stderr).toContain("Another migration is running");
    } finally {
      await db`SELECT pg_advisory_unlock(${LOCK_ID})`;
    }
  });
  test("CRUD lifecycle: create → save/upsert → remove", async () => {
    await db.unsafe("CREATE TABLE items (id SERIAL PRIMARY KEY, name TEXT NOT NULL, score INT)");
    // create inserts new rows
    const alice = await create(db, "items", { name: "alice", score: 10 });
    expect(alice.id).toBe(1);
    expect(alice.name).toBe("alice");
    const bob = await create(db, "items", { name: "bob", score: 42 });
    expect(bob.id).toBe(2);
    // create rejects duplicate id
    // Fixed: .rejects assertions return promises — unawaited they can pass silently.
    await expect(create(db, "items", { id: 1, name: "dup" })).rejects.toThrow();
    // save upserts — update alice's score
    const updated = await save(db, "items", { id: alice.id, name: "alice", score: 99 });
    expect(updated.id).toBe(alice.id);
    expect(updated.score).toBe(99);
    const [{ count }] = await db`SELECT count(*)::int as count FROM items`;
    expect(count).toBe(2); // still 2 rows
    // save also works as insert (without id)
    const carol = await save(db, "items", { name: "carol", score: 77 });
    expect(carol.id).toBe(3);
    // generate types mid-flow — reflects actual table schema
    const ts = await generateTypes(db, "items");
    expect(ts).toContain("export type Items = {");
    expect(ts).toContain(" name: string;"); // NOT NULL → required
    expect(ts).toContain(" score?: number;"); // nullable → optional
    expect(ts).toContain(" id?: number;"); // has default → optional
    await expect(generateTypes(db, "nonexistent")).rejects.toThrow("not found");
    // remove returns deleted row
    const deleted = await remove(db, "items", bob.id);
    expect(deleted!.name).toBe("bob");
    // remove returns null for missing id
    expect(await remove(db, "items", 999)).toBeNull();
    // verify final state
    const rows = await db`SELECT name FROM items ORDER BY id`;
    expect(rows.map((r: any) => r.name)).toEqual(["alice", "carol"]);
  });
  test("CRUD rejects empty objects", async () => {
    // Fixed: both .rejects assertions were unawaited in the original.
    await expect(create(db, "x", {})).rejects.toThrow("at least one key");
    await expect(save(db, "x", {})).rejects.toThrow("at least one key");
  });
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment