// ./supabase/split-schema.ts

import * as TOML from '@iarna/toml'
import fs from 'fs'
import { Parser } from 'node-sql-parser'
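
// Reads a SQL dump from stdin and splits it into per-table / per-function
// files under <dir>/schemas. Resulting layout (illustrative, with
// hypothetical table and function names):
//   schemas/000_extensions.sql   CREATE EXTENSION statements
//   schemas/000_schema.sql       schema-level grants, privileges, leftovers
//   schemas/000_fn_slugify.sql   utility functions listed in config.toml
//   schemas/001_orders.sql       tables not listed in config.toml
//   schemas/002_users.sql        tables listed in config.toml, in config order
//   schemas/101_fn_touch.sql     functions not listed in config.toml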

interface MigrationsConfig {
  schema_paths: string[]
}

interface DatabaseConfig {
  migrations: MigrationsConfig
}

interface TomlConfig {
  db: DatabaseConfig
}

// Track tables and functions that have already been assigned to files
const assignedTables = new Set<string>()
const assignedFunctions = new Set<string>()

// Sanitize a table name so it is safe to use in a file name
function sanitizeTableName(name: string): string {
  return name.toLowerCase().replace(/[^a-z0-9_]/g, '_')
}
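
// e.g. sanitizeTableName('User Profiles-v2') -> 'user_profiles_v2'
// (hypothetical name, shown only to illustrate the character mapping)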

function getSupabaseDir(): string {
  // Get the target directory from the first command line argument
  const dirArg = process.argv[2]

  // Remove a trailing slash if present; default to ./supabase
  return dirArg ? dirArg.replace(/\/$/, '') : './supabase'
}

// Base directory for all config reads and schema writes
const supabaseDir = getSupabaseDir()

// Read config.toml and extract db.migrations.schema_paths
function readConfigPaths(): string[] {
  const configContent = fs.readFileSync(`${supabaseDir}/config.toml`, 'utf8')
  const parsedConfig = TOML.parse(configContent)
  const config = parsedConfig as unknown as TomlConfig
  return config.db?.migrations?.schema_paths || []
}
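
// A minimal config.toml shape this expects (hypothetical paths, for
// illustration only):
//
//   [db.migrations]
//   schema_paths = [
//     "./schemas/000_fn_slugify.sql",
//     "./schemas/*_users.sql",
//   ]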

// Extract the function name from a CREATE or ALTER FUNCTION statement
function extractFunctionNameFromSql(sql: string): string | null {
  const createMatch = sql.match(
    /CREATE\s+(?:OR\s+REPLACE\s+)?FUNCTION\s+"public"\."([^"]+)"/i,
  )
  const alterMatch = sql.match(/ALTER\s+FUNCTION\s+"public"\."([^"]+)"/i)
  return (createMatch || alterMatch)?.[1] || null
}
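
// e.g. (hypothetical function):
//   extractFunctionNameFromSql('CREATE OR REPLACE FUNCTION "public"."touch"() RETURNS trigger ...')
//   // -> 'touch'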

// Guess the owning table from a function name, if possible
function extractTableFromFunctionName(functionName: string): string | null {
  if (!functionName) return null

  // Common patterns like update_tablename_field or get_tablename_data
  const matches = functionName.match(
    /(?:update|get|create|delete|insert|select|handle|process)_([a-z_]+)(?:_|$)/,
  )
  if (matches && matches[1]) {
    return matches[1]
  }

  return null
}
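
// Note this is a best-effort heuristic: for 'update_users_email' the greedy
// [a-z_]+ captures 'users_email', not 'users', so callers should treat the
// result as a hint rather than an exact table name.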

// Extract a table name from a statement that couldn't be parsed into an AST
function extractTableNameFromRawStatement(sql: string): string | null {
  // Helper to extract the table name from a schema-qualified name
  function extractTableFromQualified(name: string): string {
    // Handle quoted identifiers like "public"."table_name" or unquoted like public.table_name
    const match = name.match(/(?:[\w_]+|"[\w_]+")\s*\.\s*(?:[\w_]+|"([\w_]+)")/)
    if (match) {
      // If we captured a quoted identifier, use that; otherwise use the full match
      const tablePart = match[1] || match[0].split('.')[1]
      return tablePart.replace(/"/g, '').trim()
    }
    return name.replace(/"/g, '').trim()
  }

  // Handle ALTER TABLE ONLY statements
  const alterTableOnlyMatch = sql.match(
    /ALTER\s+TABLE\s+ONLY\s+(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (alterTableOnlyMatch) {
    const tableName =
      alterTableOnlyMatch[1] ||
      alterTableOnlyMatch[0].split(/ALTER\s+TABLE\s+ONLY\s+(?:public\.)?/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle ALTER PUBLICATION ... ADD TABLE ONLY statements
  const alterPublicationOnlyMatch = sql.match(
    /ALTER\s+PUBLICATION\s+.*?\s+ADD\s+TABLE\s+ONLY\s+(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (alterPublicationOnlyMatch) {
    const tableName =
      alterPublicationOnlyMatch[1] ||
      alterPublicationOnlyMatch[0].split(/\s+ONLY\s+(?:public\.)?/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle CREATE INDEX statements
  const createIndexMatch = sql.match(
    /CREATE\s+(?:UNIQUE\s+)?INDEX\s+(?:IF\s+NOT\s+EXISTS\s+)?[\w_"]+\s+ON\s+(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (createIndexMatch) {
    const tableName =
      createIndexMatch[1] ||
      createIndexMatch[0].split(/\s+ON\s+(?:public\.)?/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle CREATE TRIGGER statements
  const createTriggerMatch = sql.match(
    /CREATE\s+(?:OR\s+REPLACE\s+)?TRIGGER\s+(?:IF\s+NOT\s+EXISTS\s+)?[\w_"]+\s+(?:BEFORE|AFTER|INSTEAD\s+OF)\s+(?:INSERT|UPDATE|DELETE|TRUNCATE)\s+ON\s+(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (createTriggerMatch) {
    const tableName =
      createTriggerMatch[1] ||
      createTriggerMatch[0].split(/\s+ON\s+(?:public\.)?/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle CREATE TABLE statements
  const createTableMatch = sql.match(
    /CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (createTableMatch) {
    const tableName =
      createTableMatch[1] ||
      createTableMatch[0].split(/CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle CREATE POLICY statements
  const policyMatch = sql.match(
    /CREATE\s+POLICY\s+.*\s+ON\s+(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (policyMatch) {
    const tableName = policyMatch[1] || policyMatch[0].split(/\s+ON\s+/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle GRANT statements on tables
  const grantTableMatch = sql.match(
    /GRANT\s+.*\s+ON\s+(?:TABLE\s+)?(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (grantTableMatch) {
    const tableName =
      grantTableMatch[1] ||
      grantTableMatch[0].split(/\s+ON\s+(?:TABLE\s+)?/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle remaining ALTER TABLE statements
  const alterMatch = sql.match(
    /ALTER\s+TABLE\s+(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (alterMatch) {
    const tableName =
      alterMatch[1] || alterMatch[0].split(/ALTER\s+TABLE\s+/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle COMMENT ON statements
  const commentMatch = sql.match(
    /COMMENT\s+ON\s+(?:COLUMN\s+)?(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (commentMatch) {
    const tableName =
      commentMatch[1] ||
      commentMatch[0].split(/COMMENT\s+ON\s+(?:COLUMN\s+)?/i)[1]
    return extractTableFromQualified(tableName)
  }

  // Handle remaining ALTER PUBLICATION statements
  const alterPublicationMatch = sql.match(
    /ALTER\s+PUBLICATION\s+.*?\s+(?:ADD|SET)\s+TABLE\s+(?:(?:[\w_]+|"[\w_]+")\s*\.\s*)?(?:[\w_]+|"([\w_]+)")/i,
  )
  if (alterPublicationMatch) {
    const tableName =
      alterPublicationMatch[1] ||
      alterPublicationMatch[0].split(/\s+TABLE\s+/i)[1]
    return extractTableFromQualified(tableName)
  }

  return null
}
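
// Branch order above matters: ALTER TABLE ONLY must be tried before the
// generic ALTER TABLE branch, or 'ONLY' itself would be captured as the
// table name. e.g. (hypothetical statements):
//   extractTableNameFromRawStatement('ALTER TABLE ONLY "public"."users" ADD ...')  // -> 'users'
//   extractTableNameFromRawStatement('CREATE INDEX idx ON public.orders (id);')    // -> 'orders'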

// Process schema paths into pattern matching rules
function processSchemaPatterns(schemaPaths: string[]): {
  tablePatterns: {
    pattern: RegExp
    file: string
    order: number
    name: string
  }[]
  functionPatterns: { pattern: RegExp; file: string }[]
} {
  const tablePatterns: {
    pattern: RegExp
    file: string
    order: number
    name: string
  }[] = []
  const functionPatterns: { pattern: RegExp; file: string }[] = []

  // Config-specified tables are numbered starting from 002
  let fileIndex = 2

  // First pass: identify utility functions (they should be defined first)
  const utilityFunctionNames = new Set(
    schemaPaths
      .map((path) => {
        const match = path.match(/\*?_fn_([a-z_]+)\.sql$/i)
        return match ? match[1] : null
      })
      .filter(Boolean),
  )

  // Add patterns for utility functions first (they'll live in 000_fn_* files)
  utilityFunctionNames.forEach((name) => {
    functionPatterns.push({
      pattern: new RegExp(`^${name}$`, 'i'),
      file: `${supabaseDir}/schemas/000_fn_${name}.sql`,
    })
  })

  // Second pass: handle regular table patterns
  schemaPaths.forEach((pathPattern, index) => {
    // Skip function patterns; they were handled in the first pass
    const fnMatch = pathPattern.match(/\*?_fn_([a-z_]+)\.sql$/i)
    if (fnMatch) {
      return
    }

    const match = pathPattern.match(/\*?_([a-z_]+)\.sql$/i)
    if (match) {
      const name = match[1]
      tablePatterns.push({
        pattern: new RegExp(`^${name}$`, 'i'),
        file: `${supabaseDir}/schemas/${fileIndex.toString().padStart(3, '0')}_${name}.sql`,
        order: index,
        name: name,
      })
      fileIndex++
    }
  })

  return { tablePatterns, functionPatterns }
}
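
// Illustrative mapping (hypothetical config entries):
//   './schemas/000_fn_slugify.sql' -> functionPatterns: /^slugify$/i -> schemas/000_fn_slugify.sql
//   './schemas/*_users.sql'        -> tablePatterns:    /^users$/i   -> schemas/002_users.sql
// Functions not listed in config are written later as 101_fn_<name>.sql, and
// unlisted tables as 001_<name>.sql (see determineTargetFile below).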

// Determine which file a statement should go into
function determineTargetFile(
  statement: any,
  patterns: {
    tablePatterns: {
      pattern: RegExp
      file: string
      order: number
      name: string
    }[]
    functionPatterns: { pattern: RegExp; file: string }[]
  },
  rawSql: string,
  fileStatements: Record<string, string[]>,
): string | null {
  // Ignore specific statements
  const sqlTrimmed = rawSql.trim().toUpperCase()
  if (
    sqlTrimmed.startsWith('ALTER SCHEMA') &&
    sqlTrimmed.includes('OWNER TO')
  ) {
    console.log('[info] Ignoring schema ownership statement:', rawSql.trim())
    return 'ignore'
  }
  if (sqlTrimmed.startsWith('RESET ALL')) {
    console.log('[info] Ignoring RESET ALL statement:', rawSql.trim())
    return 'ignore'
  }

  // Handle extensions first (they need to be created before anything else)
  if (sqlTrimmed.startsWith('CREATE EXTENSION')) {
    return `${supabaseDir}/schemas/000_extensions.sql`
  }

  // Handle administrative commands that belong in 000_schema.sql:
  if (
    // publication ownership
    (sqlTrimmed.startsWith('ALTER PUBLICATION') &&
      rawSql.includes('OWNER TO')) ||
    // schema-level GRANT statements
    (sqlTrimmed.startsWith('GRANT') &&
      (rawSql.includes('ON SCHEMA') || rawSql.includes('USAGE ON SCHEMA'))) ||
    // schema comments
    sqlTrimmed.startsWith('COMMENT ON SCHEMA') ||
    // default privileges
    sqlTrimmed.startsWith('ALTER DEFAULT PRIVILEGES') ||
    // GRANT ALL ON FUNCTION without a specific public function name
    (sqlTrimmed.startsWith('GRANT ALL ON FUNCTION') &&
      !rawSql.match(/GRANT\s+ALL\s+ON\s+FUNCTION\s+"public"\."([^"]+)"\s*\(/i))
  ) {
    return `${supabaseDir}/schemas/000_schema.sql`
  }

  // Handle function definitions and alterations. A dump may emit either
  // CREATE FUNCTION or CREATE OR REPLACE FUNCTION, so accept both (matching
  // the regex in extractFunctionNameFromSql).
  if (
    sqlTrimmed.startsWith('CREATE OR REPLACE FUNCTION "PUBLIC".') ||
    sqlTrimmed.startsWith('CREATE FUNCTION "PUBLIC".') ||
    sqlTrimmed.startsWith('ALTER FUNCTION "PUBLIC".')
  ) {
    const functionName = extractFunctionNameFromSql(rawSql)
    if (functionName) {
      // Check if we already have a file for this function
      for (const file of Object.keys(fileStatements)) {
        if (file.includes(`_fn_${functionName}.sql`)) {
          return file
        }
      }

      // Check if the function matches any pattern from config (including utility functions)
      for (const { pattern, file } of patterns.functionPatterns) {
        if (pattern.test(functionName)) {
          assignedFunctions.add(functionName)
          return file
        }
      }

      // Functions not listed in config get their own file with the 101 prefix
      if (!assignedFunctions.has(functionName)) {
        assignedFunctions.add(functionName)
        const fileName = `${supabaseDir}/schemas/101_fn_${functionName}.sql`
        // Initialize the array for this file if it doesn't exist
        fileStatements[fileName] = fileStatements[fileName] || []
        return fileName
      }
    }
    return null
  }
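
  // e.g. (hypothetical function 'touch'): its CREATE statement lands in
  // 000_fn_touch.sql when 'touch' is listed in schema_paths, otherwise in
  // 101_fn_touch.sql; later ALTER FUNCTION statements for 'touch' follow it
  // into the same file via the fileStatements key lookup above.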

  // Handle grants on individual functions
  if (sqlTrimmed.startsWith('GRANT ALL ON FUNCTION')) {
    // Extract the function name from the GRANT statement, ignoring parameters
    const functionMatch = rawSql.match(
      /GRANT\s+ALL\s+ON\s+FUNCTION\s+"public"\."([^"]+)"\s*\(/i,
    )
    if (functionMatch) {
      const functionName = functionMatch[1]

      // Check if we already have a file for this function
      for (const file of Object.keys(fileStatements)) {
        if (file.includes(`_fn_${functionName}.sql`)) {
          return file
        }
      }

      // Otherwise route the grant to the function's 101-prefixed file
      const fileName = `${supabaseDir}/schemas/101_fn_${functionName}.sql`
      // Initialize the array for this file if it doesn't exist
      fileStatements[fileName] = fileStatements[fileName] || []
      return fileName
    }
    // If we can't extract a function name, put it in 000_schema.sql
    return `${supabaseDir}/schemas/000_schema.sql`
  }

  // Special handling for foreign key constraints
  if (rawSql.includes('FOREIGN KEY') && rawSql.includes('REFERENCES')) {
    // Extract the current table and the referenced table
    const currentTableMatch = rawSql.match(
      /ALTER\s+TABLE\s+(?:ONLY\s+)?(?:"public"\.)?["']?([^"'\s]+)["']?/i,
    )
    const referencedTableMatch = rawSql.match(
      /REFERENCES\s+(?:"public"\.)?["']?([^"'\s]+)["']?/i,
    )

    if (currentTableMatch && referencedTableMatch) {
      const currentTable = currentTableMatch[1].replace(/['"]/g, '')
      const referencedTable = referencedTableMatch[1].replace(/['"]/g, '')

      // Find the order of both tables in config
      const currentTablePattern = patterns.tablePatterns.find(
        (p) => p.name === currentTable,
      )
      const referencedTablePattern = patterns.tablePatterns.find(
        (p) => p.name === referencedTable,
      )

      if (currentTablePattern && referencedTablePattern) {
        // If the referenced table comes later in config, move this constraint
        // to the referenced table's file so it runs after both tables exist
        if (referencedTablePattern.order > currentTablePattern.order) {
          console.log(
            `[info] Moving foreign key constraint from ${currentTable} to ${referencedTable} based on config order`,
          )
          return referencedTablePattern.file
        }
      }
    }
  }
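
  // Example of the reordering above (hypothetical tables): if schema_paths
  // lists orders before customers, then
  //   ALTER TABLE ONLY "public"."orders"
  //     ADD CONSTRAINT orders_customer_id_fkey FOREIGN KEY (customer_id)
  //     REFERENCES "public"."customers" (id);
  // is routed to the customers file, so the constraint is created only after
  // both tables exist.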

  // Handle different statement types using the parsed AST
  let tableName: string | null = null

  // Strip surrounding double quotes from an identifier, if present
  const unquote = (name: string | null | undefined): string | null =>
    name?.startsWith('"') && name.endsWith('"')
      ? name.slice(1, -1)
      : name ?? null

  if (typeof statement === 'object' && statement !== null) {
    if (statement.type === 'create') {
      if (statement.keyword === 'table') {
        // Handle schema-qualified table names
        tableName = unquote(statement.table?.[0]?.table)
      } else if (statement.keyword === 'index') {
        // Handle CREATE INDEX
        tableName = unquote(statement.table?.table)
      } else if (statement.keyword === 'trigger') {
        // Handle CREATE TRIGGER
        tableName = unquote(statement.on?.table)
      } else if (statement.keyword === 'function') {
        tableName = extractTableFromFunctionName(
          statement.function?.name?.value,
        )
      }
    } else if (statement.type === 'alter') {
      if (statement.keyword === 'table') {
        tableName = unquote(statement.table?.name?.table)
      } else if (statement.keyword === 'publication') {
        // Try to get the table name from an ALTER PUBLICATION statement
        const tableList = statement.tables || []
        if (tableList.length > 0) {
          tableName = unquote(tableList[0].table)
        }
      }
    }
  }

  // If we couldn't get the table name from the AST, fall back to raw SQL
  if (!tableName) {
    tableName = extractTableNameFromRawStatement(rawSql)
  }

  if (!tableName) return null

  // Check if the table matches any pattern from config
  for (const { pattern, file } of patterns.tablePatterns) {
    if (pattern.test(tableName)) {
      assignedTables.add(tableName)
      return file
    }
  }

  // Tables not listed in config get their own file with the 001 prefix
  if (!assignedTables.has(tableName)) {
    assignedTables.add(tableName)
    const fileName = `${supabaseDir}/schemas/001_${sanitizeTableName(tableName)}.sql`
    // Initialize the array for this file if it doesn't exist
    fileStatements[fileName] = fileStatements[fileName] || []
    return fileName
  }

  // The table was assigned earlier but not via a pattern; reuse its file
  for (const file of Object.keys(fileStatements)) {
    if (file.includes(`_${sanitizeTableName(tableName)}.sql`)) {
      return file
    }
  }

  return null
}

// Main function to split the schema
async function splitSchema(schemaContent: string) {
  try {
    // Reset trackers for a new run
    assignedTables.clear()
    assignedFunctions.clear()

    // Read schema paths from config
    const schemaPaths = readConfigPaths()
    console.log('[info] Schema paths:')
    console.log(
      schemaPaths
        .filter((path) => typeof path === 'string')
        .map((path) => `- ${path}`)
        .join('\n'),
    )

    // Process patterns
    const patterns = processSchemaPatterns(schemaPaths)

    // Create a parser instance in PostgreSQL mode
    const parser = new Parser()
    const opt = { database: 'postgresql' }

    // Group statements by target file. Keying the default bucket by the real
    // 000_schema.sql path means administrative statements routed there
    // directly and unmatched fallback statements accumulate in one array,
    // instead of two buckets overwriting each other at write time.
    const defaultSchemaFile = `${supabaseDir}/schemas/000_schema.sql`
    const fileStatements: Record<string, string[]> = {
      [defaultSchemaFile]: [], // Default for statements that match no pattern
    }

    // Keep track of all files we're going to write
    const filesToWrite = new Set<string>()
    filesToWrite.add(defaultSchemaFile)
    filesToWrite.add(`${supabaseDir}/schemas/000_extensions.sql`)

    // Read existing schema files so we can detect files that need deleting
    const existingFiles = fs
      .readdirSync(`${supabaseDir}/schemas`)
      .filter((file) => file.endsWith('.sql'))
      .map((file) => `${supabaseDir}/schemas/${file}`)

    // Split into individual statements, handling dollar-quoted strings
    const statements: string[] = []
    let currentStatement = ''
    let inDollarQuote = false
    let currentDollarTag = ''

    // Split the content into lines while preserving newlines
    const lines = schemaContent.split(/(?<=\n)/)

    for (const line of lines) {
      // Check for a dollar quote opening or closing on this line
      const dollarStart = line.match(/\$\w*\$/)
      const dollarEnd = line.match(/\$\w*\$;?\s*$/)

      if (!inDollarQuote && dollarStart) {
        inDollarQuote = true
        currentDollarTag = dollarStart[0]
      } else if (
        inDollarQuote &&
        dollarEnd &&
        dollarEnd[0].startsWith(currentDollarTag)
      ) {
        inDollarQuote = false
        currentDollarTag = ''
      }

      currentStatement += line

      // Only split on semicolon if we're not inside a dollar-quoted string
      if (!inDollarQuote && line.trim().endsWith(';')) {
        if (currentStatement.trim()) {
          statements.push(currentStatement.trim())
        }
        currentStatement = ''
      }
    }

    // Add any remaining statement
    if (currentStatement.trim()) {
      statements.push(currentStatement.trim())
    }
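
    // Dollar quoting is why we can't simply split on every ';'. A dumped
    // function body such as (hypothetical):
    //   CREATE FUNCTION "public"."touch"() RETURNS trigger AS $$
    //   BEGIN
    //     NEW.updated_at = now();
    //     RETURN NEW;
    //   END;
    //   $$ LANGUAGE plpgsql;
    // contains semicolons inside $$...$$ that must not terminate the
    // statement. The line-based tracker above assumes the opening and closing
    // tags appear on different lines, which matches pg_dump-style output.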

    // Parse each statement and determine which file it should go into
    for (const statement of statements) {
      try {
        // Parse the statement to get its AST
        const ast = parser.astify(statement, opt)

        // Some SQL constructs parse into an array of statements
        const statementList = Array.isArray(ast) ? ast : [ast]

        let targetFile: string | null = null

        // Check each statement in the array
        for (const stmt of statementList) {
          const file = determineTargetFile(
            stmt,
            patterns,
            statement,
            fileStatements,
          )
          if (file) {
            targetFile = file
            break
          }
        }

        // Add to the appropriate file bucket
        if (targetFile) {
          if (targetFile === 'ignore') {
            continue // Skip this statement
          }
          fileStatements[targetFile] = fileStatements[targetFile] || []
          fileStatements[targetFile].push(statement)
        } else {
          fileStatements[defaultSchemaFile].push(statement)
        }
      } catch (error) {
        // For unparseable statements, try to determine the file from raw SQL
        const targetFile = determineTargetFile(
          null,
          patterns,
          statement,
          fileStatements,
        )

        if (targetFile === 'ignore') {
          continue // Skip this statement
        }
        if (targetFile) {
          fileStatements[targetFile] = fileStatements[targetFile] || []
          fileStatements[targetFile].push(statement)
        } else {
          console.warn(
            `Failed to parse statement: ${statement.substring(0, 100)}...`,
          )
          fileStatements[defaultSchemaFile].push(statement)
        }
      }
    }

    // Write files that have content and track which ones we write
    for (const [file, stmts] of Object.entries(fileStatements)) {
      if (stmts.length > 0) {
        const content = stmts.join('\n\n') + '\n' // Trailing newline at EOF
        fs.writeFileSync(file, content)
        filesToWrite.add(file)
      }
    }

    // Find old files that are no longer needed
    const filesToDelete = existingFiles.filter((file) => {
      // Never delete 000_*.sql files
      if (file.match(/000_[^\/]+\.sql$/)) return false

      // Only consider files we're not writing in this run
      if (!filesToWrite.has(file)) {
        // Extract the base name without the numeric prefix
        const baseNameMatch = file.match(/\d{3}_(.+)\.sql$/)
        if (!baseNameMatch) return false
        const baseName = baseNameMatch[1]

        // Delete only if we're writing a file with the same base name under a
        // different numeric prefix (i.e. the file was effectively renamed)
        return Array.from(filesToWrite).some((newFile) => {
          const newBaseNameMatch = newFile.match(/\d{3}_(.+)\.sql$/)
          return newBaseNameMatch && newBaseNameMatch[1] === baseName
        })
      }
      return false
    })

    // Delete the old files, logging each rename
    for (const oldFile of filesToDelete) {
      // Find the new file with the same base name
      const oldBaseMatch = oldFile.match(/\d{3}_(.+)\.sql$/)
      if (oldBaseMatch) {
        const baseName = oldBaseMatch[1]
        const newFile = Array.from(filesToWrite).find((file) => {
          const newBaseMatch = file.match(/\d{3}_(.+)\.sql$/)
          return newBaseMatch && newBaseMatch[1] === baseName
        })

        if (newFile) {
          // Log just the file names, not the full paths, for a cleaner message
          const oldFileName = oldFile.split('/').pop() || oldFile
          const newFileName = newFile.split('/').pop() || newFile
          console.log(`[info] Moved ${oldFileName} → ${newFileName}`)
        }
      }
      fs.unlinkSync(oldFile)
    }

    console.log('[info] Schema split complete!')
  } catch (error) {
    console.error('Error splitting schema:', error)
    process.exit(1)
  }
}

// Read the full contents of stdin
async function readStdin(): Promise<string> {
  return new Promise((resolve, reject) => {
    let data = ''

    process.stdin.setEncoding('utf8')

    process.stdin.on('data', (chunk) => {
      data += chunk
    })

    process.stdin.on('end', () => {
      resolve(data)
    })

    process.stdin.on('error', (error) => {
      reject(error)
    })
  })
}

// Main execution
if (require.main === module) {
  ;(async () => {
    try {
      // Check whether we're receiving input from stdin
      if (!process.stdin.isTTY) {
        const schemaContent = await readStdin()
        await splitSchema(schemaContent)
      } else {
        console.error(
          'Error: No input provided. Please pipe SQL content to this script.',
        )
        console.error(
          'Example: supabase db dump --local --schema public | bun ./supabase/split-schema.ts [dir]',
        )
        console.error('If [dir] is not specified, defaults to ./supabase/')
        process.exit(1)
      }
    } catch (error) {
      console.error('Error:', error)
      process.exit(1)
    }
  })()
}