Created
February 12, 2025 13:56
-
-
Save alexwilson/b1e4fa1eb0017c67132c25eb1c134e5e to your computer and use it in GitHub Desktop.
Proxy PageRouter Pages
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import { promises as fs } from 'fs'; | |
import * as path from 'path'; | |
import * as ts from 'typescript'; | |
/** | |
* AST walking logic inspired by https://gist.github.com/Glavin001/6281f12ee97f40fb8fbde5a319457119/revisions | |
*/ | |
/** | |
* Recursively traverses a directory and collects all file paths | |
* that match the provided suffixes. | |
* | |
* @param dir - The directory to traverse. | |
* @param suffixes - The file suffixes to match (e.g., ['.page.ts', '.page.tsx']). | |
* @returns A promise that resolves to an array of matched file paths. | |
*/ | |
async function traverseDirectory(dir: string, suffixes: string[]): Promise<string[]> { | |
let results: string[] = []; | |
try { | |
const entries = await fs.readdir(dir, { withFileTypes: true }); | |
for (const entry of entries) { | |
const fullPath = path.join(dir, entry.name); | |
if (entry.isDirectory()) { | |
const subDirResults = await traverseDirectory(fullPath, suffixes); | |
results = results.concat(subDirResults); | |
} else if (entry.isFile() && suffixes.some(suffix => entry.name.endsWith(suffix))) { | |
results.push(fullPath); | |
} | |
} | |
return results; | |
} catch (err) { | |
console.error(`Error traversing directory ${dir}:`, err); | |
throw err; | |
} | |
} | |
/** | |
* Extracts exports from a TypeScript source file. | |
* Identifies the default export and named exports that start with 'get'. | |
* | |
* @param sourceFile - The TypeScript source file. | |
* @returns An object containing a boolean indicating the presence of a default export and an array of 'get' named exports. | |
*/ | |
function getExportsForSourceFile(sourceFile: ts.SourceFile): { hasDefaultExport: boolean; getExports: string[] } { | |
const getExports: string[] = []; | |
let hasDefaultExport = false; | |
const hasModifier = (node: ts.Node, kind: ts.SyntaxKind): boolean => { | |
const modifiers = ts.canHaveModifiers(node) ? ts.getModifiers(node) : undefined; | |
return !!modifiers && modifiers.some((mod: ts.Modifier) => mod.kind === kind); | |
}; | |
const visit = (node: ts.Node) => { | |
if (ts.isExportAssignment(node)) { | |
// Handles `export default ...` | |
hasDefaultExport = true; | |
} else if (ts.isExportDeclaration(node) && node.exportClause && !node.moduleSpecifier) { | |
// Handles `export { ... }` | |
node.exportClause.forEachChild(child => { | |
if (ts.isExportSpecifier(child)) { | |
const exportName = child.name.text; | |
if (exportName.startsWith('get')) { | |
getExports.push(exportName); | |
} | |
} | |
}); | |
} else if (ts.isFunctionDeclaration(node) && hasModifier(node, ts.SyntaxKind.ExportKeyword)) { | |
// Handles `export function getX() { }` and `export default function handler() { }` | |
if (node.name) { | |
const exportName = node.name.text; | |
if (exportName.startsWith('get')) { | |
getExports.push(exportName); | |
} | |
} | |
if (hasModifier(node, ts.SyntaxKind.DefaultKeyword)) { | |
hasDefaultExport = true; | |
} | |
} else if (ts.isVariableStatement(node) && hasModifier(node, ts.SyntaxKind.ExportKeyword)) { | |
// Handles `export const getX = ...` | |
node.declarationList.declarations.forEach(decl => { | |
if (ts.isIdentifier(decl.name) && decl.name.text.startsWith('get')) { | |
getExports.push(decl.name.text); | |
} | |
}); | |
// Check if any variable in the statement has the 'Default' modifier | |
if (hasModifier(node, ts.SyntaxKind.DefaultKeyword)) { | |
hasDefaultExport = true; | |
} | |
} | |
ts.forEachChild(node, visit); | |
}; | |
visit(sourceFile); | |
return { hasDefaultExport, getExports }; | |
} | |
/** | |
* Generates the content for the new page file by re-exporting the default export | |
* and specific named exports from the legacy file. | |
* | |
* @param importPath - The relative import path to the legacy file (without extension). | |
* @param getExports - The array of named exports starting with 'get'. | |
* @param hasDefaultExport - Whether the legacy file has a default export. | |
* @returns The content to be written to the new file. | |
*/ | |
function generateFileContent(importPath: string, getExports: string[], hasDefaultExport: boolean): string { | |
let content = ''; | |
if (hasDefaultExport) { | |
content += `export { default } from '${importPath}';\n`; | |
} | |
if (getExports.length > 0) { | |
content += `export { ${getExports.join(', ')} } from '${importPath}';\n`; | |
} | |
return content; | |
} | |
/** | |
* Parses command-line arguments and returns configuration options. | |
* | |
* @returns An object containing the configuration options. | |
*/ | |
function parseArguments(): { | |
oldDir: string; | |
newDir: string; | |
dryRun: boolean; | |
overwrite: boolean; | |
} { | |
const args = process.argv.slice(2); | |
const options: { [key: string]: boolean } = {}; | |
const positional: string[] = []; | |
args.forEach(arg => { | |
if (arg.startsWith('--')) { | |
options[arg.slice(2)] = true; | |
} else { | |
positional.push(arg); | |
} | |
}); | |
if (positional.length < 2) { | |
console.error('Error: Please provide both the old and new PageRouter directories.'); | |
console.error('Usage: ts-node tools/generate-pagerouter-pages.ts <oldDir> <newDir> [--dry-run] [--overwrite]'); | |
process.exit(1); | |
} | |
return { | |
oldDir: positional[0], | |
newDir: positional[1], | |
dryRun: options['dry-run'] || false, | |
overwrite: options['overwrite'] || false, | |
}; | |
} | |
/**
 * Main function to generate the corresponding page files.
 *
 * For every `*.page.ts` / `*.page.tsx` file under the old directory, writes a
 * mirror file under the new directory (with the `.page` infix stripped) whose
 * body re-exports the legacy module's default export and `get*` named exports.
 * Honors --dry-run (log what would be written) and --overwrite (replace
 * existing target files instead of skipping them).
 */
async function generatePages() {
  const { oldDir, newDir, dryRun, overwrite } = parseArguments();
  const projectRoot = process.cwd();
  // Resolve both directories against the current working directory unless absolute.
  const legacyPagesDir = path.isAbsolute(oldDir) ? oldDir : path.join(projectRoot, oldDir);
  const targetPagesDir = path.isAbsolute(newDir) ? newDir : path.join(projectRoot, newDir);
  const targetSuffixes = ['.page.ts', '.page.tsx'];
  console.log('Scanning legacy pages...');
  let legacyFiles: string[];
  try {
    legacyFiles = await traverseDirectory(legacyPagesDir, targetSuffixes);
    if (legacyFiles.length === 0) {
      console.log('No legacy page files found.');
      return;
    }
  } catch (err) {
    // A failed scan is fatal: nothing can be generated without the file list.
    console.error('Failed to scan legacy pages:', err);
    process.exit(1);
  }
  console.log(`Found ${legacyFiles.length} legacy page file(s).`);
  for (const legacyFilePath of legacyFiles) {
    // Mirror the legacy file's position relative to the old root under the new root.
    const relativeLegacyPath = path.relative(legacyPagesDir, legacyFilePath);
    const parsedPath = path.parse(relativeLegacyPath);
    if (!targetSuffixes.some(suffix => parsedPath.base.endsWith(suffix))) {
      console.warn(`Skipping file with unsupported suffix: ${legacyFilePath}`);
      continue;
    }
    // Strip the '.page' infix: 'home.page.tsx' -> 'home.tsx'.
    // NOTE(review): replace() only removes the FIRST '.page' occurrence in the
    // name — confirm no legacy file contains '.page' twice.
    const baseName = parsedPath.name.replace('.page', '');
    const newFileName = baseName + parsedPath.ext;
    const targetFileDir = path.join(targetPagesDir, parsedPath.dir);
    const targetFilePath = path.join(targetFileDir, newFileName);
    // Build a POSIX-style, extension-less import path from the new file back
    // to the legacy file, ensuring it is explicitly relative ('./...').
    let relativeImportPath = path.relative(targetFileDir, legacyFilePath);
    relativeImportPath = relativeImportPath.replace(/\.tsx?$/, '').split(path.sep).join('/');
    if (!relativeImportPath.startsWith('.')) {
      relativeImportPath = './' + relativeImportPath;
    }
    // Parse the legacy file to extract exports
    let getExports: string[] = [];
    let hasDefaultExport = false;
    try {
      const legacyFileContent = await fs.readFile(legacyFilePath, 'utf8');
      const sourceFile = ts.createSourceFile(
        legacyFilePath,
        legacyFileContent,
        ts.ScriptTarget.ES2015,
        /* setParentNodes */ true
      );
      const exportsInfo = getExportsForSourceFile(sourceFile);
      getExports = exportsInfo.getExports;
      hasDefaultExport = exportsInfo.hasDefaultExport;
    } catch (err) {
      // Per-file failure is non-fatal: log and move on to the remaining files.
      console.error(`Failed to read or parse file ${legacyFilePath}:`, err);
      continue; // Skip to the next file
    }
    const fileContent = generateFileContent(relativeImportPath, getExports, hasDefaultExport);
    try {
      // Check if the target file already exists
      try {
        await fs.access(targetFilePath);
        if (overwrite) {
          console.warn(`Overwriting existing file: ${path.relative(projectRoot, targetFilePath)}`);
        } else {
          console.warn(`File already exists and will be skipped: ${path.relative(projectRoot, targetFilePath)}`);
          continue; // Skip to the next file
        }
      } catch {
        // File does not exist, proceed to create it
      }
      if (!dryRun) {
        await fs.mkdir(targetFileDir, { recursive: true });
        await fs.writeFile(targetFilePath, fileContent, 'utf8');
        console.log(`Created: ${path.relative(projectRoot, targetFilePath)} with exports: ${getExports.join(', ') || 'None'}`);
      } else {
        console.log(`Dry-Run: Would create ${path.relative(projectRoot, targetFilePath)} with exports: ${getExports.join(', ') || 'None'}`);
      }
    } catch (err) {
      console.error(`Failed to create file ${targetFilePath}:`, err);
    }
  }
  console.log('Page generation completed.');
}
// Execute the script. generatePages handles per-file errors internally; any
// rejection that still escapes is logged and converted to a non-zero exit code.
generatePages().catch((err) => {
  console.error('An unexpected error occurred:', err);
  process.exit(1);
});
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment