Parses SQL migrations directly. Handles JSONB defaults, SQL comments → JSDoc, and geometric types (Point/Polygon as structured types, not unknown). Auto-detects Prettier. Works offline.

npm install -D type-fest
npx tsx generate-types.ts
| #!/usr/bin/env tsx | |
| /* eslint-disable @typescript-eslint/no-unused-vars */ | |
| /* eslint-disable no-console */ | |
| /* eslint-disable camelcase */ | |
| /** | |
| * Complete Supabase Type Generator - Schema-File First with Index Support | |
| * | |
| * This script: | |
| * 1. Reads SQL schema files from your Supabase directory (default for --local) | |
| * 2. Parses table definitions, columns, enums, composite types, AND INDEXES directly from SQL | |
| * 3. Generates TypeScript types that preserve EXACT database naming | |
| * 4. Parses JSONB columns with default values for enhanced types | |
| * 5. Includes index information in generated types (opt-in via --include-indexes) | |
| * 6. Auto-detects Prettier configuration for consistent indentation | |
| * 7. Only connects to a database when explicitly requested with the --db flag | |
| * 8. Automatically excludes 'this' and 'constraint' columns from generated types | |
| * | |
| * Indentation Priority: | |
| * - --indent flag (highest priority) | |
| * - Prettier config (.prettierrc, .prettierrc.json, etc.) | |
| * - Default (2 spaces) | |
| * | |
| * Perfect for local development - no database connection needed! | |
| */ | |
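| /* | |
| * Illustrative example (not part of the generator itself): a migration such as | |
| * | |
| *   create table "invoices" ( | |
| *     "id" uuid primary key default gen_random_uuid(), | |
| *     "amount" numeric(10,2) not null, | |
| *     "metadata" jsonb default '{}' | |
| *   ); | |
| * | |
| * is parsed into a TableDefinition and emitted as a Supabase-style Row type, | |
| * roughly { id: string; amount: number; metadata: Json | null }. The exact | |
| * output shape depends on the flags documented below. | |
| */ | |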
| import { execSync } from 'child_process'; | |
| import { readFileSync, writeFileSync, existsSync, mkdirSync, statSync, unlinkSync } from 'fs'; | |
| import { join, resolve } from 'path'; | |
| import { globSync } from 'glob'; | |
| /** | |
| * ============================================================================ | |
| * GENERATOR CONFIGURATION | |
| * ============================================================================ | |
| */ | |
| const GENERATOR_CONFIG = { | |
| // Paths | |
| useLocalFlag: true, | |
| useWorkdirFlag: true, | |
| defaultWorkdir: './supabase', | |
| defaultOutputDir: './src/lib/types/generated', | |
| defaultSchema: 'public', | |
| // Type generation options | |
| extractNestedTypes: false, | |
| deduplicateTypes: true, | |
| namingConvention: 'preserve' as | |
| | 'preserve' | |
| | 'PascalCase' | |
| | 'camelCase' | |
| | 'snake_case' | |
| | 'SCREAMING_SNAKE_CASE', | |
| alphabetical: false, | |
| indentSize: 2 as number, // Number of spaces for indentation | |
| usePrettier: false, // Auto-detect and use Prettier config | |
| includeIndexes: false, // Include index metadata in generated types (opt-in) | |
| includeComments: true, // Include SQL comments as JSDoc in generated types | |
| // Source priority: 'sql' = read from SQL files, 'db' = query database | |
| defaultSource: 'sql' as 'sql' | 'db', | |
| // Logging | |
| verboseLogging: true | |
| } as const; | |
| type NamingConvention = | |
| | 'preserve' | |
| | 'PascalCase' | |
| | 'camelCase' | |
| | 'snake_case' | |
| | 'SCREAMING_SNAKE_CASE'; | |
| type SourceType = 'sql' | 'db'; | |
| /** | |
| * ============================================================================ | |
| * TYPE DEFINITIONS | |
| * ============================================================================ | |
| */ | |
| interface SupabaseConfig { | |
| db?: { | |
| migrations?: { | |
| schema_paths?: string[]; | |
| }; | |
| }; | |
| } | |
| interface PrettierConfig { | |
| tabWidth?: number; | |
| useTabs?: boolean; | |
| semi?: boolean; | |
| singleQuote?: boolean; | |
| trailingComma?: string; | |
| printWidth?: number; | |
| } | |
| interface ColumnDefinition { | |
| name: string; | |
| type: string; | |
| nullable: boolean; | |
| defaultValue: string | null; | |
| isArray: boolean; | |
| isPrimaryKey: boolean; | |
| isUnique: boolean; | |
| comment?: string; | |
| foreignKey?: { | |
| table: string; | |
| column: string; | |
| schema?: string; | |
| }; | |
| } | |
| interface IndexDefinition { | |
| name: string; | |
| tableName: string; | |
| columns: string[]; | |
| isUnique: boolean; | |
| method?: string; // btree, hash, gin, gist, etc. | |
| whereClause?: string; | |
| } | |
| interface TableDefinition { | |
| schema: string; | |
| name: string; | |
| columns: ColumnDefinition[]; | |
| comment?: string; | |
| relationships: RelationshipDefinition[]; | |
| indexes: IndexDefinition[]; | |
| } | |
| interface RelationshipDefinition { | |
| foreignKeyName: string; | |
| columns: string[]; | |
| isOneToOne: boolean; | |
| referencedRelation: string; | |
| referencedColumns: string[]; | |
| } | |
| interface EnumDefinition { | |
| schema: string; | |
| name: string; | |
| values: string[]; | |
| } | |
| interface FunctionDefinition { | |
| schema: string; | |
| name: string; | |
| args: Array<{ name: string; type: string; hasDefault?: boolean }>; | |
| returns: string; | |
| } | |
| interface CompositeTypeDefinition { | |
| schema: string; | |
| name: string; | |
| attributes: Array<{ name: string; type: string }>; | |
| } | |
| interface GeneratorConfig { | |
| supabase: { | |
| source: SourceType; | |
| connectionString?: string; | |
| schema: string; | |
| useWorkdirFlag: boolean; | |
| inputWorkdir: string | null; | |
| configWorkdir: string | null; | |
| configPath: string; | |
| }; | |
| schemaPaths: string[]; | |
| output: { | |
| dir: string; | |
| tempFile: string; | |
| finalFile: string; | |
| }; | |
| extractNestedTypes: boolean; | |
| deduplicateTypes: boolean; | |
| verboseLogging: boolean; | |
| namingConvention: NamingConvention; | |
| alphabetical: boolean; | |
| indentSize: number; | |
| includeIndexes: boolean; | |
| includeComments: boolean; | |
| } | |
| interface JsonbColumn { | |
| table: string; | |
| column: string; | |
| defaultValue: string | null; | |
| comment?: string; | |
| fileName: string; | |
| } | |
| interface TypeDefinition { | |
| table: string; | |
| column: string; | |
| name: string; | |
| typeDefinition: string; | |
| comment?: string; | |
| example?: Record<string, unknown>; | |
| nestedTypes?: TypeDefinition[]; | |
| } | |
| /** | |
| * ============================================================================ | |
| * CONSOLE OUTPUT | |
| * ============================================================================ | |
| */ | |
| const colors = { | |
| reset: '\x1b[0m', | |
| bright: '\x1b[1m', | |
| green: '\x1b[32m', | |
| blue: '\x1b[34m', | |
| yellow: '\x1b[33m', | |
| red: '\x1b[31m', | |
| cyan: '\x1b[36m' | |
| } as const; | |
| type ColorName = keyof typeof colors; | |
| let VERBOSE_LOGGING = true; | |
| /** | |
| * Logs a colored message to the console, honoring VERBOSE_LOGGING unless forced. | |
| * @param message - The message to log | |
| * @param color - The color to use for the message | |
| * @param force - Whether to force logging regardless of verbosity setting | |
| */ | |
| function log(message: string, color: ColorName = 'reset', force = false): void { | |
| if (VERBOSE_LOGGING || force) { | |
| console.log(`${colors[color]}${message}${colors.reset}`); | |
| } | |
| } | |
| /** | |
| * ============================================================================ | |
| * COMMAND LINE PARSING | |
| * ============================================================================ | |
| * | |
| * Available Flags: | |
| * | |
| * Source Options: | |
| * --local [workdir] Read from local SQL files (default) | |
| * --workdir <path> Specify Supabase working directory | |
| * --db, --use-database Query database for schema | |
| * --connection-string <url> Database connection string | |
| * | |
| * Output Options: | |
| * --output, -o <dir> Output directory for generated types | |
| * --schema <name> Database schema name (default: public) | |
| * | |
| * Formatting Options: | |
| * --indent, --indent-size <n> Indentation size (1-8 spaces, overrides Prettier) | |
| * --use-prettier Use Prettier config for indentation (default: false) | |
| * --no-prettier Ignore Prettier config, use default indentation | |
| * --naming <convention> Naming convention: preserve, PascalCase, camelCase, | |
| * snake_case, SCREAMING_SNAKE_CASE | |
| * --alphabetical, --sort Sort types alphabetically | |
| * | |
| * Type Generation Options: | |
| * --extract-nested, --deep-nested Extract nested types from JSONB | |
| * --deduplicate, --dedupe Deduplicate type definitions | |
| * --no-deduplicate, --no-dedupe Disable type deduplication | |
| * --include-indexes, --indexes Include index metadata in generated types | |
| * --no-comments, --skip-comments Disable parsing SQL comments as JSDoc | |
| * | |
| * Logging Options: | |
| * --no-logs, --silent, --quiet Suppress log output | |
| * | |
| * Examples: | |
| * ./generate-types.ts # Use defaults, auto-detect Prettier | |
| * ./generate-types.ts --indent 4 # Force 4-space indentation | |
| * ./generate-types.ts --no-prettier # Ignore Prettier, use 2 spaces | |
| * ./generate-types.ts --naming camelCase # Convert names to camelCase | |
| * ./generate-types.ts --alphabetical # Sort all types alphabetically | |
| */ | |
| /** | |
| * Parse command line arguments | |
| * @returns Parsed arguments | |
| */ | |
| function parseCommandLineArgs() { | |
| const args = process.argv.slice(2); | |
| let useLocal: boolean = GENERATOR_CONFIG.useLocalFlag; | |
| let explicitWorkdirFlag = false; | |
| let workdir: string | null = null; | |
| let connectionString: string | null = null; | |
| let useDatabase = false; // Only use DB if explicitly requested | |
| let schema: string = GENERATOR_CONFIG.defaultSchema; | |
| let outputDir: string | null = null; | |
| let extractNestedTypes: boolean = GENERATOR_CONFIG.extractNestedTypes; | |
| let deduplicateTypes: boolean = GENERATOR_CONFIG.deduplicateTypes; | |
| let verboseLogging: boolean = GENERATOR_CONFIG.verboseLogging; | |
| let namingConvention: NamingConvention = GENERATOR_CONFIG.namingConvention; | |
| let alphabetical: boolean = GENERATOR_CONFIG.alphabetical; | |
| let indentSize: number | null = null; // null means auto-detect | |
| let usePrettier: boolean = false; | |
| let includeIndexes: boolean = GENERATOR_CONFIG.includeIndexes; | |
| let includeComments: boolean = GENERATOR_CONFIG.includeComments; | |
| for (let i = 0; i < args.length; i++) { | |
| const arg = args[i]; | |
| if (arg === '--local') { | |
| useLocal = true; | |
| if (i + 1 < args.length && !args[i + 1].startsWith('--')) { | |
| workdir = args[++i]; | |
| explicitWorkdirFlag = false; | |
| } | |
| } else if (arg === '--workdir') { | |
| explicitWorkdirFlag = true; | |
| workdir = args[++i]; | |
| } else if (arg === '--db' || arg === '--use-database') { | |
| useDatabase = true; | |
| } else if (arg === '--connection-string') { | |
| connectionString = args[++i]; | |
| useDatabase = true; | |
| } else if (arg === '--schema') { | |
| schema = args[++i]; | |
| } else if (arg === '--output' || arg === '-o') { | |
| outputDir = args[++i]; | |
| } else if (arg === '--deep-nested' || arg === '--extract-nested') { | |
| extractNestedTypes = true; | |
| } else if (arg === '--deduplicate' || arg === '--dedupe' || arg === '--dedupe-types') { | |
| deduplicateTypes = true; | |
| } else if (arg === '--no-deduplicate' || arg === '--no-dedupe') { | |
| deduplicateTypes = false; | |
| } else if (arg === '--no-logs' || arg === '--silent' || arg === '--quiet') { | |
| verboseLogging = false; | |
| } else if (arg === '--alphabetical' || arg === '--sort' || arg === '--sort-alphabetical') { | |
| alphabetical = true; | |
| } else if (arg === '--use-prettier') { | |
| usePrettier = true; | |
| } else if (arg === '--no-prettier') { | |
| usePrettier = false; | |
| } else if (arg === '--indent' || arg === '--indent-size') { | |
| const size = parseInt(args[++i]); | |
| if (!isNaN(size) && size > 0 && size <= 8) { | |
| indentSize = size; | |
| } else { | |
| log( | |
| `Invalid indent size: ${args[i]}. Must be between 1 and 8. Using default or Prettier config.`, | |
| 'yellow', | |
| true | |
| ); | |
| } | |
| } else if (arg === '--include-indexes' || arg === '--indexes') { | |
| includeIndexes = true; | |
| } else if (arg === '--no-comments' || arg === '--skip-comments') { | |
| includeComments = false; | |
| } else if (arg === '--naming' || arg === '--naming-convention') { | |
| const nextArg = args[++i]; | |
| if ( | |
| [ | |
| 'preserve', | |
| 'PascalCase', | |
| 'camelCase', | |
| 'snake_case', | |
| 'SCREAMING_SNAKE_CASE' | |
| ].includes(nextArg) | |
| ) { | |
| namingConvention = nextArg as NamingConvention; | |
| } else { | |
| log( | |
| `Invalid naming convention: ${nextArg}. Using default: ${GENERATOR_CONFIG.namingConvention}`, | |
| 'yellow', | |
| true | |
| ); | |
| } | |
| } else if (!arg.startsWith('--')) { | |
| workdir = arg; | |
| explicitWorkdirFlag = false; | |
| } | |
| } | |
| // Determine source: SQL files by default, DB only if explicitly requested | |
| const source: SourceType = useDatabase ? 'db' : 'sql'; | |
| return { | |
| useLocal, | |
| useWorkdir: explicitWorkdirFlag, | |
| workdir: workdir || GENERATOR_CONFIG.defaultWorkdir, | |
| connectionString: connectionString || process.env.DATABASE_URL, | |
| source, | |
| schema, | |
| outputDir: outputDir || GENERATOR_CONFIG.defaultOutputDir, | |
| extractNestedTypes, | |
| deduplicateTypes, | |
| verboseLogging, | |
| namingConvention, | |
| alphabetical, | |
| indentSize, | |
| usePrettier, | |
| includeIndexes, | |
| includeComments | |
| }; | |
| } | |
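| // Illustrative example (assumed invocation): | |
| //   npx tsx generate-types.ts --db --schema auth --indent 4 | |
| // returns { source: 'db', schema: 'auth', indentSize: 4, ... } with the | |
| // remaining fields taken from GENERATOR_CONFIG defaults. | |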
| /** | |
| * ============================================================================ | |
| * TOML CONFIG PARSING | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Parse a minimal subset of TOML: just enough to extract db.migrations.schema_paths. | |
| * @param content - TOML file content | |
| * @returns Parsed SupabaseConfig | |
| */ | |
| function parseToml(content: string): SupabaseConfig { | |
| const config: SupabaseConfig = {}; | |
| const lines = content.split('\n'); | |
| let currentSection: string[] = []; | |
| let inArray = false; | |
| let arrayKey = ''; | |
| let arrayItems: string[] = []; | |
| for (let i = 0; i < lines.length; i++) { | |
| const line = lines[i]; | |
| const trimmed = line.trim(); | |
| if (trimmed.startsWith('#')) { | |
| continue; | |
| } | |
| if (trimmed.startsWith('[') && trimmed.endsWith(']')) { | |
| currentSection = trimmed.slice(1, -1).split('.'); | |
| continue; | |
| } | |
| if (trimmed.includes('=') && trimmed.includes('[')) { | |
| const match = trimmed.match(/^(\w+)\s*=\s*\[/); | |
| if (match) { | |
| arrayKey = match[1]; | |
| inArray = true; | |
| arrayItems = []; | |
| if (trimmed.includes(']')) { | |
| const itemMatches = trimmed.matchAll(/"([^"]+)"/g); | |
| for (const item of itemMatches) { | |
| arrayItems.push(item[1]); | |
| } | |
| inArray = false; | |
| if ( | |
| currentSection[0] === 'db' && | |
| currentSection[1] === 'migrations' && | |
| arrayKey === 'schema_paths' | |
| ) { | |
| if (!config.db) { | |
| config.db = {}; | |
| } | |
| if (!config.db.migrations) { | |
| config.db.migrations = {}; | |
| } | |
| config.db.migrations.schema_paths = arrayItems; | |
| } | |
| } else { | |
| const itemMatches = trimmed.matchAll(/"([^"]+)"/g); | |
| for (const item of itemMatches) { | |
| arrayItems.push(item[1]); | |
| } | |
| } | |
| continue; | |
| } | |
| } | |
| if (inArray) { | |
| if (trimmed.includes(']')) { | |
| const itemMatches = trimmed.matchAll(/"([^"]+)"/g); | |
| for (const item of itemMatches) { | |
| arrayItems.push(item[1]); | |
| } | |
| inArray = false; | |
| if ( | |
| currentSection[0] === 'db' && | |
| currentSection[1] === 'migrations' && | |
| arrayKey === 'schema_paths' | |
| ) { | |
| if (!config.db) { | |
| config.db = {}; | |
| } | |
| if (!config.db.migrations) { | |
| config.db.migrations = {}; | |
| } | |
| config.db.migrations.schema_paths = arrayItems; | |
| } | |
| } else { | |
| const itemMatches = trimmed.matchAll(/"([^"]+)"/g); | |
| for (const item of itemMatches) { | |
| arrayItems.push(item[1]); | |
| } | |
| } | |
| continue; | |
| } | |
| } | |
| return config; | |
| } | |
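| // Illustrative example: a config.toml containing | |
| //   [db.migrations] | |
| //   schema_paths = ["./schemas/*.sql", "./schemas/extra.sql"] | |
| // parses to { db: { migrations: { schema_paths: ['./schemas/*.sql', './schemas/extra.sql'] } } }. | |
| // Anything outside db.migrations.schema_paths is intentionally ignored. | |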
| /** | |
| * @param workdir - Supabase working directory | |
| * @returns Tuple of schema paths and actual workdir used | |
| */ | |
| function readSupabaseConfig(workdir: string | null): [string[], string | null] { | |
| let configPath = workdir | |
| ? join(workdir, 'config.toml') | |
| : join(process.cwd(), 'supabase', 'config.toml'); | |
| let actualWorkdir = workdir; | |
| if (workdir && !existsSync(configPath)) { | |
| const fallbackPath = join(workdir, 'supabase', 'config.toml'); | |
| if (existsSync(fallbackPath)) { | |
| log(` Config not found at: ${configPath}`, 'yellow'); | |
| log(` Using fallback: ${fallbackPath}`, 'cyan'); | |
| configPath = fallbackPath; | |
| actualWorkdir = join(workdir, 'supabase'); | |
| } | |
| } | |
| if (!existsSync(configPath)) { | |
| log(` Warning: config.toml not found at ${configPath}`, 'yellow'); | |
| log(` Falling back to default schema paths`, 'yellow'); | |
| const defaultPaths = ['migrations/*.sql', 'migrations/**/*.sql']; | |
| return [defaultPaths, actualWorkdir]; | |
| } | |
| log(` Reading config from: ${configPath}`, 'cyan'); | |
| const content = readFileSync(configPath, 'utf8'); | |
| const config = parseToml(content); | |
| const schemaPaths = config.db?.migrations?.schema_paths || []; | |
| if (schemaPaths.length === 0) { | |
| log(` Warning: No schema_paths found in config.toml`, 'yellow'); | |
| const defaultPaths = ['migrations/*.sql', 'migrations/**/*.sql']; | |
| return [defaultPaths, actualWorkdir]; | |
| } | |
| log(` Found ${schemaPaths.length} schema path(s) in config`, 'green'); | |
| // Show which are glob patterns | |
| const globPatterns = schemaPaths.filter((p) => p.includes('*')); | |
| const specificFiles = schemaPaths.filter((p) => !p.includes('*')); | |
| if (globPatterns.length > 0) { | |
| log( | |
| ` ${globPatterns.length} glob pattern(s), ${specificFiles.length} specific file(s)`, | |
| 'cyan' | |
| ); | |
| } | |
| return [schemaPaths, actualWorkdir]; | |
| } | |
| /** | |
| * @param schemaPaths - Schema paths from config | |
| * @param workdir - Supabase working directory | |
| * @returns Resolved list of SQL schema files | |
| */ | |
| function resolveSchemaFiles(schemaPaths: string[], workdir: string | null): string[] { | |
| const baseDir = workdir ? resolve(process.cwd(), workdir) : resolve(process.cwd(), 'supabase'); | |
| log(`\n 📁 Resolving schema files from base directory: ${baseDir}`, 'cyan'); | |
| const allFiles: string[] = []; | |
| const seenFiles = new Set<string>(); // Track files we've already added to avoid duplicates | |
| for (const schemaPath of schemaPaths) { | |
| const pattern = join(baseDir, schemaPath); | |
| try { | |
| // Check if this is a glob pattern | |
| if (pattern.includes('*')) { | |
| // Glob pattern - expand it | |
| // IMPORTANT: glob requires forward slashes on all platforms (including Windows) | |
| const normalizedPattern = pattern.replace(/\\/g, '/'); | |
| log(` 🔍 Expanding glob pattern: ${schemaPath}`, 'cyan'); | |
| const files = globSync(normalizedPattern, { nodir: true, absolute: true }); | |
| log( | |
| ` Found ${files.length} file(s) matching pattern`, | |
| files.length > 0 ? 'green' : 'yellow' | |
| ); | |
| // Add files in the order glob returns them, skipping duplicates | |
| let addedCount = 0; | |
| let skippedCount = 0; | |
| for (const file of files) { | |
| // Normalize path separators for consistent comparison | |
| const normalized = resolve(file).replace(/\\/g, '/'); | |
| if (normalized.endsWith('.sql')) { | |
| if (seenFiles.has(normalized)) { | |
| skippedCount++; | |
| if (VERBOSE_LOGGING) { | |
| const relativePath = normalized.replace( | |
| baseDir.replace(/\\/g, '/') + '/', | |
| '' | |
| ); | |
| log(` ⊝ ${relativePath} (duplicate, skipped)`, 'yellow'); | |
| } | |
| } else { | |
| allFiles.push(normalized); | |
| seenFiles.add(normalized); | |
| addedCount++; | |
| if (VERBOSE_LOGGING) { | |
| const relativePath = normalized.replace( | |
| baseDir.replace(/\\/g, '/') + '/', | |
| '' | |
| ); | |
| log(` ✓ ${relativePath}`, 'green'); | |
| } | |
| } | |
| } | |
| } | |
| if (addedCount > 0 || skippedCount > 0) { | |
| log(` Added: ${addedCount}, Skipped (duplicates): ${skippedCount}`, 'cyan'); | |
| } | |
| } else { | |
| // Specific file path - check if it exists | |
| const normalized = resolve(pattern).replace(/\\/g, '/'); | |
| if ( | |
| existsSync(pattern) && | |
| statSync(pattern).isFile() && | |
| normalized.endsWith('.sql') | |
| ) { | |
| if (seenFiles.has(normalized)) { | |
| log(` ⊝ ${schemaPath} (duplicate, skipped)`, 'yellow'); | |
| } else { | |
| allFiles.push(normalized); | |
| seenFiles.add(normalized); | |
| const relativePath = schemaPath; | |
| log(` ✓ ${relativePath}`, 'green'); | |
| } | |
| } else if (!existsSync(pattern)) { | |
| log(` ✗ File not found: ${schemaPath}`, 'yellow'); | |
| } else if (!normalized.endsWith('.sql')) { | |
| log(` ⊝ Skipped (not .sql): ${schemaPath}`, 'yellow'); | |
| } | |
| } | |
| } catch (error) { | |
| const errorMsg = error instanceof Error ? error.message : String(error); | |
| log(` ✗ Error resolving pattern ${schemaPath}: ${errorMsg}`, 'red'); | |
| } | |
| } | |
| log( | |
| `\n 📊 Total unique SQL files resolved: ${allFiles.length}`, | |
| allFiles.length > 0 ? 'green' : 'red' | |
| ); | |
| return allFiles; | |
| } | |
| /** | |
| * ============================================================================ | |
| * PRETTIER CONFIG DETECTION | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @returns Detected Prettier configuration or null if none found | |
| */ | |
| function detectPrettierConfig(): PrettierConfig | null { | |
| const possiblePaths = [ | |
| '.prettierrc', | |
| '.prettierrc.json', | |
| '.prettierrc.yaml', | |
| '.prettierrc.yml', | |
| 'prettier.config.js', | |
| '.prettierrc.js', | |
| 'package.json' | |
| ]; | |
| for (const configPath of possiblePaths) { | |
| const fullPath = join(process.cwd(), configPath); | |
| if (!existsSync(fullPath)) { | |
| continue; | |
| } | |
| try { | |
| if (configPath === 'package.json') { | |
| const content = readFileSync(fullPath, 'utf8'); | |
| const packageJson = JSON.parse(content); | |
| if (packageJson.prettier) { | |
| log(` Found Prettier config in package.json`, 'cyan'); | |
| return packageJson.prettier as PrettierConfig; | |
| } | |
| continue; | |
| } | |
| if (configPath.endsWith('.json') || configPath === '.prettierrc') { | |
| const content = readFileSync(fullPath, 'utf8'); | |
| const config = JSON.parse(content); | |
| log(` Found Prettier config: ${configPath}`, 'cyan'); | |
| return config as PrettierConfig; | |
| } | |
| if (configPath.endsWith('.yaml') || configPath.endsWith('.yml')) { | |
| const content = readFileSync(fullPath, 'utf8'); | |
| const config = parseSimpleYaml(content); | |
| if (config) { | |
| log(` Found Prettier config: ${configPath}`, 'cyan'); | |
| return config as PrettierConfig; | |
| } | |
| } | |
| if (configPath.endsWith('.js')) { | |
| // For .js files, we can't easily parse them without eval | |
| // So we'll just log that it exists but can't be used | |
| log(` Found ${configPath} but cannot parse JS config files`, 'yellow'); | |
| log(` Use --indent to specify indentation manually`, 'yellow'); | |
| } | |
| } catch { | |
| // Failed to parse this config file, continue to next | |
| continue; | |
| } | |
| } | |
| return null; | |
| } | |
| /** | |
| * Parse a flat key: value Prettier YAML config (nested structures are not supported). | |
| * @param content - YAML file content | |
| * @returns Parsed PrettierConfig or null | |
| */ | |
| function parseSimpleYaml(content: string): PrettierConfig | null { | |
| try { | |
| const config: PrettierConfig = {}; | |
| const lines = content.split('\n'); | |
| for (const line of lines) { | |
| const trimmed = line.trim(); | |
| if (!trimmed || trimmed.startsWith('#')) { | |
| continue; | |
| } | |
| const match = trimmed.match(/^(\w+):\s*(.+)$/); | |
| if (!match) { | |
| continue; | |
| } | |
| const [, key, value] = match; | |
| const cleanValue = value.trim(); | |
| if (key === 'tabWidth') { | |
| config.tabWidth = parseInt(cleanValue); | |
| } else if (key === 'useTabs') { | |
| config.useTabs = cleanValue === 'true'; | |
| } else if (key === 'semi') { | |
| config.semi = cleanValue === 'true'; | |
| } else if (key === 'singleQuote') { | |
| config.singleQuote = cleanValue === 'true'; | |
| } else if (key === 'trailingComma') { | |
| config.trailingComma = cleanValue.replace(/['"]/g, ''); | |
| } else if (key === 'printWidth') { | |
| config.printWidth = parseInt(cleanValue); | |
| } | |
| } | |
| return Object.keys(config).length > 0 ? config : null; | |
| } catch { | |
| return null; | |
| } | |
| } | |
| /** | |
| * @param prettierConfig - Detected Prettier configuration | |
| * @returns Indent size from Prettier config or null if not specified | |
| */ | |
| function getPrettierIndentSize(prettierConfig: PrettierConfig | null): number | null { | |
| if (!prettierConfig) { | |
| return null; | |
| } | |
| // If useTabs is true, Prettier uses tabs (we'll default to 2 spaces as fallback) | |
| if (prettierConfig.useTabs) { | |
| log(` Prettier uses tabs, defaulting to 2 spaces for type generation`, 'yellow'); | |
| return 2; | |
| } | |
| // Return tabWidth if specified | |
| if (prettierConfig.tabWidth !== undefined) { | |
| return prettierConfig.tabWidth; | |
| } | |
| return null; | |
| } | |
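| // Illustrative example: a .prettierrc of { "tabWidth": 4, "useTabs": false } | |
| // makes getPrettierIndentSize(detectPrettierConfig()) return 4; per the | |
| // priority rules above, an explicit --indent flag still overrides this value. | |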
| /** | |
| * ============================================================================ | |
| * SQL PARSING - ALTER TABLE FOREIGN KEY CONSTRAINTS | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Parsed relationship or null if none found | |
| */ | |
| function parseAlterTableForeignKey( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): { tableName: string; relationship: RelationshipDefinition } | null { | |
| // Match ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY statements | |
| // Examples: | |
| // alter table only "invoices" add constraint "fkPayOrder" foreign key ("payorder_id") references "payorders" ("id") on delete set null; | |
| // alter table users add constraint fk_role foreign key (role_id) references roles(id); | |
| const alterMatch = sqlContent.match( | |
| /alter\s+table\s+(?:only\s+)?(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s+add\s+constraint\s+["']?(\w+)["']?\s+foreign\s+key\s*\(([^)]+)\)\s+references\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(([^)]+)\)/i | |
| ); | |
| if (!alterMatch) { | |
| return null; | |
| } | |
| const tableSchema = alterMatch[1] || schema; | |
| const tableName = alterMatch[2]; | |
| const constraintName = alterMatch[3]; | |
| const columnsStr = alterMatch[4]; | |
| const refSchema = alterMatch[5]; | |
| const refTable = alterMatch[6]; | |
| const refColumnsStr = alterMatch[7]; | |
| // Parse columns - handle quoted and unquoted names | |
| const columns = columnsStr | |
| .split(',') | |
| .map((col) => col.trim().replace(/["']/g, '')) | |
| .filter((col) => col.length > 0); | |
| const refColumns = refColumnsStr | |
| .split(',') | |
| .map((col) => col.trim().replace(/["']/g, '')) | |
| .filter((col) => col.length > 0); | |
| // Don't include schema prefix in referencedRelation - it should just be the table name | |
| // Supabase's type system handles schema resolution separately | |
| const referencedRelation = refTable; | |
| // Check if it's one-to-one (we can't really tell from ALTER TABLE, so default to false) | |
| // User would need to have a UNIQUE constraint on the column for true one-to-one | |
| const isOneToOne = false; | |
| return { | |
| tableName, | |
| relationship: { | |
| foreignKeyName: constraintName, | |
| columns, | |
| isOneToOne, | |
| referencedRelation, | |
| referencedColumns: refColumns | |
| } | |
| }; | |
| } | |
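| // Illustrative example: the first statement shown above parses to | |
| //   { tableName: 'invoices', relationship: { foreignKeyName: 'fkPayOrder', | |
| //     columns: ['payorder_id'], isOneToOne: false, | |
| //     referencedRelation: 'payorders', referencedColumns: ['id'] } } | |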
| /** | |
| * Parse ALTER TABLE ADD CONSTRAINT UNIQUE statements | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Parsed unique constraint or null if none found | |
| */ | |
| function parseAlterTableUnique( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): { tableName: string; columns: string[] } | null { | |
| // Match ALTER TABLE ... ADD CONSTRAINT ... UNIQUE statements | |
| // Examples: | |
| // alter table invoices add constraint unique_payorder_id unique (payorder_id); | |
| // alter table users add unique (email); | |
| const alterMatch = sqlContent.match( | |
| /alter\s+table\s+(?:only\s+)?(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s+add\s+(?:constraint\s+["']?\w+["']?\s+)?unique\s*\(([^)]+)\)/i | |
| ); | |
| if (!alterMatch) { | |
| return null; | |
| } | |
| const tableSchema = alterMatch[1] || schema; | |
| const tableName = alterMatch[2]; | |
| const columnsStr = alterMatch[3]; | |
| // Parse columns - handle quoted and unquoted names | |
| const columns = columnsStr | |
| .split(',') | |
| .map((col) => col.trim().replace(/["']/g, '')) | |
| .filter((col) => col.length > 0); | |
| return { | |
| tableName, | |
| columns | |
| }; | |
| } | |
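| // Illustrative example: 'alter table users add unique (email)' parses to | |
| //   { tableName: 'users', columns: ['email'] } | |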
| /** | |
| * ============================================================================ | |
| * SQL PARSING - INDEX DEFINITIONS | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Parsed index definition or null if none found | |
| */ | |
| function parseIndexDefinition( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): IndexDefinition | null { | |
| // Match CREATE INDEX statements | |
| // Examples: | |
| // create index "idx_booking_internal" on "booking" using "btree" ("internal"); | |
| // create unique index loads_loadNumber_null_warehouse_idx on public.loads ("loadNumber") | |
| // create index idx_user_roles_role_id on user_roles (role_id); | |
| // create index "idx_warehouses_address_lookup" on "warehouses" ("city", "country", "addressState") where city is not null | |
| const indexMatch = sqlContent.match( | |
| /create\s+(unique\s+)?index\s+(?:if\s+not\s+exists\s+)?["']?(\w+)["']?\s+on\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*(?:using\s+["']?(\w+)["']?)?\s*\(([^)]+)\)(?:\s+where\s+(.+))?/i | |
| ); | |
| if (!indexMatch) { | |
| return null; | |
| } | |
| const isUnique = Boolean(indexMatch[1]); | |
| const indexName = indexMatch[2]; | |
| const indexSchema = indexMatch[3] || schema; | |
| const tableName = indexMatch[4]; | |
| const method = indexMatch[5]; | |
| const columnsStr = indexMatch[6]; | |
| const whereClause = indexMatch[7]?.trim(); | |
| // Parse columns - handle quoted and unquoted names | |
| const columns = columnsStr | |
| .split(',') | |
| .map((col) => col.trim().replace(/["']/g, '')) | |
| .filter((col) => col.length > 0); | |
| return { | |
| name: indexName, | |
| tableName, | |
| columns, | |
| isUnique, | |
| method, | |
| whereClause | |
| }; | |
| } | |
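| // Illustrative example: | |
| //   create unique index "users_email_idx" on "users" using "btree" ("email") where deleted_at is null | |
| // parses to { name: 'users_email_idx', tableName: 'users', columns: ['email'], | |
| //   isUnique: true, method: 'btree', whereClause: 'deleted_at is null' } | |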
| /** | |
| * ============================================================================ | |
| * SQL PARSING - COMMENT STATEMENTS | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Parse table comment statement | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Object with table name and comment, or null if none found | |
| */ | |
| function parseTableComment( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): { tableName: string; comment: string; schema: string } | null { | |
| // Match: COMMENT on table "schema"."table_name" is 'comment text'; | |
| // or: COMMENT on table "table_name" is 'comment text'; | |
| const commentMatch = sqlContent.match( | |
| /comment\s+on\s+table\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s+is\s+'([^']+)'/i | |
| ); | |
| if (!commentMatch) { | |
| return null; | |
| } | |
| const tableSchema = commentMatch[1] || schema; | |
| const tableName = commentMatch[2]; | |
| const comment = commentMatch[3]; | |
| return { | |
| tableName, | |
| comment, | |
| schema: tableSchema | |
| }; | |
| } | |
| /** | |
| * Parse column comment statement | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Object with table name, column name, and comment, or null if none found | |
| */ | |
| function parseColumnComment( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): { tableName: string; columnName: string; comment: string; schema: string } | null { | |
| // Match: COMMENT on column "schema"."table_name"."column_name" is 'comment text'; | |
| // or: COMMENT on column "table_name"."column_name" is 'comment text'; | |
| const commentMatch = sqlContent.match( | |
| /comment\s+on\s+column\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\.["']?(\w+)["']?\s+is\s+'([^']+)'/i | |
| ); | |
| if (!commentMatch) { | |
| return null; | |
| } | |
| // Capture group 1 (schema) is optional; fall back to the default schema when absent | |
| const tableSchema = commentMatch[1] || schema; | |
| const tableName = commentMatch[2]; | |
| const columnName = commentMatch[3]; | |
| const comment = commentMatch[4]; | |
| return { | |
| tableName, | |
| columnName, | |
| comment, | |
| schema: tableSchema | |
| }; | |
| } | |
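| // Illustrative example: | |
| //   comment on column "public"."users"."email" is 'Primary contact address'; | |
| // parses to { tableName: 'users', columnName: 'email', | |
| //   comment: 'Primary contact address', schema: 'public' } | |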
| /** | |
| * ============================================================================ | |
| * SQL PARSING - TABLE DEFINITIONS | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Parsed table definition or null if none found | |
| */ | |
| function parseTableDefinition( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): TableDefinition | null { | |
| // Match CREATE TABLE - but we need to manually find the closing parenthesis | |
| // because column definitions can have nested parentheses | |
| const tableStartMatch = sqlContent.match( | |
| /create\s+table\s+(?:if\s+not\s+exists\s+)?(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(/i | |
| ); | |
| if (!tableStartMatch) { | |
| return null; | |
| } | |
| const tableSchema = tableStartMatch[1] || schema; | |
| const tableName = tableStartMatch[2]; | |
| const startIndex = tableStartMatch.index! + tableStartMatch[0].length; | |
| // Find the matching closing parenthesis | |
| let parenDepth = 1; | |
| let endIndex = startIndex; | |
| let inString = false; | |
| let stringChar: string | null = null; | |
| while (endIndex < sqlContent.length && parenDepth > 0) { | |
| const char = sqlContent[endIndex]; | |
| const prevChar = endIndex > 0 ? sqlContent[endIndex - 1] : ''; | |
| // Handle string state | |
| if ((char === '"' || char === "'") && prevChar !== '\\') { | |
| if (!inString) { | |
| inString = true; | |
| stringChar = char; | |
| } else if (char === stringChar) { | |
| inString = false; | |
| stringChar = null; | |
| } | |
| } | |
| // Only count parentheses outside of strings | |
| if (!inString) { | |
| if (char === '(') { | |
| parenDepth++; | |
| } | |
| if (char === ')') { | |
| parenDepth--; | |
| } | |
| } | |
| endIndex++; | |
| } | |
| if (parenDepth !== 0) { | |
| // Couldn't find matching closing paren | |
| return null; | |
| } | |
| let tableBody = sqlContent.substring(startIndex, endIndex - 1); | |
| if (!tableBody || tableBody.trim().length === 0) { | |
| return null; | |
| } | |
| // REMOVE COMMENTS FIRST (before normalizing whitespace) | |
| tableBody = tableBody.replace(/--[^\n]*/g, ''); | |
| // Clean up the table body - normalize whitespace | |
| tableBody = tableBody.replace(/\s+/g, ' ').trim(); | |
| const columns: ColumnDefinition[] = []; | |
| const relationships: RelationshipDefinition[] = []; | |
| // Split by comma, but respect parentheses and strings | |
| const columnDefs = splitByComma(tableBody); | |
| for (const colDef of columnDefs) { | |
| const trimmed = colDef.trim(); | |
| // Skip empty and comments | |
| if (!trimmed || trimmed.startsWith('--')) { | |
| continue; | |
| } | |
| // Check for CONSTRAINT-based foreign keys | |
| const constraintMatch = trimmed.match( | |
| /^constraint\s+["']?(\w+)["']?\s+foreign\s+key\s*\(([^)]+)\)\s+references\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(([^)]+)\)/i | |
| ); | |
| if (constraintMatch) { | |
| const constraintName = constraintMatch[1]; | |
| const columns = constraintMatch[2].split(',').map((c) => c.trim().replace(/["']/g, '')); | |
| const refSchema = constraintMatch[3]; | |
| const refTable = constraintMatch[4]; | |
| const refColumns = constraintMatch[5] | |
| .split(',') | |
| .map((c) => c.trim().replace(/["']/g, '')); | |
| const referencedRelation = refSchema ? `${refSchema}.${refTable}` : refTable; | |
| relationships.push({ | |
| foreignKeyName: constraintName, | |
| columns: columns, | |
| isOneToOne: false, | |
| referencedRelation: referencedRelation, | |
| referencedColumns: refColumns | |
| }); | |
| continue; | |
| } | |
| // Skip other constraint definitions and SQL keywords | |
| if ( | |
| /^(primary\s+key|foreign\s+key|unique\s*\(|check\s*\(|case\s+when|when\s+|then\s+|else\s+|end\s*$|otherwise)/i.test( | |
| trimmed | |
| ) | |
| ) { | |
| continue; | |
| } | |
| const col = parseColumnDefinition(trimmed, tableName); | |
| if (col) { | |
| columns.push(col); | |
| // If column has inline foreign key, add to relationships | |
| if (col.foreignKey) { | |
| const fkeyName = `${tableName}_${col.name}_fkey`; | |
| const referencedRelation = col.foreignKey.schema | |
| ? `${col.foreignKey.schema}.${col.foreignKey.table}` | |
| : col.foreignKey.table; | |
| relationships.push({ | |
| foreignKeyName: fkeyName, | |
| columns: [col.name], | |
| isOneToOne: col.isUnique, | |
| referencedRelation: referencedRelation, | |
| referencedColumns: [col.foreignKey.column] | |
| }); | |
| } | |
| } | |
| } | |
| if (columns.length === 0) { | |
| return null; | |
| } | |
| return { | |
| schema: tableSchema, | |
| name: tableName, | |
| columns, | |
| relationships, | |
| indexes: [] // Will be populated later | |
| }; | |
| } | |
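| // Illustrative example: | |
| //   create table "orders" ( | |
| //     "id" uuid primary key, | |
| //     "user_id" uuid not null references "users" ("id"), | |
| //     "total" numeric(10,2) default 0 | |
| //   ); | |
| // yields a TableDefinition with three columns and one inline relationship | |
| // (orders_user_id_fkey -> users.id); indexes are attached in a later pass. | |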
| /** | |
| * Split string by comma, respecting parentheses and quotes | |
| * @param str - Input string | |
| * @returns Array of split parts | |
| */ | |
| function splitByComma(str: string): string[] { | |
| const parts: string[] = []; | |
| let current = ''; | |
| let parenDepth = 0; | |
| let inString = false; | |
| let stringChar: string | null = null; | |
| for (let i = 0; i < str.length; i++) { | |
| const char = str[i]; | |
| const prevChar = i > 0 ? str[i - 1] : ''; | |
| // Handle string state | |
| if ((char === '"' || char === "'") && prevChar !== '\\') { | |
| if (!inString) { | |
| inString = true; | |
| stringChar = char; | |
| } else if (char === stringChar) { | |
| inString = false; | |
| stringChar = null; | |
| } | |
| } | |
| // Handle parentheses (only when not in string) | |
| if (!inString) { | |
| if (char === '(') { | |
| parenDepth++; | |
| } | |
| if (char === ')') { | |
| parenDepth--; | |
| } | |
| // Split on comma when we're at top level | |
| if (char === ',' && parenDepth === 0) { | |
| parts.push(current.trim()); | |
| current = ''; | |
| continue; | |
| } | |
| } | |
| current += char; | |
| } | |
| // Don't forget the last part | |
| if (current.trim()) { | |
| parts.push(current.trim()); | |
| } | |
| return parts; | |
| } | |
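| // e.g. splitByComma('"amount" numeric(10,2) default 0, "note" text') | |
| //   returns ['"amount" numeric(10,2) default 0', '"note" text']; the comma | |
| //   inside numeric(10,2) is protected by the parenthesis depth counter. | |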
| /** | |
| * Parse a single column definition | |
| * @param colDef - Column definition string | |
| * @param tableName - Optional table name for context | |
| * @returns Parsed ColumnDefinition or null if parsing failed | |
| */ | |
| function parseColumnDefinition(colDef: string, tableName?: string): ColumnDefinition | null { | |
| const trimmed = colDef.trim(); | |
| // Extract column name - handle quoted names properly | |
| // Matches: "column_name" type ... or column_name type ... | |
| let nameMatch = trimmed.match(/^"([^"]+)"\s+(.+)$/); | |
| if (!nameMatch) { | |
| nameMatch = trimmed.match(/^'([^']+)'\s+(.+)$/); | |
| } | |
| if (!nameMatch) { | |
| nameMatch = trimmed.match(/^(\w+)\s+(.+)$/); | |
| } | |
| if (!nameMatch) { | |
| return null; | |
| } | |
| const colName = nameMatch[1]; | |
| const rest = nameMatch[2].trim(); | |
| // Extract type - this is tricky because types can be complex like: | |
| // - text | |
| // - uuid | |
| // - "loadType" (quoted custom type) | |
| // - timestamp with time zone | |
| // - character varying(255) | |
| // - numeric(10,2) | |
| // - text[] | |
| // - jsonb | |
| // Try to match various type patterns | |
| let colType = ''; | |
| let remainingConstraints = rest; | |
| // Pattern 0: Quoted type name (for custom types like enums) | |
| // Handle both simple quoted types and schema-qualified types like "schema"."type_name" | |
| const quotedTypeMatch = rest.match(/^(?:["']([^"']+)["']\.)?["']([^"']+)["']/); | |
| if (quotedTypeMatch) { | |
| // quotedTypeMatch[1] is the schema (if present) | |
| // quotedTypeMatch[2] is the type name | |
| // Use just the type name without schema prefix for enum matching | |
| colType = quotedTypeMatch[2] || quotedTypeMatch[1]; | |
| remainingConstraints = rest.substring(quotedTypeMatch[0].length).trim(); | |
| } | |
| // Pattern 1: Multi-word types (timestamp with time zone, double precision, etc.) | |
| else { | |
| const multiWordTypes = [ | |
| 'timestamp with time zone', | |
| 'timestamp without time zone', | |
| 'time with time zone', | |
| 'time without time zone', | |
| 'double precision', | |
| 'character varying' | |
| ]; | |
| for (const multiType of multiWordTypes) { | |
| if (rest.toLowerCase().startsWith(multiType)) { | |
| colType = multiType; | |
| remainingConstraints = rest.substring(multiType.length).trim(); | |
| // Check for size/precision like (255) or (10,2) | |
| const sizeMatch = remainingConstraints.match(/^\s*(\([^)]+\))/); | |
| if (sizeMatch) { | |
| colType += sizeMatch[1]; | |
| remainingConstraints = remainingConstraints | |
| .substring(sizeMatch[0].length) | |
| .trim(); | |
| } | |
| break; | |
| } | |
| } | |
| } | |
| // Pattern 2: Single word type (possibly with size/precision) | |
| if (!colType) { | |
| const typeMatch = rest.match(/^(\w+)(\s*\([^)]+\))?/); | |
| if (typeMatch) { | |
| colType = typeMatch[1] + (typeMatch[2] || ''); | |
| remainingConstraints = rest.substring(typeMatch[0].length).trim(); | |
| } else { | |
| // Couldn't parse type | |
| return null; | |
| } | |
| } | |
| // Check for array notation | |
| const isArray = | |
| colType.includes('[]') || | |
| remainingConstraints.toLowerCase().startsWith('[]') || | |
| /\barray\b/i.test(remainingConstraints); | |
| if (isArray) { | |
| colType = colType.replace('[]', '').trim(); | |
| remainingConstraints = remainingConstraints.replace(/^\s*\[\]/, '').trim(); | |
| } | |
| // Parse constraints from remaining string | |
| const constraintsLower = remainingConstraints.toLowerCase(); | |
| const notNull = /\bnot\s+null\b/.test(constraintsLower); | |
| const isPrimaryKey = /\bprimary\s+key\b/.test(constraintsLower); | |
| const isUnique = /\bunique\b/.test(constraintsLower) && !isPrimaryKey; | |
| // Extract default value (everything between DEFAULT and next constraint or end) | |
| let defaultValue: string | null = null; | |
| const defaultMatch = remainingConstraints.match( | |
| /\bdefault\s+(.+?)(?:\s+(?:not\s+null|primary\s+key|unique|references|check|constraint)|$)/i | |
| ); | |
| if (defaultMatch) { | |
| defaultValue = defaultMatch[1].trim(); | |
| } | |
| // Extract foreign key reference (inline) | |
| // Format: references table_name (column_name) | |
| let foreignKey: { table: string; column: string; schema?: string } | undefined; | |
| const referencesMatch = remainingConstraints.match( | |
| /\breferences\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(["']?(\w+)["']?\)/i | |
| ); | |
| if (referencesMatch) { | |
| foreignKey = { | |
| schema: referencesMatch[1], | |
| table: referencesMatch[2], | |
| column: referencesMatch[3] | |
| }; | |
| } | |
| return { | |
| name: colName, | |
| type: colType, | |
| nullable: !notNull && !isPrimaryKey, | |
| defaultValue, | |
| isArray, | |
| isPrimaryKey, | |
| isUnique, | |
| foreignKey | |
| }; | |
| } | |
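| // Illustrative example: parseColumnDefinition('"email" text not null unique') | |
| //   returns { name: 'email', type: 'text', nullable: false, defaultValue: null, | |
| //     isArray: false, isPrimaryKey: false, isUnique: true, foreignKey: undefined } | |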
| /** | |
| * ============================================================================ | |
| * SQL PARSING - ENUMS | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Parsed enum definition or null if none found | |
| */ | |
| function parseEnumDefinition(sqlContent: string, schema: string = 'public'): EnumDefinition | null { | |
| // Match CREATE TYPE ... AS ENUM (with or without trailing semicolon) | |
| const enumMatch = sqlContent.match( | |
| /create\s+type\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s+as\s+enum\s*\(([\s\S]*?)\)/i | |
| ); | |
| if (!enumMatch) { | |
| return null; | |
| } | |
| const enumSchema = enumMatch[1] || schema; | |
| const enumName = enumMatch[2]; | |
| const valuesStr = enumMatch[3]; | |
| // Extract enum values | |
| const values: string[] = []; | |
| const valueMatches = valuesStr.matchAll(/['"]([^'"]+)['"]/g); | |
| for (const match of valueMatches) { | |
| values.push(match[1]); | |
| } | |
| if (values.length === 0) { | |
| return null; | |
| } | |
| return { | |
| schema: enumSchema, | |
| name: enumName, | |
| values | |
| }; | |
| } | |
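| // Illustrative example: | |
| //   create type "order_status" as enum ('pending', 'shipped', 'delivered'); | |
| // parses to { schema: 'public', name: 'order_status', | |
| //   values: ['pending', 'shipped', 'delivered'] } | |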
| /** | |
| * ============================================================================ | |
| * SQL PARSING - FUNCTIONS | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Parsed function definition or null if none found | |
| */ | |
| function parseFunctionDefinition( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): FunctionDefinition | null { | |
| // Match CREATE FUNCTION - capture return type but stop at SQL keywords | |
| // Handles: json, jsonb, text, uuid, timestamp with time zone, setof record, table(...), etc. | |
| const functionMatch = sqlContent.match( | |
| /create\s+(?:or\s+replace\s+)?function\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s*\(([^)]*)\)\s+returns\s+(.*?)(?=\s+(?:language|as|security|stable|immutable|volatile|strict|leakproof|called|parallel|cost|rows|support|window|set|begin))/is | |
| ); | |
| if (!functionMatch) { | |
| return null; | |
| } | |
| const funcSchema = functionMatch[1] || schema; | |
| const funcName = functionMatch[2]; | |
| const argsStr = functionMatch[3]; | |
| // Strip quotes from return type (handles both "json" and json) | |
| const returnsType = functionMatch[4].trim().replace(/^["']|["']$/g, ''); | |
| // Parse arguments | |
| const args: Array<{ name: string; type: string; hasDefault?: boolean }> = []; | |
| if (argsStr && argsStr.trim().length > 0) { | |
| const argParts = argsStr.split(','); | |
| for (const argPart of argParts) { | |
| const trimmed = argPart.trim(); | |
| if (!trimmed) { | |
| continue; | |
| } | |
| // Match: arg_name arg_type [default value] | |
| // Handles both quoted and unquoted names/types, including types with parentheses | |
| // Examples: "role_name" "text", role_name text, amount numeric(10,2) | |
| let argMatch = trimmed.match(/^"(\w+)"\s+"([\w\s[\]().,]+)"(?:\s+default\s+.+)?$/i); | |
| if (!argMatch) { | |
| argMatch = trimmed.match(/^"(\w+)"\s+([\w\s[\]().,]+?)(?:\s+default\s+.+)?$/i); | |
| } | |
| if (!argMatch) { | |
| argMatch = trimmed.match(/^(\w+)\s+"([\w\s[\]().,]+)"(?:\s+default\s+.+)?$/i); | |
| } | |
| if (!argMatch) { | |
| argMatch = trimmed.match(/^(\w+)\s+([\w\s[\]().,]+?)(?:\s+default\s+.+)?$/i); | |
| } | |
| if (argMatch) { | |
| const hasDefault = /\bdefault\b/i.test(trimmed); | |
| args.push({ | |
| name: argMatch[1], | |
| type: argMatch[2].trim(), | |
| hasDefault | |
| }); | |
| } | |
| } | |
| } | |
| return { | |
| schema: funcSchema, | |
| name: funcName, | |
| args, | |
| returns: returnsType | |
| }; | |
| } | |
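| // Illustrative example (function body elided): | |
| //   create function get_user_role(user_id uuid) returns text language sql ... | |
| // parses to { schema: 'public', name: 'get_user_role', | |
| //   args: [{ name: 'user_id', type: 'uuid', hasDefault: false }], returns: 'text' } | |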
| /** | |
| * ============================================================================ | |
| * SQL PARSING - COMPOSITE TYPES | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @param sqlContent - SQL content to parse | |
| * @param schema - Default schema name | |
| * @returns Parsed composite type definition or null if none found | |
| */ | |
| function parseCompositeType( | |
| sqlContent: string, | |
| schema: string = 'public' | |
| ): CompositeTypeDefinition | null { | |
| // Match CREATE TYPE ... AS (...) (with or without trailing semicolon) | |
| const typeMatch = sqlContent.match( | |
| /create\s+type\s+(?:["']?(\w+)["']?\.)?["']?(\w+)["']?\s+as\s*\(([\s\S]*?)\)/i | |
| ); | |
| if (!typeMatch) { | |
| return null; | |
| } | |
| const typeSchema = typeMatch[1] || schema; | |
| const typeName = typeMatch[2]; | |
| const attrsStr = typeMatch[3]; | |
| const attributes: Array<{ name: string; type: string }> = []; | |
| const lines = attrsStr.split(','); | |
| for (const line of lines) { | |
| const trimmed = line.trim(); | |
| const attrMatch = trimmed.match(/["']?(\w+)["']?\s+([\w\s[\]()]+)/i); | |
| if (attrMatch) { | |
| attributes.push({ | |
| name: attrMatch[1], | |
| type: attrMatch[2].trim() | |
| }); | |
| } | |
| } | |
| if (attributes.length === 0) { | |
| return null; | |
| } | |
| return { | |
| schema: typeSchema, | |
| name: typeName, | |
| attributes | |
| }; | |
| } | |
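| // Illustrative example: | |
| //   create type "address" as ("street" text, "city" text, "zip" text); | |
| // parses to { schema: 'public', name: 'address', attributes: [ | |
| //   { name: 'street', type: 'text' }, { name: 'city', type: 'text' }, | |
| //   { name: 'zip', type: 'text' } ] } | |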
| /** | |
| * ============================================================================ | |
| * PARSE ALL SQL FILES | |
| * ============================================================================ | |
| */ | |
| /** | |
| * @param filePaths - Array of SQL file paths to parse | |
| * @param schema - Default schema name | |
| * @param includeComments - Whether to parse comments | |
| * @returns Parsed definitions from all files | |
| */ | |
| function parseSqlFiles( | |
| filePaths: string[], | |
| schema: string, | |
| includeComments: boolean = true | |
| ): { | |
| tables: TableDefinition[]; | |
| enums: EnumDefinition[]; | |
| functions: FunctionDefinition[]; | |
| compositeTypes: CompositeTypeDefinition[]; | |
| } { | |
| log('\n📊 Step 1: Parsing SQL schema files...', 'bright'); | |
| const tables: TableDefinition[] = []; | |
| const enums: EnumDefinition[] = []; | |
| const functions: FunctionDefinition[] = []; | |
| const compositeTypes: CompositeTypeDefinition[] = []; | |
| const indexesByTable: Map<string, IndexDefinition[]> = new Map(); | |
| const alterTableForeignKeys: Array<{ | |
| tableName: string; | |
| relationship: RelationshipDefinition; | |
| }> = []; | |
| const alterTableUniques: Array<{ tableName: string; columns: string[] }> = []; | |
| for (const filePath of filePaths) { | |
| try { | |
| const content = readFileSync(filePath, 'utf8'); | |
| const fileName = filePath.split(/[/\\]/).pop() || filePath; | |
| // Debug: Show file info | |
| log(`\n Processing: ${fileName}`, 'cyan'); | |
| log(` Size: ${content.length} bytes`, 'cyan'); | |
| // Split by statement (semicolon followed by newline or end of string) | |
| const statements = content.split(/;(?:\s*\n|$)/); | |
| log(` Statements: ${statements.length}`, 'cyan'); | |
| let fileTableCount = 0; | |
| let fileEnumCount = 0; | |
| let fileFunctionCount = 0; | |
| let fileCompositeCount = 0; | |
| let fileRelationshipCount = 0; | |
| let fileIndexCount = 0; | |
| let fileAlterFKCount = 0; | |
| let fileAlterUniqueCount = 0; | |
| let fileTableCommentCount = 0; | |
| let fileColumnCommentCount = 0; | |
| // Storage for comments to attach later (only if enabled) | |
| const tableComments = includeComments ? new Map<string, string>() : null; | |
| const columnComments = includeComments ? new Map<string, Map<string, string>>() : null; | |
| for (const statement of statements) { | |
| const trimmed = statement.trim(); | |
| if (!trimmed) { | |
| continue; | |
| } | |
| // Try parsing as table | |
| const table = parseTableDefinition(trimmed, schema); | |
| if (table) { | |
| tables.push(table); | |
| fileTableCount++; | |
| fileRelationshipCount += table.relationships.length; | |
| log( | |
| ` ✓ Table: ${table.name} (${table.columns.length} columns, ${table.relationships.length} relationships)`, | |
| 'green' | |
| ); | |
| continue; | |
| } | |
| // Try parsing as enum | |
| const enumDef = parseEnumDefinition(trimmed, schema); | |
| if (enumDef) { | |
| enums.push(enumDef); | |
| fileEnumCount++; | |
| log(` ✓ Enum: ${enumDef.name} (${enumDef.values.length} values)`, 'green'); | |
| continue; | |
| } | |
| // Try parsing as function | |
| const funcDef = parseFunctionDefinition(trimmed, schema); | |
| if (funcDef) { | |
| functions.push(funcDef); | |
| fileFunctionCount++; | |
| log(` ✓ Function: ${funcDef.name} (${funcDef.args.length} args)`, 'green'); | |
| continue; | |
| } | |
| // Try parsing as composite type | |
| const compositeDef = parseCompositeType(trimmed, schema); | |
| if (compositeDef) { | |
| compositeTypes.push(compositeDef); | |
| fileCompositeCount++; | |
| log(` ✓ Composite: ${compositeDef.name}`, 'green'); | |
| continue; | |
| } | |
| // Try parsing as index | |
| const indexDef = parseIndexDefinition(trimmed, schema); | |
| if (indexDef) { | |
| if (!indexesByTable.has(indexDef.tableName)) { | |
| indexesByTable.set(indexDef.tableName, []); | |
| } | |
| indexesByTable.get(indexDef.tableName)!.push(indexDef); | |
| fileIndexCount++; | |
| const uniqueStr = indexDef.isUnique ? ' (unique)' : ''; | |
| const methodStr = indexDef.method ? ` using ${indexDef.method}` : ''; | |
| log( | |
| ` ✓ Index: ${indexDef.name}${uniqueStr} on ${indexDef.tableName}${methodStr}`, | |
| 'green' | |
| ); | |
| continue; | |
| } | |
| // Try parsing as ALTER TABLE ADD CONSTRAINT FOREIGN KEY | |
| const alterFK = parseAlterTableForeignKey(trimmed, schema); | |
| if (alterFK) { | |
| alterTableForeignKeys.push(alterFK); | |
| fileAlterFKCount++; | |
| log( | |
| ` ✓ Alter FK: ${alterFK.relationship.foreignKeyName} on ${alterFK.tableName}`, | |
| 'green' | |
| ); | |
| continue; | |
| } | |
| // Try parsing as ALTER TABLE ADD UNIQUE | |
| const alterUnique = parseAlterTableUnique(trimmed, schema); | |
| if (alterUnique) { | |
| alterTableUniques.push(alterUnique); | |
| fileAlterUniqueCount++; | |
| log( | |
| ` ✓ Alter UNIQUE on ${alterUnique.tableName} (${alterUnique.columns.join(', ')})`, | |
| 'green' | |
| ); | |
| continue; | |
| } | |
| // Try parsing as table comment (only if enabled) | |
| if (includeComments) { | |
| const tableComment = parseTableComment(trimmed, schema); | |
| if (tableComment) { | |
| tableComments!.set(tableComment.tableName, tableComment.comment); | |
| fileTableCommentCount++; | |
| log(` ✓ Table comment: ${tableComment.tableName}`, 'green'); | |
| continue; | |
| } | |
| // Try parsing as column comment | |
| const columnComment = parseColumnComment(trimmed, schema); | |
| if (columnComment) { | |
| if (!columnComments!.has(columnComment.tableName)) { | |
| columnComments!.set(columnComment.tableName, new Map()); | |
| } | |
| columnComments! | |
| .get(columnComment.tableName)! | |
| .set(columnComment.columnName, columnComment.comment); | |
| fileColumnCommentCount++; | |
| log( | |
| ` ✓ Column comment: ${columnComment.tableName}.${columnComment.columnName}`, | |
| 'green' | |
| ); | |
| continue; | |
| } | |
| } | |
| } | |
| if ( | |
| fileTableCount === 0 && | |
| fileEnumCount === 0 && | |
| fileFunctionCount === 0 && | |
| fileCompositeCount === 0 && | |
| fileIndexCount === 0 && | |
| fileAlterFKCount === 0 && | |
| fileAlterUniqueCount === 0 && | |
| fileTableCommentCount === 0 && | |
| fileColumnCommentCount === 0 | |
| ) { | |
| log(` ⚠️ No objects found in this file`, 'yellow'); | |
| } else { | |
| const summary: string[] = []; | |
| if (fileIndexCount > 0) { | |
| summary.push(`${fileIndexCount} index(es)`); | |
| } | |
| if (fileFunctionCount > 0) { | |
| summary.push(`${fileFunctionCount} function(s)`); | |
| } | |
| if (fileAlterFKCount > 0) { | |
| summary.push(`${fileAlterFKCount} ALTER FK(s)`); | |
| } | |
| if (fileAlterUniqueCount > 0) { | |
| summary.push(`${fileAlterUniqueCount} ALTER UNIQUE(s)`); | |
| } | |
| if (fileTableCommentCount > 0) { | |
| summary.push(`${fileTableCommentCount} table comment(s)`); | |
| } | |
| if (fileColumnCommentCount > 0) { | |
| summary.push(`${fileColumnCommentCount} column comment(s)`); | |
| } | |
| if (summary.length > 0) { | |
| log(` Found: ${summary.join(', ')}`, 'cyan'); | |
| } | |
| } | |
| // Attach comments to tables and columns (only if enabled) | |
| if (includeComments && tableComments && columnComments) { | |
| for (const table of tables) { | |
| // Attach table comment | |
| const tableComment = tableComments.get(table.name); | |
| if (tableComment) { | |
| table.comment = tableComment; | |
| } | |
| // Attach column comments | |
| const colComments = columnComments.get(table.name); | |
| if (colComments) { | |
| for (const column of table.columns) { | |
| const comment = colComments.get(column.name); | |
| if (comment) { | |
| column.comment = comment; | |
| } | |
| } | |
| } | |
| } | |
| } | |
| } catch (error) { | |
| const errorMsg = error instanceof Error ? error.message : String(error); | |
| log(` ✗ Error parsing ${filePath}: ${errorMsg}`, 'red'); | |
| } | |
| } | |
| // Attach indexes to their tables | |
| for (const table of tables) { | |
| const tableIndexes = indexesByTable.get(table.name) || []; | |
| table.indexes = tableIndexes; | |
| } | |
| // Apply ALTER TABLE UNIQUE constraints to columns | |
| for (const alterUnique of alterTableUniques) { | |
| const table = tables.find((t) => t.name === alterUnique.tableName); | |
| if (table) { | |
| for (const colName of alterUnique.columns) { | |
| const column = table.columns.find((c) => c.name === colName); | |
| if (column) { | |
| column.isUnique = true; | |
| } | |
| } | |
| } | |
| } | |
| // Attach ALTER TABLE foreign keys to their tables | |
| for (const alterFK of alterTableForeignKeys) { | |
| const table = tables.find((t) => t.name === alterFK.tableName); | |
| if (table) { | |
| const fkColumns = alterFK.relationship.columns; | |
| let isOneToOne = false; | |
| // For single-column foreign keys | |
| if (fkColumns.length === 1) { | |
| const colName = fkColumns[0]; | |
| const column = table.columns.find((c) => c.name === colName); | |
| const columnHasUnique = column?.isUnique || column?.isPrimaryKey; | |
| // Check if there's a unique index on this single column | |
| const hasUniqueIndex = table.indexes.some( | |
| (idx) => idx.isUnique && idx.columns.length === 1 && idx.columns[0] === colName | |
| ); | |
| isOneToOne = columnHasUnique || hasUniqueIndex; | |
| } | |
| // For multi-column foreign keys | |
| else if (fkColumns.length > 1) { | |
| // Check if there's a composite unique constraint/index that exactly matches the FK columns | |
| const hasMatchingCompositeUnique = table.indexes.some((idx) => { | |
| if (!idx.isUnique || idx.columns.length !== fkColumns.length) { | |
| return false; | |
| } | |
| // Check if the index columns exactly match the FK columns (order doesn't matter) | |
| const indexColsSet = new Set(idx.columns); | |
| const fkColsSet = new Set(fkColumns); | |
| if (indexColsSet.size !== fkColsSet.size) { | |
| return false; | |
| } | |
| for (const col of fkColsSet) { | |
| if (!indexColsSet.has(col)) { | |
| return false; | |
| } | |
| } | |
| return true; | |
| }); | |
| isOneToOne = hasMatchingCompositeUnique; | |
| } | |
| // Update isOneToOne if we detected a unique constraint or index | |
| if (isOneToOne) { | |
| alterFK.relationship.isOneToOne = true; | |
| if (VERBOSE_LOGGING) { | |
| log( | |
| ` ✓ Detected one-to-one relationship: ${alterFK.tableName}.${fkColumns.join(', ')} -> ${alterFK.relationship.referencedRelation}`, | |
| 'cyan' | |
| ); | |
| } | |
| } | |
| table.relationships.push(alterFK.relationship); | |
| } else { | |
| log( | |
| ` ⚠️ Warning: ALTER TABLE FK references non-existent table: ${alterFK.tableName}`, | |
| 'yellow' | |
| ); | |
| } | |
| } | |
| const totalRelationships = tables.reduce((sum, t) => sum + t.relationships.length, 0); | |
| if (totalRelationships > 0) { | |
| log(` Relationships: ${totalRelationships}`, 'green'); | |
| if (alterTableForeignKeys.length > 0) { | |
| log(` (including ${alterTableForeignKeys.length} from ALTER TABLE)`, 'cyan'); | |
| } | |
| } | |
| const tablesWithComments = tables.filter((t) => t.comment).length; | |
| const totalColumnComments = tables.reduce( | |
| (sum, t) => sum + t.columns.filter((c) => c.comment).length, | |
| 0 | |
| ); | |
| if (tablesWithComments > 0 || totalColumnComments > 0) { | |
| log( | |
| ` Comments: ${tablesWithComments} table(s), ${totalColumnComments} column(s)`, | |
| 'green' | |
| ); | |
| } | |
| const totalIndexes = tables.reduce((sum, t) => sum + t.indexes.length, 0); | |
| if (totalIndexes > 0) { | |
| log(` Indexes: ${totalIndexes}`, 'green'); | |
| } | |
| if (enums.length > 0) { | |
| log(` Enums: ${enums.length}`, 'green'); | |
| } | |
| if (functions.length > 0) { | |
| log(` Functions: ${functions.length}`, 'green'); | |
| } | |
| if (compositeTypes.length > 0) { | |
| log(` Composite types: ${compositeTypes.length}`, 'green'); | |
| } | |
| return { tables, enums, functions, compositeTypes }; | |
| } | |
| /** | |
| * ============================================================================ | |
| * GEOMETRIC TYPE DETECTION | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Scan all table columns and collect which PostgreSQL geometric types are in use. | |
| * @param tables - Array of table definitions | |
| * @returns Set of detected geometric types used in the tables | |
| */ | |
| function detectGeometricTypes(tables: TableDefinition[]): Set<string> { | |
| const geometricTypes = new Set<string>([ | |
| 'point', | |
| 'line', | |
| 'lseg', | |
| 'box', | |
| 'path', | |
| 'polygon', | |
| 'circle' | |
| ]); | |
| const usedTypes = new Set<string>(); | |
| for (const table of tables) { | |
| for (const column of table.columns) { | |
| const baseType = column.type | |
| .toLowerCase() | |
| .replace(/\([^)]*\)/, '') | |
| .trim(); | |
| if (geometricTypes.has(baseType)) { | |
| usedTypes.add(baseType); | |
| } | |
| } | |
| } | |
| return usedTypes; | |
| } | |
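| /* | |
| * Illustrative sketch (assumed column shapes, not executed): a schema whose | |
| * columns include { name: 'location', type: 'point' } and | |
| * { name: 'service_area', type: 'polygon' } would yield | |
| * new Set(['point', 'polygon']), while plain scalar columns contribute nothing. | |
| */ | |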
| /** | |
| * ============================================================================ | |
| * TYPE MAPPING | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Map a PostgreSQL type to its TypeScript equivalent, resolving enums and | |
| * (optionally) structured geometric types. | |
| * @param pgType - PostgreSQL data type | |
| * @param isArray - Whether the type is an array | |
| * @param schema - Schema name for enums | |
| * @param availableEnums - Set of available enum type names | |
| * @param useGeometricTypes - Whether to use geometric types | |
| * @returns Mapped TypeScript type as string | |
| */ | |
| function mapPostgresTypeToTypeScript( | |
| pgType: string, | |
| isArray: boolean, | |
| schema: string = 'public', | |
| availableEnums: Set<string> = new Set(), | |
| useGeometricTypes: boolean = false | |
| ): string { | |
| // Remove any size/precision for built-in type matching | |
| const baseTypeName = pgType | |
| .toLowerCase() | |
| .replace(/\([^)]*\)/, '') | |
| .trim(); | |
| const typeMap: Record<string, string> = { | |
| smallint: 'number', | |
| integer: 'number', | |
| bigint: 'number', | |
| int2: 'number', | |
| int4: 'number', | |
| int8: 'number', | |
| decimal: 'number', | |
| numeric: 'number', | |
| real: 'number', | |
| 'double precision': 'number', | |
| float4: 'number', | |
| float8: 'number', | |
| money: 'number', | |
| smallserial: 'number', | |
| serial: 'number', | |
| bigserial: 'number', | |
| 'character varying': 'string', | |
| varchar: 'string', | |
| character: 'string', | |
| char: 'string', | |
| text: 'string', | |
| citext: 'string', | |
| boolean: 'boolean', | |
| bool: 'boolean', | |
| timestamp: 'string', | |
| 'timestamp without time zone': 'string', | |
| 'timestamp with time zone': 'string', | |
| timestamptz: 'string', | |
| date: 'string', | |
| time: 'string', | |
| 'time without time zone': 'string', | |
| 'time with time zone': 'string', | |
| timetz: 'string', | |
| interval: 'string', | |
| json: 'Json', | |
| jsonb: 'Json', | |
| uuid: 'string', | |
| bytea: 'string', | |
| inet: 'string', | |
| cidr: 'string', | |
| macaddr: 'string', | |
| macaddr8: 'string', | |
| void: 'void', | |
| trigger: 'unknown', | |
| event_trigger: 'unknown', | |
| record: 'unknown' | |
| }; | |
| // Geometric types - use exported types if enabled, otherwise string | |
| const geometricTypeMap: Record<string, string> = { | |
| point: useGeometricTypes ? 'Point' : 'string', | |
| line: useGeometricTypes ? 'Line' : 'string', | |
| lseg: useGeometricTypes ? 'LineSegment' : 'string', | |
| box: useGeometricTypes ? 'Box' : 'string', | |
| path: useGeometricTypes ? 'Path' : 'string', | |
| polygon: useGeometricTypes ? 'Polygon' : 'string', | |
| circle: useGeometricTypes ? 'Circle' : 'string' | |
| }; | |
| let baseType: string; | |
| // Check if it's a built-in type (case-insensitive) | |
| if (typeMap[baseTypeName]) { | |
| baseType = typeMap[baseTypeName]; | |
| } | |
| // Check if it's a geometric type | |
| else if (geometricTypeMap[baseTypeName]) { | |
| baseType = geometricTypeMap[baseTypeName]; | |
| } | |
| // Check if it's a custom enum type (case-sensitive - preserve original casing) | |
| else if (availableEnums.has(pgType)) { | |
| // Use the original casing from the column definition | |
| baseType = `Database["${schema}"]["Enums"]["${pgType}"]`; | |
| } | |
| // Unknown type | |
| else { | |
| baseType = 'unknown'; | |
| } | |
| return isArray ? `${baseType}[]` : baseType; | |
| } | |
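| // Illustrative calls (hypothetical inputs, shown for reference only): | |
| //   mapPostgresTypeToTypeScript('varchar(255)', false)  -> "string" | |
| //   mapPostgresTypeToTypeScript('integer', true)        -> "number[]" | |
| //   mapPostgresTypeToTypeScript('point', false, 'public', new Set(), true) -> "Point" | |
| //   mapPostgresTypeToTypeScript('user_role', false, 'public', new Set(['user_role'])) | |
| //     -> 'Database["public"]["Enums"]["user_role"]' (assuming a user_role enum exists) | |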
| /** | |
| * ============================================================================ | |
| * NAMING CONVENTION | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Convert an identifier to the requested naming convention. | |
| * @param str - Input string | |
| * @param convention - Naming convention to apply | |
| * @returns Converted string | |
| */ | |
| function convertCase(str: string, convention: NamingConvention): string { | |
| if (convention === 'preserve') { | |
| return str; | |
| } | |
| const words = str | |
| .replace(/([a-z])([A-Z])/g, '$1_$2') | |
| .toLowerCase() | |
| .split('_') | |
| .filter((w) => w.length > 0); | |
| switch (convention) { | |
| case 'PascalCase': | |
| return words.map((w) => w.charAt(0).toUpperCase() + w.slice(1).toLowerCase()).join(''); | |
| case 'camelCase': | |
| return words | |
| .map((w, i) => | |
| i === 0 ? w.toLowerCase() : w.charAt(0).toUpperCase() + w.slice(1).toLowerCase() | |
| ) | |
| .join(''); | |
| case 'snake_case': | |
| return words.map((w) => w.toLowerCase()).join('_'); | |
| case 'SCREAMING_SNAKE_CASE': | |
| return words.map((w) => w.toUpperCase()).join('_'); | |
| default: | |
| return str; | |
| } | |
| } | |
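| // Illustrative conversions (hypothetical identifiers): | |
| //   convertCase('user_profiles', 'PascalCase')          -> "UserProfiles" | |
| //   convertCase('createdAt', 'snake_case')              -> "created_at" | |
| //   convertCase('order_items', 'SCREAMING_SNAKE_CASE')  -> "ORDER_ITEMS" | |
| //   convertCase('anything', 'preserve')                 -> "anything" (unchanged) | |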
| /** | |
| * ============================================================================ | |
| * JSONB PARSING | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Extract JSONB columns that declare a DEFAULT value from CREATE TABLE statements. | |
| * @param sqlContent - SQL content to parse | |
| * @param fileName - Name of the file containing the SQL content | |
| * @returns Array of parsed JsonbColumn objects | |
| */ | |
| function parseJsonbColumns(sqlContent: string, fileName: string): JsonbColumn[] { | |
| const jsonbColumns: JsonbColumn[] = []; | |
| // Iterate over every CREATE TABLE statement so files defining multiple | |
| // tables are scanned in full | |
| const createTablePattern = | |
| /create table (?:if not exists )?["']?(\w+)["']?\s*\(([\s\S]*?)\);/gi; | |
| let createTableMatch: RegExpExecArray | null; | |
| while ((createTableMatch = createTablePattern.exec(sqlContent)) !== null) { | |
| const tableName = createTableMatch[1]; | |
| const tableBody = createTableMatch[2]; | |
| const columnPattern = /(\w+)\s+jsonb(?:\s+not null)?\s+default\s+/gi; | |
| let match: RegExpExecArray | null; | |
| while ((match = columnPattern.exec(tableBody)) !== null) { | |
| const columnName = match[1]; | |
| const startPos = match.index + match[0].length; | |
| let defaultValue = ''; | |
| if (tableBody[startPos] === "'") { | |
| const endQuote = tableBody.indexOf("'", startPos + 1); | |
| if (endQuote !== -1) { | |
| defaultValue = tableBody.substring(startPos, endQuote + 1); | |
| if (tableBody.substring(endQuote + 1, endQuote + 8) === '::jsonb') { | |
| defaultValue += '::jsonb'; | |
| } | |
| } | |
| } else if ( | |
| tableBody.substring(startPos, startPos + 18).toLowerCase() === 'jsonb_build_object' | |
| ) { | |
| let depth = 0; | |
| let inString = false; | |
| let stringChar: string | null = null; | |
| let i = startPos; | |
| while (i < tableBody.length) { | |
| const char = tableBody[i]; | |
| if ((char === "'" || char === '"') && (i === 0 || tableBody[i - 1] !== '\\')) { | |
| if (!inString) { | |
| inString = true; | |
| stringChar = char; | |
| } else if (char === stringChar) { | |
| inString = false; | |
| stringChar = null; | |
| } | |
| } | |
| if (!inString) { | |
| if (char === '(') { | |
| depth++; | |
| } | |
| if (char === ')') { | |
| depth--; | |
| if (depth === 0) { | |
| defaultValue = tableBody.substring(startPos, i + 1); | |
| break; | |
| } | |
| } | |
| } | |
| i++; | |
| } | |
| } | |
| if (defaultValue) { | |
| jsonbColumns.push({ | |
| table: tableName, | |
| column: columnName, | |
| defaultValue: defaultValue, | |
| fileName: fileName | |
| }); | |
| } | |
| } | |
| } | |
| return jsonbColumns; | |
| } | |
| /** | |
| * Parse a SQL jsonb_build_object(...) expression into a plain JavaScript object. | |
| * @param sqlValue - SQL value string to parse | |
| * @returns Parsed object or null if parsing failed | |
| */ | |
| function parseJsonbBuildObject(sqlValue: string): Record<string, unknown> | null { | |
| try { | |
| const content = sqlValue.replace(/^jsonb_build_object\s*\(\s*/i, '').replace(/\s*\)$/, ''); | |
| const parts: string[] = []; | |
| let current = ''; | |
| let depth = 0; | |
| let inString = false; | |
| let stringChar: string | null = null; | |
| for (let i = 0; i < content.length; i++) { | |
| const char = content[i]; | |
| if ((char === "'" || char === '"') && (i === 0 || content[i - 1] !== '\\')) { | |
| if (!inString) { | |
| inString = true; | |
| stringChar = char; | |
| } else if (char === stringChar) { | |
| inString = false; | |
| stringChar = null; | |
| } | |
| } | |
| if (!inString) { | |
| if (char === '(') { | |
| depth++; | |
| } | |
| if (char === ')') { | |
| depth--; | |
| } | |
| if (char === ',' && depth === 0) { | |
| parts.push(current.trim()); | |
| current = ''; | |
| continue; | |
| } | |
| } | |
| current += char; | |
| } | |
| if (current.trim()) { | |
| parts.push(current.trim()); | |
| } | |
| const result: Record<string, unknown> = {}; | |
| for (let i = 0; i < parts.length; i += 2) { | |
| if (i + 1 >= parts.length) { | |
| break; | |
| } | |
| const key = parts[i].replace(/^['"]|['"]$/g, ''); | |
| let value = parts[i + 1].trim(); | |
| if (value.startsWith('jsonb_build_object(')) { | |
| result[key] = parseJsonbBuildObject(value); | |
| } else { | |
| value = value.replace(/^['"]|['"]$/g, ''); | |
| if (value === 'true' || value === 'false') { | |
| result[key] = value === 'true'; | |
| } else if (!isNaN(Number(value)) && value !== '') { | |
| result[key] = Number(value); | |
| } else { | |
| result[key] = value; | |
| } | |
| } | |
| } | |
| return result; | |
| } catch { | |
| return null; | |
| } | |
| } | |
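| // Illustrative parse (hypothetical default, not executed at runtime): | |
| //   parseJsonbBuildObject("jsonb_build_object('enabled', true, 'retries', 3)") | |
| //   -> { enabled: true, retries: 3 } | |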
| /** | |
| * Build the exported type name for a table/column pair. | |
| * @param table - Table name | |
| * @param column - Column name | |
| * @param convention - Naming convention | |
| * @returns Generated type name | |
| */ | |
| function generateTypeName(table: string, column: string, convention: NamingConvention): string { | |
| const combined = `${table}_${column}`; | |
| return convertCase(combined, convention); | |
| } | |
| /** | |
| * Infer a TypeScript type expression from a sample JSON value. | |
| * @param value - The value to infer the type from | |
| * @param indent - Current indentation level | |
| * @returns Inferred TypeScript type as string | |
| */ | |
| function inferTypeFromValue(value: unknown, indent = 0): string { | |
| const indentStr = ' '.repeat(indent); | |
| if (value === null || value === undefined) { | |
| return 'unknown'; | |
| } | |
| if (Array.isArray(value)) { | |
| if (value.length === 0) { | |
| return 'unknown[]'; | |
| } | |
| return `${inferTypeFromValue(value[0], indent)}[]`; | |
| } | |
| if (typeof value === 'object') { | |
| const entries = Object.entries(value).map( | |
| ([key, val]) => `${indentStr} ${key}: ${inferTypeFromValue(val, indent + 1)}` | |
| ); | |
| return `{\n${entries.join('\n')}\n${indentStr}}`; | |
| } | |
| if (typeof value === 'string') { | |
| return 'string'; | |
| } | |
| if (typeof value === 'number') { | |
| return 'number'; | |
| } | |
| if (typeof value === 'boolean') { | |
| return 'boolean'; | |
| } | |
| return 'unknown'; | |
| } | |
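| // Illustrative inference (hypothetical JSONB default, not executed): | |
| //   inferTypeFromValue({ theme: 'dark', tags: ['a'], count: 2 }) yields an | |
| //   object type listing theme: string, tags: string[], count: number, | |
| //   rendered across indented lines. | |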
| /** | |
| * Recursively infer a type, lifting nested objects into named helper types. | |
| * @param value - The value to extract nested types from | |
| * @param baseName - Base name for nested types | |
| * @param indent - Current indentation level | |
| * @param convention - Naming convention | |
| * @returns Object containing type definition and array of nested types | |
| */ | |
| function extractNestedTypes( | |
| value: unknown, | |
| baseName: string, | |
| indent = 0, | |
| convention: NamingConvention = 'preserve' | |
| ): { typeDefinition: string; nestedTypes: TypeDefinition[] } { | |
| const nestedTypes: TypeDefinition[] = []; | |
| if (value === null || value === undefined) { | |
| return { typeDefinition: 'unknown', nestedTypes: [] }; | |
| } | |
| if (Array.isArray(value)) { | |
| if (value.length === 0) { | |
| return { typeDefinition: 'unknown[]', nestedTypes: [] }; | |
| } | |
| const result = extractNestedTypes(value[0], `${baseName}_item`, indent, convention); | |
| return { | |
| typeDefinition: `${result.typeDefinition}[]`, | |
| nestedTypes: result.nestedTypes | |
| }; | |
| } | |
| if (typeof value === 'object') { | |
| const indentStr = ' '.repeat(indent); | |
| const entries: string[] = []; | |
| for (const [key, val] of Object.entries(value)) { | |
| if (val !== null && typeof val === 'object' && !Array.isArray(val)) { | |
| const nestedTypeName = convertCase(`${baseName}_${key}`, convention); | |
| const nestedResult = extractNestedTypes( | |
| val, | |
| `${baseName}_${key}`, | |
| indent + 1, | |
| convention | |
| ); | |
| nestedTypes.push({ | |
| table: '', | |
| column: '', | |
| name: nestedTypeName, | |
| typeDefinition: nestedResult.typeDefinition, | |
| nestedTypes: nestedResult.nestedTypes | |
| }); | |
| entries.push(`${indentStr} ${key}: ${nestedTypeName}`); | |
| } else { | |
| const result = extractNestedTypes( | |
| val, | |
| `${baseName}_${key}`, | |
| indent + 1, | |
| convention | |
| ); | |
| entries.push(`${indentStr} ${key}: ${result.typeDefinition}`); | |
| nestedTypes.push(...result.nestedTypes); | |
| } | |
| } | |
| return { | |
| typeDefinition: `{\n${entries.join('\n')}\n${indentStr}}`, | |
| nestedTypes | |
| }; | |
| } | |
| if (typeof value === 'string') { | |
| return { typeDefinition: 'string', nestedTypes: [] }; | |
| } | |
| if (typeof value === 'number') { | |
| return { typeDefinition: 'number', nestedTypes: [] }; | |
| } | |
| if (typeof value === 'boolean') { | |
| return { typeDefinition: 'boolean', nestedTypes: [] }; | |
| } | |
| return { typeDefinition: 'unknown', nestedTypes: [] }; | |
| } | |
| /** | |
| * Build a TypeDefinition for a JSONB column from its parsed default value. | |
| * @param column - JsonbColumn to generate type definition for | |
| * @param extractNested - Whether to extract nested types | |
| * @param convention - Naming convention | |
| * @returns Generated TypeDefinition | |
| */ | |
| function generateTypeDefinition( | |
| column: JsonbColumn, | |
| extractNested: boolean, | |
| convention: NamingConvention | |
| ): TypeDefinition { | |
| const typeName = generateTypeName(column.table, column.column, convention); | |
| if (!column.defaultValue) { | |
| return { | |
| table: column.table, | |
| column: column.column, | |
| name: typeName, | |
| typeDefinition: 'Record<string, unknown>', | |
| comment: column.comment, | |
| nestedTypes: [] | |
| }; | |
| } | |
| let jsonStructure: Record<string, unknown> | null = null; | |
| if (column.defaultValue.startsWith('jsonb_build_object')) { | |
| jsonStructure = parseJsonbBuildObject(column.defaultValue); | |
| } else if (column.defaultValue.match(/^'.*'(::jsonb)?$/)) { | |
| // Accept quoted JSON defaults with or without an explicit ::jsonb cast | |
| try { | |
| jsonStructure = JSON.parse(column.defaultValue.replace(/^'|'(::jsonb)?$/g, '')); | |
| } catch { | |
| // Failed to parse | |
| } | |
| } | |
| if (!jsonStructure) { | |
| return { | |
| table: column.table, | |
| column: column.column, | |
| name: typeName, | |
| typeDefinition: 'Record<string, unknown>', | |
| comment: column.comment, | |
| nestedTypes: [] | |
| }; | |
| } | |
| if (extractNested) { | |
| const result = extractNestedTypes( | |
| jsonStructure, | |
| `${column.table}_${column.column}`, | |
| 0, | |
| convention | |
| ); | |
| return { | |
| table: column.table, | |
| column: column.column, | |
| name: typeName, | |
| typeDefinition: result.typeDefinition, | |
| comment: column.comment, | |
| example: jsonStructure, | |
| nestedTypes: result.nestedTypes | |
| }; | |
| } | |
| return { | |
| table: column.table, | |
| column: column.column, | |
| name: typeName, | |
| typeDefinition: inferTypeFromValue(jsonStructure), | |
| comment: column.comment, | |
| example: jsonStructure, | |
| nestedTypes: [] | |
| }; | |
| } | |
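| // Illustrative example (hypothetical column): a JSONB column on table | |
| // 'profiles' named 'settings' with a defaultValue of '{"theme":"dark"}'::jsonb | |
| // produces a type named profiles_settings (under 'preserve') whose definition | |
| // is inferred from the parsed default, here an object with theme: string. | |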
| /** | |
| * Scan all configured schema files for JSONB columns and generate their types. | |
| * @param config - Generator configuration | |
| * @returns Array of generated TypeDefinition objects | |
| */ | |
| function scanSchemas(config: GeneratorConfig): TypeDefinition[] { | |
| log('\n🔍 Step 2: Scanning SQL schemas for JSONB columns...', 'bright'); | |
| log(` Scanning ${config.schemaPaths.length} schema file(s)`, 'cyan'); | |
| const allColumns: JsonbColumn[] = []; | |
| for (const schemaPath of config.schemaPaths) { | |
| try { | |
| const content = readFileSync(schemaPath, 'utf8'); | |
| const fileName = schemaPath.split('/').pop() || schemaPath; | |
| const columns = parseJsonbColumns(content, fileName); | |
| if (columns.length > 0) { | |
| log(` ✓ ${fileName}: ${columns.length} JSONB column(s)`, 'green'); | |
| allColumns.push(...columns); | |
| } | |
| } catch { | |
| log(` Warning: Could not read ${schemaPath}`, 'yellow'); | |
| } | |
| } | |
| if (allColumns.length === 0) { | |
| log(' ⚠️ No JSONB columns found', 'yellow'); | |
| return []; | |
| } | |
| log(` 📊 Total JSONB columns found: ${allColumns.length}`, 'green'); | |
| return allColumns.map((col) => | |
| generateTypeDefinition(col, config.extractNestedTypes, config.namingConvention) | |
| ); | |
| } | |
| /** | |
| * ============================================================================ | |
| * TYPE DEDUPLICATION | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Collapse whitespace so structurally identical type definitions compare equal. | |
| * @param typeDefinition - Type definition string | |
| * @returns Normalized type definition string | |
| */ | |
| function normalizeTypeDefinition(typeDefinition: string): string { | |
| return typeDefinition | |
| .replace(/\s+/g, ' ') | |
| .replace(/\s*([{}:,])\s*/g, '$1') | |
| .trim(); | |
| } | |
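| // Illustrative normalization: | |
| //   normalizeTypeDefinition('{\n  theme: string\n}')  -> "{theme:string}" | |
| // so two definitions that differ only in whitespace dedupe to one export. | |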
| /** | |
| * Flatten nested type definitions into a single list (no deduplication here). | |
| * @param types - Array of TypeDefinition objects | |
| * @returns Flattened array with nested types hoisted ahead of their parents | |
| */ | |
| function flattenTypes(types: TypeDefinition[]): TypeDefinition[] { | |
| const result: TypeDefinition[] = []; | |
| for (const type of types) { | |
| if (type.nestedTypes && type.nestedTypes.length > 0) { | |
| result.push(...flattenTypes(type.nestedTypes)); | |
| } | |
| result.push(type); | |
| } | |
| return result; | |
| } | |
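| // Note: a parent type with nested helper types flattens depth-first, so the | |
| // helpers appear ahead of the parent in the emitted file. The order is only | |
| // cosmetic; TypeScript type aliases do not require forward declarations. | |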
| /** | |
| * ============================================================================ | |
| * TYPE GENERATION | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Render index metadata as a TypeScript array literal. | |
| * @param indexes - Array of index definitions | |
| * @param convention - Naming convention | |
| * @param indentSize - Indentation size | |
| * @returns Generated index metadata string | |
| */ | |
| function generateIndexMetadata( | |
| indexes: IndexDefinition[], | |
| convention: NamingConvention, | |
| indentSize: number = 2 | |
| ): string { | |
| if (indexes.length === 0) { | |
| return '[]'; | |
| } | |
| const indent = ' '.repeat(indentSize); | |
| const indexDefs = indexes.map((idx) => { | |
| const columns = idx.columns.map((c) => `"${convertCase(c, convention)}"`).join(', '); | |
| const methodStr = idx.method ? `\n${indent.repeat(6)}method: "${idx.method}"` : ''; | |
| const whereStr = idx.whereClause | |
| ? `\n${indent.repeat(6)}where: "${idx.whereClause.replace(/"/g, '\\"')}"` | |
| : ''; | |
| return `${indent.repeat(5)}{ | |
| ${indent.repeat(6)}name: "${idx.name}" | |
| ${indent.repeat(6)}columns: [${columns}] | |
| ${indent.repeat(6)}isUnique: ${idx.isUnique}${methodStr}${whereStr} | |
| ${indent.repeat(5)}}`; | |
| }); | |
| return `[\n${indexDefs.join(',\n')}\n${indent.repeat(4)}]`; | |
| } | |
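| // Illustrative output (hypothetical index): { name: 'idx_users_email', | |
| // columns: ['email'], isUnique: true, method: 'btree' } renders roughly as | |
| //   [{ name: "idx_users_email" columns: ["email"] isUnique: true method: "btree" }] | |
| // using the same newline-separated field style as the Relationships blocks. | |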
| /** | |
| * Render one table's Row/Insert/Update/Relationships (and optional Indexes) block. | |
| * @param table - Table definition | |
| * @param convention - Naming convention | |
| * @param indentSize - Indentation size | |
| * @param includeIndexes - Whether to include index metadata | |
| * @param includeComments - Whether to include comments | |
| * @param availableEnums - Set of available enum type names | |
| * @param schema - Default schema name | |
| * @param useGeometricTypes - Whether to use geometric types | |
| * @returns Generated table type string | |
| */ | |
| function generateTableType( | |
| table: TableDefinition, | |
| convention: NamingConvention, | |
| indentSize: number = 2, | |
| includeIndexes: boolean = false, | |
| includeComments: boolean = true, | |
| availableEnums: Set<string> = new Set(), | |
| schema: string = 'public', | |
| useGeometricTypes: boolean = false | |
| ): string { | |
| const tableName = convertCase(table.name, convention); | |
| const indent = ' '.repeat(indentSize); | |
| // Filter out 'this' and 'constraint' columns | |
| const filteredColumns = table.columns.filter((col) => { | |
| const colNameLower = col.name.toLowerCase(); | |
| return colNameLower !== 'this' && colNameLower !== 'constraint'; | |
| }); | |
| const rowColumns = filteredColumns.map((col) => { | |
| const colName = convertCase(col.name, convention); | |
| const tsType = mapPostgresTypeToTypeScript( | |
| col.type, | |
| col.isArray, | |
| schema, | |
| availableEnums, | |
| useGeometricTypes | |
| ); | |
| const nullable = col.nullable ? ' | null' : ''; | |
| // Add JSDoc comment if available and enabled | |
| const commentLine = | |
| includeComments && col.comment ? `${indent.repeat(5)}/** ${col.comment} */\n` : ''; | |
| return `${commentLine}${indent.repeat(5)}${colName}: ${tsType}${nullable}`; | |
| }); | |
| const insertColumns = filteredColumns.map((col) => { | |
| const colName = convertCase(col.name, convention); | |
| const tsType = mapPostgresTypeToTypeScript( | |
| col.type, | |
| col.isArray, | |
| schema, | |
| availableEnums, | |
| useGeometricTypes | |
| ); | |
| const hasDefault = col.defaultValue !== null; | |
| const isOptional = hasDefault || col.nullable; | |
| const optionalMark = isOptional ? '?' : ''; | |
| const nullable = col.nullable ? ' | null' : ''; | |
| // Add JSDoc comment if available and enabled | |
| const commentLine = | |
| includeComments && col.comment ? `${indent.repeat(5)}/** ${col.comment} */\n` : ''; | |
| return `${commentLine}${indent.repeat(5)}${colName}${optionalMark}: ${tsType}${nullable}`; | |
| }); | |
| const updateColumns = filteredColumns.map((col) => { | |
| const colName = convertCase(col.name, convention); | |
| const tsType = mapPostgresTypeToTypeScript( | |
| col.type, | |
| col.isArray, | |
| schema, | |
| availableEnums, | |
| useGeometricTypes | |
| ); | |
| const nullable = col.nullable ? ' | null' : ''; | |
| // Add JSDoc comment if available and enabled | |
| const commentLine = | |
| includeComments && col.comment ? `${indent.repeat(5)}/** ${col.comment} */\n` : ''; | |
| return `${commentLine}${indent.repeat(5)}${colName}?: ${tsType}${nullable}`; | |
| }); | |
| // Generate relationships array | |
| const relationships = table.relationships.map((rel) => { | |
| const columns = rel.columns.map((c) => `"${convertCase(c, convention)}"`).join(', '); | |
| const refColumns = rel.referencedColumns.map((c) => `"${c}"`).join(', '); | |
| return `${indent.repeat(5)}{ | |
| ${indent.repeat(6)}foreignKeyName: "${rel.foreignKeyName}" | |
| ${indent.repeat(6)}columns: [${columns}] | |
| ${indent.repeat(6)}isOneToOne: ${rel.isOneToOne} | |
| ${indent.repeat(6)}referencedRelation: "${rel.referencedRelation}" | |
| ${indent.repeat(6)}referencedColumns: [${refColumns}] | |
| ${indent.repeat(5)}}`; | |
| }); | |
| const relationshipsStr = | |
| relationships.length > 0 ? `[\n${relationships.join(',\n')}\n${indent.repeat(4)}]` : '[]'; | |
| // Generate indexes metadata (only if includeIndexes is enabled) | |
| let indexesField = ''; | |
| if (includeIndexes) { | |
| const indexesStr = generateIndexMetadata(table.indexes, convention, indentSize); | |
| indexesField = `\n${indent.repeat(4)}Indexes: ${indexesStr}`; | |
| } | |
| // Add table comment as JSDoc if available and enabled | |
| const tableCommentBlock = | |
| includeComments && table.comment | |
| ? `${indent.repeat(3)}/**\n${indent.repeat(3)} * ${table.comment}\n${indent.repeat(3)} */\n` | |
| : ''; | |
| return `${tableCommentBlock}${indent.repeat(3)}${tableName}: { | |
| ${indent.repeat(4)}Row: { | |
| ${rowColumns.join('\n')} | |
| ${indent.repeat(4)}} | |
| ${indent.repeat(4)}Insert: { | |
| ${insertColumns.join('\n')} | |
| ${indent.repeat(4)}} | |
| ${indent.repeat(4)}Update: { | |
| ${updateColumns.join('\n')} | |
| ${indent.repeat(4)}} | |
| ${indent.repeat(4)}Relationships: ${relationshipsStr}${indexesField} | |
| ${indent.repeat(3)}}`; | |
| } | |
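| // Illustrative output shape (hypothetical table; fields are newline-separated | |
| // in the emitted file): | |
| //   users: { | |
| //     Row: { id: string, email: string | null } | |
| //     Insert: { id?: string, email?: string | null }  (defaulted/nullable -> optional) | |
| //     Update: { id?: string, email?: string | null }  (every column optional) | |
| //     Relationships: [] | |
| //   } | |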
| /** | |
| * Render enum definitions as string-literal union types. | |
| * @param enums - Enum definitions | |
| * @param convention - Naming convention | |
| * @param indentSize - Indentation size | |
| * @returns Generated enum type string | |
| */ | |
| function generateEnumTypes( | |
| enums: EnumDefinition[], | |
| convention: NamingConvention, | |
| indentSize: number = 2 | |
| ): string { | |
| if (enums.length === 0) { | |
| return ''; | |
| } | |
| const indent = ' '.repeat(indentSize); | |
| const enumDefs = enums.map((e) => { | |
| const enumName = convertCase(e.name, convention); | |
| const values = e.values.map((v) => `"${v}"`).join(' | '); | |
| return `${indent.repeat(3)}${enumName}: ${values}`; | |
| }); | |
| return enumDefs.join('\n'); | |
| } | |
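| // Illustrative output (hypothetical enum): { name: 'user_role', | |
| // values: ['admin', 'member'] } renders as: | |
| //   user_role: "admin" | "member" | |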
| /** | |
| * Render composite type definitions as object types. | |
| * @param types - Composite type definitions | |
| * @param convention - Naming convention | |
| * @param indentSize - Indentation size | |
| * @returns Generated composite type definitions | |
| */ | |
| function generateCompositeTypes( | |
| types: CompositeTypeDefinition[], | |
| convention: NamingConvention, | |
| indentSize: number = 2 | |
| ): string { | |
| if (types.length === 0) { | |
| return ''; | |
| } | |
| const indent = ' '.repeat(indentSize); | |
| const typeDefs = types.map((t) => { | |
| const typeName = convertCase(t.name, convention); | |
| const attrs = t.attributes.map((a) => { | |
| const attrName = convertCase(a.name, convention); | |
| return `${indent.repeat(4)}${attrName}: ${a.type}`; | |
| }); | |
| return `${indent.repeat(3)}${typeName}: {\n${attrs.join('\n')}\n${indent.repeat(3)}}`; | |
| }); | |
| return typeDefs.join('\n'); | |
| } | |
| /** | |
| * Render function signatures as Args/Returns entries. | |
| * @param functions - Function definitions | |
| * @param convention - Naming convention | |
| * @param indentSize - Indentation size | |
| * @param availableEnums - Set of available enum type names | |
| * @param schema - Default schema name | |
| * @param useGeometricTypes - Whether to use geometric types | |
| * @param alphabetical - Whether to sort arguments alphabetically | |
| * @returns Generated function type definitions | |
| */ | |
| function generateFunctionTypes( | |
| functions: FunctionDefinition[], | |
| convention: NamingConvention, | |
| indentSize: number = 2, | |
| availableEnums: Set<string> = new Set(), | |
| schema: string = 'public', | |
| useGeometricTypes: boolean = false, | |
| alphabetical: boolean = false | |
| ): string { | |
| if (functions.length === 0) { | |
| return ''; | |
| } | |
| const indent = ' '.repeat(indentSize); | |
| const funcDefs = functions.map((f) => { | |
| const funcName = convertCase(f.name, convention); | |
| // Generate Args object | |
| let argsStr = ''; | |
| if (f.args.length === 0) { | |
| argsStr = ` never`; | |
| } else { | |
| // Sort args alphabetically if requested | |
| const sortedArgs = alphabetical | |
| ? [...f.args].sort((a, b) => a.name.localeCompare(b.name)) | |
| : f.args; | |
| const argProps = sortedArgs.map((arg) => { | |
| const argName = convertCase(arg.name, convention); | |
| const tsType = mapPostgresTypeToTypeScript( | |
| arg.type, | |
| arg.type.includes('[]'), | |
| schema, | |
| availableEnums, | |
| useGeometricTypes | |
| ); | |
| const optional = arg.hasDefault ? '?' : ''; | |
| return `${indent.repeat(5)}${argName}${optional}: ${tsType}`; | |
| }); | |
| argsStr = `\n${argProps.join('\n')}\n${indent.repeat(4)}`; | |
| } | |
| // Map return type | |
| const returnsType = mapPostgresTypeToTypeScript( | |
| f.returns, | |
| f.returns.includes('[]'), | |
| schema, | |
| availableEnums, | |
| useGeometricTypes | |
| ); | |
| // Format as one-liner if no arguments or single argument | |
| if (argsStr === ' never') { | |
| return `${indent.repeat(3)}${funcName}: { Args: never; Returns: ${returnsType} }`; | |
| } | |
| // Count non-empty lines to detect a single-argument function | |
| const argLines = argsStr | |
| .trim() | |
| .split('\n') | |
| .filter((line) => line.trim()); | |
| if (argLines.length === 1) { | |
| // Single argument - one liner with proper spacing | |
| return `${indent.repeat(3)}${funcName}: { Args: { ${argsStr.trim()} }; Returns: ${returnsType} }`; | |
| } | |
| // Multiple arguments - multi-line | |
| return `${indent.repeat(3)}${funcName}: { | |
| ${indent.repeat(4)}Args: {${argsStr}} | |
| ${indent.repeat(4)}Returns: ${returnsType} | |
| ${indent.repeat(3)}}`; | |
| }); | |
| return funcDefs.join('\n'); | |
| } | |
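| // Illustrative outputs (hypothetical functions): | |
| //   zero args:  get_version: { Args: never; Returns: string } | |
| //   one arg:    get_user: { Args: { user_id: string }; Returns: Json } | |
| //   functions with two or more args render Args as a multi-line object block. | |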
| /** | |
| * Generate geometric type exports based on which types are used | |
| * @param usedTypes - Set of used geometric types | |
| * @param indentSize - Indentation size | |
| * @returns Generated geometric type definitions | |
| */ | |
| function generateGeometricTypes(usedTypes: Set<string>, indentSize: number = 2): string { | |
| if (usedTypes.size === 0) { | |
| return ''; | |
| } | |
| const types: string[] = []; | |
| if (usedTypes.has('point')) { | |
| types.push(`export type Point = { x: number; y: number } | string`); | |
| } | |
| if (usedTypes.has('line')) { | |
| types.push(`export type Line = { a: number; b: number; c: number } | string`); | |
| } | |
| if (usedTypes.has('lseg')) { | |
| types.push(`export type LineSegment = { p1: Point; p2: Point } | string`); | |
| } | |
| if (usedTypes.has('box')) { | |
| types.push(`export type Box = { upperRight: Point; lowerLeft: Point } | string`); | |
| } | |
| if (usedTypes.has('path')) { | |
| types.push(`export type Path = { points: Point[]; open: boolean } | string`); | |
| } | |
| if (usedTypes.has('polygon')) { | |
| types.push(`export type Polygon = { points: Point[] } | string`); | |
| } | |
| if (usedTypes.has('circle')) { | |
| types.push(`export type Circle = { center: Point; radius: number } | string`); | |
| } | |
| if (types.length === 0) { | |
| return ''; | |
| } | |
| return `// ============================================================================ | |
| // Geometric Type Definitions | |
| // ============================================================================ | |
| // PostgreSQL geometric types with structured alternatives | |
| ${types.join('\n')} | |
| `; | |
| } | |
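| // Because each geometric export is a structured-or-string union, consumers can | |
| // narrow at runtime; a minimal sketch (hypothetical helper, not part of the | |
| // generated file): | |
| //   const isStructuredPoint = (p: Point): p is { x: number; y: number } => | |
| //     typeof p !== 'string'; | |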
| /** | |
| * Render the runtime Constants object exposing enum values per schema. | |
| * @param enumsBySchema - Enums grouped by schema | |
| * @param allSchemas - Set of all schema names | |
| * @param convention - Naming convention | |
| * @param indentSize - Indentation size | |
| * @returns Generated constants string | |
| */ | |
| function generateConstants( | |
| enumsBySchema: Record<string, EnumDefinition[]>, | |
| allSchemas: Set<string>, | |
| convention: NamingConvention, | |
| indentSize: number = 2 | |
| ): string { | |
| const indent = ' '.repeat(indentSize); | |
| const schemaConstants: string[] = []; | |
| for (const schemaName of Array.from(allSchemas).sort()) { | |
| const schemaEnums = enumsBySchema[schemaName] || []; | |
| if (schemaEnums.length === 0) { | |
| // Empty schema - just empty Enums object | |
| schemaConstants.push(`${indent}${schemaName}: { | |
| ${indent.repeat(2)}Enums: {}, | |
| ${indent}}`); | |
| } else { | |
| // Schema has enums | |
| const enumConstants = schemaEnums.map((enumDef) => { | |
| const enumName = convertCase(enumDef.name, convention); | |
| const values = enumDef.values.map((v) => `"${v}"`).join(', '); | |
| return `${indent.repeat(3)}${enumName}: [${values}]`; | |
| }); | |
| schemaConstants.push(`${indent}${schemaName}: { | |
| ${indent.repeat(2)}Enums: { | |
| ${enumConstants.join(',\n')} | |
| ${indent.repeat(2)}}, | |
| ${indent}}`); | |
| } | |
| } | |
| return ` | |
| // ============================================================================ | |
| // Constants - Runtime Enum Values | |
| // ============================================================================ | |
| // Use these for dropdowns, validation, and type guards | |
| export const Constants = { | |
| ${schemaConstants.join(',\n')} | |
| } as const | |
| `; | |
| } | |
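| // Illustrative output (hypothetical enum): with a public user_role enum the | |
| // emitted block looks like: | |
| //   export const Constants = { | |
| //     public: { | |
| //       Enums: { | |
| //         user_role: ["admin", "member"] | |
| //       }, | |
| //     }, | |
| //   } as const | |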
| /** | |
| * Assemble and write the final types file from all parsed definitions. | |
| * @param config - Generator configuration | |
| * @param tables - Table definitions | |
| * @param enums - Enum definitions | |
| * @param functions - Function definitions | |
| * @param compositeTypes - Composite type definitions | |
| * @param jsonbTypes - JSONB type definitions | |
| */ | |
| function generateFinalTypes( | |
| config: GeneratorConfig, | |
| tables: TableDefinition[], | |
| enums: EnumDefinition[], | |
| functions: FunctionDefinition[], | |
| compositeTypes: CompositeTypeDefinition[], | |
| jsonbTypes: TypeDefinition[] | |
| ): void { | |
| log('\n🔨 Step 3: Generating type definitions...', 'bright'); | |
| const finalPath = join(config.output.dir, config.output.finalFile); | |
| const convention = config.namingConvention; | |
| const schema = config.supabase.schema; | |
| const indentSize = config.indentSize; | |
| const indent = ' '.repeat(indentSize); | |
| // Detect which geometric types are used across all tables | |
| const usedGeometricTypes = detectGeometricTypes(tables); | |
| const useGeometricTypes = usedGeometricTypes.size > 0; | |
| if (useGeometricTypes) { | |
| log( | |
| ` ✓ Detected ${usedGeometricTypes.size} geometric type(s): ${Array.from(usedGeometricTypes).join(', ')}`, | |
| 'cyan' | |
| ); | |
| } | |
| // Sort tables, enums, functions, and composite types alphabetically if requested | |
| if (config.alphabetical) { | |
| log(' ✓ Sorting types alphabetically', 'cyan'); | |
| tables.sort((a, b) => a.name.localeCompare(b.name)); | |
| enums.sort((a, b) => a.name.localeCompare(b.name)); | |
| functions.sort((a, b) => a.name.localeCompare(b.name)); | |
| compositeTypes.sort((a, b) => a.name.localeCompare(b.name)); | |
| jsonbTypes.sort((a, b) => a.name.localeCompare(b.name)); | |
| // Also sort columns, relationships, and indexes within each table alphabetically | |
| for (const table of tables) { | |
| table.columns.sort((a, b) => a.name.localeCompare(b.name)); | |
| table.relationships.sort((a, b) => a.foreignKeyName.localeCompare(b.foreignKeyName)); | |
| table.indexes.sort((a, b) => a.name.localeCompare(b.name)); | |
| } | |
| // Sort composite type attributes alphabetically | |
| for (const compType of compositeTypes) { | |
| compType.attributes.sort((a, b) => a.name.localeCompare(b.name)); | |
| } | |
| } | |
| // Flatten JSONB types if needed | |
| let allJsonbTypes = config.extractNestedTypes ? flattenTypes(jsonbTypes) : jsonbTypes; | |
| // Deduplicate types if requested | |
| if (config.deduplicateTypes && allJsonbTypes.length > 0) { | |
| log(' ✓ Deduplicating types...', 'cyan'); | |
| const typeMap = new Map<string, TypeDefinition>(); | |
| const normalizedToName = new Map<string, string>(); | |
| const nameToCanonical = new Map<string, string>(); | |
| // First pass: identify unique types and build mapping | |
| for (const type of allJsonbTypes) { | |
| const normalized = normalizeTypeDefinition(type.typeDefinition); | |
| if (normalizedToName.has(normalized)) { | |
| // This is a duplicate - map it to the canonical name | |
| const canonicalName = normalizedToName.get(normalized)!; | |
| nameToCanonical.set(type.name, canonicalName); | |
| } else { | |
| // This is a unique type structure - it's the canonical version | |
| normalizedToName.set(normalized, type.name); | |
| typeMap.set(type.name, type); | |
| nameToCanonical.set(type.name, type.name); | |
| } | |
| } | |
| // Second pass: update all type definitions to use canonical names | |
| const deduplicatedTypes: TypeDefinition[] = []; | |
| const seen = new Set<string>(); | |
| for (const type of allJsonbTypes) { | |
| const normalized = normalizeTypeDefinition(type.typeDefinition); | |
| const canonicalName = normalizedToName.get(normalized)!; | |
| if (!seen.has(canonicalName)) { | |
| seen.add(canonicalName); | |
| let canonicalType = typeMap.get(canonicalName)!; | |
| // Update references in this type's definition to use canonical names | |
| let updatedDefinition = canonicalType.typeDefinition; | |
| for (const [oldName, newName] of nameToCanonical.entries()) { | |
| if (oldName !== newName) { | |
| // Replace all occurrences of the old type name with the canonical one | |
| const regex = new RegExp(`\\b${oldName}\\b`, 'g'); | |
| updatedDefinition = updatedDefinition.replace(regex, newName); | |
| } | |
| } | |
| // Create updated type with canonical references | |
| canonicalType = { | |
| ...canonicalType, | |
| typeDefinition: updatedDefinition | |
| }; | |
| deduplicatedTypes.push(canonicalType); | |
| } | |
| } | |
| const removedCount = allJsonbTypes.length - deduplicatedTypes.length; | |
| if (removedCount > 0) { | |
| log(` Removed ${removedCount} duplicate type(s)`, 'green'); | |
| } | |
| allJsonbTypes = deduplicatedTypes; | |
| } | |
| // Sort flattened types if alphabetical | |
| if (config.alphabetical && config.extractNestedTypes) { | |
| allJsonbTypes.sort((a, b) => a.name.localeCompare(b.name)); | |
| } | |
| // Group by schema | |
| const tablesBySchema: Record<string, TableDefinition[]> = {}; | |
| const enumsBySchema: Record<string, EnumDefinition[]> = {}; | |
| const functionsBySchema: Record<string, FunctionDefinition[]> = {}; | |
| const compositeTypesBySchema: Record<string, CompositeTypeDefinition[]> = {}; | |
| for (const table of tables) { | |
| if (!tablesBySchema[table.schema]) { | |
| tablesBySchema[table.schema] = []; | |
| } | |
| tablesBySchema[table.schema].push(table); | |
| } | |
| for (const enumDef of enums) { | |
| if (!enumsBySchema[enumDef.schema]) { | |
| enumsBySchema[enumDef.schema] = []; | |
| } | |
| enumsBySchema[enumDef.schema].push(enumDef); | |
| } | |
| for (const func of functions) { | |
| if (!functionsBySchema[func.schema]) { | |
| functionsBySchema[func.schema] = []; | |
| } | |
| functionsBySchema[func.schema].push(func); | |
| } | |
| for (const compType of compositeTypes) { | |
| if (!compositeTypesBySchema[compType.schema]) { | |
| compositeTypesBySchema[compType.schema] = []; | |
| } | |
| compositeTypesBySchema[compType.schema].push(compType); | |
| } | |
| // Get all schemas | |
| const allSchemas = new Set<string>([ | |
| ...Object.keys(tablesBySchema), | |
| ...Object.keys(enumsBySchema), | |
| ...Object.keys(functionsBySchema), | |
| ...Object.keys(compositeTypesBySchema) | |
| ]); | |
| // Always include graphql_public if not present | |
| allSchemas.add('graphql_public'); | |
| // Generate base types for each schema | |
| const schemaTypes: string[] = []; | |
| for (const schemaName of Array.from(allSchemas).sort()) { | |
| const schemaTables = tablesBySchema[schemaName] || []; | |
| const schemaEnums = enumsBySchema[schemaName] || []; | |
| const schemaFunctions = functionsBySchema[schemaName] || []; | |
| const schemaCompositeTypes = compositeTypesBySchema[schemaName] || []; | |
| // Create a set of available enum names for this schema (preserve original casing) | |
| const availableEnums = new Set(schemaEnums.map((e) => e.name)); | |
| const tableTypes = schemaTables | |
| .map((table) => | |
| generateTableType( | |
| table, | |
| convention, | |
| indentSize, | |
| config.includeIndexes, | |
| config.includeComments, | |
| availableEnums, | |
| schemaName, | |
| useGeometricTypes | |
| ) | |
| ) | |
| .join('\n'); | |
| const enumTypes = generateEnumTypes(schemaEnums, convention, indentSize); | |
| const functionTypes = generateFunctionTypes( | |
| schemaFunctions, | |
| convention, | |
| indentSize, | |
| availableEnums, | |
| schemaName, | |
| useGeometricTypes, | |
| config.alphabetical | |
| ); | |
| const compositeTypesDef = generateCompositeTypes( | |
| schemaCompositeTypes, | |
| convention, | |
| indentSize | |
| ); | |
| // Special handling for graphql_public schema | |
| if (schemaName === 'graphql_public') { | |
| schemaTypes.push(`${indent}graphql_public: { | |
| ${indent.repeat(2)}Tables: { | |
| ${indent.repeat(3)}[_ in never]: never | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}Views: { | |
| ${indent.repeat(3)}[_ in never]: never | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}Functions: { | |
| ${indent.repeat(3)}graphql: { | |
| ${indent.repeat(4)}Args: { | |
| ${indent.repeat(5)}extensions?: Json | |
| ${indent.repeat(5)}operationName?: string | |
| ${indent.repeat(5)}query?: string | |
| ${indent.repeat(5)}variables?: Json | |
| ${indent.repeat(4)}} | |
| ${indent.repeat(4)}Returns: Json | |
| ${indent.repeat(3)}} | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}Enums: { | |
| ${indent.repeat(3)}[_ in never]: never | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}CompositeTypes: { | |
| ${indent.repeat(3)}[_ in never]: never | |
| ${indent.repeat(2)}} | |
| ${indent}}`); | |
| } else { | |
| schemaTypes.push(`${indent}${schemaName}: { | |
| ${indent.repeat(2)}Tables: { | |
| ${tableTypes || `${indent.repeat(3)}[_ in never]: never`} | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}Views: { | |
| ${indent.repeat(3)}[_ in never]: never | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}Functions: { | |
| ${functionTypes || `${indent.repeat(3)}[_ in never]: never`} | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}Enums: { | |
| ${enumTypes || `${indent.repeat(3)}[_ in never]: never`} | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}CompositeTypes: { | |
| ${compositeTypesDef || `${indent.repeat(3)}[_ in never]: never`} | |
| ${indent.repeat(2)}} | |
| ${indent}}`); | |
| } | |
| } | |
| // Generate JSONB type definitions | |
| let jsonbTypeDefinitions = ''; | |
| if (allJsonbTypes.length > 0) { | |
| jsonbTypeDefinitions = allJsonbTypes | |
| .map((type) => { | |
| if (!type.table && !type.column) { | |
| return `export type ${type.name} = ${type.typeDefinition};`; | |
| } | |
| let output = ''; | |
| if (type.comment) { | |
| output += `/**\n * ${type.comment}\n`; | |
| if (type.example) { | |
| output += ' *\n * Example:\n * ```json\n'; | |
| output += JSON.stringify(type.example, null, 2) | |
| .split('\n') | |
| .map((l) => ` * ${l}`) | |
| .join('\n'); | |
| output += '\n * ```\n'; | |
| } | |
| output += ' */\n'; | |
| } | |
| output += `export type ${type.name} = ${type.typeDefinition};`; | |
| return output; | |
| }) | |
| .join('\n\n'); | |
| } | |
| // Generate MergeDeep structure for JSONB overrides | |
| const typesByTable: Record<string, TypeDefinition[]> = {}; | |
| jsonbTypes.forEach((type) => { | |
| if (type.table) { | |
| if (!typesByTable[type.table]) { | |
| typesByTable[type.table] = []; | |
| } | |
| typesByTable[type.table].push(type); | |
| } | |
| }); | |
| const hasMergeDeep = Object.keys(typesByTable).length > 0; | |
| let mergeDeepStructure = ''; | |
| if (hasMergeDeep) { | |
| // Get table entries and optionally sort them | |
| const tableEntries = Object.entries(typesByTable); | |
| if (config.alphabetical) { | |
| tableEntries.sort(([a], [b]) => a.localeCompare(b)); | |
| } | |
| mergeDeepStructure = tableEntries | |
| .map(([tableName, types]) => { | |
| const convertedTableName = convertCase(tableName, convention); | |
| // Optionally sort the column types within each table | |
| const sortedTypes = config.alphabetical | |
| ? [...types].sort((a, b) => a.column.localeCompare(b.column)) | |
| : types; | |
| const rowFields = sortedTypes | |
| .map((type) => { | |
| const convertedColName = convertCase(type.column, convention); | |
| return `${indent.repeat(6)}${convertedColName}: ${type.name} | null`; | |
| }) | |
| .join('\n'); | |
| return `${indent.repeat(4)}${convertedTableName}: { | |
| ${indent.repeat(5)}Row: { | |
| ${rowFields} | |
| ${indent.repeat(5)}} | |
| ${indent.repeat(4)}}`; | |
| }) | |
| .join('\n'); | |
| } | |
| // Create header | |
| const namingNote = convention === 'preserve' ? 'preserve (exact database naming)' : convention; | |
| const sourceNote = config.supabase.source === 'sql' ? 'SQL files' : 'database'; | |
| const sortNote = config.alphabetical ? ' (alphabetically sorted)' : ''; | |
| // Determine indent source for header | |
| let indentSourceNote = ''; | |
| if (indentSize === 2) { | |
| // Check if it's default or from Prettier | |
| const prettierConfig = detectPrettierConfig(); | |
| const prettierIndent = getPrettierIndentSize(prettierConfig); | |
| if (prettierIndent === indentSize) { | |
| indentSourceNote = ' (from Prettier config)'; | |
| } else { | |
| indentSourceNote = ' (default)'; | |
| } | |
| } else { | |
| indentSourceNote = ' (custom)'; | |
| } | |
| const header = `/** | |
| * Auto-generated TypeScript types for Supabase | |
| * Generated: ${new Date().toISOString()} | |
| * Source: ${sourceNote} | |
| * Naming convention: ${namingNote} | |
| * Schema: ${schema}${sortNote} | |
| * Indentation: ${indentSize} spaces${indentSourceNote} | |
| * Note: 'this' and 'constraint' columns are automatically excluded | |
| * | |
| * DO NOT EDIT MANUALLY - Run type generation script to regenerate | |
| */ | |
| ${hasMergeDeep ? "import type { MergeDeep } from 'type-fest';\n\n" : ''}`; | |
| // Json type | |
| const jsonType = `export type Json = | |
| ${indent}| string | |
| ${indent}| number | |
| ${indent}| boolean | |
| ${indent}| null | |
| ${indent}| { [key: string]: Json | undefined } | |
| ${indent}| Json[] | |
| `; | |
| // Optional geometric type helpers | |
| const geometricTypesSection = generateGeometricTypes(usedGeometricTypes, indentSize); | |
| // JSONB section - now at the bottom | |
| const jsonbSection = | |
| allJsonbTypes.length > 0 | |
| ? ` | |
| // ============================================================================ | |
| // JSONB Column Type Definitions | |
| // ============================================================================ | |
| // These types provide structured definitions for JSONB columns with default values | |
| ${jsonbTypeDefinitions} | |
| ` | |
| : ''; | |
| // Generate Constants for runtime enum access | |
| const constantsSection = generateConstants(enumsBySchema, allSchemas, convention, indentSize); | |
| // Base database type | |
| const baseDatabaseType = `${hasMergeDeep ? 'type DatabaseGenerated = ' : 'export type Database = '}{ | |
| ${schemaTypes.join('\n')} | |
| } | |
| `; | |
| // Enhanced database type with JSONB overrides | |
| const enhancedDatabaseType = hasMergeDeep | |
| ? ` | |
| // ============================================================================ | |
| // Enhanced Database Type with JSONB Support | |
| // ============================================================================ | |
| /** | |
| * Enhanced Database type with specific JSONB column types | |
| * Uses MergeDeep to override generic Json types with specific structures | |
| */ | |
| export type Database = MergeDeep< | |
| ${indent}DatabaseGenerated, | |
| ${indent}{ | |
| ${indent.repeat(2)}${schema}: { | |
| ${indent.repeat(3)}Tables: { | |
| ${mergeDeepStructure} | |
| ${indent.repeat(3)}} | |
| ${indent.repeat(2)}} | |
| ${indent}} | |
| >; | |
| ` | |
| : '\n'; | |
| // Helper types | |
| const helperTypes = ` | |
| type DatabaseWithoutInternals = Omit<Database, "__InternalSupabase"> | |
| type DefaultSchema = DatabaseWithoutInternals[Extract<keyof Database, "public">] | |
| export type Tables< | |
| ${indent}DefaultSchemaTableNameOrOptions extends | |
| ${indent.repeat(2)}| keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) | |
| ${indent.repeat(2)}| { schema: keyof DatabaseWithoutInternals }, | |
| ${indent}TableName extends DefaultSchemaTableNameOrOptions extends { | |
| ${indent.repeat(2)}schema: keyof DatabaseWithoutInternals | |
| ${indent}} | |
| ${indent.repeat(2)}? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & | |
| ${indent.repeat(4)}DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) | |
| ${indent.repeat(2)}: never = never, | |
| > = DefaultSchemaTableNameOrOptions extends { | |
| ${indent}schema: keyof DatabaseWithoutInternals | |
| } | |
| ${indent}? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & | |
| ${indent.repeat(3)}DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { | |
| ${indent.repeat(3)}Row: infer R | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}? R | |
| ${indent.repeat(2)}: never | |
| ${indent}: DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & | |
| ${indent.repeat(4)}DefaultSchema["Views"]) | |
| ${indent.repeat(2)}? (DefaultSchema["Tables"] & | |
| ${indent.repeat(4)}DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { | |
| ${indent.repeat(4)}Row: infer R | |
| ${indent.repeat(3)}} | |
| ${indent.repeat(3)}? R | |
| ${indent.repeat(3)}: never | |
| ${indent.repeat(2)}: never | |
| export type TablesInsert< | |
| ${indent}DefaultSchemaTableNameOrOptions extends | |
| ${indent.repeat(2)}| keyof DefaultSchema["Tables"] | |
| ${indent.repeat(2)}| { schema: keyof DatabaseWithoutInternals }, | |
| ${indent}TableName extends DefaultSchemaTableNameOrOptions extends { | |
| ${indent.repeat(2)}schema: keyof DatabaseWithoutInternals | |
| ${indent}} | |
| ${indent.repeat(2)}? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] | |
| ${indent.repeat(2)}: never = never, | |
| > = DefaultSchemaTableNameOrOptions extends { | |
| ${indent}schema: keyof DatabaseWithoutInternals | |
| } | |
| ${indent}? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { | |
| ${indent.repeat(3)}Insert: infer I | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}? I | |
| ${indent.repeat(2)}: never | |
| ${indent}: DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] | |
| ${indent.repeat(2)}? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { | |
| ${indent.repeat(4)}Insert: infer I | |
| ${indent.repeat(3)}} | |
| ${indent.repeat(3)}? I | |
| ${indent.repeat(3)}: never | |
| ${indent.repeat(2)}: never | |
| export type TablesUpdate< | |
| ${indent}DefaultSchemaTableNameOrOptions extends | |
| ${indent.repeat(2)}| keyof DefaultSchema["Tables"] | |
| ${indent.repeat(2)}| { schema: keyof DatabaseWithoutInternals }, | |
| ${indent}TableName extends DefaultSchemaTableNameOrOptions extends { | |
| ${indent.repeat(2)}schema: keyof DatabaseWithoutInternals | |
| ${indent}} | |
| ${indent.repeat(2)}? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] | |
| ${indent.repeat(2)}: never = never, | |
| > = DefaultSchemaTableNameOrOptions extends { | |
| ${indent}schema: keyof DatabaseWithoutInternals | |
| } | |
| ${indent}? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { | |
| ${indent.repeat(3)}Update: infer U | |
| ${indent.repeat(2)}} | |
| ${indent.repeat(2)}? U | |
| ${indent.repeat(2)}: never | |
| ${indent}: DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] | |
| ${indent.repeat(2)}? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { | |
| ${indent.repeat(4)}Update: infer U | |
| ${indent.repeat(3)}} | |
| ${indent.repeat(3)}? U | |
| ${indent.repeat(3)}: never | |
| ${indent.repeat(2)}: never | |
| export type Enums< | |
| ${indent}DefaultSchemaEnumNameOrOptions extends | |
| ${indent.repeat(2)}| keyof DefaultSchema["Enums"] | |
| ${indent.repeat(2)}| { schema: keyof DatabaseWithoutInternals }, | |
| ${indent}EnumName extends DefaultSchemaEnumNameOrOptions extends { | |
| ${indent.repeat(2)}schema: keyof DatabaseWithoutInternals | |
| ${indent}} | |
| ${indent.repeat(2)}? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] | |
| ${indent.repeat(2)}: never = never, | |
| > = DefaultSchemaEnumNameOrOptions extends { | |
| ${indent}schema: keyof DatabaseWithoutInternals | |
| } | |
| ${indent}? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] | |
| ${indent}: DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] | |
| ${indent.repeat(2)}? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] | |
| ${indent.repeat(2)}: never | |
| export type CompositeTypes< | |
| ${indent}PublicCompositeTypeNameOrOptions extends | |
| ${indent.repeat(2)}| keyof DefaultSchema["CompositeTypes"] | |
| ${indent.repeat(2)}| { schema: keyof DatabaseWithoutInternals }, | |
| ${indent}CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { | |
| ${indent.repeat(2)}schema: keyof DatabaseWithoutInternals | |
| ${indent}} | |
| ${indent.repeat(2)}? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] | |
| ${indent.repeat(2)}: never = never, | |
| > = PublicCompositeTypeNameOrOptions extends { | |
| ${indent}schema: keyof DatabaseWithoutInternals | |
| } | |
| ${indent}? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] | |
| ${indent}: PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] | |
| ${indent.repeat(2)}? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] | |
| ${indent.repeat(2)}: never | |
| `; | |
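| // Hedged usage sketch for the helper types emitted above (the 'profiles' | |
| // table and 'user_role' enum are hypothetical examples, not something this | |
| // generator creates): | |
| // | |
| //   type ProfileInsert = TablesInsert<'profiles'>; | |
| //   type ProfileUpdate = TablesUpdate<'profiles'>; | |
| //   type UserRole = Enums<'user_role'>; | |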
| // Assemble the final output; JSONB types and Constants are appended at the bottom | |
| const content = | |
| header + | |
| geometricTypesSection + | |
| jsonType + | |
| baseDatabaseType + | |
| enhancedDatabaseType + | |
| helperTypes + | |
| jsonbSection + | |
| constantsSection; | |
| writeFileSync(finalPath, content, 'utf8'); | |
| log(` ✓ Types generated: ${finalPath}`, 'green'); | |
| log(` ✓ ${tables.length} table(s)`, 'green'); | |
| const totalRelationships = tables.reduce((sum, t) => sum + t.relationships.length, 0); | |
| if (totalRelationships > 0) { | |
| log(` ✓ ${totalRelationships} relationship(s)`, 'green'); | |
| } | |
| const totalIndexes = tables.reduce((sum, t) => sum + t.indexes.length, 0); | |
| if (totalIndexes > 0) { | |
| if (config.includeIndexes) { | |
| log(` Indexes: ${totalIndexes} (included in generated types)`, 'green'); | |
| } else { | |
| log( | |
| ` Indexes: ${totalIndexes} (found but not included in types - use --include-indexes to include)`, | |
| 'cyan' | |
| ); | |
| } | |
| } | |
| if (enums.length > 0) { | |
| log(` ✓ ${enums.length} enum(s)`, 'green'); | |
| } | |
| if (functions.length > 0) { | |
| log(` ✓ ${functions.length} function(s)`, 'green'); | |
| } | |
| if (compositeTypes.length > 0) { | |
| log(` ✓ ${compositeTypes.length} composite type(s)`, 'green'); | |
| } | |
| if (allJsonbTypes.length > 0) { | |
| const mainTypes = jsonbTypes.filter((t) => t.table); | |
| log( | |
| ` ✓ ${allJsonbTypes.length} JSONB type(s) (${mainTypes.length} main, ${allJsonbTypes.length - mainTypes.length} nested)`, | |
| 'green' | |
| ); | |
| } | |
| if (usedGeometricTypes.size > 0) { | |
| log( | |
| ` ✓ ${usedGeometricTypes.size} geometric type(s) exported: ${Array.from(usedGeometricTypes).join(', ')}`, | |
| 'green' | |
| ); | |
| } | |
| log(` ✓ ${allSchemas.size} schema(s): ${Array.from(allSchemas).join(', ')}`, 'cyan'); | |
| log(` ✓ Automatically excluded 'this' and 'constraint' columns`, 'cyan'); | |
| } | |
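| // Illustrative summary from a hypothetical run (counts are made up; the | |
| // format follows the log calls above): | |
| // | |
| //   ✓ Types generated: ./src/lib/types/generated/database.ts | |
| //   ✓ 12 table(s) | |
| //   ✓ 3 enum(s) | |
| //   ✓ 1 schema(s): public | |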
| /** | |
| * ============================================================================ | |
| * MAIN EXECUTION | |
| * ============================================================================ | |
| */ | |
| /** | |
| * Initializes the generator configuration by parsing CLI arguments and reading Supabase config. | |
| * @returns The complete generator configuration. | |
| */ | |
| function initializeConfig(): GeneratorConfig { | |
| const cliArgs = parseCommandLineArgs(); | |
| log('\n🔧 Initializing configuration...', 'bright'); | |
| log(` Source: ${cliArgs.source === 'sql' ? 'SQL files' : 'Database'}`, 'cyan'); | |
| log(` Schema: ${cliArgs.schema}`, 'cyan'); | |
| log(` Output directory: ${cliArgs.outputDir}`, 'cyan'); | |
| log(` Naming convention: ${cliArgs.namingConvention}`, 'cyan'); | |
| log(` Extract nested types: ${cliArgs.extractNestedTypes}`, 'cyan'); | |
| log(` Deduplicate types: ${cliArgs.deduplicateTypes}`, 'cyan'); | |
| log(` Alphabetical sorting: ${cliArgs.alphabetical}`, 'cyan'); | |
| log(` Include indexes: ${cliArgs.includeIndexes}`, 'cyan'); | |
| log(` Include comments: ${cliArgs.includeComments}`, 'cyan'); | |
| log(` Exclude 'this' and 'constraint' columns: true (automatic)`, 'cyan'); | |
| // Determine indentation size | |
| let finalIndentSize = GENERATOR_CONFIG.indentSize; | |
| let indentSource = 'default'; | |
| // Priority 1: Explicit --indent flag | |
| if (cliArgs.indentSize !== null) { | |
| finalIndentSize = cliArgs.indentSize; | |
| indentSource = 'CLI flag'; | |
| } | |
| // Priority 2: Prettier config (if enabled) | |
| else if (cliArgs.usePrettier) { | |
| const prettierConfig = detectPrettierConfig(); | |
| const prettierIndent = getPrettierIndentSize(prettierConfig); | |
| if (prettierIndent !== null) { | |
| finalIndentSize = prettierIndent; | |
| indentSource = 'Prettier config'; | |
| } else if (prettierConfig) { | |
| log(` Prettier config found but no tabWidth specified, using default`, 'yellow'); | |
| } | |
| } | |
| log(` Indentation: ${finalIndentSize} spaces (${indentSource})`, 'cyan'); | |
| const [configSchemaPaths, configWorkdir] = readSupabaseConfig(cliArgs.workdir); | |
| const resolvedPaths = resolveSchemaFiles(configSchemaPaths, configWorkdir); | |
| if (resolvedPaths.length > 0) { | |
| log(` Resolved ${resolvedPaths.length} SQL schema file(s)`, 'green'); | |
| // Show breakdown if glob patterns were used | |
| const hasGlobs = configSchemaPaths.some((p) => p.includes('*')); | |
| if (hasGlobs && GENERATOR_CONFIG.verboseLogging) { | |
| log( | |
| ` (expanded from ${configSchemaPaths.length} path(s) including glob patterns)`, | |
| 'cyan' | |
| ); | |
| } | |
| } else { | |
| log(` ⚠️ No SQL files resolved from schema paths`, 'yellow'); | |
| } | |
| let outputSuffix = ''; | |
| if (cliArgs.workdir && cliArgs.workdir !== GENERATOR_CONFIG.defaultWorkdir) { | |
| const pathParts = cliArgs.workdir | |
| .split(/[/\\]/) | |
| .filter((p) => p && p !== '.' && p !== 'supabase'); | |
| if (pathParts.length > 0) { | |
| const lastPart = pathParts[pathParts.length - 1]; | |
| outputSuffix = lastPart.charAt(0).toUpperCase() + lastPart.slice(1); | |
| } | |
| } | |
| return { | |
| supabase: { | |
| source: cliArgs.source, | |
| connectionString: cliArgs.connectionString, | |
| schema: cliArgs.schema, | |
| useWorkdirFlag: cliArgs.useWorkdir, | |
| inputWorkdir: cliArgs.workdir, | |
| configWorkdir: configWorkdir, | |
| configPath: configWorkdir ? join(configWorkdir, 'config.toml') : '' | |
| }, | |
| schemaPaths: resolvedPaths, | |
| output: { | |
| dir: cliArgs.outputDir, | |
| tempFile: `database${outputSuffix}-temp.ts`, | |
| finalFile: `database${outputSuffix}.ts` | |
| }, | |
| extractNestedTypes: cliArgs.extractNestedTypes, | |
| deduplicateTypes: cliArgs.deduplicateTypes, | |
| verboseLogging: cliArgs.verboseLogging, | |
| namingConvention: cliArgs.namingConvention, | |
| alphabetical: cliArgs.alphabetical, | |
| indentSize: finalIndentSize, | |
| includeIndexes: cliArgs.includeIndexes, | |
| includeComments: cliArgs.includeComments | |
| }; | |
| } | |
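| // Hedged invocation sketches; --indent and --include-indexes are referenced | |
| // above, while --workdir is assumed from cliArgs.workdir (see | |
| // parseCommandLineArgs for the authoritative flag names): | |
| // | |
| //   npx tsx generate-types.ts --indent 4 | |
| //   npx tsx generate-types.ts --include-indexes | |
| //   npx tsx generate-types.ts --workdir ./apps/admin/supabase | |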
| /** | |
| * Main entry point: initializes the configuration, parses the SQL schema | |
| * files, scans for JSONB columns, and writes the generated TypeScript types. | |
| */ | |
| function main(): void { | |
| log('╔════════════════════════════════════════════════════════════╗', 'bright'); | |
| log('║ Supabase Type Generator (with Index Support) ║', 'bright'); | |
| log('╚════════════════════════════════════════════════════════════╝', 'bright'); | |
| const config = initializeConfig(); | |
| VERBOSE_LOGGING = config.verboseLogging; | |
| // Ensure output directory exists | |
| if (!existsSync(config.output.dir)) { | |
| mkdirSync(config.output.dir, { recursive: true }); | |
| } | |
| // Parse SQL files | |
| const { tables, enums, functions, compositeTypes } = parseSqlFiles( | |
| config.schemaPaths, | |
| config.supabase.schema, | |
| config.includeComments | |
| ); | |
| if (tables.length === 0) { | |
| log('\n❌ No tables found in SQL files!', 'red', true); | |
| log(' Check your schema paths in config.toml', 'yellow', true); | |
| process.exit(1); | |
| } | |
| // Scan for JSONB columns | |
| const jsonbTypes = scanSchemas(config); | |
| // Generate final types | |
| generateFinalTypes(config, tables, enums, functions, compositeTypes, jsonbTypes); | |
| log('\n✅ Type generation complete!', 'green', true); | |
| log(`\n📝 Import your types with:`, 'cyan', true); | |
| log( | |
| ` import type { Database, Tables, Enums } from './${config.output.finalFile.replace('.ts', '')}'`, | |
| 'cyan', | |
| true | |
| ); | |
| } | |
| // Run the script | |
| main(); |
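| // Downstream import, assuming the default output directory and final file | |
| // name (database.ts, i.e. no workdir suffix); the 'profiles' table is a | |
| // hypothetical example: | |
| // | |
| //   import type { Database, Tables } from './src/lib/types/generated/database'; | |
| //   let profile: Tables<'profiles'>; | |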