A pair of TypeScript scripts that export every non-system database on a MongoDB server to JSON files and import them back.
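Both scripts read a `.env` file one level above the script directory for the connection string, and use an `export/` directory next to it as the data root. The URI below is a placeholder:

```
DATABASE_URI=mongodb://localhost:27017
```

The export script writes one JSON file per collection, grouped by database:

```
export/
  <database>/
    <collection>.json
```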
Export script:
```typescript
import * as fs from 'fs';
import * as path from 'path';
import { MongoClient, ObjectId } from 'mongodb';

// Convert a BSON document into plain JSON-serializable values:
// ObjectIds become hex strings, Dates become ISO-8601 strings.
function toPlainJson(doc: unknown): unknown {
  if (doc === null || doc === undefined) return doc;
  if (typeof doc !== 'object') return doc;
  if (doc instanceof ObjectId) return doc.toString();
  if (doc instanceof Date) return doc.toISOString();
  if (Array.isArray(doc)) return doc.map(toPlainJson);
  const out: Record<string, unknown> = {};
  for (const [k, v] of Object.entries(doc)) {
    out[k] = toPlainJson(v);
  }
  return out;
}

const ENV_PATH = path.resolve(__dirname, '../.env');
const EXPORT_DIR = path.resolve(__dirname, '../export');

// Minimal .env parser: KEY=value lines, optional surrounding quotes, # comments.
function loadEnv(): void {
  if (!fs.existsSync(ENV_PATH)) {
    console.error('.env not found at', ENV_PATH);
    process.exit(1);
  }
  const content = fs.readFileSync(ENV_PATH, 'utf-8');
  for (const line of content.split('\n')) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith('#')) continue;
    const match = trimmed.match(/^([A-Za-z_][A-Za-z0-9_]*)=(.*)$/);
    if (match) {
      const value = match[2].replace(/^["']|["']$/g, '').trim();
      process.env[match[1]] = value;
    }
  }
}

async function exportDatabase(): Promise<void> {
  loadEnv();
  const uri = process.env.DATABASE_URI;
  if (!uri) {
    console.error('DATABASE_URI is not set in .env');
    process.exit(1);
  }
  const client = new MongoClient(uri);
  const startTime = Date.now();
  try {
    await client.connect();
    console.log('Connected to MongoDB');
    const admin = client.db().admin();
    const { databases } = await admin.listDatabases();
    // Skip MongoDB's internal databases.
    const dbsToExport = databases.filter(
      (d) => d.name !== 'admin' && d.name !== 'local' && d.name !== 'config'
    );
    // List each database's collections once and reuse the result below.
    const collectionsByDb = new Map<string, string[]>();
    let totalCollections = 0;
    for (const { name: dbName } of dbsToExport) {
      const collections = await client.db(dbName).listCollections().toArray();
      collectionsByDb.set(dbName, collections.map((c) => c.name));
      totalCollections += collections.length;
    }
    console.log(
      `\nExporting ${dbsToExport.length} database(s), ${totalCollections} collection(s) total.\n`
    );
    let step = 0;
    for (const { name: dbName } of dbsToExport) {
      const db = client.db(dbName);
      const dbExportDir = path.join(EXPORT_DIR, dbName);
      fs.mkdirSync(dbExportDir, { recursive: true });
      for (const collName of collectionsByDb.get(dbName) ?? []) {
        step++;
        process.stdout.write(
          `  [${step}/${totalCollections}] db: ${dbName} → ${collName} ... `
        );
        const collStart = Date.now();
        // Note: this loads the entire collection into memory at once.
        const documents = await db.collection(collName).find({}).toArray();
        const json = JSON.stringify(documents.map((doc) => toPlainJson(doc)), null, 2);
        fs.writeFileSync(path.join(dbExportDir, `${collName}.json`), json, 'utf-8');
        const collElapsed = ((Date.now() - collStart) / 1000).toFixed(1);
        const totalElapsed = ((Date.now() - startTime) / 1000).toFixed(1);
        console.log(`${documents.length} docs in ${collElapsed}s (total: ${totalElapsed}s)`);
      }
    }
    const totalSeconds = ((Date.now() - startTime) / 1000).toFixed(1);
    console.log(`\nExport complete in ${totalSeconds}s. Output: ${EXPORT_DIR}`);
  } catch (err) {
    console.error('Export failed:', err);
    process.exitCode = 1; // let the finally block close the client cleanly
  } finally {
    await client.close();
    console.log('Disconnected from MongoDB');
  }
}

exportDatabase();
```
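One caveat: `find({}).toArray()` buffers every document of a collection in memory before serializing. For large collections, a cursor-based variant keeps memory flat. Here is a minimal sketch, not part of the gist: `exportCollectionStreaming` is a hypothetical name, `toPlainJson` is assumed from the script above, and write-stream backpressure handling is omitted for brevity.

```typescript
import * as fs from 'fs';
import { Db } from 'mongodb';

// Assumed: the toPlainJson helper from the export script above.
declare function toPlainJson(doc: unknown): unknown;

async function exportCollectionStreaming(
  db: Db,
  collName: string,
  filePath: string
): Promise<number> {
  const out = fs.createWriteStream(filePath, { encoding: 'utf-8' });
  out.write('[');
  let count = 0;
  // FindCursor is async-iterable in driver v4+, so documents arrive in
  // server batches instead of being buffered into a single array.
  for await (const doc of db.collection(collName).find({})) {
    out.write((count > 0 ? ',\n' : '\n') + JSON.stringify(toPlainJson(doc), null, 2));
    count++;
  }
  out.write(count > 0 ? '\n]\n' : ']\n');
  out.end();
  return count;
}
```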
Import script:
```typescript
import * as fs from 'fs';
import * as path from 'path';
import { MongoClient, ObjectId } from 'mongodb';

const OBJECT_ID_HEX_LENGTH = 24;
const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{3})?Z?$/;

function isObjectIdString(value: unknown): value is string {
  return (
    typeof value === 'string' &&
    value.length === OBJECT_ID_HEX_LENGTH &&
    /^[a-f0-9]+$/i.test(value)
  );
}

function isIsoDateString(value: unknown): value is string {
  return typeof value === 'string' && ISO_DATE_REGEX.test(value);
}

// Reverse of the export script's toPlainJson. Heuristic: 24-char hex strings
// under `_id`, `id`, or `*Id` keys become ObjectIds, and ISO-8601 strings
// become Dates. Strings that merely look like these types are converted too.
function fromPlainJson(doc: unknown): unknown {
  if (doc === null || doc === undefined) return doc;
  if (typeof doc !== 'object') return doc;
  if (Array.isArray(doc)) return doc.map(fromPlainJson);
  const out: Record<string, unknown> = {};
  for (const [k, v] of Object.entries(doc)) {
    if (isObjectIdString(v) && (k === '_id' || k === 'id' || k.endsWith('Id'))) {
      out[k] = new ObjectId(v);
    } else if (isIsoDateString(v)) {
      out[k] = new Date(v);
    } else {
      out[k] = fromPlainJson(v);
    }
  }
  return out;
}

const ENV_PATH = path.resolve(__dirname, '../.env');
const EXPORT_DIR = path.resolve(__dirname, '../export');

// Minimal .env parser: KEY=value lines, optional surrounding quotes, # comments.
function loadEnv(): void {
  if (!fs.existsSync(ENV_PATH)) {
    console.error('.env not found at', ENV_PATH);
    process.exit(1);
  }
  const content = fs.readFileSync(ENV_PATH, 'utf-8');
  for (const line of content.split('\n')) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith('#')) continue;
    const match = trimmed.match(/^([A-Za-z_][A-Za-z0-9_]*)=(.*)$/);
    if (match) {
      const value = match[2].replace(/^["']|["']$/g, '').trim();
      process.env[match[1]] = value;
    }
  }
}

// Find export/<db>/<collection>.json files, sorted for a stable import order.
function discoverImportFiles(): { dbName: string; collectionName: string; filePath: string }[] {
  if (!fs.existsSync(EXPORT_DIR)) {
    console.error('Export directory not found:', EXPORT_DIR);
    process.exit(1);
  }
  const entries = fs.readdirSync(EXPORT_DIR, { withFileTypes: true });
  const files: { dbName: string; collectionName: string; filePath: string }[] = [];
  for (const entry of entries) {
    if (!entry.isDirectory()) continue;
    const dbDir = path.join(EXPORT_DIR, entry.name);
    for (const f of fs.readdirSync(dbDir)) {
      if (!f.endsWith('.json')) continue;
      files.push({
        dbName: entry.name,
        collectionName: f.slice(0, -5), // strip the ".json" suffix
        filePath: path.join(dbDir, f),
      });
    }
  }
  return files.sort((a, b) => {
    const dbCmp = a.dbName.localeCompare(b.dbName);
    return dbCmp !== 0 ? dbCmp : a.collectionName.localeCompare(b.collectionName);
  });
}

async function importDatabase(): Promise<void> {
  loadEnv();
  // Collections are dropped before insert unless --no-drop is passed
  // or DROP_COLLECTIONS=0 is set.
  const dropCollections = process.argv.includes('--no-drop')
    ? false
    : (process.env.DROP_COLLECTIONS ?? '1') !== '0';
  const uri = process.env.DATABASE_URI;
  if (!uri) {
    console.error('DATABASE_URI is not set in .env');
    process.exit(1);
  }
  const files = discoverImportFiles();
  if (files.length === 0) {
    console.error('No .json collection files found in', EXPORT_DIR);
    process.exit(1);
  }
  const dbNames = [...new Set(files.map((f) => f.dbName))];
  console.log(
    `\nImporting ${dbNames.length} database(s), ${files.length} collection(s). Drop before insert: ${dropCollections}\n`
  );
  const client = new MongoClient(uri);
  const startTime = Date.now();
  try {
    await client.connect();
    console.log('Connected to MongoDB\n');
    for (let i = 0; i < files.length; i++) {
      const { dbName, collectionName, filePath } = files[i];
      process.stdout.write(
        `  [${i + 1}/${files.length}] db: ${dbName} → ${collectionName} ... `
      );
      const collStart = Date.now();
      const raw = fs.readFileSync(filePath, 'utf-8');
      let documents: unknown[];
      try {
        documents = JSON.parse(raw);
      } catch {
        console.log('ERROR: invalid JSON');
        continue;
      }
      if (!Array.isArray(documents)) {
        console.log('ERROR: file is not a JSON array');
        continue;
      }
      const restored = documents.map((doc) => fromPlainJson(doc)) as Record<string, unknown>[];
      const collection = client.db(dbName).collection(collectionName);
      if (dropCollections) {
        // Ignore "ns not found" when the collection does not exist yet.
        await collection.drop().catch(() => {});
      }
      if (restored.length > 0) {
        await collection.insertMany(restored);
      }
      const collElapsed = ((Date.now() - collStart) / 1000).toFixed(1);
      const totalElapsed = ((Date.now() - startTime) / 1000).toFixed(1);
      console.log(`${restored.length} docs in ${collElapsed}s (total: ${totalElapsed}s)`);
    }
    const totalSeconds = ((Date.now() - startTime) / 1000).toFixed(1);
    console.log(`\nImport complete in ${totalSeconds}s.`);
  } catch (err) {
    console.error('Import failed:', err);
    process.exitCode = 1; // let the finally block close the client cleanly
  } finally {
    await client.close();
    console.log('Disconnected from MongoDB');
  }
}

importDatabase();
```
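Pass `--no-drop` (or set `DROP_COLLECTIONS=0`) to append to existing collections instead of replacing them.

Note that the import's string-shape heuristics are lossy: a plain 24-character hex string stored under a key ending in `Id` is converted into an ObjectId, and any string matching the ISO pattern becomes a Date, whether or not those were the original types. If exact fidelity matters, MongoDB's canonical Extended JSON avoids the guessing entirely; a minimal sketch, assuming the `bson` package that ships with the driver:

```typescript
import { EJSON, ObjectId } from 'bson';

// A document mixing a real ObjectId, a Date, and a plain string that merely
// looks like an ObjectId (24 hex chars under a key ending in "Id").
const doc = {
  _id: new ObjectId(),
  createdAt: new Date(),
  sessionId: 'aaaaaaaaaaaaaaaaaaaaaaaa', // a string, not an ObjectId
};

// Canonical mode tags types explicitly ({"$oid": ...}, {"$date": ...}),
// so parsing restores them exactly; the plain string stays a string.
const text = EJSON.stringify(doc, { relaxed: false });
const restored = EJSON.parse(text);
console.log(restored._id instanceof ObjectId); // true
console.log(restored.createdAt instanceof Date); // true
console.log(typeof restored.sessionId); // 'string'
```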