Supabase Storage Sync (Remote → Local)
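Downloads all assets from your production Supabase storage buckets and uploads any that are missing to your local Supabase instance, creating buckets as needed and skipping files that already exist locally.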
/**
 * Sync Storage Script
 *
 * This script downloads all assets from your production Supabase storage buckets
 * and uploads them to your local Supabase instance.
 *
 * Setup:
 *   pnpm add -D dotenv ora chalk cli-progress @types/cli-progress
 *
 * Usage:
 *   npx tsx scripts/sync-storage.ts
 *
 * Requirements:
 *   - .env.prod file with production credentials (NEXT_PUBLIC_SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY)
 *   - .env file with local credentials (NEXT_PUBLIC_SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY)
 */
import { createClient, SupabaseClient } from "@supabase/supabase-js";
import chalk from "chalk";
import cliProgress from "cli-progress";
import dotenv from "dotenv";
import fs from "fs";
import ora from "ora";
import path from "path";

const spinner = ora();
// Helper to parse an env file safely
function getEnvConfig(filePath: string, type: "Production" | "Local") {
  if (!fs.existsSync(filePath)) {
    spinner.fail(
      chalk.red(`Error: ${type} environment file not found at ${filePath}`),
    );
    console.log(
      chalk.yellow(
        `Please ensure you have a ${path.basename(filePath)} file in the root directory.`,
      ),
    );
    process.exit(1);
  }
  const content = fs.readFileSync(filePath, "utf-8");
  return dotenv.parse(content);
}
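
// Note: dotenv.parse only returns the parsed key/value pairs and never mutates
// process.env, which is what lets this script load two env files side by side.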

async function main() {
  console.log(chalk.cyan.bold("\n🔄 Supabase Storage Sync\n"));

  const prodEnvPath = path.join(process.cwd(), ".env.prod");
  const localEnvPath = path.join(process.cwd(), ".env");

  spinner.start("Reading environment configurations...");
  const prodEnv = getEnvConfig(prodEnvPath, "Production");
  const localEnv = getEnvConfig(localEnvPath, "Local");

  const prodUrl = prodEnv.NEXT_PUBLIC_SUPABASE_URL;
  const prodKey = prodEnv.SUPABASE_SERVICE_ROLE_KEY;
  const localUrl =
    localEnv.NEXT_PUBLIC_SUPABASE_URL || "http://127.0.0.1:54321";
  const localKey = localEnv.SUPABASE_SERVICE_ROLE_KEY;

  if (!prodUrl || !prodKey) {
    spinner.fail(chalk.red("Missing production credentials in .env.prod"));
    console.log(
      chalk.yellow(
        "Required: NEXT_PUBLIC_SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY",
      ),
    );
    process.exit(1);
  }
  if (!localKey) {
    spinner.fail(chalk.red("Missing local service role key in .env"));
    console.log(chalk.yellow("Required: SUPABASE_SERVICE_ROLE_KEY"));
    process.exit(1);
  }

  spinner.succeed(chalk.green("Environment configurations loaded"));
  console.log(chalk.gray(`\n Production: ${prodUrl}`));
  console.log(chalk.gray(` Local: ${localUrl}\n`));

  const prod = createClient(prodUrl, prodKey);
  const local = createClient(localUrl, localKey);
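  // Note: both clients hold the service role key, which bypasses RLS and
  // storage policies, so every object can be read and written during the sync.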
| spinner.start("Fetching buckets from production..."); | |
| const { data: buckets, error: bucketsError } = | |
| await prod.storage.listBuckets(); | |
| if (bucketsError) { | |
| spinner.fail(chalk.red("Failed to list buckets from production")); | |
| console.error(chalk.red(bucketsError.message)); | |
| process.exit(1); | |
| } | |
| spinner.succeed(chalk.green(`Found ${buckets.length} buckets in production`)); | |
| for (const bucket of buckets) { | |
| console.log(chalk.blue.bold(`\n📦 Processing bucket: ${bucket.name}`)); | |
| // Create bucket locally if not exists | |
| spinner.start(`Checking bucket ${bucket.name} locally...`); | |
| const { error: getBucketError } = await local.storage.getBucket( | |
| bucket.name, | |
| ); | |
| if (getBucketError) { | |
| spinner.text = `Creating bucket ${bucket.name} locally...`; | |
| const { error: createError } = await local.storage.createBucket( | |
| bucket.name, | |
| { | |
| public: bucket.public, | |
| fileSizeLimit: bucket.file_size_limit, | |
| allowedMimeTypes: bucket.allowed_mime_types, | |
| }, | |
| ); | |
| if (createError) { | |
| spinner.fail(chalk.red(`Failed to create bucket ${bucket.name}`)); | |
| console.error(chalk.red(createError.message)); | |
| } else { | |
| spinner.succeed(chalk.green(`Created bucket ${bucket.name}`)); | |
| } | |
| } else { | |
| spinner.succeed(chalk.green(`Bucket ${bucket.name} exists locally`)); | |
| } | |

    // 1. Scan files
    spinner.start("Scanning files in production...");
    const allFiles = await scanFiles(prod, bucket.name, "");
    spinner.succeed(
      chalk.green(
        `Found ${allFiles.length} files in production bucket ${bucket.name}`,
      ),
    );
    if (allFiles.length === 0) continue;

    // 2. Scan local files to skip existing
    spinner.start("Scanning files in local...");
    const localFiles = await scanFiles(local, bucket.name, "");
    spinner.succeed(
      chalk.green(
        `Found ${localFiles.length} files in local bucket ${bucket.name}`,
      ),
    );
    const localFileSet = new Set(localFiles.map((f) => f.path));
    const filesToSync = allFiles.filter((f) => !localFileSet.has(f.path));

    if (filesToSync.length === 0) {
      console.log(
        chalk.green(
          ` All ${allFiles.length} files already exist locally. Skipping.`,
        ),
      );
      continue;
    }
    console.log(
      chalk.yellow(
        ` ${filesToSync.length} files to sync (${allFiles.length - filesToSync.length} skipped)`,
      ),
    );
    console.log(""); // Add spacing before the progress bar

    const progressBar = new cliProgress.SingleBar({
      format:
        "Syncing |" +
        chalk.cyan("{bar}") +
        "| {percentage}% || {value}/{total} Files || {file}",
      barCompleteChar: "•",
      barIncompleteChar: "·",
      hideCursor: true,
    });
    progressBar.start(filesToSync.length, 0, { file: "Starting..." });

    let processed = 0;
    const failures: string[] = [];
    for (const file of filesToSync) {
      progressBar.update(processed, { file: file.path });
      const { data: blob, error: downloadError } = await prod.storage
        .from(bucket.name)
        .download(file.path);
      if (downloadError) {
        // Defer logging so the progress bar layout stays intact
        failures.push(`download ${file.path}: ${downloadError.message}`);
      } else {
        const arrayBuffer = await blob.arrayBuffer();
        const buffer = Buffer.from(arrayBuffer);
        const { error: uploadError } = await local.storage
          .from(bucket.name)
          .upload(file.path, buffer, {
            contentType: file.metadata?.mimetype as string | undefined,
            upsert: true,
          });
        if (uploadError) {
          failures.push(`upload ${file.path}: ${uploadError.message}`);
        }
      }
      processed++;
      progressBar.update(processed);
    }
    progressBar.stop();
    if (failures.length > 0) {
      console.log(chalk.red(`\n ${failures.length} files failed to sync:`));
      for (const failure of failures) {
        console.log(chalk.red(` - ${failure}`));
      }
    }
  }

  console.log(chalk.green.bold("\n✨ Sync completed successfully!\n"));
}

interface FileItem {
  name: string;
  path: string;
  metadata: Record<string, unknown>;
}
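
/**
 * Recursively lists every file in a bucket. A single list() call returns only
 * one directory level, so folders (entries that come back without an id) are
 * walked recursively, and each level is paginated 100 entries at a time.
 */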
async function scanFiles(
  client: SupabaseClient,
  bucketName: string,
  pathPrefix: string,
): Promise<FileItem[]> {
  let allFiles: FileItem[] = [];

  // Pagination loop
  let offset = 0;
  const limit = 100;
  let hasMore = true;

  while (hasMore) {
    const { data: files, error } = await client.storage
      .from(bucketName)
      .list(pathPrefix || undefined, {
        limit,
        offset,
        sortBy: { column: "name", order: "asc" },
      });
    if (error) {
      console.error(
        chalk.red(`Error listing files in ${bucketName}/${pathPrefix}:`),
        error.message,
      );
      return allFiles;
    }
    if (!files || files.length === 0) {
      hasMore = false;
      break;
    }
    for (const file of files) {
      const isFolder = !file.id;
      if (isFolder) {
        const subFiles = await scanFiles(
          client,
          bucketName,
          `${pathPrefix}${file.name}/`,
        );
        allFiles = [...allFiles, ...subFiles];
      } else {
        allFiles.push({
          name: file.name,
          path: `${pathPrefix}${file.name}`,
          metadata: file.metadata,
        });
      }
    }
    if (files.length < limit) {
      hasMore = false;
    } else {
      offset += limit;
    }
  }
  return allFiles;
}

main().catch((err) => {
  spinner.fail(chalk.red("Unexpected error occurred"));
  console.error(err);
  process.exit(1);
});