- schema/canonical.sql: 29 tables across 3 databases, CHECK constraints, foreign keys, 13 indexes, WAL mode, schema_version tracking - tools/validate-schema.ts: applies DDL to in-memory SQLite, extracts PRAGMA table_info + sqlite_master metadata as JSON - tools/migrate-db.ts: CLI for Tauri→Electrobun data migration with atomic transaction, version fencing, INSERT OR IGNORE - docs/SWITCHING.md: migration guide with prerequisites and troubleshooting
219 lines
7.4 KiB
TypeScript
#!/usr/bin/env bun
|
|
/**
|
|
* migrate-db.ts — Migrate AGOR data from a Tauri (source) database to an
|
|
* Electrobun (target) database using the canonical schema.
|
|
*
|
|
* Usage:
|
|
* bun tools/migrate-db.ts --from <source.db> --to <target.db>
|
|
* bun tools/migrate-db.ts --from ~/.local/share/agor/sessions.db \
|
|
* --to ~/.config/agor/settings.db
|
|
*
|
|
* Behavior:
|
|
* - Opens source DB read-only (never modifies it).
|
|
* - Creates/opens target DB, applies canonical.sql if schema_version absent.
|
|
* - Copies rows for every table present in BOTH source and target.
|
|
* - Wraps all inserts in a single transaction (atomic rollback on failure).
|
|
* - Reports per-table row counts.
|
|
* - Writes migration fence to schema_version in target.
|
|
*/
|
|
|
|
import { Database } from "bun:sqlite";
|
|
import { readFileSync, existsSync } from "fs";
|
|
import { join, resolve } from "path";
|
|
|
|
// ── CLI args ──────────────────────────────────────────────────────────────────
|
|
|
|
function usage(): never {
|
|
console.error("Usage: bun tools/migrate-db.ts --from <source.db> --to <target.db>");
|
|
process.exit(1);
|
|
}
|
|
|
|
const args = process.argv.slice(2);
|
|
let fromPath = "";
|
|
let toPath = "";
|
|
|
|
for (let i = 0; i < args.length; i++) {
|
|
if (args[i] === "--from" && args[i + 1]) fromPath = args[++i];
|
|
else if (args[i] === "--to" && args[i + 1]) toPath = args[++i];
|
|
else if (args[i] === "--help" || args[i] === "-h") usage();
|
|
}
|
|
|
|
if (!fromPath || !toPath) usage();
|
|
|
|
fromPath = resolve(fromPath);
|
|
toPath = resolve(toPath);
|
|
|
|
if (!existsSync(fromPath)) {
|
|
console.error(`Source database not found: ${fromPath}`);
|
|
process.exit(1);
|
|
}
|
|
|
|
// ── Load canonical DDL ────────────────────────────────────────────────────────
|
|
|
|
const schemaPath = join(import.meta.dir, "..", "schema", "canonical.sql");
|
|
let ddl: string;
|
|
try {
|
|
ddl = readFileSync(schemaPath, "utf-8");
|
|
} catch (err) {
|
|
console.error(`Failed to read canonical schema: ${err}`);
|
|
process.exit(1);
|
|
}
|
|
|
|
// ── Open databases ────────────────────────────────────────────────────────────
|
|
|
|
const sourceDb = new Database(fromPath, { readonly: true });
|
|
const targetDb = new Database(toPath);
|
|
|
|
// Apply pragmas to target
|
|
targetDb.exec("PRAGMA journal_mode = WAL");
|
|
targetDb.exec("PRAGMA foreign_keys = OFF"); // Disable during migration for insert order flexibility
|
|
targetDb.exec("PRAGMA busy_timeout = 5000");
|
|
|
|
// Apply canonical schema to target if needed
|
|
const hasVersion = (() => {
|
|
try {
|
|
const row = targetDb
|
|
.query<{ cnt: number }, []>("SELECT COUNT(*) AS cnt FROM schema_version")
|
|
.get();
|
|
return (row?.cnt ?? 0) > 0;
|
|
} catch {
|
|
return false;
|
|
}
|
|
})();
|
|
|
|
if (!hasVersion) {
|
|
console.log("Applying canonical schema to target database...");
|
|
targetDb.exec(ddl);
|
|
}
|
|
|
|
// ── Discover migratable tables ────────────────────────────────────────────────
|
|
|
|
/** Get regular (non-virtual, non-internal) table names from a database. */
|
|
function getTableNames(db: Database): Set<string> {
|
|
const rows = db
|
|
.prepare(
|
|
`SELECT name FROM sqlite_master
|
|
WHERE type = 'table'
|
|
AND name NOT LIKE 'sqlite_%'
|
|
AND name NOT LIKE '%_content'
|
|
AND name NOT LIKE '%_data'
|
|
AND name NOT LIKE '%_idx'
|
|
AND name NOT LIKE '%_config'
|
|
AND name NOT LIKE '%_docsize'
|
|
ORDER BY name`,
|
|
)
|
|
.all() as Array<{ name: string }>;
|
|
return new Set(rows.map((r) => r.name));
|
|
}
|
|
|
|
const sourceTables = getTableNames(sourceDb);
|
|
const targetTables = getTableNames(targetDb);
|
|
|
|
// Only migrate tables present in both source and target
|
|
const migratable = [...sourceTables].filter((t) => targetTables.has(t));
|
|
|
|
if (migratable.length === 0) {
|
|
console.log("No overlapping tables found between source and target.");
|
|
sourceDb.close();
|
|
targetDb.close();
|
|
process.exit(0);
|
|
}
|
|
|
|
console.log(`\nMigrating ${migratable.length} tables from:`);
|
|
console.log(` source: ${fromPath}`);
|
|
console.log(` target: ${toPath}\n`);
|
|
|
|
// ── Migrate data ──────────────────────────────────────────────────────────────
|
|
|
|
/** Outcome of copying one table; collected for the end-of-run report. */
interface MigrationResult {
  // Table name as it appears in sqlite_master.
  table: string;
  // Rows actually sent to the target (INSERT OR IGNORE may discard dupes).
  rows: number;
  // True when the table was deliberately not copied (e.g. schema_version).
  skipped: boolean;
  // Set only when the copy threw; the transaction aborts right after.
  error?: string;
}

// One entry per migratable table, in processing order.
const results: MigrationResult[] = [];
|
|
|
|
const migrate = targetDb.transaction(() => {
|
|
for (const table of migratable) {
|
|
// Skip schema_version — we write our own fence
|
|
if (table === "schema_version") {
|
|
results.push({ table, rows: 0, skipped: true });
|
|
continue;
|
|
}
|
|
|
|
try {
|
|
// Read all rows from source
|
|
const rows = sourceDb.prepare(`SELECT * FROM "${table}"`).all();
|
|
|
|
if (rows.length === 0) {
|
|
results.push({ table, rows: 0, skipped: false });
|
|
continue;
|
|
}
|
|
|
|
// Get column names from the first row
|
|
const columns = Object.keys(rows[0] as Record<string, unknown>);
|
|
const placeholders = columns.map(() => "?").join(", ");
|
|
const colList = columns.map((c) => `"${c}"`).join(", ");
|
|
|
|
const insertStmt = targetDb.prepare(
|
|
`INSERT OR IGNORE INTO "${table}" (${colList}) VALUES (${placeholders})`,
|
|
);
|
|
|
|
let count = 0;
|
|
for (const row of rows) {
|
|
const values = columns.map((c) => (row as Record<string, unknown>)[c] ?? null);
|
|
insertStmt.run(...values);
|
|
count++;
|
|
}
|
|
|
|
results.push({ table, rows: count, skipped: false });
|
|
} catch (err) {
|
|
const msg = err instanceof Error ? err.message : String(err);
|
|
results.push({ table, rows: 0, skipped: false, error: msg });
|
|
throw new Error(`Migration failed on table '${table}': ${msg}`);
|
|
}
|
|
}
|
|
});
|
|
|
|
try {
|
|
migrate();
|
|
} catch (err) {
|
|
console.error(`\nMIGRATION ROLLED BACK: ${err}`);
|
|
sourceDb.close();
|
|
targetDb.close();
|
|
process.exit(1);
|
|
}
|
|
|
|
// ── Write version fence ───────────────────────────────────────────────────────
|
|
|
|
const timestamp = new Date().toISOString();
|
|
targetDb.exec("DELETE FROM schema_version");
|
|
targetDb
|
|
.prepare(
|
|
"INSERT INTO schema_version (version, migration_source, migration_timestamp) VALUES (?, ?, ?)",
|
|
)
|
|
.run(1, "migrate-db", timestamp);
|
|
|
|
// Re-enable foreign keys
|
|
targetDb.exec("PRAGMA foreign_keys = ON");
|
|
|
|
// ── Report ────────────────────────────────────────────────────────────────────
|
|
|
|
console.log("Table Rows Status");
|
|
console.log("─".repeat(50));
|
|
|
|
let totalRows = 0;
|
|
for (const r of results) {
|
|
const status = r.skipped ? "skipped" : r.error ? `ERROR: ${r.error}` : "ok";
|
|
const rowStr = String(r.rows).padStart(8);
|
|
console.log(`${r.table.padEnd(25)}${rowStr} ${status}`);
|
|
totalRows += r.rows;
|
|
}
|
|
|
|
console.log("─".repeat(50));
|
|
console.log(`Total: ${totalRows} rows migrated across ${results.filter((r) => !r.skipped && !r.error).length} tables`);
|
|
console.log(`Version fence: v1 at ${timestamp}`);
|
|
|
|
sourceDb.close();
|
|
targetDb.close();
|