mirror of
https://github.com/nicotsx/ironmount.git
synced 2025-12-10 12:10:51 +01:00
refactor: use short ids to allow changing the name of volumes & repos (#67)
* refactor: use short ids to allow changing the name of volumes & repos * refactor: address PR feedbacks * fix: make short_id non null after initial population
This commit is contained in:
89
app/server/modules/lifecycle/checkpoint.ts
Normal file
89
app/server/modules/lifecycle/checkpoint.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { db } from "../../db/db";
|
||||
import { appMetadataTable, usersTable } from "../../db/schema";
|
||||
import { logger } from "../../utils/logger";
|
||||
import { REQUIRED_MIGRATIONS } from "~/server/core/constants";
|
||||
|
||||
// Namespace prefix for checkpoint rows in appMetadataTable; the full key is
// `migration:<version>`.
const MIGRATION_KEY_PREFIX = "migration:";
|
||||
|
||||
export const recordMigrationCheckpoint = async (version: string): Promise<void> => {
|
||||
const key = `${MIGRATION_KEY_PREFIX}${version}`;
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
|
||||
await db
|
||||
.insert(appMetadataTable)
|
||||
.values({
|
||||
key,
|
||||
value: JSON.stringify({ completedAt: new Date().toISOString() }),
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: appMetadataTable.key,
|
||||
set: {
|
||||
value: JSON.stringify({ completedAt: new Date().toISOString() }),
|
||||
updatedAt: now,
|
||||
},
|
||||
});
|
||||
|
||||
logger.info(`Recorded migration checkpoint for ${version}`);
|
||||
};
|
||||
|
||||
export const hasMigrationCheckpoint = async (version: string): Promise<boolean> => {
|
||||
const key = `${MIGRATION_KEY_PREFIX}${version}`;
|
||||
const result = await db.query.appMetadataTable.findFirst({
|
||||
where: eq(appMetadataTable.key, key),
|
||||
});
|
||||
return result !== undefined;
|
||||
};
|
||||
|
||||
export const validateRequiredMigrations = async (requiredVersions: string[]): Promise<void> => {
|
||||
const userCount = await db.select({ count: sql<number>`count(*)` }).from(usersTable);
|
||||
const isFreshInstall = userCount[0]?.count === 0;
|
||||
|
||||
if (isFreshInstall) {
|
||||
logger.info("Fresh installation detected, skipping migration checkpoint validation.");
|
||||
|
||||
for (const version of requiredVersions) {
|
||||
const hasCheckpoint = await hasMigrationCheckpoint(version);
|
||||
if (!hasCheckpoint) {
|
||||
await recordMigrationCheckpoint(version);
|
||||
}
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
for (const version of requiredVersions) {
|
||||
const hasCheckpoint = await hasMigrationCheckpoint(version);
|
||||
if (!hasCheckpoint) {
|
||||
logger.error(`
|
||||
================================================================================
|
||||
MIGRATION ERROR: Required migration ${version} has not been run.
|
||||
|
||||
You are attempting to start a version of Zerobyte that requires migration
|
||||
checkpoints from previous versions. This typically happens when you skip
|
||||
versions during an upgrade.
|
||||
|
||||
To fix this:
|
||||
1. First upgrade to version ${version} and run the application once
|
||||
2. Validate that everything is still working correctly
|
||||
3. Then upgrade to the current version
|
||||
|
||||
================================================================================
|
||||
`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const getMigrationCheckpoints = async (): Promise<{ version: string; completedAt: string }[]> => {
|
||||
const results = await db.query.appMetadataTable.findMany({
|
||||
where: (table, { like }) => like(table.key, `${MIGRATION_KEY_PREFIX}%`),
|
||||
});
|
||||
|
||||
return results.map((r) => ({
|
||||
version: r.key.replace(MIGRATION_KEY_PREFIX, ""),
|
||||
completedAt: JSON.parse(r.value).completedAt,
|
||||
}));
|
||||
};
|
||||
193
app/server/modules/lifecycle/migration.ts
Normal file
193
app/server/modules/lifecycle/migration.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
import * as fs from "node:fs/promises";
|
||||
import * as path from "node:path";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { db } from "../../db/db";
|
||||
import { repositoriesTable } from "../../db/schema";
|
||||
import { VOLUME_MOUNT_BASE, REPOSITORY_BASE } from "../../core/constants";
|
||||
import { logger } from "../../utils/logger";
|
||||
import { hasMigrationCheckpoint, recordMigrationCheckpoint } from "./checkpoint";
|
||||
import type { RepositoryConfig } from "~/schemas/restic";
|
||||
|
||||
// Version tag recorded as a checkpoint once the short-ID migration completes.
const MIGRATION_VERSION = "v0.14.0";
|
||||
|
||||
// Outcome of one migration pass: `success` is true exactly when `errors` is
// empty; each entry names the failed item (e.g. "volume:foo") and the reason.
interface MigrationResult {
  success: boolean;
  errors: Array<{ name: string; error: string }>;
}
|
||||
|
||||
export class MigrationError extends Error {
|
||||
version: string;
|
||||
failedItems: Array<{ name: string; error: string }>;
|
||||
|
||||
constructor(version: string, failedItems: Array<{ name: string; error: string }>) {
|
||||
const itemNames = failedItems.map((e) => e.name).join(", ");
|
||||
super(`Migration ${version} failed for: ${itemNames}`);
|
||||
this.version = version;
|
||||
this.failedItems = failedItems;
|
||||
this.name = "MigrationError";
|
||||
}
|
||||
}
|
||||
|
||||
export const migrateToShortIds = async () => {
|
||||
const alreadyMigrated = await hasMigrationCheckpoint(MIGRATION_VERSION);
|
||||
if (alreadyMigrated) {
|
||||
logger.debug(`Migration ${MIGRATION_VERSION} already completed, skipping.`);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`Starting short ID migration (${MIGRATION_VERSION})...`);
|
||||
|
||||
const volumeResult = await migrateVolumeFolders();
|
||||
const repoResult = await migrateRepositoryFolders();
|
||||
|
||||
const allErrors = [...volumeResult.errors, ...repoResult.errors];
|
||||
|
||||
if (allErrors.length > 0) {
|
||||
for (const err of allErrors) {
|
||||
logger.error(`Migration failure - ${err.name}: ${err.error}`);
|
||||
}
|
||||
throw new MigrationError(MIGRATION_VERSION, allErrors);
|
||||
}
|
||||
|
||||
await recordMigrationCheckpoint(MIGRATION_VERSION);
|
||||
|
||||
logger.info(`Short ID migration (${MIGRATION_VERSION}) complete.`);
|
||||
};
|
||||
|
||||
const migrateVolumeFolders = async (): Promise<MigrationResult> => {
|
||||
const errors: Array<{ name: string; error: string }> = [];
|
||||
const volumes = await db.query.volumesTable.findMany({});
|
||||
|
||||
for (const volume of volumes) {
|
||||
if (volume.config.backend === "directory") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const oldPath = path.join(VOLUME_MOUNT_BASE, volume.name);
|
||||
const newPath = path.join(VOLUME_MOUNT_BASE, volume.shortId);
|
||||
|
||||
const oldExists = await pathExists(oldPath);
|
||||
const newExists = await pathExists(newPath);
|
||||
|
||||
if (oldExists && !newExists) {
|
||||
try {
|
||||
logger.info(`Migrating volume folder: ${oldPath} -> ${newPath}`);
|
||||
await fs.rename(oldPath, newPath);
|
||||
logger.info(`Successfully migrated volume folder for "${volume.name}"`);
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
errors.push({ name: `volume:${volume.name}`, error: errorMessage });
|
||||
}
|
||||
} else if (oldExists && newExists) {
|
||||
logger.warn(
|
||||
`Both old (${oldPath}) and new (${newPath}) paths exist for volume "${volume.name}". Manual intervention may be required.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return { success: errors.length === 0, errors };
|
||||
};
|
||||
|
||||
const migrateRepositoryFolders = async (): Promise<MigrationResult> => {
|
||||
const errors: Array<{ name: string; error: string }> = [];
|
||||
const repositories = await db.query.repositoriesTable.findMany({});
|
||||
|
||||
for (const repo of repositories) {
|
||||
if (repo.config.backend !== "local") {
|
||||
continue;
|
||||
}
|
||||
|
||||
const config = repo.config as Extract<RepositoryConfig, { backend: "local" }>;
|
||||
|
||||
if (config.name === repo.shortId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const basePath = config.path || REPOSITORY_BASE;
|
||||
const oldPath = path.join(basePath, config.name);
|
||||
const newPath = path.join(basePath, repo.shortId);
|
||||
|
||||
const oldExists = await pathExists(oldPath);
|
||||
const newExists = await pathExists(newPath);
|
||||
|
||||
if (oldExists && !newExists) {
|
||||
try {
|
||||
logger.info(`Migrating repository folder: ${oldPath} -> ${newPath}`);
|
||||
await fs.rename(oldPath, newPath);
|
||||
|
||||
const updatedConfig: RepositoryConfig = {
|
||||
...config,
|
||||
name: repo.shortId,
|
||||
};
|
||||
|
||||
await db
|
||||
.update(repositoriesTable)
|
||||
.set({
|
||||
config: updatedConfig,
|
||||
updatedAt: Math.floor(Date.now() / 1000),
|
||||
})
|
||||
.where(eq(repositoriesTable.id, repo.id));
|
||||
|
||||
logger.info(`Successfully migrated repository folder and config for "${repo.name}"`);
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
errors.push({ name: `repository:${repo.name}`, error: errorMessage });
|
||||
}
|
||||
} else if (oldExists && newExists) {
|
||||
logger.warn(
|
||||
`Both old (${oldPath}) and new (${newPath}) paths exist for repository "${repo.name}". Manual intervention may be required.`,
|
||||
);
|
||||
} else if (!oldExists && !newExists) {
|
||||
try {
|
||||
logger.info(`Updating config.name for repository "${repo.name}" (no folder exists yet)`);
|
||||
|
||||
const updatedConfig: RepositoryConfig = {
|
||||
...config,
|
||||
name: repo.shortId,
|
||||
};
|
||||
|
||||
await db
|
||||
.update(repositoriesTable)
|
||||
.set({
|
||||
config: updatedConfig,
|
||||
updatedAt: Math.floor(Date.now() / 1000),
|
||||
})
|
||||
.where(eq(repositoriesTable.id, repo.id));
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
errors.push({ name: `repository:${repo.name}`, error: errorMessage });
|
||||
}
|
||||
} else if (newExists && !oldExists && config.name !== repo.shortId) {
|
||||
try {
|
||||
logger.info(`Folder already at new path, updating config.name for repository "${repo.name}"`);
|
||||
|
||||
const updatedConfig: RepositoryConfig = {
|
||||
...config,
|
||||
name: repo.shortId,
|
||||
};
|
||||
|
||||
await db
|
||||
.update(repositoriesTable)
|
||||
.set({
|
||||
config: updatedConfig,
|
||||
updatedAt: Math.floor(Date.now() / 1000),
|
||||
})
|
||||
.where(eq(repositoriesTable.id, repo.id));
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
errors.push({ name: `repository:${repo.name}`, error: errorMessage });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { success: errors.length === 0, errors };
|
||||
};
|
||||
|
||||
const pathExists = async (p: string): Promise<boolean> => {
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
Reference in New Issue
Block a user