feat(backend): backup service with retention policy

Nicolas Meienberger
2025-10-25 19:43:36 +02:00
parent 2202ad3247
commit 43e31596f1
16 changed files with 1688 additions and 3 deletions

View File

@@ -0,0 +1,64 @@
import { Hono } from "hono";
import { validator } from "hono-openapi";
import {
createBackupScheduleBody,
createBackupScheduleDto,
deleteBackupScheduleDto,
getBackupScheduleDto,
listBackupSchedulesDto,
runBackupNowDto,
updateBackupScheduleBody,
updateBackupScheduleDto,
} from "./backups.dto";
import { backupsService } from "./backups.service";
export const backupScheduleController = new Hono()
.get("/", listBackupSchedulesDto, async (c) => {
const schedules = await backupsService.listSchedules();
return c.json({ schedules }, 200);
})
.get("/:scheduleId", getBackupScheduleDto, async (c) => {
const scheduleId = c.req.param("scheduleId");
const schedule = await backupsService.getSchedule(Number(scheduleId));
return c.json({ schedule }, 200);
})
.post("/", createBackupScheduleDto, validator("json", createBackupScheduleBody), async (c) => {
const body = c.req.valid("json");
const schedule = await backupsService.createSchedule(body);
return c.json({ message: "Backup schedule created successfully", schedule }, 201);
})
.patch("/:scheduleId", updateBackupScheduleDto, validator("json", updateBackupScheduleBody), async (c) => {
const scheduleId = c.req.param("scheduleId");
const body = c.req.valid("json");
const schedule = await backupsService.updateSchedule(Number(scheduleId), body);
return c.json({ message: "Backup schedule updated successfully", schedule }, 200);
})
.delete("/:scheduleId", deleteBackupScheduleDto, async (c) => {
const scheduleId = c.req.param("scheduleId");
await backupsService.deleteSchedule(Number(scheduleId));
return c.json({ message: "Backup schedule deleted successfully" }, 200);
})
.post("/:scheduleId/run", runBackupNowDto, async (c) => {
const scheduleId = c.req.param("scheduleId");
backupsService.executeBackup(Number(scheduleId)).catch((error) => {
console.error("Backup execution failed:", error);
});
return c.json(
{
message: "Backup started",
backupStarted: true,
},
200,
);
});
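The mount path of this controller is not part of this diff, so the /api/backups prefix below is an assumption for illustration. A minimal client-side sketch of creating a schedule and triggering an immediate run:

// Hypothetical client sketch; the base URL and mount path are assumptions, not part of this diff.
const BASE_URL = "http://localhost:3000/api/backups";

export const createNightlySchedule = async () => {
  const res = await fetch(BASE_URL, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      volumeId: 1,
      repositoryId: "repo-1",
      enabled: true,
      cronExpression: "0 3 * * *", // every day at 03:00
      retentionPolicy: { keepDaily: 7, keepWeekly: 4 },
    }),
  });
  return res.json(); // { message, schedule } with status 201
};

export const runBackupNow = async (scheduleId: number) => {
  const res = await fetch(`${BASE_URL}/${scheduleId}/run`, { method: "POST" });
  return res.json(); // { message: "Backup started", backupStarted: true }
};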

View File

@@ -0,0 +1,205 @@
import { type } from "arktype";
import { describeRoute, resolver } from "hono-openapi";
const retentionPolicySchema = type({
keepLast: "number?",
keepHourly: "number?",
keepDaily: "number?",
keepWeekly: "number?",
keepMonthly: "number?",
keepYearly: "number?",
keepWithinDuration: "string?",
});
export type RetentionPolicy = typeof retentionPolicySchema.infer;
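These retention fields mirror restic's forget flags (--keep-last, --keep-hourly, --keep-daily, --keep-weekly, --keep-monthly, --keep-yearly, --keep-within). The project's restic.forget wrapper is not shown in this commit, so the helper below is only an assumed sketch of that mapping:

// Assumed sketch: how a RetentionPolicy could translate to restic "forget" flags.
// The actual restic.forget wrapper is not part of this diff.
const retentionToArgs = (policy: RetentionPolicy): string[] => {
  const args: string[] = [];
  if (policy.keepLast !== undefined) args.push("--keep-last", String(policy.keepLast));
  if (policy.keepHourly !== undefined) args.push("--keep-hourly", String(policy.keepHourly));
  if (policy.keepDaily !== undefined) args.push("--keep-daily", String(policy.keepDaily));
  if (policy.keepWeekly !== undefined) args.push("--keep-weekly", String(policy.keepWeekly));
  if (policy.keepMonthly !== undefined) args.push("--keep-monthly", String(policy.keepMonthly));
  if (policy.keepYearly !== undefined) args.push("--keep-yearly", String(policy.keepYearly));
  if (policy.keepWithinDuration) args.push("--keep-within", policy.keepWithinDuration);
  return args;
};

// retentionToArgs({ keepDaily: 7, keepWeekly: 4 }) -> ["--keep-daily", "7", "--keep-weekly", "4"]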
const backupScheduleSchema = type({
id: "number",
volumeId: "number",
volumeName: "string",
repositoryId: "string",
repositoryName: "string",
enabled: "boolean",
cronExpression: "string",
retentionPolicy: retentionPolicySchema.or("null"),
excludePatterns: "string[]",
includePatterns: "string[]",
lastBackupAt: "number | null",
lastBackupStatus: "'success' | 'error' | null",
lastBackupError: "string | null",
nextBackupAt: "number | null",
createdAt: "number",
updatedAt: "number",
});
export type BackupScheduleDto = typeof backupScheduleSchema.infer;
/**
* List all backup schedules
*/
export const listBackupSchedulesResponse = type({
schedules: backupScheduleSchema.array(),
});
export type ListBackupSchedulesResponseDto = typeof listBackupSchedulesResponse.infer;
export const listBackupSchedulesDto = describeRoute({
description: "List all backup schedules",
tags: ["Backups"],
operationId: "listBackupSchedules",
responses: {
200: {
description: "List of backup schedules",
content: {
"application/json": {
schema: resolver(listBackupSchedulesResponse),
},
},
},
},
});
/**
* Get a single backup schedule
*/
export const getBackupScheduleResponse = type({
schedule: backupScheduleSchema,
});
export type GetBackupScheduleResponseDto = typeof getBackupScheduleResponse.infer;
export const getBackupScheduleDto = describeRoute({
description: "Get a backup schedule by ID",
tags: ["Backups"],
operationId: "getBackupSchedule",
responses: {
200: {
description: "Backup schedule details",
content: {
"application/json": {
schema: resolver(getBackupScheduleResponse),
},
},
},
},
});
/**
* Create a new backup schedule
*/
export const createBackupScheduleBody = type({
volumeId: "number",
repositoryId: "string",
enabled: "boolean",
cronExpression: "string",
retentionPolicy: retentionPolicySchema.optional(),
excludePatterns: "string[]?",
includePatterns: "string[]?",
tags: "string[]?",
});
export type CreateBackupScheduleBody = typeof createBackupScheduleBody.infer;
export const createBackupScheduleResponse = type({
message: "string",
schedule: backupScheduleSchema,
});
export const createBackupScheduleDto = describeRoute({
description: "Create a new backup schedule for a volume",
operationId: "createBackupSchedule",
tags: ["Backups"],
responses: {
201: {
description: "Backup schedule created successfully",
content: {
"application/json": {
schema: resolver(createBackupScheduleResponse),
},
},
},
},
});
/**
* Update a backup schedule
*/
export const updateBackupScheduleBody = type({
repositoryId: "string?",
enabled: "boolean?",
cronExpression: "string?",
retentionPolicy: retentionPolicySchema.optional(),
excludePatterns: "string[]?",
includePatterns: "string[]?",
tags: "string[]?",
});
export type UpdateBackupScheduleBody = typeof updateBackupScheduleBody.infer;
export const updateBackupScheduleResponse = type({
message: "string",
schedule: backupScheduleSchema,
});
export const updateBackupScheduleDto = describeRoute({
description: "Update a backup schedule",
operationId: "updateBackupSchedule",
tags: ["Backups"],
responses: {
200: {
description: "Backup schedule updated successfully",
content: {
"application/json": {
schema: resolver(updateBackupScheduleResponse),
},
},
},
},
});
/**
* Delete a backup schedule
*/
export const deleteBackupScheduleResponse = type({
message: "string",
});
export const deleteBackupScheduleDto = describeRoute({
description: "Delete a backup schedule",
operationId: "deleteBackupSchedule",
tags: ["Backups"],
responses: {
200: {
description: "Backup schedule deleted successfully",
content: {
"application/json": {
schema: resolver(deleteBackupScheduleResponse),
},
},
},
},
});
/**
* Run a backup immediately
*/
export const runBackupNowResponse = type({
message: "string",
backupStarted: "boolean",
});
export const runBackupNowDto = describeRoute({
description: "Trigger a backup immediately for a schedule",
operationId: "runBackupNow",
tags: ["Backups"],
responses: {
200: {
description: "Backup started successfully",
content: {
"application/json": {
schema: resolver(runBackupNowResponse),
},
},
},
},
});
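The body schemas above are plain arktype types, so they can also be exercised outside Hono. A minimal sketch, assuming the arktype 2.x convention that calling a type returns either the parsed data or a type.errors instance:

// Standalone validation sketch using the schemas above.
import { type } from "arktype";
import { createBackupScheduleBody } from "./backups.dto";

const payload: unknown = {
  volumeId: 1,
  repositoryId: "repo-1",
  enabled: true,
  cronExpression: "0 3 * * *",
};

const out = createBackupScheduleBody(payload);
if (out instanceof type.errors) {
  // out.summary is a human-readable description of what failed to validate
  console.error(out.summary);
} else {
  // out is narrowed to CreateBackupScheduleBody here
  console.log(out.cronExpression);
}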

View File

@@ -0,0 +1,258 @@
import { eq } from "drizzle-orm";
import cron from "node-cron";
import { CronExpressionParser } from "cron-parser";
import { NotFoundError, BadRequestError } from "http-errors-enhanced";
import { db } from "../../db/db";
import { backupSchedulesTable, repositoriesTable, volumesTable } from "../../db/schema";
import { restic } from "../../utils/restic";
import { logger } from "../../utils/logger";
import { getVolumePath } from "../volumes/helpers";
import type { CreateBackupScheduleBody, UpdateBackupScheduleBody } from "./backups.dto";
import { toMessage } from "../../utils/errors";
const calculateNextRun = (cronExpression: string): Date => {
try {
const interval = CronExpressionParser.parse(cronExpression, {
currentDate: new Date(),
tz: "UTC",
});
return interval.next().toDate();
} catch (error) {
logger.error(`Failed to parse cron expression "${cronExpression}": ${error}`);
const fallback = new Date();
fallback.setMinutes(fallback.getMinutes() + 1);
return fallback;
}
};
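For example, given the UTC timezone set above:

// calculateNextRun("0 3 * * *")  -> the next 03:00 UTC occurrence as a Date
// calculateNextRun("not a cron") -> logs the parse error and returns roughly now + 1 minute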
const listSchedules = async () => {
const schedules = await db.query.backupSchedulesTable.findMany({});
return schedules;
};
const getSchedule = async (scheduleId: number) => {
const schedule = await db.query.backupSchedulesTable.findFirst({
where: eq(backupSchedulesTable.id, scheduleId),
});
if (!schedule) {
throw new NotFoundError("Backup schedule not found");
}
return schedule;
};
const createSchedule = async (data: CreateBackupScheduleBody) => {
if (!cron.validate(data.cronExpression)) {
throw new BadRequestError("Invalid cron expression");
}
const volume = await db.query.volumesTable.findFirst({
where: eq(volumesTable.id, data.volumeId),
});
if (!volume) {
throw new NotFoundError("Volume not found");
}
const repository = await db.query.repositoriesTable.findFirst({
where: eq(repositoriesTable.id, data.repositoryId),
});
if (!repository) {
throw new NotFoundError("Repository not found");
}
const existingSchedule = await db.query.backupSchedulesTable.findFirst({
where: eq(backupSchedulesTable.volumeId, data.volumeId),
});
if (existingSchedule) {
throw new BadRequestError("Volume already has a backup schedule");
}
const nextBackupAt = calculateNextRun(data.cronExpression);
const [newSchedule] = await db
.insert(backupSchedulesTable)
.values({
volumeId: data.volumeId,
repositoryId: data.repositoryId,
enabled: data.enabled,
cronExpression: data.cronExpression,
retentionPolicy: data.retentionPolicy ?? null,
excludePatterns: data.excludePatterns ?? [],
includePatterns: data.includePatterns ?? [],
nextBackupAt: nextBackupAt,
})
.returning();
if (!newSchedule) {
throw new Error("Failed to create backup schedule");
}
return newSchedule;
};
const updateSchedule = async (scheduleId: number, data: UpdateBackupScheduleBody) => {
const schedule = await db.query.backupSchedulesTable.findFirst({
where: eq(backupSchedulesTable.id, scheduleId),
});
if (!schedule) {
throw new NotFoundError("Backup schedule not found");
}
if (data.cronExpression && !cron.validate(data.cronExpression)) {
throw new BadRequestError("Invalid cron expression");
}
if (data.repositoryId) {
const repository = await db.query.repositoriesTable.findFirst({
where: eq(repositoriesTable.id, data.repositoryId),
});
if (!repository) {
throw new NotFoundError("Repository not found");
}
}
const cronExpression = data.cronExpression ?? schedule.cronExpression;
const nextBackupAt = data.cronExpression ? calculateNextRun(cronExpression) : schedule.nextBackupAt;
const [updated] = await db
.update(backupSchedulesTable)
.set({ ...data, nextBackupAt, updatedAt: new Date() })
.where(eq(backupSchedulesTable.id, scheduleId))
.returning();
if (!updated) {
throw new Error("Failed to update backup schedule");
}
return updated;
};
const deleteSchedule = async (scheduleId: number) => {
const schedule = await db.query.backupSchedulesTable.findFirst({
where: eq(backupSchedulesTable.id, scheduleId),
});
if (!schedule) {
throw new NotFoundError("Backup schedule not found");
}
await db.delete(backupSchedulesTable).where(eq(backupSchedulesTable.id, scheduleId));
};
const executeBackup = async (scheduleId: number) => {
const schedule = await db.query.backupSchedulesTable.findFirst({
where: eq(backupSchedulesTable.id, scheduleId),
});
if (!schedule) {
throw new NotFoundError("Backup schedule not found");
}
const volume = await db.query.volumesTable.findFirst({
where: eq(volumesTable.id, schedule.volumeId),
});
if (!volume) {
throw new NotFoundError("Volume not found");
}
const repository = await db.query.repositoriesTable.findFirst({
where: eq(repositoriesTable.id, schedule.repositoryId),
});
if (!repository) {
throw new NotFoundError("Repository not found");
}
if (volume.status !== "mounted") {
throw new BadRequestError("Volume is not mounted");
}
logger.info(`Starting backup for volume ${volume.name} to repository ${repository.name}`);
try {
const volumePath = getVolumePath(volume.name);
const backupOptions: {
exclude?: string[];
include?: string[];
tags?: string[];
} = {};
if (schedule.excludePatterns && schedule.excludePatterns.length > 0) {
backupOptions.exclude = schedule.excludePatterns;
}
if (schedule.includePatterns && schedule.includePatterns.length > 0) {
backupOptions.include = schedule.includePatterns;
}
await restic.backup(repository.config, volumePath, backupOptions);
if (schedule.retentionPolicy) {
await restic.forget(repository.config, schedule.retentionPolicy);
}
const nextBackupAt = calculateNextRun(schedule.cronExpression);
await db
.update(backupSchedulesTable)
.set({
lastBackupAt: new Date(),
lastBackupStatus: "success",
lastBackupError: null,
nextBackupAt: nextBackupAt,
updatedAt: new Date(),
})
.where(eq(backupSchedulesTable.id, scheduleId));
logger.info(`Backup completed successfully for volume ${volume.name} to repository ${repository.name}`);
} catch (error) {
logger.error(`Backup failed for volume ${volume.name} to repository ${repository.name}: ${toMessage(error)}`);
await db
.update(backupSchedulesTable)
.set({
lastBackupAt: new Date(),
lastBackupStatus: "error",
lastBackupError: toMessage(error),
updatedAt: new Date(),
})
.where(eq(backupSchedulesTable.id, scheduleId));
throw error;
}
};
const getSchedulesToExecute = async () => {
const now = new Date();
const schedules = await db.query.backupSchedulesTable.findMany({
where: eq(backupSchedulesTable.enabled, true),
});
const schedulesToRun: number[] = [];
for (const schedule of schedules) {
if (!schedule.nextBackupAt || schedule.nextBackupAt <= now) {
schedulesToRun.push(schedule.id);
}
}
return schedulesToRun;
};
export const backupsService = {
listSchedules,
getSchedule,
createSchedule,
updateSchedule,
deleteSchedule,
executeBackup,
getSchedulesToExecute,
};
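The BackupExecutionJob wired up in the startup module is not included in this diff. As a rough sketch under that caveat, written as if it sat next to backups.service (import paths are illustrative), its per-minute tick could be as small as the loop below, using only the service methods exported here:

// Hypothetical sketch of the per-minute backup tick; the real BackupExecutionJob
// (jobs/backup-execution) is not part of this commit view.
import { backupsService } from "./backups.service";
import { logger } from "../../utils/logger";

export const runDueBackups = async () => {
  const dueScheduleIds = await backupsService.getSchedulesToExecute();
  for (const scheduleId of dueScheduleIds) {
    try {
      // Run sequentially so concurrent restic processes don't contend for the same repository.
      await backupsService.executeBackup(scheduleId);
    } catch (error) {
      // executeBackup already records the failure on the schedule row; log and move on.
      logger.error(`Scheduled backup ${scheduleId} failed: ${error}`);
    }
  }
};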

View File

@@ -7,6 +7,7 @@ import { restic } from "../../utils/restic";
import { volumeService } from "../volumes/volume.service";
import { CleanupDanglingMountsJob } from "../../jobs/cleanup-dangling";
import { VolumeHealthCheckJob } from "../../jobs/healthchecks";
import { BackupExecutionJob } from "../../jobs/backup-execution";
export const startup = async () => {
await Scheduler.start();
@@ -30,4 +31,5 @@ export const startup = async () => {
Scheduler.build(CleanupDanglingMountsJob).schedule("0 * * * *");
Scheduler.build(VolumeHealthCheckJob).schedule("* * * * *");
Scheduler.build(BackupExecutionJob).schedule("* * * * *");
};