feat(automatic-backup-to-usb) - done v1
This commit is contained in:
@@ -1,14 +1,19 @@
|
||||
import cron from "node-cron";
|
||||
import fs from "fs";
|
||||
import { storage } from "../storage";
|
||||
import { NotificationTypes } from "@repo/db/types";
|
||||
import { backupDatabaseToPath } from "../services/databaseBackupService";
|
||||
|
||||
/**
|
||||
* Daily cron job to check if users haven't backed up in 7 days
|
||||
* Creates a backup notification if overdue
|
||||
*/
|
||||
export const startBackupCron = () => {
|
||||
cron.schedule("0 9 * * *", async () => {
|
||||
console.log("🔄 Running daily backup check...");
|
||||
cron.schedule("0 2 */3 * *", async () => {
|
||||
// Every 3 calendar days, at 2 AM
|
||||
// cron.schedule("*/10 * * * * *", async () => { // Every 10 seconds (for Test)
|
||||
|
||||
console.log("🔄 Running backup check...");
|
||||
|
||||
const userBatchSize = 100;
|
||||
let userOffset = 0;
|
||||
@@ -23,7 +28,52 @@ export const startBackupCron = () => {
|
||||
if (user.id == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const destination = await storage.getActiveBackupDestination(user.id);
|
||||
const lastBackup = await storage.getLastBackup(user.id);
|
||||
|
||||
// ==============================
|
||||
// CASE 1: Destination exists → auto backup
|
||||
// ==============================
|
||||
if (destination) {
|
||||
if (!fs.existsSync(destination.path)) {
|
||||
await storage.createNotification(
|
||||
user.id,
|
||||
"BACKUP",
|
||||
"❌ Automatic backup failed: external drive not connected."
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const filename = `dental_backup_${Date.now()}.zip`;
|
||||
|
||||
await backupDatabaseToPath({
|
||||
destinationPath: destination.path,
|
||||
filename,
|
||||
});
|
||||
|
||||
await storage.createBackup(user.id);
|
||||
await storage.deleteNotificationsByType(user.id, "BACKUP");
|
||||
|
||||
console.log(`✅ Auto backup successful for user ${user.id}`);
|
||||
continue;
|
||||
} catch (err) {
|
||||
console.error(`Auto backup failed for user ${user.id}`, err);
|
||||
|
||||
await storage.createNotification(
|
||||
user.id,
|
||||
"BACKUP",
|
||||
"❌ Automatic backup failed. Please check your backup destination."
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// ==============================
|
||||
// CASE 2: No destination → fallback to reminder
|
||||
// ==============================
|
||||
|
||||
const daysSince = lastBackup?.createdAt
|
||||
? (Date.now() - new Date(lastBackup.createdAt).getTime()) /
|
||||
(1000 * 60 * 60 * 24)
|
||||
|
||||
@@ -6,6 +6,7 @@ import fs from "fs";
|
||||
import { prisma } from "@repo/db/client";
|
||||
import { storage } from "../storage";
|
||||
import archiver from "archiver";
|
||||
import { backupDatabaseToPath } from "../services/databaseBackupService";
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -33,6 +34,8 @@ router.post("/backup", async (req: Request, res: Response): Promise<any> => {
|
||||
return res.status(401).json({ error: "Unauthorized" });
|
||||
}
|
||||
|
||||
const destination = await storage.getActiveBackupDestination(userId);
|
||||
|
||||
// create a unique tmp directory for directory-format dump
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "dental_backup_")); // MUST
|
||||
|
||||
@@ -240,4 +243,118 @@ router.get("/status", async (req: Request, res: Response): Promise<any> => {
|
||||
}
|
||||
});
|
||||
|
||||
// ==============================
|
||||
// Backup Destination CRUD
|
||||
// ==============================
|
||||
|
||||
// CREATE / UPDATE destination
|
||||
router.post("/destination", async (req, res) => {
|
||||
const userId = req.user?.id;
|
||||
const { path: destinationPath } = req.body;
|
||||
|
||||
if (!userId) return res.status(401).json({ error: "Unauthorized" });
|
||||
if (!destinationPath)
|
||||
return res.status(400).json({ error: "Path is required" });
|
||||
|
||||
// validate path exists
|
||||
if (!fs.existsSync(destinationPath)) {
|
||||
return res.status(400).json({
|
||||
error: "Backup path does not exist or drive not connected",
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const destination = await storage.createBackupDestination(
|
||||
userId,
|
||||
destinationPath
|
||||
);
|
||||
res.json(destination);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
res.status(500).json({ error: "Failed to save backup destination" });
|
||||
}
|
||||
});
|
||||
|
||||
// GET all destinations
|
||||
router.get("/destination", async (req, res) => {
|
||||
const userId = req.user?.id;
|
||||
if (!userId) return res.status(401).json({ error: "Unauthorized" });
|
||||
|
||||
const destinations = await storage.getAllBackupDestination(userId);
|
||||
res.json(destinations);
|
||||
});
|
||||
|
||||
// UPDATE destination
|
||||
router.put("/destination/:id", async (req, res) => {
|
||||
const userId = req.user?.id;
|
||||
const id = Number(req.params.id);
|
||||
const { path: destinationPath } = req.body;
|
||||
|
||||
if (!userId) return res.status(401).json({ error: "Unauthorized" });
|
||||
if (!destinationPath)
|
||||
return res.status(400).json({ error: "Path is required" });
|
||||
|
||||
if (!fs.existsSync(destinationPath)) {
|
||||
return res.status(400).json({ error: "Path does not exist" });
|
||||
}
|
||||
|
||||
const updated = await storage.updateBackupDestination(
|
||||
id,
|
||||
userId,
|
||||
destinationPath
|
||||
);
|
||||
|
||||
res.json(updated);
|
||||
});
|
||||
|
||||
// DELETE destination
|
||||
router.delete("/destination/:id", async (req, res) => {
|
||||
const userId = req.user?.id;
|
||||
const id = Number(req.params.id);
|
||||
|
||||
if (!userId) return res.status(401).json({ error: "Unauthorized" });
|
||||
|
||||
await storage.deleteBackupDestination(id, userId);
|
||||
res.json({ success: true });
|
||||
});
|
||||
|
||||
router.post("/backup-path", async (req, res) => {
|
||||
const userId = req.user?.id;
|
||||
if (!userId) return res.status(401).json({ error: "Unauthorized" });
|
||||
|
||||
const destination = await storage.getActiveBackupDestination(userId);
|
||||
if (!destination) {
|
||||
return res.status(400).json({
|
||||
error: "No backup destination configured",
|
||||
});
|
||||
}
|
||||
|
||||
if (!fs.existsSync(destination.path)) {
|
||||
return res.status(400).json({
|
||||
error:
|
||||
"Backup destination not found. External drive may be disconnected.",
|
||||
});
|
||||
}
|
||||
|
||||
const filename = `dental_backup_${Date.now()}.zip`;
|
||||
|
||||
try {
|
||||
await backupDatabaseToPath({
|
||||
destinationPath: destination.path,
|
||||
filename,
|
||||
});
|
||||
|
||||
await storage.createBackup(userId);
|
||||
await storage.deleteNotificationsByType(userId, "BACKUP");
|
||||
|
||||
res.json({ success: true, filename });
|
||||
} catch (err: any) {
|
||||
console.error(err);
|
||||
res.status(500).json({
|
||||
error: "Backup to destination failed",
|
||||
details: err.message,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
85 lines — apps/Backend/src/services/databaseBackupService.ts (new file)
@@ -0,0 +1,85 @@
|
||||
import { spawn } from "child_process";
|
||||
import fs from "fs";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import archiver from "archiver";
|
||||
|
||||
function safeRmDir(dir: string) {
|
||||
try {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
} catch {}
|
||||
}
|
||||
|
||||
/** Arguments for {@link backupDatabaseToPath}. */
interface BackupToPathParams {
  /** Directory the finished archive is written into (must already exist). */
  destinationPath: string;
  /** File name created inside `destinationPath`, e.g. `dental_backup_<ts>.zip`. */
  filename: string;
}
|
||||
|
||||
export async function backupDatabaseToPath({
|
||||
destinationPath,
|
||||
filename,
|
||||
}: BackupToPathParams): Promise<void> {
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "dental_backup_"));
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const pgDump = spawn(
|
||||
"pg_dump",
|
||||
[
|
||||
"-Fd",
|
||||
"-j",
|
||||
"4",
|
||||
"--no-acl",
|
||||
"--no-owner",
|
||||
"-h",
|
||||
process.env.DB_HOST || "localhost",
|
||||
"-U",
|
||||
process.env.DB_USER || "postgres",
|
||||
process.env.DB_NAME || "dental_db",
|
||||
"-f",
|
||||
tmpDir,
|
||||
],
|
||||
{
|
||||
env: {
|
||||
...process.env,
|
||||
PGPASSWORD: process.env.DB_PASSWORD,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
let pgError = "";
|
||||
|
||||
pgDump.stderr.on("data", (d) => (pgError += d.toString()));
|
||||
|
||||
pgDump.on("close", async (code) => {
|
||||
if (code !== 0) {
|
||||
safeRmDir(tmpDir);
|
||||
return reject(new Error(pgError || "pg_dump failed"));
|
||||
}
|
||||
|
||||
const outputFile = path.join(destinationPath, filename);
|
||||
const outputStream = fs.createWriteStream(outputFile);
|
||||
|
||||
const archive = archiver("zip");
|
||||
|
||||
outputStream.on("error", (err) => {
|
||||
safeRmDir(tmpDir);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
archive.on("error", (err) => {
|
||||
safeRmDir(tmpDir);
|
||||
reject(err);
|
||||
});
|
||||
|
||||
archive.pipe(outputStream);
|
||||
archive.directory(tmpDir + path.sep, false);
|
||||
|
||||
archive.finalize();
|
||||
|
||||
archive.on("end", () => {
|
||||
safeRmDir(tmpDir);
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { DatabaseBackup } from "@repo/db/types";
|
||||
import { DatabaseBackup, BackupDestination } from "@repo/db/types";
|
||||
import { prisma as db } from "@repo/db/client";
|
||||
|
||||
export interface IStorage {
|
||||
@@ -7,6 +7,33 @@ export interface IStorage {
|
||||
getLastBackup(userId: number): Promise<DatabaseBackup | null>;
|
||||
getBackups(userId: number, limit?: number): Promise<DatabaseBackup[]>;
|
||||
deleteBackups(userId: number): Promise<number>; // clears all for user
|
||||
|
||||
// ==============================
|
||||
// Backup Destination methods
|
||||
// ==============================
|
||||
createBackupDestination(
|
||||
userId: number,
|
||||
path: string
|
||||
): Promise<BackupDestination>;
|
||||
|
||||
getActiveBackupDestination(
|
||||
userId: number
|
||||
): Promise<BackupDestination | null>;
|
||||
|
||||
getAllBackupDestination(
|
||||
userId: number
|
||||
): Promise<BackupDestination[]>;
|
||||
|
||||
updateBackupDestination(
|
||||
id: number,
|
||||
userId: number,
|
||||
path: string
|
||||
): Promise<BackupDestination>;
|
||||
|
||||
deleteBackupDestination(
|
||||
id: number,
|
||||
userId: number
|
||||
): Promise<BackupDestination>;
|
||||
}
|
||||
|
||||
export const databaseBackupStorage: IStorage = {
|
||||
@@ -36,4 +63,51 @@ export const databaseBackupStorage: IStorage = {
|
||||
const result = await db.databaseBackup.deleteMany({ where: { userId } });
|
||||
return result.count;
|
||||
},
|
||||
};
|
||||
|
||||
// ==============================
|
||||
// Backup Destination methods
|
||||
// ==============================
|
||||
// Saves a new destination for the user and makes it the single active one:
// every existing row is deactivated first.
async createBackupDestination(userId, path) {
  // deactivate existing destination
  await db.backupDestination.updateMany({
    where: { userId },
    data: { isActive: false },
  });

  // Set isActive explicitly instead of relying on the schema default,
  // mirroring updateBackupDestination — a `false` default would leave the
  // new row invisible to getActiveBackupDestination and silently break
  // automatic backups.
  return db.backupDestination.create({
    data: { userId, path, isActive: true },
  });
},
|
||||
|
||||
// Returns the user's currently active destination, or null when none
// has been configured.
async getActiveBackupDestination(userId) {
  const where = { userId, isActive: true };
  return db.backupDestination.findFirst({ where });
},
|
||||
|
||||
// Lists every destination the user has saved, newest first.
async getAllBackupDestination(userId) {
  return db.backupDestination.findMany({
    orderBy: { createdAt: "desc" },
    where: { userId },
  });
},
|
||||
|
||||
// Points destination `id` at a new path and makes it the active one.
// All of the user's destinations are deactivated first so at most one
// row is ever active.
async updateBackupDestination(id, userId, path) {
  // optional: make this one active
  await db.backupDestination.updateMany({
    where: { userId },
    data: { isActive: false },
  });

  // NOTE(review): `userId` here is a non-unique field inside a unique
  // update() where-clause — this needs Prisma's extendedWhereUnique
  // behavior; confirm the project's Prisma version supports it.
  return db.backupDestination.update({
    where: { id, userId },
    data: { path, isActive: true },
  });
},
|
||||
|
||||
// Removes one destination, scoped to the owning user.
async deleteBackupDestination(id, userId) {
  const target = { id, userId };
  return db.backupDestination.delete({ where: target });
},
|
||||
};
|
||||
Reference in New Issue
Block a user