initial commit

This commit is contained in:
2026-04-04 22:13:55 -04:00
commit 5d77e207c9
10181 changed files with 522212 additions and 0 deletions

13
apps/Backend/.env Executable file
View File

@@ -0,0 +1,13 @@
NODE_ENV="development"
HOST=0.0.0.0
PORT=5000
# FRONTEND_URLS=http://localhost:3000,http://192.168.1.8:3000
# FRONTEND_URLS=http://localhost:3000
FRONTEND_URLS=http://192.168.1.37:3000
SELENIUM_AGENT_BASE_URL=http://localhost:5002
JWT_SECRET = 'dentalsecret'
DB_HOST=localhost
DB_USER=postgres
DB_PASSWORD=mypassword
DB_NAME=dentalapp
DATABASE_URL=postgresql://postgres:mypassword@localhost:5432/dentalapp

13
apps/Backend/.env.example Executable file
View File

@@ -0,0 +1,13 @@
NODE_ENV="development"
HOST=0.0.0.0
PORT=5000
# FRONTEND_URLS=http://localhost:3000,http://192.168.1.8:3000
# FRONTEND_URLS=http://localhost:3000
FRONTEND_URLS=http://192.168.1.37:3000
SELENIUM_AGENT_BASE_URL=http://localhost:5002
JWT_SECRET = 'dentalsecret'
DB_HOST=localhost
DB_USER=postgres
DB_PASSWORD=mypassword
DB_NAME=dentalapp
DATABASE_URL=postgresql://postgres:mypassword@localhost:5432/dentalapp

View File

@@ -0,0 +1,36 @@
> backend@1.0.0 dev
> ts-node-dev --respawn --transpile-only src/index.ts
[INFO] 11:56:33 ts-node-dev ver. 2.0.0 (using ts-node ver. 10.9.2, typescript ver. 5.9.3)
✅ Server running in development mode at http://0.0.0.0:5000
[2026-01-25T16:57:06.447Z] OPTIONS /api/notifications 204 in 3ms
[2026-01-25T16:57:06.448Z] OPTIONS /api/users/ 204 in 0ms
[2026-01-25T16:57:06.454Z] GET /api/notifications 403 in 5ms
[2026-01-25T16:57:06.456Z] GET /api/users/ 403 in 1ms
[2026-01-25T16:57:13.454Z] OPTIONS /api/auth/login 204 in 1ms
[2026-01-25T16:57:13.676Z] POST /api/auth/login 200 in 208ms :: {"user":{"id":2,"username":"admin"},"token…
[2026-01-25T16:57:13.700Z] OPTIONS /api/notifications 204 in 1ms
[2026-01-25T16:57:13.715Z] GET /api/notifications 200 in 13ms :: []
[2026-01-25T16:57:14.010Z] OPTIONS /api/patients/recent 204 in 1ms
[2026-01-25T16:57:14.077Z] GET /api/patients/recent 200 in 60ms :: {"patients":[{"id":129,"firstName":"ROB…
[2026-01-25T16:57:16.531Z] OPTIONS /api/database-management/destination 204 in 1ms
[2026-01-25T16:57:16.535Z] OPTIONS /api/database-management/status 204 in 1ms
[2026-01-25T16:57:16.563Z] GET /api/database-management/destination 200 in 24ms :: [{"id":6,"userId":2,"pa…
[2026-01-25T16:57:16.596Z] GET /api/database-management/status 200 in 48ms :: {"connected":true,"size":"20…
[2026-01-25T16:57:21.719Z] OPTIONS /api/patients/recent 204 in 1ms
[2026-01-25T16:57:21.732Z] GET /api/patients/recent 304 in 9ms :: {"patients":[{"id":129,"firstName":"ROBE…
[2026-01-25T16:57:30.632Z] OPTIONS /api/appointments/day 204 in 0ms
[2026-01-25T16:57:30.633Z] OPTIONS /api/staffs/ 204 in 0ms
[2026-01-25T16:57:30.649Z] GET /api/appointments/day 200 in 11ms :: {"appointments":[],"patients":[]}
[2026-01-25T16:57:30.651Z] GET /api/staffs/ 201 in 6ms :: [{"id":4,"userId":2,"name":"Dr.S","email":null,"…
[2026-01-25T16:57:32.881Z] OPTIONS /api/patients/recent 204 in 0ms
[2026-01-25T16:57:32.882Z] OPTIONS /api/claims/recent 204 in 0ms
[2026-01-25T16:57:32.898Z] GET /api/patients/recent 304 in 12ms :: {"patients":[{"id":129,"firstName":"ROB…
[2026-01-25T16:57:32.922Z] GET /api/claims/recent 200 in 32ms :: {"claims":[{"id":160,"patientId":129,"app…
[2026-01-25T16:57:39.896Z] OPTIONS /api/patients/128 204 in 0ms
[2026-01-25T16:57:39.896Z] OPTIONS /api/staffs/ 204 in 0ms
[2026-01-25T16:57:39.897Z] OPTIONS /api/npiProviders/ 204 in 0ms
[2026-01-25T16:57:39.904Z] GET /api/patients/128 200 in 4ms :: {"id":128,"firstName":"BORIN","lastName":"K…
[2026-01-25T16:57:39.907Z] GET /api/staffs/ 201 in 5ms :: [{"id":4,"userId":2,"name":"Dr.S","email":null,"…
[2026-01-25T16:57:39.908Z] GET /api/npiProviders/ 304 in 4ms :: [{"id":2,"userId":2,"npiNumber":"157873261…

51
apps/Backend/package.json Executable file
View File

@@ -0,0 +1,51 @@
{
"name": "backend",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"dev": "ts-node-dev --respawn --transpile-only src/index.ts",
"build": "tsc",
"start": "node dist/index.js"
},
"keywords": [],
"author": "",
"license": "ISC",
"type": "commonjs",
"dependencies": {
"archiver": "^7.0.1",
"axios": "^1.9.0",
"bcrypt": "^5.1.1",
"cors": "^2.8.5",
"dotenv": "^16.0.3",
"express": "^4.21.2",
"express-session": "^1.18.1",
"form-data": "^4.0.2",
"jsonwebtoken": "^9.0.2",
"multer": "^2.0.0",
"node-cron": "^4.2.1",
"passport": "^0.7.0",
"passport-local": "^1.0.0",
"pdfkit": "^0.17.2",
"socket.io": "^4.8.1",
"ws": "^8.18.0",
"zod": "^3.24.2",
"zod-validation-error": "^3.4.0"
},
"devDependencies": {
"@types/archiver": "^6.0.3",
"@types/bcrypt": "^5.0.2",
"@types/cors": "^2.8.18",
"@types/express": "^5.0.1",
"@types/express-session": "^1.18.0",
"@types/form-data": "^2.2.1",
"@types/jsonwebtoken": "^9.0.9",
"@types/multer": "^1.4.12",
"@types/node": "20.16.11",
"@types/passport": "^1.0.16",
"@types/passport-local": "^1.0.38",
"@types/pdfkit": "^0.17.3",
"@types/ws": "^8.5.13",
"ts-node-dev": "^2.0.0"
}
}

85
apps/Backend/src/app.ts Executable file
View File

@@ -0,0 +1,85 @@
import express from "express";
import cors from "cors";
import routes from "./routes";
import { errorHandler } from "./middlewares/error.middleware";
import { apiLogger } from "./middlewares/logger.middleware";
import authRoutes from "./routes/auth";
import { authenticateJWT } from "./middlewares/auth.middleware";
import dotenv from "dotenv";
import { startBackupCron } from "./cron/backupCheck";
import path from "path";
// Load environment variables from .env before any configuration is read.
dotenv.config();
// Normalized runtime environment ("development", "production", ...);
// falls back to NODE_ENV, then ENV, then "development".
const NODE_ENV = (
  process.env.NODE_ENV ||
  process.env.ENV ||
  "development"
).toLowerCase();
const app = express();
app.use(express.json());
app.use(express.urlencoded({ extended: true })); // For form data
// Log every /api request/response (method, path, status, duration).
app.use(apiLogger);
// --- CORS handling (flexible for dev and strict for prod) ---
/**
 * FRONTEND_URLS env value: comma-separated allowed origins
 * Example: FRONTEND_URLS=http://localhost:3000,http://192.168.1.8:3000
 * (FRONTEND_URL is accepted as a singular fallback.)
 */
const rawFrontendUrls =
  process.env.FRONTEND_URLS || process.env.FRONTEND_URL || "";
// Split the comma-separated list into trimmed, non-empty origin strings.
const FRONTEND_URLS = rawFrontendUrls
  .split(",")
  .map((s) => s.trim())
  .filter(Boolean);
/**
 * Decides whether a request Origin may use the API.
 *
 * - No Origin header (curl/Postman/server-to-server) → allowed.
 * - Origins listed in FRONTEND_URLS → allowed in every environment.
 * - In non-production, local hosts (localhost / 127.0.0.1 / the optional
 *   FRONTEND_LAN_IP) are also allowed.
 * - Production is otherwise a strict whitelist.
 *
 * The origin is parsed with the WHATWG URL API and compared by exact
 * hostname: the previous `startsWith("http://localhost")` prefix check
 * also matched hostile origins such as "http://localhost.evil.com".
 */
function isOriginAllowed(origin?: string | null) {
  if (!origin) return true; // allow non-browser clients (curl/postman)
  // Exact-match whitelist applies in every environment.
  if (FRONTEND_URLS.includes(origin)) return true;
  if (NODE_ENV === "production") {
    // production: strict whitelist — must match configured FRONTEND_URLS exactly
    return false;
  }
  // Dev mode: allow local hosts. Parse so only the hostname is compared.
  let hostname: string;
  try {
    hostname = new URL(origin).hostname;
  } catch {
    return false; // malformed Origin header
  }
  if (
    hostname === "localhost" ||
    hostname === "127.0.0.1" ||
    hostname === "192.168.0.240" // legacy hard-coded dev machine — TODO move into FRONTEND_URLS
  )
    return true;
  // optionally allow the server's LAN IP if FRONTEND_LAN_IP is provided
  const lanIp = process.env.FRONTEND_LAN_IP;
  if (lanIp && hostname === lanIp) return true;
  // fallback: deny if not matched
  return false;
}
// Register the CORS middleware backed by the whitelist logic above.
app.use(
  cors({
    origin: (origin, cb) => {
      if (isOriginAllowed(origin)) return cb(null, true);
      cb(new Error(`CORS: Origin ${origin} not allowed`));
    },
    methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
    allowedHeaders: ["Content-Type", "Authorization"],
    credentials: true,
  })
);
// Serve static files from uploads directory
app.use("/uploads", express.static(path.join(process.cwd(), "uploads")));
// Public auth endpoints (register/login/logout) — no JWT required.
app.use("/api/auth", authRoutes);
// Every other /api route requires a valid JWT (see auth.middleware).
app.use("/api", authenticateJWT, routes);
// Central error handler — must be registered after all routes.
app.use(errorHandler);
// Start the daily backup-check cron job.
startBackupCron();
export default app;

View File

@@ -0,0 +1,100 @@
import cron from "node-cron";
import fs from "fs";
import { storage } from "../storage";
import { NotificationTypes } from "@repo/db/types";
import { backupDatabaseToPath } from "../services/databaseBackupService";
/**
 * Starts a daily cron job (10 PM, server-local time) that sweeps all users:
 *  - CASE 1: the user has an active backup destination → run an automatic
 *    backup there, notifying the user if the destination is missing or the
 *    backup fails;
 *  - CASE 2: no destination → create a reminder notification when the last
 *    backup is 7 or more days old (or has never happened).
 * Users are fetched in batches of 100 to bound memory usage.
 */
export const startBackupCron = () => {
  cron.schedule("0 22 * * *", async () => {
    // Every calendar day, at 10 PM (server-local time)
    // cron.schedule("*/10 * * * * *", async () => { // Every 10 seconds (for Test)
    console.log("🔄 Running backup check...");
    const userBatchSize = 100;
    let userOffset = 0;
    while (true) {
      // Fetch a batch of users
      const users = await storage.getUsers(userBatchSize, userOffset);
      if (!users || users.length === 0) break;
      for (const user of users) {
        try {
          // Skip rows without an id — notifications/backups key off user.id.
          if (user.id == null) {
            continue;
          }
          const destination = await storage.getActiveBackupDestination(user.id);
          const lastBackup = await storage.getLastBackup(user.id);
          // ==============================
          // CASE 1: Destination exists → auto backup
          // ==============================
          if (destination) {
            // A missing path usually means an external drive is unplugged —
            // tell the user rather than failing silently.
            if (!fs.existsSync(destination.path)) {
              await storage.createNotification(
                user.id,
                "BACKUP",
                "❌ Automatic backup failed: external drive not connected."
              );
              continue;
            }
            try {
              const filename = `dental_backup_${Date.now()}.zip`;
              await backupDatabaseToPath({
                destinationPath: destination.path,
                filename,
              });
              // Record the successful backup and clear stale BACKUP alerts.
              await storage.createBackup(user.id);
              await storage.deleteNotificationsByType(user.id, "BACKUP");
              console.log(`✅ Auto backup successful for user ${user.id}`);
              continue;
            } catch (err) {
              console.error(`Auto backup failed for user ${user.id}`, err);
              await storage.createNotification(
                user.id,
                "BACKUP",
                "❌ Automatic backup failed. Please check your backup destination."
              );
              continue;
            }
          }
          // ==============================
          // CASE 2: No destination → fallback to reminder
          // ==============================
          // Days since the last backup; Infinity when none exists yet, which
          // always triggers the reminder below.
          const daysSince = lastBackup?.createdAt
            ? (Date.now() - new Date(lastBackup.createdAt).getTime()) /
              (1000 * 60 * 60 * 24)
            : Infinity;
          if (daysSince >= 7) {
            await storage.createNotification(
              user.id,
              "BACKUP" as NotificationTypes,
              "⚠️ It has been more than 7 days since your last backup."
            );
            console.log(`Notification created for user ${user.id}`);
          }
        } catch (err) {
          // One failing user must not abort the whole sweep.
          console.error(`Error processing user ${user.id}:`, err);
        }
      }
      userOffset += userBatchSize; // next user batch
    }
    console.log("✅ Daily backup check completed.");
  });
};

53
apps/Backend/src/index.ts Executable file
View File

@@ -0,0 +1,53 @@
import app from "./app";
import dotenv from "dotenv";
import http from "http";
import { initSocket } from "./socket";
// Load environment variables before reading any configuration.
dotenv.config();
// Normalized runtime environment; falls back to ENV, then "development".
const NODE_ENV = (
  process.env.NODE_ENV ||
  process.env.ENV ||
  "development"
).toLowerCase();
const HOST = process.env.HOST || "0.0.0.0";
const PORT = Number(process.env.PORT) || 5000;
// HTTP server from express app
const server = http.createServer(app);
// Initialize socket.io on this server
initSocket(server);
server.listen(PORT, HOST, () => {
  console.log(
    `✅ Server running in ${NODE_ENV} mode at http://${HOST}:${PORT}`
  );
});
// Handle startup errors
server.on("error", (err: NodeJS.ErrnoException) => {
  // EADDRINUSE (port already bound) is the common case — give a clear hint.
  if (err.code === "EADDRINUSE") {
    console.error(`❌ Port ${PORT} is already in use`);
  } else {
    console.error("❌ Server failed to start:", err);
  }
  process.exit(1); // Exit with failure
});
// Graceful shutdown: stop accepting new connections, then exit once
// in-flight requests have finished.
const shutdown = (signal: string) => {
  console.log(`⚡ Received ${signal}, shutting down gracefully...`);
  server.close(() => {
    console.log("✅ HTTP server closed");
    // TODO: Close DB connections if needed
    // db.$disconnect().then(() => console.log("✅ Database disconnected"));
    process.exit(0);
  });
};
process.on("SIGINT", () => shutdown("SIGINT"));
process.on("SIGTERM", () => shutdown("SIGTERM"));

View File

@@ -0,0 +1,26 @@
import jwt from 'jsonwebtoken';
import { Request, Response, NextFunction } from 'express';
// Secret used for signing/verifying JWTs.
// NOTE(review): the hard-coded fallback is unsafe outside local development —
// JWT_SECRET must be set in the environment before deploying.
const JWT_SECRET = process.env.JWT_SECRET || 'your-jwt-secret';

/**
 * Express middleware that authenticates requests via a JWT bearer token.
 *
 * Expects the standard `Authorization: Bearer <token>` header; any other
 * scheme (or a missing header) is rejected with 401. An invalid or expired
 * token is rejected with 403. On success the decoded payload is attached to
 * `req.user` and the request proceeds.
 */
export function authenticateJWT(req: Request, res: Response, next: NextFunction): void {
  // Only accept the standard "Bearer <token>" form; previously any scheme
  // (e.g. "Basic xyz") had its second word treated as a JWT.
  const [scheme, token] = req.header('Authorization')?.split(' ') ?? [];
  if (!token || scheme !== 'Bearer') {
    res.status(401).send("Access denied. No token provided.");
    return;
  }
  // Verify JWT
  jwt.verify(token, JWT_SECRET, (err, decoded) => {
    if (err) {
      return res.status(403).send("Forbidden. Invalid token.");
    }
    // Attach the decoded user data to the request object
    req.user = decoded as Express.User;
    next(); // Proceed to the next middleware or route handler
  });
}

View File

@@ -0,0 +1,6 @@
import { Request, Response, NextFunction } from 'express';
/**
 * Centralized Express error handler. Logs the error, then responds with the
 * error's own HTTP status and message when present, defaulting to a plain
 * 500 "Internal Server Error".
 */
export const errorHandler = (err: any, _req: Request, res: Response, _next: NextFunction) => {
  console.error(err);
  const status = err.status || 500;
  const message = err.message || 'Internal Server Error';
  res.status(status).json({ message });
};

View File

@@ -0,0 +1,33 @@
import { Request, Response, NextFunction } from "express";
function log(message: string) {
console.log(`[${new Date().toISOString()}] ${message}`);
}
export function apiLogger(req: Request, res: Response, next: NextFunction) {
const start = Date.now();
const path = req.path;
let capturedJsonResponse: Record<string, any> | undefined = undefined;
const originalResJson = res.json;
res.json = function (bodyJson, ...args) {
capturedJsonResponse = bodyJson;
return originalResJson.apply(res, [bodyJson, ...args]);
};
res.on("finish", () => {
const duration = Date.now() - start;
if (path.startsWith("/api")) {
let logLine = `${req.method} ${path} ${res.statusCode} in ${duration}ms`;
if (capturedJsonResponse) {
logLine += ` :: ${JSON.stringify(capturedJsonResponse)}`;
}
if (logLine.length > 80) {
logLine = logLine.slice(0, 79) + "…";
}
log(logLine);
}
});
next();
}

View File

@@ -0,0 +1,165 @@
import { Router, Request, Response } from "express";
import { storage } from "../storage";
import { prisma } from "@repo/db/client";
import {
insertAppointmentProcedureSchema,
updateAppointmentProcedureSchema,
} from "@repo/db/types";
const router = Router();

/**
 * GET /api/appointment-procedures/:appointmentId
 * Get all procedures for an appointment
 */
router.get("/:appointmentId", async (req: Request, res: Response) => {
  try {
    const appointmentId = Number(req.params.appointmentId);
    if (Number.isNaN(appointmentId)) {
      return res.status(400).json({ message: "Invalid appointmentId" });
    }
    return res.json(await storage.getByAppointmentId(appointmentId));
  } catch (err: any) {
    console.error("GET appointment procedures error", err);
    return res.status(500).json({ message: err.message ?? "Server error" });
  }
});

/**
 * GET /api/appointment-procedures/prefill-from-appointment/:appointmentId
 * Builds claim prefill data from an existing appointment.
 */
router.get(
  "/prefill-from-appointment/:appointmentId",
  async (req: Request, res: Response) => {
    try {
      const apptId = Number(req.params.appointmentId);
      // Note: also rejects 0, matching the original falsy check.
      if (!apptId || isNaN(apptId)) {
        return res.status(400).json({ error: "Invalid appointmentId" });
      }
      const data = await storage.getPrefillDataByAppointmentId(apptId);
      if (data) {
        return res.json(data);
      }
      return res.status(404).json({ error: "Appointment not found" });
    } catch (err: any) {
      console.error("prefill-from-appointment error", err);
      return res
        .status(500)
        .json({ error: err.message ?? "Failed to prefill claim data" });
    }
  }
);
/**
 * POST /api/appointment-procedures
 * Add single manual procedure
 */
router.post("/", async (req: Request, res: Response) => {
  try {
    const payload = insertAppointmentProcedureSchema.parse(req.body);
    const created = await storage.createProcedure(payload);
    return res.json(created);
  } catch (err: any) {
    console.error("POST appointment procedure error", err);
    // Zod validation failures are client errors; anything else is a 500.
    if (err.name === "ZodError") {
      return res.status(400).json({ message: err.errors });
    }
    return res.status(500).json({ message: err.message ?? "Server error" });
  }
});

/**
 * POST /api/appointment-procedures/bulk
 * Add multiple procedures (combos)
 */
router.post("/bulk", async (req: Request, res: Response) => {
  try {
    const rows = req.body;
    const hasRows = Array.isArray(rows) && rows.length > 0;
    if (!hasRows) {
      return res.status(400).json({ message: "Invalid payload" });
    }
    const count = await storage.createProceduresBulk(rows);
    return res.json({ success: true, count });
  } catch (err: any) {
    console.error("POST bulk appointment procedures error", err);
    return res.status(500).json({ message: err.message ?? "Server error" });
  }
});
/**
 * PUT /api/appointment-procedures/:id
 * Update a procedure
 */
router.put("/:id", async (req: Request, res: Response) => {
  try {
    const id = Number(req.params.id);
    if (Number.isNaN(id)) {
      return res.status(400).json({ message: "Invalid id" });
    }
    const changes = updateAppointmentProcedureSchema.parse(req.body);
    return res.json(await storage.updateProcedure(id, changes));
  } catch (err: any) {
    console.error("PUT appointment procedure error", err);
    if (err.name === "ZodError") {
      return res.status(400).json({ message: err.errors });
    }
    return res.status(500).json({ message: err.message ?? "Server error" });
  }
});

/**
 * DELETE /api/appointment-procedures/:id
 * Delete single procedure
 */
router.delete("/:id", async (req: Request, res: Response) => {
  try {
    const id = Number(req.params.id);
    if (Number.isNaN(id)) {
      return res.status(400).json({ message: "Invalid id" });
    }
    await storage.deleteProcedure(id);
    return res.json({ success: true });
  } catch (err: any) {
    console.error("DELETE appointment procedure error", err);
    return res.status(500).json({ message: err.message ?? "Server error" });
  }
});

/**
 * DELETE /api/appointment-procedures/clear/:appointmentId
 * Remove every procedure attached to the given appointment.
 */
router.delete("/clear/:appointmentId", async (req: Request, res: Response) => {
  try {
    const apptId = Number(req.params.appointmentId);
    if (Number.isNaN(apptId)) {
      return res.status(400).json({ message: "Invalid appointmentId" });
    }
    await storage.clearByAppointmentId(apptId);
    return res.json({ success: true });
  } catch (err: any) {
    console.error("CLEAR appointment procedures error", err);
    return res.status(500).json({ message: err.message ?? "Server error" });
  }
});

export default router;

View File

@@ -0,0 +1,439 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { storage } from "../storage";
import { z } from "zod";
import {
insertAppointmentSchema,
updateAppointmentSchema,
} from "@repo/db/types";
const router = Router();

/** GET /api/appointments/all — every appointment in the system. */
router.get("/all", async (req: Request, res: Response): Promise<any> => {
  try {
    res.json(await storage.getAllAppointments());
  } catch (error) {
    res.status(500).json({ message: "Failed to retrieve all appointments" });
  }
});
/**
 * GET /api/appointments/day?date=YYYY-MM-DD
 * Response: { appointments: Appointment[], patients: Patient[] }
 *
 * The date is interpreted as a literal UTC calendar day so results are
 * stable regardless of the server's local timezone.
 */
router.get("/day", async (req: Request, res: Response): Promise<any> => {
  // Strict YYYY-MM-DD shape check (format only, not calendar validity).
  function isValidYMD(s: string) {
    return /^\d{4}-\d{2}-\d{2}$/.test(s);
  }
  try {
    const rawDate = req.query.date as string | undefined;
    if (!rawDate || !isValidYMD(rawDate)) {
      return res.status(400).json({ message: "Date query param is required." });
    }
    if (!req.user) return res.status(401).json({ message: "Unauthorized" });
    // Build literal UTC day bounds from the YYYY-MM-DD query string
    const start = new Date(`${rawDate}T00:00:00.000Z`);
    const end = new Date(`${rawDate}T23:59:59.999Z`);
    // Catches impossible dates (e.g. 2026-02-30) that pass the regex above.
    if (isNaN(start.getTime()) || isNaN(end.getTime())) {
      return res.status(400).json({ message: "Invalid date format" });
    }
    // Call the storage method that takes a start/end range (no change to storage needed)
    const appointments = await storage.getAppointmentsOnRange(start, end);
    // dedupe patient ids referenced by those appointments
    const patientIds = Array.from(
      new Set(appointments.map((a) => a.patientId).filter(Boolean))
    );
    const patients = patientIds.length
      ? await storage.getPatientsByIds(patientIds)
      : [];
    return res.json({ appointments, patients });
  } catch (err) {
    console.error("Error in /api/appointments/day:", err);
    res.status(500).json({ message: "Failed to load appointments for date" });
  }
});
// Get recent appointments (paginated)
router.get("/recent", async (req: Request, res: Response) => {
  try {
    // Clamp paging params: limit >= 1 (default 10), offset >= 0 (default 0).
    const limit = Math.max(1, parseInt(req.query.limit as string) || 10);
    const offset = Math.max(0, parseInt(req.query.offset as string) || 0);
    const data = await storage.getRecentAppointments(limit, offset);
    res.json({ data, limit, offset });
  } catch (err) {
    res.status(500).json({ message: "Failed to get recent appointments" });
  }
});
/**
 * GET /api/appointments/:id — fetch a single appointment.
 * Returns 400 for a missing or non-numeric id, 404 when not found.
 */
router.get(
  "/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const appointmentIdParam = req.params.id;
      // Ensure that the id param exists and is a valid number
      if (!appointmentIdParam) {
        return res.status(400).json({ message: "Appointment ID is required" });
      }
      const appointmentId = parseInt(appointmentIdParam);
      // Reject non-numeric ids early instead of querying storage with NaN
      // (consistent with the other routes in this router).
      if (isNaN(appointmentId)) {
        return res.status(400).json({ message: "Invalid appointment ID" });
      }
      const appointment = await storage.getAppointment(appointmentId);
      if (!appointment) {
        return res.status(404).json({ message: "Appointment not found" });
      }
      res.json(appointment);
    } catch (error) {
      res.status(500).json({ message: "Failed to retrieve appointment" });
    }
  }
);
// Get all appointments for a specific patient
router.get(
  "/:patientId/appointments",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const rawPatientId = req.params.patientId;
      if (!rawPatientId) {
        return res.status(400).json({ message: "Patient ID is required" });
      }
      const patientId = parseInt(rawPatientId);
      if (Number.isNaN(patientId)) {
        return res.status(400).json({ message: "Invalid patient ID" });
      }
      const patient = await storage.getPatient(patientId);
      if (!patient) {
        return res.status(404).json({ message: "Patient not found" });
      }
      res.json(await storage.getAppointmentsByPatientId(patientId));
    } catch (err) {
      res.status(500).json({ message: "Failed to get patient appointments" });
    }
  }
);

/**
 * GET /api/appointments/:id/patient — the patient an appointment belongs to.
 */
router.get(
  "/:id/patient",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const rawId = req.params.id;
      if (!rawId) {
        return res.status(400).json({ message: "Appointment ID is required" });
      }
      const apptId = parseInt(rawId, 10);
      const badId = Number.isNaN(apptId) || apptId <= 0;
      if (badId) {
        return res.status(400).json({ message: "Invalid appointment ID" });
      }
      const patient = await storage.getPatientFromAppointmentId(apptId);
      if (patient) {
        return res.json(patient);
      }
      return res
        .status(404)
        .json({ message: "Patient not found for the given appointment" });
    } catch (err) {
      return res
        .status(500)
        .json({ message: "Failed to retrieve patient for appointment" });
    }
  }
);
/**
 * POST /api/appointments/upsert — create or reschedule an appointment.
 *
 * Starting at the requested time, scans forward in 15-minute steps (each
 * slot is 30 minutes long) until a time free for the staff member is found,
 * or the day is exhausted (last start no later than 18:30). If the patient
 * already has an appointment that day, that appointment is updated instead
 * of creating a new one.
 */
router.post(
  "/upsert",
  async (req: Request, res: Response): Promise<any> => {
    try {
      // Validate request body
      const appointmentData = insertAppointmentSchema.parse({
        ...req.body,
        userId: req.user!.id,
      });
      const originalStartTime = appointmentData.startTime;
      const MAX_END_TIME = "18:30";
      // 1. Verify patient exists and belongs to user
      const patient = await storage.getPatient(appointmentData.patientId);
      if (!patient) {
        return res.status(404).json({ message: "Patient not found" });
      }
      // 2. Attempt to find the next available slot
      let [hour, minute] = originalStartTime.split(":").map(Number);
      const pad = (n: number) => n.toString().padStart(2, "0");
      // Step by 15 minutes to support quarter-hour starts, but keep appointment duration 30 mins
      const STEP_MINUTES = 15;
      const APPT_DURATION_MINUTES = 30;
      // Lexicographic compare is safe here because both sides are
      // zero-padded "HH:MM" strings.
      while (`${pad(hour)}:${pad(minute)}` <= MAX_END_TIME) {
        const currentStartTime = `${pad(hour)}:${pad(minute)}`;
        // Check patient appointment at this time
        const sameDayAppointment =
          await storage.getPatientAppointmentByDateTime(
            appointmentData.patientId,
            appointmentData.date,
            currentStartTime
          );
        // Check staff conflict at this time
        const staffConflict = await storage.getStaffAppointmentByDateTime(
          appointmentData.staffId,
          appointmentData.date,
          currentStartTime,
          sameDayAppointment?.id // Ignore self if updating
        );
        if (!staffConflict) {
          // Compute the slot's end time, carrying minutes into the hour.
          const endMinute = minute + APPT_DURATION_MINUTES;
          let endHour = hour + Math.floor(endMinute / 60);
          let realEndMinute = endMinute % 60;
          const currentEndTime = `${pad(endHour)}:${pad(realEndMinute)}`;
          const payload = {
            ...appointmentData,
            startTime: currentStartTime,
            endTime: currentEndTime,
          };
          let responseData;
          // Patient already booked that day → update in place (upsert).
          if (sameDayAppointment?.id !== undefined) {
            const updated = await storage.updateAppointment(
              sameDayAppointment.id,
              payload
            );
            responseData = {
              ...updated,
              originalRequestedTime: originalStartTime,
              finalScheduledTime: currentStartTime,
              message:
                originalStartTime !== currentStartTime
                  ? `Your requested time (${originalStartTime}) was unavailable. Appointment was updated to ${currentStartTime}.`
                  : `Appointment successfully updated at ${currentStartTime}.`,
            };
            return res.status(200).json(responseData);
          }
          const created = await storage.createAppointment(payload);
          responseData = {
            ...created,
            originalRequestedTime: originalStartTime,
            finalScheduledTime: currentStartTime,
            message:
              originalStartTime !== currentStartTime
                ? `Your requested time (${originalStartTime}) was unavailable. Appointment was scheduled at ${currentStartTime}.`
                : `Appointment successfully scheduled at ${currentStartTime}.`,
          };
          return res.status(201).json(responseData);
        }
        // Move to next STEP_MINUTES slot
        minute += STEP_MINUTES;
        if (minute >= 60) {
          hour += Math.floor(minute / 60);
          minute = minute % 60;
        }
      }
      // No free slot left today for this staff member.
      return res.status(409).json({
        message:
          "No available slots remaining until 6:30 PM for this Staff. Please choose another day.",
      });
    } catch (error) {
      console.error("Error in upsert appointment:", error);
      if (error instanceof z.ZodError) {
        console.log(
          "Validation error details:",
          JSON.stringify(error.format(), null, 2)
        );
        return res.status(400).json({
          message: "Validation error",
          errors: error.format(),
        });
      }
      res.status(500).json({
        message: "Failed to upsert appointment",
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }
);
/**
 * PUT /api/appointments/:id — update an existing appointment.
 *
 * Validates the body, verifies the patient and appointment exist, rejects
 * patientId changes, checks patient/staff scheduling conflicts, and resets
 * the eligibility status to UNKNOWN when the appointment date changes.
 */
router.put(
  "/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const appointmentData = updateAppointmentSchema.parse({
        ...req.body,
        userId: req.user!.id,
      });
      const appointmentIdParam = req.params.id;
      if (!appointmentIdParam) {
        return res.status(400).json({ message: "Appointment ID is required" });
      }
      const appointmentId = parseInt(appointmentIdParam);
      // 1. Verify patient exists and belongs to user
      const patient = await storage.getPatient(appointmentData.patientId);
      if (!patient) {
        return res.status(404).json({ message: "Patient not found" });
      }
      // 2. Check if appointment exists and belongs to user
      const existingAppointment = await storage.getAppointment(appointmentId);
      if (!existingAppointment) {
        console.log("Appointment not found:", appointmentId);
        return res.status(404).json({ message: "Appointment not found" });
      }
      // 4. Reject patientId change (not allowed)
      if (
        appointmentData.patientId &&
        appointmentData.patientId !== existingAppointment.patientId
      ) {
        return res
          .status(400)
          .json({ message: "Changing patientId is not allowed" });
      }
      // 5. Check for conflicting appointments (same patient OR staff at same time)
      // Fields omitted from the update fall back to the stored values.
      const date = appointmentData.date ?? existingAppointment.date;
      const startTime =
        appointmentData.startTime ?? existingAppointment.startTime;
      const staffId = appointmentData.staffId ?? existingAppointment.staffId;
      // Pass appointmentId so the appointment doesn't conflict with itself.
      const patientConflict = await storage.getPatientConflictAppointment(
        existingAppointment.patientId,
        date,
        startTime,
        appointmentId
      );
      if (patientConflict) {
        return res.status(409).json({
          message: "This patient already has an appointment at this time.",
        });
      }
      const staffConflict = await storage.getStaffConflictAppointment(
        staffId,
        date,
        startTime,
        appointmentId
      );
      if (staffConflict) {
        return res.status(409).json({
          message: "This time slot is already booked for the selected staff.",
        });
      }
      // 6. if date gets updated, then also update the aptmnt status to unknown.
      // Normalize to YYYY-MM-DD to avoid timezone problems (model uses @db.Date)
      const oldYMD = new Date(existingAppointment.date)
        .toISOString()
        .slice(0, 10);
      const newYMD = new Date(date).toISOString().slice(0, 10);
      const isDateChanged = oldYMD !== newYMD;
      const updatePayload = {
        ...appointmentData,
        ...(isDateChanged ? { eligibilityStatus: "UNKNOWN" as const } : {}),
      };
      // Update appointment
      const updatedAppointment = await storage.updateAppointment(
        appointmentId,
        updatePayload
      );
      return res.json(updatedAppointment);
    } catch (error) {
      console.error("Error updating appointment:", error);
      if (error instanceof z.ZodError) {
        console.log(
          "Validation error details:",
          JSON.stringify(error.format(), null, 2)
        );
        return res.status(400).json({
          message: "Validation error",
          errors: error.format(),
        });
      }
      res.status(500).json({
        message: "Failed to update appointment",
        error: error instanceof Error ? error.message : String(error),
      });
    }
  }
);
/**
 * DELETE /api/appointments/:id — delete an appointment.
 * Returns 400 for a missing/non-numeric id, 404 when not found, and 403
 * when the appointment belongs to another user. 204 on success.
 */
router.delete("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const appointmentIdParam = req.params.id;
    if (!appointmentIdParam) {
      return res.status(400).json({ message: "Appointment ID is required" });
    }
    const appointmentId = parseInt(appointmentIdParam);
    // Reject non-numeric ids early instead of querying storage with NaN
    // (consistent with the other routes in this router).
    if (isNaN(appointmentId)) {
      return res.status(400).json({ message: "Invalid appointment ID" });
    }
    // Check if appointment exists and belongs to user
    const existingAppointment = await storage.getAppointment(appointmentId);
    if (!existingAppointment) {
      return res.status(404).json({ message: "Appointment not found" });
    }
    // Only the owner may delete their appointment.
    if (existingAppointment.userId !== req.user!.id) {
      return res.status(403).json({
        message:
          "Forbidden: Appointment belongs to a different user, you can't delete this.",
      });
    }
    // Delete appointment
    await storage.deleteAppointment(appointmentId);
    res.status(204).send();
  } catch (error) {
    res.status(500).json({ message: "Failed to delete appointment" });
  }
});
export default router;

99
apps/Backend/src/routes/auth.ts Executable file
View File

@@ -0,0 +1,99 @@
import express, { Request, Response, NextFunction } from "express";
import jwt from "jsonwebtoken";
import bcrypt from "bcrypt";
import { storage } from "../storage";
import { UserUncheckedCreateInputObjectSchema } from "@repo/db/usedSchemas";
import { z } from "zod";
// Shape of a persisted user row, derived from the generated schema object.
type SelectUser = z.infer<typeof UserUncheckedCreateInputObjectSchema>;
const JWT_SECRET = process.env.JWT_SECRET || "your-jwt-secret";
const JWT_EXPIRATION = "24h"; // JWT expiration time (1 day)

/** Hashes a plaintext password with bcrypt (10 salt rounds). */
async function hashPassword(password: string) {
  const saltRounds = 10; // Salt rounds for bcrypt
  return bcrypt.hash(password, saltRounds);
}

/** Compares a supplied plaintext password against a stored bcrypt hash. */
async function comparePasswords(supplied: string, stored: string) {
  return bcrypt.compare(supplied, stored);
}

/** Signs a JWT carrying the user's id and username. */
function generateToken(user: SelectUser) {
  const claims = { id: user.id, username: user.username };
  return jwt.sign(claims, JWT_SECRET, { expiresIn: JWT_EXPIRATION });
}

const router = express.Router();
// User registration route
router.post(
"/register",
async (req: Request, res: Response, next: NextFunction): Promise<any> => {
try {
const existingUser = await storage.getUserByUsername(req.body.username);
if (existingUser) {
return res.status(400).send("Username already exists");
}
const hashedPassword = await hashPassword(req.body.password);
const user = await storage.createUser({
...req.body,
password: hashedPassword,
});
// Generate a JWT token for the user after successful registration
const token = generateToken(user);
const { password, ...safeUser } = user;
return res.status(201).json({ user: safeUser, token });
} catch (error) {
console.error("Registration error:", error);
return res.status(500).json({ error: "Internal server error" });
}
}
);
// User login route
router.post(
  "/login",
  async (req: Request, res: Response, next: NextFunction): Promise<any> => {
    try {
      const user = await storage.getUserByUsername(req.body.username);
      if (!user) {
        return res.status(401).json({ error: "Invalid username or password" });
      }
      const isPasswordMatch = await comparePasswords(
        req.body.password,
        user.password
      );
      if (!isPasswordMatch) {
        // Fixed typo ("Invalid password or password"); message kept
        // deliberately vague so it doesn't reveal whether the username exists.
        return res.status(401).json({ error: "Invalid username or password" });
      }
      // Generate a JWT token for the user after successful login
      const token = generateToken(user);
      // Strip the password hash before sending the user back.
      const { password, ...safeUser } = user;
      return res.status(200).json({ user: safeUser, token });
    } catch (error) {
      return res.status(500).json({ error: "Internal server error" });
    }
  }
);
// Logout route (client-side action to remove the token)
router.post("/logout", (_req: Request, res: Response) => {
  // Stateless JWT auth: the server has nothing to invalidate — the client
  // simply discards its token.
  res.status(200).send("Logged out successfully");
});

export default router;

578
apps/Backend/src/routes/claims.ts Executable file
View File

@@ -0,0 +1,578 @@
import { Router } from "express";
import { Request, Response } from "express";
import { storage } from "../storage";
import { z } from "zod";
import multer from "multer";
import { forwardToSeleniumClaimAgent } from "../services/seleniumClaimClient";
import path from "path";
import axios from "axios";
import { Prisma } from "@repo/db/generated/prisma";
import { Decimal } from "decimal.js";
import {
ExtendedClaimSchema,
InputServiceLine,
updateClaimSchema,
} from "@repo/db/types";
import { forwardToSeleniumClaimPreAuthAgent } from "../services/seleniumInsuranceClaimPreAuthClient";
const router = Router();
// Routes
// Uploads are held in memory only; nothing is ever written to disk.
const multerStorage = multer.memoryStorage(); // NO DISK
// Whitelist of MIME types accepted for claim attachments.
const ALLOWED_UPLOAD_TYPES = new Set([
  "application/pdf",
  "image/jpeg",
  "image/png",
  "image/webp",
]);
const upload = multer({
  storage: multerStorage,
  limits: { fileSize: 5 * 1024 * 1024 }, // 5MB limit per file
  fileFilter: (req, file, cb) => {
    if (!ALLOWED_UPLOAD_TYPES.has(file.mimetype)) {
      cb(new Error("Unsupported file type"));
      return;
    }
    cb(null, true);
  },
});
// POST /mh-provider-login
// Looks up the caller's stored insurance-portal credentials and forwards a
// claims-login request to the selenium automation agent.
// Fix: the agent URL was hard-coded to http://localhost:5002 even though the
// deployment configures SELENIUM_AGENT_BASE_URL (see .env); use the env var
// with the old value as a backward-compatible fallback.
router.post(
  "/mh-provider-login",
  async (req: Request, res: Response): Promise<any> => {
    if (!req.user || !req.user.id) {
      return res.status(401).json({ error: "Unauthorized: user info missing" });
    }
    try {
      const { memberId, dateOfBirth, submissionDate, firstName, lastName, procedureCode, toothNumber, toothSurface, insuranceSiteKey } = req.body;
      // toothNumber / toothSurface are optional; everything else is required.
      if (!memberId || !dateOfBirth || !submissionDate || !firstName || !lastName || !procedureCode || !insuranceSiteKey) {
        return res.status(400).json({ error: "Missing required fields: memberId, dateOfBirth, submissionDate, firstName, lastName, procedureCode, insuranceSiteKey" });
      }
      const credentials = await storage.getInsuranceCredentialByUserAndSiteKey(
        req.user.id,
        insuranceSiteKey
      );
      if (!credentials) {
        return res.status(404).json({
          error:
            "No insurance credentials found for this provider. Kindly Update this at Settings Page.",
        });
      }
      // The agent expects the payload wrapped in a `data` envelope, enriched
      // with the provider's portal credentials.
      const enrichedData = {
        data: {
          memberId,
          dateOfBirth,
          submissionDate,
          firstName,
          lastName,
          procedureCode,
          toothNumber,
          toothSurface,
          insuranceSiteKey,
          massdhpUsername: credentials.username,
          massdhpPassword: credentials.password,
        },
      };
      const seleniumBaseUrl =
        process.env.SELENIUM_AGENT_BASE_URL || "http://localhost:5002";
      const seleniumRes = await axios.post(
        `${seleniumBaseUrl}/claims-login`,
        enrichedData
      );
      const result = seleniumRes.data;
      if (result?.status !== "success") {
        // 502: the upstream automation service reported a failure.
        return res.status(502).json({ error: result?.message || "Selenium service error" });
      }
      return res.json({ status: "success", message: "Claims automation completed. Browser remains open." });
    } catch (err: any) {
      console.error(err);
      return res.status(500).json({
        error: err?.message || "Failed to contact selenium service",
      });
    }
  }
);
// POST /selenium-claim
// Accepts up to 10 PDFs + 10 images (multipart fields "pdfs"/"images") plus a
// JSON claim payload in the "data" field, enriches it with the caller's
// stored insurance-portal credentials, and forwards everything to the
// selenium claim-submission agent. If the agent returns a claimNumber it is
// persisted back onto the claim (best-effort — a failed update does not fail
// the request).
router.post(
  "/selenium-claim",
  upload.fields([
    { name: "pdfs", maxCount: 10 },
    { name: "images", maxCount: 10 },
  ]),
  async (req: Request, res: Response): Promise<any> => {
    if (!req.files || !req.body.data) {
      return res
        .status(400)
        .json({ error: "Missing files or claim data for selenium" });
    }
    if (!req.user || !req.user.id) {
      return res.status(401).json({ error: "Unauthorized: user info missing" });
    }
    try {
      // "data" arrives as a JSON string inside the multipart form.
      const claimData = JSON.parse(req.body.data);
      const pdfs =
        (req.files as Record<string, Express.Multer.File[]>).pdfs ?? [];
      const images =
        (req.files as Record<string, Express.Multer.File[]>).images ?? [];
      const credentials = await storage.getInsuranceCredentialByUserAndSiteKey(
        req.user.id,
        claimData.insuranceSiteKey
      );
      if (!credentials) {
        return res.status(404).json({
          error:
            "No insurance credentials found for this provider. Kindly Update this at Settings Page.",
        });
      }
      // Attach the portal credentials the agent needs to log in.
      const enrichedData = {
        ...claimData,
        massdhpUsername: credentials.username,
        massdhpPassword: credentials.password,
      };
      const result = await forwardToSeleniumClaimAgent(enrichedData, [
        ...pdfs,
        ...images,
      ]);
      // Store claimNumber if returned from Selenium
      if (result?.claimNumber && claimData.claimId) {
        try {
          await storage.updateClaim(claimData.claimId, {
            claimNumber: result.claimNumber,
          });
          console.log(`Updated claim ${claimData.claimId} with claimNumber: ${result.claimNumber}`);
        } catch (updateErr) {
          // Best-effort: the submission itself succeeded, so only log.
          console.error("Failed to update claim with claimNumber:", updateErr);
        }
      }
      res.json({
        ...result,
        claimId: claimData.claimId,
      });
    } catch (err: any) {
      console.error(err);
      return res.status(500).json({
        error: err.message || "Failed to forward to selenium agent",
      });
    }
  }
);
// POST /selenium/fetchpdf
// Downloads a PDF produced by the selenium agent and files it under the
// patient's "Claims" / "Claims Preauth" PDF group.
// Fix: the two template literals were corrupted to `$(unknown)` — they must
// interpolate the extracted `filename` (`${filename}`), otherwise the fetch
// URL and the returned pdfPath are both broken.
router.post(
  "/selenium/fetchpdf",
  async (req: Request, res: Response): Promise<any> => {
    // Log and send a JSON error payload in one step.
    function sendError(res: Response, message: string, status = 400) {
      console.error("Error:", message);
      return res.status(status).json({ error: message });
    }
    try {
      if (!req.user || !req.user.id) {
        return sendError(res, "Unauthorized: user info missing", 401);
      }
      const { patientId, pdf_url, groupTitleKey } = req.body;
      if (!pdf_url) {
        return sendError(res, "Missing pdf_url");
      }
      if (!patientId) {
        return sendError(res, "Missing Patient Id");
      }
      const parsedPatientId = parseInt(patientId);
      console.log("Fetching PDF from URL:", pdf_url);
      const filename = path.basename(new URL(pdf_url).pathname);
      console.log("Extracted filename:", filename);
      // Always fetch from localhost regardless of what hostname is in the pdf_url,
      // since both backend and selenium service run on the same machine.
      const seleniumPort = process.env.SELENIUM_PORT || "5002";
      const localPdfUrl = `http://localhost:${seleniumPort}/downloads/${filename}`;
      console.log("Fetching PDF from local URL:", localPdfUrl);
      const pdfResponse = await axios.get(localPdfUrl, {
        responseType: "arraybuffer",
        timeout: 15000,
      });
      console.log("PDF fetched successfully, size:", pdfResponse.data.length);
      // Allowed keys as a literal tuple to derive a union type
      const allowedKeys = [
        "INSURANCE_CLAIM",
        "INSURANCE_CLAIM_PREAUTH",
      ] as const;
      type GroupKey = (typeof allowedKeys)[number];
      const isGroupKey = (v: any): v is GroupKey =>
        (allowedKeys as readonly string[]).includes(v);
      if (!isGroupKey(groupTitleKey)) {
        return sendError(
          res,
          `Invalid groupTitleKey. Must be one of: ${allowedKeys.join(", ")}`
        );
      }
      // Human-readable group titles keyed by the machine key.
      const GROUP_TITLES: Record<GroupKey, string> = {
        INSURANCE_CLAIM: "Claims",
        INSURANCE_CLAIM_PREAUTH: "Claims Preauth",
      };
      const groupTitle = GROUP_TITLES[groupTitleKey];
      // ✅ Find or create PDF group for this claim
      let group = await storage.findPdfGroupByPatientTitleKey(
        parsedPatientId,
        groupTitleKey
      );
      if (!group) {
        group = await storage.createPdfGroup(
          parsedPatientId,
          groupTitle,
          groupTitleKey
        );
      }
      // ✅ Save PDF file into that group
      const created = await storage.createPdfFile(group.id!, filename, pdfResponse.data);
      // Extract the PDF file ID for opening the viewer — storage may return
      // either the created record or a bare numeric id.
      let pdfFileId: number | null = null;
      if (created && typeof created === "object" && "id" in created) {
        pdfFileId = Number(created.id);
      } else if (typeof created === "number") {
        pdfFileId = created;
      }
      return res.json({
        success: true,
        pdfPath: `/temp/${filename}`,
        pdf_url,
        fileName: filename,
        pdfFileId,
        // pdfFilename: filename,
      });
    } catch (err: any) {
      console.error("Error in /selenium/fetchpdf:", err);
      console.error("Error details:", {
        message: err.message,
        code: err.code,
        response: err.response?.status,
        responseData: err.response?.data,
      });
      const errorMsg = err.response?.data || err.message || "Failed to Fetch and Download the pdf";
      return sendError(res, `Failed to Fetch and Download the pdf: ${errorMsg}`, 500);
    }
  }
);
// POST /selenium-claim-pre-auth
// Pre-authorization variant of /selenium-claim: same multipart contract
// (up to 10 "pdfs" + 10 "images" and a JSON "data" field), same credential
// enrichment, but forwards to the pre-auth selenium agent and does not
// persist a claimNumber afterwards.
router.post(
  "/selenium-claim-pre-auth",
  upload.fields([
    { name: "pdfs", maxCount: 10 },
    { name: "images", maxCount: 10 },
  ]),
  async (req: Request, res: Response): Promise<any> => {
    if (!req.files || !req.body.data) {
      return res
        .status(400)
        .json({ error: "Missing files or claim data for selenium" });
    }
    if (!req.user || !req.user.id) {
      return res.status(401).json({ error: "Unauthorized: user info missing" });
    }
    try {
      // "data" arrives as a JSON string inside the multipart form.
      const claimData = JSON.parse(req.body.data);
      const pdfs =
        (req.files as Record<string, Express.Multer.File[]>).pdfs ?? [];
      const images =
        (req.files as Record<string, Express.Multer.File[]>).images ?? [];
      const credentials = await storage.getInsuranceCredentialByUserAndSiteKey(
        req.user.id,
        claimData.insuranceSiteKey
      );
      if (!credentials) {
        return res.status(404).json({
          error:
            "No insurance credentials found for this provider. Kindly Update this at Settings Page.",
        });
      }
      // Attach the portal credentials the agent needs to log in.
      const enrichedData = {
        ...claimData,
        massdhpUsername: credentials.username,
        massdhpPassword: credentials.password,
      };
      const result = await forwardToSeleniumClaimPreAuthAgent(enrichedData, [
        ...pdfs,
        ...images,
      ]);
      res.json({
        ...result,
        claimId: claimData.claimId,
      });
    } catch (err: any) {
      console.error(err);
      return res.status(500).json({
        error: err.message || "Failed to forward to selenium agent",
      });
    }
  }
);
// GET /api/claims/recent — paginated list of the most recent claims plus the
// overall claim count.
router.get("/recent", async (req: Request, res: Response) => {
  try {
    const limit = Number.parseInt(req.query.limit as string) || 10;
    const offset = Number.parseInt(req.query.offset as string) || 0;
    // Fetch the page and the total concurrently.
    const claimsPromise = storage.getRecentClaims(limit, offset);
    const countPromise = storage.getTotalClaimCount();
    const [claims, totalCount] = await Promise.all([claimsPromise, countPromise]);
    res.json({ claims, totalCount });
  } catch (error) {
    console.error("Failed to retrieve recent claims:", error);
    res.status(500).json({ message: "Failed to retrieve recent claims" });
  }
});
// GET /api/claims/patient/:patientId — paginated claims for one patient.
// Fix: the original validated `isNaN(patientId)` twice with two different
// error messages; the dead second check is removed.
router.get(
  "/patient/:patientId",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const patientIdParam = Array.isArray(req.params.patientId)
        ? req.params.patientId[0]
        : req.params.patientId;
      if (!patientIdParam) {
        return res.status(400).json({ message: "Missing patientId" });
      }
      const patientId = parseInt(patientIdParam, 10);
      if (isNaN(patientId)) {
        return res.status(400).json({ message: "Invalid patientId" });
      }
      const limit = parseInt(req.query.limit as string) || 10;
      const offset = parseInt(req.query.offset as string) || 0;
      const [claims, totalCount] = await Promise.all([
        storage.getRecentClaimsByPatientId(patientId, limit, offset),
        storage.getTotalClaimCountByPatient(patientId),
      ]);
      res.json({ claims, totalCount });
    } catch (error) {
      console.error("Failed to retrieve claims for patient:", error);
      res.status(500).json({ message: "Failed to retrieve patient claims" });
    }
  }
);
// GET /all — returns the total number of claims in the system.
router.get("/all", async (_req: Request, res: Response) => {
  try {
    const count = await storage.getTotalClaimCount();
    res.json(count);
  } catch (error) {
    res.status(500).json({ message: "Failed to retrieve claims count" });
  }
});
// GET /:id — fetch a single claim by its numeric id.
router.get("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const rawId = Array.isArray(req.params.id) ? req.params.id[0] : req.params.id;
    if (!rawId) {
      return res.status(400).json({ error: "Missing claim ID" });
    }
    const claimId = parseInt(rawId, 10);
    if (Number.isNaN(claimId)) {
      return res.status(400).json({ error: "Invalid claim ID" });
    }
    const claim = await storage.getClaim(claimId);
    if (!claim) {
      return res.status(404).json({ message: "Claim not found" });
    }
    res.json(claim);
  } catch (error) {
    res.status(500).json({ message: "Failed to retrieve claim" });
  }
});
// Create a new claim
// POST / — validates the body with ExtendedClaimSchema, reshapes claimFiles
// and serviceLines into Prisma nested-create inputs, creates the claim, and
// seeds an empty PENDING payment whose totals mirror the service lines.
// NOTE(review): this handler mutates req.body in place before parsing.
router.post("/", async (req: Request, res: Response): Promise<any> => {
  try {
    // --- TRANSFORM claimFiles (if provided) into Prisma nested-create shape
    if (Array.isArray(req.body.claimFiles)) {
      // each item expected: { filename: string, mimeType: string }
      req.body.claimFiles = {
        create: req.body.claimFiles.map((f: any) => ({
          filename: String(f.filename),
          // accept either `mimeType` or legacy `mime`; default to ""
          mimeType: String(f.mimeType || f.mime || ""),
        })),
      };
    }
    // --- TRANSFORM serviceLines
    if (
      !Array.isArray(req.body.serviceLines) ||
      req.body.serviceLines.length === 0
    ) {
      return res.status(400).json({
        message: "At least one service line is required to create a claim",
      });
    }
    if (Array.isArray(req.body.serviceLines)) {
      // New lines start fully unpaid: due == billed, adjusted/paid == 0.
      req.body.serviceLines = req.body.serviceLines.map(
        (line: InputServiceLine) => ({
          ...line,
          totalBilled: Number(line.totalBilled),
          totalAdjusted: 0,
          totalPaid: 0,
          totalDue: Number(line.totalBilled),
        })
      );
      req.body.serviceLines = { create: req.body.serviceLines };
    }
    // The claim is always attributed to the authenticated user.
    const parsedClaim = ExtendedClaimSchema.parse({
      ...req.body,
      userId: req.user!.id,
    });
    // Step 1: Calculate total billed from service lines
    const serviceLinesCreateInput = (
      parsedClaim.serviceLines as Prisma.ServiceLineCreateNestedManyWithoutClaimInput
    )?.create;
    const lines = Array.isArray(serviceLinesCreateInput)
      ? (serviceLinesCreateInput as unknown as {
          totalBilled: number | string;
        }[])
      : [];
    const totalBilled = lines.reduce(
      (sum, line) => sum + Number(line.totalBilled ?? 0),
      0
    );
    // Step 2: Create claim (with service lines)
    const claim = await storage.createClaim(parsedClaim);
    // Step 3: Create empty payment
    await storage.createPayment({
      claimId: claim.id,
      patientId: claim.patientId,
      userId: req.user!.id,
      totalBilled: new Decimal(totalBilled),
      totalPaid: new Decimal(0),
      totalDue: new Decimal(totalBilled),
      status: "PENDING",
      notes: "",
    });
    res.status(201).json(claim);
  } catch (error) {
    if (error instanceof z.ZodError) {
      return res.status(400).json({
        message: "Validation error",
        errors: error.format(),
      });
    }
    console.error("❌ Failed to create claim:", error); // logs full error to server
    // Send more detailed info to the client (for dev only)
    return res.status(500).json({
      message: "Failed to create claim",
      error: error instanceof Error ? error.message : String(error),
    });
  }
});
// PUT /:id — validate and apply a partial update to an existing claim.
// NOTE(review): unlike DELETE, this route does not verify claim ownership —
// confirm whether that is intentional.
router.put("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const rawId = Array.isArray(req.params.id) ? req.params.id[0] : req.params.id;
    if (!rawId) {
      return res.status(400).json({ error: "Missing claim ID" });
    }
    const claimId = parseInt(rawId, 10);
    if (Number.isNaN(claimId)) {
      return res.status(400).json({ error: "Invalid claim ID" });
    }
    const existingClaim = await storage.getClaim(claimId);
    if (!existingClaim) {
      return res.status(404).json({ message: "Claim not found" });
    }
    // Schema-validate before touching storage.
    const claimData = updateClaimSchema.parse(req.body);
    const updatedClaim = await storage.updateClaim(claimId, claimData);
    res.json(updatedClaim);
  } catch (error) {
    if (error instanceof z.ZodError) {
      return res.status(400).json({
        message: "Validation error",
        errors: error.format(),
      });
    }
    res.status(500).json({ message: "Failed to update claim" });
  }
});
// DELETE /:id — remove a claim; only its owning user may delete it.
router.delete("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const rawId = Array.isArray(req.params.id) ? req.params.id[0] : req.params.id;
    if (!rawId) {
      return res.status(400).json({ error: "Missing claim ID" });
    }
    const claimId = parseInt(rawId, 10);
    if (Number.isNaN(claimId)) {
      return res.status(400).json({ error: "Invalid claim ID" });
    }
    const existingClaim = await storage.getClaim(claimId);
    if (!existingClaim) {
      return res.status(404).json({ message: "Claim not found" });
    }
    // Ownership check: a user may only delete their own claims.
    if (existingClaim.userId !== req.user!.id) {
      return res.status(403).json({
        message:
          "Forbidden: Claim belongs to a different user, you can't delete this.",
      });
    }
    await storage.deleteClaim(claimId);
    res.status(204).send();
  } catch (error) {
    res.status(500).json({ message: "Failed to delete claim" });
  }
});
export default router;

View File

@@ -0,0 +1,556 @@
import express, { Request, Response } from "express";
import storage from "../storage";
import { serializeFile } from "../utils/prismaFileUtils";
import { CloudFolder } from "@repo/db/types";
const router = express.Router();
/* ---------- Helpers ---------- */
/**
 * Coerce an arbitrary query value to a non-negative integer.
 * Returns `fallback` when the value is missing, non-numeric, or negative;
 * fractional values are floored.
 */
function parsePositiveInt(v: unknown, fallback: number) {
  const parsed = Number(v);
  return Number.isFinite(parsed) && parsed >= 0 ? Math.floor(parsed) : fallback;
}
/**
 * Send a uniform error envelope: { error: true, message, details? }.
 */
function sendError(
  res: Response,
  status: number,
  message: string,
  details?: any
) {
  const payload = { error: true, message, details };
  return res.status(status).json(payload);
}
/* ---------- Paginated child FOLDERS for a parent ----------
GET /items/folders?parentId=&limit=&offset=
parentId may be "null" or numeric or absent (means root)
*/
router.get(
  "/items/folders",
  async (req: Request, res: Response): Promise<any> => {
    const rawParent = req.query.parentId;
    // Absent or the literal string "null" both mean "root" (parentId = null).
    const parentId =
      rawParent === undefined
        ? null
        : rawParent === "null"
          ? null
          : Number(rawParent);
    if (parentId !== null && (!Number.isInteger(parentId) || parentId <= 0)) {
      return sendError(res, 400, "Invalid parentId");
    }
    const limit = parsePositiveInt(req.query.limit, 10); // default 10 folders/page
    const offset = parsePositiveInt(req.query.offset, 0);
    try {
      // Prefer a storage method that lists folders by parent, otherwise filter
      let data: CloudFolder[] = [];
      if (typeof (storage as any).listFoldersByParent === "function") {
        data = await (storage as any).listFoldersByParent(
          parentId,
          limit,
          offset
        );
        // If the count helper is missing, fall back to the page length.
        const total =
          (await (storage as any).countFoldersByParent?.(parentId)) ??
          data.length;
        // NOTE: this branch responds with `total`, the fallback with
        // `totalCount` — clients must accept both field names.
        return res.json({ error: false, data, total, limit, offset });
      }
      // Fallback: use recent and filter (less efficient). Recommend implementing listFoldersByParent in storage.
      const recent = await storage.listRecentFolders(1000, 0);
      const folders = (recent || []).filter(
        (f: any) => (f as any).parentId === parentId
      );
      const paged = folders.slice(offset, offset + limit);
      return res.json({
        error: false,
        data: paged,
        totalCount: folders.length,
      });
    } catch (err) {
      return sendError(res, 500, "Failed to load child folders", err);
    }
  }
);
/* ---------- Paginated files for a folder ----------
GET /items/files?parentId=&limit=&offset=
parentId may be "null" or numeric or absent (means root)
*/
router.get(
"/items/files",
async (req: Request, res: Response): Promise<any> => {
const rawParent = req.query.parentId;
const parentId =
rawParent === undefined
? null
: rawParent === "null"
? null
: Number(rawParent);
if (parentId !== null && (!Number.isInteger(parentId) || parentId <= 0)) {
return sendError(res, 400, "Invalid parentId");
}
const limit = parsePositiveInt(req.query.limit, 20); // default 20 files/page
const offset = parsePositiveInt(req.query.offset, 0);
try {
const files = await storage.listFilesInFolder(parentId, limit, offset);
const totalCount = await storage.countFilesInFolder(parentId);
const serialized = files.map(serializeFile);
return res.json({ error: false, data: serialized, totalCount });
} catch (err) {
return sendError(res, 500, "Failed to load files for folder", err);
}
}
);
/* ---------- Recent folders (global) ----------
GET /folders/recent?limit=&offset=
*/
router.get(
  "/folders/recent",
  async (req: Request, res: Response): Promise<any> => {
    const limit = parsePositiveInt(req.query.limit, 50);
    const offset = parsePositiveInt(req.query.offset, 0);
    // Always request top-level folders (parentId = null)
    const rootParentId: number | null = null;
    try {
      const folders = await storage.listRecentFolders(limit, offset, rootParentId);
      const totalCount = await storage.countFoldersByParent(rootParentId);
      return res.json({ error: false, data: folders, totalCount });
    } catch (err) {
      return sendError(res, 500, "Failed to load recent folders");
    }
  }
);
// ---------- Folder CRUD ----------
// GET /folders/:id — fetch one folder by id.
router.get(
  "/folders/:id",
  async (req: Request, res: Response): Promise<any> => {
    const folderId = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(folderId) || folderId <= 0) {
      return sendError(res, 400, "Invalid folder id");
    }
    try {
      const folder = await storage.getFolder(folderId);
      if (!folder) return sendError(res, 404, "Folder not found");
      return res.json({ error: false, data: folder });
    } catch (err) {
      return sendError(res, 500, "Failed to load folder");
    }
  }
);
// POST /folders — create a folder; parentId defaults to root (null).
router.post("/folders", async (req: Request, res: Response): Promise<any> => {
  const { userId, name, parentId } = req.body;
  const validName = typeof name === "string" && name.trim().length > 0;
  if (!userId || !validName) {
    return sendError(res, 400, "Missing or invalid userId/name");
  }
  try {
    const created = await storage.createFolder(
      userId,
      name.trim(),
      parentId ?? null
    );
    return res.status(201).json({ error: false, data: created });
  } catch (err) {
    return sendError(res, 500, "Failed to create folder");
  }
});
// PUT /folders/:id — rename and/or re-parent a folder.
router.put(
  "/folders/:id",
  async (req: Request, res: Response): Promise<any> => {
    // coerce possibly-undefined param to string before parsing
    const folderId = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(folderId) || folderId <= 0) {
      return sendError(res, 400, "Invalid folder id");
    }
    // Only whitelisted fields may be updated.
    const updates: any = {};
    if (typeof req.body.name === "string") updates.name = req.body.name.trim();
    if (req.body.parentId !== undefined) updates.parentId = req.body.parentId;
    try {
      const updated = await storage.updateFolder(folderId, updates);
      if (!updated) {
        return sendError(res, 404, "Folder not found or update failed");
      }
      return res.json({ error: false, data: updated });
    } catch (err) {
      return sendError(res, 500, "Failed to update folder");
    }
  }
);
// DELETE /folders/:id — remove a folder.
router.delete(
  "/folders/:id",
  async (req: Request, res: Response): Promise<any> => {
    const folderId = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(folderId) || folderId <= 0) {
      return sendError(res, 400, "Invalid folder id");
    }
    try {
      const deleted = await storage.deleteFolder(folderId);
      if (!deleted) {
        return sendError(res, 404, "Folder not found or delete failed");
      }
      return res.json({ error: false, data: { id: folderId } });
    } catch (err) {
      return sendError(res, 500, "Failed to delete folder");
    }
  }
);
/* ---------- Files inside folder (pagination) ----------
GET /folders/:id/files?limit=&offset=
id = "null" lists files with folderId = null
responses serialized
*/
router.get(
"/folders/:id/files",
async (req: Request, res: Response): Promise<any> => {
const rawId = req.params.id;
const folderId = rawId === "null" ? null : Number.parseInt(rawId ?? "", 10);
if (folderId !== null && (!Number.isInteger(folderId) || folderId <= 0)) {
return sendError(res, 400, "Invalid folder id");
}
const limit = parsePositiveInt(req.query.limit, 50);
const offset = parsePositiveInt(req.query.offset, 0);
try {
const files = await storage.listFilesInFolder(folderId, limit, offset);
const totalCount = await storage.countFilesInFolder(folderId);
const serialized = files.map(serializeFile);
return res.json({ error: false, data: serialized, totalCount });
} catch (err) {
return sendError(res, 500, "Failed to list files for folder");
}
}
);
/* ---------- File CRUD (init, update metadata, delete) ----------
POST /folders/:id/files { userId, name, mimeType?, expectedSize?, totalChunks? }
PUT /files/:id { name?, mimeType?, folderId? }
DELETE /files/:id
*/
// Hard cap on a single assembled file; enforced both at init (expectedSize)
// and per-chunk / at completion in the handlers below.
const MAX_FILE_MB = 20;
const MAX_FILE_BYTES = MAX_FILE_MB * 1024 * 1024;
// POST /folders/:id/files — register a new upload (metadata only; chunk
// bytes are sent separately via POST /files/:id/chunks).
router.post(
  "/folders/:id/files",
  async (req: Request, res: Response): Promise<any> => {
    const rawId = req.params.id;
    // "null" means the file lives at root (folderId = null).
    const folderId = rawId === "null" ? null : Number.parseInt(rawId ?? "", 10);
    if (folderId !== null && (!Number.isInteger(folderId) || folderId <= 0)) {
      return sendError(res, 400, "Invalid folder id");
    }
    const { userId, name, mimeType } = req.body;
    if (!userId || typeof name !== "string" || !name.trim()) {
      return sendError(res, 400, "Missing or invalid userId/name");
    }
    // coerce size & chunks
    let expectedSize: bigint | null = null;
    if (req.body.expectedSize != null) {
      try {
        // coerce to BigInt safely
        // First validate via Number (rejects NaN/negative and lets us apply
        // the MB cap) — the BigInt conversion below throws on fractional
        // strings, which the catch turns into a 400.
        const asNum = Number(req.body.expectedSize);
        if (!Number.isFinite(asNum) || asNum < 0) {
          return sendError(res, 400, "Invalid expectedSize");
        }
        if (asNum > MAX_FILE_BYTES) {
          // Payload Too Large
          return sendError(
            res,
            413,
            `File too large. Max allowed is ${MAX_FILE_MB} MB`
          );
        }
        expectedSize = BigInt(String(req.body.expectedSize));
      } catch {
        return sendError(res, 400, "Invalid expectedSize");
      }
    }
    let totalChunks: number | null = null;
    if (req.body.totalChunks != null) {
      const tc = Number(req.body.totalChunks);
      if (!Number.isFinite(tc) || tc <= 0)
        return sendError(res, 400, "Invalid totalChunks");
      totalChunks = Math.floor(tc);
    }
    try {
      const created = await storage.initializeFileUpload(
        userId,
        name.trim(),
        mimeType ?? null,
        expectedSize,
        totalChunks,
        folderId
      );
      return res
        .status(201)
        .json({ error: false, data: serializeFile(created as any) });
    } catch {
      return sendError(res, 500, "Failed to create file");
    }
  }
);
/* ---------- 2. CHUNKS (raw upload) ---------- */
// POST /files/:id/chunks?seq=N — append one raw binary chunk to an upload.
// NOTE: express.raw's "100mb" limit is intentionally above MAX_FILE_BYTES so
// that oversize chunks reach our own 413 below instead of a body-parser error.
router.post(
  "/files/:id/chunks",
  // only here: use express.raw so req.body is Buffer
  express.raw({ type: () => true, limit: "100mb" }),
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    // seq comes from the query string (req.body is a Buffer here, so the
    // req.body.seq fallback only matters for non-raw callers).
    const seq = Number.parseInt(
      String(req.query.seq ?? req.body.seq ?? ""),
      10
    );
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid file id");
    if (!Number.isInteger(seq) || seq < 0)
      return sendError(res, 400, "Invalid seq");
    const body = req.body as Buffer;
    if (!body || !(body instanceof Buffer)) {
      return sendError(res, 400, "Expected raw binary body (Buffer)");
    }
    // strict size guard: any single chunk must not exceed MAX_FILE_BYTES
    if (body.length > MAX_FILE_BYTES) {
      return sendError(res, 413, `Chunk size exceeds ${MAX_FILE_MB} MB limit`);
    }
    try {
      await storage.appendFileChunk(id, seq, body);
      return res.json({ error: false, data: { fileId: id, seq } });
    } catch (err: any) {
      return sendError(res, 500, "Failed to add chunk");
    }
  }
);
/* ---------- 3. COMPLETE ---------- */
// POST /files/:id/complete — validate the uploaded chunks' combined size,
// then ask storage to assemble/finalize the file.
router.post(
  "/files/:id/complete",
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid file id");
    try {
      // Ask storage for the file (includes chunks in your implementation)
      const file = await storage.getFile(id);
      if (!file) return sendError(res, 404, "File not found");
      // Sum chunks' sizes (storage.getFile returns chunks ordered by seq in your impl)
      const chunks = (file as any).chunks ?? [];
      if (!chunks.length) return sendError(res, 400, "No chunks uploaded");
      let total = 0;
      for (const c of chunks) {
        // c.data is Bytes / Buffer-like
        total += c.data.length;
        // early bailout — stop summing as soon as the cap is exceeded
        if (total > MAX_FILE_BYTES) {
          return sendError(
            res,
            413,
            `Assembled file is too large (${Math.round(total / 1024 / 1024)} MB). Max allowed is ${MAX_FILE_MB} MB.`
          );
        }
      }
      const result = await storage.finalizeFileUpload(id);
      return res.json({ error: false, data: result });
    } catch (err: any) {
      return sendError(res, 500, err?.message || "Failed to complete file");
    }
  }
);
// PUT /files/:id — update file metadata (name, mimeType, folderId).
router.put("/files/:id", async (req: Request, res: Response): Promise<any> => {
  const fileId = Number.parseInt(req.params.id ?? "", 10);
  if (!Number.isInteger(fileId) || fileId <= 0) {
    return sendError(res, 400, "Invalid file id");
  }
  // Only whitelisted fields may be updated.
  const updates: any = {};
  if (typeof req.body.name === "string") updates.name = req.body.name.trim();
  if (typeof req.body.mimeType === "string") {
    updates.mimeType = req.body.mimeType;
  }
  if (req.body.folderId !== undefined) updates.folderId = req.body.folderId;
  try {
    const updated = await storage.updateFile(fileId, updates);
    if (!updated) {
      return sendError(res, 404, "File not found or update failed");
    }
    return res.json({ error: false, data: serializeFile(updated as any) });
  } catch (err) {
    return sendError(res, 500, "Failed to update file metadata");
  }
});
// DELETE /files/:id — remove a file.
router.delete(
  "/files/:id",
  async (req: Request, res: Response): Promise<any> => {
    const fileId = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(fileId) || fileId <= 0) {
      return sendError(res, 400, "Invalid file id");
    }
    try {
      const deleted = await storage.deleteFile(fileId);
      if (!deleted) {
        return sendError(res, 404, "File not found or delete failed");
      }
      return res.json({ error: false, data: { id: fileId } });
    } catch (err) {
      return sendError(res, 500, "Failed to delete file");
    }
  }
);
/* GET /files/:id -> return serialized metadata (used by preview modal) */
router.get("/files/:id", async (req: Request, res: Response): Promise<any> => {
  const fileId = Number.parseInt(req.params.id ?? "", 10);
  if (!Number.isInteger(fileId) || fileId <= 0) {
    return sendError(res, 400, "Invalid file id");
  }
  try {
    const file = await storage.getFile(fileId);
    if (!file) return sendError(res, 404, "File not found");
    return res.json({ error: false, data: serializeFile(file as any) });
  } catch (err) {
    return sendError(res, 500, "Failed to load file");
  }
});
/* GET /files/:id/content -> stream file with inline disposition for preview */
// Headers must be set before streaming begins; once streaming has started
// (res.headersSent) an error can only terminate the response, not report 500.
router.get(
  "/files/:id/content",
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid file id");
    try {
      const file = await storage.getFile(id);
      if (!file) return sendError(res, 404, "File not found");
      // Strip quotes/backslashes so the Content-Disposition value stays intact.
      const filename = (file.name ?? `file-${(file as any).id}`).replace(
        /["\\]/g,
        ""
      );
      if ((file as any).mimeType)
        res.setHeader("Content-Type", (file as any).mimeType);
      // NOTE: inline instead of attachment so browser can render (images, pdfs)
      res.setHeader(
        "Content-Disposition",
        `inline; filename="${encodeURIComponent(filename)}"`
      );
      await storage.streamFileTo(res, id);
      // Make sure the response is closed even if the stream didn't end it.
      if (!res.writableEnded) res.end();
    } catch (err) {
      if (res.headersSent) return res.end();
      return sendError(res, 500, "Failed to stream file");
    }
  }
);
/* GET /files/:id/download */
// Same as /content but with "attachment" disposition to force a download.
router.get(
  "/files/:id/download",
  async (req: Request, res: Response): Promise<any> => {
    const fileId = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(fileId) || fileId <= 0) {
      return sendError(res, 400, "Invalid file id");
    }
    try {
      const file = await storage.getFile(fileId);
      if (!file) return sendError(res, 404, "File not found");
      // Strip quotes/backslashes so the header value cannot be broken.
      const rawName = file.name ?? `file-${(file as any).id}`;
      const filename = rawName.replace(/["\\]/g, "");
      const mimeType = (file as any).mimeType;
      if (mimeType) res.setHeader("Content-Type", mimeType);
      res.setHeader(
        "Content-Disposition",
        `attachment; filename="${encodeURIComponent(filename)}"`
      );
      await storage.streamFileTo(res, fileId);
      if (!res.writableEnded) res.end();
    } catch (err) {
      // If streaming already started we can only terminate the response.
      if (res.headersSent) return res.end();
      return sendError(res, 500, "Failed to stream file");
    }
  }
);
/* ---------- Search endpoints (separate) ----------
GET /search/folders?q=&limit=&offset=
GET /search/files?q=&type=&limit=&offset=
*/
router.get(
  "/search/folders",
  async (req: Request, res: Response): Promise<any> => {
    const query = String(req.query.q ?? "").trim();
    if (!query) {
      return sendError(res, 400, "Missing search query parameter 'q'");
    }
    const limit = parsePositiveInt(req.query.limit, 20);
    const offset = parsePositiveInt(req.query.offset, 0);
    try {
      // Search is scoped to top-level folders (parentId = null).
      const { data, total } = await storage.searchFolders(
        query,
        limit,
        offset,
        null
      );
      return res.json({ error: false, data, totalCount: total });
    } catch (err) {
      return sendError(res, 500, "Folder search failed");
    }
  }
);
router.get(
"/search/files",
async (req: Request, res: Response): Promise<any> => {
const q = String(req.query.q ?? "").trim();
const type =
typeof req.query.type === "string" ? req.query.type.trim() : undefined;
const limit = parsePositiveInt(req.query.limit, 20);
const offset = parsePositiveInt(req.query.offset, 0);
if (!q && !type)
return sendError(
res,
400,
"Provide at least one of 'q' or 'type' to search files"
);
try {
const { data, total } = await storage.searchFiles(q, type, limit, offset);
const serialized = data.map(serializeFile);
return res.json({ error: false, data: serialized, totalCount: total });
} catch (err) {
return sendError(res, 500, "File search failed");
}
}
);
export default router;

View File

@@ -0,0 +1,360 @@
import { Router, Request, Response } from "express";
import { spawn } from "child_process";
import path from "path";
import os from "os";
import fs from "fs";
import { prisma } from "@repo/db/client";
import { storage } from "../storage";
import archiver from "archiver";
import { backupDatabaseToPath } from "../services/databaseBackupService";
const router = Router();
/**
* Create a database backup
*
* - Uses pg_dump in directory format for parallel dump to a tmp dir
* - Uses 'archiver' to create zip or gzipped tar stream directly to response
* - Supports explicit override via BACKUP_ARCHIVE_FORMAT env var ('zip' or 'tar')
* - Ensures cleanup of tmp dir on success/error/client disconnect
*/
// helper to remove directory (sync to keep code straightforward);
// best-effort: a failed removal must never crash the backup request.
function safeRmDir(dir: string) {
  try {
    fs.rmSync(dir, { recursive: true, force: true });
  } catch {
    /* ignore */
  }
}
/**
 * POST /backup — create a database backup and stream it to the client.
 *
 * - Runs pg_dump in directory format (-Fd) with 4 parallel jobs into a
 *   unique tmp dir.
 * - Streams that directory to the response via `archiver` as .zip or .tar.gz
 *   (BACKUP_ARCHIVE_FORMAT=zip|tar overrides the platform default).
 * - Cleans the tmp dir up on success, failure, and client disconnect.
 *
 * NOTE: this endpoint streams to the requesting client and does not use the
 * saved backup destination (see POST /backup-path for that flow).
 */
router.post("/backup", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = req.user?.id;
    if (!userId) {
      return res.status(401).json({ error: "Unauthorized" });
    }
    // create a unique tmp directory for directory-format dump
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "dental_backup_")); // MUST
    // Decide archive format
    // BACKUP_ARCHIVE_FORMAT can be 'zip' or 'tar' (case-insensitive)
    const forced = (process.env.BACKUP_ARCHIVE_FORMAT || "").toLowerCase();
    const useZip =
      forced === "zip"
        ? true
        : forced === "tar"
          ? false
          : process.platform === "win32";
    const filename = useZip
      ? `dental_backup_${Date.now()}.zip`
      : `dental_backup_${Date.now()}.tar.gz`;
    // Spawn pg_dump
    const pgDump = spawn(
      "pg_dump",
      [
        "-Fd", // DIRECTORY format (required for parallel dump)
        "-j",
        "4", // number of parallel jobs — MUST be >0 for parallelism
        "--no-acl",
        "--no-owner",
        "-h",
        process.env.DB_HOST || "localhost",
        "-U",
        process.env.DB_USER || "postgres",
        process.env.DB_NAME || "dental_db",
        "-f",
        tmpDir, // write parallely
      ],
      {
        env: {
          ...process.env,
          PGPASSWORD: process.env.DB_PASSWORD,
        },
      }
    );
    let pgStderr = "";
    pgDump.stderr.on("data", (chunk) => {
      pgStderr += chunk.toString();
    });
    pgDump.on("error", (err) => {
      safeRmDir(tmpDir);
      console.error("Failed to start pg_dump:", err);
      // If headers haven't been sent, respond; otherwise just end socket
      if (!res.headersSent) {
        return res
          .status(500)
          .json({ error: "Failed to run pg_dump", details: err.message });
      } else {
        res.destroy(err);
      }
    });
    pgDump.on("close", async (code) => {
      if (code !== 0) {
        safeRmDir(tmpDir);
        console.error("pg_dump failed:", pgStderr || `exit ${code}`);
        if (!res.headersSent) {
          return res.status(500).json({
            error: "Backup failed",
            details: pgStderr || `pg_dump exited with ${code}`,
          });
        } else {
          // headers already sent — destroy response
          res.destroy(new Error("pg_dump failed"));
          return;
        }
      }
      // pg_dump succeeded — stream archive directly to response using archiver
      // Set headers before piping
      res.setHeader(
        "Content-Disposition",
        // BUG FIX: this previously emitted the literal text "$(unknown)" as
        // the download name; it must interpolate the generated `filename`.
        `attachment; filename="${filename}"`
      );
      res.setHeader(
        "Content-Type",
        useZip ? "application/zip" : "application/gzip"
      );
      const archive = archiver(
        useZip ? "zip" : "tar",
        useZip ? {} : { gzip: true, gzipOptions: { level: 6 } }
      );
      let archErr: string | null = null;
      archive.on("error", (err) => {
        archErr = err.message;
        console.error("Archiver error:", err);
        // attempt to respond with error if possible
        try {
          if (!res.headersSent) {
            res.status(500).json({
              error: "Failed to create archive",
              details: err.message,
            });
          } else {
            // if streaming already started, destroy the connection
            res.destroy(err);
          }
        } catch (e) {
          // swallow
        } finally {
          safeRmDir(tmpDir);
        }
      });
      // If client disconnects while streaming
      res.once("close", () => {
        // destroy archiver (stop processing) and cleanup tmpDir
        try {
          archive.destroy();
        } catch (e) {}
        safeRmDir(tmpDir);
      });
      // When streaming finishes successfully
      res.once("finish", async () => {
        // cleanup the tmp dir used by pg_dump
        safeRmDir(tmpDir);
        // update metadata (try/catch so it won't break response flow)
        try {
          await storage.createBackup(userId);
          await storage.deleteNotificationsByType(userId, "BACKUP");
        } catch (err) {
          console.error("Backup saved but metadata update failed:", err);
        }
      });
      // Pipe archive into response
      archive.pipe(res);
      // Add the dumped directory contents to the archive root
      // `directory(source, dest)` where dest is false/'' to place contents at archive root
      archive.directory(tmpDir + path.sep, false);
      // finalize archive (this starts streaming)
      try {
        await archive.finalize();
      } catch (err: any) {
        console.error("Failed to finalize archive:", err);
        // if headers not sent, send 500; otherwise destroy
        try {
          if (!res.headersSent) {
            res.status(500).json({
              error: "Failed to finalize archive",
              details: String(err),
            });
          } else {
            res.destroy(err);
          }
        } catch (e) {}
        safeRmDir(tmpDir);
      }
    });
  } catch (err: any) {
    console.error("Unexpected error in /backup:", err);
    if (!res.headersSent) {
      return res
        .status(500)
        .json({ message: "Internal server error", details: String(err) });
    } else {
      res.destroy(err);
    }
  }
});
/**
* Get database status (connected, size, records count)
*/
// GET /status — database health snapshot: connectivity, pretty-printed
// on-disk size, total patient count, and the requesting user's most recent
// backup timestamp (null if they have never backed up).
router.get("/status", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = req.user?.id;
    if (!userId) {
      return res.status(401).json({ error: "Unauthorized" });
    }
    const sizeRows = await prisma.$queryRaw<{ size: string }[]>`
      SELECT pg_size_pretty(pg_database_size(current_database())) as size
    `;
    const patients = await storage.getTotalPatientCount();
    const latestBackup = await storage.getLastBackup(userId);
    res.json({
      connected: true,
      size: sizeRows[0]?.size,
      patients,
      lastBackup: latestBackup?.createdAt ?? null,
    });
  } catch (err) {
    console.error("Status error:", err);
    res.status(500).json({
      connected: false,
      error: "Could not fetch database status",
    });
  }
});
// ==============================
// Backup Destination CRUD
// ==============================
// CREATE / UPDATE destination
// POST /destination — create (or activate) a backup destination for the
// authenticated user, after verifying the target folder is reachable on disk.
router.post("/destination", async (req, res) => {
  const userId = req.user?.id;
  const { path: destinationPath } = req.body;
  if (!userId) return res.status(401).json({ error: "Unauthorized" });
  if (!destinationPath)
    return res.status(400).json({ error: "Path is required" });
  // Reject immediately when the folder is missing (e.g. external drive unplugged).
  if (!fs.existsSync(destinationPath)) {
    return res.status(400).json({
      error: "Backup path does not exist or drive not connected",
    });
  }
  try {
    const saved = await storage.createBackupDestination(
      userId,
      destinationPath
    );
    res.json(saved);
  } catch (err) {
    console.error(err);
    res.status(500).json({ error: "Failed to save backup destination" });
  }
});
// GET all destinations
// GET /destination — list all backup destinations for the authenticated user.
router.get("/destination", async (req, res) => {
  const userId = req.user?.id;
  if (!userId) return res.status(401).json({ error: "Unauthorized" });
  try {
    const destinations = await storage.getAllBackupDestination(userId);
    res.json(destinations);
  } catch (err) {
    // Without this guard a storage failure escaped as an unhandled promise
    // rejection (Express 4 does not catch errors from async handlers).
    console.error(err);
    res.status(500).json({ error: "Failed to fetch backup destinations" });
  }
});
// UPDATE destination
// PUT /destination/:id — update a backup destination's path (owner-scoped;
// the storage call is keyed by both id and userId).
router.put("/destination/:id", async (req, res) => {
  const userId = req.user?.id;
  if (!userId) return res.status(401).json({ error: "Unauthorized" });
  const id = Number(req.params.id);
  // Reject non-numeric ids up front instead of passing NaN to the DB layer.
  if (!Number.isInteger(id) || id <= 0)
    return res.status(400).json({ error: "Invalid destination ID" });
  const { path: destinationPath } = req.body;
  if (!destinationPath)
    return res.status(400).json({ error: "Path is required" });
  if (!fs.existsSync(destinationPath)) {
    return res.status(400).json({ error: "Path does not exist" });
  }
  try {
    const updated = await storage.updateBackupDestination(
      id,
      userId,
      destinationPath
    );
    res.json(updated);
  } catch (err) {
    // Previously a storage failure became an unhandled promise rejection.
    console.error(err);
    res.status(500).json({ error: "Failed to update backup destination" });
  }
});
// DELETE destination
// DELETE /destination/:id — remove a backup destination (owner-scoped).
router.delete("/destination/:id", async (req, res) => {
  const userId = req.user?.id;
  if (!userId) return res.status(401).json({ error: "Unauthorized" });
  const id = Number(req.params.id);
  // Reject non-numeric ids up front instead of passing NaN to the DB layer.
  if (!Number.isInteger(id) || id <= 0)
    return res.status(400).json({ error: "Invalid destination ID" });
  try {
    await storage.deleteBackupDestination(id, userId);
    res.json({ success: true });
  } catch (err) {
    // Previously a storage failure became an unhandled promise rejection.
    console.error(err);
    res.status(500).json({ error: "Failed to delete backup destination" });
  }
});
// POST /backup-path — run a backup directly into the user's configured
// destination folder (e.g. an external drive) instead of streaming it to
// the client (compare POST /backup).
router.post("/backup-path", async (req, res) => {
  const userId = req.user?.id;
  if (!userId) return res.status(401).json({ error: "Unauthorized" });
  try {
    // Previously this lookup sat outside the try/catch, so a storage failure
    // escaped as an unhandled promise rejection.
    const destination = await storage.getActiveBackupDestination(userId);
    if (!destination) {
      return res.status(400).json({
        error: "No backup destination configured",
      });
    }
    if (!fs.existsSync(destination.path)) {
      return res.status(400).json({
        error:
          "Backup destination not found. External drive may be disconnected.",
      });
    }
    const filename = `dental_backup_${Date.now()}.zip`;
    await backupDatabaseToPath({
      destinationPath: destination.path,
      filename,
    });
    // Record backup metadata and clear any pending backup reminders.
    await storage.createBackup(userId);
    await storage.deleteNotificationsByType(userId, "BACKUP");
    res.json({ success: true, filename });
  } catch (err: any) {
    console.error(err);
    res.status(500).json({
      error: "Backup to destination failed",
      details: err.message,
    });
  }
});
export default router;

View File

@@ -0,0 +1,423 @@
import { Router } from "express";
import { Request, Response } from "express";
import { storage } from "../storage";
import multer from "multer";
import { PdfFile } from "../../../../packages/db/types/pdf-types";
const upload = multer({ storage: multer.memoryStorage() });
const router = Router();
// ----------- PDF GROUPS ------------------
// POST /pdf-groups — create a new PDF group for a patient.
// Body: { patientId, groupTitle, groupTitleKey } — all three are required.
router.post(
  "/pdf-groups",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const { patientId, groupTitle, groupTitleKey } = req.body;
      // BUG FIX: the original condition read `|| groupTitleKey` (no negation),
      // which rejected every request that DID supply groupTitleKey and
      // accepted requests that omitted it.
      if (!patientId || !groupTitle || !groupTitleKey) {
        return res
          .status(400)
          .json({ error: "Missing title, titleKey, or patientId" });
      }
      const group = await storage.createPdfGroup(
        parseInt(patientId),
        groupTitle,
        groupTitleKey
      );
      res.json(group);
    } catch (err) {
      console.error("Error creating PDF group:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// GET /pdf-groups/patient/:patientId — list a patient's PDF groups.
router.get(
  "/pdf-groups/patient/:patientId",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const { patientId } = req.params;
      if (!patientId) {
        return res.status(400).json({ error: "Missing patient ID" });
      }
      const id = parseInt(patientId, 10);
      // Previously a non-numeric param produced parseInt -> NaN and a
      // confusing storage-layer failure; reject it up front instead.
      if (Number.isNaN(id)) {
        return res.status(400).json({ error: "Invalid patient ID" });
      }
      const groups = await storage.getPdfGroupsByPatientId(id);
      res.json(groups);
    } catch (err) {
      console.error("Error fetching groups by patient ID:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// GET /pdf-groups/:id — fetch a single PDF group by numeric id.
router.get(
  "/pdf-groups/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const idParam = req.params.id;
      if (!idParam) {
        return res.status(400).json({ error: "Missing ID" });
      }
      const id = parseInt(idParam, 10);
      // Consistency with /pdf-files/:id, which already rejects NaN ids.
      if (Number.isNaN(id)) {
        return res.status(400).json({ error: "Invalid ID" });
      }
      const group = await storage.getPdfGroupById(id);
      if (!group) return res.status(404).json({ error: "Group not found" });
      res.json(group);
    } catch (err) {
      console.error("Error fetching PDF group:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// GET /pdf-groups — list every PDF group in the system.
router.get("/pdf-groups", async (req: Request, res: Response): Promise<any> => {
  try {
    const allGroups = await storage.getAllPdfGroups();
    res.json(allGroups);
  } catch (err) {
    console.error("Error listing PDF groups:", err);
    res.status(500).json({ error: "Internal server error" });
  }
});
// PUT /pdf-groups/:id — update a PDF group's title and/or titleKey.
router.put(
  "/pdf-groups/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const idParam = req.params.id;
      if (!idParam) {
        return res.status(400).json({ error: "Missing ID" });
      }
      const id = parseInt(idParam, 10);
      if (Number.isNaN(id)) {
        return res.status(400).json({ error: "Invalid ID" });
      }
      const { title, titleKey } = req.body;
      // Only forward fields that were actually supplied: the original always
      // set both keys (even when undefined), relying on the storage layer to
      // ignore undefined values on a partial update.
      const updates: any = {};
      if (title !== undefined) updates.title = title;
      if (titleKey !== undefined) updates.titleKey = titleKey;
      const updated = await storage.updatePdfGroup(id, updates);
      if (!updated) return res.status(404).json({ error: "Group not found" });
      res.json(updated);
    } catch (err) {
      console.error("Error updating PDF group:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// DELETE /pdf-groups/:id — delete a PDF group; responds { success: boolean }.
router.delete(
  "/pdf-groups/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const idParam = req.params.id;
      if (!idParam) {
        return res.status(400).json({ error: "Missing ID" });
      }
      const id = parseInt(idParam, 10);
      // Reject non-numeric ids instead of passing NaN to the DB layer.
      if (Number.isNaN(id)) {
        return res.status(400).json({ error: "Invalid ID" });
      }
      const success = await storage.deletePdfGroup(id);
      res.json({ success });
    } catch (err) {
      console.error("Error deleting PDF group:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// ----------- PDF FILES ------------------
// POST /pdf-files — upload a single PDF (multipart field "file") into a group.
router.post(
  "/pdf-files",
  upload.single("file"),
  async (req: Request, res: Response): Promise<any> => {
    try {
      const uploaded = req.file;
      const { groupId } = req.body;
      if (!groupId || !uploaded) {
        return res.status(400).json({ error: "Missing groupId or file" });
      }
      const created = await storage.createPdfFile(
        parseInt(groupId),
        uploaded.originalname,
        uploaded.buffer
      );
      res.json(created);
    } catch (err) {
      console.error("Error uploading PDF file:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// GET /pdf-files/group/:groupId — list every PDF file in a group.
router.get(
  "/pdf-files/group/:groupId",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const idParam = req.params.groupId;
      if (!idParam) {
        // typo fix: message previously read "Missing Groupt ID"
        return res.status(400).json({ error: "Missing Group ID" });
      }
      const groupId = parseInt(idParam, 10);
      if (Number.isNaN(groupId)) {
        return res.status(400).json({ error: "Invalid Group ID" });
      }
      const files = await storage.getPdfFilesByGroupId(groupId);
      res.json(files);
    } catch (err) {
      // the original swallowed the error silently; log it like sibling routes
      console.error("Error fetching PDF files by group ID:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
/**
 * GET /recent-pdf-files/group/:groupId
 * (doc fix: the comment previously said /pdf-files/group/:groupId, which is
 * a different route defined above.)
 *
 * Query params:
 *  - limit (optional): items per page, clamped to 1..1000. When omitted the
 *    route returns ALL files in the group (no pagination).
 *  - offset (optional, defaults to 0): pagination offset, clamped to >= 0.
 *
 * Response: { total: number, data: PdfFile[] }
 */
router.get(
  "/recent-pdf-files/group/:groupId",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const rawGroupId = req.params.groupId;
      if (!rawGroupId) {
        return res.status(400).json({ error: "Missing groupId param" });
      }
      const groupId = Number(rawGroupId);
      if (Number.isNaN(groupId) || groupId <= 0) {
        return res.status(400).json({ error: "Invalid groupId" });
      }
      // Parse & sanitize query params
      const limitQuery = req.query.limit;
      const offsetQuery = req.query.offset;
      const limit =
        limitQuery !== undefined
          ? Math.min(Math.max(Number(limitQuery), 1), 1000) // 1..1000
          : undefined; // if undefined -> treat as "no pagination" (return all)
      const offset =
        offsetQuery !== undefined ? Math.max(Number(offsetQuery), 0) : 0;
      // Decide whether client asked for paginated response
      const wantsPagination = typeof limit === "number";
      if (wantsPagination) {
        // storage.getPdfFilesByGroupId with pagination should return { total, data }
        const result = await storage.getPdfFilesByGroupId(groupId, {
          limit,
          offset,
          withGroup: false, // do not include group relation in listing
        });
        // result should be { total, data }, but handle unexpected shapes defensively:
        // some storage implementations may return a plain array instead.
        if (Array.isArray(result)) {
          // fallback: storage returned full array; compute total
          return res.json({ total: result.length, data: result });
        }
        return res.json(result);
      } else {
        // no limit requested -> return all files for the group
        const all = (await storage.getPdfFilesByGroupId(groupId)) as PdfFile[];
        return res.json({ total: all.length, data: all });
      }
    } catch (err) {
      console.error("GET /pdf-files/group/:groupId error:", err);
      return res.status(500).json({ error: "Internal server error" });
    }
  }
);
// ROUTE-ORDER FIX: the static "/pdf-files/recent" route MUST be registered
// before the parameterized "/pdf-files/:id". Express matches routes in
// registration order, so with ":id" first a request for /pdf-files/recent
// was captured by ":id" with id === "recent" and always failed the numeric
// check with 400 "Invalid ID", making the recent-files route unreachable.
router.get(
  "/pdf-files/recent",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const limit = parseInt(req.query.limit as string) || 5;
      const offset = parseInt(req.query.offset as string) || 0;
      const files = await storage.getRecentPdfFiles(limit, offset);
      res.json(files);
    } catch (err) {
      console.error("Error getting recent PDF files:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// GET /pdf-files/:id — download one stored PDF as an attachment. The stored
// blob may come back from the driver in several shapes, so it is normalized
// to a Buffer before sending.
router.get(
  "/pdf-files/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const idParam = req.params.id;
      if (!idParam) {
        return res.status(400).json({ error: "Missing ID" });
      }
      const id = parseInt(idParam, 10);
      if (Number.isNaN(id)) {
        return res.status(400).json({ error: "Invalid ID" });
      }
      const pdf = await storage.getPdfFileById(id);
      if (!pdf || !pdf.pdfData) {
        return res.status(404).json({ error: "PDF not found" });
      }
      const data: any = pdf.pdfData;
      // Helper: try many plausible conversions into a Buffer
      function normalizeToBuffer(d: any): Buffer | null {
        // Already a Buffer
        if (Buffer.isBuffer(d)) return d;
        // Uint8Array or other typed arrays
        if (d instanceof Uint8Array) return Buffer.from(d);
        // ArrayBuffer
        if (d instanceof ArrayBuffer) return Buffer.from(new Uint8Array(d));
        // number[] (common)
        if (Array.isArray(d) && d.every((n) => typeof n === "number")) {
          return Buffer.from(d as number[]);
        }
        // Some drivers: { data: number[] }
        if (
          d &&
          typeof d === "object" &&
          Array.isArray(d.data) &&
          d.data.every((n: any) => typeof n === "number")
        ) {
          return Buffer.from(d.data as number[]);
        }
        // Some drivers return object with numeric keys: { '0': 37, '1': 80, ... }
        if (d && typeof d === "object") {
          const keys = Object.keys(d);
          const numericKeys = keys.filter((k) => /^\d+$/.test(k));
          if (numericKeys.length > 0 && numericKeys.length === keys.length) {
            // sort numeric keys to correct order and map to numbers
            const sorted = numericKeys
              .map((k) => parseInt(k, 10))
              .sort((a, b) => a - b)
              .map((n) => d[String(n)]);
            if (sorted.every((v) => typeof v === "number")) {
              return Buffer.from(sorted as number[]);
            }
          }
        }
        // Last resort: if Object.values(d) yields numbers (this is what you used originally)
        try {
          const vals = Object.values(d);
          if (Array.isArray(vals) && vals.every((v) => typeof v === "number")) {
            // coerce to number[] for TS safety
            return Buffer.from(vals as number[]);
          }
        } catch {
          // ignore
        }
        // give up
        return null;
      }
      const pdfBuffer = normalizeToBuffer(data);
      if (!pdfBuffer) {
        console.error("Unsupported pdf.pdfData shape:", {
          typeofData: typeof data,
          constructorName:
            data && data.constructor ? data.constructor.name : undefined,
          keys:
            data && typeof data === "object"
              ? Object.keys(data).slice(0, 20)
              : undefined,
          sample: (() => {
            if (Array.isArray(data)) return data.slice(0, 20);
            if (data && typeof data === "object") {
              const vals = Object.values(data);
              return Array.isArray(vals) ? vals.slice(0, 20) : undefined;
            }
            return String(data).slice(0, 200);
          })(),
        });
        // Try a safe textual fallback (may produce invalid PDF but avoids crashing)
        try {
          const fallback = Buffer.from(String(data));
          res.setHeader("Content-Type", "application/pdf");
          res.setHeader(
            "Content-Disposition",
            `attachment; filename="${pdf.filename}"; filename*=UTF-8''${encodeURIComponent(pdf.filename)}`
          );
          return res.send(fallback);
        } catch (err) {
          console.error("Failed fallback conversion:", err);
          return res.status(500).json({ error: "Cannot process PDF data" });
        }
      }
      res.setHeader("Content-Type", "application/pdf");
      res.setHeader(
        "Content-Disposition",
        `attachment; filename="${pdf.filename}"; filename*=UTF-8''${encodeURIComponent(pdf.filename)}`
      );
      res.send(pdfBuffer);
    } catch (err) {
      console.error("Error downloading PDF file:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// PUT /pdf-files/:id — replace a stored PDF's contents and/or filename
// (multipart field "file"; both fields optional).
router.put(
  "/pdf-files/:id",
  upload.single("file"),
  async (req: Request, res: Response): Promise<any> => {
    try {
      const rawId = req.params.id;
      if (!rawId) {
        return res.status(400).json({ error: "Missing ID" });
      }
      const pdfId = parseInt(rawId);
      const uploaded = req.file;
      const result = await storage.updatePdfFile(pdfId, {
        filename: uploaded?.originalname,
        pdfData: uploaded?.buffer,
      });
      if (!result) {
        return res
          .status(404)
          .json({ error: "PDF not found or update failed" });
      }
      res.json(result);
    } catch (err) {
      console.error("Error updating PDF file:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
// DELETE /pdf-files/:id — remove a stored PDF; responds { success: boolean }.
router.delete(
  "/pdf-files/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const rawId = req.params.id;
      if (!rawId) {
        return res.status(400).json({ error: "Missing ID" });
      }
      const removed = await storage.deletePdfFile(parseInt(rawId));
      res.json({ success: removed });
    } catch (err) {
      console.error("Error deleting PDF file:", err);
      res.status(500).json({ error: "Internal server error" });
    }
  }
);
export default router;

View File

@@ -0,0 +1,99 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { storage } from "../storage";
const router = Router();
/**
* GET /api/reports/export
* query:
* - type = patients_with_balance | collections_by_doctor
* - from, to = optional ISO date strings (YYYY-MM-DD)
* - staffId = required for collections_by_doctor
* - format = csv (we expect csv; if missing default to csv)
*/
/**
 * Escape one CSV cell: null/undefined become "", all newline styles are
 * collapsed to a single space, and cells containing quotes or commas are
 * double-quoted with internal quotes doubled (RFC 4180 style).
 */
function escapeCsvCell(v: any) {
  if (v === null || v === undefined) return "";
  // BUG FIX: the original regex /\r?\n/ left lone carriage returns (\r) in
  // place, which can break row boundaries in some CSV consumers. Collapse
  // \r\n, \n, and bare \r alike.
  const s = String(v).replace(/\r\n|\r|\n/g, " ");
  // (The original also tested includes("\n"), which could never match after
  // the replacement above — dead check removed.)
  if (s.includes('"') || s.includes(",")) {
    return `"${s.replace(/"/g, '""')}"`;
  }
  return s;
}
// GET /export — stream a flattened CSV financial report.
// Supported `type` values: patients_with_balance, collections_by_doctor
// (the latter requires a positive numeric staffId). `from`/`to` are optional
// date bounds; only format=csv (the default) is accepted.
router.get("/export", async (req: Request, res: Response): Promise<any> => {
  try {
    const type = String(req.query.type || "");
    // Note: new Date(...) on an unparseable string yields an Invalid Date;
    // downstream handling is up to the storage layer — TODO confirm.
    const from = req.query.from ? new Date(String(req.query.from)) : undefined;
    const to = req.query.to ? new Date(String(req.query.to)) : undefined;
    const staffId = req.query.staffId ? Number(req.query.staffId) : undefined;
    const format = String(req.query.format || "csv").toLowerCase();
    if (format !== "csv") {
      return res.status(400).json({ message: "Only CSV export is supported" });
    }
    // Pick the patient summary set for the requested report type.
    let patientsSummary: any[] = [];
    if (type === "patients_with_balance") {
      patientsSummary = await storage.fetchAllPatientsWithBalances(from, to);
    } else if (type === "collections_by_doctor") {
      if (!staffId || !Number.isFinite(staffId) || staffId <= 0) {
        return res.status(400).json({ message: "Missing or invalid staffId for collections_by_doctor" });
      }
      patientsSummary = await storage.fetchAllPatientsForDoctor(staffId, from, to);
    } else {
      return res.status(400).json({ message: "Unsupported report type" });
    }
    // Expand each patient into per-transaction financial rows (5000 appears
    // to be a row cap passed to the storage layer — TODO confirm semantics).
    const patientsWithFinancials = await storage.buildExportRowsForPatients(patientsSummary, 5000);
    // Build CSV - flattened rows: one line per financial row per patient.
    // columns: patientId, patientName, currentBalance, type, date, procedureCode, billed, paid, adjusted, totalDue, status
    const header = [
      "patientId",
      "patientName",
      "currentBalance",
      "type",
      "date",
      "procedureCode",
      "billed",
      "paid",
      "adjusted",
      "totalDue",
      "status",
    ];
    const lines = [header.join(",")];
    for (const p of patientsWithFinancials) {
      const name = `${p.firstName ?? ""} ${p.lastName ?? ""}`.trim();
      for (const fr of p.financialRows) {
        // Monetary values are normalized to fixed 2-decimal strings; text
        // cells go through escapeCsvCell for quoting.
        lines.push(
          [
            escapeCsvCell(p.patientId),
            escapeCsvCell(name),
            (Number(p.currentBalance ?? 0)).toFixed(2),
            escapeCsvCell(fr.type),
            escapeCsvCell(fr.date),
            escapeCsvCell(fr.procedureCode),
            (Number(fr.billed ?? 0)).toFixed(2),
            (Number(fr.paid ?? 0)).toFixed(2),
            (Number(fr.adjusted ?? 0)).toFixed(2),
            (Number(fr.totalDue ?? 0)).toFixed(2),
            escapeCsvCell(fr.status),
          ].join(",")
        );
      }
    }
    // Filename embeds the report type and today's date (YYYY-MM-DD, UTC).
    const fname = `report-${type}-${new Date().toISOString().slice(0, 10)}.csv`;
    res.setHeader("Content-Type", "text/csv; charset=utf-8");
    res.setHeader("Content-Disposition", `attachment; filename="${fname}"`);
    return res.send(lines.join("\n"));
  } catch (err: any) {
    console.error("[/api/reports/export] error:", err?.message ?? err, err?.stack);
    return res.status(500).json({ message: "Export error" });
  }
});
export default router;

View File

@@ -0,0 +1,46 @@
import { Router } from "express";
import patientsRoutes from "./patients";
import appointmentsRoutes from "./appointments";
import appointmentProceduresRoutes from "./appointments-procedures";
import usersRoutes from "./users";
import staffsRoutes from "./staffs";
import npiProvidersRoutes from "./npiProviders";
import claimsRoutes from "./claims";
import patientDataExtractionRoutes from "./patientDataExtraction";
import insuranceCredsRoutes from "./insuranceCreds";
import documentsRoutes from "./documents";
import patientDocumentsRoutes from "./patient-documents";
import insuranceStatusRoutes from "./insuranceStatus";
import insuranceStatusDdmaRoutes from "./insuranceStatusDDMA";
import paymentsRoutes from "./payments";
import databaseManagementRoutes from "./database-management";
import notificationsRoutes from "./notifications";
import paymentOcrRoutes from "./paymentOcrExtraction";
import cloudStorageRoutes from "./cloud-storage";
import paymentsReportsRoutes from "./payments-reports";
import exportPaymentsReportsRoutes from "./export-payments-reports";
const router = Router();
// Top-level API router: every feature area is mounted under its own distinct
// URL prefix, so mount order is not significant here.
// --- patients & scheduling ---
router.use("/patients", patientsRoutes);
router.use("/appointments", appointmentsRoutes);
router.use("/appointment-procedures", appointmentProceduresRoutes);
// --- users, staff & providers ---
router.use("/users", usersRoutes);
router.use("/staffs", staffsRoutes);
router.use("/npiProviders", npiProvidersRoutes);
// --- data extraction, claims & insurance ---
router.use("/patientDataExtraction", patientDataExtractionRoutes);
router.use("/claims", claimsRoutes);
router.use("/insuranceCreds", insuranceCredsRoutes);
// --- documents ---
router.use("/documents", documentsRoutes);
router.use("/patient-documents", patientDocumentsRoutes);
// --- insurance status checks ---
router.use("/insurance-status", insuranceStatusRoutes);
router.use("/insurance-status-ddma", insuranceStatusDdmaRoutes);
// --- payments, admin & reporting ---
router.use("/payments", paymentsRoutes);
router.use("/database-management", databaseManagementRoutes);
router.use("/notifications", notificationsRoutes);
router.use("/payment-ocr", paymentOcrRoutes);
router.use("/cloud-storage", cloudStorageRoutes);
router.use("/payments-reports", paymentsReportsRoutes);
router.use("/export-payments-reports", exportPaymentsReportsRoutes);
export default router;

View File

@@ -0,0 +1,126 @@
import express, { Request, Response } from "express";
import { storage } from "../storage";
import { z } from "zod";
import {
insertInsuranceCredentialSchema,
InsuranceCredential,
} from "@repo/db/types";
const router = express.Router();
// ✅ Get all credentials for a user
// ✅ Get all credentials for the authenticated user
router.get("/", async (req: Request, res: Response): Promise<any> => {
  const user = req.user;
  if (!user?.id) {
    return res
      .status(401)
      .json({ message: "Unauthorized: user info missing" });
  }
  try {
    const rows = await storage.getInsuranceCredentialsByUser(user.id);
    return res.status(200).json(rows);
  } catch (err) {
    return res
      .status(500)
      .json({ error: "Failed to fetch credentials", details: String(err) });
  }
});
// ✅ Create credential for a user
// POST / — create an insurance credential for the authenticated user.
// Validates the body with zod; on a Prisma unique-constraint violation
// (code P2002) reports which field(s) clashed.
router.post("/", async (req: Request, res: Response): Promise<any> => {
  try {
    if (!req.user || !req.user.id) {
      return res
        .status(401)
        .json({ message: "Unauthorized: user info missing" });
    }
    // userId is taken from req.user (populated upstream), never from the
    // request body, so callers cannot create credentials for another user.
    const userId = req.user.id;
    const parseResult = insertInsuranceCredentialSchema.safeParse({
      ...req.body,
      userId,
    });
    if (!parseResult.success) {
      // Cast presumably works around a zod version whose SafeParseReturnType
      // does not narrow `.error` on the failed branch — TODO confirm.
      const flat = (
        parseResult as typeof parseResult & { error: z.ZodError<any> }
      ).error.flatten();
      // Surface the first field error as the top-level message; the full
      // field-error map rides along in `details`.
      const firstError =
        Object.values(flat.fieldErrors)[0]?.[0] || "Invalid input";
      return res.status(400).json({
        message: firstError,
        details: flat.fieldErrors,
      });
    }
    const credential = await storage.createInsuranceCredential(
      parseResult.data
    );
    return res.status(201).json(credential);
  } catch (err: any) {
    // Prisma P2002 = unique-constraint violation.
    if (err.code === "P2002") {
      return res.status(400).json({
        message: `Credential with this ${err.meta?.target?.join(", ")} already exists.`,
      });
    }
    return res
      .status(500)
      .json({ error: "Failed to create credential", details: String(err) });
  }
});
// ✅ Update credential
// ✅ Update credential (owner only)
router.put("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = (req as any).user?.id;
    if (!userId) return res.status(401).json({ message: "Unauthorized" });
    const id = Number(req.params.id);
    if (isNaN(id)) return res.status(400).send("Invalid credential ID");
    // SECURITY FIX: previously this route had no auth/ownership check, so any
    // caller could overwrite any user's credential by id (IDOR). Mirror the
    // existence + ownership checks the DELETE route already performs.
    const existing = await storage.getInsuranceCredential(id);
    if (!existing)
      return res.status(404).json({ message: "Credential not found" });
    if (existing.userId !== userId) {
      return res.status(403).json({
        message:
          "Forbidden: Credential belongs to a different user, you can't update this.",
      });
    }
    const updates = req.body as Partial<InsuranceCredential>;
    const credential = await storage.updateInsuranceCredential(id, updates);
    return res.status(200).json(credential);
  } catch (err) {
    return res
      .status(500)
      .json({ error: "Failed to update credential", details: String(err) });
  }
});
// ✅ Delete a credential
// ✅ Delete a credential (owner only). Distinguishes 404 (no such record)
// from 403 (record belongs to someone else) before deleting.
router.delete("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = (req as any).user?.id;
    if (!userId) return res.status(401).json({ message: "Unauthorized" });
    const credentialId = Number(req.params.id);
    if (isNaN(credentialId)) return res.status(400).send("Invalid ID");
    // Look the record up first so we can report 404 vs 403 accurately.
    const record = await storage.getInsuranceCredential(credentialId);
    if (!record) {
      return res.status(404).json({ message: "Credential not found" });
    }
    if (record.userId !== userId) {
      return res.status(403).json({
        message:
          "Forbidden: Credentials belongs to a different user, you can't delete this.",
      });
    }
    // Delete (storage method enforces userId + id)
    const deleted = await storage.deleteInsuranceCredential(
      userId,
      credentialId
    );
    if (!deleted) {
      return res
        .status(404)
        .json({ message: "Credential not found or already deleted" });
    }
    return res.status(204).send();
  } catch (err) {
    return res
      .status(500)
      .json({ error: "Failed to delete credential", details: String(err) });
  }
});
export default router;

View File

@@ -0,0 +1,844 @@
import { Router } from "express";
import { Request, Response } from "express";
import { storage } from "../storage";
import { forwardToSeleniumInsuranceEligibilityAgent } from "../services/seleniumInsuranceEligibilityClient";
import fs from "fs/promises";
import path from "path";
import PDFDocument from "pdfkit";
import { forwardToSeleniumInsuranceClaimStatusAgent } from "../services/seleniumInsuranceClaimStatusClient";
import fsSync from "fs";
import { emptyFolderContainingFile } from "../utils/emptyTempFolder";
import forwardToPatientDataExtractorService from "../services/patientDataExtractorService";
import {
InsertPatient,
insertPatientSchema,
} from "../../../../packages/db/types/patient-types";
import { formatDobForAgent } from "../utils/dateUtils";
const router = Router();
/** Utility: naive name splitter */
/**
 * Utility: naive name splitter — first whitespace-separated token becomes
 * firstName, everything remaining (joined by single spaces) becomes lastName.
 * Nullish or empty input yields empty strings for both.
 */
function splitName(fullName?: string | null) {
  if (!fullName) return { firstName: "", lastName: "" };
  const tokens = fullName.trim().split(/\s+/).filter(Boolean);
  if (tokens.length === 0) return { firstName: "", lastName: "" };
  const [first, ...rest] = tokens;
  return { firstName: first, lastName: rest.join(" ") };
}
/**
* Ensure patient exists for given insuranceId.
* If exists -> update first/last name when different.
* If not -> create using provided fields.
* Returns the patient object (the version read from DB after potential create/update).
*/
async function createOrUpdatePatientByInsuranceId(options: {
  insuranceId: string;
  firstName?: string | null;
  lastName?: string | null;
  dob?: string | Date | null;
  userId: number;
}) {
  const { insuranceId, firstName, lastName, dob, userId } = options;
  if (!insuranceId) throw new Error("Missing insuranceId");
  let patient = await storage.getPatientByInsuranceId(insuranceId);
  // Normalize incoming names (trim; nullish -> empty string)
  const incomingFirst = firstName?.trim() ?? "";
  const incomingLast = lastName?.trim() ?? "";
  if (patient && patient.id) {
    // Existing patient: update names only when a non-empty incoming value
    // differs from what is stored — an empty incoming name never clobbers
    // an existing one.
    const updates: any = {};
    if (
      incomingFirst &&
      String(patient.firstName ?? "").trim() !== incomingFirst
    ) {
      updates.firstName = incomingFirst;
    }
    if (
      incomingLast &&
      String(patient.lastName ?? "").trim() !== incomingLast
    ) {
      updates.lastName = incomingLast;
    }
    if (Object.keys(updates).length > 0) {
      await storage.updatePatient(patient.id, updates);
      // Refetch so the caller receives the post-update state
      patient = await storage.getPatientByInsuranceId(insuranceId);
    }
    return patient;
  } else {
    // No patient with this insuranceId yet: create one with the minimal
    // fields available from the insurance flow.
    const createPayload: any = {
      firstName: incomingFirst,
      lastName: incomingLast,
      dateOfBirth: dob, // raw from caller (string | Date | null)
      gender: "",
      phone: "",
      userId,
      insuranceId,
    };
    let patientData: InsertPatient;
    try {
      patientData = insertPatientSchema.parse(createPayload);
    } catch (err) {
      // handle malformed dob or other validation errors conservatively
      console.warn(
        "Failed to validate patient payload in insurance flow:",
        err
      );
      // rather than rethrow, drop the dateOfBirth field and re-validate;
      // NOTE(review): if another field is invalid this second parse still
      // throws — presumably acceptable, confirm against callers.
      const safePayload = { ...createPayload };
      delete (safePayload as any).dateOfBirth;
      patientData = insertPatientSchema.parse(safePayload);
    }
    await storage.createPatient(patientData);
    // Return the freshly created patient as read back from the DB
    return await storage.getPatientByInsuranceId(insuranceId);
  }
}
/**
* /eligibility-check
* - run selenium
* - if pdf created -> call extractor -> get name
* - create or update patient (by memberId)
* - attach PDF to patient (create pdf group/file)
* - return { patient, pdfFileId, extractedName ... }
*/
/**
 * POST /eligibility-check
 * Pipeline:
 *  1. run the selenium eligibility agent with stored site credentials
 *  2. extract the patient name (selenium page extraction first, PDF extractor as fallback)
 *  3. create or update the patient keyed by memberId (insuranceId)
 *  4. update patient status from the agent's eligibility flag
 *  5. attach the produced PDF to the patient's "Eligibility Status" group
 * Always cleans up the selenium temp folder in `finally`.
 */
router.post(
  "/eligibility-check",
  async (req: Request, res: Response): Promise<any> => {
    if (!req.body.data) {
      return res
        .status(400)
        .json({ error: "Missing Insurance Eligibility data for selenium" });
    }
    if (!req.user || !req.user.id) {
      return res.status(401).json({ error: "Unauthorized: user info missing" });
    }
    // Declared outside try so the finally-cleanup can see the agent result.
    let seleniumResult: any = undefined;
    let createdPdfFileId: number | null = null;
    let outputResult: any = {};
    try {
      // Handle both string and object data (clients may send either form).
      const insuranceEligibilityData = typeof req.body.data === 'string'
        ? JSON.parse(req.body.data)
        : req.body.data;
      const credentials = await storage.getInsuranceCredentialByUserAndSiteKey(
        req.user.id,
        insuranceEligibilityData.insuranceSiteKey
      );
      if (!credentials) {
        return res.status(404).json({
          error:
            "No insurance credentials found for this provider, Kindly Update this at Settings Page.",
        });
      }
      const enrichedData = {
        ...insuranceEligibilityData,
        massdhpUsername: credentials.username,
        massdhpPassword: credentials.password,
      };
      // 1) Run selenium agent
      try {
        seleniumResult =
          await forwardToSeleniumInsuranceEligibilityAgent(enrichedData);
      } catch (seleniumErr: any) {
        return res.status(502).json({
          error: "Selenium service failed",
          detail: seleniumErr?.message ?? String(seleniumErr),
        });
      }
      // 2) Extract data from selenium result (page extraction) and PDF
      let extracted: any = {};
      // First, try to get data from selenium's page extraction
      if (seleniumResult.firstName || seleniumResult.lastName) {
        extracted.firstName = seleniumResult.firstName || null;
        extracted.lastName = seleniumResult.lastName || null;
        console.log('[eligibility-check] Using name from selenium extraction:', {
          firstName: extracted.firstName,
          lastName: extracted.lastName
        });
      }
      // Also check for combined name field (fallback)
      else if (seleniumResult.name) {
        const parts = splitName(seleniumResult.name);
        extracted.firstName = parts.firstName;
        extracted.lastName = parts.lastName;
        console.log('[eligibility-check] Using combined name from selenium extraction:', parts);
      }
      // If no name from selenium, try PDF extraction
      if (!extracted.firstName && !extracted.lastName &&
        seleniumResult?.pdf_path &&
        seleniumResult.pdf_path.endsWith(".pdf")
      ) {
        try {
          const pdfPath = seleniumResult.pdf_path;
          console.log('[eligibility-check] Extracting data from PDF:', pdfPath);
          const pdfBuffer = await fs.readFile(pdfPath);
          const extraction = await forwardToPatientDataExtractorService({
            buffer: pdfBuffer,
            originalname: path.basename(pdfPath),
            mimetype: "application/pdf",
          } as any);
          console.log('[eligibility-check] PDF Extraction result:', extraction);
          if (extraction.name) {
            const parts = splitName(extraction.name);
            extracted.firstName = parts.firstName;
            extracted.lastName = parts.lastName;
            console.log('[eligibility-check] Split name from PDF:', parts);
          } else {
            console.warn('[eligibility-check] No name extracted from PDF');
          }
        } catch (extractErr: any) {
          console.error('[eligibility-check] Patient data extraction failed:', extractErr);
          // Continue without extracted names - we'll use form names or create patient with empty names
        }
      }
      // Step-3) Create or update patient name using extracted info (prefer extractor -> request)
      const insuranceId = String(
        insuranceEligibilityData.memberId ?? ""
      ).trim();
      if (!insuranceId) {
        return res.status(400).json({ error: "Missing memberId" });
      }
      // Always prioritize extracted data from MassHealth over form input
      // Form input is only used as fallback when extraction fails
      const preferFirst = extracted.firstName || null;
      const preferLast = extracted.lastName || null;
      console.log('[eligibility-check] Name priority:', {
        extracted: { firstName: extracted.firstName, lastName: extracted.lastName },
        fromForm: { firstName: insuranceEligibilityData.firstName, lastName: insuranceEligibilityData.lastName },
        using: { firstName: preferFirst, lastName: preferLast }
      });
      let patient;
      try {
        patient = await createOrUpdatePatientByInsuranceId({
          insuranceId,
          firstName: preferFirst,
          lastName: preferLast,
          dob: insuranceEligibilityData.dateOfBirth,
          userId: req.user.id,
        });
        console.log('[eligibility-check] Patient after create/update:', patient);
      } catch (patientOpErr: any) {
        return res.status(500).json({
          error: "Failed to create/update patient",
          detail: patientOpErr?.message ?? String(patientOpErr),
        });
      }
      // Step 4: Update patient status based on selenium result
      if (patient && patient.id !== undefined) {
        // Use eligibility from selenium extraction if available, otherwise default to UNKNOWN
        let newStatus = "UNKNOWN";
        if (seleniumResult.eligibility === "Y") {
          newStatus = "ACTIVE";
        } else if (seleniumResult.eligibility === "N") {
          newStatus = "INACTIVE";
        }
        // Prepare updates object
        const updates: any = { status: newStatus };
        // Update insurance provider if extracted
        if (seleniumResult.insurance) {
          updates.insuranceProvider = seleniumResult.insurance;
          console.log('[eligibility-check] Updating insurance provider:', seleniumResult.insurance);
        }
        await storage.updatePatient(patient.id, updates);
        outputResult.patientUpdateStatus = `Patient status updated to ${newStatus}${seleniumResult.insurance ? ', insurance updated' : ''}`;
        console.log('[eligibility-check] Status updated:', {
          patientId: patient.id,
          newStatus,
          eligibility: seleniumResult.eligibility,
          insurance: seleniumResult.insurance
        });
        // Step 5: Handle PDF Upload
        if (
          seleniumResult.pdf_path &&
          seleniumResult.pdf_path.endsWith(".pdf")
        ) {
          const pdfBuffer = await fs.readFile(seleniumResult.pdf_path);
          const groupTitle = "Eligibility Status";
          const groupTitleKey = "ELIGIBILITY_STATUS";
          let group = await storage.findPdfGroupByPatientTitleKey(
            patient.id,
            groupTitleKey
          );
          // Step 5b: Create group if it doesnt exist
          if (!group) {
            group = await storage.createPdfGroup(
              patient.id,
              groupTitle,
              groupTitleKey
            );
          }
          if (!group?.id) {
            throw new Error("PDF group creation failed: missing group ID");
          }
          const created = await storage.createPdfFile(
            group.id,
            path.basename(seleniumResult.pdf_path),
            pdfBuffer
          );
          // created could be { id, filename } or just id, adapt to your storage API.
          if (created && typeof created === "object" && "id" in created) {
            createdPdfFileId = Number(created.id);
          }
          outputResult.pdfUploadStatus = `PDF saved to group: ${group.title}`;
        } else {
          outputResult.pdfUploadStatus =
            "No valid PDF path provided by Selenium, Couldn't upload pdf to server.";
        }
      } else {
        outputResult.patientUpdateStatus =
          "Patient not found or missing ID; no update performed";
      }
      res.json({
        patientUpdateStatus: outputResult.patientUpdateStatus,
        pdfUploadStatus: outputResult.pdfUploadStatus,
        pdfFileId: createdPdfFileId,
      });
    } catch (err: any) {
      console.error(err);
      return res.status(500).json({
        error: err.message || "Failed to forward to selenium agent",
      });
    } finally {
      // Best-effort cleanup of the selenium temp folder; never masks the response.
      try {
        if (seleniumResult && seleniumResult.pdf_path) {
          await emptyFolderContainingFile(seleniumResult.pdf_path);
        } else {
          console.log(`[eligibility-check] no pdf_path available to cleanup`);
        }
      } catch (cleanupErr) {
        console.error(
          `[eligibility-check] cleanup failed for ${seleniumResult?.pdf_path}`,
          cleanupErr
        );
      }
    }
  }
);
/**
 * POST /claim-status-check
 * Runs the selenium claim-status agent, converts the resulting screenshot
 * (PNG/JPG) to an A4 PDF, and attaches it to the patient's "Claim Status"
 * PDF group. The patient is looked up by memberId (insuranceId).
 * Always cleans up the selenium temp folder in `finally`.
 */
router.post(
  "/claim-status-check",
  async (req: Request, res: Response): Promise<any> => {
    if (!req.body.data) {
      return res
        .status(400)
        .json({ error: "Missing Insurance Status data for selenium" });
    }
    if (!req.user || !req.user.id) {
      return res.status(401).json({ error: "Unauthorized: user info missing" });
    }
    // Declared outside try so the finally-cleanup can see the agent result.
    let result: any = undefined;
    /** Render a single image file onto one A4 page and return the PDF bytes. */
    async function imageToPdfBuffer(imagePath: string): Promise<Buffer> {
      return new Promise<Buffer>((resolve, reject) => {
        try {
          const doc = new PDFDocument({ autoFirstPage: false });
          const chunks: Uint8Array[] = [];
          // collect data chunks
          doc.on("data", (chunk: any) => chunks.push(chunk));
          doc.on("end", () => resolve(Buffer.concat(chunks)));
          doc.on("error", (err: any) => reject(err));
          const A4_WIDTH = 595.28; // points
          const A4_HEIGHT = 841.89; // points
          doc.addPage({ size: [A4_WIDTH, A4_HEIGHT] });
          doc.image(imagePath, 0, 0, {
            fit: [A4_WIDTH, A4_HEIGHT],
            align: "center",
            valign: "center",
          });
          doc.end();
        } catch (err) {
          reject(err);
        }
      });
    }
    try {
      const insuranceClaimStatusData = JSON.parse(req.body.data);
      const credentials = await storage.getInsuranceCredentialByUserAndSiteKey(
        req.user.id,
        insuranceClaimStatusData.insuranceSiteKey
      );
      if (!credentials) {
        return res.status(404).json({
          error:
            "No insurance credentials found for this provider, Kindly Update this at Settings Page.",
        });
      }
      const enrichedData = {
        ...insuranceClaimStatusData,
        massdhpUsername: credentials.username,
        massdhpPassword: credentials.password,
      };
      result = await forwardToSeleniumInsuranceClaimStatusAgent(enrichedData);
      let createdPdfFileId: number | null = null;
      // Step 1: Check result — patient must already exist for this memberId.
      const patient = await storage.getPatientByInsuranceId(
        insuranceClaimStatusData.memberId
      );
      if (patient && patient.id !== undefined) {
        let pdfBuffer: Buffer | null = null;
        let generatedPdfPath: string | null = null;
        if (
          result.ss_path &&
          (result.ss_path.endsWith(".png") ||
            result.ss_path.endsWith(".jpg") ||
            result.ss_path.endsWith(".jpeg"))
        ) {
          try {
            // Ensure file exists
            if (!fsSync.existsSync(result.ss_path)) {
              throw new Error(`Screenshot file not found: ${result.ss_path}`);
            }
            // Convert image to PDF buffer
            pdfBuffer = await imageToPdfBuffer(result.ss_path);
            // Optionally write generated PDF to temp path (so name is available for createPdfFile)
            const pdfFileName = `claimStatus_${insuranceClaimStatusData.memberId}_${Date.now()}.pdf`;
            generatedPdfPath = path.join(
              path.dirname(result.ss_path),
              pdfFileName
            );
            await fs.writeFile(generatedPdfPath, pdfBuffer);
          } catch (err) {
            console.error("Failed to convert screenshot to PDF:", err);
            result.pdfUploadStatus = `Failed to convert screenshot to PDF: ${String(err)}`;
          }
        } else {
          result.pdfUploadStatus =
            "No valid PDF or screenshot path provided by Selenium; nothing to upload.";
        }
        if (pdfBuffer && generatedPdfPath) {
          const groupTitle = "Claim Status";
          const groupTitleKey = "CLAIM_STATUS";
          let group = await storage.findPdfGroupByPatientTitleKey(
            patient.id,
            groupTitleKey
          );
          // Create group if missing
          if (!group) {
            group = await storage.createPdfGroup(
              patient.id,
              groupTitle,
              groupTitleKey
            );
          }
          if (!group?.id) {
            throw new Error("PDF group creation failed: missing group ID");
          }
          // Use the basename for storage
          const basename = path.basename(generatedPdfPath);
          const created = await storage.createPdfFile(
            group.id,
            basename,
            pdfBuffer
          );
          if (created && typeof created === "object" && "id" in created) {
            createdPdfFileId = Number(created.id);
          }
          result.pdfUploadStatus = `PDF saved to group: ${group.title}`;
        }
      } else {
        result.patientUpdateStatus =
          "Patient not found or missing ID; no update performed";
      }
      res.json({
        pdfUploadStatus: result.pdfUploadStatus,
        pdfFileId: createdPdfFileId,
      });
      return;
    } catch (err: any) {
      console.error(err);
      return res.status(500).json({
        error: err.message || "Failed to forward to selenium agent",
      });
    } finally {
      // Best-effort cleanup of the selenium temp folder (screenshot + generated PDF).
      try {
        if (result && result.ss_path) {
          await emptyFolderContainingFile(result.ss_path);
        } else {
          console.log(`[claim-status-check] no ss_path available to cleanup`);
        }
      } catch (cleanupErr) {
        console.error(
          `[claim-status-check] cleanup failed for ${result?.ss_path}`,
          cleanupErr
        );
      }
    }
  }
);
/**
 * POST /appointments/check-all-eligibilities?date=YYYY-MM-DD
 * Batch-runs the eligibility pipeline for every appointment on the given day.
 * Appointments are processed SEQUENTIALLY (the selenium agent is not designed
 * for concurrent sessions). Each appointment yields one result item with
 * processed / error / warning / pdfFileId fields; failures for one appointment
 * never abort the batch. Temp files are cleaned per appointment, with a final
 * retry pass in the outer `finally`.
 */
router.post(
  "/appointments/check-all-eligibilities",
  async (req: Request, res: Response): Promise<any> => {
    // Query param: date=YYYY-MM-DD (required)
    const date = String(req.query.date ?? "").trim();
    if (!date) {
      return res
        .status(400)
        .json({ error: "Missing date query param (YYYY-MM-DD)" });
    }
    if (!req.user || !req.user.id) {
      return res.status(401).json({ error: "Unauthorized: user info missing" });
    }
    // Track any paths that couldn't be cleaned immediately so we can try again at the end
    const remainingCleanupPaths = new Set<string>();
    try {
      // 1) fetch appointments for the day (reuse your storage API)
      const dayAppointments = await storage.getAppointmentsByDateForUser(
        date,
        req.user.id
      );
      if (!Array.isArray(dayAppointments)) {
        return res
          .status(500)
          .json({ error: "Failed to load appointments for date" });
      }
      const results: Array<any> = [];
      // process sequentially so selenium agent / python semaphore isn't overwhelmed
      for (const apt of dayAppointments) {
        // For each appointment we keep a per-appointment seleniumResult so we can cleanup its files
        let seleniumResult: any = undefined;
        // Per-appointment outcome; pushed to `results` on every exit path.
        const resultItem: any = {
          appointmentId: apt.id,
          patientId: apt.patientId ?? null,
          processed: false,
          error: null,
          pdfFileId: null,
          patientUpdateStatus: null,
          warning: null,
        };
        try {
          // fetch patient record (use getPatient or getPatientById depending on your storage)
          const patient = apt.patientId
            ? await storage.getPatient(apt.patientId)
            : null;
          const memberId = (patient?.insuranceId ?? "").toString().trim();
          // create a readable patient label for error messages
          const patientLabel = patient
            ? `${patient.firstName ?? ""} ${patient.lastName ?? ""}`.trim() ||
              `patient#${patient.id}`
            : `patient#${apt.patientId ?? "unknown"}`;
          const aptLabel = `appointment#${apt.id}${apt.date ? ` (${apt.date}${apt.startTime ? ` ${apt.startTime}` : ""})` : ""}`;
          if (!memberId) {
            resultItem.error = `Missing insuranceId for ${patientLabel} — skipping ${aptLabel}`;
            results.push(resultItem);
            continue;
          }
          // prepare eligibility data; prefer patient DOB + name if present
          const dob = patient?.dateOfBirth;
          if (!dob) {
            resultItem.error = `Missing dob for ${patientLabel} — skipping ${aptLabel}`;
            results.push(resultItem);
            continue;
          }
          // Convert Date object → YYYY-MM-DD string - req for selenium agent.
          const dobStr = formatDobForAgent(dob);
          if (!dobStr) {
            resultItem.error = `Invalid or missing DOB for ${patientLabel} — skipping ${aptLabel}`;
            results.push(resultItem);
            continue;
          }
          // NOTE(review): site key is hard-coded to "MH" (MassHealth) here —
          // confirm whether batch checks should cover other insurance sites.
          const payload = {
            memberId,
            dateOfBirth: dobStr,
            insuranceSiteKey: "MH",
          };
          // Get credentials for this user+site
          const credentials =
            await storage.getInsuranceCredentialByUserAndSiteKey(
              req.user.id,
              payload.insuranceSiteKey
            );
          if (!credentials) {
            resultItem.error = `No insurance credentials found for siteKey — skipping ${aptLabel} for ${patientLabel}`;
            results.push(resultItem);
            continue;
          }
          // enrich payload
          const enriched = {
            ...payload,
            massdhpUsername: credentials.username,
            massdhpPassword: credentials.password,
          };
          // forward to selenium agent (sequential)
          try {
            seleniumResult =
              await forwardToSeleniumInsuranceEligibilityAgent(enriched);
          } catch (seleniumErr: any) {
            resultItem.error = `Selenium agent failed for ${patientLabel} (${aptLabel}): ${seleniumErr?.message ?? String(seleniumErr)}`;
            results.push(resultItem);
            continue;
          }
          // Attempt extraction (if pdf_path present)
          const extracted: any = {};
          if (
            seleniumResult?.pdf_path &&
            seleniumResult.pdf_path.endsWith(".pdf")
          ) {
            try {
              const pdfPath = seleniumResult.pdf_path;
              const pdfBuffer = await fs.readFile(pdfPath);
              const extraction = await forwardToPatientDataExtractorService({
                buffer: pdfBuffer,
                originalname: path.basename(pdfPath),
                mimetype: "application/pdf",
              } as any);
              if (extraction.name) {
                const parts = splitName(extraction.name);
                extracted.firstName = parts.firstName;
                extracted.lastName = parts.lastName;
              }
            } catch (extractErr: any) {
              // Extraction failure is non-fatal: names are optional here.
              resultItem.warning = `Extraction failed: ${extractErr?.message ?? String(extractErr)}`;
            }
          }
          // create or update patient by insuranceId — prefer extracted name
          const preferFirst = extracted.firstName ?? null;
          const preferLast = extracted.lastName ?? null;
          try {
            await createOrUpdatePatientByInsuranceId({
              insuranceId: memberId,
              firstName: preferFirst,
              lastName: preferLast,
              dob: payload.dateOfBirth,
              userId: req.user.id,
            });
          } catch (patientOpErr: any) {
            resultItem.error = `Failed to create/update patient ${patientLabel} for ${aptLabel}: ${patientOpErr?.message ?? String(patientOpErr)}`;
            results.push(resultItem);
            continue;
          }
          // fetch patient again
          const updatedPatient =
            await storage.getPatientByInsuranceId(memberId);
          if (!updatedPatient || !updatedPatient.id) {
            resultItem.error = `Patient not found after create/update for ${patientLabel} (${aptLabel})`;
            results.push(resultItem);
            continue;
          }
          // Update patient status based on seleniumResult.eligibility
          // NOTE(review): unlike /eligibility-check, anything other than "Y"
          // (including undefined) is mapped to INACTIVE here, not UNKNOWN —
          // confirm this asymmetry is intended.
          const newStatus =
            seleniumResult?.eligibility === "Y" ? "ACTIVE" : "INACTIVE";
          // 1. updating patient
          await storage.updatePatient(updatedPatient.id, { status: newStatus });
          resultItem.patientUpdateStatus = `Patient status updated to ${newStatus}`;
          // 2. updating appointment status - for aptmnt page
          try {
            await storage.updateAppointment(Number(apt.id), {
              eligibilityStatus: newStatus,
            });
            resultItem.appointmentUpdateStatus = `Appointment eligibility set to ${newStatus}`;
          } catch (apptUpdateErr: any) {
            resultItem.warning =
              (resultItem.warning ? resultItem.warning + " | " : "") +
              `Failed to update appointment eligibility: ${apptUpdateErr?.message ?? String(apptUpdateErr)}`;
          }
          // If PDF exists, upload to PdfGroup (ELIGIBILITY_STATUS)
          if (
            seleniumResult?.pdf_path &&
            seleniumResult.pdf_path.endsWith(".pdf")
          ) {
            try {
              const pdfBuf = await fs.readFile(seleniumResult.pdf_path);
              const groupTitle = "Eligibility Status";
              const groupTitleKey = "ELIGIBILITY_STATUS";
              let group = await storage.findPdfGroupByPatientTitleKey(
                updatedPatient.id,
                groupTitleKey
              );
              if (!group) {
                group = await storage.createPdfGroup(
                  updatedPatient.id,
                  groupTitle,
                  groupTitleKey
                );
              }
              if (!group?.id)
                throw new Error("Failed to create/find pdf group");
              const created = await storage.createPdfFile(
                group.id,
                path.basename(seleniumResult.pdf_path),
                pdfBuf
              );
              // storage.createPdfFile's return shape varies; accept object or number.
              if (created && typeof created === "object" && "id" in created) {
                resultItem.pdfFileId = Number(created.id);
              } else if (typeof created === "number") {
                resultItem.pdfFileId = created;
              } else if (created && (created as any).id) {
                resultItem.pdfFileId = (created as any).id;
              }
              resultItem.processed = true;
            } catch (pdfErr: any) {
              resultItem.warning = `PDF upload failed for ${patientLabel} (${aptLabel}): ${pdfErr?.message ?? String(pdfErr)}`;
            }
          } else {
            // no pdf; still mark processed true (status updated)
            resultItem.processed = true;
            resultItem.pdfFileId = null;
          }
          results.push(resultItem);
        } catch (err: any) {
          resultItem.error = `Unexpected error for appointment#${apt.id}: ${err?.message ?? String(err)}`;
          results.push(resultItem);
          console.error(
            "[batch eligibility] unexpected error for appointment",
            apt.id,
            err
          );
        } finally {
          // Per-appointment cleanup: always try to remove selenium temp files for this appointment
          try {
            if (
              seleniumResult &&
              (seleniumResult.pdf_path || seleniumResult.ss_path)
            ) {
              // prefer pdf_path, fallback to ss_path
              const candidatePath =
                seleniumResult.pdf_path ?? seleniumResult.ss_path;
              try {
                await emptyFolderContainingFile(candidatePath);
              } catch (cleanupErr: any) {
                console.warn(
                  `[batch cleanup] failed to clean ${candidatePath} for appointment ${apt.id}`,
                  cleanupErr
                );
                // remember path for final cleanup attempt
                remainingCleanupPaths.add(candidatePath);
              }
            }
          } catch (cleanupOuterErr: any) {
            console.warn(
              "[batch cleanup] unexpected error during per-appointment cleanup",
              cleanupOuterErr
            );
            // don't throw — we want to continue processing next appointments
          }
        } // end try/catch/finally per appointment
      } // end for appointments
      // return summary
      return res.json({ date, count: results.length, results });
    } catch (err: any) {
      console.error("[check-all-eligibilities] error", err);
      return res
        .status(500)
        .json({ error: err?.message ?? "Internal server error" });
    } finally {
      // Final cleanup attempt for any remaining paths we couldn't delete earlier
      try {
        if (remainingCleanupPaths.size > 0) {
          for (const p of remainingCleanupPaths) {
            try {
              await emptyFolderContainingFile(p);
            } catch (finalCleanupErr: any) {
              console.error(`[final cleanup] failed for ${p}`, finalCleanupErr);
            }
          }
        }
      } catch (outerFinalErr: any) {
        console.error(
          "[check-all-eligibilities final cleanup] unexpected error",
          outerFinalErr
        );
      }
    }
  }
);
export default router;

// ===== extraction artifact removed: "View File / @@ -0,0 +1,699 @@" =====
// ===== second route file begins below (DDMA eligibility router) =====
import { Router, Request, Response } from "express";
import { storage } from "../storage";
import {
forwardToSeleniumDdmaEligibilityAgent,
forwardOtpToSeleniumDdmaAgent,
getSeleniumDdmaSessionStatus,
} from "../services/seleniumDdmaInsuranceEligibilityClient";
import fs from "fs/promises";
import fsSync from "fs";
import path from "path";
import PDFDocument from "pdfkit";
import { emptyFolderContainingFile } from "../utils/emptyTempFolder";
import {
InsertPatient,
insertPatientSchema,
} from "../../../../packages/db/types/patient-types";
import { io } from "../socket";
const router = Router();
/**
 * Job context stored in memory by sessionId.
 * NOTE(review): this registry is process-local — contexts are lost on server
 * restart and will not be shared across multiple Node instances; confirm
 * single-process deployment.
 */
interface DdmaJobContext {
  userId: number; // owner of the job (req.user.id at submission time)
  insuranceEligibilityData: any; // parsed, enriched (includes username/password)
  socketId?: string; // socket.io client id for progress events, if connected
}
// sessionId -> pending job context; entries are deleted when polling terminates.
const ddmaJobs: Record<string, DdmaJobContext> = {};
/**
 * Utility: naive name splitter.
 * The first whitespace-separated token becomes firstName; everything else
 * (joined by single spaces) becomes lastName. Null/empty input yields "".
 */
function splitName(fullName?: string | null) {
  if (!fullName) return { firstName: "", lastName: "" };
  const [firstName = "", ...rest] = fullName
    .trim()
    .split(/\s+/)
    .filter(Boolean);
  return { firstName, lastName: rest.join(" ") };
}
/**
 * Render a single image file onto one A4 page and resolve with the PDF bytes.
 * Rejects if pdfkit emits an error or if the image cannot be loaded.
 */
async function imageToPdfBuffer(imagePath: string): Promise<Buffer> {
  return new Promise<Buffer>((resolve, reject) => {
    try {
      const PAGE_WIDTH = 595.28; // A4 width in PostScript points
      const PAGE_HEIGHT = 841.89; // A4 height in PostScript points
      const pieces: Uint8Array[] = [];
      const doc = new PDFDocument({ autoFirstPage: false });
      // Stream the document into memory and hand back one Buffer at the end.
      doc.on("data", (piece: any) => pieces.push(piece));
      doc.on("error", (streamErr: any) => reject(streamErr));
      doc.on("end", () => resolve(Buffer.concat(pieces)));
      doc.addPage({ size: [PAGE_WIDTH, PAGE_HEIGHT] });
      // Scale the image to fit the page while keeping it centered.
      doc.image(imagePath, 0, 0, {
        fit: [PAGE_WIDTH, PAGE_HEIGHT],
        align: "center",
        valign: "center",
      });
      doc.end();
    } catch (err) {
      reject(err);
    }
  });
}
/**
 * Ensure a patient record exists for the given insuranceId.
 * - Existing patient: patch firstName/lastName only when a non-empty incoming
 *   value differs from what is stored; returns undefined.
 * - No patient: create one; if schema validation rejects the payload (e.g. a
 *   malformed dateOfBirth), retry once without the dateOfBirth field.
 * @throws Error when insuranceId is missing.
 */
async function createOrUpdatePatientByInsuranceId(options: {
  insuranceId: string;
  firstName?: string | null;
  lastName?: string | null;
  dob?: string | Date | null;
  userId: number;
}) {
  const { insuranceId, firstName, lastName, dob, userId } = options;
  if (!insuranceId) throw new Error("Missing insuranceId");
  const nextFirst = (firstName || "").trim();
  const nextLast = (lastName || "").trim();
  const existing = await storage.getPatientByInsuranceId(insuranceId);
  if (existing && existing.id) {
    // Update path: collect only real changes, skip empty incoming names.
    const changes: any = {};
    if (nextFirst && String(existing.firstName ?? "").trim() !== nextFirst) {
      changes.firstName = nextFirst;
    }
    if (nextLast && String(existing.lastName ?? "").trim() !== nextLast) {
      changes.lastName = nextLast;
    }
    if (Object.keys(changes).length > 0) {
      await storage.updatePatient(existing.id, changes);
    }
    return;
  }
  // Create path: build a minimal payload and validate it against the schema.
  const createPayload: any = {
    firstName: nextFirst,
    lastName: nextLast,
    dateOfBirth: dob,
    gender: "",
    phone: "",
    userId,
    insuranceId,
  };
  let patientData: InsertPatient;
  try {
    patientData = insertPatientSchema.parse(createPayload);
  } catch (err) {
    // Validation failed — drop dateOfBirth (the usual offender) and re-parse.
    const fallbackPayload = { ...createPayload };
    delete (fallbackPayload as any).dateOfBirth;
    patientData = insertPatientSchema.parse(fallbackPayload);
  }
  await storage.createPatient(patientData);
}
/**
 * When Selenium finishes for a given sessionId, run the patient + PDF pipeline
 * and return the final API response shape:
 *   { patientUpdateStatus, pdfUploadStatus, pdfFileId, [error] }
 * Never throws — processing errors are folded into the returned object, and
 * temp-file cleanup always runs in `finally`.
 * @param sessionId       selenium session id (used only for logging/cleanup context)
 * @param job             in-memory job context captured at submission time
 * @param seleniumResult  raw agent result; expected fields: patientName,
 *                        eligibility, ss_path (screenshot). pdf_path is set
 *                        here after screenshot→PDF conversion.
 */
async function handleDdmaCompletedJob(
  sessionId: string,
  job: DdmaJobContext,
  seleniumResult: any
) {
  let createdPdfFileId: number | null = null;
  const outputResult: any = {};
  // We'll wrap the processing in try/catch/finally so cleanup always runs
  try {
    // 1) ensuring memberid.
    const insuranceEligibilityData = job.insuranceEligibilityData;
    const insuranceId = String(insuranceEligibilityData.memberId ?? "").trim();
    if (!insuranceId) {
      throw new Error("Missing memberId for ddma job");
    }
    // 2) Create or update patient (with name from selenium result if available)
    const patientNameFromResult =
      typeof seleniumResult?.patientName === "string"
        ? seleniumResult.patientName.trim()
        : null;
    const { firstName, lastName } = splitName(patientNameFromResult);
    await createOrUpdatePatientByInsuranceId({
      insuranceId,
      firstName,
      lastName,
      dob: insuranceEligibilityData.dateOfBirth,
      userId: job.userId,
    });
    // 3) Update patient status + PDF upload
    const patient = await storage.getPatientByInsuranceId(
      insuranceEligibilityData.memberId
    );
    if (!patient?.id) {
      outputResult.patientUpdateStatus =
        "Patient not found; no update performed";
      return {
        patientUpdateStatus: outputResult.patientUpdateStatus,
        pdfUploadStatus: "none",
        pdfFileId: null,
      };
    }
    // update patient status.
    // NOTE(review): DDMA reports eligibility as "active" (lowercase word),
    // unlike MassHealth's "Y"/"N" — confirm against the agent's contract.
    const newStatus =
      seleniumResult.eligibility === "active" ? "ACTIVE" : "INACTIVE";
    await storage.updatePatient(patient.id, { status: newStatus });
    outputResult.patientUpdateStatus = `Patient status updated to ${newStatus}`;
    // convert screenshot -> pdf if available
    let pdfBuffer: Buffer | null = null;
    let generatedPdfPath: string | null = null;
    if (
      seleniumResult &&
      seleniumResult.ss_path &&
      typeof seleniumResult.ss_path === "string" &&
      (seleniumResult.ss_path.endsWith(".png") ||
        seleniumResult.ss_path.endsWith(".jpg") ||
        seleniumResult.ss_path.endsWith(".jpeg"))
    ) {
      try {
        if (!fsSync.existsSync(seleniumResult.ss_path)) {
          throw new Error(
            `Screenshot file not found: ${seleniumResult.ss_path}`
          );
        }
        pdfBuffer = await imageToPdfBuffer(seleniumResult.ss_path);
        const pdfFileName = `ddma_eligibility_${insuranceEligibilityData.memberId}_${Date.now()}.pdf`;
        generatedPdfPath = path.join(
          path.dirname(seleniumResult.ss_path),
          pdfFileName
        );
        await fs.writeFile(generatedPdfPath, pdfBuffer);
        // ensure cleanup uses this
        seleniumResult.pdf_path = generatedPdfPath;
      } catch (err: any) {
        // Conversion failure is non-fatal: status update above still stands.
        console.error("Failed to convert screenshot to PDF:", err);
        outputResult.pdfUploadStatus = `Failed to convert screenshot to PDF: ${String(err)}`;
      }
    } else {
      outputResult.pdfUploadStatus =
        "No valid screenshot (ss_path) provided by Selenium; nothing to upload.";
    }
    if (pdfBuffer && generatedPdfPath) {
      // Attach the generated PDF under the patient's "Eligibility Status" group.
      const groupTitle = "Eligibility Status";
      const groupTitleKey = "ELIGIBILITY_STATUS";
      let group = await storage.findPdfGroupByPatientTitleKey(
        patient.id,
        groupTitleKey
      );
      if (!group) {
        group = await storage.createPdfGroup(
          patient.id,
          groupTitle,
          groupTitleKey
        );
      }
      if (!group?.id) {
        throw new Error("PDF group creation failed: missing group ID");
      }
      const created = await storage.createPdfFile(
        group.id,
        path.basename(generatedPdfPath),
        pdfBuffer
      );
      if (created && typeof created === "object" && "id" in created) {
        createdPdfFileId = Number(created.id);
      }
      outputResult.pdfUploadStatus = `PDF saved to group: ${group.title}`;
    } else {
      outputResult.pdfUploadStatus =
        "No valid PDF path provided by Selenium, Couldn't upload pdf to server.";
    }
    return {
      patientUpdateStatus: outputResult.patientUpdateStatus,
      pdfUploadStatus: outputResult.pdfUploadStatus,
      pdfFileId: createdPdfFileId,
    };
  } catch (err: any) {
    // Fold the failure into the response instead of propagating it.
    return {
      patientUpdateStatus: outputResult.patientUpdateStatus,
      pdfUploadStatus:
        outputResult.pdfUploadStatus ??
        `Failed to process DDMA job: ${err?.message ?? String(err)}`,
      pdfFileId: createdPdfFileId,
      error: err?.message ?? String(err),
    };
  } finally {
    // ALWAYS attempt cleanup of temp files
    try {
      if (seleniumResult && seleniumResult.pdf_path) {
        await emptyFolderContainingFile(seleniumResult.pdf_path);
      } else if (seleniumResult && seleniumResult.ss_path) {
        await emptyFolderContainingFile(seleniumResult.ss_path);
      } else {
        console.log(
          `[ddma-eligibility] no pdf_path or ss_path available to cleanup`
        );
      }
    } catch (cleanupErr) {
      console.error(
        `[ddma-eligibility cleanup failed for ${seleniumResult?.pdf_path ?? seleniumResult?.ss_path}]`,
        cleanupErr
      );
    }
  }
}
// --- top of file, alongside ddmaJobs ---
// Holder for the most recent completed session so the frontend can fetch the
// result even if its socket disconnected before the final emit.
// NOTE(review): only ONE final result is retained at a time; a second session
// completing overwrites the first — confirm this is acceptable.
let currentFinalSessionId: string | null = null;
let currentFinalResult: any = null;
// Current time as an ISO-8601 UTC string, used to prefix log lines.
function now() {
  return new Date().toISOString();
}
// Timestamped console logger: `<iso time> [tag] message`, plus optional context.
function log(tag: string, msg: string, ctx?: any) {
  console.log(`${now()} [${tag}] ${msg}`, ctx ?? "");
}
/**
 * Emit a socket.io event to one client, never throwing.
 * Missing socketId, a disconnected socket, and emit failures are all logged
 * and swallowed so socket delivery can never break the calling pipeline.
 */
function emitSafe(socketId: string | undefined, event: string, payload: any) {
  if (!socketId) {
    log("socket", "no socketId for emit", { event });
    return;
  }
  try {
    const target = io?.sockets.sockets.get(socketId);
    if (target) {
      target.emit(event, payload);
      log("socket", "emitted", { socketId, event });
    } else {
      log("socket", "socket not found (maybe disconnected)", {
        socketId,
        event,
      });
    }
  } catch (err: any) {
    log("socket", "emit failed", { socketId, event, err: err?.message });
  }
}
/**
 * Polls Python agent for session status and emits socket events:
 * - 'selenium:otp_required' when waiting_for_otp
 * - 'selenium:session_update' when completed/error
 * - absolute timeout + transient error handling.
 * - pollTimeoutMs default = 2 minutes (adjust where invoked)
 * Terminates (and deletes the ddmaJobs entry) on: completion, terminal agent
 * error/not_found, absolute deadline, repeated identical non-terminal status,
 * or too many consecutive transient network errors.
 */
async function pollAgentSessionAndProcess(
  sessionId: string,
  socketId?: string,
  pollTimeoutMs = 2 * 60 * 1000
) {
  const maxAttempts = 300;
  const baseDelayMs = 1000;
  const maxTransientErrors = 12;
  // Give up if the same non-terminal status repeats this many times.
  const noProgressLimit = 100;
  const job = ddmaJobs[sessionId];
  let transientErrorCount = 0;
  let consecutiveNoProgress = 0;
  let lastStatus: string | null = null;
  const deadline = Date.now() + pollTimeoutMs;
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    // absolute deadline check
    if (Date.now() > deadline) {
      emitSafe(socketId, "selenium:session_update", {
        session_id: sessionId,
        status: "error",
        message: `Polling timeout reached (${Math.round(pollTimeoutMs / 1000)}s).`,
      });
      delete ddmaJobs[sessionId];
      return;
    }
    log(
      "poller",
      `attempt=${attempt} session=${sessionId} transientErrCount=${transientErrorCount}`
    );
    try {
      const st = await getSeleniumDdmaSessionStatus(sessionId);
      const status = st?.status ?? null;
      log("poller", "got status", {
        sessionId,
        status,
        message: st?.message,
        resultKeys: st?.result ? Object.keys(st.result) : null,
      });
      // reset transient errors on success
      transientErrorCount = 0;
      // if status unchanged and non-terminal, increment no-progress counter
      const isTerminalLike =
        status === "completed" || status === "error" || status === "not_found";
      if (status === lastStatus && !isTerminalLike) {
        consecutiveNoProgress++;
      } else {
        consecutiveNoProgress = 0;
      }
      lastStatus = status;
      // if no progress for too many consecutive polls -> abort
      if (consecutiveNoProgress >= noProgressLimit) {
        emitSafe(socketId, "selenium:session_update", {
          session_id: sessionId,
          status: "error",
          message: `No progress from selenium agent (status="${status}") after ${consecutiveNoProgress} polls; aborting.`,
        });
        emitSafe(socketId, "selenium:session_error", {
          session_id: sessionId,
          status: "error",
          message: "No progress from selenium agent",
        });
        delete ddmaJobs[sessionId];
        return;
      }
      // always emit debug to client if socket exists
      emitSafe(socketId, "selenium:debug", {
        session_id: sessionId,
        attempt,
        status,
        serverTime: new Date().toISOString(),
      });
      // If agent is waiting for OTP, inform client but keep polling (do not return)
      if (status === "waiting_for_otp") {
        emitSafe(socketId, "selenium:otp_required", {
          session_id: sessionId,
          message: "OTP required. Please enter the OTP.",
        });
        // do not return — keep polling (allows same poller to pick up completion)
        await new Promise((r) => setTimeout(r, baseDelayMs));
        continue;
      }
      // Completed path
      if (status === "completed") {
        log("poller", "agent completed; processing result", {
          sessionId,
          resultKeys: st.result ? Object.keys(st.result) : null,
        });
        // Persist raw result so frontend can fetch if socket disconnects
        currentFinalSessionId = sessionId;
        currentFinalResult = {
          rawSelenium: st.result,
          processedAt: null,
          final: null,
        };
        let finalResult: any = null;
        if (job && st.result) {
          try {
            finalResult = await handleDdmaCompletedJob(
              sessionId,
              job,
              st.result
            );
            currentFinalResult.final = finalResult;
            currentFinalResult.processedAt = Date.now();
          } catch (err: any) {
            currentFinalResult.final = {
              error: "processing_failed",
              detail: err?.message ?? String(err),
            };
            currentFinalResult.processedAt = Date.now();
            log("poller", "handleDdmaCompletedJob failed", {
              sessionId,
              err: err?.message ?? err,
            });
          }
        } else {
          // BUG FIX: currentFinalResult is a flat object, not keyed by
          // sessionId — indexing it threw a TypeError on undefined, which the
          // outer catch then mistook for a transient polling error.
          currentFinalResult.final = {
            error: "no_job_or_no_result",
          };
          currentFinalResult.processedAt = Date.now();
        }
        // Emit final update (if socket present)
        emitSafe(socketId, "selenium:session_update", {
          session_id: sessionId,
          status: "completed",
          rawSelenium: st.result,
          final: currentFinalResult.final,
        });
        // cleanup job context
        delete ddmaJobs[sessionId];
        return;
      }
      // Terminal error / not_found
      if (status === "error" || status === "not_found") {
        const emitPayload = {
          session_id: sessionId,
          status,
          message: st?.message || "Selenium session error",
        };
        emitSafe(socketId, "selenium:session_update", emitPayload);
        emitSafe(socketId, "selenium:session_error", emitPayload);
        delete ddmaJobs[sessionId];
        return;
      }
    } catch (err: any) {
      const axiosStatus =
        err?.response?.status ?? (err?.status ? Number(err.status) : undefined);
      const errCode = err?.code ?? err?.errno;
      const errMsg = err?.message ?? String(err);
      const errData = err?.response?.data ?? null;
      // If agent explicitly returned 404 -> terminal (session gone)
      if (
        axiosStatus === 404 ||
        (typeof errMsg === "string" && errMsg.includes("not_found"))
      ) {
        console.warn(
          `${new Date().toISOString()} [poller] terminal 404/not_found for ${sessionId}: data=${JSON.stringify(errData)}`
        );
        // Emit not_found to client
        const emitPayload = {
          session_id: sessionId,
          status: "not_found",
          message:
            errData?.detail || "Selenium session not found (agent cleaned up).",
        };
        emitSafe(socketId, "selenium:session_update", emitPayload);
        emitSafe(socketId, "selenium:session_error", emitPayload);
        // Remove job context and stop polling
        delete ddmaJobs[sessionId];
        return;
      }
      // Detailed transient error logging
      transientErrorCount++;
      if (transientErrorCount > maxTransientErrors) {
        const emitPayload = {
          session_id: sessionId,
          status: "error",
          message:
            "Repeated network errors while polling selenium agent; giving up.",
        };
        emitSafe(socketId, "selenium:session_update", emitPayload);
        emitSafe(socketId, "selenium:session_error", emitPayload);
        delete ddmaJobs[sessionId];
        return;
      }
      // Exponential backoff capped at 30s for transient network failures.
      const backoffMs = Math.min(
        30_000,
        baseDelayMs * Math.pow(2, transientErrorCount - 1)
      );
      console.warn(
        `${new Date().toISOString()} [poller] transient error (#${transientErrorCount}) for ${sessionId}: code=${errCode} status=${axiosStatus} msg=${errMsg} data=${JSON.stringify(errData)}`
      );
      console.warn(
        `${new Date().toISOString()} [poller] backing off ${backoffMs}ms before next attempt`
      );
      await new Promise((r) => setTimeout(r, backoffMs));
      continue;
    }
    // normal poll interval
    await new Promise((r) => setTimeout(r, baseDelayMs));
  }
  // overall timeout fallback (maxAttempts exhausted before any terminal state)
  emitSafe(socketId, "selenium:session_update", {
    session_id: sessionId,
    status: "error",
    message: "Polling timeout while waiting for selenium session",
  });
  delete ddmaJobs[sessionId];
}
/**
* POST /ddma-eligibility
* Starts DDMA eligibility Selenium job.
* Expects:
* - req.body.data: stringified JSON like your existing /eligibility-check
* - req.body.socketId: socket.io client id
*/
router.post(
  "/ddma-eligibility",
  async (req: Request, res: Response): Promise<any> => {
    // The payload to forward to the Selenium agent is mandatory.
    if (!req.body.data) {
      return res
        .status(400)
        .json({ error: "Missing Insurance Eligibility data for selenium" });
    }
    // Portal credentials are looked up per-user, so authentication is required.
    if (!req.user || !req.user.id) {
      return res.status(401).json({ error: "Unauthorized: user info missing" });
    }
    try {
      // Clients may send the payload either as a JSON string or as an object.
      const rawData =
        typeof req.body.data === "string"
          ? JSON.parse(req.body.data)
          : req.body.data;
      const credentials = await storage.getInsuranceCredentialByUserAndSiteKey(
        req.user.id,
        rawData.insuranceSiteKey
      );
      if (!credentials) {
        return res.status(404).json({
          error:
            "No insurance credentials found for this provider, Kindly Update this at Settings Page.",
        });
      }
      // Inject the stored portal credentials the Selenium agent logs in with.
      const enrichedData = {
        ...rawData,
        massddmaUsername: credentials.username,
        massddmaPassword: credentials.password,
      };
      const socketId: string | undefined = req.body.socketId;
      const agentResp =
        await forwardToSeleniumDdmaEligibilityAgent(enrichedData);
      // The agent must acknowledge with a started session before we track it.
      if (
        !agentResp ||
        agentResp.status !== "started" ||
        !agentResp.session_id
      ) {
        return res.status(502).json({
          error: "Selenium agent did not return a started session",
          detail: agentResp,
        });
      }
      const sessionId = agentResp.session_id as string;
      // Save job context (read later by the poller once the agent completes)
      ddmaJobs[sessionId] = {
        userId: req.user.id,
        insuranceEligibilityData: enrichedData,
        socketId,
      };
      // start polling in background to notify client via socket and process job
      pollAgentSessionAndProcess(sessionId, socketId).catch((e) =>
        console.warn("pollAgentSessionAndProcess failed", e)
      );
      // reply immediately with started status; results arrive over the socket
      return res.json({ status: "started", session_id: sessionId });
    } catch (err: any) {
      // NOTE: a malformed JSON string in req.body.data lands here as a 500.
      console.error(err);
      return res.status(500).json({
        error: err.message || "Failed to start ddma selenium agent",
      });
    }
  }
);
/**
* POST /selenium/submit-otp
* Body: { session_id, otp, socketId? }
* Forwards OTP to Python agent and optionally notifies client socket.
*/
router.post(
  "/selenium/submit-otp",
  async (req: Request, res: Response): Promise<any> => {
    const { session_id: sessionId, otp, socketId } = req.body;
    // Both identifiers are mandatory; reject early otherwise.
    if (!sessionId || !otp) {
      return res.status(400).json({ error: "session_id and otp are required" });
    }
    try {
      const agentResult = await forwardOtpToSeleniumDdmaAgent(sessionId, otp);
      // Tell the originating browser its OTP reached the agent
      // (emitSafe is a no-op when no socket id was provided).
      const payload = { session_id: sessionId, result: agentResult };
      emitSafe(socketId, "selenium:otp_submitted", payload);
      return res.json(agentResult);
    } catch (err: any) {
      const reason = err?.response?.data || err?.message || err;
      console.error("Failed to forward OTP:", reason);
      return res.status(500).json({
        error: "Failed to forward otp to selenium agent",
        detail: err?.message || err,
      });
    }
  }
);
// GET /selenium/session/:sid/final
router.get(
  "/selenium/session/:sid/final",
  async (req: Request, res: Response) => {
    // Only one final result is kept in memory at a time; any other
    // session id is a miss.
    const requestedSid = req.params.sid;
    if (!requestedSid) {
      return res.status(400).json({ error: "session id required" });
    }
    const isCurrentSession =
      currentFinalSessionId === requestedSid && !!currentFinalResult;
    if (!isCurrentSession) {
      return res.status(404).json({ error: "final result not found" });
    }
    return res.json(currentFinalResult);
  }
);
export default router;

View File

@@ -0,0 +1,69 @@
import { Router, Request, Response } from "express";
import { storage } from "../storage";
const router = Router();
// List the 20 most recent notifications for the authenticated user.
router.get("/", async (req: Request, res: Response): Promise<any> => {
  const userId = (req as any).user?.id;
  if (!userId) return res.status(401).json({ message: "Unauthorized" });
  try {
    res.json(await storage.getNotifications(userId, 20, 0));
  } catch (err) {
    console.error("Failed to fetch notifications:", err);
    res.status(500).json({ message: "Failed to fetch notifications" });
  }
});
// Mark one notification as read
router.post("/:id/read", async (req: Request, res: Response): Promise<any> => {
  const userId = (req as any).user?.id;
  if (!userId) return res.status(401).json({ message: "Unauthorized" });
  try {
    const notificationId = Number(req.params.id);
    // Scoped update: only succeeds when the notification belongs to the user.
    const updated = await storage.markNotificationRead(userId, notificationId);
    if (!updated) {
      return res.status(404).json({ message: "Notification not found" });
    }
    res.json({ success: true });
  } catch (err) {
    console.error("Failed to mark notification as read:", err);
    res.status(500).json({ message: "Failed to mark notification as read" });
  }
});
// Mark all notifications as read
router.post("/read-all", async (req: Request, res: Response): Promise<any> => {
  const userId = (req as any).user?.id;
  if (!userId) return res.status(401).json({ message: "Unauthorized" });
  try {
    // Mark every unread notification for the user; report how many changed.
    const updatedCount = await storage.markAllNotificationsRead(userId);
    res.json({ success: true, updatedCount });
  } catch (err) {
    console.error("Failed to mark all notifications read:", err);
    res.status(500).json({ message: "Failed to mark all notifications read" });
  }
});
router.delete(
  "/delete-all",
  async (req: Request, res: Response): Promise<any> => {
    const userId = (req as any).user?.id;
    if (!userId) return res.status(401).json({ message: "Unauthorized" });
    try {
      // Remove every notification owned by this user; report the count.
      const deletedCount = await storage.deleteAllNotifications(userId);
      res.json({ success: true, deletedCount });
    } catch (err) {
      console.error("Failed to delete notifications:", err);
      res.status(500).json({ message: "Failed to delete notifications" });
    }
  }
);
export default router;

View File

@@ -0,0 +1,99 @@
import express, { Request, Response } from "express";
import { z } from "zod";
import { storage } from "../storage";
import { insertNpiProviderSchema } from "@repo/db/types";
const router = express.Router();
// List all NPI providers belonging to the authenticated user.
router.get("/", async (req: Request, res: Response) => {
  if (!req.user?.id) {
    return res.status(401).json({ message: "Unauthorized" });
  }
  try {
    const providers = await storage.getNpiProvidersByUser(req.user.id);
    res.status(200).json(providers);
  } catch (err) {
    res.status(500).json({
      error: "Failed to fetch NPI providers",
      details: String(err),
    });
  }
});
// Create an NPI provider for the authenticated user. Input is validated with
// insertNpiProviderSchema; duplicate NPIs are rejected with a 400.
router.post("/", async (req: Request, res: Response) => {
  if (!req.user?.id) {
    return res.status(401).json({ message: "Unauthorized" });
  }
  try {
    const candidate = { ...req.body, userId: req.user.id };
    const parsed = insertNpiProviderSchema.safeParse(candidate);
    if (!parsed.success) {
      // Surface the first field error as the primary message.
      const fieldErrors = parsed.error.flatten().fieldErrors;
      const firstError = Object.values(fieldErrors)[0]?.[0] || "Invalid input";
      return res.status(400).json({
        message: firstError,
        details: fieldErrors,
      });
    }
    const provider = await storage.createNpiProvider(parsed.data);
    res.status(201).json(provider);
  } catch (err: any) {
    // P2002: unique-constraint violation (duplicate NPI for this user).
    if (err.code === "P2002") {
      return res.status(400).json({
        message: "This NPI already exists for the user",
      });
    }
    res.status(500).json({
      error: "Failed to create NPI provider",
      details: String(err),
    });
  }
});
router.put("/:id", async (req: Request, res: Response) => {
try {
const id = Number(req.params.id);
if (isNaN(id)) return res.status(400).send("Invalid ID");
const provider = await storage.updateNpiProvider(id, req.body);
res.status(200).json(provider);
} catch (err) {
res.status(500).json({
error: "Failed to update NPI provider",
details: String(err),
});
}
});
// Delete one of the authenticated user's NPI providers.
router.delete("/:id", async (req: Request, res: Response) => {
  if (!req.user?.id) {
    return res.status(401).json({ message: "Unauthorized" });
  }
  try {
    const id = Number(req.params.id);
    if (isNaN(id)) return res.status(400).send("Invalid ID");
    // Scoped delete: only removes the row when it belongs to this user.
    const deleted = await storage.deleteNpiProvider(req.user.id, id);
    if (!deleted) {
      return res.status(404).json({ message: "NPI provider not found" });
    }
    res.status(204).send();
  } catch (err) {
    res.status(500).json({
      error: "Failed to delete NPI provider",
      details: String(err),
    });
  }
});
export default router;

View File

@@ -0,0 +1,258 @@
import { Router } from "express";
import { Request, Response } from "express";
import { storage } from "../storage";
import multer from "multer";
import { z } from "zod";
const router = Router();
// Configure multer for file uploads
// Multer instance for patient-document uploads. Files are held in memory
// (no temp files) so the buffer can be handed straight to storage.
const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: 10 * 1024 * 1024, // 10MB limit
  },
  fileFilter: (req, file, cb) => {
    // Accept common document and image formats
    const allowedTypes = [
      'application/pdf',
      'image/jpeg',
      'image/jpg',
      'image/png',
      'image/gif',
      'application/msword',
      'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
      'text/plain',
    ];
    if (allowedTypes.includes(file.mimetype)) {
      cb(null, true); // accept the file
    } else {
      // Rejecting with an Error makes multer surface it to the route handler,
      // where it is mapped to a 400 response.
      cb(new Error('Invalid file type. Only PDF, images, and documents are allowed.'));
    }
  }
});
// Validation schemas
// Route params and query values arrive as strings; these schemas coerce
// them to numbers before they reach storage.
const uploadDocumentSchema = z.object({
  patientId: z.string().transform((val) => parseInt(val, 10)),
});
const getDocumentsSchema = z.object({
  patientId: z.string().transform((val) => parseInt(val, 10)),
  // limit/offset are optional; undefined means "no pagination requested".
  limit: z.string().optional().transform((val) => val ? parseInt(val, 10) : undefined),
  offset: z.string().optional().transform((val) => val ? parseInt(val, 10) : undefined),
});
// Despite the name, this is reused by download/view/delete for any numeric :id.
const deleteDocumentSchema = z.object({
  id: z.string().transform((val) => parseInt(val, 10)),
});
// POST /api/patient-documents/upload
// Upload a document for a specific patient
// Upload one document for a patient (multipart field name: "file").
router.post("/upload", upload.single("file"), async (req: Request, res: Response): Promise<any> => {
  try {
    const { patientId } = uploadDocumentSchema.parse(req.body);
    if (!req.file) {
      return res.status(400).json({ error: "No file uploaded" });
    }
    const { originalname, mimetype, size, buffer } = req.file;
    const document = await storage.createPatientDocument(
      patientId,
      originalname,
      originalname,
      mimetype,
      size,
      buffer
    );
    // fileSize is a BigInt; downcast so JSON serialization succeeds.
    res.status(201).json({
      success: true,
      document: { ...document, fileSize: Number(document.fileSize) },
    });
  } catch (error) {
    console.error("Error uploading document:", error);
    if (error instanceof z.ZodError) {
      return res.status(400).json({ error: "Invalid request data", details: error.errors });
    }
    // Errors raised by the multer fileFilter map to 400, not 500.
    if (error instanceof Error && error.message.includes('Invalid file type')) {
      return res.status(400).json({ error: error.message });
    }
    res.status(500).json({ error: "Internal server error" });
  }
});
// GET /api/patient-documents/patient/:patientId
// Get all documents for a specific patient
// List a patient's documents, optionally paginated (limit + offset together).
router.get("/patient/:patientId", async (req: Request, res: Response): Promise<any> => {
  try {
    const { patientId, limit, offset } = getDocumentsSchema.parse({
      patientId: req.params.patientId,
      limit: req.query.limit,
      offset: req.query.offset,
    });
    // BigInt fileSize is not JSON-serializable; downcast per document.
    const serialize = (doc: any) => ({ ...doc, fileSize: Number(doc.fileSize) });
    const wantsPagination = limit !== undefined && offset !== undefined;
    if (wantsPagination) {
      const page = await storage.getDocumentsByPatientIdPaginated(patientId, limit, offset);
      res.json({
        success: true,
        documents: page.documents.map(serialize),
        total: page.total,
      });
    } else {
      const documents = await storage.getDocumentsByPatientId(patientId);
      res.json({
        success: true,
        documents: documents.map(serialize),
      });
    }
  } catch (error) {
    console.error("Error fetching documents:", error);
    if (error instanceof z.ZodError) {
      return res.status(400).json({ error: "Invalid patient ID", details: error.errors });
    }
    res.status(500).json({ error: "Internal server error" });
  }
});
// GET /api/patient-documents/:id/download
// Download a specific document
// Stream a document back as a forced download (attachment disposition).
router.get("/:id/download", async (req: Request, res: Response): Promise<any> => {
  try {
    const { id } = deleteDocumentSchema.parse({ id: req.params.id });
    const stored = await storage.getDocumentFile(id);
    if (!stored) {
      return res.status(404).json({ error: "Document not found" });
    }
    const { buffer, document } = stored;
    const disposition = `attachment; filename="${encodeURIComponent(document.originalName)}"`;
    res.setHeader("Content-Type", document.mimeType);
    res.setHeader("Content-Length", document.fileSize.toString());
    res.setHeader("Content-Disposition", disposition);
    res.send(buffer);
  } catch (error) {
    console.error("Error downloading document:", error);
    if (error instanceof z.ZodError) {
      return res.status(400).json({ error: "Invalid document ID", details: error.errors });
    }
    res.status(500).json({ error: "Internal server error" });
  }
});
// GET /api/patient-documents/:id/view
// View a specific document (inline display)
// Stream a document for in-browser display (inline disposition).
router.get("/:id/view", async (req: Request, res: Response): Promise<any> => {
  try {
    const { id } = deleteDocumentSchema.parse({ id: req.params.id });
    const stored = await storage.getDocumentFile(id);
    if (!stored) {
      return res.status(404).json({ error: "Document not found" });
    }
    const { buffer, document } = stored;
    const disposition = `inline; filename="${encodeURIComponent(document.originalName)}"`;
    res.setHeader("Content-Type", document.mimeType);
    res.setHeader("Content-Length", document.fileSize.toString());
    res.setHeader("Content-Disposition", disposition);
    res.send(buffer);
  } catch (error) {
    console.error("Error viewing document:", error);
    if (error instanceof z.ZodError) {
      return res.status(400).json({ error: "Invalid document ID", details: error.errors });
    }
    res.status(500).json({ error: "Internal server error" });
  }
});
// DELETE /api/patient-documents/:id
// Delete a specific document
// Permanently delete a document by id.
router.delete("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const { id } = deleteDocumentSchema.parse({ id: req.params.id });
    const removed = await storage.deleteDocument(id);
    if (!removed) {
      return res.status(404).json({ error: "Document not found" });
    }
    res.json({ success: true, message: "Document deleted successfully" });
  } catch (error) {
    console.error("Error deleting document:", error);
    if (error instanceof z.ZodError) {
      return res.status(400).json({ error: "Invalid document ID", details: error.errors });
    }
    res.status(500).json({ error: "Internal server error" });
  }
});
// POST /api/patient-documents/scan
// Simulate document scanning (placeholder for actual scanner integration)
router.post("/scan", async (req: Request, res: Response): Promise<any> => {
  try {
    // Validate the target patient even though no scan is performed yet.
    const { patientId } = uploadDocumentSchema.parse(req.body);
    // This is a placeholder for actual scanner integration
    // In a real implementation, you would:
    // 1. Interface with scanner hardware/software
    // 2. Capture the scanned image
    // 3. Process and save the image
    // 4. Return the document info
    res.json({
      success: true,
      message: "Scanner interface ready. Please integrate with your scanner hardware/software.",
      patientId,
      note: "This endpoint requires integration with scanner hardware/software SDK."
    });
  } catch (error) {
    console.error("Error scanning document:", error);
    if (error instanceof z.ZodError) {
      return res.status(400).json({ error: "Invalid request data", details: error.errors });
    }
    res.status(500).json({ error: "Internal server error" });
  }
});
export default router;

View File

@@ -0,0 +1,23 @@
import { Router } from "express";
import type { Request, Response } from "express";
const router = Router();
import multer from "multer";
import forwardToPatientDataExtractorService from "../services/patientDataExtractorService";
const upload = multer({ storage: multer.memoryStorage() });
// POST /patientdataextract — forward an uploaded PDF (field name "pdf")
// to the extractor microservice and relay its response.
router.post("/patientdataextract", upload.single("pdf"), async (req: Request, res: Response): Promise<any>=> {
  const uploadedPdf = req.file;
  if (!uploadedPdf) {
    return res.status(400).json({ error: "No PDF file uploaded." });
  }
  try {
    const extraction = await forwardToPatientDataExtractorService(uploadedPdf);
    res.json(extraction);
  } catch (err) {
    console.error(err);
    res.status(500).json({ error: "Extraction failed" });
  }
});
export default router;

View File

@@ -0,0 +1,376 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { storage } from "../storage";
import { z } from "zod";
import { insertPatientSchema, updatePatientSchema } from "@repo/db/types";
import { normalizeInsuranceId } from "../utils/helpers";
const router = Router();
// Patient Routes
// Get all patients for the logged-in user
router.get("/", async (req, res) => {
  try {
    // Every patient owned by the authenticated user (auth middleware
    // upstream guarantees req.user here).
    const patients = await storage.getPatientsByUserId(req.user!.id);
    res.json(patients);
  } catch (error) {
    res.status(500).json({ message: "Failed to retrieve patients" });
  }
});
// Get recent patients (paginated)
router.get("/recent", async (req: Request, res: Response) => {
  try {
    // Defaults: first page of 10 when no paging params are supplied.
    const limit = parseInt(req.query.limit as string) || 10;
    const offset = parseInt(req.query.offset as string) || 0;
    // Fetch the page and the grand total concurrently.
    const pagePromise = storage.getRecentPatients(limit, offset);
    const totalPromise = storage.getTotalPatientCount();
    const [patients, totalCount] = await Promise.all([pagePromise, totalPromise]);
    res.json({ patients, totalCount });
  } catch (error) {
    console.error("Failed to retrieve recent patients:", error);
    res.status(500).json({ message: "Failed to retrieve recent patients" });
  }
});
router.get("/search", async (req: Request, res: Response): Promise<any> => {
  try {
    const {
      name,
      phone,
      insuranceId,
      gender,
      dob,
      term,
      limit = "10",
      offset = "0",
    } = req.query as Record<string, string>;
    // Accumulate a where-clause object from whichever filters were supplied.
    const filters: any = {};
    // Free-text "term" matches any of the main identity fields.
    if (term) {
      filters.OR = [
        { firstName: { contains: term, mode: "insensitive" } },
        { lastName: { contains: term, mode: "insensitive" } },
        { phone: { contains: term, mode: "insensitive" } },
        { insuranceId: { contains: term, mode: "insensitive" } },
      ];
    }
    // NOTE(review): when both "term" and "name" are supplied, this OR
    // overwrites the one built for "term" above — confirm the two params
    // are intended to be mutually exclusive.
    if (name) {
      filters.OR = [
        { firstName: { contains: name, mode: "insensitive" } },
        { lastName: { contains: name, mode: "insensitive" } },
      ];
    }
    if (phone) {
      filters.phone = { contains: phone, mode: "insensitive" };
    }
    if (insuranceId) {
      filters.insuranceId = { contains: insuranceId, mode: "insensitive" };
    }
    if (gender) {
      filters.gender = gender;
    }
    if (dob) {
      const parsed = new Date(dob);
      if (isNaN(parsed.getTime())) {
        return res.status(400).json({
          message: "Invalid date format for DOB. Use format: YYYY-MM-DD",
        });
      }
      // Match exact dateOfBirth (optional: adjust for timezone)
      filters.dateOfBirth = parsed;
    }
    // Run the page query and the total count concurrently.
    const [patients, totalCount] = await Promise.all([
      storage.searchPatients({
        filters,
        limit: parseInt(limit),
        offset: parseInt(offset),
      }),
      storage.countPatients(filters),
    ]);
    return res.json({ patients, totalCount });
  } catch (error) {
    console.error("Search error:", error);
    return res.status(500).json({ message: "Failed to search patients" });
  }
});
// get patient by insurance id
router.get(
  "/by-insurance-id",
  async (req: Request, res: Response): Promise<any> => {
    const insuranceId = req.query.insuranceId?.toString();
    if (!insuranceId) {
      return res.status(400).json({ error: "Missing insuranceId" });
    }
    try {
      // Lookup semantics: a miss is 200 with a null body, not a 404.
      const patient = await storage.getPatientByInsuranceId(insuranceId);
      return res.status(200).json(patient ? patient : null);
    } catch (err) {
      console.error("Failed to lookup patient:", err);
      return res.status(500).json({ error: "Internal server error" });
    }
  }
);
// GET /api/patients/:id/financials?limit=50&offset=0
// GET /api/patients/:id/financials?limit=50&offset=0
router.get(
  "/:id/financials",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const patientIdParam = req.params.id;
      if (!patientIdParam)
        return res.status(400).json({ message: "Patient ID required" });
      const patientId = parseInt(patientIdParam, 10);
      if (isNaN(patientId))
        return res.status(400).json({ message: "Invalid patient ID" });
      // BUG FIX: a non-numeric limit/offset query value previously produced
      // NaN (Math.min(1000, NaN) === NaN) which was passed straight into
      // storage. Fall back to the defaults when the value is not a finite
      // number, then clamp to sane bounds.
      const rawLimit = Number(req.query.limit ?? 50);
      const rawOffset = Number(req.query.offset ?? 0);
      const limit = Math.min(1000, Number.isFinite(rawLimit) ? rawLimit : 50); // cap maximums
      const offset = Math.max(0, Number.isFinite(rawOffset) ? rawOffset : 0);
      const { rows, totalCount } = await storage.getPatientFinancialRows(
        patientId,
        limit,
        offset
      );
      return res.json({ rows, totalCount, limit, offset });
    } catch (err) {
      console.error("Failed to fetch financial rows:", err);
      return res
        .status(500)
        .json({ message: "Failed to fetch financial rows" });
    }
  }
);
// Get a single patient by ID
// Get a single patient by ID.
router.get(
  "/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const patientIdParam = req.params.id;
      // Ensure that patientIdParam exists and is a valid number
      if (!patientIdParam) {
        return res.status(400).json({ message: "Patient ID is required" });
      }
      const patientId = parseInt(patientIdParam, 10);
      // BUG FIX: a non-numeric id previously reached storage as NaN; reject
      // it up front like the sibling "/:id/financials" route does.
      if (isNaN(patientId)) {
        return res.status(400).json({ message: "Invalid patient ID" });
      }
      const patient = await storage.getPatient(patientId);
      if (!patient) {
        return res.status(404).json({ message: "Patient not found" });
      }
      res.json(patient);
    } catch (error) {
      res.status(500).json({ message: "Failed to retrieve patient" });
    }
  }
);
// Create a new patient
// Create a new patient owned by the authenticated user.
router.post("/", async (req: Request, res: Response): Promise<any> => {
  try {
    const body: any = { ...req.body, userId: req.user!.id };
    // Normalize insuranceId early and return clear error if invalid
    try {
      body.insuranceId = normalizeInsuranceId(body.insuranceId);
    } catch (err: any) {
      return res.status(400).json({
        message: "Invalid insuranceId",
        details: err?.message ?? "Invalid insuranceId format",
      });
    }
    // BUG FIX: validation previously re-parsed the raw req.body, silently
    // discarding the normalized insuranceId computed above. Validate the
    // normalized copy instead so the stored value matches what the
    // duplicate check below uses.
    const patientData = insertPatientSchema.parse(body);
    // Check for duplicate insuranceId if it's provided
    if (patientData.insuranceId) {
      const existingPatient = await storage.getPatientByInsuranceId(
        patientData.insuranceId as string
      );
      if (existingPatient) {
        return res.status(409).json({
          message: "A patient with this insurance ID already exists.",
        });
      }
    }
    const patient = await storage.createPatient(patientData);
    res.status(201).json(patient);
  } catch (error) {
    if (error instanceof z.ZodError) {
      return res.status(400).json({
        message: "Validation error",
        errors: error.format(),
      });
    }
    res.status(500).json({ message: "Failed to create patient" });
  }
});
// Update an existing patient
router.put(
  "/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const patientIdParam = req.params.id;
      // Normalize incoming insuranceId (if present)
      try {
        if (req.body.insuranceId !== undefined) {
          req.body.insuranceId = normalizeInsuranceId(req.body.insuranceId);
        }
      } catch (err: any) {
        return res.status(400).json({
          message: "Invalid insuranceId",
          details: err?.message ?? "Invalid insuranceId format",
        });
      }
      // Ensure that patientIdParam exists and is a valid number
      if (!patientIdParam) {
        return res.status(400).json({ message: "Patient ID is required" });
      }
      // NOTE(review): parseInt result is not checked for NaN here, unlike
      // the "/:id/financials" route — a non-numeric id falls through to the
      // 404 path below.
      const patientId = parseInt(patientIdParam);
      // Check if patient exists and belongs to user
      // NOTE(review): unlike the DELETE route, no ownership check is
      // performed — confirm whether any authenticated user should be able
      // to update any patient.
      const existingPatient = await storage.getPatient(patientId);
      if (!existingPatient) {
        return res.status(404).json({ message: "Patient not found" });
      }
      // Validate request body (partial update schema)
      const patientData = updatePatientSchema.parse(req.body);
      // If updating insuranceId, check for uniqueness (excluding self)
      if (
        patientData.insuranceId &&
        patientData.insuranceId !== existingPatient.insuranceId
      ) {
        const duplicatePatient = await storage.getPatientByInsuranceId(
          patientData.insuranceId as string
        );
        if (duplicatePatient && duplicatePatient.id !== patientId) {
          return res.status(409).json({
            message: "Another patient with this insurance ID already exists.",
          });
        }
      }
      // Update patient
      const updatedPatient = await storage.updatePatient(
        patientId,
        patientData
      );
      res.json(updatedPatient);
    } catch (error) {
      if (error instanceof z.ZodError) {
        return res.status(400).json({
          message: "Validation error",
          errors: error.format(),
        });
      }
      res.status(500).json({ message: "Failed to update patient" });
    }
  }
);
// Delete a patient
router.delete(
  "/:id",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const idParam = req.params.id;
      if (!idParam) {
        return res.status(400).json({ message: "Patient ID is required" });
      }
      const patientId = parseInt(idParam);
      // The record must exist and belong to the requester before deletion.
      const existingPatient = await storage.getPatient(patientId);
      if (!existingPatient) {
        return res.status(404).json({ message: "Patient not found" });
      }
      const ownedByRequester = existingPatient.userId === req.user!.id;
      if (!ownedByRequester) {
        return res.status(403).json({
          message:
            "Forbidden: Patient belongs to a different user, you can't delete this.",
        });
      }
      await storage.deletePatient(patientId);
      res.status(204).send();
    } catch (error: any) {
      console.error("Delete patient error:", error);
      res.status(500).json({ message: "Failed to delete patient" });
    }
  }
);
// Get appointments for a specific patient
// Get appointments for a specific patient.
router.get(
  "/:patientId/appointments",
  async (req: Request, res: Response): Promise<any> => {
    try {
      // BUG FIX: this handler previously read req.params.id, but the route
      // declares ":patientId" — the param was always undefined, so every
      // request returned 400. Read the correct param name.
      const patientIdParam = req.params.patientId;
      if (!patientIdParam) {
        return res.status(400).json({ message: "Patient ID is required" });
      }
      const patientId = parseInt(patientIdParam, 10);
      if (isNaN(patientId)) {
        return res.status(400).json({ message: "Invalid patient ID" });
      }
      // The patient must exist before we list its appointments.
      const patient = await storage.getPatient(patientId);
      if (!patient) {
        return res.status(404).json({ message: "Patient not found" });
      }
      const appointments = await storage.getAppointmentsByPatientId(patientId);
      res.json(appointments);
    } catch (error) {
      res.status(500).json({ message: "Failed to retrieve appointments" });
    }
  }
);
export default router;

View File

@@ -0,0 +1,50 @@
import { Router, Request, Response } from "express";
import multer from "multer";
import { forwardToPaymentOCRService } from "../services/paymentOCRService";
const router = Router();
// keep files in memory; FastAPI accepts them as multipart bytes
const upload = multer({ storage: multer.memoryStorage() });
// POST /payment-ocr/extract (field name: "files")
// POST /payment-ocr/extract — accept multiple images (field name "files"),
// guard their MIME types, and forward them to the OCR service.
router.post(
  "/extract",
  upload.array("files"), // allow multiple images
  async (req: Request, res: Response): Promise<any> => {
    try {
      const files = req.files as Express.Multer.File[] | undefined;
      if (!files || files.length === 0) {
        return res
          .status(400)
          .json({ error: "No image files uploaded. Use field name 'files'." });
      }
      // Lightweight MIME guard before the heavier OCR round-trip.
      const allowed = new Set([
        "image/jpeg",
        "image/png",
        "image/tiff",
        "image/bmp",
        "image/jpg",
      ]);
      const rejected = files.filter(
        (f) => !allowed.has(f.mimetype.toLowerCase())
      );
      if (rejected.length > 0) {
        const names = rejected.map((b) => b.originalname).join(", ");
        return res.status(415).json({
          error: `Unsupported file types: ${names}`,
        });
      }
      const rows = await forwardToPaymentOCRService(files);
      return res.json({ rows });
    } catch (err) {
      console.error(err);
      return res.status(500).json({ error: "Payment OCR extraction failed" });
    }
  }
);
export default router;

View File

@@ -0,0 +1,213 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { storage } from "../storage";
const router = Router();
/**
* GET /api/payments-reports/summary
* optional query: from=YYYY-MM-DD&to=YYYY-MM-DD (ISO date strings)
*/
router.get("/summary", async (req: Request, res: Response): Promise<any> => {
  try {
    const { from: fromRaw, to: toRaw } = req.query;
    const from = fromRaw ? new Date(String(fromRaw)) : undefined;
    const to = toRaw ? new Date(String(toRaw)) : undefined;
    // Reject unparseable dates before touching storage.
    if (fromRaw && isNaN(from?.getTime() ?? NaN))
      return res.status(400).json({ message: "Invalid 'from' date" });
    if (toRaw && isNaN(to?.getTime() ?? NaN))
      return res.status(400).json({ message: "Invalid 'to' date" });
    const summary = await storage.getSummary(from, to);
    res.json(summary);
  } catch (err: any) {
    console.error(
      "GET /api/payments-reports/summary error:",
      err?.message ?? err,
      err?.stack
    );
    res.status(500).json({ message: "Failed to fetch dashboard summary" });
  }
});
/**
 * GET /api/payments-reports/patients-with-balances
* query:
* - limit (default 25)
* - cursor (optional base64 cursor token)
* - from / to (optional ISO date strings) - filter payments by createdAt in the range (inclusive)
*/
router.get(
  "/patients-with-balances",
  async (req: Request, res: Response): Promise<any> => {
    try {
      // Clamp the page size into [1, 200]; default 25.
      const requestedLimit = parseInt(String(req.query.limit || "25"), 10);
      const limit = Math.max(1, Math.min(200, requestedLimit));
      const cursor =
        typeof req.query.cursor === "string" ? String(req.query.cursor) : null;
      const from = req.query.from
        ? new Date(String(req.query.from))
        : undefined;
      const to = req.query.to ? new Date(String(req.query.to)) : undefined;
      if (req.query.from && isNaN(from?.getTime() ?? NaN)) {
        return res.status(400).json({ message: "Invalid 'from' date" });
      }
      if (req.query.to && isNaN(to?.getTime() ?? NaN)) {
        return res.status(400).json({ message: "Invalid 'to' date" });
      }
      // storage returns { balances, totalCount, nextCursor, hasMore }
      const data = await storage.getPatientsWithBalances(
        limit,
        cursor,
        from,
        to
      );
      res.json(data);
    } catch (err: any) {
      console.error(
        "GET /api/payments-reports/patient-balances error:",
        err?.message ?? err,
        err?.stack
      );
      res.status(500).json({ message: "Failed to fetch patient balances" });
    }
  }
);
/**
* GET /api/payments-reports/by-doctor/balances
* Query params:
* - staffId (required)
* - limit (optional, default 25)
* - cursor (optional)
* - from/to (optional ISO date strings) - filter payments by createdAt in the range (inclusive)
*
* Response: { balances, totalCount, nextCursor, hasMore }
*/
router.get(
  "/by-doctor/balances",
  async (req: Request, res: Response): Promise<any> => {
    try {
      // staffId is mandatory and must be a positive number.
      const staffIdRaw = req.query.staffId;
      if (!staffIdRaw) {
        return res
          .status(400)
          .json({ message: "Missing required 'staffId' query parameter" });
      }
      const staffId = Number(staffIdRaw);
      if (!Number.isFinite(staffId) || staffId <= 0) {
        return res
          .status(400)
          .json({ message: "Invalid 'staffId' query parameter" });
      }
      // Page size clamped into [1, 200]; default 25.
      const limit = Math.max(
        1,
        Math.min(200, parseInt(String(req.query.limit || "25"), 10))
      );
      const cursor =
        typeof req.query.cursor === "string" ? String(req.query.cursor) : null;
      // Optional createdAt date-range filter; reject unparseable dates.
      const from = req.query.from
        ? new Date(String(req.query.from))
        : undefined;
      const to = req.query.to ? new Date(String(req.query.to)) : undefined;
      if (req.query.from && isNaN(from?.getTime() ?? NaN)) {
        return res.status(400).json({ message: "Invalid 'from' date" });
      }
      if (req.query.to && isNaN(to?.getTime() ?? NaN)) {
        return res.status(400).json({ message: "Invalid 'to' date" });
      }
      // use the new storage method that returns only the paged balances
      const balancesResult = await storage.getPatientsBalancesByDoctor(
        staffId,
        limit,
        cursor,
        from,
        to
      );
      // Normalize the storage result into the documented response shape,
      // defaulting each field when storage returns a partial object.
      res.json({
        balances: balancesResult?.balances ?? [],
        totalCount: Number(balancesResult?.totalCount ?? 0),
        nextCursor: balancesResult?.nextCursor ?? null,
        hasMore: Boolean(balancesResult?.hasMore ?? false),
      });
    } catch (err: any) {
      console.error(
        "GET /api/payments-reports/by-doctor/balances error:",
        err?.message ?? err,
        err?.stack
      );
      res.status(500).json({
        message: "Failed to fetch doctor balances",
        detail: err?.message ?? String(err),
      });
    }
  }
);
/**
* GET /api/payments-reports/by-doctor/summary
* Query params:
* - staffId (required)
* - from/to (optional ISO date strings) - filter payments by createdAt in the range (inclusive)
*
* Response: { totalPatients, totalOutstanding, totalCollected, patientsWithBalance }
*/
router.get(
  "/by-doctor/summary",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const staffIdRaw = req.query.staffId;
      if (!staffIdRaw) {
        return res
          .status(400)
          .json({ message: "Missing required 'staffId' query parameter" });
      }
      const staffId = Number(staffIdRaw);
      const staffIdValid = Number.isFinite(staffId) && staffId > 0;
      if (!staffIdValid) {
        return res
          .status(400)
          .json({ message: "Invalid 'staffId' query parameter" });
      }
      // Optional createdAt date-range filter; reject unparseable dates.
      const from = req.query.from
        ? new Date(String(req.query.from))
        : undefined;
      const to = req.query.to ? new Date(String(req.query.to)) : undefined;
      if (req.query.from && isNaN(from?.getTime() ?? NaN)) {
        return res.status(400).json({ message: "Invalid 'from' date" });
      }
      if (req.query.to && isNaN(to?.getTime() ?? NaN)) {
        return res.status(400).json({ message: "Invalid 'to' date" });
      }
      // Per-doctor rollup only; no per-patient rows are returned here.
      const summary = await storage.getSummaryByDoctor(staffId, from, to);
      res.json(summary);
    } catch (err: any) {
      console.error(
        "GET /api/payments-reports/by-doctor/summary error:",
        err?.message ?? err,
        err?.stack
      );
      res.status(500).json({
        message: "Failed to fetch doctor summary",
        detail: err?.message ?? String(err),
      });
    }
  }
);
export default router;

View File

@@ -0,0 +1,459 @@
import { Router } from "express";
import { Request, Response } from "express";
import { storage } from "../storage";
import { z } from "zod";
import { ZodError } from "zod";
import {
insertPaymentSchema,
NewTransactionPayload,
newTransactionPayloadSchema,
paymentMethodOptions,
PaymentStatus,
paymentStatusOptions,
claimStatusOptions,
} from "@repo/db/types";
import { prisma } from "@repo/db/client";
import { PaymentStatusSchema } from "@repo/db/types";
import * as paymentService from "../services/paymentService";
// Zod schema for /filter query params: both bounds are required ISO-8601 datetime strings.
const paymentFilterSchema = z.object({
  from: z.string().datetime(),
  to: z.string().datetime(),
});
/**
 * Parse a base-10 integer from a route/query string.
 * Throws (callers catch and map to HTTP errors) when the value is absent
 * or does not start with digits.
 */
function parseIntOrError(input: string | undefined, name: string) {
  if (!input) throw new Error(`${name} is required`);
  const parsed = Number.parseInt(input, 10);
  if (Number.isNaN(parsed)) throw new Error(`${name} must be a valid number`);
  return parsed;
}
/**
 * Shared route error responder: Zod validation issues become a 400 with the
 * formatted field errors; anything else becomes a 500 carrying the error's
 * message (or a caller-supplied default).
 */
export function handleRouteError(
  res: Response,
  error: unknown,
  defaultMsg: string
) {
  if (error instanceof ZodError) {
    return res
      .status(400)
      .json({ message: "Validation error", errors: error.format() });
  }
  const message = error instanceof Error ? error.message : defaultMsg;
  return res.status(500).json({ message });
}
// Express router for payment endpoints (mounted under /api/payments per the route comments below).
const router = Router();
// GET /api/payments/recent — paginated list of the most recent payments.
router.get("/recent", async (req: Request, res: Response): Promise<any> => {
  try {
    // Pagination with safe defaults when params are missing or non-numeric.
    const limit = parseInt(req.query.limit as string) || 10;
    const offset = parseInt(req.query.offset as string) || 0;
    // Fetch the page and the overall count concurrently.
    const results = await Promise.all([
      storage.getRecentPayments(limit, offset),
      storage.getTotalPaymentCount(),
    ]);
    res.status(200).json({ payments: results[0], totalCount: results[1] });
  } catch (err) {
    console.error("Failed to fetch payments:", err);
    res.status(500).json({ message: "Failed to fetch recent payments" });
  }
});
// GET /api/payments/claim/:claimId — all payments attached to one claim.
router.get(
  "/claim/:claimId",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const claimId = parseIntOrError(req.params.claimId, "Claim ID");
      const payments = await storage.getPaymentsByClaimId(claimId);
      if (!payments) {
        return res.status(404).json({ message: "No payments found for claim" });
      }
      res.status(200).json(payments);
    } catch (error) {
      console.error("Error fetching payments:", error);
      res.status(500).json({ message: "Failed to retrieve payments" });
    }
  }
);
// GET /api/payments/patient/:patientId
// Paginated list of one patient's payments plus the total count.
// Fix: removed the second, unreachable isNaN(patientId) check that duplicated
// the first validation with a different error message.
router.get(
  "/patient/:patientId",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const patientIdParam = req.params.patientId;
      if (!patientIdParam) {
        return res.status(400).json({ message: "Missing patientId" });
      }
      const patientId = parseInt(patientIdParam);
      if (isNaN(patientId)) {
        return res.status(400).json({ message: "Invalid patientId" });
      }
      // Pagination with safe defaults when params are missing or non-numeric.
      const limit = parseInt(req.query.limit as string) || 10;
      const offset = parseInt(req.query.offset as string) || 0;
      const [payments, totalCount] = await Promise.all([
        storage.getRecentPaymentsByPatientId(patientId, limit, offset),
        storage.getTotalPaymentCountByPatient(patientId),
      ]);
      res.json({ payments, totalCount });
    } catch (error) {
      console.error("Failed to retrieve payments for patient:", error);
      res.status(500).json({ message: "Failed to retrieve patient payments" });
    }
  }
);
// GET /api/payments/filter — payments whose dates fall in a validated range.
router.get("/filter", async (req: Request, res: Response): Promise<any> => {
  try {
    const parsed = paymentFilterSchema.safeParse(req.query);
    if (!parsed.success) {
      return res.status(400).json({
        message: "Invalid date format",
        errors: parsed.error.errors,
      });
    }
    // Bounds are validated ISO datetime strings; convert for the storage query.
    const fromDate = new Date(parsed.data.from);
    const toDate = new Date(parsed.data.to);
    const payments = await storage.getPaymentsByDateRange(fromDate, toDate);
    res.status(200).json(payments);
  } catch (err) {
    console.error("Failed to filter payments:", err);
    res.status(500).json({ message: "Server error" });
  }
});
// GET /api/payments/:id — single payment lookup.
router.get("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const paymentId = parseIntOrError(req.params.id, "Payment ID");
    const payment = await storage.getPaymentById(paymentId);
    if (!payment) {
      return res.status(404).json({ message: "Payment not found" });
    }
    res.status(200).json(payment);
  } catch (err: unknown) {
    const message =
      err instanceof Error ? err.message : "Failed to retrieve payment";
    res.status(500).json({ message });
  }
});
// POST /api/payments/full-ocr-import
// Bulk-import OCR-extracted payment rows for the authenticated user.
router.post(
  "/full-ocr-import",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const userId = req.user?.id;
      if (!userId) return res.status(401).json({ message: "Unauthorized" });
      const { rows } = req.body;
      // Array.isArray rejects null/undefined as well as non-array payloads.
      if (!Array.isArray(rows)) {
        return res.status(400).json({ message: "Invalid OCR payload" });
      }
      const paymentIds = await paymentService.fullOcrPaymentService.importRows(
        rows,
        userId
      );
      res.status(200).json({
        message: "OCR rows imported successfully",
        paymentIds,
      });
    } catch (err) {
      console.error(err);
      if (err instanceof Error) {
        return res.status(500).json({ message: err.message });
      }
      return res
        .status(500)
        .json({ message: "Unknown error importing OCR payments" });
    }
  }
);
// POST /api/payments/:claimId — create a payment for a claim.
router.post("/:claimId", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = req.user?.id;
    if (!userId) return res.status(401).json({ message: "Unauthorized" });
    const claimId = parseIntOrError(req.params.claimId, "Claim ID");
    // Fold route/auth context into the payload before validating.
    const parsed = insertPaymentSchema.safeParse({
      ...req.body,
      claimId,
      userId,
    });
    if (!parsed.success) {
      return res.status(400).json({
        message: "Validation failed",
        errors: parsed.error.flatten(),
      });
    }
    const created = await storage.createPayment(parsed.data);
    res.status(201).json(created);
  } catch (err: unknown) {
    const message =
      err instanceof Error ? err.message : "Failed to create payment";
    res.status(500).json({ message });
  }
});
// PUT /api/payments/:id — apply a batch of service-line transactions.
router.put("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = req.user?.id;
    if (!userId) return res.status(401).json({ message: "Unauthorized" });
    const paymentId = parseIntOrError(req.params.id, "Payment ID");
    const parsed = newTransactionPayloadSchema.safeParse(
      req.body.data as NewTransactionPayload
    );
    if (!parsed.success) {
      return res.status(400).json({
        message: "Validation failed",
        errors: parsed.error.flatten(),
      });
    }
    // The service layer validates amounts and recalculates all totals.
    const updatedPayment = await paymentService.updatePayment(
      paymentId,
      parsed.data.serviceLineTransactions,
      userId
    );
    res.status(200).json(updatedPayment);
  } catch (err: unknown) {
    const message =
      err instanceof Error ? err.message : "Failed to update payment";
    res.status(500).json({ message });
  }
});
// PUT /api/payments/:id/pay-absolute-full-claim
// Pays off every outstanding service line in one set of CHECK transactions.
router.put(
  "/:id/pay-absolute-full-claim",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const userId = req.user?.id;
      if (!userId) return res.status(401).json({ message: "Unauthorized" });
      const paymentId = parseIntOrError(req.params.id, "Payment ID");
      const paymentRecord = await storage.getPaymentById(paymentId);
      if (!paymentRecord) {
        return res.status(404).json({ message: "Payment not found" });
      }
      // Service lines live on the claim when present, otherwise directly on
      // the payment (OCR-imported data).
      const serviceLines = paymentRecord.claim
        ? paymentRecord.claim.serviceLines
        : paymentRecord.serviceLines;
      if (!serviceLines || serviceLines.length === 0) {
        return res
          .status(400)
          .json({ message: "No service lines available for this payment" });
      }
      // One full-balance transaction per line that still owes anything.
      const serviceLineTransactions = [];
      for (const line of serviceLines) {
        if (!line.totalDue.gt(0)) continue;
        serviceLineTransactions.push({
          serviceLineId: line.id,
          paidAmount: line.totalDue.toNumber(),
          adjustedAmount: 0,
          method: paymentMethodOptions.CHECK,
          receivedDate: new Date(),
          notes: "Full claim payment",
        });
      }
      if (serviceLineTransactions.length === 0) {
        return res.status(400).json({ message: "No outstanding balance" });
      }
      // Use updatePayment for consistency & validation
      const updatedPayment = await paymentService.updatePayment(
        paymentId,
        serviceLineTransactions,
        userId
      );
      res.status(200).json(updatedPayment);
    } catch (err) {
      console.error(err);
      res.status(500).json({ message: "Failed to pay full claim" });
    }
  }
);
// PUT /api/payments/:id/revert-full-claim
// Issues negative transactions that cancel out everything paid/adjusted so far.
router.put(
  "/:id/revert-full-claim",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const userId = req.user?.id;
      if (!userId) return res.status(401).json({ message: "Unauthorized" });
      const paymentId = parseIntOrError(req.params.id, "Payment ID");
      const paymentRecord = await storage.getPaymentById(paymentId);
      if (!paymentRecord) {
        return res.status(404).json({ message: "Payment not found" });
      }
      const serviceLines = paymentRecord.claim
        ? paymentRecord.claim.serviceLines
        : paymentRecord.serviceLines;
      if (!serviceLines || serviceLines.length === 0) {
        return res
          .status(400)
          .json({ message: "No service lines available for this payment" });
      }
      // Build reversal transactions negating what's already paid/adjusted.
      const serviceLineTransactions = [];
      for (const line of serviceLines) {
        if (!line.totalPaid.gt(0) && !line.totalAdjusted.gt(0)) continue;
        serviceLineTransactions.push({
          serviceLineId: line.id,
          paidAmount: line.totalPaid.negated().toNumber(), // negative to undo
          adjustedAmount: line.totalAdjusted.negated().toNumber(),
          method: paymentMethodOptions.OTHER,
          receivedDate: new Date(),
          notes: "Reverted full claim",
        });
      }
      if (serviceLineTransactions.length === 0) {
        return res.status(400).json({ message: "Nothing to revert" });
      }
      const updatedPayment = await paymentService.updatePayment(
        paymentId,
        serviceLineTransactions,
        userId,
        { isReversal: true }
      );
      res.status(200).json(updatedPayment);
    } catch (err) {
      console.error(err);
      res.status(500).json({ message: "Failed to revert claim payments" });
    }
  }
);
// PATCH /api/payments/:id/status
// Updates a payment's status; voiding a claim-linked payment also voids the claim.
// Fixes: (1) an invalid status previously threw a ZodError from .parse and
// surfaced as a 500 instead of the intended 400; safeParse restores the 400 and
// makes the redundant Object.values membership re-check unnecessary (the schema
// already restricts to valid statuses). (2) removed the unused `updatedClaim`
// destructuring binding.
router.patch(
  "/:id/status",
  async (req: Request, res: Response): Promise<any> => {
    try {
      const userId = req.user?.id;
      if (!userId) {
        return res.status(401).json({ message: "Unauthorized" });
      }
      const paymentId = parseIntOrError(req.params.id, "Payment ID");
      // safeParse so a bad status is a client error, not a thrown ZodError.
      const parsedStatus = PaymentStatusSchema.safeParse(req.body?.data?.status);
      if (!parsedStatus.success) {
        return res.status(400).json({ message: "Invalid payment status" });
      }
      const status = parsedStatus.data as PaymentStatus;
      // Load existing payment
      const existingPayment = await storage.getPayment(paymentId);
      if (!existingPayment) {
        return res.status(404).json({ message: "Payment not found" });
      }
      // If changing to VOID and linked to a claim -> update both atomically
      if (status === paymentStatusOptions.VOID && existingPayment.claimId) {
        const [updatedPayment] = await prisma.$transaction([
          prisma.payment.update({
            where: { id: paymentId },
            data: { status, updatedById: userId },
          }),
          prisma.claim.update({
            where: { id: existingPayment.claimId },
            data: { status: claimStatusOptions.VOID },
          }),
        ]);
        return res.json(updatedPayment);
      }
      // Otherwise just update payment (use storage helper)
      const updatedPayment = await storage.updatePaymentStatus(
        paymentId,
        { status } as any,
        userId
      );
      return res.json(updatedPayment);
    } catch (err: unknown) {
      const message =
        err instanceof Error ? err.message : "Failed to update payment status";
      return res.status(500).json({ message });
    }
  }
);
// DELETE /api/payments/:id
// Deletes a payment, but only when it belongs to the authenticated user.
// Fix: the ownership check used `req.user!.id` (non-null assertion) even
// though `userId` was already narrowed above — use the local consistently.
router.delete("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = req.user?.id;
    if (!userId) return res.status(401).json({ message: "Unauthorized" });
    const id = parseIntOrError(req.params.id, "Payment ID");
    // Check if payment exists and belongs to this user
    const existingPayment = await storage.getPayment(id);
    if (!existingPayment) {
      return res.status(404).json({ message: "Payment not found" });
    }
    if (existingPayment.userId !== userId) {
      return res.status(403).json({
        message:
          "Forbidden: Payment belongs to a different user, you can't delete this.",
      });
    }
    await storage.deletePayment(id, userId);
    res.status(200).json({ message: "Payment deleted successfully" });
  } catch (err: unknown) {
    const message =
      err instanceof Error ? err.message : "Failed to delete payment";
    res.status(500).json({ message });
  }
});
export default router;

113
apps/Backend/src/routes/staffs.ts Executable file
View File

@@ -0,0 +1,113 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { storage } from "../storage";
import { z } from "zod";
import { StaffUncheckedCreateInputObjectSchema } from "@repo/db/usedSchemas";
// Staff type inferred from the generated Prisma input schema.
type Staff = z.infer<typeof StaffUncheckedCreateInputObjectSchema>;
// Create uses the full schema; update accepts any subset of its fields.
const staffCreateSchema = StaffUncheckedCreateInputObjectSchema;
const staffUpdateSchema = (
  StaffUncheckedCreateInputObjectSchema as unknown as z.ZodObject<any>
).partial();
// Express router for staff CRUD endpoints.
const router = Router();
router.post("/", async (req: Request, res: Response): Promise<any> => {
try {
const userId = req.user!.id; // from auth middleware
const validatedData = staffCreateSchema.parse({
...req.body,
userId,
});
const newStaff = await storage.createStaff(validatedData);
res.status(200).json(newStaff);
} catch (error) {
console.error("Failed to create staff:", error);
res.status(500).send("Failed to create staff");
}
});
// GET / — list all staff.
// Fix: a successful read returned 201 Created; the correct code is 200.
router.get("/", async (req: Request, res: Response): Promise<any> => {
  try {
    const staff = await storage.getAllStaff();
    if (!staff) return res.status(404).send("Staff not found");
    res.status(200).json(staff);
  } catch (error) {
    console.error("Failed to fetch staff:", error);
    res.status(500).send("Failed to fetch staff");
  }
});
router.put("/:id", async (req: Request, res: Response): Promise<any> => {
try {
const parsedStaffId = Number(req.params.id);
if (isNaN(parsedStaffId)) {
return res.status(400).send("Invalid staff ID");
}
const validatedData = staffUpdateSchema.parse(req.body);
const updatedStaff = await storage.updateStaff(
parsedStaffId,
validatedData
);
if (!updatedStaff) return res.status(404).send("Staff not found");
res.json(updatedStaff);
} catch (error) {
console.error("Failed to update staff:", error);
res.status(500).send("Failed to update staff");
}
});
// Parse a numeric id from a route param; throws when the value is not a
// finite number. NOTE(review): despite the name, this throws rather than
// sending a 400 itself — callers' catch blocks decide the response.
const parseIdOr400 = (raw: any, label: string) => {
  const parsed = Number(raw);
  if (Number.isFinite(parsed)) return parsed;
  throw new Error(`${label} is invalid`);
};
// DELETE /:id — delete a staff record, guarded by ownership and linked records.
// Fix: the id was parsed twice through two different helpers (`parseIdOr400`,
// which throws and became a 500, and a separate Number/isNaN check). A single
// validated parse now returns 400 for any non-numeric id.
router.delete("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = req.user!.id;
    const id = Number(req.params.id);
    if (!Number.isFinite(id)) {
      return res.status(400).send("Invalid staff ID");
    }
    const existing = await storage.getStaff(id); // must include createdById
    if (!existing) return res.status(404).json({ message: "Staff not found" });
    if (existing.userId !== userId) {
      return res.status(403).json({
        message:
          "Forbidden: Staff was created by a different user; you cannot delete it.",
      });
    }
    // Refuse deletion while appointments or claims still reference this staff.
    const [apptCount, claimCount] = await Promise.all([
      storage.countAppointmentsByStaffId(id),
      storage.countClaimsByStaffId(id),
    ]);
    if (apptCount || claimCount) {
      return res.status(409).json({
        message: `Cannot delete staff with linked records. Appointment of this staff : ${apptCount} and Claims ${claimCount}`,
        hint: "Archive this staff, or reassign linked records, then delete.",
      });
    }
    const deleted = await storage.deleteStaff(id);
    if (!deleted) return res.status(404).send("Staff not found");
    res.status(200).send("Staff deleted successfully");
  } catch (error) {
    console.error("Failed to delete staff:", error);
    res.status(500).send("Failed to delete staff");
  }
});
export default router;

127
apps/Backend/src/routes/users.ts Executable file
View File

@@ -0,0 +1,127 @@
import { Router } from "express";
import type { Request, Response } from "express";
import { storage } from "../storage";
import { z } from "zod";
import { UserUncheckedCreateInputObjectSchema } from "@repo/db/usedSchemas";
import jwt from 'jsonwebtoken';
import bcrypt from 'bcrypt';
// Express router for user CRUD endpoints.
const router = Router();
// Type based on shared schema
type SelectUser = z.infer<typeof UserUncheckedCreateInputObjectSchema>;
// Zod validation
const userCreateSchema = UserUncheckedCreateInputObjectSchema;
// Update accepts any subset of the create schema's fields.
const userUpdateSchema = (UserUncheckedCreateInputObjectSchema as unknown as z.ZodObject<any>).partial();
// GET / — return the authenticated user's own record (password stripped).
router.get("/", async (req: Request, res: Response): Promise<any> => {
  try {
    const userId = req.user?.id;
    if (!userId) return res.status(401).send("Unauthorized UserId");
    const user = await storage.getUser(userId);
    if (!user) return res.status(404).send("User not found");
    // Never expose the stored password.
    const { password: _password, ...safeUser } = user;
    res.json(safeUser);
  } catch (error) {
    console.error(error);
    res.status(500).send("Failed to fetch user");
  }
});
// GET: User by ID (password stripped from the response).
router.get("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const rawId = req.params.id;
    if (!rawId) return res.status(400).send("User ID is required");
    const userId = parseInt(rawId);
    if (isNaN(userId)) return res.status(400).send("Invalid user ID");
    const user = await storage.getUser(userId);
    if (!user) return res.status(404).send("User not found");
    const { password: _password, ...safeUser } = user;
    res.json(safeUser);
  } catch (error) {
    console.error(error);
    res.status(500).send("Failed to fetch user");
  }
});
// POST: Create new user
router.post("/", async (req: Request, res: Response) => {
try {
const input = userCreateSchema.parse(req.body);
const newUser = await storage.createUser(input);
const { password, ...safeUser } = newUser;
res.status(201).json(safeUser);
} catch (err) {
console.error(err);
res.status(400).json({ error: "Invalid user data", details: err });
}
});
/** Hash a plaintext password with bcrypt using 10 salt rounds. */
async function hashPassword(password: string) {
  const saltRounds = 10; // Salt rounds for bcrypt
  return bcrypt.hash(password, saltRounds);
}
// PUT: Update user
router.put("/:id", async (req: Request, res: Response):Promise<any> => {
try {
const idParam = req.params.id;
if (!idParam) return res.status(400).send("User ID is required");
const id = parseInt(idParam);
if (isNaN(id)) return res.status(400).send("Invalid user ID");
const updates = userUpdateSchema.parse(req.body);
// If password is provided and non-empty, hash it
if (updates.password && updates.password.trim() !== "") {
updates.password = await hashPassword(updates.password);
} else {
// Remove password field if empty, so it won't overwrite existing password with blank
delete updates.password;
}
const updatedUser = await storage.updateUser(id, updates);
if (!updatedUser) return res.status(404).send("User not found");
const { password, ...safeUser } = updatedUser;
res.json(safeUser);
} catch (err) {
console.error(err);
res.status(400).json({ error: "Invalid update data", details: err });
}
});
// DELETE: Delete user
router.delete("/:id", async (req: Request, res: Response): Promise<any> => {
  try {
    const rawId = req.params.id;
    if (!rawId) return res.status(400).send("User ID is required");
    const userId = parseInt(rawId);
    if (isNaN(userId)) return res.status(400).send("Invalid user ID");
    const success = await storage.deleteUser(userId);
    if (!success) return res.status(404).send("User not found");
    // 204: deleted, nothing to return.
    res.status(204).send();
  } catch (error) {
    console.error(error);
    res.status(500).send("Failed to delete user");
  }
});
export default router;

View File

@@ -0,0 +1,85 @@
import { spawn } from "child_process";
import fs from "fs";
import os from "os";
import path from "path";
import archiver from "archiver";
/** Best-effort recursive delete of a directory; cleanup failures are deliberately swallowed. */
function safeRmDir(dir: string) {
  try {
    fs.rmSync(dir, { recursive: true, force: true });
  } catch {
    /* ignore cleanup failures */
  }
}
// Parameters for backupDatabaseToPath.
interface BackupToPathParams {
  destinationPath: string; // directory the zip archive is written into
  filename: string; // name of the resulting zip file
}
/**
 * Dump the Postgres database (pg_dump directory format, 4 parallel jobs)
 * into a temp dir, zip it to `destinationPath/filename`, then clean up.
 * Rejects with pg_dump's stderr output when the dump fails.
 *
 * Fixes: resolve on the output stream's "close" event instead of archiver's
 * "end" — "end" can fire before the zip file is fully flushed to disk, so the
 * promise could resolve with an incomplete archive. Also handles the spawn
 * "error" event (e.g. pg_dump not installed), which previously went unhandled
 * and left the promise pending.
 */
export async function backupDatabaseToPath({
  destinationPath,
  filename,
}: BackupToPathParams): Promise<void> {
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "dental_backup_"));
  return new Promise((resolve, reject) => {
    const pgDump = spawn(
      "pg_dump",
      [
        "-Fd",
        "-j",
        "4",
        "--no-acl",
        "--no-owner",
        "-h",
        process.env.DB_HOST || "localhost",
        "-U",
        process.env.DB_USER || "postgres",
        process.env.DB_NAME || "dental_db",
        "-f",
        tmpDir,
      ],
      {
        env: {
          ...process.env,
          // pg_dump reads the password from the environment.
          PGPASSWORD: process.env.DB_PASSWORD,
        },
      }
    );
    let pgError = "";
    pgDump.stderr.on("data", (d) => (pgError += d.toString()));
    pgDump.on("error", (err) => {
      // Spawn failure (binary missing, permissions) — reject instead of hanging.
      safeRmDir(tmpDir);
      reject(err);
    });
    pgDump.on("close", (code) => {
      if (code !== 0) {
        safeRmDir(tmpDir);
        return reject(new Error(pgError || "pg_dump failed"));
      }
      const outputFile = path.join(destinationPath, filename);
      const outputStream = fs.createWriteStream(outputFile);
      const archive = archiver("zip");
      outputStream.on("error", (err) => {
        safeRmDir(tmpDir);
        reject(err);
      });
      // Resolve only once the zip file has been fully written to disk.
      outputStream.on("close", () => {
        safeRmDir(tmpDir);
        resolve();
      });
      archive.on("error", (err) => {
        safeRmDir(tmpDir);
        reject(err);
      });
      archive.pipe(outputStream);
      archive.directory(tmpDir + path.sep, false);
      archive.finalize();
    });
  });
}

View File

@@ -0,0 +1,29 @@
import axios from "axios";
import FormData from "form-data";
// Fields extracted from an uploaded PDF by the extractor service;
// any additional keys the service returns are passed through untyped.
export interface ExtractedData {
  name?: string;
  memberId?: string;
  dob?: string; // date of birth as returned by the extractor (format not normalized here)
  [key: string]: any;
}
/**
 * Forward an uploaded PDF to the patient-data-extractor microservice and
 * return its parsed fields.
 *
 * Generalization: the service URL can be overridden via the
 * PATIENT_EXTRACTOR_URL environment variable; the default is the previous
 * hard-coded localhost address, so existing deployments are unaffected.
 */
export default async function forwardToPatientDataExtractorService(
  file: Express.Multer.File
): Promise<ExtractedData> {
  const form = new FormData();
  form.append("pdf", file.buffer, {
    filename: file.originalname,
    contentType: file.mimetype,
  });
  const extractorUrl =
    process.env.PATIENT_EXTRACTOR_URL || "http://localhost:5001/extract";
  const response = await axios.post<ExtractedData>(extractorUrl, form, {
    headers: form.getHeaders(),
  });
  return response.data;
}

View File

@@ -0,0 +1,34 @@
import axios from "axios";
import FormData from "form-data";
/**
 * Send one or more scanned payment files to the OCR microservice and return
 * the extracted rows (empty array when the service returns none).
 *
 * Generalization: the service URL can be overridden via the PAYMENT_OCR_URL
 * environment variable; the default is the previous hard-coded localhost
 * address, so existing deployments are unaffected.
 */
export async function forwardToPaymentOCRService(
  files: Express.Multer.File | Express.Multer.File[]
): Promise<any> {
  const arr = Array.isArray(files) ? files : [files];
  const form = new FormData();
  for (const f of arr) {
    form.append("files", f.buffer, {
      filename: f.originalname,
      contentType: f.mimetype, // image/jpeg, image/png, image/tiff, etc.
      knownLength: f.size,
    });
  }
  const url =
    process.env.PAYMENT_OCR_URL || `http://localhost:5003/extract/json`;
  try {
    const resp = await axios.post<{ rows: any }>(url, form, {
      headers: form.getHeaders(),
      maxBodyLength: Infinity,
      maxContentLength: Infinity,
      timeout: 120000, // OCR can be heavy; adjust as needed
    });
    return resp.data?.rows ?? [];
  } catch (err: any) {
    // Bubble up a useful error message
    const status = err?.response?.status;
    const detail = err?.response?.data?.detail || err?.message || "Unknown error";
    throw new Error(`Payment OCR request failed${status ? ` (${status})` : ""}: ${detail}`);
  }
}

View File

@@ -0,0 +1,284 @@
import Decimal from "decimal.js";
import {
NewTransactionPayload,
OcrRow,
Payment,
PaymentMethod,
PaymentStatus,
ClaimStatus,
} from "@repo/db/types";
import { storage } from "../storage";
import { prisma } from "@repo/db/client";
import { convertOCRDate } from "../utils/dateUtils";
/**
 * Validate a set of service-line transactions against a payment record.
 *
 * Rules (reversal-specific exemptions noted):
 *  - the payment must exist and have service lines — from its claim when
 *    present, otherwise directly on the payment (OCR-imported data);
 *  - every transaction must target one of those service lines;
 *  - amounts must be non-negative (skipped when options.isReversal is set);
 *  - each transaction must carry a payment or an adjustment;
 *  - paidAmount must not exceed the line's remaining totalDue (also skipped
 *    for reversals).
 *
 * Returns the loaded payment record; throws Error on any violation.
 */
export async function validateTransactions(
  paymentId: number,
  serviceLineTransactions: NewTransactionPayload["serviceLineTransactions"],
  options?: { isReversal?: boolean }
) {
  const paymentRecord = await storage.getPaymentById(paymentId);
  if (!paymentRecord) {
    throw new Error("Payment not found");
  }
  // Choose service lines from claim if present, otherwise direct payment service lines(OCR Based datas)
  const serviceLines = paymentRecord.claim
    ? paymentRecord.claim.serviceLines
    : paymentRecord.serviceLines;
  if (!serviceLines || serviceLines.length === 0) {
    throw new Error("No service lines available for this payment");
  }
  for (const txn of serviceLineTransactions) {
    const line = serviceLines.find((sl) => sl.id === txn.serviceLineId);
    if (!line) {
      throw new Error(`Invalid service line: ${txn.serviceLineId}`);
    }
    // Decimal arithmetic to avoid float rounding on money values.
    const paidAmount = new Decimal(txn.paidAmount ?? 0);
    const adjustedAmount = new Decimal(txn.adjustedAmount ?? 0);
    if (!options?.isReversal && (paidAmount.lt(0) || adjustedAmount.lt(0))) {
      throw new Error("Amounts cannot be negative");
    }
    if (paidAmount.eq(0) && adjustedAmount.eq(0)) {
      throw new Error("Must provide a payment or adjustment");
    }
    if (!options?.isReversal && paidAmount.gt(line.totalDue)) {
      throw new Error(
        `Paid amount exceeds due for service line ${txn.serviceLineId}`
      );
    }
  }
  return paymentRecord;
}
/**
 * Apply transactions to a payment & recalc totals.
 *
 * Runs inside a single Prisma transaction:
 *  1. insert each service-line transaction and recompute that line's
 *     paid/adjusted/due totals and status from the aggregate of ALL its
 *     transactions (not just the new one);
 *  2. recompute the payment's totals and status the same way;
 *  3. when the payment is claim-linked, derive the claim's status from the
 *     payment totals.
 *
 * NOTE(review): assumes validateTransactions() already ran (updatePayment
 * enforces this) — amounts are not re-checked here.
 */
export async function applyTransactions(
  paymentId: number,
  serviceLineTransactions: NewTransactionPayload["serviceLineTransactions"],
  userId: number
): Promise<Payment> {
  return prisma.$transaction(async (tx) => {
    // 1. Insert service line transactions + recalculate each serviceLines
    for (const txn of serviceLineTransactions) {
      await tx.serviceLineTransaction.create({
        data: {
          paymentId,
          serviceLineId: txn.serviceLineId,
          transactionId: txn.transactionId,
          paidAmount: new Decimal(txn.paidAmount),
          adjustedAmount: new Decimal(txn.adjustedAmount || 0),
          method: txn.method,
          receivedDate: txn.receivedDate,
          payerName: txn.payerName,
          notes: txn.notes,
        },
      });
      // Recalculate Claim - serviceLines model totals and updates along with Claim-serviceLine status
      const aggLine = await tx.serviceLineTransaction.aggregate({
        _sum: { paidAmount: true, adjustedAmount: true },
        where: { serviceLineId: txn.serviceLineId },
      });
      const serviceLine = await tx.serviceLine.findUniqueOrThrow({
        where: { id: txn.serviceLineId },
        select: { totalBilled: true },
      });
      // Aggregates are null when no rows matched; treat as zero.
      const totalPaid = aggLine._sum.paidAmount || new Decimal(0);
      const totalAdjusted = aggLine._sum.adjustedAmount || new Decimal(0);
      const totalDue = serviceLine.totalBilled
        .minus(totalPaid)
        .minus(totalAdjusted);
      await tx.serviceLine.update({
        where: { id: txn.serviceLineId },
        data: {
          totalPaid,
          totalAdjusted,
          totalDue,
          // PAID requires actual money received, not just adjustments.
          status:
            totalDue.lte(0) && totalPaid.gt(0)
              ? "PAID"
              : totalPaid.gt(0)
              ? "PARTIALLY_PAID"
              : "UNPAID",
        },
      });
    }
    // 2. Recalc payment model totals based on serviceLineTransactions, and update PaymentStatus.
    const aggPayment = await tx.serviceLineTransaction.aggregate({
      _sum: { paidAmount: true, adjustedAmount: true },
      where: { paymentId },
    });
    const payment = await tx.payment.findUniqueOrThrow({
      where: { id: paymentId },
      select: { totalBilled: true },
    });
    const totalPaid = aggPayment._sum.paidAmount || new Decimal(0);
    const totalAdjusted = aggPayment._sum.adjustedAmount || new Decimal(0);
    const totalDue = payment.totalBilled.minus(totalPaid).minus(totalAdjusted);
    let status: PaymentStatus;
    if (totalDue.lte(0) && totalPaid.gt(0)) status = "PAID";
    else if (totalPaid.gt(0)) status = "PARTIALLY_PAID";
    else status = "PENDING";
    const updatedPayment = await tx.payment.update({
      where: { id: paymentId },
      data: { totalPaid, totalAdjusted, totalDue, status, updatedById: userId },
    });
    // 3. Update Claim Model Status based on serviceLineTransaction and Payment values.(as they hold the same values
    // as per, ServiceLine.totalPaid and totalAdjusted and Claim.totalBilled) Hence not fetching unneccessary.
    const claimId = updatedPayment.claimId ?? null;
    if (claimId) {
      let newClaimStatus: ClaimStatus;
      if (totalDue.lte(0) && totalPaid.gt(0)) newClaimStatus = "APPROVED";
      else newClaimStatus = "PENDING";
      await tx.claim.update({
        where: { id: claimId },
        data: { status: newClaimStatus },
      });
    }
    return updatedPayment;
  });
}
/**
 * Main entry point for updating payments.
 *
 * Validates the transactions first (throws before any write on violation),
 * then applies them and recalculates line/payment/claim totals and statuses.
 * `options.isReversal` relaxes the non-negative and over-payment checks so
 * negative correcting transactions are accepted.
 */
export async function updatePayment(
  paymentId: number,
  serviceLineTransactions: NewTransactionPayload["serviceLineTransactions"],
  userId: number,
  options?: { isReversal?: boolean }
): Promise<Payment> {
  await validateTransactions(paymentId, serviceLineTransactions, options);
  return applyTransactions(paymentId, serviceLineTransactions, userId);
}
// handling full-ocr-payments-import
/**
 * Imports OCR-extracted payment rows sequentially.
 * Per row: find-or-create the patient by insuranceId, create a zeroed
 * payment + service line in one transaction, then AFTER commit post a single
 * transaction through updatePayment so totals/statuses are recalculated by
 * the shared code path. Throws on the first failing row — rows imported
 * before the failure remain committed. Returns the created payment ids.
 */
export const fullOcrPaymentService = {
  async importRows(rows: OcrRow[], userId: number) {
    const results: number[] = [];
    for (const [index, row] of rows.entries()) {
      try {
        if (!row.patientName || !row.insuranceId) {
          throw new Error(
            `Row ${index + 1}: missing patientName or insuranceId`
          );
        }
        if (!row.procedureCode) {
          throw new Error(`Row ${index + 1}: missing procedureCode`);
        }
        // Decimal money math; allowed falls back to billed when absent.
        const billed = new Decimal(row.totalBilled ?? 0);
        const allowed = new Decimal(row.totalAllowed ?? row.totalBilled ?? 0);
        const paid = new Decimal(row.totalPaid ?? 0);
        const adjusted = billed.minus(allowed); // write-off
        // Steps 1-3 in a transaction
        const { paymentId, serviceLineId } = await prisma.$transaction(
          async (tx) => {
            // 1. Find or create patient
            let patient = await tx.patient.findFirst({
              where: { insuranceId: row.insuranceId.toString() },
            });
            if (!patient) {
              // Split "First Last..." — first token is the first name, rest is the last name.
              const [firstNameRaw, ...rest] = (row.patientName ?? "")
                .trim()
                .split(" ");
              const firstName = firstNameRaw || "Unknown";
              const lastName = rest.length > 0 ? rest.join(" ") : "Unknown";
              patient = await tx.patient.create({
                data: {
                  firstName,
                  lastName,
                  insuranceId: row.insuranceId.toString(),
                  dateOfBirth: new Date(Date.UTC(1900, 0, 1)), // fallback (1900, jan, 1)
                  gender: "",
                  phone: "",
                  userId,
                },
              });
            }
            // 2. Create payment (claimId null) — IMPORTANT: start with zeros, due = billed
            const payment = await tx.payment.create({
              data: {
                patientId: patient.id,
                userId,
                totalBilled: billed,
                totalPaid: new Decimal(0),
                totalAdjusted: new Decimal(0),
                totalDue: billed,
                status: "PENDING", // updatePayment will fix it
                notes: `OCR import from ${row.sourceFile ?? "Unknown file"}`,
                icn: row.icn ?? "",
              },
            });
            // 3. Create service line — IMPORTANT: start with zeros, due = billed
            const serviceLine = await tx.serviceLine.create({
              data: {
                paymentId: payment.id,
                procedureCode: row.procedureCode,
                toothNumber: row.toothNumber ?? null,
                toothSurface: row.toothSurface ?? null,
                procedureDate: convertOCRDate(row.procedureDate),
                totalBilled: billed,
                totalPaid: new Decimal(0),
                totalAdjusted: new Decimal(0),
                totalDue: billed,
              },
            });
            return { paymentId: payment.id, serviceLineId: serviceLine.id };
          }
        );
        // Step 4: AFTER commit, recalc using updatePayment (global prisma can see it now)
        // Build transaction & let updatePayment handle recalculation
        const txn = {
          serviceLineId,
          paidAmount: paid.toNumber(),
          adjustedAmount: adjusted.toNumber(),
          method: "OTHER" as PaymentMethod,
          receivedDate: new Date(),
          notes: "OCR import",
        };
        await updatePayment(paymentId, [txn], userId);
        results.push(paymentId);
      } catch (err) {
        console.error(`❌ Failed to import OCR row ${index + 1}:`, err);
        throw err;
      }
    }
    return results;
  },
};

View File

@@ -0,0 +1,52 @@
import axios from "axios";
// Payload sent to the selenium claim-submission agent: the claim data plus
// uploaded attachments, base64-encoded for JSON transport.
export interface SeleniumPayload {
  claim: any;
  pdfs: {
    originalname: string;
    bufferBase64: string;
  }[];
  images: {
    originalname: string;
    bufferBase64: string;
  }[];
}
/**
 * Submit a claim plus its attachments to the selenium claim agent.
 * PDFs and images are split by mimetype and base64-encoded for transport.
 * Throws when the agent reports status "error".
 *
 * Fix: the agent URL was hard-coded even though SELENIUM_AGENT_BASE_URL is
 * already part of the app's configuration (see .env); it is now used with
 * the previous hard-coded address as fallback, so behavior is unchanged for
 * existing deployments.
 */
export async function forwardToSeleniumClaimAgent(
  claimData: any,
  files: Express.Multer.File[]
): Promise<any> {
  const toBase64 = (file: Express.Multer.File) => ({
    originalname: file.originalname,
    bufferBase64: file.buffer.toString("base64"),
  });
  const pdfs = files
    .filter((file) => file.mimetype === "application/pdf")
    .map(toBase64);
  const images = files
    .filter((file) => file.mimetype.startsWith("image/"))
    .map(toBase64);
  const payload: SeleniumPayload = {
    claim: claimData,
    pdfs,
    images,
  };
  const baseUrl =
    process.env.SELENIUM_AGENT_BASE_URL || "http://localhost:5002";
  const result = await axios.post(`${baseUrl}/claimsubmit`, payload);
  if (result.data.status === "error") {
    const errorMsg =
      typeof result.data.message === "string"
        ? result.data.message
        : result.data.message?.msg || "Selenium agent error";
    throw new Error(errorMsg);
  }
  return result.data;
}

View File

@@ -0,0 +1,122 @@
import axios from "axios";
import http from "http";
import https from "https";
import dotenv from "dotenv";
dotenv.config();
export interface SeleniumPayload {
  data: any;
  url?: string;
}
// Base URL of the Selenium automation agent, taken from SELENIUM_AGENT_BASE_URL
// in .env (e.g. http://localhost:5002). Undefined if not configured.
const SELENIUM_AGENT_BASE = process.env.SELENIUM_AGENT_BASE_URL;
// Keep-alive agents so repeated calls to the agent reuse TCP connections.
const httpAgent = new http.Agent({ keepAlive: true, keepAliveMsecs: 60_000 });
const httpsAgent = new https.Agent({ keepAlive: true, keepAliveMsecs: 60_000 });
// Shared axios client. validateStatus accepts 200-599 so HTTP error statuses
// are returned (and can be retried/inspected) instead of making axios throw.
const client = axios.create({
  baseURL: SELENIUM_AGENT_BASE,
  timeout: 5 * 60 * 1000, // Selenium flows are slow; allow up to 5 minutes
  httpAgent,
  httpsAgent,
  validateStatus: (s) => s >= 200 && s < 600,
});
/**
 * Issue an HTTP request through the shared client, retrying retryable gateway
 * statuses (502/503/504) and transient socket errors with linear backoff.
 * After `retries` in-loop attempts, one final attempt is made whose failure
 * propagates to the caller unmodified.
 */
async function requestWithRetries(
  config: any,
  retries = 4,
  baseBackoffMs = 300
) {
  const RETRYABLE_STATUSES = [502, 503, 504];
  const TRANSIENT_CODES = new Set([
    "ECONNRESET",
    "ECONNREFUSED",
    "EPIPE",
    "ETIMEDOUT",
  ]);
  for (let attempt = 1; attempt <= retries; attempt++) {
    try {
      const response = await client.request(config);
      if (!RETRYABLE_STATUSES.includes(response.status)) return response;
      console.warn(
        `[selenium-client] retryable HTTP status ${response.status} (attempt ${attempt})`
      );
    } catch (err: any) {
      const code = err?.code;
      // Non-transient errors are not retried.
      if (!TRANSIENT_CODES.has(code)) throw err;
      console.warn(
        `[selenium-client] transient network error ${code} (attempt ${attempt})`
      );
    }
    // Linear backoff: 300ms, 600ms, 900ms, ...
    await new Promise((resolve) => setTimeout(resolve, baseBackoffMs * attempt));
  }
  // final attempt (let exception bubble if it fails)
  return client.request(config);
}
/** Current time as an ISO-8601 UTC timestamp, used to prefix log lines. */
const now = (): string => new Date().toISOString();
/** Timestamped console helper: `<ISO time> [tag] message` plus optional context. */
function log(tag: string, msg: string, ctx?: any) {
  const stamp = new Date().toISOString();
  console.log(`${stamp} [${tag}] ${msg}`, ctx ?? "");
}
/**
 * POST an eligibility payload to the Selenium agent's ddma-eligibility
 * endpoint, retrying transient failures; rejects on agent 5xx responses.
 */
export async function forwardToSeleniumDdmaEligibilityAgent(
  insuranceEligibilityData: any
): Promise<any> {
  const endpoint = `/ddma-eligibility`;
  const body = { data: insuranceEligibilityData };
  log("selenium-client", "POST ddma-eligibility", {
    url: SELENIUM_AGENT_BASE + endpoint,
    keys: Object.keys(body),
  });
  const response = await requestWithRetries(
    { url: endpoint, method: "POST", data: body },
    4
  );
  log("selenium-client", "agent response", {
    status: response.status,
    dataKeys: response.data ? Object.keys(response.data) : null,
  });
  if (response.status >= 500)
    throw new Error(`Selenium agent server error: ${response.status}`);
  return response.data;
}
/**
 * Submit an OTP for an in-flight DDMA session on the Selenium agent,
 * retrying transient failures; rejects on agent 5xx responses.
 */
export async function forwardOtpToSeleniumDdmaAgent(
  sessionId: string,
  otp: string
): Promise<any> {
  const endpoint = `/submit-otp`;
  log("selenium-client", "POST submit-otp", {
    url: SELENIUM_AGENT_BASE + endpoint,
    sessionId,
  });
  const response = await requestWithRetries(
    { url: endpoint, method: "POST", data: { session_id: sessionId, otp } },
    4
  );
  log("selenium-client", "submit-otp response", {
    status: response.status,
    data: response.data,
  });
  if (response.status >= 500)
    throw new Error(`Selenium agent server error on submit-otp: ${response.status}`);
  return response.data;
}
/**
 * Fetch the status of a DDMA Selenium session.
 *
 * @param sessionId agent-issued session identifier
 * @returns the agent's status payload
 * @throws Error("not_found") with `.response = { status: 404, data }` when the
 *         session is unknown (callers match on this shape)
 * @throws Error on agent 5xx responses
 */
export async function getSeleniumDdmaSessionStatus(
  sessionId: string
): Promise<any> {
  const url = `/session/${sessionId}/status`;
  log("selenium-client", "GET session status", {
    url: SELENIUM_AGENT_BASE + url,
    sessionId,
  });
  const r = await requestWithRetries({ url, method: "GET" }, 4);
  log("selenium-client", "session status response", {
    status: r.status,
    dataKeys: r.data ? Object.keys(r.data) : null,
  });
  if (r.status === 404) {
    const e: any = new Error("not_found");
    e.response = { status: 404, data: r.data };
    throw e;
  }
  // Consistency fix: the other calls in this module reject on 5xx; previously
  // a 500 here was returned to the caller as if it were a valid status body.
  if (r.status >= 500)
    throw new Error(`Selenium agent server error on session status: ${r.status}`);
  return r.data;
}

View File

@@ -0,0 +1,52 @@
import axios from "axios";
export interface SeleniumPayload {
  claim: any;
  pdfs: {
    originalname: string;
    bufferBase64: string;
  }[];
  images: {
    originalname: string;
    bufferBase64: string;
  }[];
}

/** Convert the uploads matching `match` into { originalname, bufferBase64 } pairs. */
function encodeFiles(
  files: Express.Multer.File[],
  match: (mimetype: string) => boolean
) {
  return files
    .filter((file) => match(file.mimetype))
    .map((file) => ({
      originalname: file.originalname,
      bufferBase64: file.buffer.toString("base64"),
    }));
}

/**
 * Forward a pre-authorization claim plus its PDF/image attachments
 * (base64-encoded) to the Selenium agent.
 *
 * @param claimData arbitrary claim payload, forwarded verbatim under `claim`
 * @param files     multer in-memory uploads; only PDFs and images are sent
 * @returns the agent's response body
 * @throws Error when the agent responds with `status === "error"`
 */
export async function forwardToSeleniumClaimPreAuthAgent(
  claimData: any,
  files: Express.Multer.File[]
): Promise<any> {
  const payload: SeleniumPayload = {
    claim: claimData,
    pdfs: encodeFiles(files, (m) => m === "application/pdf"),
    images: encodeFiles(files, (m) => m.startsWith("image/")),
  };
  // Previously hard-coded; now honors SELENIUM_AGENT_BASE_URL from the
  // environment (same default), consistent with the ddma agent client.
  const base = process.env.SELENIUM_AGENT_BASE_URL ?? "http://localhost:5002";
  const result = await axios.post(`${base}/claim-pre-auth`, payload);
  if (result.data.status === "error") {
    const errorMsg =
      typeof result.data.message === "string"
        ? result.data.message
        : result.data.message?.msg || "Selenium agent error";
    throw new Error(errorMsg);
  }
  return result.data;
}

View File

@@ -0,0 +1,27 @@
import axios from "axios";
export interface SeleniumPayload {
  data: any;
}

/**
 * Forward a claim-status-check request to the Selenium automation agent.
 *
 * @param insuranceClaimStatusData payload forwarded verbatim under `data`
 * @returns the agent's response body
 * @throws Error when the agent responds with `status === "error"`
 */
export async function forwardToSeleniumInsuranceClaimStatusAgent(
  insuranceClaimStatusData: any
): Promise<any> {
  const payload: SeleniumPayload = {
    data: insuranceClaimStatusData,
  };
  // Previously hard-coded; now honors SELENIUM_AGENT_BASE_URL from the
  // environment (same default), consistent with the ddma agent client.
  const base = process.env.SELENIUM_AGENT_BASE_URL ?? "http://localhost:5002";
  const result = await axios.post(`${base}/claim-status-check`, payload);
  if (result.data.status === "error") {
    const errorMsg =
      typeof result.data.message === "string"
        ? result.data.message
        : result.data.message?.msg || "Selenium agent error";
    throw new Error(errorMsg);
  }
  return result.data;
}

View File

@@ -0,0 +1,27 @@
import axios from "axios";
export interface SeleniumPayload {
  data: any;
}

/**
 * Forward an eligibility-check request to the Selenium automation agent.
 *
 * @param insuranceEligibilityData payload forwarded verbatim under `data`
 * @returns the agent's response body
 * @throws Error when the agent responds with `status === "error"`
 */
export async function forwardToSeleniumInsuranceEligibilityAgent(
  insuranceEligibilityData: any
): Promise<any> {
  const payload: SeleniumPayload = {
    data: insuranceEligibilityData,
  };
  // Previously hard-coded; now honors SELENIUM_AGENT_BASE_URL from the
  // environment (same default), consistent with the ddma agent client.
  const base = process.env.SELENIUM_AGENT_BASE_URL ?? "http://localhost:5002";
  const result = await axios.post(`${base}/eligibility-check`, payload);
  if (result.data.status === "error") {
    const errorMsg =
      typeof result.data.message === "string"
        ? result.data.message
        : result.data.message?.msg || "Selenium agent error";
    throw new Error(errorMsg);
  }
  return result.data;
}

53
apps/Backend/src/socket.ts Executable file
View File

@@ -0,0 +1,53 @@
import { Server as HttpServer } from "http";
import { Server, Socket } from "socket.io";
let io: Server | null = null;
/**
 * Attach a Socket.IO server to the HTTP server.
 * Dev: any origin is allowed. Prod: only FRONTEND_URLS / FRONTEND_URL
 * entries are allowed (none configured means no origins at all).
 */
export function initSocket(server: HttpServer) {
  const env = (
    process.env.NODE_ENV ||
    process.env.ENV ||
    "development"
  ).toLowerCase();

  const allowedOrigins = (
    process.env.FRONTEND_URLS ||
    process.env.FRONTEND_URL ||
    ""
  )
    .split(",")
    .map((url) => url.trim())
    .filter((url) => url.length > 0);

  let corsOrigin: boolean | string[];
  if (env !== "production") {
    corsOrigin = true; // dev: allow all origins
  } else if (allowedOrigins.length > 0) {
    corsOrigin = allowedOrigins; // prod: whitelist from env
  } else {
    corsOrigin = false; // prod with no config: allow nothing
  }

  io = new Server(server, {
    cors: {
      origin: corsOrigin,
      methods: ["GET", "POST"],
      credentials: true,
    },
  });

  io.on("connection", (socket: Socket) => {
    console.log("🔌 Socket connected:", socket.id);
    socket.on("disconnect", () => {
      console.log("🔌 Socket disconnected:", socket.id);
    });
  });

  // Surface low-level engine failures (bad handshakes, CORS rejections).
  io.engine.on("connection_error", (err) => {
    console.error("Socket engine connection_error:", err);
  });

  return io;
}
export { io };

View File

@@ -0,0 +1,100 @@
import {
Appointment,
AppointmentProcedure,
InsertAppointmentProcedure,
Patient,
UpdateAppointmentProcedure,
} from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
/** Persistence contract for per-appointment procedure rows. */
export interface IAppointmentProceduresStorage {
  /** All procedures attached to the appointment, oldest first. */
  getByAppointmentId(appointmentId: number): Promise<AppointmentProcedure[]>;
  /** Appointment + its patient + ordered procedures; null when the appointment does not exist. */
  getPrefillDataByAppointmentId(appointmentId: number): Promise<{
    appointment: Appointment;
    patient: Patient;
    procedures: AppointmentProcedure[];
  } | null>;
  createProcedure(
    data: InsertAppointmentProcedure
  ): Promise<AppointmentProcedure>;
  /** Bulk insert; resolves with the number of rows created. */
  createProceduresBulk(data: InsertAppointmentProcedure[]): Promise<number>;
  updateProcedure(
    id: number,
    data: UpdateAppointmentProcedure
  ): Promise<AppointmentProcedure>;
  deleteProcedure(id: number): Promise<void>;
  /** Removes every procedure row belonging to the appointment. */
  clearByAppointmentId(appointmentId: number): Promise<void>;
}
export const appointmentProceduresStorage: IAppointmentProceduresStorage = {
  /** All procedures attached to an appointment, oldest first. */
  async getByAppointmentId(
    appointmentId: number
  ): Promise<AppointmentProcedure[]> {
    const rows = await db.appointmentProcedure.findMany({
      where: { appointmentId },
      orderBy: { createdAt: "asc" },
    });
    return rows;
  },

  /** Appointment with its patient and ordered procedures for claim prefill;
   *  null when the appointment does not exist. */
  async getPrefillDataByAppointmentId(appointmentId: number) {
    const appointment = await db.appointment.findUnique({
      where: { id: appointmentId },
      include: {
        patient: true,
        procedures: {
          orderBy: { createdAt: "asc" },
        },
      },
    });
    if (!appointment) return null;
    const { patient, procedures } = appointment;
    return { appointment, patient, procedures };
  },

  /** Insert one procedure row. */
  async createProcedure(
    data: InsertAppointmentProcedure
  ): Promise<AppointmentProcedure> {
    const created = await db.appointmentProcedure.create({
      data: data as AppointmentProcedure,
    });
    return created;
  },

  /** Insert many procedure rows; resolves with the inserted count. */
  async createProceduresBulk(
    data: InsertAppointmentProcedure[]
  ): Promise<number> {
    const { count } = await db.appointmentProcedure.createMany({
      data: data as any[],
    });
    return count;
  },

  /** Partially update one procedure row by id. */
  async updateProcedure(
    id: number,
    data: UpdateAppointmentProcedure
  ): Promise<AppointmentProcedure> {
    const updated = await db.appointmentProcedure.update({
      where: { id },
      data: data as any,
    });
    return updated;
  },

  /** Delete one procedure row by id. */
  async deleteProcedure(id: number): Promise<void> {
    await db.appointmentProcedure.delete({ where: { id } });
  },

  /** Delete every procedure row belonging to the appointment. */
  async clearByAppointmentId(appointmentId: number): Promise<void> {
    await db.appointmentProcedure.deleteMany({ where: { appointmentId } });
  },
};

View File

@@ -0,0 +1,226 @@
import {
Appointment,
InsertAppointment,
Patient,
UpdateAppointment,
} from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
/** Persistence contract for appointment CRUD, listings and conflict checks. */
export interface IStorage {
  getAppointment(id: number): Promise<Appointment | undefined>;
  getAllAppointments(): Promise<Appointment[]>;
  getAppointmentsByUserId(userId: number): Promise<Appointment[]>;
  getAppointmentsByPatientId(patientId: number): Promise<Appointment[]>;
  /** Patient attached to the appointment; undefined when either is missing. */
  getPatientFromAppointmentId(
    appointmentId: number
  ): Promise<Patient | undefined>;
  /** Paginated newest-first listing. */
  getRecentAppointments(limit: number, offset: number): Promise<Appointment[]>;
  /** Appointments whose date falls within [start, end], inclusive. */
  getAppointmentsOnRange(start: Date, end: Date): Promise<Appointment[]>;
  createAppointment(appointment: InsertAppointment): Promise<Appointment>;
  updateAppointment(
    id: number,
    appointment: UpdateAppointment
  ): Promise<Appointment>;
  deleteAppointment(id: number): Promise<void>;
  /** Exact patient/date/startTime match (double-booking check on create). */
  getPatientAppointmentByDateTime(
    patientId: number,
    date: Date,
    startTime: string
  ): Promise<Appointment | undefined>;
  /** Exact staff/date/startTime match, optionally ignoring one appointment id. */
  getStaffAppointmentByDateTime(
    staffId: number,
    date: Date,
    startTime: string,
    excludeId?: number
  ): Promise<Appointment | undefined>;
  /** Conflict checks for the update path: always exclude `excludeId`. */
  getPatientConflictAppointment(
    patientId: number,
    date: Date,
    startTime: string,
    excludeId: number
  ): Promise<Appointment | undefined>;
  getStaffConflictAppointment(
    staffId: number,
    date: Date,
    startTime: string,
    excludeId: number
  ): Promise<Appointment | undefined>;
  /** dateStr is "YYYY-MM-DD"; that day's appointments for the given user. */
  getAppointmentsByDateForUser(dateStr: string, userId: number): Promise<Appointment[]>;
}
export const appointmentsStorage: IStorage = {
  /** Single appointment by id, or undefined when it does not exist. */
  async getAppointment(id: number): Promise<Appointment | undefined> {
    const appointment = await db.appointment.findUnique({ where: { id } });
    return appointment ?? undefined;
  },
  async getAllAppointments(): Promise<Appointment[]> {
    return await db.appointment.findMany();
  },
  async getAppointmentsByUserId(userId: number): Promise<Appointment[]> {
    return await db.appointment.findMany({ where: { userId } });
  },
  async getAppointmentsByPatientId(patientId: number): Promise<Appointment[]> {
    return await db.appointment.findMany({ where: { patientId } });
  },
  /** Patient attached to the appointment; undefined when either is missing. */
  async getPatientFromAppointmentId(
    appointmentId: number
  ): Promise<Patient | undefined> {
    const appointment = await db.appointment.findUnique({
      where: { id: appointmentId },
      include: { patient: true },
    });
    return appointment?.patient ?? undefined;
  },
  /** Appointments with date in [start, end], ascending by date. */
  async getAppointmentsOnRange(start: Date, end: Date): Promise<Appointment[]> {
    return db.appointment.findMany({
      where: {
        date: {
          gte: start,
          lte: end,
        },
      },
      orderBy: { date: "asc" },
    });
  },
  /** Newest-first page of appointments. */
  async getRecentAppointments(
    limit: number,
    offset: number
  ): Promise<Appointment[]> {
    return db.appointment.findMany({
      skip: offset,
      take: limit,
      orderBy: { date: "desc" },
    });
  },
  async createAppointment(
    appointment: InsertAppointment
  ): Promise<Appointment> {
    return await db.appointment.create({ data: appointment as Appointment });
  },
  async updateAppointment(
    id: number,
    updateData: UpdateAppointment
  ): Promise<Appointment> {
    try {
      return await db.appointment.update({
        where: { id },
        data: updateData,
      });
    } catch (err: any) {
      // Prisma P2025 = record to update not found. Any other failure
      // (constraint violation, connection error, …) must surface unchanged
      // instead of being mislabeled as "not found".
      if (err?.code === "P2025") {
        throw new Error(`Appointment with ID ${id} not found`);
      }
      throw err;
    }
  },
  async deleteAppointment(id: number): Promise<void> {
    try {
      await db.appointment.delete({ where: { id } });
    } catch (err: any) {
      // See updateAppointment: only translate Prisma's "not found" error.
      if (err?.code === "P2025") {
        throw new Error(`Appointment with ID ${id} not found`);
      }
      throw err;
    }
  },
  /** Exact patient/date/startTime match (double-booking check on create). */
  async getPatientAppointmentByDateTime(
    patientId: number,
    date: Date,
    startTime: string
  ): Promise<Appointment | undefined> {
    return (
      (await db.appointment.findFirst({
        where: {
          patientId,
          date,
          startTime,
        },
      })) ?? undefined
    );
  },
  /** Exact staff/date/startTime match, optionally ignoring one appointment id. */
  async getStaffAppointmentByDateTime(
    staffId: number,
    date: Date,
    startTime: string,
    excludeId?: number
  ): Promise<Appointment | undefined> {
    return (
      (await db.appointment.findFirst({
        where: {
          staffId,
          date,
          startTime,
          NOT: excludeId ? { id: excludeId } : undefined,
        },
      })) ?? undefined
    );
  },
  /** Conflict check for the update path: same slot, different appointment. */
  async getPatientConflictAppointment(
    patientId: number,
    date: Date,
    startTime: string,
    excludeId: number
  ): Promise<Appointment | undefined> {
    return (
      (await db.appointment.findFirst({
        where: {
          patientId,
          date,
          startTime,
          NOT: { id: excludeId },
        },
      })) ?? undefined
    );
  },
  async getStaffConflictAppointment(
    staffId: number,
    date: Date,
    startTime: string,
    excludeId: number
  ): Promise<Appointment | undefined> {
    return (
      (await db.appointment.findFirst({
        where: {
          staffId,
          date,
          startTime,
          NOT: { id: excludeId },
        },
      })) ?? undefined
    );
  },
  /**
   * getAppointmentsByDateForUser
   * dateStr expected as "YYYY-MM-DD" (same string the frontend sends);
   * returns that day's appointments for the user, ordered by startTime.
   */
  async getAppointmentsByDateForUser(dateStr: string, userId: number): Promise<Appointment[]> {
    // Defensive parsing — if invalid, throw so caller can handle.
    // NOTE(review): new Date("YYYY-MM-DD") parses as UTC midnight, not local
    // midnight — confirm this matches how appointment.date is stored before
    // changing the parsing here.
    const start = new Date(dateStr);
    if (Number.isNaN(start.getTime())) {
      throw new Error(`Invalid date string passed to getAppointmentsByDateForUser: ${dateStr}`);
    }
    // create exclusive end (next day midnight)
    const end = new Date(start);
    end.setDate(start.getDate() + 1);
    return db.appointment.findMany({
      where: {
        userId,
        date: {
          gte: start,
          lt: end,
        },
      },
      orderBy: { startTime: "asc" },
    });
  }
};

View File

@@ -0,0 +1,111 @@
import {
Claim,
ClaimStatus,
ClaimWithServiceLines,
InsertClaim,
UpdateClaim,
} from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
/** Persistence contract for claim CRUD and paginated listings. */
export interface IStorage {
  getClaim(id: number): Promise<Claim | undefined>;
  /** Newest-first page of a patient's claims, related rows included. */
  getRecentClaimsByPatientId(
    patientId: number,
    limit: number,
    offset: number
  ): Promise<ClaimWithServiceLines[]>;
  getTotalClaimCountByPatient(patientId: number): Promise<number>;
  getClaimsByAppointmentId(appointmentId: number): Promise<Claim[]>;
  // NOTE(review): the implementation returns ClaimWithServiceLines[] here
  // (it includes serviceLines/staff/claimFiles); Claim[] is the wider type.
  getRecentClaims(limit: number, offset: number): Promise<Claim[]>;
  getTotalClaimCount(): Promise<number>;
  createClaim(claim: InsertClaim): Promise<Claim>;
  updateClaim(id: number, updates: UpdateClaim): Promise<Claim>;
  updateClaimStatus(id: number, status: ClaimStatus): Promise<Claim>;
  deleteClaim(id: number): Promise<void>;
}
export const claimsStorage: IStorage = {
  /** Single claim by id, or undefined when it does not exist. */
  async getClaim(id: number): Promise<Claim | undefined> {
    const claim = await db.claim.findUnique({ where: { id } });
    return claim ?? undefined;
  },
  /** Newest-first page of a patient's claims with service lines, staff and files. */
  async getRecentClaimsByPatientId(
    patientId: number,
    limit: number,
    offset: number
  ): Promise<ClaimWithServiceLines[]> {
    return db.claim.findMany({
      where: { patientId },
      orderBy: { createdAt: "desc" },
      skip: offset,
      take: limit,
      include: {
        serviceLines: true,
        staff: true,
        claimFiles: true,
      },
    });
  },
  async getTotalClaimCountByPatient(patientId: number): Promise<number> {
    return db.claim.count({
      where: { patientId },
    });
  },
  async getClaimsByAppointmentId(appointmentId: number): Promise<Claim[]> {
    return await db.claim.findMany({ where: { appointmentId } });
  },
  /** Newest-first page across all patients, related rows included. */
  async getRecentClaims(
    limit: number,
    offset: number
  ): Promise<ClaimWithServiceLines[]> {
    return db.claim.findMany({
      orderBy: { createdAt: "desc" },
      skip: offset,
      take: limit,
      include: { serviceLines: true, staff: true, claimFiles: true },
    });
  },
  async getTotalClaimCount(): Promise<number> {
    return db.claim.count();
  },
  async createClaim(claim: InsertClaim): Promise<Claim> {
    return await db.claim.create({ data: claim as Claim });
  },
  async updateClaim(id: number, updates: UpdateClaim): Promise<Claim> {
    try {
      return await db.claim.update({
        where: { id },
        data: updates,
      });
    } catch (err: any) {
      // Prisma P2025 = record to update not found. Other failures (FK/unique
      // violations, connection errors) must not be mislabeled as a missing claim.
      if (err?.code === "P2025") {
        throw new Error(`Claim with ID ${id} not found`);
      }
      throw err;
    }
  },
  /** Set the claim status, rejecting unknown ids with a clear error. */
  async updateClaimStatus(id: number, status: ClaimStatus): Promise<Claim> {
    const existing = await db.claim.findUnique({ where: { id } });
    if (!existing) {
      throw new Error("Claim not found");
    }
    return db.claim.update({
      where: { id },
      data: { status },
    });
  },
  async deleteClaim(id: number): Promise<void> {
    try {
      await db.claim.delete({ where: { id } });
    } catch (err: any) {
      // Only translate Prisma's "not found" (P2025); rethrow everything else.
      if (err?.code === "P2025") {
        throw new Error(`Claim with ID ${id} not found`);
      }
      throw err;
    }
  },
};

View File

@@ -0,0 +1,493 @@
import { prisma as db } from "@repo/db/client";
import { CloudFolder, CloudFile } from "@repo/db/types";
import { serializeFile } from "../utils/prismaFileUtils";
/**
* Cloud storage implementation
*
* - Clear, self-describing method names
* - Folder timestamp propagation helper: updateFolderTimestampsRecursively
* - File upload lifecycle: initializeFileUpload -> appendFileChunk -> finalizeFileUpload
*/
/* ------------------------------- Helpers ------------------------------- */
/**
 * Touch `updatedAt` on a folder and every ancestor up the parent chain,
 * capped at 50 levels to guard against cycles. Stops silently if a folder
 * is deleted concurrently (Prisma P2025).
 */
async function updateFolderTimestampsRecursively(folderId: number | null) {
  const MAX_DEPTH = 50;
  let cursor: number | null = folderId;
  for (let depth = 0; cursor != null && depth < MAX_DEPTH; depth++) {
    let parentId: number | null;
    try {
      // touch updatedAt and fetch parentId in one round trip
      const row = (await db.cloudFolder.update({
        where: { id: cursor },
        data: { updatedAt: new Date() },
        select: { parentId: true },
      })) as { parentId: number | null };
      parentId = row.parentId ?? null;
    } catch (err: any) {
      if (err?.code === "P2025") return; // folder removed concurrently
      throw err;
    }
    cursor = parentId;
  }
}
/* ------------------------------- IStorage ------------------------------- */
/** Persistence contract for the cloud drive: folders, chunked files, search, streaming. */
export interface IStorage {
  // Folders
  getFolder(id: number): Promise<CloudFolder | null>;
  /** Recently updated folders; parentId undefined = global, null = top-level. */
  listRecentFolders(
    limit: number,
    offset: number,
    parentId?: number | null
  ): Promise<CloudFolder[]>;
  countFoldersByParent(parentId: number | null): Promise<number>;
  countFolders(filter?: {
    userId?: number;
    nameContains?: string | null;
  }): Promise<number>;
  createFolder(
    userId: number,
    name: string,
    parentId?: number | null
  ): Promise<CloudFolder>;
  updateFolder(
    id: number,
    updates: Partial<{ name?: string; parentId?: number | null }>
  ): Promise<CloudFolder | null>;
  /** Resolves false when the folder does not exist. */
  deleteFolder(id: number): Promise<boolean>;
  // Files
  getFile(id: number): Promise<CloudFile | null>;
  listFilesInFolder(
    folderId: number | null,
    limit: number,
    offset: number
  ): Promise<CloudFile[]>;
  // Upload lifecycle: initializeFileUpload -> appendFileChunk* -> finalizeFileUpload
  initializeFileUpload(
    userId: number,
    name: string,
    mimeType?: string | null,
    expectedSize?: bigint | null,
    totalChunks?: number | null,
    folderId?: number | null
  ): Promise<CloudFile>;
  appendFileChunk(fileId: number, seq: number, data: Buffer): Promise<void>;
  finalizeFileUpload(fileId: number): Promise<{ ok: true; size: string }>;
  deleteFile(fileId: number): Promise<boolean>;
  updateFile(
    id: number,
    updates: Partial<Pick<CloudFile, "name" | "mimeType" | "folderId">>
  ): Promise<CloudFile | null>;
  renameFile(id: number, name: string): Promise<CloudFile | null>;
  countFilesInFolder(folderId: number | null): Promise<number>;
  countFiles(filter?: {
    userId?: number;
    nameContains?: string | null;
    mimeType?: string | null;
  }): Promise<number>;
  // Search
  searchFolders(
    q: string,
    limit: number,
    offset: number,
    parentId?: number | null
  ): Promise<{ data: CloudFolder[]; total: number }>;
  searchFiles(
    q: string,
    type: string | undefined,
    limit: number,
    offset: number
  ): Promise<{ data: CloudFile[]; total: number }>;
  // Streaming
  streamFileTo(resStream: NodeJS.WritableStream, fileId: number): Promise<void>;
}
/* ------------------------------- Implementation ------------------------------- */
export const cloudStorageStorage: IStorage = {
  // --- FOLDERS ---
  /** Single folder by id (files relation not loaded), or null when missing. */
  async getFolder(id: number) {
    const folder = await db.cloudFolder.findUnique({
      where: { id },
      include: { files: false },
    });
    return (folder as unknown as CloudFolder) ?? null;
  },
  /** Folders ordered by most recent update, optionally filtered by parent. */
  async listRecentFolders(limit = 50, offset = 0, parentId?: number | null) {
    const where: any = {};
    // parentId === undefined → no filter (global recent)
    // parentId === null → top-level folders (parent IS NULL)
    // parentId === number → children of that folder
    if (parentId !== undefined) {
      where.parentId = parentId;
    }
    const folders = await db.cloudFolder.findMany({
      where,
      orderBy: { updatedAt: "desc" },
      skip: offset,
      take: limit,
    });
    return folders as unknown as CloudFolder[];
  },
  async countFoldersByParent(parentId: number | null = null) {
    return db.cloudFolder.count({ where: { parentId } });
  },
  /** Create a folder, then touch the ancestor chain's updatedAt. */
  async createFolder(
    userId: number,
    name: string,
    parentId: number | null = null
  ) {
    const created = await db.cloudFolder.create({
      data: { userId, name, parentId },
    });
    // mark parent(s) as updated
    await updateFolderTimestampsRecursively(parentId);
    return created as unknown as CloudFolder;
  },
  /** Rename and/or move a folder; touches the affected ancestor chain. */
  async updateFolder(
    id: number,
    updates: Partial<{ name?: string; parentId?: number | null }>
  ) {
    try {
      const updated = await db.cloudFolder.update({
        where: { id },
        data: updates,
      });
      if (updates.parentId !== undefined) {
        await updateFolderTimestampsRecursively(updates.parentId ?? null);
      } else {
        // touch this folder's parent (to mark modification)
        const f = await db.cloudFolder.findUnique({
          where: { id },
          select: { parentId: true },
        });
        await updateFolderTimestampsRecursively(f?.parentId ?? null);
      }
      return updated as unknown as CloudFolder;
    } catch (err) {
      throw err;
    }
  },
  /** Delete a folder; returns false when it did not exist (Prisma P2025). */
  async deleteFolder(id: number) {
    try {
      const folder = await db.cloudFolder.findUnique({
        where: { id },
        select: { parentId: true },
      });
      const parentId = folder?.parentId ?? null;
      await db.cloudFolder.delete({ where: { id } });
      await updateFolderTimestampsRecursively(parentId);
      return true;
    } catch (err: any) {
      if (err?.code === "P2025") return false;
      throw err;
    }
  },
  /** Count folders matching optional owner / name-substring filters. */
  async countFolders(filter?: {
    userId?: number;
    nameContains?: string | null;
  }) {
    const where: any = {};
    if (filter?.userId) where.userId = filter.userId;
    if (filter?.nameContains)
      where.name = { contains: filter.nameContains, mode: "insensitive" };
    return db.cloudFolder.count({ where });
  },
  // --- FILES ---
  /** Single file with its chunks ordered by seq, or null when missing. */
  async getFile(id: number) {
    const file = await db.cloudFile.findUnique({
      where: { id },
      include: { chunks: { orderBy: { seq: "asc" } } },
    });
    return (file as unknown as CloudFile) ?? null;
  },
  /** Newest-first page of file metadata (no chunk data) in a folder. */
  async listFilesInFolder(
    folderId: number | null = null,
    limit = 50,
    offset = 0
  ) {
    const files = await db.cloudFile.findMany({
      where: { folderId },
      orderBy: { createdAt: "desc" },
      skip: offset,
      take: limit,
      select: {
        id: true,
        name: true,
        mimeType: true,
        fileSize: true,
        folderId: true,
        isComplete: true,
        createdAt: true,
        updatedAt: true,
      },
    });
    return files.map(serializeFile) as unknown as CloudFile[];
  },
  /** Step 1 of the upload lifecycle: create an incomplete file record. */
  async initializeFileUpload(
    userId: number,
    name: string,
    mimeType: string | null = null,
    expectedSize: bigint | null = null,
    totalChunks: number | null = null,
    folderId: number | null = null
  ) {
    const created = await db.cloudFile.create({
      data: {
        userId,
        name,
        mimeType,
        fileSize: expectedSize ?? BigInt(0),
        folderId,
        totalChunks,
        isComplete: false,
      },
    });
    await updateFolderTimestampsRecursively(folderId);
    return serializeFile(created) as unknown as CloudFile;
  },
  /** Step 2: store one chunk. Re-sending the same seq is a silent no-op. */
  async appendFileChunk(fileId: number, seq: number, data: Buffer) {
    try {
      await db.cloudFileChunk.create({ data: { fileId, seq, data } });
    } catch (err: any) {
      // idempotent: ignore duplicate chunk constraint
      if (
        err?.code === "P2002" ||
        err?.message?.includes("Unique constraint failed")
      ) {
        return;
      }
      throw err;
    }
  },
  /** Step 3: sum chunk sizes, mark the file complete, touch its folder chain.
   *  Throws when no chunks were uploaded. */
  async finalizeFileUpload(fileId: number) {
    const chunks = await db.cloudFileChunk.findMany({ where: { fileId } });
    if (!chunks.length) throw new Error("No chunks uploaded");
    // compute total size
    let total = 0;
    for (const c of chunks) total += c.data.length;
    // transactionally update file and read folderId
    const updated = await db.$transaction(async (tx) => {
      await tx.cloudFile.update({
        where: { id: fileId },
        data: { fileSize: BigInt(total), isComplete: true },
      });
      return tx.cloudFile.findUnique({
        where: { id: fileId },
        select: { folderId: true },
      });
    });
    const folderId = (updated as any)?.folderId ?? null;
    await updateFolderTimestampsRecursively(folderId);
    return { ok: true, size: BigInt(total).toString() };
  },
  /** Delete a file; returns false when it did not exist. */
  async deleteFile(fileId: number) {
    try {
      const file = await db.cloudFile.findUnique({
        where: { id: fileId },
        select: { folderId: true },
      });
      if (!file) return false;
      const folderId = file.folderId ?? null;
      await db.cloudFile.delete({ where: { id: fileId } });
      await updateFolderTimestampsRecursively(folderId);
      return true;
    } catch (err: any) {
      if (err?.code === "P2025") return false;
      throw err;
    }
  },
  /** Rename / retype / move a file; touches both the old and new folder chains on a move. */
  async updateFile(
    id: number,
    updates: Partial<Pick<CloudFile, "name" | "mimeType" | "folderId">>
  ) {
    try {
      let prevFolderId: number | null = null;
      if (updates.folderId !== undefined) {
        const f = await db.cloudFile.findUnique({
          where: { id },
          select: { folderId: true },
        });
        prevFolderId = f?.folderId ?? null;
      }
      const updated = await db.cloudFile.update({
        where: { id },
        data: updates,
      });
      // touch affected folders
      if (updates.folderId !== undefined) {
        await updateFolderTimestampsRecursively(updates.folderId ?? null);
        if (
          prevFolderId != null &&
          prevFolderId !== (updates.folderId ?? null)
        ) {
          await updateFolderTimestampsRecursively(prevFolderId);
        }
      } else {
        const f = await db.cloudFile.findUnique({
          where: { id },
          select: { folderId: true },
        });
        await updateFolderTimestampsRecursively(f?.folderId ?? null);
      }
      return serializeFile(updated) as unknown as CloudFile;
    } catch (err) {
      throw err;
    }
  },
  /** Rename only; touches the containing folder chain. */
  async renameFile(id: number, name: string) {
    try {
      const updated = await db.cloudFile.update({
        where: { id },
        data: { name },
      });
      const f = await db.cloudFile.findUnique({
        where: { id },
        select: { folderId: true },
      });
      await updateFolderTimestampsRecursively(f?.folderId ?? null);
      return serializeFile(updated) as unknown as CloudFile;
    } catch (err) {
      throw err;
    }
  },
  async countFilesInFolder(folderId: number | null) {
    return db.cloudFile.count({ where: { folderId } });
  },
  /** Count files matching optional owner / name / mime-prefix filters. */
  async countFiles(filter?: {
    userId?: number;
    nameContains?: string | null;
    mimeType?: string | null;
  }) {
    const where: any = {};
    if (filter?.userId) where.userId = filter.userId;
    if (filter?.nameContains)
      where.name = { contains: filter.nameContains, mode: "insensitive" };
    if (filter?.mimeType)
      where.mimeType = { startsWith: filter.mimeType, mode: "insensitive" };
    return db.cloudFile.count({ where });
  },
  // --- SEARCH ---
  /** Case-insensitive folder name search; returns a page plus total count. */
  async searchFolders(
    q: string,
    limit = 20,
    offset = 0,
    parentId?: number | null
  ) {
    // Build where clause
    const where: any = {
      name: { contains: q, mode: "insensitive" },
    };
    // If parentId is explicitly provided:
    //   - parentId === null -> top-level folders (parent IS NULL)
    //   - parentId === number -> children of that folder
    // If parentId is undefined -> search across all folders (no parent filter)
    if (parentId !== undefined) {
      where.parentId = parentId;
    }
    const [folders, total] = await Promise.all([
      db.cloudFolder.findMany({
        where,
        orderBy: { name: "asc" },
        skip: offset,
        take: limit,
      }),
      db.cloudFolder.count({
        where,
      }),
    ]);
    return { data: folders as unknown as CloudFolder[], total };
  },
  /** File search by name and/or mime type; bare types ("image") match as a "image/" prefix. */
  async searchFiles(
    q: string,
    type: string | undefined,
    limit = 20,
    offset = 0
  ) {
    const where: any = {};
    if (q) where.name = { contains: q, mode: "insensitive" };
    if (type) {
      if (!type.includes("/"))
        where.mimeType = { startsWith: `${type}/`, mode: "insensitive" };
      else where.mimeType = { startsWith: type, mode: "insensitive" };
    }
    const [files, total] = await Promise.all([
      db.cloudFile.findMany({
        where,
        orderBy: { createdAt: "desc" },
        skip: offset,
        take: limit,
        select: {
          id: true,
          name: true,
          mimeType: true,
          fileSize: true,
          folderId: true,
          isComplete: true,
          createdAt: true,
          updatedAt: true,
        },
      }),
      db.cloudFile.count({ where }),
    ]);
    return { data: files.map(serializeFile) as unknown as CloudFile[], total };
  },
  // --- STREAM ---
  /** Write a file's chunks to the stream in seq order, fetched 100 at a time
   *  to bound memory. Does not end or close the stream. */
  async streamFileTo(resStream: NodeJS.WritableStream, fileId: number) {
    const batchSize = 100;
    let offset = 0;
    while (true) {
      const chunks = await db.cloudFileChunk.findMany({
        where: { fileId },
        orderBy: { seq: "asc" },
        take: batchSize,
        skip: offset,
      });
      if (!chunks.length) break;
      for (const c of chunks) resStream.write(Buffer.from(c.data));
      offset += chunks.length;
      if (chunks.length < batchSize) break;
    }
  },
};
export default cloudStorageStorage;

View File

@@ -0,0 +1,113 @@
import { DatabaseBackup, BackupDestination } from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
/** Persistence contract for database-backup records and backup destinations. */
export interface IStorage {
  // Database Backup methods
  createBackup(userId: number): Promise<DatabaseBackup>;
  getLastBackup(userId: number): Promise<DatabaseBackup | null>;
  getBackups(userId: number, limit?: number): Promise<DatabaseBackup[]>;
  deleteBackups(userId: number): Promise<number>; // clears all for user
  // ==============================
  // Backup Destination methods
  // ==============================
  /** Creates a destination after deactivating the user's existing ones. */
  createBackupDestination(
    userId: number,
    path: string
  ): Promise<BackupDestination>;
  getActiveBackupDestination(
    userId: number
  ): Promise<BackupDestination | null>;
  getAllBackupDestination(
    userId: number
  ): Promise<BackupDestination[]>;
  /** Updates the path and makes this destination the active one. */
  updateBackupDestination(
    id: number,
    userId: number,
    path: string
  ): Promise<BackupDestination>;
  deleteBackupDestination(
    id: number,
    userId: number
  ): Promise<BackupDestination>;
}
export const databaseBackupStorage: IStorage = {
  // ==============================
  // Database Backup methods
  // ==============================

  /** Record that this user performed a backup. */
  async createBackup(userId) {
    return db.databaseBackup.create({ data: { userId } });
  },

  /** Most recent backup record for the user, or null when none exist. */
  async getLastBackup(userId) {
    const [latest] = await db.databaseBackup.findMany({
      where: { userId },
      orderBy: { createdAt: "desc" },
      take: 1,
    });
    return latest ?? null;
  },

  /** Newest `limit` backup records for the user. */
  async getBackups(userId, limit = 10) {
    return db.databaseBackup.findMany({
      where: { userId },
      orderBy: { createdAt: "desc" },
      take: limit,
    });
  },

  /** Delete all of the user's backup records; resolves with the removed count. */
  async deleteBackups(userId) {
    const { count } = await db.databaseBackup.deleteMany({ where: { userId } });
    return count;
  },

  // ==============================
  // Backup Destination methods
  // ==============================

  /** Deactivate any existing destinations for the user, then create a new one. */
  async createBackupDestination(userId, path) {
    await db.backupDestination.updateMany({
      where: { userId },
      data: { isActive: false },
    });
    return db.backupDestination.create({ data: { userId, path } });
  },

  /** The user's currently active destination, if any. */
  async getActiveBackupDestination(userId) {
    return db.backupDestination.findFirst({
      where: { userId, isActive: true },
    });
  },

  /** Every destination for the user, newest first. */
  async getAllBackupDestination(userId) {
    return db.backupDestination.findMany({
      where: { userId },
      orderBy: { createdAt: "desc" },
    });
  },

  /** Change a destination's path and promote it to be the active one. */
  async updateBackupDestination(id, userId, path) {
    await db.backupDestination.updateMany({
      where: { userId },
      data: { isActive: false },
    });
    return db.backupDestination.update({
      where: { id, userId },
      data: { path, isActive: true },
    });
  },

  /** Remove a destination owned by the user. */
  async deleteBackupDestination(id, userId) {
    return db.backupDestination.delete({ where: { id, userId } });
  },
};

View File

@@ -0,0 +1,140 @@
import { storage } from "../storage";
import { getPatientFinancialRowsFn } from "./patients-storage";
import { GetPatientBalancesResult } from "@repo/db/types";
/** Minimal per-patient summary used when assembling balance exports. */
type PatientSummaryRow = {
  patientId: number;
  firstName: string | null;
  lastName: string | null;
  // Outstanding balance within the optional from/to filter window.
  currentBalance: number;
};
/**
 * Drain every page of storage.getPatientsWithBalances (cursor-paginated)
 * into one flat, non-paginated list. Honors the same from/to filters as
 * the underlying query.
 */
export async function fetchAllPatientsWithBalances(
  from?: Date | null,
  to?: Date | null,
  pageSize = 500
): Promise<PatientSummaryRow[]> {
  const collected: PatientSummaryRow[] = [];
  let cursor: string | null = null;
  for (;;) {
    const page: GetPatientBalancesResult =
      await storage.getPatientsWithBalances(pageSize, cursor, from, to);
    if (!page) break;
    const balances = Array.isArray(page.balances) ? page.balances : [];
    for (const b of balances) {
      collected.push({
        patientId: Number(b.patientId),
        firstName: b.firstName ?? null,
        lastName: b.lastName ?? null,
        currentBalance: Number(b.currentBalance ?? 0),
      });
    }
    if (!page.hasMore || !page.nextCursor) break;
    cursor = page.nextCursor;
  }
  return collected;
}
/**
 * Drain storage.getPatientsBalancesByDoctor page by page and return the
 * complete patient balance list for the given staff member (doctor).
 */
export async function fetchAllPatientsForDoctor(
  staffId: number,
  from?: Date | null,
  to?: Date | null,
  pageSize = 500
): Promise<PatientSummaryRow[]> {
  const collected: PatientSummaryRow[] = [];
  let cursorToken: string | null = null;
  for (;;) {
    const page: GetPatientBalancesResult =
      await storage.getPatientsBalancesByDoctor(
        staffId,
        pageSize,
        cursorToken,
        from,
        to
      );
    if (!page) break;
    const balances = Array.isArray(page.balances) ? page.balances : [];
    for (const row of balances) {
      collected.push({
        patientId: Number(row.patientId),
        firstName: row.firstName ?? null,
        lastName: row.lastName ?? null,
        currentBalance: Number(row.currentBalance ?? 0),
      });
    }
    if (!page.hasMore || !page.nextCursor) break;
    cursorToken = page.nextCursor;
  }
  return collected;
}
/**
 * For each patient summary, load the patient's full financial history via
 * getPatientFinancialRowsFn and flatten it into export-ready records.
 *
 * Returns an array of
 *   { patientId, firstName, lastName, currentBalance, financialRows }
 * where each financial row is
 *   { type, date, procedureCode, billed, paid, adjusted, totalDue, status }.
 */
export async function buildExportRowsForPatients(
  patients: PatientSummaryRow[],
  perPatientLimit = 5000
) {
  const results: Array<any> = [];
  for (const patient of patients) {
    const patientId = Number(patient.patientId);
    // getPatientFinancialRowsFn returns { rows, totalCount }; only rows are needed.
    const { rows } = await getPatientFinancialRowsFn(
      patientId,
      perPatientLimit,
      0
    );
    const financialRows = rows.flatMap((row: any) => {
      const serviceLines = row.service_lines ?? [];
      if (serviceLines.length === 0) {
        // No per-procedure breakdown: emit a single row from the
        // claim/payment totals. Both snake_case and camelCase field
        // spellings are tolerated.
        return [
          {
            type: row.type,
            date: row.date ? new Date(row.date).toLocaleDateString() : "",
            procedureCode: "-",
            billed: Number(row.total_billed ?? row.totalBilled ?? 0),
            paid: Number(row.total_paid ?? row.totalPaid ?? 0),
            adjusted: Number(row.total_adjusted ?? row.totalAdjusted ?? 0),
            totalDue: Number(row.total_due ?? row.totalDue ?? 0),
            status: row.status ?? "",
          },
        ];
      }
      // One export row per service line.
      return serviceLines.map((line: any) => ({
        type: row.type,
        date: row.date ? new Date(row.date).toLocaleDateString() : "",
        procedureCode: String(line.procedureCode ?? "-"),
        billed: Number(line.totalBilled ?? 0),
        paid: Number(line.totalPaid ?? 0),
        adjusted: Number(line.totalAdjusted ?? 0),
        totalDue: Number(line.totalDue ?? 0),
        status: line.status ?? row.status ?? "",
      }));
    });
    results.push({
      patientId,
      firstName: patient.firstName,
      lastName: patient.lastName,
      currentBalance: Number(patient.currentBalance ?? 0),
      financialRows,
    });
  }
  return results;
}

View File

@@ -0,0 +1,218 @@
import { PdfFile, PdfGroup } from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
import { PdfTitleKey } from "@repo/db/generated/prisma";
// Contract for patient PDF storage: individual files plus the titled
// groups that organize them.
export interface IStorage {
  // General PDF Methods

  /** Store a PDF binary under an existing group. */
  createPdfFile(
    groupId: number,
    filename: string,
    pdfData: Buffer
  ): Promise<PdfFile>;
  /** One file by id; undefined when absent. */
  getPdfFileById(id: number): Promise<PdfFile | undefined>;
  /**
   * List a group's files. Without opts the full array is returned; with
   * limit/offset the result is { total, data }; withGroup embeds the owning
   * PdfGroup on each row.
   */
  getPdfFilesByGroupId(
    groupId: number,
    opts?: { limit?: number; offset?: number; withGroup?: boolean }
  ): Promise<PdfFile[] | { total: number; data: PdfFile[] }>;
  /** Most recently uploaded files across all groups. */
  getRecentPdfFiles(limit: number, offset: number): Promise<PdfFile[]>;
  /** Remove a file; false when missing or the delete fails. */
  deletePdfFile(id: number): Promise<boolean>;
  /** Patch filename and/or binary content; undefined when the update fails. */
  updatePdfFile(
    id: number,
    updates: Partial<Pick<PdfFile, "filename" | "pdfData">>
  ): Promise<PdfFile | undefined>;

  // PDF Group management

  /** Create a titled PDF group for a patient. */
  createPdfGroup(
    patientId: number,
    title: string,
    titleKey: PdfTitleKey
  ): Promise<PdfGroup>;
  /** Locate a patient's group by its titleKey; undefined when absent. */
  findPdfGroupByPatientTitleKey(
    patientId: number,
    titleKey: PdfTitleKey
  ): Promise<PdfGroup | undefined>;
  /** Every group across all patients. */
  getAllPdfGroups(): Promise<PdfGroup[]>;
  /** One group by id; undefined when absent. */
  getPdfGroupById(id: number): Promise<PdfGroup | undefined>;
  /** All of a patient's groups. */
  getPdfGroupsByPatientId(patientId: number): Promise<PdfGroup[]>;
  /** Rename a group; undefined when the update fails. */
  updatePdfGroup(
    id: number,
    updates: Partial<Pick<PdfGroup, "title">>
  ): Promise<PdfGroup | undefined>;
  /** Remove a group; false when missing or the delete fails. */
  deletePdfGroup(id: number): Promise<boolean>;
}
export const generalPdfStorage: IStorage = {
  /** Persist a new PDF binary under the given group. */
  async createPdfFile(groupId, filename, pdfData) {
    return db.pdfFile.create({
      data: { groupId, filename, pdfData },
    });
  },

  /** All PDF groups across all patients, newest first. */
  async getAllPdfGroups(): Promise<PdfGroup[]> {
    return db.pdfGroup.findMany({
      orderBy: { createdAt: "desc" },
    });
  },

  /** One PDF file by id; undefined when it does not exist. */
  async getPdfFileById(id) {
    const file = await db.pdfFile.findUnique({ where: { id } });
    return file ?? undefined;
  },

  /**
   * getPdfFilesByGroupId supports three call shapes:
   *  - (groupId)                               -> PdfFile[]
   *  - (groupId, { limit, offset })            -> { total, data }
   *  - (groupId, { limit, offset, withGroup }) -> { total, data } incl. group
   */
  async getPdfFilesByGroupId(groupId, opts) {
    // Pagination kicks in as soon as either limit or offset is numeric.
    const paginated =
      !!opts &&
      (typeof opts.limit === "number" || typeof opts.offset === "number");

    if (paginated) {
      // Page size defaults to 5 and is clamped to 1000; offset defaults to 0.
      const take = Math.min(Number(opts?.limit ?? 5), 1000);
      const skip = Number(opts?.offset ?? 0);

      if (opts?.withGroup) {
        const [total, data] = await Promise.all([
          db.pdfFile.count({ where: { groupId } }),
          db.pdfFile.findMany({
            where: { groupId },
            orderBy: { uploadedAt: "desc" },
            take,
            skip,
            include: { group: true },
          }),
        ]);
        return { total, data };
      }

      // Lightweight page: only id/filename/uploadedAt are selected.
      const [total, data] = await Promise.all([
        db.pdfFile.count({ where: { groupId } }),
        db.pdfFile.findMany({
          where: { groupId },
          orderBy: { uploadedAt: "desc" },
          take,
          skip,
          select: { id: true, filename: true, uploadedAt: true },
        }),
      ]);
      // The selected shape lacks the remaining PdfFile fields; cast to
      // satisfy the interface, as callers of this page only read the
      // selected columns.
      return { total, data: data as unknown as PdfFile[] };
    }

    // Non-paginated path: return every file in the group, newest first.
    if (opts?.withGroup) {
      const all = await db.pdfFile.findMany({
        where: { groupId },
        orderBy: { uploadedAt: "desc" },
        include: { group: true },
      });
      return all as PdfFile[];
    }
    const all = await db.pdfFile.findMany({
      where: { groupId },
      orderBy: { uploadedAt: "desc" },
    });
    return all as PdfFile[];
  },

  /** Most recently uploaded PDFs across all groups, group included. */
  async getRecentPdfFiles(limit: number, offset: number): Promise<PdfFile[]> {
    return db.pdfFile.findMany({
      skip: offset,
      take: limit,
      orderBy: { uploadedAt: "desc" },
      include: { group: true },
    });
  },

  /** Patch filename/pdfData; undefined when the update fails. */
  async updatePdfFile(id, updates) {
    try {
      return await db.pdfFile.update({ where: { id }, data: updates });
    } catch {
      return undefined;
    }
  },

  /** Delete one PDF file row; false when missing or the delete fails. */
  async deletePdfFile(id) {
    try {
      await db.pdfFile.delete({ where: { id } });
      return true;
    } catch {
      return false;
    }
  },

  // ----------------------
  // PdfGroup CRUD
  // ----------------------

  /** Create a titled group of PDFs for a patient. */
  async createPdfGroup(patientId, title, titleKey) {
    return db.pdfGroup.create({
      data: { patientId, title, titleKey },
    });
  },

  /** Find a patient's group by its titleKey; undefined when absent. */
  async findPdfGroupByPatientTitleKey(patientId, titleKey) {
    const group = await db.pdfGroup.findFirst({
      where: { patientId, titleKey },
    });
    return group ?? undefined;
  },

  /** One group by id; undefined when absent. */
  async getPdfGroupById(id) {
    const group = await db.pdfGroup.findUnique({ where: { id } });
    return group ?? undefined;
  },

  /** All groups belonging to a patient, newest first. */
  async getPdfGroupsByPatientId(patientId) {
    return db.pdfGroup.findMany({
      where: { patientId },
      orderBy: { createdAt: "desc" },
    });
  },

  /** Rename a group; undefined when the update fails. */
  async updatePdfGroup(id, updates) {
    try {
      return await db.pdfGroup.update({ where: { id }, data: updates });
    } catch {
      return undefined;
    }
  },

  /** Delete a group; false when missing or the delete fails. */
  async deletePdfGroup(id) {
    try {
      await db.pdfGroup.delete({ where: { id } });
      return true;
    } catch {
      return false;
    }
  },
};

View File

@@ -0,0 +1,41 @@
import { usersStorage } from './users-storage';
import { patientsStorage } from './patients-storage';
import { appointmentsStorage } from './appointments-storage';
import { appointmentProceduresStorage } from './appointment-procedures-storage';
import { staffStorage } from './staff-storage';
import { npiProviderStorage } from './npi-providers-storage';
import { claimsStorage } from './claims-storage';
import { insuranceCredsStorage } from './insurance-creds-storage';
import { generalPdfStorage } from './general-pdf-storage';
import { paymentsStorage } from './payments-storage';
import { databaseBackupStorage } from './database-backup-storage';
import { notificationsStorage } from './notifications-storage';
import { cloudStorageStorage } from './cloudStorage-storage';
import { paymentsReportsStorage } from './payments-reports-storage';
import { patientDocumentsStorage } from './patientDocuments-storage';
import * as exportPaymentsReportsStorage from "./export-payments-reports-storage";
// Unified storage facade: merges every domain-specific storage module into a
// single object so callers can write `storage.someMethod(...)` regardless of
// which module implements it. Spread order matters only when two modules
// export the same method name — the later module in this list wins.
export const storage = {
  ...usersStorage,
  ...patientsStorage,
  ...appointmentsStorage,
  ...appointmentProceduresStorage,
  ...staffStorage,
  ...npiProviderStorage,
  ...claimsStorage,
  ...insuranceCredsStorage,
  ...generalPdfStorage,
  ...paymentsStorage,
  ...databaseBackupStorage,
  ...notificationsStorage,
  ...cloudStorageStorage,
  ...paymentsReportsStorage,
  ...patientDocumentsStorage,
  ...exportPaymentsReportsStorage,
};
// Both named and default exports are provided for import-style flexibility.
export default storage;

View File

@@ -0,0 +1,63 @@
import { InsertInsuranceCredential, InsuranceCredential } from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
// Contract for persisting per-user insurance-portal credentials.
export interface IStorage {
  /** One credential by primary key; null when absent. */
  getInsuranceCredential(id: number): Promise<InsuranceCredential | null>;
  /** Every credential saved by the user. */
  getInsuranceCredentialsByUser(userId: number): Promise<InsuranceCredential[]>;
  /** Persist a new credential. */
  createInsuranceCredential(
    data: InsertInsuranceCredential
  ): Promise<InsuranceCredential>;
  /** Patch an existing credential; null per the declared type when unavailable. */
  updateInsuranceCredential(
    id: number,
    updates: Partial<InsuranceCredential>
  ): Promise<InsuranceCredential | null>;
  /** Remove the user's credential; true when a row was deleted. */
  deleteInsuranceCredential(userId: number, id: number): Promise<boolean>;
  /** First credential matching user + portal site key; null when none. */
  getInsuranceCredentialByUserAndSiteKey(
    userId: number,
    siteKey: string
  ): Promise<InsuranceCredential | null>;
}
export const insuranceCredsStorage: IStorage = {
  /** One credential by primary key; null when absent. */
  async getInsuranceCredential(id: number) {
    return await db.insuranceCredential.findUnique({ where: { id } });
  },

  /** Every credential saved by the user. */
  async getInsuranceCredentialsByUser(userId: number) {
    return await db.insuranceCredential.findMany({ where: { userId } });
  },

  /** Persist a new credential row. */
  async createInsuranceCredential(data: InsertInsuranceCredential) {
    return await db.insuranceCredential.create({
      data: data as InsuranceCredential,
    });
  },

  /**
   * Patch a credential.
   * Returns null when no row with that id exists (Prisma error code P2025),
   * matching the declared `InsuranceCredential | null` return type instead
   * of letting the "record not found" rejection propagate; any other
   * failure is rethrown untouched.
   */
  async updateInsuranceCredential(
    id: number,
    updates: Partial<InsuranceCredential>
  ) {
    try {
      return await db.insuranceCredential.update({
        where: { id },
        data: updates,
      });
    } catch (err) {
      if ((err as { code?: string })?.code === "P2025") return null;
      throw err;
    }
  },

  /** Delete the user's credential; false when missing or the delete fails. */
  async deleteInsuranceCredential(userId: number, id: number) {
    try {
      await db.insuranceCredential.delete({ where: { userId, id } });
      return true;
    } catch {
      return false;
    }
  },

  /** First credential matching a user + site key pair; null when absent. */
  async getInsuranceCredentialByUserAndSiteKey(
    userId: number,
    siteKey: string
  ): Promise<InsuranceCredential | null> {
    return await db.insuranceCredential.findFirst({
      where: { userId, siteKey },
    });
  },
};

View File

@@ -0,0 +1,80 @@
import { Notification, NotificationTypes } from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
// Contract for per-user in-app notifications.
export interface IStorage {
  // Notification methods

  /** Create a notification of the given type for the user. */
  createNotification(
    userId: number,
    type: NotificationTypes,
    message: string
  ): Promise<Notification>;
  /** Page of the user's notifications (defaults supplied by the implementation). */
  getNotifications(
    userId: number,
    limit?: number,
    offset?: number
  ): Promise<Notification[]>;
  /** Mark one notification read; true when a row was updated. */
  markNotificationRead(
    userId: number,
    notificationId: number
  ): Promise<boolean>;
  /** Mark all of the user's notifications read; returns rows updated. */
  markAllNotificationsRead(userId: number): Promise<number>;
  /** Delete the user's notifications of one type; returns rows removed. */
  deleteNotificationsByType(
    userId: number,
    type: NotificationTypes
  ): Promise<number>;
  /** Delete every notification for the user; returns rows removed. */
  deleteAllNotifications(userId: number): Promise<number>;
}
export const notificationsStorage: IStorage = {
  // ==============================
  // Notification methods
  // ==============================

  /** Insert a notification for the user. */
  async createNotification(userId, type, message) {
    const created = await db.notification.create({
      data: { message, type, userId },
    });
    return created;
  },

  /** Page of the user's notifications, newest first. */
  async getNotifications(
    userId: number,
    limit = 50,
    offset = 0
  ): Promise<Notification[]> {
    const page = await db.notification.findMany({
      orderBy: { createdAt: "desc" },
      skip: offset,
      take: limit,
      where: { userId },
    });
    return page;
  },

  /** Flag one of the user's notifications as read; true when a row was touched. */
  async markNotificationRead(userId, notificationId) {
    const { count } = await db.notification.updateMany({
      data: { read: true },
      where: { id: notificationId, userId },
    });
    return count > 0;
  },

  /** Flag every notification as read; returns how many rows were updated. */
  async markAllNotificationsRead(userId) {
    const { count } = await db.notification.updateMany({
      data: { read: true },
      where: { userId },
    });
    return count;
  },

  /** Remove the user's notifications of a single type; returns rows removed. */
  async deleteNotificationsByType(userId, type) {
    const { count } = await db.notification.deleteMany({
      where: { type, userId },
    });
    return count;
  },

  /** Remove every notification for the user; returns rows removed. */
  async deleteAllNotifications(userId: number): Promise<number> {
    const { count } = await db.notification.deleteMany({
      where: { userId },
    });
    return count;
  },
};

View File

@@ -0,0 +1,50 @@
import { prisma as db } from "@repo/db/client";
import { InsertNpiProvider, NpiProvider } from "@repo/db/types";
// Contract for per-user NPI provider records.
export interface INpiProviderStorage {
  /** One provider by primary key; null when absent. */
  getNpiProvider(id: number): Promise<NpiProvider | null>;
  /** Every provider registered by the user. */
  getNpiProvidersByUser(userId: number): Promise<NpiProvider[]>;
  /** Persist a new provider. */
  createNpiProvider(data: InsertNpiProvider): Promise<NpiProvider>;
  /** Patch an existing provider; null per the declared type when unavailable. */
  updateNpiProvider(
    id: number,
    updates: Partial<NpiProvider>,
  ): Promise<NpiProvider | null>;
  /** Remove the user's provider; true when a row was deleted. */
  deleteNpiProvider(userId: number, id: number): Promise<boolean>;
}
export const npiProviderStorage: INpiProviderStorage = {
  /** One provider by primary key; null when absent. */
  async getNpiProvider(id: number) {
    return db.npiProvider.findUnique({ where: { id } });
  },

  /** All providers registered by the user, newest first. */
  async getNpiProvidersByUser(userId: number) {
    return db.npiProvider.findMany({
      where: { userId },
      orderBy: { createdAt: "desc" },
    });
  },

  /** Insert a provider row. */
  async createNpiProvider(data: InsertNpiProvider) {
    return db.npiProvider.create({
      data: data as NpiProvider,
    });
  },

  /**
   * Patch a provider.
   * Returns null when the row does not exist (Prisma error code P2025),
   * matching the declared `NpiProvider | null` return type instead of
   * letting the rejection escape; other errors are rethrown untouched.
   */
  async updateNpiProvider(id: number, updates: Partial<NpiProvider>) {
    try {
      return await db.npiProvider.update({
        where: { id },
        data: updates,
      });
    } catch (err) {
      if ((err as { code?: string })?.code === "P2025") return null;
      throw err;
    }
  },

  /** Delete the user's provider; false when missing or the delete fails. */
  async deleteNpiProvider(userId: number, id: number) {
    try {
      await db.npiProvider.delete({
        where: { id, userId },
      });
      return true;
    } catch {
      return false;
    }
  },
};

View File

@@ -0,0 +1,179 @@
import { PatientDocument, CreatePatientDocument } from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
import path from "path";
import fs from "fs/promises";
import { randomBytes } from "crypto";
// Root directory on disk where uploaded patient documents live.
const UPLOAD_DIR = path.join(process.cwd(), "uploads", "patient-documents");

/**
 * Map a stored file URL back to its absolute path on local disk.
 * Only the final URL segment (the unique filename) is used.
 */
const getLocalFilePath = (fileUrl: string): string => {
  const segments = fileUrl.split("/");
  const filename = segments[segments.length - 1] || "";
  return path.join(UPLOAD_DIR, filename);
};
/**
 * Base URL clients use to fetch uploaded files.
 * A bind-all HOST of 0.0.0.0 is rewritten to "localhost" so generated links
 * are reachable from a browser; the scheme flips to https in production.
 */
const getBaseUrl = (): string => {
  const rawHost = process.env.HOST || "localhost";
  const host = rawHost === "0.0.0.0" ? "localhost" : rawHost;
  const port = process.env.PORT || "5000";
  const scheme = process.env.NODE_ENV === "production" ? "https" : "http";
  return `${scheme}://${host}:${port}`;
};
/** Create the upload directory on first use if it does not already exist. */
const ensureUploadDir = async () => {
  const reachable = await fs
    .access(UPLOAD_DIR)
    .then(() => true)
    .catch(() => false);
  if (!reachable) {
    await fs.mkdir(UPLOAD_DIR, { recursive: true });
  }
};
/**
 * Derive a collision-resistant filename of the form
 * "<base>-<epoch-millis>-<8 hex chars><ext>".
 */
const generateUniqueFilename = (originalName: string): string => {
  const ext = path.extname(originalName);
  const base = path.basename(originalName, ext);
  const suffix = `${Date.now()}-${randomBytes(4).toString("hex")}`;
  return `${base}-${suffix}${ext}`;
};
export const patientDocumentsStorage = {
  /**
   * Save an uploaded document: write the bytes to disk under a unique name,
   * then record metadata (with a fully-qualified download URL in filePath)
   * in the database. The file is written before the insert so a failed
   * insert never produces a DB row pointing at a missing file.
   */
  createPatientDocument: async (
    patientId: number,
    filename: string,
    originalName: string,
    mimeType: string,
    fileSize: number,
    buffer: Buffer
  ): Promise<PatientDocument> => {
    await ensureUploadDir();
    const uniqueFilename = generateUniqueFilename(filename);
    const localFilePath = path.join(UPLOAD_DIR, uniqueFilename);
    // Save file to disk first.
    await fs.writeFile(localFilePath, buffer);
    // filePath stores the public URL (not the local path) so clients can
    // download the file directly.
    const fileUrl = `${getBaseUrl()}/uploads/patient-documents/${uniqueFilename}`;
    const document = await db.patientDocument.create({
      data: {
        patientId,
        filename: uniqueFilename,
        originalName,
        mimeType,
        fileSize: BigInt(fileSize),
        filePath: fileUrl,
      },
    });
    return document;
  },

  /** All of a patient's documents, newest upload first. */
  getDocumentsByPatientId: async (
    patientId: number
  ): Promise<PatientDocument[]> => {
    return await db.patientDocument.findMany({
      where: { patientId },
      orderBy: { uploadedAt: "desc" },
    });
  },

  /** One document by id with the owning patient's name attached; null when absent. */
  getDocumentById: async (id: number): Promise<PatientDocument | null> => {
    return await db.patientDocument.findUnique({
      where: { id },
      include: {
        patient: {
          select: {
            firstName: true,
            lastName: true,
          },
        },
      },
    });
  },

  /**
   * Load a document's bytes from disk alongside its metadata.
   * Returns null when the DB row is missing or the file cannot be read.
   */
  getDocumentFile: async (
    id: number
  ): Promise<{ buffer: Buffer; document: PatientDocument } | null> => {
    const document = await db.patientDocument.findUnique({
      where: { id },
    });
    if (!document) {
      return null;
    }
    try {
      // filePath holds the public URL; translate it back to the local path.
      const localFilePath = getLocalFilePath(document.filePath);
      const buffer = await fs.readFile(localFilePath);
      return { buffer, document };
    } catch (error) {
      console.error("Error reading file:", error);
      return null;
    }
  },

  /**
   * Delete a document's on-disk file and its DB row.
   * A file that is already gone from disk (ENOENT) is tolerated so the DB
   * record can still be removed — previously such an orphaned row could
   * never be deleted through this API. Any other filesystem or database
   * failure is logged and reported as false.
   */
  deleteDocument: async (id: number): Promise<boolean> => {
    const document = await db.patientDocument.findUnique({
      where: { id },
    });
    if (!document) {
      return false;
    }
    try {
      const localFilePath = getLocalFilePath(document.filePath);
      try {
        await fs.unlink(localFilePath);
      } catch (err) {
        // Re-raise anything other than "file not found".
        if ((err as { code?: string })?.code !== "ENOENT") throw err;
      }
      await db.patientDocument.delete({
        where: { id },
      });
      return true;
    } catch (error) {
      console.error("Error deleting document:", error);
      return false;
    }
  },

  /**
   * Patch document metadata.
   * Returns null when the row does not exist (Prisma error code P2025),
   * matching the declared return type; other errors are rethrown.
   */
  updateDocument: async (
    id: number,
    data: Partial<CreatePatientDocument>
  ): Promise<PatientDocument | null> => {
    try {
      return await db.patientDocument.update({
        where: { id },
        data,
      });
    } catch (err) {
      if ((err as { code?: string })?.code === "P2025") return null;
      throw err;
    }
  },

  /** Page of a patient's documents plus the total count for pagination UIs. */
  getDocumentsByPatientIdPaginated: async (
    patientId: number,
    limit: number,
    offset: number
  ): Promise<{ documents: PatientDocument[]; total: number }> => {
    const [documents, total] = await Promise.all([
      db.patientDocument.findMany({
        where: { patientId },
        orderBy: { uploadedAt: "desc" },
        skip: offset,
        take: limit,
      }),
      db.patientDocument.count({
        where: { patientId },
      }),
    ]);
    return { documents, total };
  },
};

View File

@@ -0,0 +1,287 @@
import {
FinancialRow,
InsertPatient,
Patient,
UpdatePatient,
} from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
// Contract for patient CRUD, search, and financial-history queries.
export interface IStorage {
  // Patient methods

  /** One patient by id; undefined when absent. */
  getPatient(id: number): Promise<Patient | undefined>;
  /** First patient with the given insurance member id; null when none. */
  getPatientByInsuranceId(insuranceId: string): Promise<Patient | null>;
  /** All patients created under a user account. */
  getPatientsByUserId(userId: number): Promise<Patient[]>;
  /** Page of patients ordered by creation time. */
  getRecentPatients(limit: number, offset: number): Promise<Patient[]>;
  /** Patients matching a list of ids. */
  getPatientsByIds(ids: number[]): Promise<Patient[]>;
  /** Insert a new patient. */
  createPatient(patient: InsertPatient): Promise<Patient>;
  /** Update a patient; implementations throw when the row is missing. */
  updatePatient(id: number, patient: UpdatePatient): Promise<Patient>;
  /** Delete a patient; throws on failure. */
  deletePatient(id: number): Promise<void>;
  /** Filtered, paginated search returning list-view columns only. */
  searchPatients(args: {
    filters: any;
    limit: number;
    offset: number;
  }): Promise<
    {
      id: number;
      firstName: string | null;
      lastName: string | null;
      phone: string | null;
      gender: string | null;
      dateOfBirth: Date;
      insuranceId: string | null;
      insuranceProvider: string | null;
      status: string;
    }[]
  >;
  /** Count of every patient row. */
  getTotalPatientCount(): Promise<number>;
  /** Count of patients matching the filters. */
  countPatients(filters: any): Promise<number>; // optional but useful
  /** Combined claims + orphan-payment ledger rows for one patient. */
  getPatientFinancialRows(
    patientId: number,
    limit?: number,
    offset?: number
  ): Promise<{ rows: any[]; totalCount: number }>;
}
export const patientsStorage: IStorage = {
  // Patient methods

  /** One patient by id; undefined when absent. */
  async getPatient(id: number): Promise<Patient | undefined> {
    const patient = await db.patient.findUnique({ where: { id } });
    return patient ?? undefined;
  },

  /** All patients created under a user account. */
  async getPatientsByUserId(userId: number): Promise<Patient[]> {
    return await db.patient.findMany({ where: { userId } });
  },

  /** First patient carrying the given insurance member id; null when none. */
  async getPatientByInsuranceId(insuranceId: string): Promise<Patient | null> {
    return db.patient.findFirst({
      where: { insuranceId },
    });
  },

  /** Page of patients ordered by creation time, newest first. */
  async getRecentPatients(limit: number, offset: number): Promise<Patient[]> {
    return db.patient.findMany({
      skip: offset,
      take: limit,
      orderBy: { createdAt: "desc" },
    });
  },

  /**
   * Fetch patients by a list of ids (duplicates collapsed).
   * Returns [] for an empty input without touching the database.
   */
  async getPatientsByIds(ids: number[]): Promise<Patient[]> {
    if (!ids || ids.length === 0) return [];
    const uniqueIds = Array.from(new Set(ids));
    return db.patient.findMany({
      where: { id: { in: uniqueIds } },
      select: {
        id: true,
        firstName: true,
        lastName: true,
        phone: true,
        email: true,
        dateOfBirth: true,
        gender: true,
        insuranceId: true,
        insuranceProvider: true,
        status: true,
        userId: true,
        createdAt: true,
      },
    });
  },

  /** Insert a new patient row. */
  async createPatient(patient: InsertPatient): Promise<Patient> {
    return await db.patient.create({ data: patient as Patient });
  },

  /**
   * Update a patient.
   * Throws "Patient with ID … not found" only when the row is actually
   * missing (Prisma error code P2025); any other database error is
   * rethrown unchanged instead of being mislabelled as "not found".
   */
  async updatePatient(id: number, updateData: UpdatePatient): Promise<Patient> {
    try {
      return await db.patient.update({
        where: { id },
        data: updateData as Patient,
      });
    } catch (err) {
      if ((err as { code?: string })?.code === "P2025") {
        throw new Error(`Patient with ID ${id} not found`);
      }
      throw err;
    }
  },

  /** Delete a patient; logs and rethrows a wrapped error on failure. */
  async deletePatient(id: number): Promise<void> {
    try {
      await db.patient.delete({ where: { id } });
    } catch (err) {
      console.error("Error deleting patient:", err);
      throw new Error(`Failed to delete patient: ${err}`);
    }
  },

  /** Filtered, paginated patient search returning list-view columns only. */
  async searchPatients({
    filters,
    limit,
    offset,
  }: {
    filters: any;
    limit: number;
    offset: number;
  }) {
    return db.patient.findMany({
      where: filters,
      orderBy: { createdAt: "desc" },
      take: limit,
      skip: offset,
      select: {
        id: true,
        firstName: true,
        lastName: true,
        phone: true,
        gender: true,
        dateOfBirth: true,
        insuranceId: true,
        insuranceProvider: true,
        status: true,
      },
    });
  },

  /** Count of every patient row. */
  async getTotalPatientCount(): Promise<number> {
    return db.patient.count();
  },

  /** Count of patients matching the filters. */
  async countPatients(filters: any) {
    return db.patient.count({ where: filters });
  },

  /** Delegate to getPatientFinancialRowsFn (claims + orphan-payment ledger). */
  async getPatientFinancialRows(patientId: number, limit = 50, offset = 0) {
    return getPatientFinancialRowsFn(patientId, limit, offset);
  },
};
/**
 * Build a unified financial ledger for one patient, combining:
 *  - one row per Claim (with ServiceLine totals aggregated per claim), and
 *  - one row per "orphan" Payment (a Payment whose claimId is NULL).
 * Rows are ordered by creation time descending and paginated with
 * limit/offset; totalCount is the combined count of both row kinds.
 * Monetary totals are computed as numeric in SQL, serialized as text, and
 * converted back to JS numbers in the mapping step at the end.
 */
export const getPatientFinancialRowsFn = async (
  patientId: number,
  limit = 50,
  offset = 0
): Promise<{ rows: FinancialRow[]; totalCount: number }> => {
  try {
    // Count claims and orphan payments separately; their sum is the number
    // of ledger rows available for pagination.
    const [[{ count_claims }], [{ count_orphan_payments }]] =
      (await Promise.all([
        db.$queryRaw`SELECT COUNT(1) AS count_claims FROM "Claim" c WHERE c."patientId" = ${patientId}`,
        db.$queryRaw`SELECT COUNT(1) AS count_orphan_payments FROM "Payment" p WHERE p."patientId" = ${patientId} AND p."claimId" IS NULL`,
      ])) as any;
    const totalCount =
      Number(count_claims ?? 0) + Number(count_orphan_payments ?? 0);
    // Two CTEs produce identically-shaped rows (claims and orphan
    // payments) so they can be UNIONed and sorted/paginated together.
    const rawRows = (await db.$queryRaw`
      WITH claim_rows AS (
        SELECT
          'CLAIM'::text AS type,
          c.id,
          COALESCE(c."serviceDate", c."createdAt")::timestamptz AS date,
          c."createdAt"::timestamptz AS created_at,
          c.status::text AS status,
          COALESCE(sum(sl."totalBilled")::numeric::text, '0') AS total_billed,
          COALESCE(sum(sl."totalPaid")::numeric::text, '0') AS total_paid,
          COALESCE(sum(sl."totalAdjusted")::numeric::text, '0') AS total_adjusted,
          COALESCE(sum(sl."totalDue")::numeric::text, '0') AS total_due,
          (
            SELECT (pat."firstName" || ' ' || pat."lastName") FROM "Patient" pat WHERE pat.id = c."patientId" LIMIT 1
          ) AS patient_name,
          -- linked_payment_id (NULL if none). Schema has unique Payment.claimId so LIMIT 1 is safe.
          (
            SELECT p2.id FROM "Payment" p2 WHERE p2."claimId" = c.id LIMIT 1
          ) AS linked_payment_id,
          (
            SELECT coalesce(json_agg(
              json_build_object(
                'id', sl2.id,
                'procedureCode', sl2."procedureCode",
                'procedureDate', sl2."procedureDate",
                'toothNumber', sl2."toothNumber",
                'toothSurface', sl2."toothSurface",
                'totalBilled', sl2."totalBilled",
                'totalPaid', sl2."totalPaid",
                'totalAdjusted', sl2."totalAdjusted",
                'totalDue', sl2."totalDue",
                'status', sl2.status
              )
            ), '[]'::json)
            FROM "ServiceLine" sl2 WHERE sl2."claimId" = c.id
          ) AS service_lines
        FROM "Claim" c
        LEFT JOIN "ServiceLine" sl ON sl."claimId" = c.id
        WHERE c."patientId" = ${patientId}
        GROUP BY c.id
      ),
      orphan_payment_rows AS (
        SELECT
          'PAYMENT'::text AS type,
          p.id,
          p."createdAt"::timestamptz AS date,
          p."createdAt"::timestamptz AS created_at,
          p.status::text AS status,
          p."totalBilled"::numeric::text AS total_billed,
          p."totalPaid"::numeric::text AS total_paid,
          p."totalAdjusted"::numeric::text AS total_adjusted,
          p."totalDue"::numeric::text AS total_due,
          (
            SELECT (pat."firstName" || ' ' || pat."lastName") FROM "Patient" pat WHERE pat.id = p."patientId" LIMIT 1
          ) AS patient_name,
          -- this payment's id is the linked_payment_id
          p.id AS linked_payment_id,
          (
            SELECT coalesce(json_agg(
              json_build_object(
                'id', sl3.id,
                'procedureCode', sl3."procedureCode",
                'procedureDate', sl3."procedureDate",
                'toothNumber', sl3."toothNumber",
                'toothSurface', sl3."toothSurface",
                'totalBilled', sl3."totalBilled",
                'totalPaid', sl3."totalPaid",
                'totalAdjusted', sl3."totalAdjusted",
                'totalDue', sl3."totalDue",
                'status', sl3.status
              )
            ), '[]'::json)
            FROM "ServiceLine" sl3 WHERE sl3."paymentId" = p.id
          ) AS service_lines
        FROM "Payment" p
        WHERE p."patientId" = ${patientId} AND p."claimId" IS NULL
      )
      SELECT type, id, date, created_at, status, total_billed, total_paid, total_adjusted, total_due, patient_name, linked_payment_id, service_lines
      FROM (
        SELECT * FROM claim_rows
        UNION ALL
        SELECT * FROM orphan_payment_rows
      ) t
      ORDER BY t.created_at DESC
      LIMIT ${limit} OFFSET ${offset}
    `) as any[];
    // map to expected JS shape; convert totals to numbers
    const rows: FinancialRow[] = rawRows.map((r: any) => ({
      type: r.type,
      id: Number(r.id),
      date: r.date ? r.date.toString() : null,
      createdAt: r.created_at ? r.created_at.toString() : null,
      status: r.status ?? null,
      total_billed: Number(r.total_billed ?? 0),
      total_paid: Number(r.total_paid ?? 0),
      total_adjusted: Number(r.total_adjusted ?? 0),
      total_due: Number(r.total_due ?? 0),
      patient_name: r.patient_name ?? null,
      service_lines: r.service_lines ?? [],
      // Claims without a linked payment yield NULL here (mapped to null).
      linked_payment_id: r.linked_payment_id
        ? Number(r.linked_payment_id)
        : null,
    }));
    return { rows, totalCount };
  } catch (err) {
    console.error("getPatientFinancialRowsFn error:", err);
    throw err;
  }
};

View File

@@ -0,0 +1,858 @@
import { prisma } from "@repo/db/client";
import {
GetPatientBalancesResult,
PatientBalanceRow,
} from "../../../../packages/db/types/payments-reports-types";
/**
 * Reporting queries over payments: clinic-wide and per-doctor summaries
 * plus cursor-paginated per-patient balance lists.
 */
export interface IPaymentsReportsStorage {
  // summary now returns an extra field patientsWithBalance
  /** Clinic-wide totals over the optional date range. */
  getSummary(
    from?: Date | null,
    to?: Date | null
  ): Promise<{
    totalPatients: number;
    totalOutstanding: number;
    totalCollected: number;
    patientsWithBalance: number;
  }>;
  /**
   * Cursor-based pagination:
   * - limit: page size
   * - cursorToken: base64(JSON) token for last-seen row (or null for first page)
   * - from/to: optional date range filter applied to Payment."createdAt"
   */
  getPatientsWithBalances(
    limit: number,
    cursorToken?: string | null,
    from?: Date | null,
    to?: Date | null
  ): Promise<GetPatientBalancesResult>;
  /**
   * Returns the paginated patient balances for a specific staff (doctor).
   * Same semantics / columns / ordering / cursor behavior as the previous combined function.
   *
   * - staffId required
   * - limit: page size
   * - cursorToken: optional base64 cursor (must have been produced for same staffId)
   * - from/to: optional date range applied to Payment."createdAt"
   */
  getPatientsBalancesByDoctor(
    staffId: number,
    limit: number,
    cursorToken?: string | null,
    from?: Date | null,
    to?: Date | null
  ): Promise<GetPatientBalancesResult>;
  /**
   * Returns only the summary object for the given staff (doctor).
   * Same summary shape as getSummary(), but scoped to claims/payments associated with the given staffId.
   */
  getSummaryByDoctor(
    staffId: number,
    from?: Date | null,
    to?: Date | null
  ): Promise<{
    totalPatients: number;
    totalOutstanding: number;
    totalCollected: number;
    patientsWithBalance: number;
  }>;
}
/**
 * Quoted ISO literal for the inclusive UTC start-of-day (00:00:00.000Z)
 * of the given date; null passthrough for missing input.
 */
function isoStartOfDayLiteral(d?: Date | null): string | null {
  if (!d) return null;
  const midnight = new Date(d);
  midnight.setUTCHours(0, 0, 0, 0);
  return "'" + midnight.toISOString() + "'";
}
/**
 * Quoted ISO literal for UTC midnight of the day AFTER the given date —
 * the exclusive upper bound of a date range; null passthrough.
 */
function isoStartOfNextDayLiteral(d?: Date | null): string | null {
  if (!d) return null;
  const next = new Date(d);
  next.setUTCHours(0, 0, 0, 0);
  next.setUTCDate(next.getUTCDate() + 1);
  return "'" + next.toISOString() + "'";
}
/** Cursor helpers — base64(JSON). Token shape (backwards compatible):
 * { staffId?: number, lastPaymentDate: string | null, lastPatientId: number, lastPaymentMs?: number | null }
 */
function encodeCursor(obj: {
  staffId?: number;
  lastPaymentDate: string | null;
  lastPatientId: number;
  lastPaymentMs?: number | null;
}) {
  const json = JSON.stringify(obj);
  return Buffer.from(json).toString("base64");
}
/**
 * Inverse of encodeCursor. Returns null for missing, malformed, or
 * structurally invalid tokens. Older tokens without staffId/lastPaymentMs
 * are still accepted; those fields come back undefined.
 */
function decodeCursor(token?: string | null): {
  staffId?: number; // optional because older cursors might not include it
  lastPaymentDate: string | null;
  lastPatientId: number;
  lastPaymentMs?: number | null;
} | null {
  if (!token) return null;
  try {
    const raw = Buffer.from(token, "base64").toString("utf8");
    const parsed = JSON.parse(raw);
    const valid =
      typeof parsed === "object" &&
      "lastPaymentDate" in parsed &&
      "lastPatientId" in parsed;
    if (!valid) return null;
    const rec = parsed as any;
    return {
      staffId: "staffId" in rec ? Number(rec.staffId) : undefined,
      lastPaymentDate:
        rec.lastPaymentDate === null ? null : String(rec.lastPaymentDate),
      lastPatientId: Number(rec.lastPatientId),
      lastPaymentMs:
        "lastPaymentMs" in rec
          ? rec.lastPaymentMs === null
            ? null
            : Number(rec.lastPaymentMs)
          : undefined,
    };
  } catch {
    return null;
  }
}
/**
 * Raw-SQL reporting storage: clinic-wide and per-doctor payment summaries and
 * per-patient outstanding-balance listings with keyset (cursor) pagination.
 *
 * Date-window semantics: `from` is inclusive at start-of-day and `to` is
 * EXCLUSIVE at start-of-next-day, so all comparisons against `toNextStart`
 * use `<` (previously `<=`, which wrongly included payments stamped exactly
 * at the following midnight).
 *
 * NOTE(review): queries are assembled by string interpolation and executed
 * through prisma.$queryRawUnsafe. isoStartOfDayLiteral /
 * isoStartOfNextDayLiteral are assumed to emit safe quoted SQL literals —
 * TODO confirm. Cursor-derived values come from a client-supplied token, so
 * they are re-parsed/re-serialized (dates) and finiteness-checked (numbers)
 * before being interpolated, closing an injection / invalid-SQL hole.
 */
export const paymentsReportsStorage: IPaymentsReportsStorage = {
  /**
   * Clinic-wide summary, optionally restricted to a payment `createdAt`
   * window. Returns:
   *  - totalPatients: distinct patients with payments in range
   *  - totalOutstanding: sum over patients of (charges - paid - adjusted)
   *  - totalCollected: sum of totalPaid in range
   *  - patientsWithBalance: patients whose balance in range is > 0
   */
  async getSummary(from?: Date | null, to?: Date | null) {
    try {
      const hasFrom = from !== undefined && from !== null;
      const hasTo = to !== undefined && to !== null;
      // Inclusive start-of-day for 'from'; exclusive start-of-next-day for 'to'.
      const fromStart = isoStartOfDayLiteral(from); // 'YYYY-MM-DDT00:00:00.000Z'
      const toNextStart = isoStartOfNextDayLiteral(to); // next day's start
      // totalPatients: distinct patients who had payments in the date range
      let patientsCountSql = "";
      if (hasFrom && hasTo) {
        patientsCountSql = `
          SELECT COUNT(*)::int AS cnt FROM (
            SELECT pay."patientId" AS patient_id
            FROM "Payment" pay
            WHERE pay."createdAt" >= ${fromStart} AND pay."createdAt" < ${toNextStart}
            GROUP BY pay."patientId"
          ) t
        `;
      } else if (hasFrom) {
        patientsCountSql = `
          SELECT COUNT(*)::int AS cnt FROM (
            SELECT pay."patientId" AS patient_id
            FROM "Payment" pay
            WHERE pay."createdAt" >= ${fromStart}
            GROUP BY pay."patientId"
          ) t
        `;
      } else if (hasTo) {
        patientsCountSql = `
          SELECT COUNT(*)::int AS cnt FROM (
            SELECT pay."patientId" AS patient_id
            FROM "Payment" pay
            WHERE pay."createdAt" < ${toNextStart}
            GROUP BY pay."patientId"
          ) t
        `;
      } else {
        patientsCountSql = `SELECT COUNT(DISTINCT "patientId")::int AS cnt FROM "Payment"`;
      }
      const patientsCntRows = (await prisma.$queryRawUnsafe(
        patientsCountSql
      )) as { cnt: number }[];
      const totalPatients = patientsCntRows?.[0]?.cnt ?? 0;
      // totalOutstanding: sum of (charges - paid - adjusted) across patients,
      // using payments in range
      let outstandingSql = "";
      if (hasFrom && hasTo) {
        outstandingSql = `
          SELECT COALESCE(SUM(
            COALESCE(pm.total_charges,0) - COALESCE(pm.total_paid,0) - COALESCE(pm.total_adjusted,0)
          ),0)::numeric(14,2) AS outstanding
          FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            WHERE pay."createdAt" >= ${fromStart} AND pay."createdAt" < ${toNextStart}
            GROUP BY pay."patientId"
          ) pm
        `;
      } else if (hasFrom) {
        outstandingSql = `
          SELECT COALESCE(SUM(
            COALESCE(pm.total_charges,0) - COALESCE(pm.total_paid,0) - COALESCE(pm.total_adjusted,0)
          ),0)::numeric(14,2) AS outstanding
          FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            WHERE pay."createdAt" >= ${fromStart}
            GROUP BY pay."patientId"
          ) pm
        `;
      } else if (hasTo) {
        outstandingSql = `
          SELECT COALESCE(SUM(
            COALESCE(pm.total_charges,0) - COALESCE(pm.total_paid,0) - COALESCE(pm.total_adjusted,0)
          ),0)::numeric(14,2) AS outstanding
          FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            WHERE pay."createdAt" < ${toNextStart}
            GROUP BY pay."patientId"
          ) pm
        `;
      } else {
        outstandingSql = `
          SELECT COALESCE(SUM(
            COALESCE(pm.total_charges,0) - COALESCE(pm.total_paid,0) - COALESCE(pm.total_adjusted,0)
          ),0)::numeric(14,2) AS outstanding
          FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            GROUP BY pay."patientId"
          ) pm
        `;
      }
      const outstandingRows = (await prisma.$queryRawUnsafe(
        outstandingSql
      )) as { outstanding: string }[];
      const totalOutstanding = Number(outstandingRows?.[0]?.outstanding ?? 0);
      // totalCollected: sum(totalPaid) in the range
      let collSql = "";
      if (hasFrom && hasTo) {
        collSql = `SELECT COALESCE(SUM("totalPaid"),0)::numeric(14,2) AS collected FROM "Payment" WHERE "createdAt" >= ${fromStart} AND "createdAt" < ${toNextStart}`;
      } else if (hasFrom) {
        collSql = `SELECT COALESCE(SUM("totalPaid"),0)::numeric(14,2) AS collected FROM "Payment" WHERE "createdAt" >= ${fromStart}`;
      } else if (hasTo) {
        collSql = `SELECT COALESCE(SUM("totalPaid"),0)::numeric(14,2) AS collected FROM "Payment" WHERE "createdAt" < ${toNextStart}`;
      } else {
        collSql = `SELECT COALESCE(SUM("totalPaid"),0)::numeric(14,2) AS collected FROM "Payment"`;
      }
      const collRows = (await prisma.$queryRawUnsafe(collSql)) as {
        collected: string;
      }[];
      const totalCollected = Number(collRows?.[0]?.collected ?? 0);
      // patientsWithBalance: patients whose (charges - paid - adjusted) > 0
      // within the date range
      let patientsWithBalanceSql = "";
      if (hasFrom && hasTo) {
        patientsWithBalanceSql = `
          SELECT COUNT(*)::int AS cnt FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            WHERE pay."createdAt" >= ${fromStart} AND pay."createdAt" < ${toNextStart}
            GROUP BY pay."patientId"
          ) t
          WHERE (COALESCE(t.total_charges,0) - COALESCE(t.total_paid,0) - COALESCE(t.total_adjusted,0)) > 0
        `;
      } else if (hasFrom) {
        patientsWithBalanceSql = `
          SELECT COUNT(*)::int AS cnt FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            WHERE pay."createdAt" >= ${fromStart}
            GROUP BY pay."patientId"
          ) t
          WHERE (COALESCE(t.total_charges,0) - COALESCE(t.total_paid,0) - COALESCE(t.total_adjusted,0)) > 0
        `;
      } else if (hasTo) {
        patientsWithBalanceSql = `
          SELECT COUNT(*)::int AS cnt FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            WHERE pay."createdAt" < ${toNextStart}
            GROUP BY pay."patientId"
          ) t
          WHERE (COALESCE(t.total_charges,0) - COALESCE(t.total_paid,0) - COALESCE(t.total_adjusted,0)) > 0
        `;
      } else {
        patientsWithBalanceSql = `
          SELECT COUNT(*)::int AS cnt FROM (
            SELECT pay."patientId" AS patient_id,
              SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
              SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
              SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
            FROM "Payment" pay
            GROUP BY pay."patientId"
          ) t
          WHERE (COALESCE(t.total_charges,0) - COALESCE(t.total_paid,0) - COALESCE(t.total_adjusted,0)) > 0
        `;
      }
      const pwbRows = (await prisma.$queryRawUnsafe(
        patientsWithBalanceSql
      )) as { cnt: number }[];
      const patientsWithBalance = pwbRows?.[0]?.cnt ?? 0;
      return {
        totalPatients,
        totalOutstanding,
        totalCollected,
        patientsWithBalance,
      };
    } catch (err) {
      console.error("[paymentsReportsStorage.getSummary] error:", err);
      throw err;
    }
  },
  /**
   * Cursor-based (keyset) listing of all patients that currently have an
   * outstanding balance (> 0), optionally filtered by payment date range.
   * Ordering: last_payment_date DESC NULLS LAST, patient id DESC.
   */
  async getPatientsWithBalances(
    limit = 25,
    cursorToken?: string | null,
    from?: Date | null,
    to?: Date | null
  ) {
    try {
      type RawRow = {
        patient_id: number;
        first_name: string | null;
        last_name: string | null;
        total_charges: string;
        total_paid: string;
        total_adjusted: string;
        current_balance: string;
        last_payment_date: Date | null;
        last_appointment_date: Date | null;
      };
      const safeLimit = Math.max(1, Math.min(200, Number(limit) || 25));
      const cursor = decodeCursor(cursorToken);
      const hasFrom = from !== undefined && from !== null;
      const hasTo = to !== undefined && to !== null;
      // Inclusive start-of-day for 'from'; exclusive start-of-next-day for 'to'.
      const fromStart = isoStartOfDayLiteral(from);
      const toNextStart = isoStartOfNextDayLiteral(to);
      // Aggregated payments by patient, filtered by createdAt when provided.
      const paymentWhereClause =
        hasFrom && hasTo
          ? `WHERE pay."createdAt" >= ${fromStart} AND pay."createdAt" < ${toNextStart}`
          : hasFrom
            ? `WHERE pay."createdAt" >= ${fromStart}`
            : hasTo
              ? `WHERE pay."createdAt" < ${toNextStart}`
              : "";
      const pmSubquery = `
        (
          SELECT
            pay."patientId" AS patient_id,
            SUM(pay."totalBilled")::numeric(12,2) AS total_charges,
            SUM(pay."totalPaid")::numeric(12,2) AS total_paid,
            SUM(pay."totalAdjusted")::numeric(12,2) AS total_adjusted,
            MAX(pay."createdAt") AS last_payment_date
          FROM "Payment" pay
          ${paymentWhereClause}
          GROUP BY pay."patientId"
        ) pm
      `;
      // Keyset predicate: fetch rows strictly "after" the cursor in the
      // ordering (last_payment_date DESC NULLS LAST, p.id DESC).
      let keysetPredicate = "";
      if (cursor && Number.isFinite(Number(cursor.lastPatientId))) {
        // Sanitize the client-supplied cursor date: re-parse and re-serialize
        // so only a canonical ISO literal (or NULL) is ever interpolated into
        // the SQL text.
        const parsedLp =
          cursor.lastPaymentDate &&
          !Number.isNaN(Date.parse(cursor.lastPaymentDate))
            ? new Date(cursor.lastPaymentDate).toISOString()
            : null;
        const lp = parsedLp ? `'${parsedLp}'` : "NULL";
        const id = Number(cursor.lastPatientId);
        // NULLS LAST: rows with NULL last_payment_date sort after any
        // non-null dates, so the predicate covers both cursor cases.
        keysetPredicate = `
          AND (
            (pm.last_payment_date IS NOT NULL AND ${lp} IS NOT NULL AND (
              pm.last_payment_date < ${lp}
              OR (pm.last_payment_date = ${lp} AND p.id < ${id})
            ))
            OR (pm.last_payment_date IS NULL AND ${lp} IS NOT NULL)
            OR (pm.last_payment_date IS NULL AND ${lp} IS NULL AND p.id < ${id})
          )
        `;
      }
      const baseSelect = `
        SELECT
          p.id AS patient_id,
          p."firstName" AS first_name,
          p."lastName" AS last_name,
          COALESCE(pm.total_charges,0)::numeric(12,2) AS total_charges,
          COALESCE(pm.total_paid,0)::numeric(12,2) AS total_paid,
          COALESCE(pm.total_adjusted,0)::numeric(12,2) AS total_adjusted,
          (COALESCE(pm.total_charges,0) - COALESCE(pm.total_paid,0) - COALESCE(pm.total_adjusted,0))::numeric(12,2) AS current_balance,
          pm.last_payment_date,
          apt.last_appointment_date
        FROM "Patient" p
        LEFT JOIN ${pmSubquery} ON pm.patient_id = p.id
        LEFT JOIN (
          SELECT "patientId" AS patient_id, MAX("date") AS last_appointment_date
          FROM "Appointment"
          GROUP BY "patientId"
        ) apt ON apt.patient_id = p.id
        WHERE (COALESCE(pm.total_charges,0) - COALESCE(pm.total_paid,0) - COALESCE(pm.total_adjusted,0)) > 0
      `;
      const orderBy = `ORDER BY pm.last_payment_date DESC NULLS LAST, p.id DESC`;
      const limitClause = `LIMIT ${safeLimit}`;
      const query = `
        ${baseSelect}
        ${keysetPredicate}
        ${orderBy}
        ${limitClause};
      `;
      const rows = (await prisma.$queryRawUnsafe(query)) as RawRow[];
      // Build nextCursor from the last returned row (if a full page came back).
      let nextCursor: string | null = null;
      const last = rows.length > 0 ? rows[rows.length - 1] : undefined;
      if (last && rows.length === safeLimit) {
        const lastPaymentDateIso = last.last_payment_date
          ? new Date(last.last_payment_date).toISOString()
          : null;
        nextCursor = encodeCursor({
          lastPaymentDate: lastPaymentDateIso,
          lastPatientId: Number(last.patient_id),
        });
      }
      // hasMore: a full page *may* be followed by more rows.
      const hasMore = rows.length === safeLimit;
      const balances: PatientBalanceRow[] = rows.map((r) => ({
        patientId: Number(r.patient_id),
        firstName: r.first_name,
        lastName: r.last_name,
        totalCharges: Number(r.total_charges ?? 0),
        totalPayments: Number(r.total_paid ?? 0),
        totalAdjusted: Number(r.total_adjusted ?? 0),
        currentBalance: Number(r.current_balance ?? 0),
        lastPaymentDate: r.last_payment_date
          ? new Date(r.last_payment_date).toISOString()
          : null,
        lastAppointmentDate: r.last_appointment_date
          ? new Date(r.last_appointment_date).toISOString()
          : null,
      }));
      // totalCount: patients with positive balance under the same date filter.
      const countSql = `
        SELECT COUNT(*)::int AS cnt FROM (
          SELECT pay."patientId" AS patient_id,
            SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
            SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
            SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
          FROM "Payment" pay
          ${paymentWhereClause}
          GROUP BY pay."patientId"
        ) t
        WHERE (COALESCE(t.total_charges,0) - COALESCE(t.total_paid,0) - COALESCE(t.total_adjusted,0)) > 0;
      `;
      const cntRows = (await prisma.$queryRawUnsafe(countSql)) as {
        cnt: number;
      }[];
      const totalCount = cntRows?.[0]?.cnt ?? 0;
      return {
        balances,
        totalCount,
        nextCursor,
        hasMore,
      };
    } catch (err) {
      console.error("[paymentsReportsStorage.getPatientBalances] error:", err);
      throw err;
    }
  },
  /**
   * Paged balances for a single doctor (staffId). Payments are attributed to
   * the doctor through Claim.staffId; patients are restricted to those with
   * at least one appointment with the doctor. Cursor tokens embed staffId and
   * are rejected when they belong to a different listing.
   */
  async getPatientsBalancesByDoctor(
    staffId: number,
    limit = 25,
    cursorToken?: string | null,
    from?: Date | null,
    to?: Date | null
  ): Promise<{
    balances: PatientBalanceRow[];
    totalCount: number;
    nextCursor: string | null;
    hasMore: boolean;
  }> {
    if (!Number.isFinite(Number(staffId)) || Number(staffId) <= 0) {
      throw new Error("Invalid staffId");
    }
    const safeLimit = Math.max(1, Math.min(200, Number(limit) || 25));
    const decoded = decodeCursor(cursorToken);
    // Do NOT accept cursors without staffId — they may belong to another listing.
    const effectiveCursor =
      decoded &&
      typeof decoded.staffId === "number" &&
      decoded.staffId === Number(staffId)
        ? decoded
        : null;
    const hasFrom = from !== undefined && from !== null;
    const hasTo = to !== undefined && to !== null;
    // Inclusive start-of-day for 'from'; exclusive start-of-next-day for 'to'.
    const fromStart = isoStartOfDayLiteral(from);
    const toNextStart = isoStartOfNextDayLiteral(to);
    // Filter payments by createdAt (time window) when provided
    const paymentTimeFilter =
      hasFrom && hasTo
        ? `AND pay."createdAt" >= ${fromStart} AND pay."createdAt" < ${toNextStart}`
        : hasFrom
          ? `AND pay."createdAt" >= ${fromStart}`
          : hasTo
            ? `AND pay."createdAt" < ${toNextStart}`
            : "";
    // Keyset predicate — prefer numeric epoch-ms comparison for stability.
    let pageKeysetPredicate = "";
    if (
      effectiveCursor &&
      Number.isFinite(Number(effectiveCursor.lastPatientId))
    ) {
      // Use epoch ms if present in the cursor (more precise); otherwise fall
      // back to a sanitized timestamptz literal (older cursor format).
      const hasCursorMs =
        typeof effectiveCursor.lastPaymentMs === "number" &&
        Number.isFinite(effectiveCursor.lastPaymentMs);
      const id = Number(effectiveCursor.lastPatientId);
      if (hasCursorMs) {
        const lpMs = Number(effectiveCursor.lastPaymentMs);
        // Compare numeric epoch ms; handle NULL last_payment_date rows too.
        pageKeysetPredicate = `
          AND (
            (p.last_payment_ms IS NOT NULL AND ${lpMs} IS NOT NULL AND (
              p.last_payment_ms < ${lpMs}
              OR (p.last_payment_ms = ${lpMs} AND p.id < ${id})
            ))
            OR (p.last_payment_ms IS NULL AND ${lpMs} IS NOT NULL)
            OR (p.last_payment_ms IS NULL AND ${lpMs} IS NULL AND p.id < ${id})
          )
        `;
      } else {
        // Sanitize the client-supplied cursor date before interpolation.
        const parsedLp =
          effectiveCursor.lastPaymentDate &&
          !Number.isNaN(Date.parse(effectiveCursor.lastPaymentDate))
            ? new Date(effectiveCursor.lastPaymentDate).toISOString()
            : null;
        const lpLiteral = parsedLp ? `('${parsedLp}'::timestamptz)` : "NULL";
        pageKeysetPredicate = `
          AND (
            (p.last_payment_date IS NOT NULL AND ${lpLiteral} IS NOT NULL AND (
              p.last_payment_date < ${lpLiteral}
              OR (p.last_payment_date = ${lpLiteral} AND p.id < ${id})
            ))
            OR (p.last_payment_date IS NULL AND ${lpLiteral} IS NOT NULL)
            OR (p.last_payment_date IS NULL AND ${lpLiteral} IS NULL AND p.id < ${id})
          )
        `;
      }
    }
    // With a date window, only patients who actually have payments in range
    // are listed (INNER JOIN); otherwise include all of the doctor's patients.
    const paymentsJoinForPatients =
      hasFrom || hasTo
        ? "INNER JOIN payments_agg pa ON pa.patient_id = p.id"
        : "LEFT JOIN payments_agg pa ON pa.patient_id = p.id";
    // Common CTEs shared by the page query and the count query.
    const commonCtes = `
      WITH
      staff_patients AS (
        SELECT DISTINCT "patientId" AS patient_id
        FROM "Appointment"
        WHERE "staffId" = ${Number(staffId)}
      ),
      payments_agg AS (
        SELECT
          pay."patientId" AS patient_id,
          SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
          SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
          SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted,
          MAX(pay."createdAt") AS last_payment_date
        FROM "Payment" pay
        JOIN "Claim" c ON pay."claimId" = c.id
        WHERE c."staffId" = ${Number(staffId)}
        ${paymentTimeFilter}
        GROUP BY pay."patientId"
      ),
      last_appointments AS (
        SELECT "patientId" AS patient_id, MAX("date") AS last_appointment_date
        FROM "Appointment"
        GROUP BY "patientId"
      ),
      patients AS (
        SELECT
          p.id,
          p."firstName" AS first_name,
          p."lastName" AS last_name,
          COALESCE(pa.total_charges, 0)::numeric(14,2) AS total_charges,
          COALESCE(pa.total_paid, 0)::numeric(14,2) AS total_paid,
          COALESCE(pa.total_adjusted, 0)::numeric(14,2) AS total_adjusted,
          (COALESCE(pa.total_charges,0) - COALESCE(pa.total_paid,0) - COALESCE(pa.total_adjusted,0))::numeric(14,2) AS current_balance,
          pa.last_payment_date,
          -- epoch milliseconds for last payment date (NULL when last_payment_date is NULL)
          (CASE WHEN pa.last_payment_date IS NULL THEN NULL
                ELSE (EXTRACT(EPOCH FROM (pa.last_payment_date AT TIME ZONE 'UTC')) * 1000)::bigint
           END) AS last_payment_ms,
          la.last_appointment_date
        FROM "Patient" p
        INNER JOIN staff_patients sp ON sp.patient_id = p.id
        ${paymentsJoinForPatients}
        LEFT JOIN last_appointments la ON la.patient_id = p.id
      )
    `;
    // Fetch one extra row to detect whether there's a following page.
    const fetchLimit = safeLimit + 1;
    const balancesQuery = `
      ${commonCtes}
      SELECT COALESCE(json_agg(row_to_json(t)), '[]'::json) AS balances_json FROM (
        SELECT
          p.id AS "patientId",
          p.first_name AS "firstName",
          p.last_name AS "lastName",
          p.total_charges::text AS "totalCharges",
          p.total_paid::text AS "totalPaid",
          p.total_adjusted::text AS "totalAdjusted",
          p.current_balance::text AS "currentBalance",
          -- ISO text for UI (optional)
          to_char(p.last_payment_date AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"') AS "lastPaymentDate",
          -- epoch ms (number) used for precise keyset comparisons
          p.last_payment_ms::bigint AS "lastPaymentMs",
          to_char(p.last_appointment_date AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"') AS "lastAppointmentDate"
        FROM patients p
        WHERE 1=1
        ${pageKeysetPredicate}
        ORDER BY p.last_payment_date DESC NULLS LAST, p.id DESC
        LIMIT ${fetchLimit}
      ) t;
    `;
    const balancesRawRows = (await prisma.$queryRawUnsafe(
      balancesQuery
    )) as Array<{ balances_json?: any }>;
    const balancesJson = (balancesRawRows?.[0]?.balances_json as any) ?? [];
    const fetchedArr = Array.isArray(balancesJson) ? balancesJson : [];
    // If we fetched > safeLimit, there is another page.
    let hasMore = false;
    let pageRows = fetchedArr;
    if (fetchedArr.length > safeLimit) {
      hasMore = true;
      pageRows = fetchedArr.slice(0, safeLimit);
    }
    const balances: PatientBalanceRow[] = (pageRows || []).map((r: any) => ({
      patientId: Number(r.patientId),
      firstName: r.firstName ?? null,
      lastName: r.lastName ?? null,
      totalCharges: Number(r.totalCharges ?? 0),
      totalPayments: Number(r.totalPaid ?? 0),
      totalAdjusted: Number(r.totalAdjusted ?? 0),
      currentBalance: Number(r.currentBalance ?? 0),
      lastPaymentDate: r.lastPaymentDate
        ? new Date(r.lastPaymentDate).toISOString()
        : null,
      lastAppointmentDate: r.lastAppointmentDate
        ? new Date(r.lastAppointmentDate).toISOString()
        : null,
    }));
    // Build nextCursor only when we actually have more rows.
    let nextCursor: string | null = null;
    if (hasMore && balances.length > 0) {
      const last = balances[balances.length - 1];
      if (last) {
        // Read the raw JSON row for the last page entry so we can carry the
        // precise numeric epoch-ms into the cursor.
        const corresponding = (pageRows as any[])[pageRows.length - 1];
        const lastPaymentMs =
          typeof corresponding?.lastPaymentMs === "number"
            ? Number(corresponding.lastPaymentMs)
            : corresponding?.lastPaymentMs === null
              ? null
              : undefined;
        nextCursor = encodeCursor({
          staffId: Number(staffId),
          lastPaymentDate: last.lastPaymentDate ?? null,
          lastPatientId: Number(last.patientId),
          lastPaymentMs: lastPaymentMs ?? null,
        });
      }
    }
    // Count query. NOTE(review): with a date filter this counts distinct
    // payers in range; without one it counts ALL of the doctor's patients
    // regardless of balance — confirm this asymmetry is intended.
    const countQuery = `
      ${commonCtes}
      SELECT
        (CASE WHEN ${hasFrom || hasTo ? "true" : "false"} THEN
          (SELECT COUNT(DISTINCT pa.patient_id) FROM payments_agg pa)
        ELSE
          (SELECT COUNT(*)::int FROM staff_patients)
        END) AS total_count;
    `;
    const countRows = (await prisma.$queryRawUnsafe(countQuery)) as Array<{
      total_count?: number;
    }>;
    const totalCount = Number(countRows?.[0]?.total_count ?? 0);
    return {
      balances,
      totalCount,
      nextCursor,
      hasMore,
    };
  },
  /**
   * Summary restricted to a single doctor (via Claim.staffId), using the
   * same window semantics and aggregation as getSummary.
   */
  async getSummaryByDoctor(
    staffId: number,
    from?: Date | null,
    to?: Date | null
  ): Promise<{
    totalPatients: number;
    totalOutstanding: number;
    totalCollected: number;
    patientsWithBalance: number;
  }> {
    if (!Number.isFinite(Number(staffId)) || Number(staffId) <= 0) {
      throw new Error("Invalid staffId");
    }
    const hasFrom = from !== undefined && from !== null;
    const hasTo = to !== undefined && to !== null;
    const fromStart = isoStartOfDayLiteral(from);
    const toNextStart = isoStartOfNextDayLiteral(to);
    const paymentTimeFilter =
      hasFrom && hasTo
        ? `AND pay."createdAt" >= ${fromStart} AND pay."createdAt" < ${toNextStart}`
        : hasFrom
          ? `AND pay."createdAt" >= ${fromStart}`
          : hasTo
            ? `AND pay."createdAt" < ${toNextStart}`
            : "";
    const summaryQuery = `
      WITH
      payments_agg AS (
        SELECT
          pay."patientId" AS patient_id,
          SUM(pay."totalBilled")::numeric(14,2) AS total_charges,
          SUM(pay."totalPaid")::numeric(14,2) AS total_paid,
          SUM(pay."totalAdjusted")::numeric(14,2) AS total_adjusted
        FROM "Payment" pay
        JOIN "Claim" c ON pay."claimId" = c.id
        WHERE c."staffId" = ${Number(staffId)}
        ${paymentTimeFilter}
        GROUP BY pay."patientId"
      )
      SELECT json_build_object(
        'totalPatients', COALESCE(COUNT(DISTINCT pa.patient_id),0),
        'totalOutstanding', COALESCE(SUM(COALESCE(pa.total_charges,0) - COALESCE(pa.total_paid,0) - COALESCE(pa.total_adjusted,0)),0)::text,
        'totalCollected', COALESCE(SUM(COALESCE(pa.total_paid,0)),0)::text,
        'patientsWithBalance', COALESCE(SUM(CASE WHEN (COALESCE(pa.total_charges,0) - COALESCE(pa.total_paid,0) - COALESCE(pa.total_adjusted,0)) > 0 THEN 1 ELSE 0 END),0)
      ) AS summary_json
      FROM payments_agg pa;
    `;
    const rows = (await prisma.$queryRawUnsafe(summaryQuery)) as Array<{
      summary_json?: any;
    }>;
    const summaryRaw = (rows?.[0]?.summary_json as any) ?? {};
    return {
      totalPatients: Number(summaryRaw.totalPatients ?? 0),
      totalOutstanding: Number(summaryRaw.totalOutstanding ?? 0),
      totalCollected: Number(summaryRaw.totalCollected ?? 0),
      patientsWithBalance: Number(summaryRaw.patientsWithBalance ?? 0),
    };
  },
};

View File

@@ -0,0 +1,263 @@
import {
InsertPayment,
Payment,
PaymentWithExtras,
UpdatePayment,
} from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
/**
 * Contract for payment persistence operations backed by Prisma.
 * "WithExtras" results include joined claim/service-line/patient relations
 * plus derived display fields (patientName, paymentDate, paymentMethod).
 */
export interface IStorage {
  // Payment methods:
  // Bare payment row by primary key; undefined when not found.
  getPayment(id: number): Promise<Payment | undefined>;
  createPayment(data: InsertPayment): Promise<Payment>;
  // Throws when the id is unknown.
  updatePayment(id: number, updates: UpdatePayment): Promise<Payment>;
  // Like updatePayment, optionally stamping updatedById on the row.
  updatePaymentStatus(
    id: number,
    updates: UpdatePayment,
    updatedById?: number
  ): Promise<Payment>;
  // Deletes only when the payment belongs to userId; otherwise throws.
  deletePayment(id: number, userId: number): Promise<void>;
  getPaymentById(id: number): Promise<PaymentWithExtras | null>;
  // Newest-first page of a patient's payments.
  getRecentPaymentsByPatientId(
    patientId: number,
    limit: number,
    offset: number
  ): Promise<PaymentWithExtras[] | null>;
  getTotalPaymentCountByPatient(patientId: number): Promise<number>;
  // First payment attached to the claim, or null.
  getPaymentsByClaimId(claimId: number): Promise<PaymentWithExtras | null>;
  // Newest-first page across all patients.
  getRecentPayments(
    limit: number,
    offset: number
  ): Promise<PaymentWithExtras[]>;
  // Payments with createdAt in [from, to] (both inclusive).
  getPaymentsByDateRange(from: Date, to: Date): Promise<PaymentWithExtras[]>;
  getTotalPaymentCount(): Promise<number>;
}
/**
 * Prisma-backed implementation of the payment storage contract.
 * Detail queries eagerly load the claim (with its service lines), the
 * payment's own service lines and transactions, the updating user, and the
 * patient, then derive patientName / paymentDate / paymentMethod for display.
 */
export const paymentsStorage: IStorage = {
  // Payment Methods
  /** Bare payment row by primary key; undefined when missing. */
  async getPayment(id: number): Promise<Payment | undefined> {
    const row = await db.payment.findUnique({ where: { id } });
    return row === null ? undefined : row;
  },
  /** Persist a new payment row. */
  async createPayment(payment: InsertPayment): Promise<Payment> {
    return db.payment.create({ data: payment as Payment });
  },
  /** Apply updates to an existing payment; throws when the id is unknown. */
  async updatePayment(id: number, updates: UpdatePayment): Promise<Payment> {
    const current = await db.payment.findFirst({ where: { id } });
    if (current === null) {
      throw new Error("Payment not found");
    }
    return db.payment.update({ where: { id }, data: updates });
  },
  /** Update status fields, optionally recording who made the change. */
  async updatePaymentStatus(
    id: number,
    updates: UpdatePayment,
    updatedById?: number
  ): Promise<Payment> {
    const current = await db.payment.findFirst({ where: { id } });
    if (current === null) {
      throw new Error("Payment not found");
    }
    const data: any = { ...updates };
    if (typeof updatedById === "number") {
      data.updatedById = updatedById;
    }
    return db.payment.update({ where: { id }, data });
  },
  /** Delete a payment only when it belongs to the given user. */
  async deletePayment(id: number, userId: number): Promise<void> {
    const owned = await db.payment.findFirst({ where: { id, userId } });
    if (owned === null) {
      throw new Error("Not authorized or payment not found");
    }
    await db.payment.delete({ where: { id } });
  },
  /** Newest-first page of one patient's payments, with joined details. */
  async getRecentPaymentsByPatientId(
    patientId: number,
    limit: number,
    offset: number
  ): Promise<PaymentWithExtras[]> {
    const rows = await db.payment.findMany({
      where: { patientId },
      orderBy: { createdAt: "desc" },
      skip: offset,
      take: limit,
      include: {
        claim: { include: { serviceLines: true } },
        serviceLines: true,
        serviceLineTransactions: { include: { serviceLine: true } },
        updatedBy: true,
        patient: true,
      },
    });
    return rows.map((row) => ({
      ...row,
      patientName: row.claim?.patientName ?? "",
      paymentDate: row.createdAt,
      paymentMethod: row.serviceLineTransactions[0]?.method ?? "OTHER",
    }));
  },
  /** Number of payments recorded for a patient. */
  async getTotalPaymentCountByPatient(patientId: number): Promise<number> {
    return db.payment.count({ where: { patientId } });
  },
  /** One payment with joined details, or null when not found. */
  async getPaymentById(id: number): Promise<PaymentWithExtras | null> {
    const row = await db.payment.findFirst({
      where: { id },
      include: {
        claim: { include: { serviceLines: true } },
        serviceLines: true,
        serviceLineTransactions: { include: { serviceLine: true } },
        updatedBy: true,
        patient: true,
      },
    });
    if (row === null) {
      return null;
    }
    return {
      ...row,
      patientName: row.claim?.patientName ?? "",
      paymentDate: row.createdAt,
      paymentMethod: row.serviceLineTransactions[0]?.method ?? "OTHER",
    };
  },
  /** First payment attached to a claim, with joined details, or null. */
  async getPaymentsByClaimId(
    claimId: number
  ): Promise<PaymentWithExtras | null> {
    const row = await db.payment.findFirst({
      where: { claimId },
      include: {
        claim: { include: { serviceLines: true } },
        serviceLines: true,
        serviceLineTransactions: { include: { serviceLine: true } },
        updatedBy: true,
        patient: true,
      },
    });
    if (row === null) {
      return null;
    }
    return {
      ...row,
      patientName: row.claim?.patientName ?? "",
      paymentDate: row.createdAt,
      paymentMethod: row.serviceLineTransactions[0]?.method ?? "OTHER",
    };
  },
  /** Newest-first page of payments across all patients. */
  async getRecentPayments(
    limit: number,
    offset: number
  ): Promise<PaymentWithExtras[]> {
    const rows = await db.payment.findMany({
      orderBy: { createdAt: "desc" },
      skip: offset,
      take: limit,
      include: {
        claim: { include: { serviceLines: true } },
        serviceLines: true,
        serviceLineTransactions: { include: { serviceLine: true } },
        updatedBy: true,
        patient: true,
      },
    });
    return rows.map((row) => ({
      ...row,
      patientName: row.claim?.patientName ?? "",
      paymentDate: row.createdAt,
      paymentMethod: row.serviceLineTransactions[0]?.method ?? "OTHER",
    }));
  },
  /** Payments created within [from, to], inclusive on both ends. */
  async getPaymentsByDateRange(
    from: Date,
    to: Date
  ): Promise<PaymentWithExtras[]> {
    const rows = await db.payment.findMany({
      where: { createdAt: { gte: from, lte: to } },
      orderBy: { createdAt: "desc" },
      include: {
        claim: { include: { serviceLines: true } },
        serviceLines: true,
        serviceLineTransactions: { include: { serviceLine: true } },
        updatedBy: true,
        patient: true,
      },
    });
    return rows.map((row) => ({
      ...row,
      patientName: row.claim?.patientName ?? "",
      paymentDate: row.createdAt,
      paymentMethod: row.serviceLineTransactions[0]?.method ?? "OTHER",
    }));
  },
  /** Total number of payment rows. */
  async getTotalPaymentCount(): Promise<number> {
    return db.payment.count();
  },
};

View File

@@ -0,0 +1,61 @@
import { Staff } from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
/**
 * Contract for staff persistence operations backed by Prisma.
 */
export interface IStorage {
  // Single staff member by id; undefined when not found.
  getStaff(id: number): Promise<Staff | undefined>;
  getAllStaff(): Promise<Staff[]>;
  createStaff(staff: Staff): Promise<Staff>;
  updateStaff(id: number, updates: Partial<Staff>): Promise<Staff | undefined>;
  // true on success, false when the delete failed (e.g. unknown id or FK constraint).
  deleteStaff(id: number): Promise<boolean>;
  // Usage counts, e.g. to guard deletion of staff with linked records.
  countAppointmentsByStaffId(staffId: number): Promise<number>;
  countClaimsByStaffId(staffId: number): Promise<number>;
}
/**
 * Prisma-backed implementation of the staff storage contract.
 */
export const staffStorage: IStorage = {
  // Staff methods
  /** Single staff member by id; undefined when the id is unknown. */
  async getStaff(id: number): Promise<Staff | undefined> {
    const row = await db.staff.findUnique({ where: { id } });
    return row === null ? undefined : row;
  },
  /** Every staff row, in storage order. */
  async getAllStaff(): Promise<Staff[]> {
    return db.staff.findMany();
  },
  /** Insert a new staff member. */
  async createStaff(staff: Staff): Promise<Staff> {
    return db.staff.create({ data: staff });
  },
  /** Apply a partial update; Prisma throws if the id is unknown. */
  async updateStaff(
    id: number,
    updates: Partial<Staff>
  ): Promise<Staff | undefined> {
    const row = await db.staff.update({ where: { id }, data: updates });
    return row ?? undefined;
  },
  /** Delete a staff member; false (with a logged error) on failure. */
  async deleteStaff(id: number): Promise<boolean> {
    try {
      await db.staff.delete({ where: { id } });
      return true;
    } catch (error) {
      console.error("Error deleting staff:", error);
      return false;
    }
  },
  /** Appointments referencing this staff member. */
  async countAppointmentsByStaffId(staffId: number): Promise<number> {
    return db.appointment.count({ where: { staffId } });
  },
  /** Claims referencing this staff member. */
  async countClaimsByStaffId(staffId: number): Promise<number> {
    return db.claim.count({ where: { staffId } });
  },
};

View File

@@ -0,0 +1,53 @@
import { InsertUser, User } from "@repo/db/types";
import { prisma as db } from "@repo/db/client";
/**
 * Contract for user-account persistence operations backed by Prisma.
 */
export interface IUsersStorage {
  // User methods
  // Single user by id; undefined when not found.
  getUser(id: number): Promise<User | undefined>;
  // Offset-paginated listing.
  getUsers(limit: number, offset: number): Promise<User[]>;
  // Lookup by unique username (used for login).
  getUserByUsername(username: string): Promise<User | undefined>;
  createUser(user: InsertUser): Promise<User>;
  // undefined when the update fails (e.g. unknown id).
  updateUser(id: number, updates: Partial<User>): Promise<User | undefined>;
  // true on success, false when the delete fails.
  deleteUser(id: number): Promise<boolean>;
}
/**
 * Prisma-backed implementation of the user storage contract.
 */
export const usersStorage: IUsersStorage = {
  // User methods
  /** Single user by id; undefined when the id is unknown. */
  async getUser(id: number): Promise<User | undefined> {
    const found = await db.user.findUnique({ where: { id } });
    return found === null ? undefined : found;
  },
  /** Offset-paginated user listing. */
  async getUsers(limit: number, offset: number): Promise<User[]> {
    return db.user.findMany({ take: limit, skip: offset });
  },
  /** Lookup by unique username; undefined when no match. */
  async getUserByUsername(username: string): Promise<User | undefined> {
    const found = await db.user.findUnique({ where: { username } });
    return found === null ? undefined : found;
  },
  /** Insert a new user account. */
  async createUser(user: InsertUser): Promise<User> {
    return db.user.create({ data: user as User });
  },
  /** Apply a partial update; undefined when the update fails. */
  async updateUser(
    id: number,
    updates: Partial<User>
  ): Promise<User | undefined> {
    try {
      return await db.user.update({ where: { id }, data: updates });
    } catch {
      return undefined;
    }
  },
  /** Delete a user; false when the delete fails (e.g. unknown id). */
  async deleteUser(id: number): Promise<boolean> {
    try {
      await db.user.delete({ where: { id } });
      return true;
    } catch {
      return false;
    }
  },
};

10
apps/Backend/src/types/express.types.d.ts vendored Executable file
View File

@@ -0,0 +1,10 @@
import { User } from "@repo/db/client";
/**
 * Augments Express's `Request.user` type so `req.user.id` type-checks in
 * route handlers (the value itself is attached by the auth middleware).
 * NOTE: the import above also makes this .d.ts file a module, which is
 * required for `declare global` to MERGE into the Express namespace rather
 * than redeclare it.
 */
declare global {
  namespace Express {
    interface User {
      id: number;
      // include any other properties
    }
  }
}

View File

@@ -0,0 +1,61 @@
/**
 * Coerce any OCR field value (string, number, null, undefined) into a
 * trimmed string; nullish input yields the empty string.
 */
export function toStr(val: string | number | null | undefined): string {
  return val == null ? "" : String(val).trim();
}
/**
 * Convert OCR date strings like "070825" (MMDDYY) into a local-time JS Date.
 * Example: "070825" → 2025-07-08 (month 07, day 08, year 25).
 * Two-digit years < 50 map to 20xx, otherwise 19xx.
 *
 * @throws Error when the input is not exactly six digits, or when the digits
 *   do not form a real calendar date (e.g. "023025" = Feb 30) — previously
 *   the Date constructor silently rolled such values into the next month.
 */
export function convertOCRDate(input: string | number | null | undefined): Date {
  // Inline nullish-safe stringify + trim (same behavior as toStr).
  const raw = input == null ? "" : String(input).trim();
  if (!/^\d{6}$/.test(raw)) {
    throw new Error(`Invalid OCR date format: ${raw}`);
  }
  const month = parseInt(raw.slice(0, 2), 10) - 1; // 0-based for Date
  const day = parseInt(raw.slice(2, 4), 10);
  const year2 = parseInt(raw.slice(4, 6), 10);
  const year = year2 < 50 ? 2000 + year2 : 1900 + year2;
  const date = new Date(year, month, day);
  // Round-trip check: reject impossible dates the constructor would roll over.
  if (
    date.getFullYear() !== year ||
    date.getMonth() !== month ||
    date.getDate() !== day
  ) {
    throw new Error(`Invalid OCR date format: ${raw}`);
  }
  return date;
}
/**
 * Normalize a DOB value to the "YYYY-MM-DD" string expected by the Python
 * agent.
 * - "YYYY-MM-DD" strings pass through unchanged.
 * - ISO datetime strings and Date objects are reduced to their UTC calendar
 *   date (UTC getters avoid local-timezone day shifts).
 * - Invalid or nullish values yield null.
 */
export function formatDobForAgent(dob: Date | string | null | undefined): string | null {
  if (!dob) return null;
  // Shared helper: UTC calendar date of a Date, or null when invalid.
  const utcDateString = (d: Date): string | null => {
    if (Number.isNaN(d.getTime())) return null;
    const yyyy = d.getUTCFullYear();
    const mm = `${d.getUTCMonth() + 1}`.padStart(2, "0");
    const dd = `${d.getUTCDate()}`.padStart(2, "0");
    return `${yyyy}-${mm}-${dd}`;
  };
  if (typeof dob === "string") {
    // Already in the target shape — return verbatim.
    if (/^\d{4}-\d{2}-\d{2}$/.test(dob)) return dob;
    // Otherwise parse as ISO/date string and use UTC parts.
    return utcDateString(new Date(dob));
  }
  if (dob instanceof Date) {
    return utcDateString(dob);
  }
  return null;
}

View File

@@ -0,0 +1,146 @@
// ../utils/emptyTempFolder.ts
import fs from "fs/promises";
import fsSync from "fs";
import path from "path";
import os from "os";
/**
 * Remove EVERYTHING under the parent folder that contains filePath.
 * - Does NOT remove the parent folder itself (only its children).
 * - Uses fs.rm with recursive+force if available.
 * - Falls back to reliable manual recursion otherwise.
 * - Logs folder contents before and after.
 *
 * Safety guards refuse to operate on the filesystem root, the user's home
 * directory, or a folder whose basename is shorter than 2 characters.
 *
 * Removal of individual entries is best-effort: a failed entry is logged
 * (with one chmod-and-retry attempt) and the loop continues, so a partial
 * clean is possible without an exception reaching the caller.
 *
 * @param filePath Any file inside the folder to empty; falsy → no-op.
 * @throws When a safety check fails or the folder cannot be listed up front.
 */
export async function emptyFolderContainingFile(filePath?: string | null): Promise<void> {
  if (!filePath) return;
  const absFile = path.resolve(String(filePath));
  const folder = path.dirname(absFile);
  // Safety checks
  if (!folder) {
    throw new Error(`Refusing to clean: resolved folder empty for filePath=${filePath}`);
  }
  const parsed = path.parse(folder);
  if (folder === parsed.root) {
    throw new Error(`Refusing to clean root folder: ${folder}`);
  }
  const home = os.homedir();
  if (home && path.resolve(home) === path.resolve(folder)) {
    throw new Error(`Refusing to clean user's home directory: ${folder}`);
  }
  const base = path.basename(folder);
  if (!base || base.length < 2) {
    throw new Error(`Refusing to clean suspicious folder: ${folder}`);
  }
  console.log(`[cleanup] emptyFolderContainingFile called for filePath=${filePath}`);
  console.log(`[cleanup] target folder=${folder}`);
  // Read and log contents before removal; this listing also drives the loop below.
  let before: string[] = [];
  try {
    before = await fs.readdir(folder);
    console.log(`[cleanup] before (${before.length}):`, before);
  } catch (err) {
    console.error(`[cleanup] failed to read folder ${folder} before removal:`, err);
    // If we can't read, bail out (safety)
    throw err;
  }
  // Helper fallback: recursive remove. Uses lstat, so symlinks are unlinked
  // as files rather than followed into their targets.
  async function recursiveRemove(p: string): Promise<void> {
    try {
      const st = await fs.lstat(p);
      if (st.isDirectory()) {
        const children = await fs.readdir(p);
        for (const c of children) {
          await recursiveRemove(path.join(p, c));
        }
        // remove directory after children removed
        try {
          await fs.rmdir(p);
        } catch (err) {
          // log and continue — best-effort at the directory level
          console.error(`[cleanup] rmdir failed for ${p}:`, err);
        }
      } else {
        try {
          await fs.unlink(p);
        } catch (err: any) {
          // On EPERM try chmod and retry once (read-only files, e.g. on Windows)
          if (err.code === "EPERM" || err.code === "EACCES") {
            try {
              fsSync.chmodSync(p, 0o666);
              await fs.unlink(p);
            } catch (retryErr) {
              console.error(`[cleanup] unlink after chmod failed for ${p}:`, retryErr);
              throw retryErr;
            }
          } else if (err.code === "ENOENT") {
            // already gone — ignore
          } else {
            throw err;
          }
        }
      }
    } catch (err: any) {
      if (err.code === "ENOENT") return; // already gone
      // rethrow to allow caller to log
      throw err;
    }
  }
  // Remove everything under folder (each top-level entry)
  for (const name of before) {
    const full = path.join(folder, name);
    try {
      if (typeof (fs as any).rm === "function") {
        // Node >= 14.14/16+: use fs.rm with recursive & force
        await (fs as any).rm(full, { recursive: true, force: true });
        console.log(`[cleanup] removed (fs.rm): ${full}`);
      } else {
        // fallback for older runtimes without fs.rm
        await recursiveRemove(full);
        console.log(`[cleanup] removed (recursive): ${full}`);
      }
    } catch (err) {
      console.error(`[cleanup] failed to remove ${full}:`, err);
      // Try chmod and retry once for stubborn files
      try {
        if (fsSync.existsSync(full)) {
          console.log(`[cleanup] attempting chmod+retry for ${full}`);
          try {
            fsSync.chmodSync(full, 0o666);
            if (typeof (fs as any).rm === "function") {
              await (fs as any).rm(full, { recursive: true, force: true });
            } else {
              await recursiveRemove(full);
            }
            console.log(`[cleanup] removed after chmod: ${full}`);
            continue;
          } catch (retryErr) {
            console.error(`[cleanup] retry after chmod failed for ${full}:`, retryErr);
          }
        } else {
          console.log(`[cleanup] ${full} disappeared before retry`);
        }
      } catch (permErr) {
        console.error(`[cleanup] chmod/retry error for ${full}:`, permErr);
      }
      // continue to next entry even if this failed
    }
  }
  // Read and log contents after; a non-empty listing here means some entries survived.
  try {
    const after = await fs.readdir(folder);
    console.log(`[cleanup] after (${after.length}):`, after);
  } catch (err) {
    console.error(`[cleanup] failed to read folder ${folder} after removal:`, err);
  }
  console.log(`[cleanup] finished cleaning folder: ${folder}`);
}

View File

@@ -0,0 +1,27 @@
/**
 * Normalize an insurance ID to a plain digit string.
 * Numbers are accepted and stringified; whitespace anywhere is stripped.
 *
 * @returns The cleaned digit string, or undefined when the input is
 *   null/undefined or blank after stripping whitespace.
 * @throws Error for non-string/non-number inputs or non-digit characters.
 */
export function normalizeInsuranceId(raw: unknown): string | undefined {
  if (raw === undefined || raw === null) return undefined;

  let s: string;
  switch (typeof raw) {
    case "number":
      // Accept numbers too (e.g. 12345), but prefer strings
      s = String(raw);
      break;
    case "string":
      s = raw;
      break;
    default:
      // Not acceptable type
      throw new Error("Insurance ID must be a numeric string.");
  }

  // Remove all whitespace; a blank result is treated as "no value".
  const digitsOnly = s.replace(/\s+/g, "");
  if (!digitsOnly) return undefined;

  // Only digits allowed (the ID is numeric)
  if (!/^\d+$/.test(digitsOnly)) {
    throw new Error("Insurance ID must contain only digits.");
  }
  return digitsOnly;
}

View File

@@ -0,0 +1,12 @@
/**
 * Convert a Prisma CloudFile result into a JSON-friendly plain object:
 * a bigint fileSize becomes a decimal string, and createdAt/updatedAt are
 * rendered via toISOString when present. Falsy input yields null.
 */
export function serializeFile(f: any) {
  if (!f) return null;
  const fileSize =
    typeof f.fileSize === "bigint" ? f.fileSize.toString() : f.fileSize;
  return {
    ...f,
    fileSize,
    createdAt: f.createdAt?.toISOString?.(),
    updatedAt: f.updatedAt?.toISOString?.(),
  };
}

12
apps/Backend/tsconfig.json Executable file
View File

@@ -0,0 +1,12 @@
{
"extends": "@repo/typescript-config/base.json",
"compilerOptions": {
"outDir": "dist",
"module": "CommonJS",
"moduleResolution": "Node",
"noEmit": true
},
"include": ["src"],
"exclude": ["node_modules", "dist"]
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB