feat(cloud-page) - wip - uploading files till now
@@ -1,7 +1,450 @@
import { Router } from "express";
// src/routes/cloudStorage.routes.ts
import express, { Request, Response } from "express";
// import storage from "../storage";
import { cloudStorage as storage } from "../storage/cloudStorage-storage";
import { serializeFile } from "../utils/prismaFileUtils";
import { CloudFolder } from "@repo/db/types";

const router = Router();
const router = express.Router();

/* ---------- Helpers ---------- */
function parsePositiveInt(v: unknown, fallback: number) {
  const n = Number(v);
  if (!Number.isFinite(n) || n < 0) return fallback;
  return Math.floor(n);
}

function sendError(
  res: Response,
  status: number,
  message: string,
  details?: any
) {
  return res.status(status).json({ error: true, message, details });
}

/* ---------- Paginated child FOLDERS for a parent ----------
   GET /items/folders?parentId=&limit=&offset=
   parentId may be "null" or numeric or absent (means root)
*/
router.get(
  "/items/folders",
  async (req: Request, res: Response): Promise<any> => {
    const rawParent = req.query.parentId;
    const parentId =
      rawParent === undefined
        ? null
        : rawParent === "null"
          ? null
          : Number(rawParent);

    if (parentId !== null && (!Number.isInteger(parentId) || parentId <= 0)) {
      return sendError(res, 400, "Invalid parentId");
    }

    const limit = parsePositiveInt(req.query.limit, 10); // default 10 folders/page
    const offset = parsePositiveInt(req.query.offset, 0);

    try {
      // Prefer a storage method that lists folders by parent, otherwise filter
      let data: CloudFolder[] = [];
      if (typeof (storage as any).listFoldersByParent === "function") {
        data = await (storage as any).listFoldersByParent(
          parentId,
          limit,
          offset
        );
        const total =
          (await (storage as any).countFoldersByParent?.(parentId)) ??
          data.length;
        return res.json({ error: false, data, total, limit, offset });
      }

      // Fallback: use recent and filter (less efficient). Recommend implementing listFoldersByParent in storage.
      const recent = await storage.listRecentFolders(1000, 0);
      const folders = (recent || []).filter(
        (f: any) => (f as any).parentId === parentId
      );
      const paged = folders.slice(offset, offset + limit);
      return res.json({
        error: false,
        data: paged,
        total: folders.length,
        limit,
        offset,
      });
    } catch (err) {
      return sendError(res, 500, "Failed to load child folders", err);
    }
  }
);
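
/* Illustrative client call (not part of this commit): fetching the first page of
   child folders under a parent. The "/api/cloud" mount prefix is an assumption —
   use whatever path this router is mounted on. */
async function exampleListChildFolders(parentId: number | "null" = "null") {
  const res = await fetch(
    `/api/cloud/items/folders?parentId=${parentId}&limit=10&offset=0`
  );
  // Response shape from the handler above: { error, data, total, limit, offset }
  const { data, total } = await res.json();
  console.log(`loaded ${data.length} of ${total} folders`);
  return data;
}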

/* ---------- Paginated files for a folder ----------
   GET /items/files?parentId=&limit=&offset=
   parentId may be "null" or numeric or absent (means root)
*/
router.get(
  "/items/files",
  async (req: Request, res: Response): Promise<any> => {
    const rawParent = req.query.parentId;
    const parentId =
      rawParent === undefined
        ? null
        : rawParent === "null"
          ? null
          : Number(rawParent);

    if (parentId !== null && (!Number.isInteger(parentId) || parentId <= 0)) {
      return sendError(res, 400, "Invalid parentId");
    }

    const limit = parsePositiveInt(req.query.limit, 20); // default 20 files/page
    const offset = parsePositiveInt(req.query.offset, 0);

    try {
      const files = await storage.listFilesInFolder(parentId, limit, offset);
      const total = await storage.countFilesInFolder(parentId);
      const serialized = files.map(serializeFile);
      return res.json({ error: false, data: serialized, total, limit, offset });
    } catch (err) {
      return sendError(res, 500, "Failed to load files for folder", err);
    }
  }
);

/* ---------- Recent folders (global) ----------
   GET /folders/recent?limit=&offset=
*/
router.get(
  "/folders/recent",
  async (req: Request, res: Response): Promise<any> => {
    const limit = parsePositiveInt(req.query.limit, 50);
    const offset = parsePositiveInt(req.query.offset, 0);
    try {
      const folders = await storage.listRecentFolders(limit, offset);
      const total = await storage.countFolders();
      return res.json({ error: false, data: folders, total, limit, offset });
    } catch (err) {
      return sendError(res, 500, "Failed to load recent folders");
    }
  }
);

/* ---------- Folder CRUD ----------
   POST /folders { userId, name, parentId? }
   PUT /folders/:id { name?, parentId? }
   DELETE /folders/:id
*/
router.post("/folders", async (req: Request, res: Response): Promise<any> => {
  const { userId, name, parentId } = req.body;
  if (!userId || typeof name !== "string" || !name.trim()) {
    return sendError(res, 400, "Missing or invalid userId/name");
  }
  try {
    const created = await storage.createFolder(
      userId,
      name.trim(),
      parentId ?? null
    );
    return res.status(201).json({ error: false, data: created });
  } catch (err) {
    return sendError(res, 500, "Failed to create folder");
  }
});

router.put(
  "/folders/:id",
  async (req: Request, res: Response): Promise<any> => {
    // coerce possibly-undefined param to string before parsing
    const id = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid folder id");

    const updates: any = {};
    if (typeof req.body.name === "string") updates.name = req.body.name.trim();
    if (req.body.parentId !== undefined) updates.parentId = req.body.parentId;

    try {
      const updated = await storage.updateFolder(id, updates);
      if (!updated)
        return sendError(res, 404, "Folder not found or update failed");
      return res.json({ error: false, data: updated });
    } catch (err) {
      return sendError(res, 500, "Failed to update folder");
    }
  }
);

router.delete(
  "/folders/:id",
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid folder id");
    try {
      const ok = await storage.deleteFolder(id);
      if (!ok) return sendError(res, 404, "Folder not found or delete failed");
      return res.json({ error: false, data: { id } });
    } catch (err) {
      return sendError(res, 500, "Failed to delete folder");
    }
  }
);
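
/* Illustrative folder CRUD usage (not part of this commit). The "/api/cloud" prefix
   and the userId value are assumptions; the request/response shapes mirror the
   handlers above. */
async function exampleFolderCrud() {
  // create a folder at the root
  const createRes = await fetch("/api/cloud/folders", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ userId: 1, name: "Invoices", parentId: null }),
  });
  const { data: folder } = await createRes.json();

  // rename it
  await fetch(`/api/cloud/folders/${folder.id}`, {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ name: "Invoices 2024" }),
  });

  // delete it
  await fetch(`/api/cloud/folders/${folder.id}`, { method: "DELETE" });
}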

/* ---------- Files inside folder (pagination) ----------
   GET /folders/:id/files?limit=&offset=
   id = "null" lists files with folderId = null
   responses serialized
*/
router.get(
  "/folders/:id/files",
  async (req: Request, res: Response): Promise<any> => {
    const rawId = req.params.id;
    const folderId = rawId === "null" ? null : Number.parseInt(rawId ?? "", 10);
    if (folderId !== null && (!Number.isInteger(folderId) || folderId <= 0)) {
      return sendError(res, 400, "Invalid folder id");
    }

    const limit = parsePositiveInt(req.query.limit, 50);
    const offset = parsePositiveInt(req.query.offset, 0);

    try {
      const files = await storage.listFilesInFolder(folderId, limit, offset);
      const total = await storage.countFilesInFolder(folderId);
      const serialized = files.map(serializeFile);
      return res.json({ error: false, data: serialized, total, limit, offset });
    } catch (err) {
      return sendError(res, 500, "Failed to list files for folder");
    }
  }
);

/* ---------- File CRUD (init, update metadata, delete) ----------
   POST /folders/:id/files { userId, name, mimeType?, expectedSize?, totalChunks? }
   PUT /files/:id { name?, mimeType?, folderId? }
   DELETE /files/:id
*/
router.post(
  "/folders/:id/files",
  async (req: Request, res: Response): Promise<any> => {
    const rawId = req.params.id;
    const folderId = rawId === "null" ? null : Number.parseInt(rawId ?? "", 10);
    if (folderId !== null && (!Number.isInteger(folderId) || folderId <= 0)) {
      return sendError(res, 400, "Invalid folder id");
    }

    const { userId, name, mimeType } = req.body;
    if (!userId || typeof name !== "string" || !name.trim()) {
      return sendError(res, 400, "Missing or invalid userId/name");
    }

    // coerce size & chunks
    let expectedSize: bigint | null = null;
    if (req.body.expectedSize != null) {
      try {
        expectedSize = BigInt(String(req.body.expectedSize));
      } catch {
        return sendError(res, 400, "Invalid expectedSize");
      }
    }
    let totalChunks: number | null = null;
    if (req.body.totalChunks != null) {
      const tc = Number(req.body.totalChunks);
      if (!Number.isFinite(tc) || tc <= 0)
        return sendError(res, 400, "Invalid totalChunks");
      totalChunks = Math.floor(tc);
    }

    try {
      const created = await storage.initializeFileUpload(
        userId,
        name.trim(),
        mimeType ?? null,
        expectedSize,
        totalChunks,
        folderId
      );
      return res
        .status(201)
        .json({ error: false, data: serializeFile(created as any) });
    } catch {
      return sendError(res, 500, "Failed to create file");
    }
  }
);

/* ---------- 2. CHUNKS (raw upload) ---------- */
router.post(
  "/files/:id/chunks",
  // only here: use express.raw so req.body is Buffer
  express.raw({ type: () => true, limit: "100mb" }),
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    const seq = Number.parseInt(
      String(req.query.seq ?? req.body.seq ?? ""),
      10
    );
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid file id");
    if (!Number.isInteger(seq) || seq < 0)
      return sendError(res, 400, "Invalid seq");

    const body = req.body as Buffer;
    console.log(
      `[chunk upload] fileId=${id} seq=${seq} contentType=${String(req.headers["content-type"])} bodyIsBuffer=${Buffer.isBuffer(body)} bodyLength=${body?.length ?? 0}`
    );

    if (!body || !(body instanceof Buffer)) {
      return sendError(res, 400, "Expected raw binary body (Buffer)");
    }

    try {
      await storage.appendFileChunk(id, seq, body);
      return res.json({ error: false, data: { fileId: id, seq } });
    } catch (err: any) {
      console.error(
        "[chunk upload] appendFileChunk failed:",
        err && (err.stack || err.message || err)
      );

      return sendError(res, 500, "Failed to add chunk");
    }
  }
);

/* ---------- 3. COMPLETE ---------- */
router.post(
  "/files/:id/complete",
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid file id");

    try {
      const result = await storage.finalizeFileUpload(id);
      return res.json({ error: false, data: result });
    } catch (err: any) {
      return sendError(res, 500, err?.message || "Failed to complete file");
    }
  }
);
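
/* Illustrative end-to-end upload flow (not part of this commit): init metadata,
   push raw chunks, then complete — the three routes above in order. Assumes the
   router is mounted at "/api/cloud", a caller userId of 1, and a 1 MiB chunk size;
   all three are arbitrary choices for the sketch. */
async function exampleUploadFile(file: File, folderId: number | "null" = "null") {
  const CHUNK_SIZE = 1024 * 1024;
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);

  // 1. init: POST /folders/:id/files
  const initRes = await fetch(`/api/cloud/folders/${folderId}/files`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      userId: 1, // assumed caller id
      name: file.name,
      mimeType: file.type,
      expectedSize: file.size,
      totalChunks,
    }),
  });
  const { data: created } = await initRes.json();

  // 2. chunks: POST /files/:id/chunks?seq=N with a raw binary body
  for (let seq = 0; seq < totalChunks; seq++) {
    const blob = file.slice(seq * CHUNK_SIZE, (seq + 1) * CHUNK_SIZE);
    await fetch(`/api/cloud/files/${created.id}/chunks?seq=${seq}`, {
      method: "POST",
      headers: { "Content-Type": "application/octet-stream" },
      body: blob,
    });
  }

  // 3. complete: POST /files/:id/complete
  const doneRes = await fetch(`/api/cloud/files/${created.id}/complete`, {
    method: "POST",
  });
  return doneRes.json(); // { error: false, data: { ok: true, size: "<bytes>" } }
}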

router.put("/files/:id", async (req: Request, res: Response): Promise<any> => {
  const id = Number.parseInt(req.params.id ?? "", 10);
  if (!Number.isInteger(id) || id <= 0)
    return sendError(res, 400, "Invalid file id");

  const updates: any = {};
  if (typeof req.body.name === "string") updates.name = req.body.name.trim();
  if (typeof req.body.mimeType === "string")
    updates.mimeType = req.body.mimeType;
  if (req.body.folderId !== undefined) updates.folderId = req.body.folderId;

  try {
    const updated = await storage.updateFile(id, updates);
    if (!updated) return sendError(res, 404, "File not found or update failed");
    return res.json({ error: false, data: serializeFile(updated as any) });
  } catch (err) {
    return sendError(res, 500, "Failed to update file metadata");
  }
});

router.delete(
  "/files/:id",
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid file id");

    try {
      const ok = await storage.deleteFile(id);
      if (!ok) return sendError(res, 404, "File not found or delete failed");
      return res.json({ error: false, data: { id } });
    } catch (err) {
      return sendError(res, 500, "Failed to delete file");
    }
  }
);

/* ---------- Download (stream) ----------
   GET /files/:id/download
*/
router.get(
  "/files/:id/download",
  async (req: Request, res: Response): Promise<any> => {
    const id = Number.parseInt(req.params.id ?? "", 10);
    if (!Number.isInteger(id) || id <= 0)
      return sendError(res, 400, "Invalid file id");

    try {
      const file = await storage.getFile(id);
      if (!file) return sendError(res, 404, "File not found");

      const filename = (file.name ?? `file-${(file as any).id}`).replace(
        /["\\]/g,
        ""
      );
      if ((file as any).mimeType)
        res.setHeader("Content-Type", (file as any).mimeType);
      res.setHeader(
        "Content-Disposition",
        `attachment; filename="${encodeURIComponent(filename)}"`
      );

      await storage.streamFileTo(res, id);

      if (!res.writableEnded) res.end();
    } catch (err) {
      if (res.headersSent) return res.end();
      return sendError(res, 500, "Failed to stream file");
    }
  }
);
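
/* Illustrative download call (not part of this commit): fetches the streamed body
   and turns it into a Blob in the browser. The "/api/cloud" prefix is an assumption. */
async function exampleDownloadFile(fileId: number): Promise<Blob> {
  const res = await fetch(`/api/cloud/files/${fileId}/download`);
  if (!res.ok) throw new Error(`download failed: ${res.status}`);
  return res.blob(); // Content-Type / Content-Disposition are set by the route above
}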

/* ---------- Search endpoints (separate) ----------
   GET /search/folders?q=&limit=&offset=
   GET /search/files?q=&type=&limit=&offset=
*/
router.get(
  "/search/folders",
  async (req: Request, res: Response): Promise<any> => {
    const q = String(req.query.q ?? "").trim();
    const limit = parsePositiveInt(req.query.limit, 20);
    const offset = parsePositiveInt(req.query.offset, 0);
    if (!q) return sendError(res, 400, "Missing search query parameter 'q'");

    try {
      const { data, total } = await storage.searchFolders(q, limit, offset);
      return res.json({ error: false, data, total, limit, offset });
    } catch (err) {
      return sendError(res, 500, "Folder search failed");
    }
  }
);

router.get(
  "/search/files",
  async (req: Request, res: Response): Promise<any> => {
    const q = String(req.query.q ?? "").trim();
    const type =
      typeof req.query.type === "string" ? req.query.type.trim() : undefined;
    const limit = parsePositiveInt(req.query.limit, 20);
    const offset = parsePositiveInt(req.query.offset, 0);
    if (!q && !type)
      return sendError(
        res,
        400,
        "Provide at least one of 'q' or 'type' to search files"
      );

    try {
      const { data, total } = await storage.searchFiles(q, type, limit, offset);
      const serialized = data.map(serializeFile);
      return res.json({ error: false, data: serialized, total, limit, offset });
    } catch (err) {
      return sendError(res, 500, "File search failed");
    }
  }
);

export default router;

@@ -1,17 +1,52 @@
import { prisma as db } from "@repo/db/client";
import { CloudFile, CloudFolder } from "@repo/db/types";
import { CloudFolder, CloudFile } from "@repo/db/types";
import { serializeFile } from "../utils/prismaFileUtils";

/**
 * Cloud storage implementation
 *
 * - Clear, self-describing method names
 * - Folder timestamp propagation helper: updateFolderTimestampsRecursively
 * - File upload lifecycle: initializeFileUpload -> appendFileChunk -> finalizeFileUpload
 */
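
/* Illustrative use of the lifecycle described above (not part of this commit):
   the same init -> append -> finalize sequence the routes drive over HTTP, called
   directly against the storage object. The userId and chunk size are assumptions. */
async function exampleDirectUpload(userId: number, name: string, payload: Buffer) {
  const CHUNK = 64 * 1024; // arbitrary chunk size for the sketch
  const file = await cloudStorage.initializeFileUpload(
    userId,
    name,
    "application/octet-stream",
    BigInt(payload.length),
    Math.ceil(payload.length / CHUNK),
    null
  );
  for (let seq = 0; seq * CHUNK < payload.length; seq++) {
    await cloudStorage.appendFileChunk(
      file.id,
      seq,
      payload.subarray(seq * CHUNK, (seq + 1) * CHUNK)
    );
  }
  return cloudStorage.finalizeFileUpload(file.id);
}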

/* ------------------------------- Helpers ------------------------------- */
async function updateFolderTimestampsRecursively(folderId: number | null) {
  if (folderId == null) return;
  let currentId: number | null = folderId;
  const MAX_DEPTH = 50;
  let depth = 0;

  while (currentId != null && depth < MAX_DEPTH) {
    depth += 1;
    try {
      // touch updatedAt and fetch parentId
      const row = (await db.cloudFolder.update({
        where: { id: currentId },
        data: { updatedAt: new Date() },
        select: { parentId: true },
      })) as { parentId: number | null };

      currentId = row.parentId ?? null;
    } catch (err: any) {
      // Stop walking if folder removed concurrently (Prisma P2025)
      if (err?.code === "P2025") break;
      throw err;
    }
  }
}

/* ------------------------------- IStorage ------------------------------- */
export interface IStorage {
  // CloudFolder methods
  // Folders
  getFolder(id: number): Promise<CloudFolder | null>;
  getFoldersByUser(
    userId: number,
  listFoldersByParent(
    parentId: number | null,
    limit: number,
    offset: number
  ): Promise<CloudFolder[]>;
  getRecentFolders(limit: number, offset: number): Promise<CloudFolder[]>;
  countFoldersByParent(parentId: number | null): Promise<number>;
  listRecentFolders(limit: number, offset: number): Promise<CloudFolder[]>;
  createFolder(
    userId: number,
    name: string,
@@ -22,23 +57,19 @@ export interface IStorage {
    updates: Partial<{ name?: string; parentId?: number | null }>
  ): Promise<CloudFolder | null>;
  deleteFolder(id: number): Promise<boolean>;
  countFolders(filter?: {
    userId?: number;
    nameContains?: string | null;
  }): Promise<number>;

  // CloudFile methods
  // Files
  getFile(id: number): Promise<CloudFile | null>;
  listFilesByFolderByUser(
    userId: number,
  listFilesInFolder(
    folderId: number | null,
    limit: number,
    offset: number
  ): Promise<CloudFile[]>;
  listFilesByFolder(
    folderId: number | null,
    limit: number,
    offset: number
  ): Promise<CloudFile[]>;

  // chunked upload methods
  createFileInit(
  initializeFileUpload(
    userId: number,
    name: string,
    mimeType?: string | null,
@@ -46,59 +77,74 @@ export interface IStorage {
    totalChunks?: number | null,
    folderId?: number | null
  ): Promise<CloudFile>;
  addChunk(fileId: number, seq: number, data: Buffer): Promise<void>;
  completeFile(fileId: number): Promise<{ ok: true; size: string }>;
  appendFileChunk(fileId: number, seq: number, data: Buffer): Promise<void>;
  finalizeFileUpload(fileId: number): Promise<{ ok: true; size: string }>;
  deleteFile(fileId: number): Promise<boolean>;
  updateFile(
    id: number,
    updates: Partial<Pick<CloudFile, "name" | "mimeType" | "folderId">>
  ): Promise<CloudFile | null>;
  renameFile(id: number, name: string): Promise<CloudFile | null>;
  countFilesInFolder(folderId: number | null): Promise<number>;
  countFiles(filter?: {
    userId?: number;
    nameContains?: string | null;
    mimeType?: string | null;
  }): Promise<number>;

  // search
  searchByName(
    userId: number,
  // Search
  searchFolders(
    q: string,
    limit: number,
    offset: number
  ): Promise<{
    folders: CloudFolder[];
    files: CloudFile[];
    foldersTotal: number;
    filesTotal: number;
  }>;
  ): Promise<{ data: CloudFolder[]; total: number }>;
  searchFiles(
    q: string,
    type: string | undefined,
    limit: number,
    offset: number
  ): Promise<{ data: CloudFile[]; total: number }>;

  // helper: stream file chunks via Node.js stream
  // Streaming
  streamFileTo(resStream: NodeJS.WritableStream, fileId: number): Promise<void>;
}

export const cloudStorageStorage: IStorage = {
  // --- Folders ---
/* ------------------------------- Implementation ------------------------------- */
export const cloudStorage: IStorage = {
  // --- FOLDERS ---
  async getFolder(id: number) {
    const folder = await db.cloudFolder.findUnique({
      where: { id },
      include: { files: false },
    });
    return folder ?? null;
    return (folder as unknown as CloudFolder) ?? null;
  },

  async getFoldersByUser(
    userId: number,
  async listFoldersByParent(
    parentId: number | null = null,
    limit = 50,
    offset = 0
  ) {
    const folders = await db.cloudFolder.findMany({
      where: { userId, parentId },
      where: { parentId },
      orderBy: { name: "asc" },
      skip: offset,
      take: limit,
    });
    return folders;
    return folders as unknown as CloudFolder[];
  },

  async getRecentFolders(limit = 50, offset = 0) {
  async countFoldersByParent(parentId: number | null = null) {
    return db.cloudFolder.count({ where: { parentId } });
  },

  async listRecentFolders(limit = 50, offset = 0) {
    const folders = await db.cloudFolder.findMany({
      orderBy: { name: "asc" },
      orderBy: { updatedAt: "desc" },
      skip: offset,
      take: limit,
    });
    return folders;
    return folders as unknown as CloudFolder[];
  },

  async createFolder(
@@ -109,7 +155,9 @@ export const cloudStorageStorage: IStorage = {
    const created = await db.cloudFolder.create({
      data: { userId, name, parentId },
    });
    return created;
    // mark parent(s) as updated
    await updateFolderTimestampsRecursively(parentId);
    return created as unknown as CloudFolder;
  },

  async updateFolder(
@@ -121,24 +169,51 @@ export const cloudStorageStorage: IStorage = {
        where: { id },
        data: updates,
      });
      return updated;
      if (updates.parentId !== undefined) {
        await updateFolderTimestampsRecursively(updates.parentId ?? null);
      } else {
        // touch this folder's parent (to mark modification)
        const f = await db.cloudFolder.findUnique({
          where: { id },
          select: { parentId: true },
        });
        await updateFolderTimestampsRecursively(f?.parentId ?? null);
      }
      return updated as unknown as CloudFolder;
    } catch (err) {
      return null;
      throw err;
    }
  },

  async deleteFolder(id: number) {
    try {
      const folder = await db.cloudFolder.findUnique({
        where: { id },
        select: { parentId: true },
      });
      const parentId = folder?.parentId ?? null;
      await db.cloudFolder.delete({ where: { id } });
      await updateFolderTimestampsRecursively(parentId);
      return true;
    } catch (err) {
      console.error("deleteFolder error", err);
      return false;
    } catch (err: any) {
      if (err?.code === "P2025") return false;
      throw err;
    }
  },

  // --- Files ---
  async getFile(id: number): Promise<CloudFile | null> {
  async countFolders(filter?: {
    userId?: number;
    nameContains?: string | null;
  }) {
    const where: any = {};
    if (filter?.userId) where.userId = filter.userId;
    if (filter?.nameContains)
      where.name = { contains: filter.nameContains, mode: "insensitive" };
    return db.cloudFolder.count({ where });
  },

  // --- FILES ---
  async getFile(id: number) {
    const file = await db.cloudFile.findUnique({
      where: { id },
      include: { chunks: { orderBy: { seq: "asc" } } },
@@ -146,32 +221,7 @@ export const cloudStorageStorage: IStorage = {
    return (file as unknown as CloudFile) ?? null;
  },

  async listFilesByFolderByUser(
    userId: number,
    folderId: number | null = null,
    limit = 50,
    offset = 0
  ) {
    const files = await db.cloudFile.findMany({
      where: { userId, folderId },
      orderBy: { createdAt: "desc" },
      skip: offset,
      take: limit,
      select: {
        id: true,
        name: true,
        mimeType: true,
        fileSize: true,
        folderId: true,
        isComplete: true,
        createdAt: true,
        updatedAt: true,
      },
    });
    return files.map(serializeFile);
  },

  async listFilesByFolder(
  async listFilesInFolder(
    folderId: number | null = null,
    limit = 50,
    offset = 0
@@ -192,17 +242,16 @@ export const cloudStorageStorage: IStorage = {
        updatedAt: true,
      },
    });
    return files.map(serializeFile);
    return files.map(serializeFile) as unknown as CloudFile[];
  },

  // --- Chunked upload methods ---
  async createFileInit(
    userId,
    name,
    mimeType = null,
    expectedSize = null,
    totalChunks = null,
    folderId = null
  async initializeFileUpload(
    userId: number,
    name: string,
    mimeType: string | null = null,
    expectedSize: bigint | null = null,
    totalChunks: number | null = null,
    folderId: number | null = null
  ) {
    const created = await db.cloudFile.create({
      data: {
@@ -215,81 +264,178 @@ export const cloudStorageStorage: IStorage = {
        isComplete: false,
      },
    });
    return serializeFile(created);
    await updateFolderTimestampsRecursively(folderId);
    return serializeFile(created) as unknown as CloudFile;
  },

  async addChunk(fileId: number, seq: number, data: Buffer) {
    // Ensure file exists & belongs to owner will be done by caller (route)
    // Attempt insert; if unique violation => ignore (idempotent)
  async appendFileChunk(fileId: number, seq: number, data: Buffer) {
    try {
      await db.cloudFileChunk.create({
        data: {
          fileId,
          seq,
          data,
        },
      });
      await db.cloudFileChunk.create({ data: { fileId, seq, data } });
    } catch (err: any) {
      // If unique constraint violation (duplicate chunk), ignore
      // idempotent: ignore duplicate chunk constraint
      if (
        err?.code === "P2002" ||
        err?.message?.includes("Unique constraint failed")
      ) {
        // duplicate chunk, ignore
        return;
      }
      throw err;
    }
  },

  async completeFile(fileId: number) {
    // Compute total size from chunks and mark complete inside a transaction
  async finalizeFileUpload(fileId: number) {
    const chunks = await db.cloudFileChunk.findMany({ where: { fileId } });
    if (!chunks.length) {
      throw new Error("No chunks uploaded");
    }
    if (!chunks.length) throw new Error("No chunks uploaded");

    // compute total size
    let total = 0;
    for (const c of chunks) total += c.data.length;

    // Update file
    await db.cloudFile.update({
    // transactionally update file and read folderId
    const updated = await db.$transaction(async (tx) => {
      await tx.cloudFile.update({
        where: { id: fileId },
        data: {
          fileSize: BigInt(total),
          isComplete: true,
        },
        data: { fileSize: BigInt(total), isComplete: true },
      });
      return tx.cloudFile.findUnique({
        where: { id: fileId },
        select: { folderId: true },
      });
    });

    const folderId = (updated as any)?.folderId ?? null;
    await updateFolderTimestampsRecursively(folderId);

    return { ok: true, size: BigInt(total).toString() };
  },

  async deleteFile(fileId: number) {
    try {
      const file = await db.cloudFile.findUnique({
        where: { id: fileId },
        select: { folderId: true },
      });
      if (!file) return false;
      const folderId = file.folderId ?? null;
      await db.cloudFile.delete({ where: { id: fileId } });
      // chunks cascade-delete via Prisma relation onDelete: Cascade
      await updateFolderTimestampsRecursively(folderId);
      return true;
    } catch (err) {
      console.error("deleteFile error", err);
      return false;
    } catch (err: any) {
      if (err?.code === "P2025") return false;
      throw err;
    }
  },

  // --- Search ---
  async searchByName(userId: number, q: string, limit = 20, offset = 0) {
    const [folders, files, foldersTotal, filesTotal] = await Promise.all([
      db.cloudFolder.findMany({
        where: {
          userId,
          name: { contains: q, mode: "insensitive" },
  async updateFile(
    id: number,
    updates: Partial<Pick<CloudFile, "name" | "mimeType" | "folderId">>
  ) {
    try {
      let prevFolderId: number | null = null;
      if (updates.folderId !== undefined) {
        const f = await db.cloudFile.findUnique({
          where: { id },
          select: { folderId: true },
        });
        prevFolderId = f?.folderId ?? null;
      }

      const updated = await db.cloudFile.update({
        where: { id },
        data: updates,
      });

      // touch affected folders
      if (updates.folderId !== undefined) {
        await updateFolderTimestampsRecursively(updates.folderId ?? null);
        if (
          prevFolderId != null &&
          prevFolderId !== (updates.folderId ?? null)
        ) {
          await updateFolderTimestampsRecursively(prevFolderId);
        }
      } else {
        const f = await db.cloudFile.findUnique({
          where: { id },
          select: { folderId: true },
        });
        await updateFolderTimestampsRecursively(f?.folderId ?? null);
      }

      return serializeFile(updated) as unknown as CloudFile;
    } catch (err) {
      throw err;
    }
  },

  async renameFile(id: number, name: string) {
    try {
      const updated = await db.cloudFile.update({
        where: { id },
        data: { name },
      });
      const f = await db.cloudFile.findUnique({
        where: { id },
        select: { folderId: true },
      });
      await updateFolderTimestampsRecursively(f?.folderId ?? null);
      return serializeFile(updated) as unknown as CloudFile;
    } catch (err) {
      throw err;
    }
  },

  async countFilesInFolder(folderId: number | null) {
    return db.cloudFile.count({ where: { folderId } });
  },

  async countFiles(filter?: {
    userId?: number;
    nameContains?: string | null;
    mimeType?: string | null;
  }) {
    const where: any = {};
    if (filter?.userId) where.userId = filter.userId;
    if (filter?.nameContains)
      where.name = { contains: filter.nameContains, mode: "insensitive" };
    if (filter?.mimeType)
      where.mimeType = { startsWith: filter.mimeType, mode: "insensitive" };
    return db.cloudFile.count({ where });
  },

  // --- SEARCH ---
  async searchFolders(q: string, limit = 20, offset = 0) {
    const [folders, total] = await Promise.all([
      db.cloudFolder.findMany({
        where: { name: { contains: q, mode: "insensitive" } },
        orderBy: { name: "asc" },
        skip: offset,
        take: limit,
      }),
      db.cloudFile.findMany({
        where: {
          userId,
          name: { contains: q, mode: "insensitive" },
      db.cloudFolder.count({
        where: { name: { contains: q, mode: "insensitive" } },
      }),
    ]);
    return { data: folders as unknown as CloudFolder[], total };
  },

  async searchFiles(
    q: string,
    type: string | undefined,
    limit = 20,
    offset = 0
  ) {
    const where: any = {};
    if (q) where.name = { contains: q, mode: "insensitive" };
    if (type) {
      if (!type.includes("/"))
        where.mimeType = { startsWith: `${type}/`, mode: "insensitive" };
      else where.mimeType = { startsWith: type, mode: "insensitive" };
    }

    const [files, total] = await Promise.all([
      db.cloudFile.findMany({
        where,
        orderBy: { createdAt: "desc" },
        skip: offset,
        take: limit,
@@ -304,30 +450,14 @@ export const cloudStorageStorage: IStorage = {
          updatedAt: true,
        },
      }),
      db.cloudFolder.count({
        where: {
          userId,
          name: { contains: q, mode: "insensitive" },
        },
      }),
      db.cloudFile.count({
        where: {
          userId,
          name: { contains: q, mode: "insensitive" },
        },
      }),
      db.cloudFile.count({ where }),
    ]);
    return {
      folders,
      files: files.map(serializeFile),
      foldersTotal,
      filesTotal,
    };

    return { data: files.map(serializeFile) as unknown as CloudFile[], total };
  },

  // --- Streaming helper ---
  // --- STREAM ---
  async streamFileTo(resStream: NodeJS.WritableStream, fileId: number) {
    // Stream chunks in batches to avoid loading everything at once.
    const batchSize = 100;
    let offset = 0;
    while (true) {
@@ -338,12 +468,11 @@ export const cloudStorageStorage: IStorage = {
        skip: offset,
      });
      if (!chunks.length) break;
      for (const c of chunks) {
        resStream.write(Buffer.from(c.data));
      }
      for (const c of chunks) resStream.write(Buffer.from(c.data));
      offset += chunks.length;
      if (chunks.length < batchSize) break;
    }
    // caller will end the response stream
  },
};

export default cloudStorage;

@@ -38,16 +38,52 @@ export async function apiRequest(
  const isFormData =
    typeof FormData !== "undefined" && data instanceof FormData;

  const isFileLike =
    (typeof File !== "undefined" && data instanceof File) ||
    (typeof Blob !== "undefined" && data instanceof Blob);

  const isArrayBufferLike =
    (typeof ArrayBuffer !== "undefined" && data instanceof ArrayBuffer) ||
    (typeof Uint8Array !== "undefined" && data instanceof Uint8Array) ||
    (data != null && (data as any)?.constructor?.name === "Buffer"); // Node Buffer

  // Decide Content-Type header appropriately:
  const headers: Record<string, string> = {
    ...(token ? { Authorization: `Bearer ${token}` } : {}),
    // Only set Content-Type if not using FormData
    ...(isFormData ? {} : { "Content-Type": "application/json" }),
  };

  if (!isFormData) {
    if (isFileLike) {
      // File/Blob: use its own MIME type if present, otherwise fallback
      const mime = (data as File | Blob).type || "application/octet-stream";
      headers["Content-Type"] = mime;
    } else if (isArrayBufferLike) {
      // ArrayBuffer / Buffer / Uint8Array: use generic octet-stream
      headers["Content-Type"] = "application/octet-stream";
    } else {
      // Normal JSON body
      headers["Content-Type"] = "application/json";
    }
  }
  // If FormData, we must NOT set Content-Type (browser will set multipart boundary)

  // Build final body
  const finalBody = isFormData
    ? (data as FormData)
    : isFileLike
      ? // File/Blob can be passed directly as BodyInit
        (data as BodyInit)
      : isArrayBufferLike
        ? // ArrayBuffer / Uint8Array / Buffer -> convert to Uint8Array if needed
          (data as BodyInit)
        : data !== undefined
          ? JSON.stringify(data)
          : undefined;

  const res = await fetch(`${API_BASE_URL}${url}`, {
    method,
    headers,
    body: isFormData ? (data as FormData) : JSON.stringify(data),
    body: finalBody,
    credentials: "include",
  });
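
/* The decision table above, distilled into a tiny standalone helper for clarity
   (illustrative only — not exported by this commit): FormData lets the browser set
   the multipart boundary, File/Blob keep their own MIME type, binary buffers fall
   back to octet-stream, and everything else is sent as JSON. */
function exampleResolveContentType(data: unknown): string | undefined {
  if (typeof FormData !== "undefined" && data instanceof FormData)
    return undefined; // let fetch add "multipart/form-data; boundary=..."
  if (typeof Blob !== "undefined" && data instanceof Blob)
    return data.type || "application/octet-stream"; // File extends Blob
  if (
    (typeof ArrayBuffer !== "undefined" && data instanceof ArrayBuffer) ||
    (typeof Uint8Array !== "undefined" && data instanceof Uint8Array)
  )
    return "application/octet-stream";
  return "application/json";
}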

File diff suppressed because it is too large
@@ -323,6 +323,7 @@ model CloudFolder {
  user      User        @relation(fields: [userId], references: [id])
  files     CloudFile[]
  createdAt DateTime    @default(now())
  updatedAt DateTime    @updatedAt

  @@unique([userId, parentId, name]) // prevents sibling folder name duplicates
  @@index([parentId])
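
// Illustrative only (not part of this commit): with the composite unique key above,
// creating two siblings with the same name for the same user raises Prisma error
// P2002, which a route could translate into an HTTP 409 instead of a generic 500, e.g.:
//
//   try {
//     await db.cloudFolder.create({ data: { userId, name, parentId } });
//   } catch (err: any) {
//     if (err?.code === "P2002") return sendError(res, 409, "Folder name already exists here");
//     throw err;
//   }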