feat: add bulk file upload endpoints
@@ -14,10 +14,13 @@ import {
   getFile,
   updateFile,
   uploadDone,
+  uploadDoneBatch,
 } from "./file.service";
 import {
   CreateFileInput,
+  CreateFilesBatch,
   UpdateFileInput,
+  UploadDoneBatchInput,
   UploadMultiPartCompleteRequest,
 } from "./file.schema";
 
@@ -43,6 +46,31 @@ export async function createFileHandler(
   }
 }
 
+export async function createFileBatchHandler(
+  req: FastifyRequest,
+  res: FastifyReply
+) {
+  const input = req.body as CreateFilesBatch;
+  const resObj = [];
+
+  try {
+    for (const file of input.files) {
+      const newFile = (await createFile(file, req.user)).toObject();
+
+      if (newFile.mimeType != "folder") {
+        const signedUrl = await getUploadUrl(newFile.pid);
+        newFile["signedUrl"] = signedUrl;
+      }
+
+      resObj.push({ ...newFile });
+    }
+
+    return res.code(201).send(resObj);
+  } catch (err) {
+    return err;
+  }
+}
+
 export async function uploadDoneHandler(
   req: FastifyRequest,
   res: FastifyReply
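The batch create handler responds with an array of the created file documents, attaching a presigned upload URL to every non-folder entry. A minimal sketch of the 201 response shape as this handler builds it; only `pid`, `mimeType`, and `signedUrl` are visible in the diff, any other fields come from the underlying file document and are assumptions:

```ts
// Sketch of the 201 payload returned by createFileBatchHandler
// (shape inferred only from the fields used in this diff).
type CreatedFile = {
  pid: string;        // id also used as the S3 object key
  mimeType: string;   // "folder" entries receive no upload URL
  signedUrl?: string; // presigned PUT URL, present for non-folder entries
  // ...remaining fields of the stored file document
};

type CreateFileBatchResponse = CreatedFile[];
```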
@@ -57,6 +85,20 @@ export async function uploadDoneHandler(
   }
 }
 
+export async function uploadDoneBatchHandler(
+  req: FastifyRequest,
+  res: FastifyReply
+) {
+  const input = req.body as UploadDoneBatchInput;
+
+  try {
+    const updateRes = await uploadDoneBatch(input, req.user);
+    return res.code(200).send();
+  } catch (err) {
+    return err;
+  }
+}
+
 export async function getFileHandler(req: FastifyRequest, res: FastifyReply) {
   const { fileId } = req.params as { fileId: string };
 
@@ -1,5 +1,6 @@
 import { FastifyInstance } from "fastify";
 import {
+  createFileBatchHandler,
   createFileHandler,
   deleteFileHandler,
   fileDownloadHandler,
@@ -8,6 +9,7 @@ import {
   getChildrenHandler,
   getFileHandler,
   updateFileHandler,
+  uploadDoneBatchHandler,
   uploadDoneHandler,
 } from "./file.controller";
 import { $file } from "./file.schema";
@@ -37,6 +39,30 @@ export async function fileRoutes(fastify: FastifyInstance) {
     uploadDoneHandler
   );
 
+  fastify.post(
+    "/createBatch",
+    {
+      schema: {
+        body: $file("createFileBatch"),
+      },
+      config: { requiredClaims: ["file:upload"] },
+      preHandler: [fastify.authorize],
+    },
+    createFileBatchHandler
+  );
+
+  fastify.post(
+    "/doneBatch",
+    {
+      schema: {
+        body: $file("uploadDoneBatchInput"),
+      },
+      config: { requiredClaims: ["file:upload"] },
+      preHandler: [fastify.authorize],
+    },
+    uploadDoneBatchHandler
+  );
+
   fastify.get(
     "/:fileId",
     {
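Taken together, the two new routes support a three-step client flow: create the records, upload the bytes to the presigned URLs, then confirm the batch. A hedged client-side sketch, assuming the routes are mounted under a `/files` prefix, bearer-token auth that satisfies the `file:upload` claim, and a `CreateFileInput` carrying at least a `name` and `mimeType` (none of which are shown in this diff):

```ts
// Hypothetical client helper for the new batch endpoints; the route prefix,
// auth header, and CreateFileInput fields are assumptions, not part of the diff.
interface PendingUpload {
  name: string;
  mimeType: string;
  data: Blob;
}

async function uploadBatch(token: string, uploads: PendingUpload[]) {
  const headers = {
    "content-type": "application/json",
    authorization: `Bearer ${token}`,
  };

  // 1. POST /files/createBatch: one record per file; non-folder entries
  //    come back with a presigned signedUrl (see createFileBatchHandler).
  const created: { pid: string; signedUrl?: string }[] = await fetch("/files/createBatch", {
    method: "POST",
    headers,
    body: JSON.stringify({
      files: uploads.map(({ name, mimeType }) => ({ name, mimeType })),
    }),
  }).then((r) => r.json());

  // 2. PUT each payload straight to object storage via its presigned URL.
  //    The handler pushes results in input order, so created[i] matches uploads[i].
  await Promise.all(
    created.flatMap((file, i) =>
      file.signedUrl ? [fetch(file.signedUrl, { method: "PUT", body: uploads[i].data })] : []
    )
  );

  // 3. POST /files/doneBatch with the bare array of pids to mark them done.
  await fetch("/files/doneBatch", {
    method: "POST",
    headers,
    body: JSON.stringify(created.map((file) => file.pid)),
  });
}
```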
@@ -47,6 +47,12 @@ const updateFileInput = z.object({
   root: z.boolean().optional(),
 });
 
+const createFileBatch = z.object({
+  files: z.array(createFileInput),
+});
+
+const uploadDoneBatchInput = z.array(z.string());
+
 const downloadFileResponse = z.object({
   url: z.string().url(),
 });
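As a quick illustration of the two new request bodies: `createFileBatch` wraps an array of the existing per-file inputs, while `uploadDoneBatchInput` is a bare array of file pids. A small sketch with placeholder values (the pids and the inner file fields are hypothetical):

```ts
import { z } from "zod";

// uploadDoneBatchInput accepts a plain array of pid strings.
const uploadDoneBatchInput = z.array(z.string());
uploadDoneBatchInput.parse(["pid_abc", "pid_def"]); // passes

// createFileBatch expects the existing per-file shape, wrapped in a files array:
// { "files": [ { ...createFileInput fields... }, ... ] }
```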
@@ -95,12 +101,16 @@ export type UploadMultiPartCompleteRequest = z.infer<
 
 export type CreateFileInput = z.infer<typeof createFileInput>;
 export type UpdateFileInput = z.infer<typeof updateFileInput>;
+export type CreateFilesBatch = z.infer<typeof createFileBatch>;
+export type UploadDoneBatchInput = z.infer<typeof uploadDoneBatchInput>;
 
 export const { schemas: fileSchemas, $ref: $file } = buildJsonSchemas(
   {
     createFileInput,
     updateFileInput,
     downloadFileResponse,
+    createFileBatch,
+    uploadDoneBatchInput,
     uploadMultipartCompleteRequest,
   },
   { $id: "file" }
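For the `$file("createFileBatch")` and `$file("uploadDoneBatchInput")` references in the routes to resolve, the regenerated `fileSchemas` still need to be registered with Fastify at startup. The existing schemas already work this way, so this is presumably handled elsewhere in the codebase; a sketch of the usual pattern, not part of this diff:

```ts
import Fastify from "fastify";
import { fileSchemas } from "./file.schema";

const fastify = Fastify();

// Register every generated JSON schema so the $file("...") $refs resolve,
// which now includes createFileBatch and uploadDoneBatchInput.
for (const schema of fileSchemas) {
  fastify.addSchema(schema);
}
```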
@@ -1,7 +1,13 @@
 import { AuthenticatedUser } from "../auth";
 import { generateId } from "../utils/id";
 import { deleteFileS3 } from "../utils/s3";
-import { CreateFileInput, fileModel, UpdateFileInput } from "./file.schema";
+import {
+  CreateFileInput,
+  CreateFilesBatch,
+  fileModel,
+  UpdateFileInput,
+  UploadDoneBatchInput,
+} from "./file.schema";
 
 export const ErrNotFound = "not_found";
 export const ParentNotFound = "parent_not_found";
@@ -45,6 +51,18 @@ export async function uploadDone(fileId: string, tenantId: string) {
   );
 }
 
+export async function uploadDoneBatch(
+  input: UploadDoneBatchInput,
+  user: AuthenticatedUser
+) {
+  return await fileModel.updateMany(
+    {
+      $and: [{ tenantId: user.tenantId }, { pid: { $in: input } }],
+    },
+    { status: "done" }
+  );
+}
+
 export async function getFile(fileId: string, tenantId: string) {
   return await fileModel.findOne({
     $and: [{ tenantId: tenantId }, { pid: fileId }, { isDeleted: false }],
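`uploadDoneBatch` flips the status of every matching document in a single `updateMany`, scoped to the caller's tenant via the `$and`/`$in` filter. A minimal usage sketch, assuming `fileModel` is a Mongoose model; the pids and the call site itself are placeholders:

```ts
import { AuthenticatedUser } from "../auth";
import { uploadDoneBatch } from "./file.service";

// Hypothetical call site with placeholder pids.
async function markBatchDone(user: AuthenticatedUser) {
  const result = await uploadDoneBatch(["pid_abc", "pid_def"], user);

  // Mongoose's updateMany result reports how many documents matched and changed.
  console.log(result.matchedCount, result.modifiedCount);
}
```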
@@ -36,7 +36,7 @@ export async function getUploadUrl(key: string) {
     Key: key,
   });
 
-  return await getSignedUrl(client, command, { expiresIn: 300 });
+  return await getSignedUrl(client, command, { expiresIn: 600 });
 }
 
 export async function getUploadUrlMultiPart(key: string, fileSize: number) {
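The only change here bumps the presigned upload URL lifetime from 300 s to 600 s, giving batch clients more headroom before they start each PUT. For reference, a self-contained sketch of the AWS SDK v3 presigning call used above, assuming `command` is a `PutObjectCommand` (consistent with an upload URL); the bucket and key are placeholders:

```ts
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";

async function presignPut(bucket: string, key: string): Promise<string> {
  const client = new S3Client({});
  const command = new PutObjectCommand({ Bucket: bucket, Key: key });

  // URL stays valid for 10 minutes, matching the new expiresIn above.
  return getSignedUrl(client, command, { expiresIn: 600 });
}
```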