// 231 lines · 6.0 KiB · JavaScript
const express = require("express");
|
|
const router = express.Router();
|
|
const { authenticateToken } = require("../middleware/auth");
|
|
const { uploadPresignLimiter } = require("../middleware/rateLimiter");
|
|
const s3Service = require("../services/s3Service");
|
|
const S3OwnershipService = require("../services/s3OwnershipService");
|
|
const { v4: uuidv4 } = require("uuid");
|
|
const logger = require("../utils/logger");
|
|
const MAX_BATCH_SIZE = 20;
|
|
|
|
/**
|
|
* Middleware to check if S3 is enabled
|
|
*/
|
|
const requireS3Enabled = (req, res, next) => {
|
|
if (!s3Service.isEnabled()) {
|
|
return res.status(503).json({
|
|
error: "File upload service is not available",
|
|
});
|
|
}
|
|
next();
|
|
};
|
|
|
|
/**
|
|
* POST /api/upload/presign
|
|
* Get a presigned URL for uploading a single file to S3
|
|
*/
|
|
router.post(
|
|
"/presign",
|
|
authenticateToken,
|
|
requireS3Enabled,
|
|
uploadPresignLimiter,
|
|
async (req, res, next) => {
|
|
try {
|
|
const { uploadType, contentType, fileName, fileSize } = req.body;
|
|
|
|
// Validate required fields
|
|
if (!uploadType || !contentType || !fileName || !fileSize) {
|
|
return res.status(400).json({ error: "Missing required fields" });
|
|
}
|
|
|
|
const result = await s3Service.getPresignedUploadUrl(
|
|
uploadType,
|
|
contentType,
|
|
fileName,
|
|
fileSize
|
|
);
|
|
|
|
logger.info("Presigned URL generated", {
|
|
userId: req.user.id,
|
|
uploadType,
|
|
key: result.key,
|
|
});
|
|
|
|
res.json(result);
|
|
} catch (error) {
|
|
if (error.message.includes("Invalid")) {
|
|
return res.status(400).json({ error: error.message });
|
|
}
|
|
next(error);
|
|
}
|
|
}
|
|
);
|
|
|
|
/**
|
|
* POST /api/upload/presign-batch
|
|
* Get presigned URLs for uploading multiple files to S3
|
|
* All files in a batch share the same UUID base for coordinated variant uploads
|
|
*/
|
|
router.post(
|
|
"/presign-batch",
|
|
authenticateToken,
|
|
requireS3Enabled,
|
|
uploadPresignLimiter,
|
|
async (req, res, next) => {
|
|
try {
|
|
const { uploadType, files } = req.body;
|
|
|
|
if (!uploadType || !files || !Array.isArray(files)) {
|
|
return res.status(400).json({ error: "Missing required fields" });
|
|
}
|
|
|
|
if (files.length === 0) {
|
|
return res.status(400).json({ error: "No files specified" });
|
|
}
|
|
|
|
if (files.length > MAX_BATCH_SIZE) {
|
|
return res
|
|
.status(400)
|
|
.json({ error: "Maximum ${MAX_BATCH_SIZE} files per batch" });
|
|
}
|
|
|
|
// Validate each file has required fields
|
|
for (const file of files) {
|
|
if (!file.contentType || !file.fileName || !file.fileSize) {
|
|
return res.status(400).json({
|
|
error: "Each file must have contentType, fileName, and fileSize",
|
|
});
|
|
}
|
|
}
|
|
|
|
// Generate one shared UUID for all files in this batch
|
|
const sharedBaseKey = uuidv4();
|
|
|
|
const results = await Promise.all(
|
|
files.map((f) =>
|
|
s3Service.getPresignedUploadUrl(
|
|
uploadType,
|
|
f.contentType,
|
|
f.fileName,
|
|
f.fileSize,
|
|
sharedBaseKey
|
|
)
|
|
)
|
|
);
|
|
|
|
logger.info("Batch presigned URLs generated", {
|
|
userId: req.user.id,
|
|
uploadType,
|
|
count: results.length,
|
|
baseKey: sharedBaseKey,
|
|
});
|
|
|
|
res.json({ uploads: results, baseKey: sharedBaseKey });
|
|
} catch (error) {
|
|
if (error.message.includes("Invalid")) {
|
|
return res.status(400).json({ error: error.message });
|
|
}
|
|
next(error);
|
|
}
|
|
}
|
|
);
|
|
|
|
/**
|
|
* POST /api/upload/confirm
|
|
* Confirm that files have been uploaded to S3
|
|
*/
|
|
router.post(
|
|
"/confirm",
|
|
authenticateToken,
|
|
requireS3Enabled,
|
|
async (req, res, next) => {
|
|
try {
|
|
const { keys } = req.body;
|
|
|
|
if (!keys || !Array.isArray(keys)) {
|
|
return res.status(400).json({ error: "Missing keys array" });
|
|
}
|
|
|
|
if (keys.length === 0) {
|
|
return res.status(400).json({ error: "No keys specified" });
|
|
}
|
|
|
|
const results = await Promise.all(
|
|
keys.map(async (key) => ({
|
|
key,
|
|
exists: await s3Service.verifyUpload(key),
|
|
}))
|
|
);
|
|
|
|
const confirmed = results.filter((r) => r.exists).map((r) => r.key);
|
|
|
|
logger.info("Upload confirmation", {
|
|
userId: req.user.id,
|
|
confirmed: confirmed.length,
|
|
total: keys.length,
|
|
});
|
|
|
|
// Only return confirmed keys, not which ones failed (prevents file existence probing)
|
|
res.json({ confirmed, total: keys.length });
|
|
} catch (error) {
|
|
next(error);
|
|
}
|
|
}
|
|
);
|
|
|
|
/**
|
|
* GET /api/upload/signed-url/*key
|
|
* Get a signed URL for accessing private content (messages, condition-checks)
|
|
* The key is the full path after /signed-url/ (e.g., "messages/uuid.jpg")
|
|
*/
|
|
router.get(
|
|
"/signed-url/*key",
|
|
authenticateToken,
|
|
requireS3Enabled,
|
|
async (req, res, next) => {
|
|
try {
|
|
// Express wildcard params may be string or array - handle both
|
|
let key = req.params.key;
|
|
if (Array.isArray(key)) {
|
|
key = key.join("/");
|
|
}
|
|
if (!key || typeof key !== "string") {
|
|
return res.status(400).json({ error: "Invalid key parameter" });
|
|
}
|
|
// Decode URL-encoded characters (e.g., %2F -> /)
|
|
key = decodeURIComponent(key);
|
|
|
|
// Only allow private folders to use signed URLs
|
|
const isPrivate =
|
|
key.startsWith("messages/") || key.startsWith("condition-checks/");
|
|
if (!isPrivate) {
|
|
return res
|
|
.status(400)
|
|
.json({ error: "Signed URLs only for private content" });
|
|
}
|
|
|
|
// Verify user is authorized to access this file
|
|
const authResult = await S3OwnershipService.canAccessFile(
|
|
key,
|
|
req.user.id
|
|
);
|
|
if (!authResult.authorized) {
|
|
logger.warn("Unauthorized signed URL request", {
|
|
userId: req.user.id,
|
|
key,
|
|
reason: authResult.reason,
|
|
});
|
|
return res.status(403).json({ error: "Access denied" });
|
|
}
|
|
|
|
const url = await s3Service.getPresignedDownloadUrl(key);
|
|
|
|
res.json({ url, expiresIn: 3600 });
|
|
} catch (error) {
|
|
next(error);
|
|
}
|
|
}
|
|
);
|
|
|
|
module.exports = router;
|