This commit is contained in:
jackiettran
2025-12-11 20:05:18 -05:00
parent 11593606aa
commit b0268a2fb7
28 changed files with 2578 additions and 432 deletions

View File

@@ -104,6 +104,13 @@ const burstProtection = createUserBasedRateLimiter(
"Too many requests in a short period. Please slow down."
);
// Upload presign rate limiter - 30 requests per minute
const uploadPresignLimiter = createUserBasedRateLimiter(
60 * 1000, // 1 minute window
30, // 30 presign requests per minute per user
"Too many upload requests. Please slow down."
);
// Authentication rate limiters
const authRateLimiters = {
// Login rate limiter - stricter to prevent brute force
@@ -184,6 +191,9 @@ module.exports = {
// Burst protection
burstProtection,
// Upload rate limiter
uploadPresignLimiter,
// Utility functions
createMapsRateLimiter,
createUserBasedRateLimiter,

1724
backend/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -34,8 +34,10 @@
"author": "",
"license": "ISC",
"dependencies": {
"@aws-sdk/client-s3": "^3.940.0",
"@aws-sdk/client-ses": "^3.896.0",
"@aws-sdk/credential-providers": "^3.901.0",
"@aws-sdk/s3-request-presigner": "^3.940.0",
"@googlemaps/google-maps-services-js": "^3.4.2",
"bcryptjs": "^3.0.2",
"body-parser": "^2.2.0",

View File

@@ -1,77 +1,56 @@
const express = require("express");
const multer = require("multer");
const { authenticateToken } = require("../middleware/auth");
const ConditionCheckService = require("../services/conditionCheckService");
const logger = require("../utils/logger");
const router = express.Router();
// Configure multer for photo uploads
const upload = multer({
dest: "uploads/condition-checks/",
limits: {
fileSize: 10 * 1024 * 1024, // 10MB limit
files: 20, // Maximum 20 files
},
fileFilter: (req, file, cb) => {
// Accept only image files
if (file.mimetype.startsWith("image/")) {
cb(null, true);
} else {
cb(new Error("Only image files are allowed"), false);
}
},
});
// Submit a condition check
router.post(
"/:rentalId",
authenticateToken,
upload.array("imageFilenames"),
async (req, res) => {
try {
const { rentalId } = req.params;
const { checkType, notes } = req.body;
const userId = req.user.id;
router.post("/:rentalId", authenticateToken, async (req, res) => {
try {
const { rentalId } = req.params;
const { checkType, notes, imageFilenames: rawImageFilenames } = req.body;
const userId = req.user.id;
// Get uploaded file paths
const imageFilenames = req.files ? req.files.map((file) => file.path) : [];
// Ensure imageFilenames is an array (S3 keys)
const imageFilenames = Array.isArray(rawImageFilenames)
? rawImageFilenames
: [];
const conditionCheck = await ConditionCheckService.submitConditionCheck(
rentalId,
checkType,
userId,
imageFilenames,
notes
);
const conditionCheck = await ConditionCheckService.submitConditionCheck(
rentalId,
checkType,
userId,
imageFilenames,
notes
);
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Condition check submitted", {
rentalId,
checkType,
userId,
photoCount: imageFilenames.length,
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Condition check submitted", {
rentalId,
checkType,
userId,
photoCount: imageFilenames.length,
});
res.status(201).json({
success: true,
conditionCheck,
});
} catch (error) {
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Error submitting condition check", {
error: error.message,
rentalId: req.params.rentalId,
userId: req.user?.id,
});
res.status(201).json({
success: true,
conditionCheck,
});
} catch (error) {
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Error submitting condition check", {
error: error.message,
rentalId: req.params.rentalId,
userId: req.user?.id,
});
res.status(400).json({
success: false,
error: error.message,
});
}
res.status(400).json({
success: false,
error: error.message,
});
}
);
});
// Get condition checks for a rental
router.get("/:rentalId", authenticateToken, async (req, res) => {

View File

@@ -2,7 +2,6 @@ const express = require('express');
const { Op } = require('sequelize');
const { ForumPost, ForumComment, PostTag, User } = require('../models');
const { authenticateToken, requireAdmin, optionalAuth } = require('../middleware/auth');
const { uploadForumPostImages, uploadForumCommentImages } = require('../middleware/upload');
const logger = require('../utils/logger');
const emailServices = require('../services/email');
const googleMapsService = require('../services/googleMapsService');
@@ -238,21 +237,12 @@ router.get('/posts/:id', optionalAuth, async (req, res, next) => {
});
// POST /api/forum/posts - Create new post
router.post('/posts', authenticateToken, uploadForumPostImages, async (req, res, next) => {
router.post('/posts', authenticateToken, async (req, res, next) => {
try {
let { title, content, category, tags, zipCode, latitude: providedLat, longitude: providedLng } = req.body;
let { title, content, category, tags, zipCode, latitude: providedLat, longitude: providedLng, imageFilenames } = req.body;
// Parse tags if they come as JSON string (from FormData)
if (typeof tags === 'string') {
try {
tags = JSON.parse(tags);
} catch (e) {
tags = [];
}
}
// Extract image filenames if uploaded
const imageFilenames = req.files ? req.files.map(file => file.filename) : [];
// Ensure imageFilenames is an array
imageFilenames = Array.isArray(imageFilenames) ? imageFilenames : [];
// Initialize location fields
let latitude = null;
@@ -913,9 +903,11 @@ router.patch('/posts/:id/accept-answer', authenticateToken, async (req, res, nex
});
// POST /api/forum/posts/:id/comments - Add comment/reply
router.post('/posts/:id/comments', authenticateToken, uploadForumCommentImages, async (req, res, next) => {
router.post('/posts/:id/comments', authenticateToken, async (req, res, next) => {
try {
const { content, parentCommentId } = req.body;
// Support both parentId (new) and parentCommentId (legacy) for backwards compatibility
const { content, parentId, parentCommentId, imageFilenames: rawImageFilenames } = req.body;
const parentIdResolved = parentId || parentCommentId;
const post = await ForumPost.findByPk(req.params.id);
if (!post) {
@@ -928,21 +920,21 @@ router.post('/posts/:id/comments', authenticateToken, uploadForumCommentImages,
}
// Validate parent comment if provided
if (parentCommentId) {
const parentComment = await ForumComment.findByPk(parentCommentId);
if (parentIdResolved) {
const parentComment = await ForumComment.findByPk(parentIdResolved);
if (!parentComment || parentComment.postId !== post.id) {
return res.status(400).json({ error: 'Invalid parent comment' });
}
}
// Extract image filenames if uploaded
const imageFilenames = req.files ? req.files.map(file => file.filename) : [];
// Ensure imageFilenames is an array
const imageFilenames = Array.isArray(rawImageFilenames) ? rawImageFilenames : [];
const comment = await ForumComment.create({
postId: req.params.id,
authorId: req.user.id,
content,
parentCommentId: parentCommentId || null,
parentCommentId: parentIdResolved || null,
imageFilenames
});

View File

@@ -2,7 +2,6 @@ const express = require('express');
const helmet = require('helmet');
const { Message, User } = require('../models');
const { authenticateToken } = require('../middleware/auth');
const { uploadMessageImage } = require('../middleware/upload');
const logger = require('../utils/logger');
const { emitNewMessage, emitMessageRead } = require('../sockets/messageSocket');
const { Op } = require('sequelize');
@@ -237,9 +236,9 @@ router.get('/:id', authenticateToken, async (req, res, next) => {
});
// Send a new message
router.post('/', authenticateToken, uploadMessageImage, async (req, res, next) => {
router.post('/', authenticateToken, async (req, res, next) => {
try {
const { receiverId, content } = req.body;
const { receiverId, content, imageFilename } = req.body;
// Check if receiver exists
const receiver = await User.findByPk(receiverId);
@@ -252,14 +251,11 @@ router.post('/', authenticateToken, uploadMessageImage, async (req, res, next) =
return res.status(400).json({ error: 'Cannot send messages to yourself' });
}
// Extract image filename if uploaded
const imageFilename = req.file ? req.file.filename : null;
const message = await Message.create({
senderId: req.user.id,
receiverId,
content,
imageFilename
imageFilename: imageFilename || null
});
const messageWithSender = await Message.findByPk(message.id, {

214
backend/routes/upload.js Normal file
View File

@@ -0,0 +1,214 @@
const express = require("express");
const router = express.Router();
const { authenticateToken } = require("../middleware/auth");
const { uploadPresignLimiter } = require("../middleware/rateLimiter");
const s3Service = require("../services/s3Service");
const S3OwnershipService = require("../services/s3OwnershipService");
const logger = require("../utils/logger");
const MAX_BATCH_SIZE = 20;
/**
 * Express middleware that gates upload routes on S3 availability.
 * Responds 503 Service Unavailable when the S3 service is not enabled;
 * otherwise defers to the next handler in the chain.
 */
const requireS3Enabled = (req, res, next) => {
  if (s3Service.isEnabled()) {
    next();
    return;
  }
  res.status(503).json({ error: "File upload service is not available" });
};
/**
 * POST /api/upload/presign
 * Issue a presigned S3 PUT URL for a single file upload.
 * Body must contain uploadType, contentType, fileName and fileSize.
 * 400 on missing fields or service-side validation errors; anything
 * else is forwarded to the error-handling middleware.
 */
router.post(
  "/presign",
  authenticateToken,
  requireS3Enabled,
  uploadPresignLimiter,
  async (req, res, next) => {
    try {
      const { uploadType, contentType, fileName, fileSize } = req.body;
      // All four fields are needed for key generation and size enforcement.
      const hasAllFields = uploadType && contentType && fileName && fileSize;
      if (!hasAllFields) {
        return res.status(400).json({ error: "Missing required fields" });
      }
      const presign = await s3Service.getPresignedUploadUrl(
        uploadType,
        contentType,
        fileName,
        fileSize
      );
      logger.info("Presigned URL generated", {
        userId: req.user.id,
        uploadType,
        key: presign.key,
      });
      res.json(presign);
    } catch (error) {
      // Validation failures from the S3 service surface as client errors.
      if (error.message.includes("Invalid")) {
        return res.status(400).json({ error: error.message });
      }
      next(error);
    }
  }
);
/**
 * POST /api/upload/presign-batch
 * Get presigned URLs for uploading multiple files to S3.
 * Body: { uploadType, files: [{ contentType, fileName, fileSize }, ...] }.
 * Batches are capped at MAX_BATCH_SIZE files. Responds { uploads: [...] }.
 * 400 on missing/invalid input; other failures go to error middleware.
 */
router.post(
  "/presign-batch",
  authenticateToken,
  requireS3Enabled,
  uploadPresignLimiter,
  async (req, res, next) => {
    try {
      const { uploadType, files } = req.body;
      if (!uploadType || !files || !Array.isArray(files)) {
        return res.status(400).json({ error: "Missing required fields" });
      }
      if (files.length === 0) {
        return res.status(400).json({ error: "No files specified" });
      }
      if (files.length > MAX_BATCH_SIZE) {
        // BUG FIX: was a double-quoted string, so "${MAX_BATCH_SIZE}" reached
        // clients literally. Backticks make it an actual template literal.
        return res
          .status(400)
          .json({ error: `Maximum ${MAX_BATCH_SIZE} files per batch` });
      }
      // Validate each file has required fields before requesting any URLs.
      for (const file of files) {
        if (!file.contentType || !file.fileName || !file.fileSize) {
          return res.status(400).json({
            error: "Each file must have contentType, fileName, and fileSize",
          });
        }
      }
      // Presign all files concurrently.
      const results = await Promise.all(
        files.map((f) =>
          s3Service.getPresignedUploadUrl(
            uploadType,
            f.contentType,
            f.fileName,
            f.fileSize
          )
        )
      );
      logger.info("Batch presigned URLs generated", {
        userId: req.user.id,
        uploadType,
        count: results.length,
      });
      res.json({ uploads: results });
    } catch (error) {
      // s3Service throws "Invalid ..." messages for client-side mistakes.
      if (error.message.includes("Invalid")) {
        return res.status(400).json({ error: error.message });
      }
      next(error);
    }
  }
);
/**
 * POST /api/upload/confirm
 * Confirm that previously presigned files actually landed in S3.
 * Body: { keys: [...] }. Responds with only the keys that exist;
 * failed keys are deliberately omitted from the response.
 */
router.post(
  "/confirm",
  authenticateToken,
  requireS3Enabled,
  async (req, res, next) => {
    try {
      const { keys } = req.body;
      if (!keys || !Array.isArray(keys)) {
        return res.status(400).json({ error: "Missing keys array" });
      }
      if (keys.length === 0) {
        return res.status(400).json({ error: "No keys specified" });
      }
      // Probe all keys concurrently, then keep those that exist.
      const existence = await Promise.all(
        keys.map((key) => s3Service.verifyUpload(key))
      );
      const confirmed = keys.filter((_, i) => existence[i]);
      logger.info("Upload confirmation", {
        userId: req.user.id,
        confirmed: confirmed.length,
        total: keys.length,
      });
      // Only return confirmed keys, not which ones failed (prevents file existence probing)
      res.json({ confirmed, total: keys.length });
    } catch (error) {
      next(error);
    }
  }
);
/**
 * GET /api/upload/signed-url/*key
 * Get a signed URL for accessing private content (messages, condition-checks).
 * The key is the full path after /signed-url/ (e.g., "messages/uuid.jpg").
 * 400 for non-private folders, 403 when the caller is not a participant.
 */
router.get(
  "/signed-url/*key",
  authenticateToken,
  requireS3Enabled,
  async (req, res, next) => {
    try {
      // BUG FIX: Express 5 named wildcards ("*key") expose the match as an
      // ARRAY of path segments, so calling .startsWith() on it threw a
      // TypeError and every request 500'd. Rejoin segments into the S3 key;
      // a plain string (Express 4 behavior) still passes through unchanged.
      const key = Array.isArray(req.params.key)
        ? req.params.key.join("/")
        : req.params.key;
      // Only allow private folders to use signed URLs
      const isPrivate =
        key.startsWith("messages/") || key.startsWith("condition-checks/");
      if (!isPrivate) {
        return res
          .status(400)
          .json({ error: "Signed URLs only for private content" });
      }
      // Verify user is authorized to access this file
      const authResult = await S3OwnershipService.canAccessFile(
        key,
        req.user.id
      );
      if (!authResult.authorized) {
        logger.warn("Unauthorized signed URL request", {
          userId: req.user.id,
          key,
          reason: authResult.reason,
        });
        return res.status(403).json({ error: "Access denied" });
      }
      const url = await s3Service.getPresignedDownloadUrl(key);
      res.json({ url, expiresIn: 3600 });
    } catch (error) {
      next(error);
    }
  }
);
module.exports = router;

View File

@@ -28,11 +28,13 @@ const stripeRoutes = require("./routes/stripe");
const mapsRoutes = require("./routes/maps");
const conditionCheckRoutes = require("./routes/conditionChecks");
const feedbackRoutes = require("./routes/feedback");
const uploadRoutes = require("./routes/upload");
const PayoutProcessor = require("./jobs/payoutProcessor");
const RentalStatusJob = require("./jobs/rentalStatusJob");
const ConditionCheckReminderJob = require("./jobs/conditionCheckReminder");
const emailServices = require("./services/email");
const s3Service = require("./services/s3Service");
// Socket.io setup
const { authenticateSocket } = require("./sockets/socketAuth");
@@ -159,6 +161,7 @@ app.use("/api/stripe", requireAlphaAccess, stripeRoutes);
app.use("/api/maps", requireAlphaAccess, mapsRoutes);
app.use("/api/condition-checks", requireAlphaAccess, conditionCheckRoutes);
app.use("/api/feedback", requireAlphaAccess, feedbackRoutes);
app.use("/api/upload", requireAlphaAccess, uploadRoutes);
// Error handling middleware (must be last)
app.use(errorLogger);
@@ -195,13 +198,30 @@ sequelize
});
// Fail fast - don't start server if email templates can't load
if (env === "prod" || env === "production") {
logger.error("Cannot start server without email services in production");
logger.error(
"Cannot start server without email services in production"
);
process.exit(1);
} else {
logger.warn("Email services failed to initialize - continuing in dev mode");
logger.warn(
"Email services failed to initialize - continuing in dev mode"
);
}
}
// Initialize S3 service for image uploads
try {
s3Service.initialize();
logger.info("S3 service initialized successfully");
} catch (err) {
logger.error("Failed to initialize S3 service", {
error: err.message,
stack: err.stack,
});
logger.error("Cannot start server without S3 service in production");
process.exit(1);
}
// Start the payout processor
const payoutJobs = PayoutProcessor.startScheduledPayouts();
logger.info("Payout processor started");
@@ -211,7 +231,8 @@ sequelize
logger.info("Rental status job started");
// Start the condition check reminder job
const conditionCheckJobs = ConditionCheckReminderJob.startScheduledReminders();
const conditionCheckJobs =
ConditionCheckReminderJob.startScheduledReminders();
logger.info("Condition check reminder job started");
server.listen(PORT, () => {

View File

@@ -0,0 +1,98 @@
const { Message, ConditionCheck, Rental } = require("../models");
const { Op } = require("sequelize");
/**
 * Service for verifying ownership/access to S3 files.
 * Used to authorize signed URL requests for private content.
 */
class S3OwnershipService {
  /**
   * Extract file type from S3 key.
   * @param {string} key - S3 key like "messages/uuid.jpg"
   * @returns {string|null} - File type or null if unknown
   */
  static getFileTypeFromKey(key) {
    if (!key) return null;
    // The top-level folder of the key determines the content category.
    const [folder] = key.split("/");
    switch (folder) {
      case "profiles":
        return "profile";
      case "items":
        return "item";
      case "messages":
        return "message";
      case "forum":
        return "forum";
      case "condition-checks":
        return "condition-check";
      default:
        return null;
    }
  }

  /**
   * Verify if a user can access a file.
   * @param {string} key - S3 key
   * @param {string} userId - User ID making the request
   * @returns {Promise<{authorized: boolean, reason?: string}>}
   */
  static async canAccessFile(key, userId) {
    const fileType = this.getFileTypeFromKey(key);
    // Profile, item and forum images are public folders — no ownership check.
    if (fileType === "profile" || fileType === "item" || fileType === "forum") {
      return { authorized: true };
    }
    if (fileType === "message") {
      return this.verifyMessageAccess(key, userId);
    }
    if (fileType === "condition-check") {
      return this.verifyConditionCheckAccess(key, userId);
    }
    return { authorized: false, reason: "Unknown file type" };
  }

  /**
   * Verify message image access - user must be sender OR receiver.
   * @param {string} key - S3 key
   * @param {string} userId - User ID making the request
   * @returns {Promise<{authorized: boolean, reason?: string}>}
   */
  static async verifyMessageAccess(key, userId) {
    const participant = await Message.findOne({
      where: {
        imageFilename: key,
        [Op.or]: [{ senderId: userId }, { receiverId: userId }],
      },
    });
    return participant
      ? { authorized: true, reason: null }
      : { authorized: false, reason: "Not a participant in this message" };
  }

  /**
   * Verify condition check image access - user must be rental owner OR renter.
   * @param {string} key - S3 key
   * @param {string} userId - User ID making the request
   * @returns {Promise<{authorized: boolean, reason?: string}>}
   */
  static async verifyConditionCheckAccess(key, userId) {
    const match = await ConditionCheck.findOne({
      where: {
        // imageFilenames is an array column; match checks containing this key.
        imageFilenames: { [Op.contains]: [key] },
      },
      include: [
        {
          model: Rental,
          as: "rental",
          where: {
            [Op.or]: [{ ownerId: userId }, { renterId: userId }],
          },
        },
      ],
    });
    return match
      ? { authorized: true, reason: null }
      : { authorized: false, reason: "Not a participant in this rental" };
  }
}
module.exports = S3OwnershipService;

View File

@@ -0,0 +1,238 @@
const {
S3Client,
PutObjectCommand,
GetObjectCommand,
HeadObjectCommand,
} = require("@aws-sdk/client-s3");
const { getSignedUrl } = require("@aws-sdk/s3-request-presigner");
const { getAWSConfig } = require("../config/aws");
const { v4: uuidv4 } = require("uuid");
const path = require("path");
const logger = require("../utils/logger");
// Cache-Control: 24 hours for public content (allows moderation takedowns to propagate)
// Private content (messages, condition-checks) uses presigned URLs so cache doesn't matter as much
const DEFAULT_CACHE_MAX_AGE = 86400; // 24 hours in seconds
// Per-upload-type policy: destination folder in the bucket, maximum accepted
// size in bytes, browser cache lifetime, and whether objects get a public
// (unsigned) URL or must be fetched via presigned download URLs.
const UPLOAD_CONFIGS = {
  profile: {
    folder: "profiles",
    maxSize: 5 * 1024 * 1024,
    cacheMaxAge: DEFAULT_CACHE_MAX_AGE,
    public: true,
  },
  item: {
    folder: "items",
    maxSize: 10 * 1024 * 1024,
    cacheMaxAge: DEFAULT_CACHE_MAX_AGE,
    public: true,
  },
  message: {
    folder: "messages",
    maxSize: 5 * 1024 * 1024,
    cacheMaxAge: 3600, // private content: short cache window
    public: false,
  },
  forum: {
    folder: "forum",
    maxSize: 10 * 1024 * 1024,
    cacheMaxAge: DEFAULT_CACHE_MAX_AGE,
    public: true,
  },
  "condition-check": {
    folder: "condition-checks",
    maxSize: 10 * 1024 * 1024,
    cacheMaxAge: 3600, // private content: short cache window
    public: false,
  },
};
// Only common raster image MIME types are accepted for presigning.
const ALLOWED_TYPES = [
  "image/jpeg",
  "image/jpg",
  "image/png",
  "image/gif",
  "image/webp",
];
const PRESIGN_EXPIRY = 300; // 5 minutes — how long an upload URL stays valid
/**
 * Thin wrapper around the AWS SDK v3 S3 client for presigned upload and
 * download URLs. A single shared instance is exported; initialize() must be
 * called once at startup before any other method. When S3 is disabled
 * (S3_ENABLED !== "true"), methods either throw or return false/null.
 */
class S3Service {
  constructor() {
    // All state is populated by initialize(); until then the service is off.
    this.client = null;
    this.bucket = null;
    this.region = null;
    this.enabled = false;
  }
  /**
   * Check if S3 is enabled
   * @returns {boolean}
   */
  isEnabled() {
    return this.enabled;
  }
  /**
   * Read S3_* environment configuration and build the S3 client.
   * NOTE(review): calls process.exit(1) on misconfiguration — this terminates
   * the whole process from library code, so callers cannot catch failures.
   */
  initialize() {
    if (process.env.S3_ENABLED !== "true") {
      logger.info("S3 Service disabled (S3_ENABLED !== true)");
      this.enabled = false;
      return;
    }
    // S3 is enabled - validate required configuration
    const bucket = process.env.S3_BUCKET;
    if (!bucket) {
      logger.error("S3_ENABLED=true but S3_BUCKET is not set");
      process.exit(1);
    }
    try {
      const config = getAWSConfig();
      this.client = new S3Client({
        ...config,
        // Disable automatic checksums - browser uploads can't calculate them
        requestChecksumCalculation: "WHEN_REQUIRED",
      });
      this.bucket = bucket;
      // Fall back to us-east-1 when the AWS config supplies no region.
      this.region = config.region || "us-east-1";
      this.enabled = true;
      logger.info("S3 Service initialized", {
        bucket: this.bucket,
        region: this.region,
      });
    } catch (error) {
      logger.error("Failed to initialize S3 Service", { error: error.message });
      process.exit(1);
    }
  }
  /**
   * Get a presigned URL for uploading a file directly to S3
   * @param {string} uploadType - Type of upload (profile, item, message, forum, condition-check)
   * @param {string} contentType - MIME type of the file
   * @param {string} fileName - Original filename (used for extension)
   * @param {number} fileSize - File size in bytes (required for size enforcement)
   * @returns {Promise<{uploadUrl: string, key: string, publicUrl: string, expiresAt: Date}>}
   * @throws {Error} when S3 is disabled, or uploadType/contentType/fileSize
   *   fail validation (these messages start with "Invalid"/"File", which the
   *   upload routes map to HTTP 400)
   */
  async getPresignedUploadUrl(uploadType, contentType, fileName, fileSize) {
    if (!this.enabled) {
      throw new Error("S3 storage is not enabled");
    }
    const config = UPLOAD_CONFIGS[uploadType];
    if (!config) {
      throw new Error(`Invalid upload type: ${uploadType}`);
    }
    if (!ALLOWED_TYPES.includes(contentType)) {
      throw new Error(`Invalid content type: ${contentType}`);
    }
    if (!fileSize || fileSize <= 0) {
      throw new Error("File size is required");
    }
    if (fileSize > config.maxSize) {
      throw new Error(
        `File too large. Maximum size is ${config.maxSize / (1024 * 1024)}MB`
      );
    }
    // Random UUID key prevents collisions and hides original filenames;
    // only the extension of the client-supplied name is kept.
    const ext = path.extname(fileName) || this.getExtFromMime(contentType);
    const key = `${config.folder}/${uuidv4()}${ext}`;
    const cacheDirective = config.public ? "public" : "private";
    const command = new PutObjectCommand({
      Bucket: this.bucket,
      Key: key,
      ContentType: contentType,
      // Enforce exact file size
      // NOTE(review): ContentLength is signed into the presigned request here;
      // presumably S3 rejects uploads whose actual size differs — TODO confirm
      // that mismatched Content-Length headers fail the signature check.
      ContentLength: fileSize,
      CacheControl: `${cacheDirective}, max-age=${config.cacheMaxAge}`,
    });
    const uploadUrl = await getSignedUrl(this.client, command, {
      expiresIn: PRESIGN_EXPIRY,
    });
    return {
      uploadUrl,
      key,
      // Private folders get no publicUrl — consumers must request a signed URL.
      publicUrl: config.public
        ? `https://${this.bucket}.s3.${this.region}.amazonaws.com/${key}`
        : null,
      expiresAt: new Date(Date.now() + PRESIGN_EXPIRY * 1000),
    };
  }
  /**
   * Get a presigned URL for downloading a private file from S3
   * @param {string} key - S3 object key
   * @param {number} expiresIn - Expiration time in seconds (default 1 hour)
   * @returns {Promise<string>}
   * @throws {Error} when S3 is disabled
   */
  async getPresignedDownloadUrl(key, expiresIn = 3600) {
    if (!this.enabled) {
      throw new Error("S3 storage is not enabled");
    }
    const command = new GetObjectCommand({
      Bucket: this.bucket,
      Key: key,
    });
    return getSignedUrl(this.client, command, { expiresIn });
  }
  /**
   * Get the public URL for a file (only for public folders)
   * @param {string} key - S3 object key
   * @returns {string|null} null when S3 is disabled
   */
  getPublicUrl(key) {
    if (!this.enabled) {
      return null;
    }
    return `https://${this.bucket}.s3.${this.region}.amazonaws.com/${key}`;
  }
  /**
   * Verify that a file exists in S3
   * @param {string} key - S3 object key
   * @returns {Promise<boolean>} false when S3 is disabled or the object is missing
   * @throws rethrows any S3 error other than a 404/NotFound
   */
  async verifyUpload(key) {
    if (!this.enabled) {
      return false;
    }
    try {
      // HEAD request: checks existence without downloading the object body.
      await this.client.send(
        new HeadObjectCommand({
          Bucket: this.bucket,
          Key: key,
        })
      );
      return true;
    } catch (err) {
      // Missing object surfaces as NotFound (name) or a 404 status code.
      if (err.name === "NotFound" || err.$metadata?.httpStatusCode === 404) {
        return false;
      }
      throw err;
    }
  }
  /**
   * Get file extension from MIME type
   * @param {string} mime - MIME type
   * @returns {string} extension including leading dot; defaults to ".jpg"
   */
  getExtFromMime(mime) {
    const map = {
      "image/jpeg": ".jpg",
      "image/jpg": ".jpg",
      "image/png": ".png",
      "image/gif": ".gif",
      "image/webp": ".webp",
    };
    return map[mime] || ".jpg";
  }
}
const s3Service = new S3Service();
module.exports = s3Service;