image processing lambda
This commit is contained in:
95
backend/migrations/20260105161056-create-image-metadata.js
Normal file
95
backend/migrations/20260105161056-create-image-metadata.js
Normal file
@@ -0,0 +1,95 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
up: async (queryInterface, Sequelize) => {
|
||||
await queryInterface.createTable("ImageMetadata", {
|
||||
id: {
|
||||
type: Sequelize.UUID,
|
||||
defaultValue: Sequelize.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
s3Key: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: false,
|
||||
unique: true,
|
||||
},
|
||||
latitude: {
|
||||
type: Sequelize.DECIMAL(10, 8),
|
||||
allowNull: true,
|
||||
},
|
||||
longitude: {
|
||||
type: Sequelize.DECIMAL(11, 8),
|
||||
allowNull: true,
|
||||
},
|
||||
cameraMake: {
|
||||
type: Sequelize.STRING(100),
|
||||
allowNull: true,
|
||||
},
|
||||
cameraModel: {
|
||||
type: Sequelize.STRING(100),
|
||||
allowNull: true,
|
||||
},
|
||||
cameraSoftware: {
|
||||
type: Sequelize.STRING(100),
|
||||
allowNull: true,
|
||||
},
|
||||
dateTaken: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
width: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
height: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
orientation: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
fileSize: {
|
||||
type: Sequelize.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
processingStatus: {
|
||||
type: Sequelize.ENUM("pending", "processing", "completed", "failed"),
|
||||
allowNull: false,
|
||||
defaultValue: "pending",
|
||||
},
|
||||
processedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
errorMessage: {
|
||||
type: Sequelize.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
createdAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
updatedAt: {
|
||||
type: Sequelize.DATE,
|
||||
allowNull: false,
|
||||
},
|
||||
});
|
||||
|
||||
// Add indexes
|
||||
await queryInterface.addIndex("ImageMetadata", ["s3Key"], {
|
||||
unique: true,
|
||||
name: "image_metadata_s3_key_unique",
|
||||
});
|
||||
await queryInterface.addIndex("ImageMetadata", ["latitude", "longitude"], {
|
||||
name: "image_metadata_geo",
|
||||
});
|
||||
await queryInterface.addIndex("ImageMetadata", ["processingStatus"], {
|
||||
name: "image_metadata_processing_status",
|
||||
});
|
||||
},
|
||||
|
||||
down: async (queryInterface, Sequelize) => {
|
||||
await queryInterface.dropTable("ImageMetadata");
|
||||
},
|
||||
};
|
||||
88
backend/models/ImageMetadata.js
Normal file
88
backend/models/ImageMetadata.js
Normal file
@@ -0,0 +1,88 @@
|
||||
const { DataTypes } = require("sequelize");
|
||||
const sequelize = require("../config/database");
|
||||
|
||||
const ImageMetadata = sequelize.define(
|
||||
"ImageMetadata",
|
||||
{
|
||||
id: {
|
||||
type: DataTypes.UUID,
|
||||
defaultValue: DataTypes.UUIDV4,
|
||||
primaryKey: true,
|
||||
},
|
||||
s3Key: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: false,
|
||||
unique: true,
|
||||
},
|
||||
latitude: {
|
||||
type: DataTypes.DECIMAL(10, 8),
|
||||
allowNull: true,
|
||||
},
|
||||
longitude: {
|
||||
type: DataTypes.DECIMAL(11, 8),
|
||||
allowNull: true,
|
||||
},
|
||||
cameraMake: {
|
||||
type: DataTypes.STRING(100),
|
||||
allowNull: true,
|
||||
},
|
||||
cameraModel: {
|
||||
type: DataTypes.STRING(100),
|
||||
allowNull: true,
|
||||
},
|
||||
cameraSoftware: {
|
||||
type: DataTypes.STRING(100),
|
||||
allowNull: true,
|
||||
},
|
||||
dateTaken: {
|
||||
type: DataTypes.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
width: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
height: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
orientation: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
fileSize: {
|
||||
type: DataTypes.INTEGER,
|
||||
allowNull: true,
|
||||
},
|
||||
processingStatus: {
|
||||
type: DataTypes.ENUM("pending", "processing", "completed", "failed"),
|
||||
allowNull: false,
|
||||
defaultValue: "pending",
|
||||
},
|
||||
processedAt: {
|
||||
type: DataTypes.DATE,
|
||||
allowNull: true,
|
||||
},
|
||||
errorMessage: {
|
||||
type: DataTypes.TEXT,
|
||||
allowNull: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
indexes: [
|
||||
{
|
||||
fields: ["s3Key"],
|
||||
unique: true,
|
||||
},
|
||||
{
|
||||
fields: ["latitude", "longitude"],
|
||||
},
|
||||
{
|
||||
fields: ["processingStatus"],
|
||||
},
|
||||
],
|
||||
}
|
||||
);
|
||||
|
||||
module.exports = ImageMetadata;
|
||||
@@ -10,6 +10,7 @@ const UserAddress = require("./UserAddress");
|
||||
const ConditionCheck = require("./ConditionCheck");
|
||||
const AlphaInvitation = require("./AlphaInvitation");
|
||||
const Feedback = require("./Feedback");
|
||||
const ImageMetadata = require("./ImageMetadata");
|
||||
|
||||
User.hasMany(Item, { as: "ownedItems", foreignKey: "ownerId" });
|
||||
Item.belongsTo(User, { as: "owner", foreignKey: "ownerId" });
|
||||
@@ -91,4 +92,5 @@ module.exports = {
|
||||
ConditionCheck,
|
||||
AlphaInvitation,
|
||||
Feedback,
|
||||
ImageMetadata,
|
||||
};
|
||||
|
||||
@@ -106,6 +106,15 @@ class S3Service {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if image processing (metadata stripping) is enabled
|
||||
* When enabled, uploads go to staging/ prefix and Lambda processes them
|
||||
* @returns {boolean}
|
||||
*/
|
||||
isImageProcessingEnabled() {
|
||||
return process.env.IMAGE_PROCESSING_ENABLED === "true";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a presigned URL for uploading a file directly to S3
|
||||
* @param {string} uploadType - Type of upload (profile, item, message, forum, condition-check)
|
||||
@@ -113,7 +122,7 @@ class S3Service {
|
||||
* @param {string} fileName - Original filename (used for extension)
|
||||
* @param {number} fileSize - File size in bytes (required for size enforcement)
|
||||
* @param {string} [baseKey] - Optional base key (UUID) for coordinated variant uploads
|
||||
* @returns {Promise<{uploadUrl: string, key: string, publicUrl: string, expiresAt: Date}>}
|
||||
* @returns {Promise<{uploadUrl: string, key: string, stagingKey: string|null, publicUrl: string, expiresAt: Date}>}
|
||||
*/
|
||||
async getPresignedUploadUrl(uploadType, contentType, fileName, fileSize, baseKey = null) {
|
||||
if (!this.enabled) {
|
||||
@@ -150,12 +159,19 @@ class S3Service {
|
||||
|
||||
// Use provided baseKey or generate new UUID
|
||||
const uuid = baseKey || uuidv4();
|
||||
const key = `${config.folder}/${uuid}${suffix}${ext}`;
|
||||
|
||||
// Final key is where the processed image will be (what frontend stores in DB)
|
||||
const finalKey = `${config.folder}/${uuid}${suffix}${ext}`;
|
||||
|
||||
// When image processing is enabled, upload to staging/ prefix
|
||||
// Lambda will process and move to final location
|
||||
const useStaging = this.isImageProcessingEnabled();
|
||||
const uploadKey = useStaging ? `staging/${finalKey}` : finalKey;
|
||||
|
||||
const cacheDirective = config.public ? "public" : "private";
|
||||
const command = new PutObjectCommand({
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
Key: uploadKey,
|
||||
ContentType: contentType,
|
||||
ContentLength: fileSize, // Enforce exact file size
|
||||
CacheControl: `${cacheDirective}, max-age=${config.cacheMaxAge}`,
|
||||
@@ -167,9 +183,10 @@ class S3Service {
|
||||
|
||||
return {
|
||||
uploadUrl,
|
||||
key,
|
||||
key: finalKey, // Frontend stores this in database
|
||||
stagingKey: useStaging ? uploadKey : null, // Actual upload location (if staging enabled)
|
||||
publicUrl: config.public
|
||||
? `https://${this.bucket}.s3.${this.region}.amazonaws.com/${key}`
|
||||
? `https://${this.bucket}.s3.${this.region}.amazonaws.com/${finalKey}`
|
||||
: null,
|
||||
expiresAt: new Date(Date.now() + PRESIGN_EXPIRY * 1000),
|
||||
};
|
||||
|
||||
@@ -26,7 +26,8 @@ export const getPublicImageUrl = (
|
||||
|
||||
export interface PresignedUrlResponse {
|
||||
uploadUrl: string;
|
||||
key: string;
|
||||
key: string; // Final key for database storage
|
||||
stagingKey: string | null; // Actual upload location (when image processing enabled)
|
||||
publicUrl: string;
|
||||
expiresAt: string;
|
||||
}
|
||||
@@ -152,13 +153,15 @@ export async function uploadFile(
|
||||
// Upload to S3
|
||||
await uploadToS3(file, presigned.uploadUrl, options);
|
||||
|
||||
// Confirm upload
|
||||
const { confirmed } = await confirmUploads([presigned.key]);
|
||||
// Confirm upload - use stagingKey if present (image processing enabled), else key
|
||||
const confirmKey = presigned.stagingKey || presigned.key;
|
||||
const { confirmed } = await confirmUploads([confirmKey]);
|
||||
|
||||
if (confirmed.length === 0) {
|
||||
throw new Error("Upload verification failed");
|
||||
}
|
||||
|
||||
// Return final key for database storage
|
||||
return { key: presigned.key, publicUrl: presigned.publicUrl };
|
||||
}
|
||||
|
||||
@@ -253,20 +256,20 @@ export async function uploadImageWithVariants(
|
||||
)
|
||||
);
|
||||
|
||||
// Confirm all uploads
|
||||
const keys = presignedUrls.map((p) => p.key);
|
||||
await confirmUploads(keys);
|
||||
// Confirm all uploads - use stagingKey if present (image processing enabled), else key
|
||||
const confirmKeys = presignedUrls.map((p) => p.stagingKey || p.key);
|
||||
await confirmUploads(confirmKeys);
|
||||
|
||||
if (onProgress) onProgress(100);
|
||||
|
||||
// Use the baseKey returned by the backend (shared UUID for all variants)
|
||||
// The stored key format is: items/uuid.jpg (original), and variants are items/uuid_th.jpg, items/uuid_md.jpg
|
||||
const originalKey = keys.find((k) => !k.includes("_th") && !k.includes("_md")) || keys[0];
|
||||
// Use the final keys for database storage (not staging keys)
|
||||
const finalKeys = presignedUrls.map((p) => p.key);
|
||||
const originalKey = finalKeys.find((k) => !k.includes("_th") && !k.includes("_md")) || finalKeys[0];
|
||||
|
||||
return {
|
||||
baseKey: originalKey,
|
||||
publicUrl: getPublicImageUrl(originalKey),
|
||||
variants: keys,
|
||||
variants: finalKeys,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import "source-map-support/register";
|
||||
import * as cdk from "aws-cdk-lib";
|
||||
import { ConditionCheckLambdaStack } from "../lib/condition-check-lambda-stack";
|
||||
import { ImageProcessorLambdaStack } from "../lib/image-processor-lambda-stack";
|
||||
|
||||
const app = new cdk.App();
|
||||
|
||||
@@ -43,7 +44,7 @@ if (!config) {
|
||||
throw new Error(`Unknown environment: ${environment}`);
|
||||
}
|
||||
|
||||
// Create the Lambda stack
|
||||
// Create the Condition Check Lambda stack
|
||||
new ConditionCheckLambdaStack(app, `ConditionCheckLambdaStack-${environment}`, {
|
||||
environment,
|
||||
databaseUrl: config.databaseUrl,
|
||||
@@ -57,7 +58,24 @@ new ConditionCheckLambdaStack(app, `ConditionCheckLambdaStack-${environment}`, {
|
||||
description: `Condition Check Reminder Lambda infrastructure (${environment})`,
|
||||
tags: {
|
||||
Environment: environment,
|
||||
Project: "rentall",
|
||||
Project: "village-share",
|
||||
Service: "condition-check-reminder",
|
||||
},
|
||||
});
|
||||
|
||||
// Create the Image Processor Lambda stack
|
||||
new ImageProcessorLambdaStack(app, `ImageProcessorLambdaStack-${environment}`, {
|
||||
environment,
|
||||
databaseUrl: config.databaseUrl,
|
||||
frontendUrl: config.frontendUrl,
|
||||
env: {
|
||||
account: process.env.CDK_DEFAULT_ACCOUNT,
|
||||
region: process.env.CDK_DEFAULT_REGION || "us-east-1",
|
||||
},
|
||||
description: `Image Processor Lambda infrastructure (${environment})`,
|
||||
tags: {
|
||||
Environment: environment,
|
||||
Project: "village-share",
|
||||
Service: "image-processor",
|
||||
},
|
||||
});
|
||||
|
||||
219
infrastructure/cdk/lib/image-processor-lambda-stack.ts
Normal file
219
infrastructure/cdk/lib/image-processor-lambda-stack.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import * as cdk from "aws-cdk-lib";
|
||||
import * as lambda from "aws-cdk-lib/aws-lambda";
|
||||
import * as iam from "aws-cdk-lib/aws-iam";
|
||||
import * as s3 from "aws-cdk-lib/aws-s3";
|
||||
import * as s3n from "aws-cdk-lib/aws-s3-notifications";
|
||||
import * as sqs from "aws-cdk-lib/aws-sqs";
|
||||
import { Construct } from "constructs";
|
||||
import * as path from "path";
|
||||
|
||||
interface ImageProcessorLambdaStackProps extends cdk.StackProps {
|
||||
/**
|
||||
* Environment name (staging, prod)
|
||||
*/
|
||||
environment: string;
|
||||
|
||||
/**
|
||||
* Database URL for the Lambda
|
||||
*/
|
||||
databaseUrl: string;
|
||||
|
||||
/**
|
||||
* Frontend URL for CORS configuration
|
||||
*/
|
||||
frontendUrl: string;
|
||||
}
|
||||
|
||||
export class ImageProcessorLambdaStack extends cdk.Stack {
|
||||
/**
|
||||
* The Lambda function for image processing
|
||||
*/
|
||||
public readonly lambdaFunction: lambda.Function;
|
||||
|
||||
/**
|
||||
* The S3 bucket for image uploads
|
||||
*/
|
||||
public readonly uploadsBucket: s3.Bucket;
|
||||
|
||||
/**
|
||||
* Dead letter queue for failed Lambda invocations
|
||||
*/
|
||||
public readonly deadLetterQueue: sqs.Queue;
|
||||
|
||||
constructor(
|
||||
scope: Construct,
|
||||
id: string,
|
||||
props: ImageProcessorLambdaStackProps
|
||||
) {
|
||||
super(scope, id, props);
|
||||
|
||||
const { environment, databaseUrl, frontendUrl } = props;
|
||||
|
||||
// Dead Letter Queue for failed Lambda invocations
|
||||
this.deadLetterQueue = new sqs.Queue(this, "ImageProcessorDLQ", {
|
||||
queueName: `image-processor-dlq-${environment}`,
|
||||
retentionPeriod: cdk.Duration.days(14),
|
||||
});
|
||||
|
||||
// S3 bucket for uploads
|
||||
this.uploadsBucket = new s3.Bucket(this, "UploadsBucket", {
|
||||
bucketName: `village-share-${environment}`,
|
||||
versioned: true,
|
||||
encryption: s3.BucketEncryption.S3_MANAGED,
|
||||
blockPublicAccess: new s3.BlockPublicAccess({
|
||||
blockPublicAcls: true,
|
||||
blockPublicPolicy: false, // Allow bucket policy for public reads
|
||||
ignorePublicAcls: true,
|
||||
restrictPublicBuckets: false,
|
||||
}),
|
||||
cors: [
|
||||
{
|
||||
allowedMethods: [
|
||||
s3.HttpMethods.GET,
|
||||
s3.HttpMethods.PUT,
|
||||
s3.HttpMethods.POST,
|
||||
],
|
||||
allowedOrigins: [frontendUrl, "http://localhost:3000"],
|
||||
allowedHeaders: ["*"],
|
||||
exposedHeaders: ["ETag"],
|
||||
maxAge: 3600,
|
||||
},
|
||||
],
|
||||
lifecycleRules: [
|
||||
{
|
||||
// Clean up incomplete multipart uploads
|
||||
abortIncompleteMultipartUploadAfter: cdk.Duration.days(1),
|
||||
},
|
||||
{
|
||||
// Delete staging files that weren't processed after 7 days
|
||||
prefix: "staging/",
|
||||
expiration: cdk.Duration.days(7),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// Bucket policy: allow public read for non-staging files
|
||||
this.uploadsBucket.addToResourcePolicy(
|
||||
new iam.PolicyStatement({
|
||||
effect: iam.Effect.ALLOW,
|
||||
principals: [new iam.AnyPrincipal()],
|
||||
actions: ["s3:GetObject"],
|
||||
resources: [
|
||||
`${this.uploadsBucket.bucketArn}/profiles/*`,
|
||||
`${this.uploadsBucket.bucketArn}/items/*`,
|
||||
`${this.uploadsBucket.bucketArn}/forum/*`,
|
||||
],
|
||||
})
|
||||
);
|
||||
|
||||
// Lambda execution role
|
||||
const lambdaRole = new iam.Role(this, "ImageProcessorLambdaRole", {
|
||||
roleName: `image-processor-lambda-role-${environment}`,
|
||||
assumedBy: new iam.ServicePrincipal("lambda.amazonaws.com"),
|
||||
description: "Execution role for Image Processor Lambda",
|
||||
});
|
||||
|
||||
// CloudWatch Logs permissions
|
||||
lambdaRole.addToPolicy(
|
||||
new iam.PolicyStatement({
|
||||
effect: iam.Effect.ALLOW,
|
||||
actions: [
|
||||
"logs:CreateLogGroup",
|
||||
"logs:CreateLogStream",
|
||||
"logs:PutLogEvents",
|
||||
],
|
||||
resources: ["*"],
|
||||
})
|
||||
);
|
||||
|
||||
// S3 permissions
|
||||
lambdaRole.addToPolicy(
|
||||
new iam.PolicyStatement({
|
||||
effect: iam.Effect.ALLOW,
|
||||
actions: [
|
||||
"s3:GetObject",
|
||||
"s3:PutObject",
|
||||
"s3:DeleteObject",
|
||||
"s3:HeadObject",
|
||||
],
|
||||
resources: [`${this.uploadsBucket.bucketArn}/*`],
|
||||
})
|
||||
);
|
||||
|
||||
// Lambda function
|
||||
this.lambdaFunction = new lambda.Function(this, "ImageProcessorLambda", {
|
||||
functionName: `image-processor-${environment}`,
|
||||
runtime: lambda.Runtime.NODEJS_20_X,
|
||||
handler: "index.handler",
|
||||
code: lambda.Code.fromAsset(
|
||||
path.join(__dirname, "../../../lambdas/imageProcessor"),
|
||||
{
|
||||
bundling: {
|
||||
image: lambda.Runtime.NODEJS_20_X.bundlingImage,
|
||||
command: [
|
||||
"bash",
|
||||
"-c",
|
||||
[
|
||||
"cp -r /asset-input/* /asset-output/",
|
||||
"cd /asset-output",
|
||||
"npm install --omit=dev",
|
||||
// Copy shared modules
|
||||
"mkdir -p shared",
|
||||
"cp -r /asset-input/../shared/* shared/",
|
||||
"cd shared && npm install --omit=dev",
|
||||
].join(" && "),
|
||||
],
|
||||
},
|
||||
}
|
||||
),
|
||||
role: lambdaRole,
|
||||
timeout: cdk.Duration.seconds(60),
|
||||
memorySize: 1024, // Higher memory for image processing
|
||||
environment: {
|
||||
NODE_ENV: environment,
|
||||
DATABASE_URL: databaseUrl,
|
||||
S3_BUCKET: this.uploadsBucket.bucketName,
|
||||
AWS_REGION: this.region,
|
||||
LOG_LEVEL: environment === "prod" ? "info" : "debug",
|
||||
},
|
||||
deadLetterQueue: this.deadLetterQueue,
|
||||
retryAttempts: 2,
|
||||
description:
|
||||
"Processes uploaded images: extracts metadata and strips EXIF",
|
||||
});
|
||||
|
||||
// S3 event notification for staging uploads
|
||||
this.uploadsBucket.addEventNotification(
|
||||
s3.EventType.OBJECT_CREATED,
|
||||
new s3n.LambdaDestination(this.lambdaFunction),
|
||||
{
|
||||
prefix: "staging/",
|
||||
}
|
||||
);
|
||||
|
||||
// Outputs
|
||||
new cdk.CfnOutput(this, "LambdaFunctionArn", {
|
||||
value: this.lambdaFunction.functionArn,
|
||||
description: "ARN of the Image Processor Lambda",
|
||||
exportName: `ImageProcessorLambdaArn-${environment}`,
|
||||
});
|
||||
|
||||
new cdk.CfnOutput(this, "UploadsBucketName", {
|
||||
value: this.uploadsBucket.bucketName,
|
||||
description: "Name of the uploads S3 bucket",
|
||||
exportName: `UploadsBucketName-${environment}`,
|
||||
});
|
||||
|
||||
new cdk.CfnOutput(this, "UploadsBucketArn", {
|
||||
value: this.uploadsBucket.bucketArn,
|
||||
description: "ARN of the uploads S3 bucket",
|
||||
exportName: `UploadsBucketArn-${environment}`,
|
||||
});
|
||||
|
||||
new cdk.CfnOutput(this, "DLQUrl", {
|
||||
value: this.deadLetterQueue.queueUrl,
|
||||
description: "URL of the Dead Letter Queue",
|
||||
exportName: `ImageProcessorDLQUrl-${environment}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
487
infrastructure/cdk/package-lock.json
generated
Normal file
487
infrastructure/cdk/package-lock.json
generated
Normal file
@@ -0,0 +1,487 @@
|
||||
{
|
||||
"name": "rentall-infrastructure",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "rentall-infrastructure",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"aws-cdk-lib": "^2.170.0",
|
||||
"constructs": "^10.4.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
"aws-cdk": "^2.170.0",
|
||||
"typescript": "^5.7.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-cdk/asset-awscli-v1": {
|
||||
"version": "2.2.258",
|
||||
"resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.258.tgz",
|
||||
"integrity": "sha512-TL3I9cIue0bAsuwrmjgjAQaEH6JL09y49FVQMDhrz4jJ2iPKuHtdrYd7ydm02t1YZdPZE2M0VNj6VD4fGIFpvw==",
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/@aws-cdk/asset-node-proxy-agent-v6": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.0.tgz",
|
||||
"integrity": "sha512-7bY3J8GCVxLupn/kNmpPc5VJz8grx+4RKfnnJiO1LG+uxkZfANZG3RMHhE+qQxxwkyQ9/MfPtTpf748UhR425A==",
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/@aws-cdk/cloud-assembly-schema": {
|
||||
"version": "48.20.0",
|
||||
"resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-48.20.0.tgz",
|
||||
"integrity": "sha512-+eeiav9LY4wbF/EFuCt/vfvi/Zoxo8bf94PW5clbMraChEliq83w4TbRVy0jB9jE0v1ooFTtIjSQkowSPkfISg==",
|
||||
"bundleDependencies": [
|
||||
"jsonschema",
|
||||
"semver"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"jsonschema": "~1.4.1",
|
||||
"semver": "^7.7.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": {
|
||||
"version": "1.4.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": {
|
||||
"version": "7.7.2",
|
||||
"inBundle": true,
|
||||
"license": "ISC",
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "22.19.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.6.tgz",
|
||||
"integrity": "sha512-qm+G8HuG6hOHQigsi7VGuLjUVu6TtBo/F05zvX04Mw2uCg9Dv0Qxy3Qw7j41SidlTcl5D/5yg0SEZqOB+EqZnQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"undici-types": "~6.21.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk": {
|
||||
"version": "2.1100.3",
|
||||
"resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.1100.3.tgz",
|
||||
"integrity": "sha512-jeSamF+IwPJKhqMir7Cw+2IoeHsmNFc/SoDAlOS9BYM8Wrd0Q1jJd3GcJOFzsMcWv9mcBAP5o23amyKHu03dXA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"cdk": "bin/cdk"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib": {
|
||||
"version": "2.234.1",
|
||||
"resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.234.1.tgz",
|
||||
"integrity": "sha512-2oNqAA1qjF9xHCom6yHuY8KE6UltK7pTg3egf/t1+C6/OFEaw9+jyhCWmTasGmvjyQSkbvKiCPZco0l+XVyxiQ==",
|
||||
"bundleDependencies": [
|
||||
"@balena/dockerignore",
|
||||
"case",
|
||||
"fs-extra",
|
||||
"ignore",
|
||||
"jsonschema",
|
||||
"minimatch",
|
||||
"punycode",
|
||||
"semver",
|
||||
"table",
|
||||
"yaml",
|
||||
"mime-types"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@aws-cdk/asset-awscli-v1": "2.2.258",
|
||||
"@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0",
|
||||
"@aws-cdk/cloud-assembly-schema": "^48.20.0",
|
||||
"@balena/dockerignore": "^1.0.2",
|
||||
"case": "1.6.3",
|
||||
"fs-extra": "^11.3.3",
|
||||
"ignore": "^5.3.2",
|
||||
"jsonschema": "^1.5.0",
|
||||
"mime-types": "^2.1.35",
|
||||
"minimatch": "^3.1.2",
|
||||
"punycode": "^2.3.1",
|
||||
"semver": "^7.7.3",
|
||||
"table": "^6.9.0",
|
||||
"yaml": "1.10.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"constructs": "^10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/@balena/dockerignore": {
|
||||
"version": "1.0.2",
|
||||
"inBundle": true,
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/ajv": {
|
||||
"version": "8.17.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
"fast-uri": "^3.0.1",
|
||||
"json-schema-traverse": "^1.0.0",
|
||||
"require-from-string": "^2.0.2"
|
||||
},
|
||||
"funding": {
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/epoberezkin"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/ansi-regex": {
|
||||
"version": "5.0.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/ansi-styles": {
|
||||
"version": "4.3.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-convert": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/astral-regex": {
|
||||
"version": "2.0.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/balanced-match": {
|
||||
"version": "1.0.2",
|
||||
"inBundle": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/brace-expansion": {
|
||||
"version": "1.1.12",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/case": {
|
||||
"version": "1.6.3",
|
||||
"inBundle": true,
|
||||
"license": "(MIT OR GPL-3.0-or-later)",
|
||||
"engines": {
|
||||
"node": ">= 0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/color-convert": {
|
||||
"version": "2.0.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-name": "~1.1.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/color-name": {
|
||||
"version": "1.1.4",
|
||||
"inBundle": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/concat-map": {
|
||||
"version": "0.0.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/emoji-regex": {
|
||||
"version": "8.0.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/fast-deep-equal": {
|
||||
"version": "3.1.3",
|
||||
"inBundle": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/fast-uri": {
|
||||
"version": "3.1.0",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fastify"
|
||||
},
|
||||
{
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/fastify"
|
||||
}
|
||||
],
|
||||
"inBundle": true,
|
||||
"license": "BSD-3-Clause"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/fs-extra": {
|
||||
"version": "11.3.3",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^6.0.1",
|
||||
"universalify": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.14"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/graceful-fs": {
|
||||
"version": "4.2.11",
|
||||
"inBundle": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/ignore": {
|
||||
"version": "5.3.2",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 4"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/is-fullwidth-code-point": {
|
||||
"version": "3.0.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/json-schema-traverse": {
|
||||
"version": "1.0.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/jsonfile": {
|
||||
"version": "6.2.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"universalify": "^2.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/jsonschema": {
|
||||
"version": "1.5.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/lodash.truncate": {
|
||||
"version": "4.4.2",
|
||||
"inBundle": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/mime-db": {
|
||||
"version": "1.52.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/mime-types": {
|
||||
"version": "2.1.35",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"mime-db": "1.52.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/minimatch": {
|
||||
"version": "3.1.2",
|
||||
"inBundle": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/punycode": {
|
||||
"version": "2.3.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/require-from-string": {
|
||||
"version": "2.0.2",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/semver": {
|
||||
"version": "7.7.3",
|
||||
"inBundle": true,
|
||||
"license": "ISC",
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/slice-ansi": {
|
||||
"version": "4.0.0",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ansi-styles": "^4.0.0",
|
||||
"astral-regex": "^2.0.0",
|
||||
"is-fullwidth-code-point": "^3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/string-width": {
|
||||
"version": "4.2.3",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"emoji-regex": "^8.0.0",
|
||||
"is-fullwidth-code-point": "^3.0.0",
|
||||
"strip-ansi": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/strip-ansi": {
|
||||
"version": "6.0.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ansi-regex": "^5.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/table": {
|
||||
"version": "6.9.0",
|
||||
"inBundle": true,
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"ajv": "^8.0.1",
|
||||
"lodash.truncate": "^4.4.2",
|
||||
"slice-ansi": "^4.0.0",
|
||||
"string-width": "^4.2.3",
|
||||
"strip-ansi": "^6.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/universalify": {
|
||||
"version": "2.0.1",
|
||||
"inBundle": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/aws-cdk-lib/node_modules/yaml": {
|
||||
"version": "1.10.2",
|
||||
"inBundle": true,
|
||||
"license": "ISC",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/constructs": {
|
||||
"version": "10.4.4",
|
||||
"resolved": "https://registry.npmjs.org/constructs/-/constructs-10.4.4.tgz",
|
||||
"integrity": "sha512-lP0qC1oViYf1cutHo9/KQ8QL637f/W29tDmv/6sy35F5zs+MD9f66nbAAIjicwc7fwyuF3rkg6PhZh4sfvWIpA==",
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.9.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
|
||||
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "6.21.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
|
||||
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
}
|
||||
}
|
||||
}
|
||||
77
lambdas/imageProcessor/README.md
Normal file
77
lambdas/imageProcessor/README.md
Normal file
@@ -0,0 +1,77 @@
|
||||
# Image Processor Lambda
|
||||
|
||||
Processes uploaded images by extracting metadata (EXIF, GPS, camera info) and stripping it before storing publicly. Triggered by S3 uploads to the `staging/` prefix.
|
||||
|
||||
## How It Works
|
||||
|
||||
1. User uploads image to `staging/{folder}/{uuid}.jpg` via presigned URL
|
||||
2. S3 triggers this Lambda
|
||||
3. Lambda extracts metadata and saves to `ImageMetadata` table
|
||||
4. Lambda strips EXIF data from image
|
||||
5. Lambda moves clean image to final location `{folder}/{uuid}.jpg`
|
||||
6. Lambda deletes staging file
|
||||
|
||||
## Local Development
|
||||
|
||||
### Install Dependencies
|
||||
|
||||
```bash
|
||||
cd lambdas/shared && npm install
|
||||
cd ../imageProcessor && npm install
|
||||
```
|
||||
|
||||
### Set Up Environment

Create a `.env.dev` file in `lambdas/imageProcessor/` that sets the variables below.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
| Variable | Description | Example |
|
||||
| -------------- | ---------------------------- | ----------------------------------------------- |
|
||||
| `DATABASE_URL` | PostgreSQL connection string | `postgresql://user:pass@localhost:5432/db-name` |
|
||||
| `S3_BUCKET` | S3 bucket name | `bucket-name` |
|
||||
| `AWS_REGION` | AWS region | `us-east-1` |
|
||||
| `LOG_LEVEL` | Logging level | `debug`, `info`, `warn`, `error` |
|
||||
|
||||
### Run Locally

First, upload a test image to the staging prefix:
|
||||
|
||||
```bash
|
||||
aws s3 cp source s3://bucket-name/staging/image-type/key --profile your-profile-name
|
||||
```
|
||||
|
||||
Then run the processor against the uploaded key:

```bash
|
||||
npm run local -- staging/items/test-image.jpg my-bucket
|
||||
```
|
||||
|
||||
### Run Migration
|
||||
|
||||
```bash
|
||||
cd backend
|
||||
npx sequelize-cli db:migrate
|
||||
```
|
||||
|
||||
## Supported Image Types
|
||||
|
||||
- JPEG (`.jpg`, `.jpeg`)
|
||||
- PNG (`.png`)
|
||||
- GIF (`.gif`) - preserves animation
|
||||
- WebP (`.webp`)
|
||||
|
||||
## Metadata Extracted
|
||||
|
||||
| Field | Description |
|
||||
| --------------------------- | -------------------------------------------- |
|
||||
| `latitude`, `longitude` | GPS coordinates (stripped from public image) |
|
||||
| `cameraMake`, `cameraModel` | Camera manufacturer and model |
|
||||
| `cameraSoftware` | Software used to create image |
|
||||
| `dateTaken` | Original capture date |
|
||||
| `width`, `height` | Image dimensions |
|
||||
| `orientation` | EXIF orientation flag |
|
||||
| `fileSize` | File size in bytes |
|
||||
|
||||
## Deployment
|
||||
|
||||
See [infrastructure/cdk/README.md](../../infrastructure/cdk/README.md) for deployment instructions.
|
||||
|
||||
```bash
|
||||
cd infrastructure/cdk
|
||||
npm run deploy:staging
|
||||
```
|
||||
170
lambdas/imageProcessor/imageProcessor.js
Normal file
170
lambdas/imageProcessor/imageProcessor.js
Normal file
@@ -0,0 +1,170 @@
|
||||
const sharp = require("sharp");
|
||||
const exifReader = require("exif-reader");
|
||||
const { logger } = require("../shared");
|
||||
|
||||
/**
 * Extract metadata from an image buffer.
 *
 * @param {Buffer} buffer - Image buffer
 * @returns {Promise<Object>} Extracted metadata: GPS coordinates, camera
 *   info, capture date, dimensions, and file size. Missing fields are null.
 */
async function extractMetadata(buffer) {
  const image = sharp(buffer);
  const metadata = await image.metadata();

  let exifData = {};
  if (metadata.exif) {
    try {
      exifData = exifReader(metadata.exif);
    } catch (e) {
      // EXIF parsing failed, continue without it
      logger.warn("Failed to parse EXIF data", { error: e.message });
    }
  }

  // exif-reader v2 (the range pinned in package.json) names its result
  // sections after the EXIF spec IFDs (Image, Photo, GPSInfo); v1 used
  // lowercase image/exif/gps. Read both shapes so the fields don't
  // silently come back null under v2.
  const imageIfd = exifData?.Image ?? exifData?.image ?? {};
  const photoIfd = exifData?.Photo ?? exifData?.exif ?? {};
  const gpsIfd = exifData?.GPSInfo ?? exifData?.gps ?? {};

  return {
    // GPS data (stripped from the public copy by stripMetadata)
    latitude: parseGpsCoordinate(gpsIfd.GPSLatitude, gpsIfd.GPSLatitudeRef),
    longitude: parseGpsCoordinate(gpsIfd.GPSLongitude, gpsIfd.GPSLongitudeRef),

    // Camera info
    cameraMake: imageIfd.Make || null,
    cameraModel: imageIfd.Model || null,
    cameraSoftware: imageIfd.Software || null,

    // Date/time: prefer the original capture timestamp over file DateTime
    dateTaken: parseExifDate(photoIfd.DateTimeOriginal || imageIfd.DateTime),

    // Dimensions as reported by sharp
    width: metadata.width,
    height: metadata.height,
    orientation: metadata.orientation || 1,

    // File info
    fileSize: buffer.length,
  };
}
|
||||
|
||||
/**
 * Strip all metadata from an image buffer.
 * Uses sharp's rotate(), which auto-orients using the EXIF orientation tag
 * and drops the EXIF block from the re-encoded output.
 *
 * @param {Buffer} buffer - Image buffer
 * @param {string} [format="jpeg"] - Reserved fallback output format; the
 *   actual output format is currently derived from the decoded input format.
 * @returns {Promise<Buffer>} Processed image buffer
 */
async function stripMetadata(buffer, format = "jpeg") {
  const { format: detectedFormat } = await sharp(buffer).metadata();

  if (detectedFormat === "gif") {
    // Re-open with animated: true — sharp decodes only the first frame by
    // default, which would flatten animated GIFs to a single frame.
    // GIFs carry no EXIF, so no rotate() is needed here.
    return sharp(buffer, { animated: true }).gif().toBuffer();
  }

  // rotate() with no argument auto-orients from EXIF and strips it on output.
  const image = sharp(buffer).rotate();

  if (detectedFormat === "png") {
    return image.png().toBuffer();
  }
  if (detectedFormat === "webp") {
    return image.webp().toBuffer();
  }

  // Default to JPEG for best compatibility.
  return image.jpeg({ quality: 90 }).toBuffer();
}
|
||||
|
||||
/**
 * Convert GPS DMS (degrees, minutes, seconds) to a decimal coordinate.
 *
 * @param {Array} dms - [degrees, minutes, seconds]
 * @param {string} ref - "N"/"S"/"E"/"W" hemisphere reference
 * @returns {number|null} Decimal coordinate, or null when dms is missing,
 *   malformed, or contains non-numeric components
 */
function parseGpsCoordinate(dms, ref) {
  if (!Array.isArray(dms) || dms.length !== 3) {
    return null;
  }

  // Coerce each component and reject non-numeric values; previously a bad
  // component would propagate NaN into the DECIMAL database column.
  const [degrees, minutes, seconds] = dms.map(Number);
  if (![degrees, minutes, seconds].every(Number.isFinite)) {
    return null;
  }

  let decimal = degrees + minutes / 60 + seconds / 3600;

  // South and West hemispheres are negative.
  if (ref === "S" || ref === "W") {
    decimal = -decimal;
  }

  // Round to 8 decimal places (about 1mm precision), matching DECIMAL(x,8).
  return Math.round(decimal * 1e8) / 1e8;
}
|
||||
|
||||
/**
 * Parse an EXIF date into a Date object.
 * EXIF stores timestamps as "YYYY:MM:DD HH:MM:SS" (colons in the date part).
 *
 * @param {string|Date} dateStr - EXIF date string or Date object
 * @returns {Date|null} Parsed date, or null when absent/unparseable
 */
function parseExifDate(dateStr) {
  if (!dateStr) {
    return null;
  }

  // Pass Date instances straight through.
  if (dateStr instanceof Date) {
    return dateStr;
  }

  // Rewrite the EXIF layout into an ISO-like local timestamp.
  const exifPattern = /(\d{4}):(\d{2}):(\d{2}) (\d{2}):(\d{2}):(\d{2})/;
  const parts = exifPattern.exec(String(dateStr));
  if (parts) {
    const [y, mo, d, h, mi, s] = parts.slice(1);
    return new Date(`${y}-${mo}-${d}T${h}:${mi}:${s}`);
  }

  // Fall back to native parsing (handles ISO strings).
  const parsed = new Date(dateStr);
  return Number.isNaN(parsed.getTime()) ? null : parsed;
}
|
||||
|
||||
/**
 * Get the MIME type for an image format/extension.
 *
 * @param {string} format - Image format (e.g. "jpg", "png")
 * @returns {string} MIME type; defaults to "image/jpeg" for unknown formats
 */
function getMimeType(format) {
  // A Map avoids prototype-chain leakage: with a plain object literal,
  // getMimeType("constructor") returned Object's constructor function
  // instead of a MIME string.
  const mimeTypes = new Map([
    ["jpeg", "image/jpeg"],
    ["jpg", "image/jpeg"],
    ["png", "image/png"],
    ["gif", "image/gif"],
    ["webp", "image/webp"],
  ]);
  return mimeTypes.get(format) ?? "image/jpeg";
}
|
||||
|
||||
// Public API: extraction/stripping entry points plus the small pure helpers
// (exported individually so they can be unit-tested in isolation).
module.exports = {
  extractMetadata,
  stripMetadata,
  parseGpsCoordinate,
  parseExifDate,
  getMimeType,
};
|
||||
161
lambdas/imageProcessor/index.js
Normal file
161
lambdas/imageProcessor/index.js
Normal file
@@ -0,0 +1,161 @@
|
||||
const {
|
||||
S3Client,
|
||||
GetObjectCommand,
|
||||
PutObjectCommand,
|
||||
DeleteObjectCommand,
|
||||
} = require("@aws-sdk/client-s3");
|
||||
const { extractMetadata, stripMetadata, getMimeType } = require("./imageProcessor");
|
||||
const { saveImageMetadata, updateProcessingStatus } = require("./queries");
|
||||
const { logger } = require("../shared");
|
||||
|
||||
// Single shared S3 client, reused across invocations of this container.
// AWS_REGION is provided automatically by the Lambda runtime.
const s3Client = new S3Client({ region: process.env.AWS_REGION });
|
||||
|
||||
/**
|
||||
* Lambda handler for S3 image processing.
|
||||
* Triggered by S3 ObjectCreated events on staging/ prefix.
|
||||
*
|
||||
* @param {Object} event - S3 event
|
||||
* @returns {Object} Processing results
|
||||
*/
|
||||
exports.handler = async (event) => {
|
||||
logger.info("Lambda invoked", { recordCount: event.Records?.length });
|
||||
|
||||
const results = [];
|
||||
|
||||
for (const record of event.Records) {
|
||||
const bucket = record.s3.bucket.name;
|
||||
const stagingKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, " "));
|
||||
|
||||
logger.info("Processing image", { bucket, stagingKey });
|
||||
|
||||
try {
|
||||
// Only process files in staging/ folder
|
||||
if (!stagingKey.startsWith("staging/")) {
|
||||
logger.info("Skipping non-staging key", { stagingKey });
|
||||
results.push({ key: stagingKey, status: "skipped", reason: "not in staging" });
|
||||
continue;
|
||||
}
|
||||
|
||||
// Calculate final key: staging/items/uuid.jpg -> items/uuid.jpg
|
||||
const finalKey = stagingKey.replace(/^staging\//, "");
|
||||
|
||||
// Check if this is an image file
|
||||
if (!isImageFile(stagingKey)) {
|
||||
logger.info("Skipping non-image file", { stagingKey });
|
||||
results.push({ key: stagingKey, status: "skipped", reason: "not an image" });
|
||||
continue;
|
||||
}
|
||||
|
||||
// Process the image
|
||||
await processImage(bucket, stagingKey, finalKey);
|
||||
|
||||
results.push({ key: finalKey, status: "success" });
|
||||
logger.info("Successfully processed image", { finalKey });
|
||||
} catch (error) {
|
||||
logger.error("Error processing image", { stagingKey, error: error.message, stack: error.stack });
|
||||
results.push({ key: stagingKey, status: "error", error: error.message });
|
||||
|
||||
// Try to update status to failed if we have a finalKey
|
||||
try {
|
||||
const finalKey = stagingKey.replace(/^staging\//, "");
|
||||
await updateProcessingStatus(finalKey, "failed", error.message);
|
||||
} catch (dbError) {
|
||||
logger.error("Failed to update error status in DB", { error: dbError.message });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ processed: results.length, results }),
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Process a single image: extract metadata, strip it, and move the clean
 * copy to its final location.
 *
 * Order matters here: metadata is extracted and persisted BEFORE the image
 * is stripped, and the staging object is deleted only after the clean copy
 * has been uploaded. NOTE(review): the upload+delete pair is not atomic —
 * if the Lambda dies in between, the staging file remains and a re-trigger
 * re-processes it; this looks safe because saveImageMetadata upserts on
 * s3Key, but confirm the S3 trigger re-fires in that scenario.
 *
 * @param {string} bucket - S3 bucket name
 * @param {string} stagingKey - Staging key (e.g., staging/items/uuid.jpg)
 * @param {string} finalKey - Final key (e.g., items/uuid.jpg)
 */
async function processImage(bucket, stagingKey, finalKey) {
  // 1. Download image from staging location
  logger.debug("Downloading from staging", { stagingKey });
  const getCommand = new GetObjectCommand({
    Bucket: bucket,
    Key: stagingKey,
  });
  const response = await s3Client.send(getCommand);
  const buffer = Buffer.from(await response.Body.transformToByteArray());

  // 2. Extract metadata BEFORE stripping
  logger.debug("Extracting metadata");
  const metadata = await extractMetadata(buffer);
  // NOTE(review): this info-level log includes GPS coordinates — confirm
  // that is acceptable under the log-retention/privacy policy.
  logger.info("Extracted metadata", { finalKey, metadata });

  // 3. Save metadata to database (row is created with status 'processing')
  logger.debug("Saving metadata to DB", { finalKey });
  await saveImageMetadata(finalKey, metadata);

  // 4. Strip metadata from image
  logger.debug("Stripping metadata");
  const strippedBuffer = await stripMetadata(buffer);

  // 5. Determine content type from the original file extension
  //    (getMimeType falls back to image/jpeg for unknown extensions)
  const format = stagingKey.split(".").pop().toLowerCase();
  const contentType = getMimeType(format);

  // 6. Upload clean image to FINAL location
  logger.debug("Uploading to final location", { finalKey });
  const putCommand = new PutObjectCommand({
    Bucket: bucket,
    Key: finalKey,
    Body: strippedBuffer,
    ContentType: contentType,
    CacheControl: getCacheControl(finalKey),
    // Custom object metadata marking the object as sanitized.
    Metadata: {
      "x-processed": "true",
      "x-processed-at": new Date().toISOString(),
    },
  });
  await s3Client.send(putCommand);

  // 7. Delete staging file
  logger.debug("Deleting staging file", { stagingKey });
  const deleteCommand = new DeleteObjectCommand({
    Bucket: bucket,
    Key: stagingKey,
  });
  await s3Client.send(deleteCommand);

  // 8. Update processing status to completed
  logger.debug("Updating processing status to completed");
  await updateProcessingStatus(finalKey, "completed");
}
|
||||
|
||||
/**
 * Check whether an S3 key refers to a supported image type, judged by its
 * file extension (case-insensitive). Keys without a dot return false.
 *
 * @param {string} key - S3 key
 * @returns {boolean}
 */
function isImageFile(key) {
  const lowered = key.toLowerCase();
  return [".jpg", ".jpeg", ".png", ".gif", ".webp"].some((extension) =>
    lowered.endsWith(extension)
  );
}
|
||||
|
||||
/**
 * Choose a Cache-Control header based on the key's top-level folder.
 * Private folders (messages, condition checks) get a short 1-hour private
 * cache; everything else is publicly cacheable for a day.
 *
 * @param {string} key - S3 key
 * @returns {string}
 */
function getCacheControl(key) {
  const privatePrefixes = ["messages/", "condition-checks/"];
  const isPrivate = privatePrefixes.some((prefix) => key.startsWith(prefix));
  return isPrivate ? "private, max-age=3600" : "public, max-age=86400";
}
|
||||
6574
lambdas/imageProcessor/package-lock.json
generated
Normal file
6574
lambdas/imageProcessor/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
23
lambdas/imageProcessor/package.json
Normal file
23
lambdas/imageProcessor/package.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"name": "image-processor-lambda",
|
||||
"version": "1.0.0",
|
||||
"description": "Lambda function to extract and strip image metadata from uploads",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "^3.400.0",
|
||||
"@rentall/lambda-shared": "file:../shared",
|
||||
"exif-reader": "^2.0.0",
|
||||
"sharp": "^0.33.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"dotenv": "^17.2.3",
|
||||
"jest": "^30.1.3"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest",
|
||||
"local": "node -r dotenv/config test-local.js dotenv_config_path=.env.dev"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
}
|
||||
}
|
||||
80
lambdas/imageProcessor/queries.js
Normal file
80
lambdas/imageProcessor/queries.js
Normal file
@@ -0,0 +1,80 @@
|
||||
const { db } = require("../shared");
|
||||
|
||||
/**
 * Save image metadata to the database.
 * Uses an UPSERT on "s3Key" so re-uploads of the same key update in place.
 *
 * On conflict the row is reset to a fresh 'processing' state: the previous
 * run's "errorMessage" and "processedAt" are cleared so stale failure data
 * cannot outlive a re-upload.
 *
 * @param {string} s3Key - Final S3 key (without staging/ prefix)
 * @param {Object} metadata - Extracted metadata
 * @returns {Promise<string>} The inserted/updated record ID
 */
async function saveImageMetadata(s3Key, metadata) {
  const query = `
    INSERT INTO "ImageMetadata" (
      id, "s3Key", latitude, longitude,
      "cameraMake", "cameraModel", "cameraSoftware",
      "dateTaken", width, height, orientation, "fileSize",
      "processingStatus", "createdAt", "updatedAt"
    ) VALUES (
      gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, NOW(), NOW()
    )
    ON CONFLICT ("s3Key") DO UPDATE SET
      latitude = EXCLUDED.latitude,
      longitude = EXCLUDED.longitude,
      "cameraMake" = EXCLUDED."cameraMake",
      "cameraModel" = EXCLUDED."cameraModel",
      "cameraSoftware" = EXCLUDED."cameraSoftware",
      "dateTaken" = EXCLUDED."dateTaken",
      width = EXCLUDED.width,
      height = EXCLUDED.height,
      orientation = EXCLUDED.orientation,
      "fileSize" = EXCLUDED."fileSize",
      "processingStatus" = EXCLUDED."processingStatus",
      "errorMessage" = NULL,
      "processedAt" = NULL,
      "updatedAt" = NOW()
    RETURNING id
  `;

  const values = [
    s3Key,
    metadata.latitude,
    metadata.longitude,
    metadata.cameraMake,
    metadata.cameraModel,
    metadata.cameraSoftware,
    metadata.dateTaken,
    metadata.width,
    metadata.height,
    metadata.orientation,
    metadata.fileSize,
    // Status moves to 'processing' here; index.js flips it to 'completed'
    // or 'failed' once stripping and the S3 move finish.
    "processing",
  ];

  const result = await db.query(query, values);
  return result.rows[0].id;
}
|
||||
|
||||
/**
 * Update processing status after completion.
 *
 * "processedAt" is stamped only on the 'completed' transition; other
 * statuses leave the existing value untouched. "errorMessage" is always
 * overwritten with the supplied value (null on success), which clears any
 * previous error for this key.
 *
 * @param {string} s3Key - S3 key
 * @param {string} status - 'completed' or 'failed'
 * @param {string} errorMessage - Error message if failed
 */
async function updateProcessingStatus(s3Key, status, errorMessage = null) {
  // $2 is cast to the Sequelize-generated enum type for "processingStatus";
  // the bare $2 inside the CASE is compared as text against 'completed'.
  const query = `
    UPDATE "ImageMetadata"
    SET
      "processingStatus" = $2::"enum_ImageMetadata_processingStatus",
      "processedAt" = CASE WHEN $2 = 'completed' THEN NOW() ELSE "processedAt" END,
      "errorMessage" = $3,
      "updatedAt" = NOW()
    WHERE "s3Key" = $1
  `;

  await db.query(query, [s3Key, status, errorMessage]);
}
|
||||
|
||||
// Database access layer for the image processor; consumed by index.js
// (updateProcessingStatus is also called from its error path).
module.exports = {
  saveImageMetadata,
  updateProcessingStatus,
};
|
||||
59
lambdas/imageProcessor/test-local.js
Normal file
59
lambdas/imageProcessor/test-local.js
Normal file
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* Local testing script for the image processor lambda.
|
||||
*
|
||||
* Usage:
|
||||
* npm run local -- <stagingKey> [bucket]
|
||||
*
|
||||
* Example:
|
||||
* npm run local -- staging/items/test-image.jpg my-bucket
|
||||
*
|
||||
* Note: Requires .env.dev file with DATABASE_URL and AWS credentials configured.
|
||||
*/
|
||||
|
||||
const { handler } = require("./index");
|
||||
|
||||
/**
 * Entry point for local testing: builds a fake S3 ObjectCreated event for
 * the key/bucket given on the command line and runs the real handler.
 */
async function main() {
  // Strip dotenv's own CLI argument so positional parameters line up.
  const cliArgs = process.argv
    .slice(2)
    .filter((arg) => !arg.startsWith("dotenv_config_path"));

  const stagingKey = cliArgs[0] || "staging/items/test-image.jpg";
  const bucket = cliArgs[1] || process.env.S3_BUCKET;

  console.log("Testing image processor lambda locally...");
  console.log(`Bucket: ${bucket}`);
  console.log(`Staging Key: ${stagingKey}`);
  console.log("---");

  // Minimal event shaped like a real S3 ObjectCreated notification.
  const event = {
    Records: [
      {
        eventSource: "aws:s3",
        eventName: "ObjectCreated:Put",
        s3: {
          bucket: { name: bucket },
          object: { key: stagingKey },
        },
      },
    ],
  };

  console.log("Event:", JSON.stringify(event, null, 2));
  console.log("---");

  try {
    const result = await handler(event);
    console.log("Result:", JSON.stringify(result, null, 2));
    process.exit(0);
  } catch (error) {
    console.error("Error:", error);
    process.exit(1);
  }
}

main();
|
||||
Reference in New Issue
Block a user