Compare commits

...

178 Commits

Author SHA1 Message Date
jackiettran
5d3c124d3e text changes 2026-01-21 19:20:07 -05:00
jackiettran
420e0efeb4 text changes and remove infra folder 2026-01-21 19:00:55 -05:00
jackiettran
23ca97cea9 text clean up 2026-01-21 17:48:50 -05:00
jackiettran
b5755109a7 Merge branch 'feature/aws-deployment'
merge infrastructure aws cdk
2026-01-21 14:19:04 -05:00
jackiettran
0136b74ee0 infrastructure with aws cdk 2026-01-21 14:18:07 -05:00
jackiettran
cae9e7e473 more frontend tests 2026-01-20 22:31:57 -05:00
jackiettran
fcce10e664 More frontend tests 2026-01-20 14:19:22 -05:00
jackiettran
28554acc2d Migrated to react router v7 2026-01-19 22:50:53 -05:00
jackiettran
1923ffc251 backend unit test coverage to 80% 2026-01-19 19:22:01 -05:00
jackiettran
d4362074f5 more unit tests 2026-01-19 00:29:28 -05:00
jackiettran
75ddb2908f fixed skipped tests 2026-01-18 19:29:28 -05:00
jackiettran
41d8cf4c04 more backend unit test coverage 2026-01-18 19:18:35 -05:00
jackiettran
e6c56ae90f fixed integration tests 2026-01-18 17:44:26 -05:00
jackiettran
d570f607d3 migration to vite and cleaned up /uploads 2026-01-18 16:55:19 -05:00
jackiettran
f9c2057e64 fixed csrf test and a bug 2026-01-18 14:02:56 -05:00
jackiettran
f58178a253 fixed tests and package vulnerabilities 2026-01-17 11:12:40 -05:00
jackiettran
cf97dffbfb MFA 2026-01-16 18:04:39 -05:00
jackiettran
63385e049c updated tests 2026-01-15 18:47:43 -05:00
jackiettran
35d5050286 removed dead code 2026-01-15 17:32:44 -05:00
jackiettran
826e4f2ed5 infrastructure updates 2026-01-15 17:17:06 -05:00
jackiettran
a3ef343326 generic response without specific error message 2026-01-15 16:37:01 -05:00
jackiettran
1b6f782648 query parameter token could be leaked 2026-01-15 16:26:53 -05:00
jackiettran
18a37e2996 lat lon validation 2026-01-15 16:11:57 -05:00
jackiettran
7b12e59f0c sanitization to all api routes 2026-01-15 15:42:30 -05:00
jackiettran
c6b531d12a more specific resources in iam policies 2026-01-15 15:31:23 -05:00
jackiettran
942867d94c fixed bug where had to login every time the server restarted 2026-01-15 15:14:55 -05:00
jackiettran
c560d9e13c updated gitignore 2026-01-15 12:07:24 -05:00
jackiettran
2242ed810e lazy loading email templates 2026-01-14 23:42:04 -05:00
jackiettran
e7081620a9 removed the cron jobs 2026-01-14 22:44:18 -05:00
jackiettran
7f2f45b1c2 payout retry lambda 2026-01-14 18:05:41 -05:00
jackiettran
da82872297 image processing lambda 2026-01-14 12:11:50 -05:00
jackiettran
f5fdcbfb82 condition check lambda 2026-01-13 17:14:19 -05:00
jackiettran
2ee5571b5b updated variable name 2026-01-12 18:12:17 -05:00
jackiettran
89dd99c263 added missing email template file references 2026-01-12 17:58:16 -05:00
jackiettran
c2ebe8709d fixed bug where earnings would show set up before disappearing even when user has stripePayoutsEnabled 2026-01-12 17:44:53 -05:00
jackiettran
6c9fd8aec2 have the right dispute statuses 2026-01-12 17:00:08 -05:00
jackiettran
80d643c65c Fixed bug where could not rent 3-4 and 4-5PM 2026-01-12 16:52:37 -05:00
jackiettran
415bcc5021 replaced some console.errors with logger 2026-01-10 20:47:29 -05:00
jackiettran
86cb8b3fe0 can cancel a rental request before owner approval 2026-01-10 19:22:15 -05:00
jackiettran
860b6d6160 Stripe error handling and now you can rent an item for a different time while having an upcoming or active rental 2026-01-10 13:29:09 -05:00
jackiettran
8aea3c38ed idempotency for stripe transfer, refund, charge 2026-01-09 14:14:49 -05:00
jackiettran
e2e32f7632 handling changes to stripe account where owner needs to provide information 2026-01-08 19:08:14 -05:00
jackiettran
0ea35e9d6f handling when payout is canceled 2026-01-08 18:12:58 -05:00
jackiettran
8585633907 handling if owner disconnects their stripe account 2026-01-08 17:49:02 -05:00
jackiettran
3042a9007f handling stripe disputes/chargeback where renter disputes the charge through their credit card company or bank 2026-01-08 17:23:55 -05:00
jackiettran
5248c3dc39 handling case where payout failed and webhook event not received 2026-01-08 15:27:02 -05:00
jackiettran
65b7574be2 updated card and bank error handling messages 2026-01-08 15:00:12 -05:00
jackiettran
bcb917c959 3D Secure handling 2026-01-08 12:44:57 -05:00
jackiettran
8b9b92d848 Text changes with earnings 2026-01-07 22:37:41 -05:00
jackiettran
550de32a41 migrations readme 2026-01-07 21:55:41 -05:00
jackiettran
5eb877b7c2 addtional checks for if user is banned 2026-01-07 00:46:16 -05:00
jackiettran
b56e031ee5 ability to ban and unban users 2026-01-07 00:39:20 -05:00
jackiettran
1203fb7996 optimized condition checks 2026-01-06 17:28:20 -05:00
jackiettran
28c0b4976d failed payment method handling 2026-01-06 16:13:58 -05:00
jackiettran
ec84b8354e paid out amount updates on page load if webhook doesn't work 2026-01-05 23:13:18 -05:00
jackiettran
8809a012d5 Layout changes to Earnings page. Earnings Setup only shows if not set up yet 2026-01-04 01:30:30 -05:00
jackiettran
a0d63ff04a Merge branch 'fix/backend-unit-tests' 2026-01-03 21:21:08 -05:00
jackiettran
e408880cae fixed tests 2026-01-03 21:19:23 -05:00
jackiettran
76102d48a9 stripe webhooks. removed payout cron. webhook for when amount is deposited into bank. More communication about payout timelines 2026-01-03 19:58:23 -05:00
jackiettran
493921b723 Merge branch 'feature/stripe-embedded-onboarding' 2026-01-02 18:27:16 -05:00
jackiettran
6853ae264c Add Stripe embedded onboarding
- Update StripeConnectOnboarding component with embedded flow
- Add new Stripe routes and service methods for embedded onboarding
- Update EarningsStatus and EarningsDashboard to support new flow
- Add required frontend dependencies

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-02 18:26:53 -05:00
jackiettran
e9bc87da99 review viewable after 72 hours instead of 10 minutes if only one side submits a review 2026-01-02 17:54:01 -05:00
jackiettran
b89a0e3de7 Renter can now see owner's pre rental condition 2026-01-02 17:39:45 -05:00
jackiettran
4209dcc8fc removed cron job that made rentals active. Now whether or not the rental is active is determined on the fly 2026-01-02 17:08:49 -05:00
jackiettran
bc01c818aa Condition check modal title text edit 2026-01-02 14:24:22 -05:00
jackiettran
0104f369a9 Owner should only be able to complete an active rental not a confirmed rental. Removed an icon 2026-01-01 23:49:03 -05:00
jackiettran
0682494ee0 Fixed an email bug where it wasn't getting email from the db 2026-01-01 23:29:39 -05:00
jackiettran
fe38ef430a Fixed a bug with What will you use it for, fixed a bug with the sticky pricing card, text change 2026-01-01 18:48:01 -05:00
jackiettran
9e41f328e0 layout and styling changes for RentItem 2026-01-01 17:17:02 -05:00
jackiettran
fd2312fe47 Edited layout of mmddyyyy and time dropdown. Changed algorithm for determining pricing so that it choosest the cheapest option for users 2026-01-01 14:46:40 -05:00
jackiettran
3d0e553620 date time validation and added ability to type in date 2026-01-01 00:50:19 -05:00
jackiettran
f66dccdfa3 fixed bug where avatar wasn't showing on desktop mode 2025-12-30 23:48:38 -05:00
jackiettran
3ff98fbe1e avatar menu closes properly 2025-12-30 23:25:50 -05:00
jackiettran
1b4e86be29 fixed image previews 2025-12-30 22:49:34 -05:00
jackiettran
807082eebf image optimization. Image resizing client side, index added to db, pagination 2025-12-30 20:23:32 -05:00
jackiettran
3e31b9d08b fixing intemittent undefined errors 2025-12-30 18:07:23 -05:00
jackiettran
e3acf45ba0 fixed sticky bottom pricing card for mobile 2025-12-30 17:35:48 -05:00
jackiettran
4bb4e7bcb6 Grouping markers and changing pin to tear shape 2025-12-30 16:58:03 -05:00
jackiettran
6cf8a009ff location filter 2025-12-30 14:23:21 -05:00
jackiettran
546c881701 rental price calculation bug, sticky pricing cards on mobile, bigger font app wide, removed delivery options from frontened, searching by location with zipcode works when there's multiple zipcodes in the area, 2025-12-30 00:20:15 -05:00
jackiettran
7dd3aff0f8 Image is required for creating an item, required fields actually required, Available After and Available Before defaults changed, delete confirmation modal for deleting an item 2025-12-29 19:26:37 -05:00
jackiettran
ac1e22f194 better UX when resetting pw 2025-12-29 00:38:10 -05:00
jackiettran
e153614993 login attempts 2025-12-28 12:43:10 -05:00
jackiettran
2e18137b5b 404 page 2025-12-25 23:32:55 -05:00
jackiettran
36cf5b65fa improved email verification experience wording 2025-12-25 23:09:10 -05:00
jackiettran
4f85243815 more stack traces 2025-12-25 19:05:12 -05:00
jackiettran
76e4039ba8 added stack trace to some logging 2025-12-25 18:41:42 -05:00
jackiettran
b02ec19d5c navbar menu styling 2025-12-23 23:08:36 -05:00
jackiettran
2a32470758 text changes, error styling, navbar menu styling 2025-12-23 23:08:22 -05:00
jackiettran
5ec22c2a5b Navbar UX consistency 2025-12-23 19:39:23 -05:00
jackiettran
426f974ed3 users can click outside of modal to close the modal for info only modals. Take away that ability for important modals 2025-12-23 18:43:17 -05:00
jackiettran
347f709f72 Updated search bar to remove location. Will get or ask for user's location. Removed Start Earning button. Works on desktop and mobile 2025-12-23 18:09:12 -05:00
jackiettran
07e5a2a320 Rebrand and updated copyright date 2025-12-22 22:35:57 -05:00
jackiettran
955517347e health endpoint 2025-12-20 15:21:33 -05:00
jackiettran
bd1bd5014c updating unit and integration tests 2025-12-20 14:59:09 -05:00
jackiettran
4e0a4ef019 updated upload unit tests for s3 image handling 2025-12-19 18:58:30 -05:00
jackiettran
4b4584bc0f sending images through messages works 2025-12-18 19:37:16 -05:00
jackiettran
996e815d57 if authmodal is up, cursor is already in it 2025-12-18 18:43:08 -05:00
jackiettran
38e0b6a16d condition checks in rental history in profile 2025-12-16 14:15:07 -05:00
jackiettran
27a7b641dd condition check gallery 2025-12-16 13:50:23 -05:00
jackiettran
372ab093ef email verification flow updated 2025-12-15 22:45:55 -05:00
jackiettran
5e01bb8cff images for forum and forum comments 2025-12-13 20:32:25 -05:00
jackiettran
55e08e14b8 consistent profile image, initials with background color as backup, better profile image editing 2025-12-12 23:08:54 -05:00
jackiettran
3f319bfdd0 unit tests 2025-12-12 16:27:56 -05:00
jackiettran
25bbf5d20b mass assignment vulnerabilites and refactoring of photos 2025-12-12 13:57:44 -05:00
jackiettran
1dee5232a0 s3 image file validation 2025-12-12 13:33:24 -05:00
jackiettran
763945fef4 S3 markdown file 2025-12-12 11:47:16 -05:00
jackiettran
b0268a2fb7 s3 2025-12-11 20:05:18 -05:00
jackiettran
11593606aa imageFilenames and imageFilename, backend integration tests, frontend tests, removed username references 2025-11-26 23:13:23 -05:00
jackiettran
f2d3aac029 sanitized errors 2025-11-26 15:49:42 -05:00
jackiettran
fab79e64ee removed console logs from frontend and a logs from locationService 2025-11-26 15:01:00 -05:00
jackiettran
8b10103ae4 csrf token handling, two jwt tokens 2025-11-26 14:25:49 -05:00
jackiettran
f3a356d64b test migration script 2025-11-25 21:35:09 -05:00
jackiettran
9ec3e97d9e remove sync alter true, add pending migration check 2025-11-25 17:53:49 -05:00
jackiettran
8fc269c62a migration files 2025-11-25 17:24:34 -05:00
jackiettran
31d94b1b3f simplified message model 2025-11-25 17:22:57 -05:00
jackiettran
2983f67ce8 removed metadata from condition check model 2025-11-25 16:48:54 -05:00
jackiettran
8de814fdee replaced vague notes with specific intended use, also fixed modal on top of modal for reviews 2025-11-25 16:40:42 -05:00
jackiettran
13268784fd migration files 2025-11-24 18:11:39 -05:00
jackiettran
8e6af92cba schema updates to rental statuses 2025-11-24 18:08:12 -05:00
jackiettran
42a5412582 changed field from availability to isAvailable 2025-11-24 17:36:18 -05:00
jackiettran
bb16d659bd removed unneeded fields from item including needsTraining 2025-11-24 17:31:09 -05:00
jackiettran
34bbf06f0c no need for notes field for alpha invitation 2025-11-24 17:04:05 -05:00
jackiettran
532f3014df email can't be null, username removed since email can't be null 2025-11-24 15:41:35 -05:00
jackiettran
6aac929ec1 config files and scripts for database migration system 2025-11-24 13:48:10 -05:00
jackiettran
a9f3d37343 copy link buttons for item detail and forum post 2025-11-21 22:20:07 -05:00
jackiettran
f2d42dffee email sent when personal information changed 2025-11-21 16:47:39 -05:00
jackiettran
f7767dfd13 deletion reason and email for soft deleted forum posts and comments by admin 2025-11-20 18:08:30 -05:00
jackiettran
b2f18d77f6 admin can soft delete listings 2025-11-20 17:14:40 -05:00
jackiettran
88c831419c disable item request notifications 2025-11-20 15:28:16 -05:00
jackiettran
83872fe039 fixing bugs with item notification radius 2025-11-20 15:01:15 -05:00
jackiettran
413ac6b6e2 Item request notifications 2025-11-18 22:28:47 -05:00
jackiettran
026e748bf8 handling closing posts 2025-11-17 17:53:41 -05:00
jackiettran
e260992ef2 admin soft delete functionality, also fixed google sign in when user doesn't have first and last name 2025-11-17 11:21:52 -05:00
jackiettran
3a6da3d47d email refactor 2025-11-14 17:36:35 -05:00
jackiettran
629f0055a1 integrated email is forum posts/comments 2025-11-14 15:51:25 -05:00
jackiettran
105f257c5f can add images to forum posts and comments 2025-11-11 23:32:03 -05:00
jackiettran
b045fbeb01 Can mark a comment as the answer, some layout changes 2025-11-11 18:23:11 -05:00
jackiettran
825389228d essential forum code 2025-11-11 16:55:00 -05:00
jackiettran
4a4eee86a7 can add image to message 2025-11-10 22:45:29 -05:00
jackiettran
d8a927ac4e send email when message is sent 2025-11-10 13:05:10 -05:00
jackiettran
3442e880d8 conversations, unread count, autoscrolling to recent messages, cursor in text bar 2025-11-09 22:16:26 -05:00
jackiettran
7a5bff8f2b real time messaging 2025-11-08 18:20:02 -05:00
jackiettran
de32b68ec4 10% community upkeep fee with explanations throughout 2025-11-07 15:51:32 -05:00
jackiettran
e20e33a0f6 updated links 2025-11-07 13:34:24 -05:00
jackiettran
066ad4a3fe moved private information, test fixes 2025-11-06 17:56:12 -05:00
jackiettran
2ee4b5c389 fixed map tests 2025-11-06 16:56:17 -05:00
jackiettran
2956b79f34 updated test 2025-11-06 16:28:35 -05:00
jackiettran
3dca6c803a pricing tiers 2025-11-06 15:54:27 -05:00
jackiettran
9c258177ae home page styling changes 2025-11-04 17:17:44 -05:00
jackiettran
6ec7858bbd added spacing 2025-11-03 15:41:37 -05:00
jackiettran
6d0beccea0 refactor mylistings and my rentals 2025-11-01 22:33:59 -04:00
jackiettran
16272ba373 feedback tab 2025-10-31 16:48:18 -04:00
jackiettran
99aa0b3bdc badge when owner gets pending rental request 2025-10-31 12:18:40 -04:00
jackiettran
71ce2c63fb alpha testing feature flag 2025-10-30 16:16:27 -04:00
jackiettran
ee3a6fd8e1 alpha 2025-10-30 15:38:57 -04:00
jackiettran
d1cb857aa7 payment confirmation for renter after rental request approval, first listing celebration email, removed burstprotection for google places autocomplete, renamed email templates 2025-10-28 22:23:41 -04:00
jackiettran
502d84a741 emails for rental cancelation, rental declined, rental request confirmation, payout received 2025-10-27 13:07:02 -04:00
jackiettran
407c69aa22 rental request email to owner 2025-10-15 15:19:23 -04:00
jackiettran
b9e6cfc54d password reset 2025-10-10 22:54:45 -04:00
jackiettran
462dbf6b7a no separate login/register page, use modal throughout 2025-10-10 15:26:07 -04:00
jackiettran
0a9b875a9d email verfication after account creation, password component, added password special characters 2025-10-10 14:36:09 -04:00
jackiettran
513347e8b7 rental confirmation looks less like spam 2025-10-09 15:13:40 -04:00
jackiettran
34c0ad2920 fixed cors bug, separating rental confirmation for owner and renter, removing condition checks from my-listings 2025-10-08 23:03:28 -04:00
jackiettran
052781a0e6 google sign in with oauth 2.0. no more console errors or warnings 2025-10-08 12:46:25 -04:00
jackiettran
299522b3a6 no more 401 error for publicly browsing user 2025-10-07 11:43:05 -04:00
jackiettran
9a9e96d007 tests 2025-10-06 16:05:29 -04:00
jackiettran
5c3d505988 email plus return item statuses 2025-10-06 15:41:48 -04:00
jackiettran
67cc997ddc Skip payment process if item is free to borrow 2025-09-22 22:02:08 -04:00
jackiettran
3e76769a3e backend logging 2025-09-22 18:38:51 -04:00
jackiettran
6199609a4d removed old beta 2025-09-19 22:33:44 -04:00
jackiettran
649289bf90 backend unit tests 2025-09-19 19:46:41 -04:00
jackiettran
cf6dd9be90 more secure token handling 2025-09-17 18:37:07 -04:00
jackiettran
a9fa579b6d protect against sql injection, xss, csrf 2025-09-16 12:27:15 -04:00
jackiettran
ce0b7bd0cc google sign in 2025-09-15 12:38:18 -04:00
jackiettran
688f5ac8d6 map search 2025-09-10 16:41:05 -04:00
jackiettran
1d7db138df Google maps integration 2025-09-09 22:49:55 -04:00
jackiettran
69bf64fe70 review bug 2025-09-04 22:02:49 -04:00
433 changed files with 151406 additions and 23017 deletions

13
.gitignore vendored
View File

@@ -17,13 +17,17 @@ node_modules/
.env.development.local
.env.test.local
.env.production.local
.env.dev
.mcp.json
.claude
# Logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
*.log
logs/
# Editor directories and files
.idea
@@ -43,6 +47,7 @@ backend/.env.qa
backend/.env.prod
backend/dist
backend/logs
backend/coverage
# Frontend specific
frontend/node_modules
@@ -60,4 +65,10 @@ frontend/.env.local
# Uploads
uploads/
temp/
temp/
# Infrastructure CDK
infrastructure/cdk/dist/
infrastructure/cdk/cdk.out/
infrastructure/cdk/*.js
infrastructure/cdk/*.d.ts

113
README.md
View File

@@ -1,112 +1 @@
# Rentall App
A full-stack marketplace application for renting items, built with React and Node.js.
## Features
- **User Authentication**: Secure JWT-based authentication
- **Item Listings**: Create, edit, and manage rental items
- **Smart Search**: Browse and filter available items
- **Availability Calendar**: Visual calendar for managing item availability
- **Rental Requests**: Accept or reject rental requests with custom reasons
- **Delivery Options**: Support for pickup, delivery, and in-place use
- **User Profiles**: Manage profile information and view rental statistics
- **Responsive Design**: Mobile-friendly interface with Bootstrap
## Tech Stack
### Frontend
- React with TypeScript
- React Router for navigation
- Bootstrap for styling
- Axios for API calls
- Google Places API for address autocomplete
### Backend
- Node.js with Express
- SQLite database with Sequelize ORM
- JWT for authentication
- Bcrypt for password hashing
## Getting Started
### Prerequisites
- Node.js (v14 or higher)
- npm or yarn
### Installation
1. Clone the repository
```bash
git clone https://github.com/YOUR_USERNAME/rentall-app.git
cd rentall-app
```
2. Install backend dependencies
```bash
cd backend
npm install
```
3. Set up backend environment variables
Create a `.env` file in the backend directory:
```
JWT_SECRET=your_jwt_secret_here
PORT=5001
```
4. Install frontend dependencies
```bash
cd ../frontend
npm install
```
5. Set up frontend environment variables
Create a `.env` file in the frontend directory:
```
REACT_APP_API_URL=http://localhost:5001
REACT_APP_GOOGLE_MAPS_API_KEY=your_google_maps_api_key
```
### Running the Application
1. Start the backend server
```bash
cd backend
npm start
```
2. In a new terminal, start the frontend
```bash
cd frontend
npm start
```
The application will be available at `http://localhost:3000`
## Key Features Explained
### Item Management
- Create listings with multiple images, pricing options, and delivery methods
- Set availability using an intuitive calendar interface
- Manage rental rules and requirements
### Rental Process
- Browse available items with search and filter options
- Select rental dates with calendar interface
- Secure payment information collection
- Real-time rental request notifications
### User Dashboard
- View and manage your listings
- Track rental requests and accepted rentals
- Monitor rental statistics
- Update profile information
## Contributing
Feel free to submit issues and enhancement requests!
## License
This project is open source and available under the MIT License.
# Village Share

1
backend/.gitignore vendored
View File

@@ -1,6 +1,5 @@
node_modules/
.env
.env.*
uploads/
*.log
.DS_Store

7
backend/.sequelizerc Normal file
View File

@@ -0,0 +1,7 @@
const path = require('path');
module.exports = {
'config': path.resolve('config', 'database.js'),
'migrations-path': path.resolve('migrations'),
'models-path': path.resolve('models'),
};

5
backend/babel.config.js Normal file
View File

@@ -0,0 +1,5 @@
module.exports = {
presets: [
['@babel/preset-env', { targets: { node: 'current' } }]
]
};

36
backend/config/aws.js Normal file
View File

@@ -0,0 +1,36 @@
const { fromIni } = require("@aws-sdk/credential-providers");
/**
* Get AWS configuration based on environment
* - Development: Uses AWS credential profiles from ~/.aws/credentials
* - Production: Uses IAM roles (EC2/Lambda/ECS instance roles)
*/
function getAWSCredentials() {
if (process.env.NODE_ENV === "dev") {
// Local development: use profile from ~/.aws/credentials
const profile = process.env.AWS_PROFILE;
return fromIni({ profile });
}
}
/**
* Get complete AWS client configuration
*/
function getAWSConfig() {
const config = {
region: process.env.AWS_REGION,
};
const credentials = getAWSCredentials();
if (credentials) {
config.credentials = credentials;
}
return config;
}
module.exports = {
getAWSConfig,
getAWSCredentials,
};

View File

@@ -1,21 +1,61 @@
const { Sequelize } = require('sequelize');
const { Sequelize } = require("sequelize");
const sequelize = new Sequelize(
process.env.DB_NAME,
process.env.DB_USER,
process.env.DB_PASSWORD,
{
host: process.env.DB_HOST,
port: process.env.DB_PORT,
dialect: 'postgres',
logging: false,
pool: {
max: 5,
min: 0,
acquire: 30000,
idle: 10000
}
// Load environment variables based on NODE_ENV
// This ensures variables are available for both CLI and programmatic usage
if (!process.env.DB_NAME && process.env.NODE_ENV) {
const dotenv = require("dotenv");
const envFile = `.env.${process.env.NODE_ENV}`;
const result = dotenv.config({ path: envFile });
if (result.error && process.env.NODE_ENV !== "production") {
console.warn(
`Warning: Could not load ${envFile}, using existing environment variables`,
);
}
}
// Database configuration object
// Used by both Sequelize CLI and programmatic initialization
const dbConfig = {
username: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
dialect: "postgres",
logging: false,
pool: {
max: 5,
min: 0,
acquire: 10000,
idle: 10000,
},
};
// Configuration for Sequelize CLI (supports multiple environments)
// All environments use the same configuration from environment variables
const cliConfig = {
dev: dbConfig,
test: dbConfig,
qa: dbConfig,
production: dbConfig,
prod: dbConfig,
};
// Create Sequelize instance for programmatic use
const sequelize = new Sequelize(
dbConfig.database,
dbConfig.username,
dbConfig.password,
{
host: dbConfig.host,
port: dbConfig.port,
dialect: dbConfig.dialect,
logging: dbConfig.logging,
pool: dbConfig.pool,
},
);
module.exports = sequelize;
// Export the sequelize instance as default (for backward compatibility)
// Also export all environment configs for Sequelize CLI
module.exports = sequelize;
Object.assign(module.exports, cliConfig);

View File

@@ -0,0 +1,14 @@
/**
* Image upload limits configuration
* Keep in sync with frontend/src/config/imageLimits.ts
*/
const IMAGE_LIMITS = {
items: 10,
forum: 10,
conditionChecks: 10,
damageReports: 10,
profile: 1,
messages: 1,
};
module.exports = { IMAGE_LIMITS };

44
backend/jest.config.js Normal file
View File

@@ -0,0 +1,44 @@
module.exports = {
projects: [
{
displayName: 'unit',
testEnvironment: 'node',
testMatch: ['**/tests/unit/**/*.test.js'],
setupFilesAfterEnv: ['<rootDir>/tests/setup.js'],
testTimeout: 10000,
transformIgnorePatterns: [
'node_modules/(?!(@scure|@otplib|otplib|@noble)/)'
],
},
{
displayName: 'integration',
testEnvironment: 'node',
testMatch: ['**/tests/integration/**/*.test.js'],
setupFilesAfterEnv: ['<rootDir>/tests/integration-setup.js'],
testTimeout: 30000,
transformIgnorePatterns: [
'node_modules/(?!(@scure|@otplib|otplib|@noble)/)'
],
},
],
// Run tests sequentially to avoid module cache conflicts between unit and integration tests
maxWorkers: 1,
coverageDirectory: 'coverage',
collectCoverageFrom: [
'**/*.js',
'!**/node_modules/**',
'!**/coverage/**',
'!**/tests/**',
'!**/migrations/**',
'!**/scripts/**',
'!jest.config.js',
'!babel.config.js',
],
coverageReporters: ['text', 'lcov', 'html'],
coverageThreshold: {
global: {
lines: 80,
statements: 80
}
}
};

View File

@@ -1,90 +0,0 @@
const cron = require("node-cron");
const PayoutService = require("../services/payoutService");
const paymentsSchedule = "0 * * * *"; // Run every hour at minute 0
const retrySchedule = "0 7 * * *"; // Retry failed payouts once daily at 7 AM
class PayoutProcessor {
static startScheduledPayouts() {
console.log("Starting automated payout processor...");
const payoutJob = cron.schedule(
paymentsSchedule,
async () => {
console.log("Running scheduled payout processing...");
try {
const results = await PayoutService.processAllEligiblePayouts();
if (results.totalProcessed > 0) {
console.log(
`Payout batch completed: ${results.successful.length} successful, ${results.failed.length} failed`
);
// Log any failures for monitoring
if (results.failed.length > 0) {
console.warn("Failed payouts:", results.failed);
}
}
} catch (error) {
console.error("Error in scheduled payout processing:", error);
}
},
{
scheduled: false,
timezone: "America/New_York",
}
);
const retryJob = cron.schedule(
retrySchedule,
async () => {
console.log("Running failed payout retry process...");
try {
const results = await PayoutService.retryFailedPayouts();
if (results.totalProcessed > 0) {
console.log(
`Retry batch completed: ${results.successful.length} successful, ${results.failed.length} still failed`
);
}
} catch (error) {
console.error("Error in retry payout processing:", error);
}
},
{
scheduled: false,
timezone: "America/New_York",
}
);
// Start the jobs
payoutJob.start();
retryJob.start();
console.log("Payout processor jobs scheduled:");
console.log("- Hourly payout processing: " + paymentsSchedule);
console.log("- Daily retry processing: " + retrySchedule);
return {
payoutJob,
retryJob,
stop() {
payoutJob.stop();
retryJob.stop();
console.log("Payout processor jobs stopped");
},
getStatus() {
return {
payoutJobRunning: payoutJob.getStatus() === "scheduled",
retryJobRunning: retryJob.getStatus() === "scheduled",
};
},
};
}
}
module.exports = PayoutProcessor;

View File

@@ -0,0 +1,64 @@
const { AlphaInvitation } = require("../models");
const logger = require("../utils/logger");
/**
* Middleware to require alpha access for protected routes
* Checks for valid alpha cookie or registered user with invitation
*/
const requireAlphaAccess = async (req, res, next) => {
try {
// Bypass alpha access check if feature is disabled
if (process.env.ALPHA_TESTING_ENABLED !== 'true') {
return next();
}
let hasAccess = false;
// Check 1: Valid alpha access cookie
if (req.cookies && req.cookies.alphaAccessCode) {
const { code } = req.cookies.alphaAccessCode;
if (code) {
const invitation = await AlphaInvitation.findOne({
where: { code, status: ["pending", "active"] },
});
if (invitation) {
hasAccess = true;
}
}
}
// Check 2: Authenticated user who has used an invitation
if (!hasAccess && req.user && req.user.id) {
const invitation = await AlphaInvitation.findOne({
where: { usedBy: req.user.id },
});
if (invitation) {
hasAccess = true;
}
}
if (!hasAccess) {
logger.warn(
`Alpha access denied for request to ${req.path}`,
{
ip: req.ip,
userId: req.user?.id,
}
);
return res.status(403).json({
error: "Alpha access required",
code: "ALPHA_ACCESS_REQUIRED",
});
}
// Access granted
next();
} catch (error) {
logger.error(`Error checking alpha access: ${error.message}`, { error });
res.status(500).json({
error: "Server error",
});
}
};
module.exports = { requireAlphaAccess };

View File

@@ -0,0 +1,84 @@
const logger = require('../utils/logger');
const apiLogger = (req, res, next) => {
const startTime = Date.now();
const reqLogger = logger.withRequestId(req.id);
const requestData = {
method: req.method,
url: req.url,
userAgent: req.get('User-Agent'),
ip: req.ip,
userId: req.user?.id || 'anonymous',
body: logger.sanitize(req.body),
params: req.params,
query: req.query,
headers: {
'content-type': req.get('Content-Type'),
'content-length': req.get('Content-Length'),
'referer': req.get('Referer'),
}
};
reqLogger.info('API Request', requestData);
const originalSend = res.send;
res.send = function(body) {
const endTime = Date.now();
const responseTime = endTime - startTime;
const responseData = {
statusCode: res.statusCode,
responseTime: `${responseTime}ms`,
contentLength: res.get('Content-Length') || (body ? body.length : 0),
method: req.method,
url: req.url,
userId: req.user?.id || 'anonymous'
};
// Parse response body for error responses to include error details
if (res.statusCode >= 400) {
let errorDetails = null;
if (body) {
try {
const parsed = typeof body === 'string' ? JSON.parse(body) : body;
// Extract error message, validation errors, or full response
errorDetails = {
error: parsed.error || parsed.message || null,
errors: parsed.errors || null, // validation errors array
details: parsed.details || null
};
// Remove null values
Object.keys(errorDetails).forEach(key => {
if (errorDetails[key] === null) delete errorDetails[key];
});
if (Object.keys(errorDetails).length > 0) {
responseData.errorDetails = errorDetails;
}
} catch (e) {
// Body is not JSON, include as string (truncated)
if (typeof body === 'string' && body.length > 0) {
responseData.errorDetails = { raw: body.substring(0, 500) };
}
}
}
}
if (res.statusCode >= 400 && res.statusCode < 500) {
// Don't log 401s for /users/profile - these are expected auth checks
if (!(res.statusCode === 401 && req.url === '/profile')) {
reqLogger.warn('API Response - Client Error', responseData);
}
} else if (res.statusCode >= 500) {
reqLogger.error('API Response - Server Error', responseData);
} else {
reqLogger.info('API Response - Success', responseData);
}
return originalSend.call(this, body);
};
next();
};
module.exports = apiLogger;

View File

@@ -1,34 +1,163 @@
const jwt = require("jsonwebtoken");
const { User } = require("../models"); // Import from models/index.js to get models with associations
const logger = require("../utils/logger");
// Authenticate a request via the httpOnly `accessToken` cookie.
// On success attaches the full User record as req.user; on failure responds
// 401/403 with a machine-readable `code` the client can branch on.
// NOTE: this block previously contained interleaved old+new lines from a
// rendered diff (duplicate `token` declarations, duplicated returns, a stray
// old `module.exports`); this is the reconciled post-migration version.
const authenticateToken = async (req, res, next) => {
  // Token is delivered via an httpOnly cookie (not the Authorization header).
  const token = req.cookies?.accessToken;
  if (!token) {
    return res.status(401).json({
      error: "Access token required",
      code: "NO_TOKEN",
    });
  }
  try {
    const decoded = jwt.verify(token, process.env.JWT_ACCESS_SECRET);
    const userId = decoded.id;
    if (!userId) {
      return res.status(401).json({
        error: "Invalid token format",
        code: "INVALID_TOKEN_FORMAT",
      });
    }
    const user = await User.findByPk(userId);
    if (!user) {
      return res.status(401).json({
        error: "User not found",
        code: "USER_NOT_FOUND",
      });
    }
    // Banned users are rejected even with a valid token.
    if (user.isBanned) {
      return res.status(403).json({
        error: "Your account has been suspended. Please contact support for more information.",
        code: "USER_BANNED",
      });
    }
    // jwtVersion is bumped on password change; stale tokens are invalidated.
    if (decoded.jwtVersion !== user.jwtVersion) {
      return res.status(401).json({
        error: "Session expired due to password change. Please log in again.",
        code: "JWT_VERSION_MISMATCH",
      });
    }
    req.user = user;
    next();
  } catch (error) {
    // Expired tokens get a distinct code so the client can refresh silently.
    if (error.name === "TokenExpiredError") {
      return res.status(401).json({
        error: "Token expired",
        code: "TOKEN_EXPIRED",
      });
    }
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.error("Auth middleware error", {
      error: error.message,
      stack: error.stack,
      tokenPresent: !!token,
      userId: req.user?.id
    });
    return res.status(403).json({
      error: "Invalid token",
      code: "INVALID_TOKEN",
    });
  }
};
// Optional authentication: attaches req.user when a valid cookie token is
// present; in every other case (missing, invalid, expired, banned, stale
// jwtVersion) it sets req.user = null and continues — it never responds 401.
const optionalAuth = async (req, res, next) => {
  const proceedAnonymously = () => {
    req.user = null;
    next();
  };
  const token = req.cookies?.accessToken;
  if (!token) {
    return proceedAnonymously();
  }
  try {
    const decoded = jwt.verify(token, process.env.JWT_ACCESS_SECRET);
    if (!decoded.id) {
      return proceedAnonymously();
    }
    const user = await User.findByPk(decoded.id);
    // Unknown, banned, or password-changed (stale jwtVersion) users are all
    // treated as unauthenticated here.
    if (!user || user.isBanned || decoded.jwtVersion !== user.jwtVersion) {
      return proceedAnonymously();
    }
    req.user = user;
    next();
  } catch (error) {
    // An invalid/expired token is acceptable for optional auth.
    proceedAnonymously();
  }
};
// Gate that only lets email-verified users through. Must run after
// authenticateToken so req.user is populated.
const requireVerifiedEmail = (req, res, next) => {
  const { user } = req;
  if (!user) {
    return res.status(401).json({
      error: "Authentication required",
      code: "NO_AUTH",
    });
  }
  if (user.isVerified) {
    return next();
  }
  return res.status(403).json({
    error: "Email verification required. Please verify your email address to perform this action.",
    code: "EMAIL_NOT_VERIFIED",
  });
};
// Gate that only lets admin users through. Must run after authenticateToken
// so req.user is populated.
const requireAdmin = (req, res, next) => {
  const { user } = req;
  if (!user) {
    return res.status(401).json({
      error: "Authentication required",
      code: "NO_AUTH",
    });
  }
  if (user.role === "admin") {
    return next();
  }
  return res.status(403).json({
    error: "Admin access required",
    code: "INSUFFICIENT_PERMISSIONS",
  });
};
module.exports = { authenticateToken, optionalAuth, requireVerifiedEmail, requireAdmin };

View File

@@ -1,21 +0,0 @@
// Constant-time string comparison so response timing does not reveal which
// character of the beta password first mismatches (the previous `!==`
// comparison short-circuits on the first differing character).
const secureCompare = (a, b) => {
  const len = Math.max(a.length, b.length);
  // Fold the length difference in so different-length inputs never match.
  let diff = a.length ^ b.length;
  for (let i = 0; i < len; i++) {
    // charCodeAt past the end yields NaN, which ToInt32 coerces to 0.
    diff |= a.charCodeAt(i) ^ b.charCodeAt(i);
  }
  return diff === 0;
};
// Gate requests behind a shared beta password supplied in the
// x-beta-password header; configured via the BETA_PASSWORD env var.
const verifyBetaPassword = (req, res, next) => {
  const betaPassword = req.headers['x-beta-password'];
  const configuredPassword = process.env.BETA_PASSWORD;
  if (!configuredPassword) {
    // Misconfiguration: fail closed with a 500 rather than letting all
    // requests through.
    console.error('BETA_PASSWORD environment variable is not set');
    return res.status(500).json({ error: 'Beta password not configured on server' });
  }
  if (!betaPassword) {
    return res.status(401).json({ error: 'Beta password required' });
  }
  if (!secureCompare(betaPassword, configuredPassword)) {
    return res.status(403).json({ error: 'Invalid beta password' });
  }
  next();
};
module.exports = { verifyBetaPassword };

View File

@@ -0,0 +1,96 @@
const csrf = require("csrf");
const cookieParser = require("cookie-parser");
const logger = require("../utils/logger");
// Token generator/verifier instance from the `csrf` package (HMAC-based).
const tokens = new csrf();
// Use a persistent secret from the environment so issued tokens stay valid
// across server restarts (and across instances sharing the same secret).
const secret = process.env.CSRF_SECRET;
// Fail fast at startup: CSRF protection is meaningless without a secret.
if (!secret) {
  const errorMsg = "CSRF_SECRET environment variable is required.";
  logger.error(errorMsg);
  throw new Error(errorMsg);
}
// Short secrets weaken the HMAC, so enforce a minimum length as well.
if (secret.length < 32) {
  const errorMsg = "CSRF_SECRET must be at least 32 characters for security";
  logger.error(errorMsg);
  throw new Error(errorMsg);
}
// CSRF guard using the double-submit cookie pattern: unsafe methods must echo
// the csrf-token cookie value via the X-CSRF-Token header (or body field),
// and the token itself must verify against the server-side secret.
const csrfProtection = (req, res, next) => {
  // Safe (read-only) methods are exempt.
  if (["GET", "HEAD", "OPTIONS"].includes(req.method)) {
    return next();
  }
  const suppliedToken = req.headers["x-csrf-token"] || req.body.csrfToken;
  const cookieToken = req.cookies?.["csrf-token"];
  // Both copies must be present and identical.
  if (!suppliedToken || suppliedToken !== cookieToken) {
    return res.status(403).json({
      error: "Invalid CSRF token",
      code: "CSRF_TOKEN_MISMATCH",
    });
  }
  // The token must also have been minted from our secret.
  if (!tokens.verify(secret, suppliedToken)) {
    return res.status(403).json({
      error: "Invalid CSRF token",
      code: "CSRF_TOKEN_INVALID",
    });
  }
  next();
};
// Middleware that mints a fresh CSRF token on each pass: stored in an
// httpOnly cookie, mirrored in the X-CSRF-Token response header, and exposed
// to later handlers via res.locals.
const generateCSRFToken = (req, res, next) => {
  const freshToken = tokens.create(secret);
  const cookieOptions = {
    httpOnly: true,
    secure: ["production", "prod", "qa"].includes(process.env.NODE_ENV),
    sameSite: "strict",
    maxAge: 60 * 60 * 1000, // 1 hour
  };
  res.cookie("csrf-token", freshToken, cookieOptions);
  res.set("X-CSRF-Token", freshToken);
  res.locals.csrfToken = freshToken;
  next();
};
// Standalone route handler that issues a CSRF token for initial page loads.
// Responds 204: the token travels only in the cookie and response header.
const getCSRFToken = (req, res) => {
  const issuedToken = tokens.create(secret);
  const cookieOptions = {
    httpOnly: true,
    secure: ["production", "prod", "qa"].includes(process.env.NODE_ENV),
    sameSite: "strict",
    maxAge: 60 * 60 * 1000,
  };
  res.cookie("csrf-token", issuedToken, cookieOptions);
  res.set("X-CSRF-Token", issuedToken);
  res.status(204).send();
};
module.exports = {
csrfProtection,
generateCSRFToken,
getCSRFToken,
cookieParser: cookieParser(),
};

View File

@@ -0,0 +1,33 @@
const logger = require('../utils/logger');
// Error-logging middleware: records every error with full request context
// (sanitized body, user, request ID), classified as warn for 4xx and error
// for 5xx, then forwards the error to the next error handler.
const errorLogger = (err, req, res, next) => {
  const reqLogger = logger.withRequestId(req.id);
  const errorContext = {
    method: req.method,
    url: req.url,
    userAgent: req.get('User-Agent'),
    ip: req.ip,
    userId: req.user?.id || 'anonymous',
    body: logger.sanitize(req.body), // redact sensitive fields before logging
    params: req.params,
    query: req.query,
    statusCode: err.statusCode || 500,
    stack: err.stack,
  };
  const isClientError = err.statusCode && err.statusCode < 500;
  if (isClientError) {
    reqLogger.warn(`Client error: ${err.message}`, errorContext);
  } else {
    reqLogger.error(`Server error: ${err.message}`, errorContext);
  }
  // Never terminate the chain here — delegate the response to later handlers.
  next(err);
};
module.exports = errorLogger;

View File

@@ -0,0 +1,292 @@
const rateLimit = require("express-rate-limit");
const logger = require("../utils/logger");
// General rate limiter factory for Maps API endpoints.
// Keys on the authenticated user when available so shared IPs (offices, CGNAT)
// are not unfairly throttled; anonymous requests fall back to the client IP.
const createMapsRateLimiter = (windowMs, max, message) => {
  return rateLimit({
    windowMs, // time window in milliseconds
    max, // limit each IP/user to max requests per windowMs
    message: {
      error: message,
      retryAfter: Math.ceil(windowMs / 1000), // seconds
    },
    standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
    legacyHeaders: false, // Disable the `X-RateLimit-*` headers
    keyGenerator: (req) => {
      if (req.user?.id) {
        return `user:${req.user.id}`;
      }
      // FIX: express-rate-limit does not export `defaultKeyGenerator` (the
      // previous call was undefined at runtime). `ipKeyGenerator` is the
      // documented IPv6-safe helper (v7.4+) — confirm the installed version
      // exports it.
      return rateLimit.ipKeyGenerator(req.ip);
    },
  });
};
// Specific rate limiters for different endpoints.
// Budgets are per user (or per IP when unauthenticated) via
// createMapsRateLimiter's keyGenerator.
const rateLimiters = {
  // Places Autocomplete - allow more requests since users type frequently
  placesAutocomplete: createMapsRateLimiter(
    60 * 1000, // 1 minute window
    30, // 30 requests per minute per user/IP
    "Too many autocomplete requests. Please slow down."
  ),
  // Place Details - moderate limit since each selection triggers this
  placeDetails: createMapsRateLimiter(
    60 * 1000, // 1 minute window
    20, // 20 requests per minute per user/IP
    "Too many place detail requests. Please slow down."
  ),
  // Geocoding - lower limit since this is typically used less frequently
  geocoding: createMapsRateLimiter(
    60 * 1000, // 1 minute window
    10, // 10 requests per minute per user/IP
    "Too many geocoding requests. Please slow down."
  ),
};
// Enhanced rate limiter with user-specific limits (fixed window per key).
// NOTE: the store is a per-process in-memory Map, so limits are not shared
// across instances — use Redis or similar in a multi-instance deployment.
const createUserBasedRateLimiter = (windowMs, max, message) => {
  const store = new Map(); // Simple in-memory store (use Redis in production)
  return (req, res, next) => {
    // Key on the authenticated user when available, otherwise the client IP.
    // FIX: the previous fallback called rateLimit.defaultKeyGenerator, which
    // is not part of express-rate-limit's exported API and was undefined at
    // runtime, throwing for every unauthenticated request.
    const key = req.user?.id ? `user:${req.user.id}` : `ip:${req.ip}`;
    const now = Date.now();
    const windowStart = now - windowMs;
    // Evict entries whose window has fully elapsed (O(store) per request,
    // acceptable for the expected key counts).
    for (const [k, data] of store.entries()) {
      if (data.windowStart < windowStart) {
        store.delete(k);
      }
    }
    // Get or create this key's counter; a stale window starts a fresh one.
    let userData = store.get(key);
    if (!userData || userData.windowStart < windowStart) {
      userData = {
        count: 0,
        windowStart: now,
        resetTime: now + windowMs,
      };
    }
    // Reject once the budget for the current window is spent.
    if (userData.count >= max) {
      return res.status(429).json({
        error: message,
        retryAfter: Math.ceil((userData.resetTime - now) / 1000),
      });
    }
    userData.count++;
    store.set(key, userData);
    // Mirror the standard draft RateLimit-* headers.
    res.set({
      "RateLimit-Limit": max,
      "RateLimit-Remaining": Math.max(0, max - userData.count),
      "RateLimit-Reset": new Date(userData.resetTime).toISOString(),
    });
    next();
  };
};
// Burst protection for expensive operations: very short window to stop
// rapid-fire clicks/scripts without affecting normal usage.
const burstProtection = createUserBasedRateLimiter(
  10 * 1000, // 10 seconds
  5, // 5 requests per 10 seconds
  "Too many requests in a short period. Please slow down."
);
// Upload presign rate limiter - 30 requests per minute. Bounds how many S3
// presigned upload URLs a single user can mint.
const uploadPresignLimiter = createUserBasedRateLimiter(
  60 * 1000, // 1 minute window
  30, // 30 presign requests per minute per user
  "Too many upload requests. Please slow down."
);
// Factory for an express-rate-limit `handler` that logs the throttled request
// (who, where, which limiter) before sending the limiter's configured
// message with its configured status code.
const createRateLimitHandler = (limiterName) => (req, res, next, options) => {
  const logContext = {
    limiter: limiterName,
    ip: req.ip,
    userId: req.user?.id || 'anonymous',
    method: req.method,
    url: req.url,
    userAgent: req.get('User-Agent'),
    message: options.message?.error || 'Rate limit exceeded'
  };
  logger.withRequestId(req.id).warn('Rate limit exceeded', logContext);
  res.status(options.statusCode).json(options.message);
};
// Authentication rate limiters. All use draft-standard RateLimit-* headers
// and a logging handler; windows/limits are tuned per endpoint sensitivity.
const authRateLimiters = {
  // Login rate limiter - stricter to prevent brute force
  login: rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 5, // 5 login attempts per 15 minutes
    message: {
      error: "Too many login attempts. Please try again in 15 minutes.",
      retryAfter: 900, // seconds
    },
    standardHeaders: true,
    legacyHeaders: false,
    skipSuccessfulRequests: true, // Don't count successful logins
    handler: createRateLimitHandler('login'),
  }),
  // Registration rate limiter
  register: rateLimit({
    windowMs: 60 * 60 * 1000, // 1 hour
    max: 3, // 3 registration attempts per hour
    message: {
      error: "Too many registration attempts. Please try again later.",
      retryAfter: 3600,
    },
    standardHeaders: true,
    legacyHeaders: false,
    handler: createRateLimitHandler('register'),
  }),
  // Password reset rate limiter - limits reset *emails*, deterring abuse
  passwordReset: rateLimit({
    windowMs: 60 * 60 * 1000, // 1 hour
    max: 3, // 3 password reset requests per hour
    message: {
      error: "Too many password reset requests. Please try again later.",
      retryAfter: 3600,
    },
    standardHeaders: true,
    legacyHeaders: false,
    handler: createRateLimitHandler('passwordReset'),
  }),
  // Alpha code validation rate limiter - deters guessing invite codes
  alphaCodeValidation: rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 5, // 5 code validation attempts per 15 minutes
    message: {
      error: "Too many attempts. Please try again later.",
      retryAfter: 900,
    },
    standardHeaders: true,
    legacyHeaders: false,
    handler: createRateLimitHandler('alphaCodeValidation'),
  }),
  // Email verification rate limiter - protect against brute force on 6-digit codes
  emailVerification: rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 10, // 10 verification attempts per 15 minutes per IP
    message: {
      error: "Too many verification attempts. Please try again later.",
      retryAfter: 900,
    },
    standardHeaders: true,
    legacyHeaders: false,
    handler: createRateLimitHandler('emailVerification'),
  }),
  // General API rate limiter
  general: rateLimit({
    windowMs: 60 * 1000, // 1 minute
    max: 100, // 100 requests per minute
    message: {
      error: "Too many requests. Please slow down.",
      retryAfter: 60,
    },
    standardHeaders: true,
    legacyHeaders: false,
    handler: createRateLimitHandler('general'),
  }),
  // Two-Factor Authentication rate limiters
  twoFactorVerification: rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 10, // 10 verification attempts per 15 minutes
    message: {
      error: "Too many verification attempts. Please try again later.",
      retryAfter: 900,
    },
    standardHeaders: true,
    legacyHeaders: false,
    skipSuccessfulRequests: true,
    handler: createRateLimitHandler('twoFactorVerification'),
  }),
  twoFactorSetup: rateLimit({
    windowMs: 60 * 60 * 1000, // 1 hour
    max: 5, // 5 setup attempts per hour
    message: {
      error: "Too many setup attempts. Please try again later.",
      retryAfter: 3600,
    },
    standardHeaders: true,
    legacyHeaders: false,
    handler: createRateLimitHandler('twoFactorSetup'),
  }),
  // Recovery codes are single-use secrets, so the budget is the tightest here.
  recoveryCode: rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 3, // 3 recovery code attempts per 15 minutes
    message: {
      error: "Too many recovery code attempts. Please try again later.",
      retryAfter: 900,
    },
    standardHeaders: true,
    legacyHeaders: false,
    skipSuccessfulRequests: false, // Count all attempts for security
    handler: createRateLimitHandler('recoveryCode'),
  }),
  // Limits how often OTP emails can be triggered (cost + abuse control).
  emailOtpSend: rateLimit({
    windowMs: 10 * 60 * 1000, // 10 minutes
    max: 2, // 2 OTP sends per 10 minutes
    message: {
      error: "Please wait before requesting another code.",
      retryAfter: 600,
    },
    standardHeaders: true,
    legacyHeaders: false,
    handler: createRateLimitHandler('emailOtpSend'),
  }),
};
module.exports = {
// Individual rate limiters
placesAutocomplete: rateLimiters.placesAutocomplete,
placeDetails: rateLimiters.placeDetails,
geocoding: rateLimiters.geocoding,
// Auth rate limiters
loginLimiter: authRateLimiters.login,
registerLimiter: authRateLimiters.register,
passwordResetLimiter: authRateLimiters.passwordReset,
alphaCodeValidationLimiter: authRateLimiters.alphaCodeValidation,
emailVerificationLimiter: authRateLimiters.emailVerification,
generalLimiter: authRateLimiters.general,
// Two-Factor Authentication rate limiters
twoFactorVerificationLimiter: authRateLimiters.twoFactorVerification,
twoFactorSetupLimiter: authRateLimiters.twoFactorSetup,
recoveryCodeLimiter: authRateLimiters.recoveryCode,
emailOtpSendLimiter: authRateLimiters.emailOtpSend,
// Burst protection
burstProtection,
// Upload rate limiter
uploadPresignLimiter,
// Utility functions
createMapsRateLimiter,
createUserBasedRateLimiter,
};

View File

@@ -0,0 +1,133 @@
const logger = require('../utils/logger');
// HTTPS enforcement middleware: redirects plain-HTTP requests to the
// configured canonical host (preventing Host-header injection) and sets HSTS
// on secure responses. Skipped entirely in development.
const enforceHTTPS = (req, res, next) => {
  // Skip HTTPS enforcement in development
  if (
    process.env.NODE_ENV === "dev" ||
    process.env.NODE_ENV === "development"
  ) {
    return next();
  }
  // Check if request is already HTTPS (directly or via a trusted proxy)
  const isSecure =
    req.secure ||
    req.headers["x-forwarded-proto"] === "https" ||
    req.protocol === "https";
  if (!isSecure) {
    // FIX: FRONTEND_URL is conventionally a full URL (e.g.
    // "https://example.com"); using it verbatim as a host produced redirects
    // like "https://https://example.com/...". Extract just the host.
    const frontend = process.env.FRONTEND_URL || "";
    const allowedHost = frontend.includes("://")
      ? new URL(frontend).host
      : frontend;
    // Log the redirect for monitoring when the request host doesn't match
    // the canonical host (possible Host-header tampering).
    if (req.headers.host !== allowedHost) {
      const reqLogger = logger.withRequestId(req.id);
      reqLogger.warn("Host header mismatch during HTTPS redirect", {
        requestHost: req.headers.host,
        allowedHost,
        ip: req.ip,
        url: req.url,
        eventType: 'SECURITY_HOST_MISMATCH'
      });
    }
    // Redirect to HTTPS with validated host
    return res.redirect(301, `https://${allowedHost}${req.url}`);
  }
  // Set Strict-Transport-Security header (1 year, subdomains, preload)
  res.setHeader(
    "Strict-Transport-Security",
    "max-age=31536000; includeSubDomains; preload"
  );
  next();
};
// Baseline security response headers applied to every request.
const securityHeaders = (req, res, next) => {
  const headers = {
    "X-Content-Type-Options": "nosniff", // disable MIME sniffing
    "X-Frame-Options": "DENY", // disallow framing (clickjacking)
    "Referrer-Policy": "strict-origin-when-cross-origin",
    // Permissions-Policy (formerly Feature-Policy): lock down powerful APIs
    "Permissions-Policy": "camera=(), microphone=(), geolocation=(self)",
  };
  for (const [name, value] of Object.entries(headers)) {
    res.setHeader(name, value);
  }
  next();
};
// Request ID middleware for tracking: tags each request with a random
// 128-bit hex ID, echoed in the X-Request-ID response header.
// FIX: the crypto module was previously bound to the misleading name
// `requestId`, which read as if it were the ID itself.
const crypto = require("crypto");
const addRequestId = (req, res, next) => {
  req.id = crypto.randomBytes(16).toString("hex");
  res.setHeader("X-Request-ID", req.id);
  next();
};
// Emit a structured warning for a security-relevant event, enriched with
// request metadata (IP, user agent, authenticated user, request ID).
const logSecurityEvent = (eventType, details, req) => {
  const entry = {
    eventType,
    ip: req.ip || req.connection.remoteAddress,
    userAgent: req.get("user-agent"),
    userId: req.user?.id || "anonymous",
    ...details,
  };
  const reqLogger = logger.withRequestId(req.id || "unknown");
  reqLogger.warn(`Security event: ${eventType}`, entry);
};
// Final error responder: 400s keep their message, other known 4xx statuses
// get fixed generic text, and 5xx details (message/stack) are exposed only
// in development to prevent information leakage.
const sanitizeError = (err, req, res, next) => {
  const isDevelopment =
    process.env.NODE_ENV === "dev" || process.env.NODE_ENV === "development";
  const status = err.status;
  if (status === 400) {
    // Client validation errors may carry their specific message.
    return res.status(400).json({
      error: err.message || "Bad Request",
      requestId: req.id,
    });
  }
  const fixedMessages = {
    401: "Unauthorized",
    403: "Forbidden",
    404: "Not Found",
  };
  if (fixedMessages[status]) {
    return res.status(status).json({
      error: fixedMessages[status],
      requestId: req.id,
    });
  }
  // Server errors are generic outside development.
  return res.status(status || 500).json({
    error: isDevelopment ? err.message : "Internal Server Error",
    requestId: req.id,
    ...(isDevelopment && { stack: err.stack }),
  });
};
module.exports = {
enforceHTTPS,
securityHeaders,
addRequestId,
logSecurityEvent,
sanitizeError,
};

View File

@@ -0,0 +1,73 @@
const TwoFactorService = require("../services/TwoFactorService");
const logger = require("../utils/logger");
/**
 * Middleware factory requiring step-up (recently re-verified second factor)
 * authentication for sensitive actions. Users without 2FA enabled pass
 * through unchanged.
 *
 * @param {string} action - The sensitive action being protected
 * @returns {Function} Express middleware function
 */
const requireStepUpAuth = (action) => async (req, res, next) => {
  try {
    // Step-up only applies to accounts that have 2FA configured.
    if (!req.user.twoFactorEnabled) {
      return next();
    }
    // A valid recent step-up session satisfies the requirement.
    if (TwoFactorService.validateStepUpSession(req.user)) {
      return next();
    }
    logger.info(
      `Step-up authentication required for user ${req.user.id}, action: ${action}`
    );
    return res.status(403).json({
      error: "Multi-factor authentication required",
      code: "STEP_UP_REQUIRED",
      action: action,
      methods: getTwoFactorMethods(req.user),
    });
  } catch (error) {
    logger.error("Step-up auth middleware error:", error);
    return res.status(500).json({
      error: "An error occurred during authentication",
    });
  }
};
/**
 * Get available 2FA methods for a user.
 * @param {Object} user - User object
 * @returns {string[]} Array of available methods
 */
function getTwoFactorMethods(user) {
  // Email OTP is always offered as a backup second factor; TOTP leads when
  // it is the user's configured primary method.
  const methods =
    user.twoFactorMethod === "totp" ? ["totp", "email"] : ["email"];
  // Recovery codes are offered only while unused codes remain.
  if (user.recoveryCodesHash) {
    const recoveryData = JSON.parse(user.recoveryCodesHash);
    const remaining =
      TwoFactorService.getRemainingRecoveryCodesCount(recoveryData);
    if (remaining > 0) {
      methods.push("recovery");
    }
  }
  return methods;
}
module.exports = { requireStepUpAuth };

View File

@@ -1,40 +0,0 @@
const multer = require('multer');
const path = require('path');
const { v4: uuidv4 } = require('uuid');
// Configure disk storage for profile images.
// NOTE(review): writes to a local ../uploads/profiles directory — assumes it
// exists and the process can write to it; confirm for containerized deploys.
const profileImageStorage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, path.join(__dirname, '../uploads/profiles'));
  },
  filename: function (req, file, cb) {
    // Generate unique filename: uuid + original extension, so user-supplied
    // names never collide or reach the filesystem directly.
    const uniqueId = uuidv4();
    const ext = path.extname(file.originalname);
    cb(null, `${uniqueId}${ext}`);
  }
});
// Multer file filter: accept only common web image MIME types.
// Note: mimetype is client-declared — size limits and downstream handling
// remain the real safety net.
const imageFileFilter = (req, file, cb) => {
  const allowedMimes = new Set([
    'image/jpeg',
    'image/jpg',
    'image/png',
    'image/gif',
    'image/webp',
  ]);
  if (!allowedMimes.has(file.mimetype)) {
    return cb(new Error('Invalid file type. Only JPEG, PNG, GIF and WebP images are allowed.'), false);
  }
  cb(null, true);
};
// Create multer upload middleware for profile images: a single file from the
// "profileImage" form field, images only, capped at 5MB.
const uploadProfileImage = multer({
  storage: profileImageStorage,
  fileFilter: imageFileFilter,
  limits: {
    fileSize: 5 * 1024 * 1024 // 5MB limit
  }
}).single('profileImage');
module.exports = {
uploadProfileImage
};

View File

@@ -0,0 +1,391 @@
const { body, query, validationResult } = require("express-validator");
const DOMPurify = require("dompurify");
const { JSDOM } = require("jsdom");
// Create a DOMPurify instance for server-side sanitization (jsdom supplies
// the browser-like window that DOMPurify requires).
const window = new JSDOM("").window;
const purify = DOMPurify(window);
// Password strength: min 8 chars with a digit, a lowercase, an uppercase,
// and one special character from -@$!%*?&#^.
// NOTE(review): the (?=.*[a-zA-Z]) lookahead is redundant given the separate
// lowercase and uppercase lookaheads.
const passwordStrengthRegex =
  /^(?=.*\d)(?=.*[a-z])(?=.*[A-Z])(?=.*[a-zA-Z])(?=.*[-@$!%*?&#^]).{8,}$/; //-@$!%*?&#^
// Deny-list of widely used passwords, compared case-insensitively.
const commonPasswords = [
  "password",
  "123456",
  "123456789",
  "qwerty",
  "abc123",
  "password123",
  "admin",
  "letmein",
  "welcome",
  "monkey",
  "1234567890",
];
// Sanitization middleware: recursively strips all HTML from string values in
// req.body, req.query, and req.params to mitigate stored/reflected XSS.
const sanitizeInput = (req, res, next) => {
  const sanitizeValue = (value) => {
    if (typeof value === "string") {
      // ALLOWED_TAGS: [] removes every tag, leaving plain text.
      return purify.sanitize(value, { ALLOWED_TAGS: [] });
    }
    // FIX: arrays must be handled before the generic object branch —
    // Object.entries() was converting arrays into plain objects
    // ({ "0": ..., "1": ... }), corrupting any array payload.
    if (Array.isArray(value)) {
      return value.map(sanitizeValue);
    }
    if (typeof value === "object" && value !== null) {
      const sanitized = {};
      for (const [key, val] of Object.entries(value)) {
        sanitized[key] = sanitizeValue(val);
      }
      return sanitized;
    }
    // Numbers, booleans, null, undefined pass through untouched.
    return value;
  };
  if (req.body) {
    req.body = sanitizeValue(req.body);
  }
  if (req.query) {
    req.query = sanitizeValue(req.query);
  }
  if (req.params) {
    req.params = sanitizeValue(req.params);
  }
  next();
};
// Terminates the chain with 400 + per-field details when any preceding
// express-validator rule failed; otherwise passes through.
const handleValidationErrors = (req, res, next) => {
  const errors = validationResult(req);
  if (errors.isEmpty()) {
    return next();
  }
  const details = errors
    .array()
    .map(({ path, msg }) => ({ field: path, message: msg }));
  return res.status(400).json({
    error: "Validation failed",
    details,
  });
};
// Registration validation rules
// NOTE(review): withMessage applies to the immediately preceding chain link;
// here "Please provide a valid email address" follows the normalizeEmail
// sanitizer rather than isEmail — confirm it attaches as intended.
const validateRegistration = [
  body("email")
    .isEmail()
    .normalizeEmail()
    .withMessage("Please provide a valid email address")
    .isLength({ max: 255 })
    .withMessage("Email must be less than 255 characters"),
  body("password")
    .isLength({ min: 8, max: 128 })
    .withMessage("Password must be between 8 and 128 characters")
    .matches(passwordStrengthRegex)
    .withMessage(
      "Password does not meet requirements"
    )
    .custom((value) => {
      // Reject entries from the shared common-password deny-list.
      if (commonPasswords.includes(value.toLowerCase())) {
        throw new Error(
          "Password is too common. Please choose a stronger password"
        );
      }
      return true;
    }),
  body("firstName")
    .trim()
    .isLength({ min: 1, max: 50 })
    .withMessage("First name must be between 1 and 50 characters")
    .matches(/^[a-zA-Z\s\-']+$/)
    .withMessage(
      "First name can only contain letters, spaces, hyphens, and apostrophes"
    ),
  body("lastName")
    .trim()
    .isLength({ min: 1, max: 50 })
    .withMessage("Last name must be between 1 and 50 characters")
    .matches(/^[a-zA-Z\s\-']+$/)
    .withMessage(
      "Last name can only contain letters, spaces, hyphens, and apostrophes"
    ),
  body("phone")
    .optional()
    .isMobilePhone()
    .withMessage("Please provide a valid phone number"),
  handleValidationErrors,
];
// Login validation rules — deliberately lax on password content (only a
// length cap) so legacy passwords still authenticate.
const validateLogin = [
  body("email")
    .isEmail()
    .normalizeEmail()
    .withMessage("Please provide a valid email address"),
  body("password")
    .notEmpty()
    .withMessage("Password is required")
    .isLength({ max: 128 })
    .withMessage("Password is too long"),
  handleValidationErrors,
];
// Google auth validation — OAuth authorization code exchange payload.
const validateGoogleAuth = [
  body("code")
    .notEmpty()
    .withMessage("Authorization code is required")
    .isLength({ max: 512 })
    .withMessage("Invalid authorization code format"),
  handleValidationErrors,
];
// Profile update validation — every field optional; only provided fields
// are validated.
const validateProfileUpdate = [
  body("firstName")
    .optional()
    .trim()
    .isLength({ min: 1, max: 50 })
    .withMessage("First name must be between 1 and 50 characters")
    .matches(/^[a-zA-Z\s\-']+$/)
    .withMessage(
      "First name can only contain letters, spaces, hyphens, and apostrophes"
    ),
  body("lastName")
    .optional()
    .trim()
    .isLength({ min: 1, max: 50 })
    .withMessage("Last name must be between 1 and 50 characters")
    .matches(/^[a-zA-Z\s\-']+$/)
    .withMessage(
      "Last name can only contain letters, spaces, hyphens, and apostrophes"
    ),
  body("phone")
    .optional()
    .isMobilePhone()
    .withMessage("Please provide a valid phone number"),
  body("address1")
    .optional()
    .trim()
    .isLength({ max: 255 })
    .withMessage("Address line 1 must be less than 255 characters"),
  body("address2")
    .optional()
    .trim()
    .isLength({ max: 255 })
    .withMessage("Address line 2 must be less than 255 characters"),
  body("city")
    .optional()
    .trim()
    .isLength({ max: 100 })
    .withMessage("City must be less than 100 characters")
    .matches(/^[a-zA-Z\s\-']+$/)
    .withMessage(
      "City can only contain letters, spaces, hyphens, and apostrophes"
    ),
  body("state")
    .optional()
    .trim()
    .isLength({ max: 100 })
    .withMessage("State must be less than 100 characters"),
  body("zipCode")
    .optional()
    .trim()
    // US-format ZIP or ZIP+4 only.
    .matches(/^[0-9]{5}(-[0-9]{4})?$/)
    .withMessage("Please provide a valid ZIP code"),
  body("country")
    .optional()
    .trim()
    .isLength({ max: 100 })
    .withMessage("Country must be less than 100 characters"),
  handleValidationErrors,
];
// Password change validation — requires the current password, a strong new
// password distinct from the current one, and a matching confirmation.
const validatePasswordChange = [
  body("currentPassword")
    .notEmpty()
    .withMessage("Current password is required"),
  body("newPassword")
    .isLength({ min: 8, max: 128 })
    .withMessage("New password must be between 8 and 128 characters")
    .matches(passwordStrengthRegex)
    .withMessage(
      "New password must contain at least one uppercase letter, one lowercase letter, one number, and one special character"
    )
    .custom((value, { req }) => {
      if (value === req.body.currentPassword) {
        throw new Error("New password must be different from current password");
      }
      if (commonPasswords.includes(value.toLowerCase())) {
        throw new Error(
          "Password is too common. Please choose a stronger password"
        );
      }
      return true;
    }),
  body("confirmPassword").custom((value, { req }) => {
    if (value !== req.body.newPassword) {
      throw new Error("Password confirmation does not match");
    }
    return true;
  }),
  handleValidationErrors,
];
// Forgot password validation
const validateForgotPassword = [
  body("email")
    .isEmail()
    .normalizeEmail()
    .withMessage("Please provide a valid email address")
    .isLength({ max: 255 })
    .withMessage("Email must be less than 255 characters"),
  handleValidationErrors,
];
// Reset password validation — token is a fixed-length 64-char string.
const validateResetPassword = [
  body("token")
    .notEmpty()
    .withMessage("Reset token is required")
    .isLength({ min: 64, max: 64 })
    .withMessage("Invalid reset token format"),
  body("newPassword")
    .isLength({ min: 8, max: 128 })
    .withMessage("Password must be between 8 and 128 characters")
    .matches(passwordStrengthRegex)
    .withMessage(
      "Password does not meet requirements"
    )
    .custom((value) => {
      if (commonPasswords.includes(value.toLowerCase())) {
        throw new Error(
          "Password is too common. Please choose a stronger password"
        );
      }
      return true;
    }),
  handleValidationErrors,
];
// Verify reset token validation
const validateVerifyResetToken = [
  body("token")
    .notEmpty()
    .withMessage("Reset token is required")
    .isLength({ min: 64, max: 64 })
    .withMessage("Invalid reset token format"),
  handleValidationErrors,
];
// Feedback validation
const validateFeedback = [
  body("feedbackText")
    .trim()
    .isLength({ min: 5, max: 5000 })
    .withMessage("Feedback must be between 5 and 5000 characters"),
  body("url")
    .optional()
    .trim()
    .isLength({ max: 500 })
    .withMessage("URL must be less than 500 characters"),
  handleValidationErrors,
];
// Coordinate validation for query parameters (e.g., location search)
const validateCoordinatesQuery = [
  query("lat")
    .optional()
    .isFloat({ min: -90, max: 90 })
    .withMessage("Latitude must be between -90 and 90"),
  query("lng")
    .optional()
    .isFloat({ min: -180, max: 180 })
    .withMessage("Longitude must be between -180 and 180"),
  query("radius")
    .optional()
    .isFloat({ min: 0.1, max: 100 })
    .withMessage("Radius must be between 0.1 and 100 miles"),
  handleValidationErrors,
];
// Coordinate validation for body parameters (e.g., user addresses, forum posts)
// NOTE(review): unlike the other chains, this one does not end with
// handleValidationErrors — confirm callers append it themselves.
const validateCoordinatesBody = [
  body("latitude")
    .optional()
    .isFloat({ min: -90, max: 90 })
    .withMessage("Latitude must be between -90 and 90"),
  body("longitude")
    .optional()
    .isFloat({ min: -180, max: 180 })
    .withMessage("Longitude must be between -180 and 180"),
];
// Two-Factor Authentication validation — 6-digit one-time codes.
const validateTotpCode = [
  body("code")
    .trim()
    .matches(/^\d{6}$/)
    .withMessage("TOTP code must be exactly 6 digits"),
  handleValidationErrors,
];
const validateEmailOtp = [
  body("code")
    .trim()
    .matches(/^\d{6}$/)
    .withMessage("Email OTP must be exactly 6 digits"),
  handleValidationErrors,
];
// Recovery codes use the XXXX-XXXX alphanumeric format.
// (The `i` flag is redundant given the explicit A-Za-z class.)
const validateRecoveryCode = [
  body("code")
    .trim()
    .matches(/^[A-Za-z0-9]{4}-[A-Za-z0-9]{4}$/i)
    .withMessage("Recovery code must be in format XXXX-XXXX"),
  handleValidationErrors,
];
module.exports = {
sanitizeInput,
handleValidationErrors,
validateRegistration,
validateLogin,
validateGoogleAuth,
validateProfileUpdate,
validatePasswordChange,
validateForgotPassword,
validateResetPassword,
validateVerifyResetToken,
validateFeedback,
validateCoordinatesQuery,
validateCoordinatesBody,
// Two-Factor Authentication
validateTotpCode,
validateEmailOtp,
validateRecoveryCode,
};

View File

@@ -0,0 +1,157 @@
"use strict";
// Migration: create the core Users table with auth, profile, availability,
// Stripe, lockout, and role columns.
module.exports = {
  up: async (queryInterface, Sequelize) => {
    await queryInterface.createTable("Users", {
      id: {
        type: Sequelize.UUID,
        defaultValue: Sequelize.UUIDV4,
        primaryKey: true,
      },
      email: {
        type: Sequelize.STRING,
        unique: true,
        allowNull: false,
      },
      // Nullable: OAuth accounts have no local password.
      password: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      firstName: {
        type: Sequelize.STRING,
        allowNull: false,
      },
      lastName: {
        type: Sequelize.STRING,
        allowNull: false,
      },
      phone: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      authProvider: {
        type: Sequelize.ENUM("local", "google"),
        defaultValue: "local",
      },
      // External provider's user ID (e.g. Google subject), for OAuth accounts.
      providerId: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      address1: {
        type: Sequelize.STRING,
      },
      address2: {
        type: Sequelize.STRING,
      },
      city: {
        type: Sequelize.STRING,
      },
      state: {
        type: Sequelize.STRING,
      },
      zipCode: {
        type: Sequelize.STRING,
      },
      country: {
        type: Sequelize.STRING,
      },
      profileImage: {
        type: Sequelize.STRING,
      },
      // Email verification state and one-time token.
      isVerified: {
        type: Sequelize.BOOLEAN,
        defaultValue: false,
      },
      verificationToken: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      verificationTokenExpiry: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      verifiedAt: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      passwordResetToken: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      passwordResetTokenExpiry: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      // Default availability window (HH:MM strings).
      defaultAvailableAfter: {
        type: Sequelize.STRING,
        defaultValue: "09:00",
      },
      defaultAvailableBefore: {
        type: Sequelize.STRING,
        defaultValue: "17:00",
      },
      defaultSpecifyTimesPerDay: {
        type: Sequelize.BOOLEAN,
        defaultValue: false,
      },
      // Per-weekday availability, used when defaultSpecifyTimesPerDay is set.
      defaultWeeklyTimes: {
        type: Sequelize.JSONB,
        defaultValue: {
          sunday: { availableAfter: "09:00", availableBefore: "17:00" },
          monday: { availableAfter: "09:00", availableBefore: "17:00" },
          tuesday: { availableAfter: "09:00", availableBefore: "17:00" },
          wednesday: { availableAfter: "09:00", availableBefore: "17:00" },
          thursday: { availableAfter: "09:00", availableBefore: "17:00" },
          friday: { availableAfter: "09:00", availableBefore: "17:00" },
          saturday: { availableAfter: "09:00", availableBefore: "17:00" },
        },
      },
      stripeConnectedAccountId: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      stripeCustomerId: {
        type: Sequelize.STRING,
        allowNull: true,
      },
      // Brute-force lockout bookkeeping.
      loginAttempts: {
        type: Sequelize.INTEGER,
        defaultValue: 0,
      },
      lockUntil: {
        type: Sequelize.DATE,
        allowNull: true,
      },
      // Incremented on password change to invalidate outstanding JWTs.
      jwtVersion: {
        type: Sequelize.INTEGER,
        defaultValue: 0,
        allowNull: false,
      },
      role: {
        type: Sequelize.ENUM("user", "admin"),
        defaultValue: "user",
        allowNull: false,
      },
      itemRequestNotificationRadius: {
        type: Sequelize.INTEGER,
        defaultValue: 10,
        allowNull: true,
      },
      createdAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
      updatedAt: {
        type: Sequelize.DATE,
        allowNull: false,
      },
    });
    // Add indexes
    await queryInterface.addIndex("Users", ["email"], { unique: true });
  },
  // NOTE(review): on Postgres, dropTable does not remove the ENUM types
  // created for authProvider/role — confirm whether explicit cleanup is
  // needed for a clean rollback.
  down: async (queryInterface, Sequelize) => {
    await queryInterface.dropTable("Users");
  },
};

View File

@@ -0,0 +1,60 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("AlphaInvitations", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
code: {
type: Sequelize.STRING,
unique: true,
allowNull: false,
},
email: {
type: Sequelize.STRING,
unique: true,
allowNull: false,
},
status: {
type: Sequelize.ENUM("pending", "active", "revoked"),
defaultValue: "pending",
},
usedBy: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "SET NULL",
},
usedAt: {
type: Sequelize.DATE,
allowNull: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("AlphaInvitations", ["code"], {
unique: true,
});
await queryInterface.addIndex("AlphaInvitations", ["email"]);
await queryInterface.addIndex("AlphaInvitations", ["status"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("AlphaInvitations");
},
};

View File

@@ -0,0 +1,168 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("Items", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
name: {
type: Sequelize.STRING,
allowNull: false,
},
description: {
type: Sequelize.TEXT,
allowNull: true,
},
pickUpAvailable: {
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: false,
},
localDeliveryAvailable: {
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: false,
},
localDeliveryRadius: {
type: Sequelize.INTEGER,
},
shippingAvailable: {
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: false,
},
inPlaceUseAvailable: {
type: Sequelize.BOOLEAN,
allowNull: false,
defaultValue: false,
},
pricePerHour: {
type: Sequelize.DECIMAL(10, 2),
},
pricePerDay: {
type: Sequelize.DECIMAL(10, 2),
},
pricePerWeek: {
type: Sequelize.DECIMAL(10, 2),
},
pricePerMonth: {
type: Sequelize.DECIMAL(10, 2),
},
replacementCost: {
type: Sequelize.DECIMAL(10, 2),
allowNull: false,
},
address1: {
type: Sequelize.STRING,
},
address2: {
type: Sequelize.STRING,
},
city: {
type: Sequelize.STRING,
},
state: {
type: Sequelize.STRING,
},
zipCode: {
type: Sequelize.STRING,
},
country: {
type: Sequelize.STRING,
},
latitude: {
type: Sequelize.DECIMAL(10, 8),
},
longitude: {
type: Sequelize.DECIMAL(11, 8),
},
images: {
type: Sequelize.ARRAY(Sequelize.STRING),
defaultValue: [],
},
isAvailable: {
type: Sequelize.BOOLEAN,
defaultValue: true,
},
rules: {
type: Sequelize.TEXT,
},
availableAfter: {
type: Sequelize.STRING,
defaultValue: "09:00",
},
availableBefore: {
type: Sequelize.STRING,
defaultValue: "17:00",
},
specifyTimesPerDay: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
weeklyTimes: {
type: Sequelize.JSONB,
defaultValue: {
sunday: { availableAfter: "09:00", availableBefore: "17:00" },
monday: { availableAfter: "09:00", availableBefore: "17:00" },
tuesday: { availableAfter: "09:00", availableBefore: "17:00" },
wednesday: { availableAfter: "09:00", availableBefore: "17:00" },
thursday: { availableAfter: "09:00", availableBefore: "17:00" },
friday: { availableAfter: "09:00", availableBefore: "17:00" },
saturday: { availableAfter: "09:00", availableBefore: "17:00" },
},
},
ownerId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
isDeleted: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
deletedBy: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "SET NULL",
},
deletedAt: {
type: Sequelize.DATE,
allowNull: true,
},
deletionReason: {
type: Sequelize.TEXT,
allowNull: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("Items", ["ownerId"]);
await queryInterface.addIndex("Items", ["isAvailable"]);
await queryInterface.addIndex("Items", ["isDeleted"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("Items");
},
};

View File

@@ -0,0 +1,72 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("UserAddresses", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
userId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
address1: {
type: Sequelize.STRING,
allowNull: false,
},
address2: {
type: Sequelize.STRING,
},
city: {
type: Sequelize.STRING,
allowNull: false,
},
state: {
type: Sequelize.STRING,
allowNull: false,
},
zipCode: {
type: Sequelize.STRING,
allowNull: false,
},
country: {
type: Sequelize.STRING,
allowNull: false,
defaultValue: "US",
},
latitude: {
type: Sequelize.DECIMAL(10, 8),
},
longitude: {
type: Sequelize.DECIMAL(11, 8),
},
isPrimary: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("UserAddresses", ["userId"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("UserAddresses");
},
};

View File

@@ -0,0 +1,210 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("Rentals", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
itemId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Items",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
renterId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
ownerId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
startDateTime: {
type: Sequelize.DATE,
allowNull: false,
},
endDateTime: {
type: Sequelize.DATE,
allowNull: false,
},
totalAmount: {
type: Sequelize.DECIMAL(10, 2),
allowNull: false,
},
platformFee: {
type: Sequelize.DECIMAL(10, 2),
allowNull: false,
},
payoutAmount: {
type: Sequelize.DECIMAL(10, 2),
allowNull: false,
},
status: {
type: Sequelize.ENUM(
"pending",
"confirmed",
"declined",
"active",
"completed",
"cancelled",
"returned_late",
"returned_late_and_damaged",
"damaged",
"lost"
),
allowNull: false,
},
paymentStatus: {
type: Sequelize.ENUM("pending", "paid", "refunded", "not_required"),
allowNull: false,
},
payoutStatus: {
type: Sequelize.ENUM("pending", "completed", "failed"),
allowNull: true,
},
payoutProcessedAt: {
type: Sequelize.DATE,
},
stripeTransferId: {
type: Sequelize.STRING,
},
refundAmount: {
type: Sequelize.DECIMAL(10, 2),
},
refundProcessedAt: {
type: Sequelize.DATE,
},
refundReason: {
type: Sequelize.TEXT,
},
stripeRefundId: {
type: Sequelize.STRING,
},
cancelledBy: {
type: Sequelize.ENUM("renter", "owner"),
},
cancelledAt: {
type: Sequelize.DATE,
},
declineReason: {
type: Sequelize.TEXT,
},
stripePaymentMethodId: {
type: Sequelize.STRING,
},
stripePaymentIntentId: {
type: Sequelize.STRING,
},
paymentMethodBrand: {
type: Sequelize.STRING,
},
paymentMethodLast4: {
type: Sequelize.STRING,
},
chargedAt: {
type: Sequelize.DATE,
},
deliveryMethod: {
type: Sequelize.ENUM("pickup", "delivery"),
defaultValue: "pickup",
},
deliveryAddress: {
type: Sequelize.TEXT,
},
intendedUse: {
type: Sequelize.TEXT,
},
itemRating: {
type: Sequelize.INTEGER,
},
itemReview: {
type: Sequelize.TEXT,
},
itemReviewSubmittedAt: {
type: Sequelize.DATE,
},
itemReviewVisible: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
renterRating: {
type: Sequelize.INTEGER,
},
renterReview: {
type: Sequelize.TEXT,
},
renterReviewSubmittedAt: {
type: Sequelize.DATE,
},
renterReviewVisible: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
itemPrivateMessage: {
type: Sequelize.TEXT,
},
renterPrivateMessage: {
type: Sequelize.TEXT,
},
actualReturnDateTime: {
type: Sequelize.DATE,
},
lateFees: {
type: Sequelize.DECIMAL(10, 2),
defaultValue: 0.0,
},
damageFees: {
type: Sequelize.DECIMAL(10, 2),
defaultValue: 0.0,
},
replacementFees: {
type: Sequelize.DECIMAL(10, 2),
defaultValue: 0.0,
},
itemLostReportedAt: {
type: Sequelize.DATE,
},
damageAssessment: {
type: Sequelize.JSONB,
defaultValue: {},
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("Rentals", ["itemId"]);
await queryInterface.addIndex("Rentals", ["renterId"]);
await queryInterface.addIndex("Rentals", ["ownerId"]);
await queryInterface.addIndex("Rentals", ["status"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("Rentals");
},
};

View File

@@ -0,0 +1,70 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("ConditionChecks", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
rentalId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Rentals",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
submittedBy: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
checkType: {
type: Sequelize.ENUM(
"pre_rental_owner",
"rental_start_renter",
"rental_end_renter",
"post_rental_owner"
),
allowNull: false,
},
photos: {
type: Sequelize.ARRAY(Sequelize.STRING),
defaultValue: [],
},
notes: {
type: Sequelize.TEXT,
},
submittedAt: {
type: Sequelize.DATE,
allowNull: false,
defaultValue: Sequelize.NOW,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("ConditionChecks", ["rentalId"]);
await queryInterface.addIndex("ConditionChecks", ["submittedBy"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("ConditionChecks");
},
};

View File

@@ -0,0 +1,61 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("Messages", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
senderId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
receiverId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
content: {
type: Sequelize.TEXT,
allowNull: false,
},
isRead: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
imagePath: {
type: Sequelize.STRING,
allowNull: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("Messages", ["senderId"]);
await queryInterface.addIndex("Messages", ["receiverId"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("Messages");
},
};

View File

@@ -0,0 +1,129 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("ForumPosts", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
authorId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
title: {
type: Sequelize.STRING,
allowNull: false,
},
content: {
type: Sequelize.TEXT,
allowNull: false,
},
category: {
type: Sequelize.ENUM(
"item_request",
"technical_support",
"community_resources",
"general_discussion"
),
allowNull: false,
defaultValue: "general_discussion",
},
status: {
type: Sequelize.ENUM("open", "answered", "closed"),
defaultValue: "open",
},
viewCount: {
type: Sequelize.INTEGER,
defaultValue: 0,
},
commentCount: {
type: Sequelize.INTEGER,
defaultValue: 0,
},
zipCode: {
type: Sequelize.STRING,
},
latitude: {
type: Sequelize.DECIMAL(10, 8),
},
longitude: {
type: Sequelize.DECIMAL(11, 8),
},
acceptedAnswerId: {
type: Sequelize.UUID,
allowNull: true,
},
images: {
type: Sequelize.ARRAY(Sequelize.TEXT),
allowNull: true,
defaultValue: [],
},
isPinned: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
isDeleted: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
deletedBy: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "SET NULL",
},
deletedAt: {
type: Sequelize.DATE,
allowNull: true,
},
deletionReason: {
type: Sequelize.TEXT,
allowNull: true,
},
closedBy: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "SET NULL",
},
closedAt: {
type: Sequelize.DATE,
allowNull: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("ForumPosts", ["authorId"]);
await queryInterface.addIndex("ForumPosts", ["category"]);
await queryInterface.addIndex("ForumPosts", ["status"]);
await queryInterface.addIndex("ForumPosts", ["isDeleted"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("ForumPosts");
},
};

View File

@@ -0,0 +1,92 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("ForumComments", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
postId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "ForumPosts",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
authorId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
parentCommentId: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: "ForumComments",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "SET NULL",
},
content: {
type: Sequelize.TEXT,
allowNull: false,
},
images: {
type: Sequelize.ARRAY(Sequelize.TEXT),
allowNull: true,
defaultValue: [],
},
isDeleted: {
type: Sequelize.BOOLEAN,
defaultValue: false,
},
deletedBy: {
type: Sequelize.UUID,
allowNull: true,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "SET NULL",
},
deletedAt: {
type: Sequelize.DATE,
allowNull: true,
},
deletionReason: {
type: Sequelize.TEXT,
allowNull: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("ForumComments", ["postId"]);
await queryInterface.addIndex("ForumComments", ["authorId"]);
await queryInterface.addIndex("ForumComments", ["parentCommentId"]);
await queryInterface.addIndex("ForumComments", ["isDeleted"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("ForumComments");
},
};

View File

@@ -0,0 +1,25 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Add foreign key constraint for acceptedAnswerId
await queryInterface.addConstraint("ForumPosts", {
fields: ["acceptedAnswerId"],
type: "foreign key",
name: "ForumPosts_acceptedAnswerId_fkey",
references: {
table: "ForumComments",
field: "id",
},
onDelete: "SET NULL",
onUpdate: "CASCADE",
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeConstraint(
"ForumPosts",
"ForumPosts_acceptedAnswerId_fkey"
);
},
};

View File

@@ -0,0 +1,43 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("PostTags", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
postId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "ForumPosts",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
tagName: {
type: Sequelize.STRING,
allowNull: false,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("PostTags", ["postId"]);
await queryInterface.addIndex("PostTags", ["tagName"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("PostTags");
},
};

View File

@@ -0,0 +1,50 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("Feedbacks", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
userId: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: "Users",
key: "id",
},
onUpdate: "CASCADE",
onDelete: "CASCADE",
},
feedbackText: {
type: Sequelize.TEXT,
allowNull: false,
},
userAgent: {
type: Sequelize.STRING,
allowNull: true,
},
url: {
type: Sequelize.STRING(500),
allowNull: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("Feedbacks", ["userId"]);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("Feedbacks");
},
};

View File

@@ -0,0 +1,19 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Change images column from VARCHAR(255)[] to TEXT[] to support longer URLs
await queryInterface.changeColumn("Items", "images", {
type: Sequelize.ARRAY(Sequelize.TEXT),
defaultValue: [],
});
},
down: async (queryInterface, Sequelize) => {
// Revert to original VARCHAR(255)[]
await queryInterface.changeColumn("Items", "images", {
type: Sequelize.ARRAY(Sequelize.STRING),
defaultValue: [],
});
},
};

View File

@@ -0,0 +1,39 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Change image/photo URL fields from VARCHAR(255) to TEXT to support longer URLs
await Promise.all([
queryInterface.changeColumn("Users", "profileImage", {
type: Sequelize.TEXT,
allowNull: true,
}),
queryInterface.changeColumn("Messages", "imagePath", {
type: Sequelize.TEXT,
allowNull: true,
}),
queryInterface.changeColumn("ConditionChecks", "photos", {
type: Sequelize.ARRAY(Sequelize.TEXT),
defaultValue: [],
}),
]);
},
down: async (queryInterface, Sequelize) => {
// Revert to original VARCHAR(255)
await Promise.all([
queryInterface.changeColumn("Users", "profileImage", {
type: Sequelize.STRING,
allowNull: true,
}),
queryInterface.changeColumn("Messages", "imagePath", {
type: Sequelize.STRING,
allowNull: true,
}),
queryInterface.changeColumn("ConditionChecks", "photos", {
type: Sequelize.ARRAY(Sequelize.STRING),
defaultValue: [],
}),
]);
},
};

View File

@@ -0,0 +1,24 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Rename image fields to consistent naming convention
// Using TEXT type for all to support long URLs/paths
await queryInterface.renameColumn("Items", "images", "imageFilenames");
await queryInterface.renameColumn("Users", "profileImage", "imageFilename");
await queryInterface.renameColumn("Messages", "imagePath", "imageFilename");
await queryInterface.renameColumn("ConditionChecks", "photos", "imageFilenames");
await queryInterface.renameColumn("ForumPosts", "images", "imageFilenames");
await queryInterface.renameColumn("ForumComments", "images", "imageFilenames");
},
down: async (queryInterface, Sequelize) => {
// Revert to original column names
await queryInterface.renameColumn("Items", "imageFilenames", "images");
await queryInterface.renameColumn("Users", "imageFilename", "profileImage");
await queryInterface.renameColumn("Messages", "imageFilename", "imagePath");
await queryInterface.renameColumn("ConditionChecks", "imageFilenames", "photos");
await queryInterface.renameColumn("ForumPosts", "imageFilenames", "images");
await queryInterface.renameColumn("ForumComments", "imageFilenames", "images");
},
};

View File

@@ -0,0 +1,15 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn("Users", "verificationAttempts", {
type: Sequelize.INTEGER,
defaultValue: 0,
allowNull: true,
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn("Users", "verificationAttempts");
},
};

View File

@@ -0,0 +1,21 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.changeColumn("Messages", "content", {
type: Sequelize.TEXT,
allowNull: true,
});
},
down: async (queryInterface, Sequelize) => {
// First update any null content to empty string before reverting
await queryInterface.sequelize.query(
`UPDATE "Messages" SET content = '' WHERE content IS NULL`
);
await queryInterface.changeColumn("Messages", "content", {
type: Sequelize.TEXT,
allowNull: false,
});
},
};

View File

@@ -0,0 +1,20 @@
'use strict';
/** @type {import('sequelize-cli').Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
// Add index on latitude and longitude columns for faster geospatial queries
// This improves performance of the bounding box pre-filter used in radius searches
await queryInterface.addIndex('Items', ['latitude', 'longitude'], {
name: 'idx_items_lat_lng',
where: {
latitude: { [Sequelize.Op.ne]: null },
longitude: { [Sequelize.Op.ne]: null }
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.removeIndex('Items', 'idx_items_lat_lng');
}
};

View File

@@ -0,0 +1,15 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn("Users", "stripePayoutsEnabled", {
type: Sequelize.BOOLEAN,
defaultValue: false,
allowNull: true,
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn("Users", "stripePayoutsEnabled");
},
};

View File

@@ -0,0 +1,42 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Add bankDepositStatus enum column
await queryInterface.addColumn("Rentals", "bankDepositStatus", {
type: Sequelize.ENUM("pending", "in_transit", "paid", "failed", "canceled"),
allowNull: true,
defaultValue: null,
});
// Add bankDepositAt timestamp
await queryInterface.addColumn("Rentals", "bankDepositAt", {
type: Sequelize.DATE,
allowNull: true,
});
// Add stripePayoutId to track which Stripe payout included this transfer
await queryInterface.addColumn("Rentals", "stripePayoutId", {
type: Sequelize.STRING,
allowNull: true,
});
// Add bankDepositFailureCode for failed deposits
await queryInterface.addColumn("Rentals", "bankDepositFailureCode", {
type: Sequelize.STRING,
allowNull: true,
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn("Rentals", "bankDepositFailureCode");
await queryInterface.removeColumn("Rentals", "stripePayoutId");
await queryInterface.removeColumn("Rentals", "bankDepositAt");
await queryInterface.removeColumn("Rentals", "bankDepositStatus");
// Drop the enum type (PostgreSQL specific)
await queryInterface.sequelize.query(
'DROP TYPE IF EXISTS "enum_Rentals_bankDepositStatus";'
);
},
};

View File

@@ -0,0 +1,30 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Add paymentFailedNotifiedAt - tracks when owner notified renter about failed payment
await queryInterface.addColumn("Rentals", "paymentFailedNotifiedAt", {
type: Sequelize.DATE,
allowNull: true,
});
// Add paymentMethodUpdatedAt - tracks last payment method update for rate limiting
await queryInterface.addColumn("Rentals", "paymentMethodUpdatedAt", {
type: Sequelize.DATE,
allowNull: true,
});
// Add paymentMethodUpdateCount - count of updates within time window for rate limiting
await queryInterface.addColumn("Rentals", "paymentMethodUpdateCount", {
type: Sequelize.INTEGER,
allowNull: true,
defaultValue: 0,
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn("Rentals", "paymentMethodUpdateCount");
await queryInterface.removeColumn("Rentals", "paymentMethodUpdatedAt");
await queryInterface.removeColumn("Rentals", "paymentFailedNotifiedAt");
},
};

View File

@@ -0,0 +1,95 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable("ImageMetadata", {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
s3Key: {
type: Sequelize.TEXT,
allowNull: false,
unique: true,
},
latitude: {
type: Sequelize.DECIMAL(10, 8),
allowNull: true,
},
longitude: {
type: Sequelize.DECIMAL(11, 8),
allowNull: true,
},
cameraMake: {
type: Sequelize.STRING(100),
allowNull: true,
},
cameraModel: {
type: Sequelize.STRING(100),
allowNull: true,
},
cameraSoftware: {
type: Sequelize.STRING(100),
allowNull: true,
},
dateTaken: {
type: Sequelize.DATE,
allowNull: true,
},
width: {
type: Sequelize.INTEGER,
allowNull: true,
},
height: {
type: Sequelize.INTEGER,
allowNull: true,
},
orientation: {
type: Sequelize.INTEGER,
allowNull: true,
},
fileSize: {
type: Sequelize.INTEGER,
allowNull: true,
},
processingStatus: {
type: Sequelize.ENUM("pending", "processing", "completed", "failed"),
allowNull: false,
defaultValue: "pending",
},
processedAt: {
type: Sequelize.DATE,
allowNull: true,
},
errorMessage: {
type: Sequelize.TEXT,
allowNull: true,
},
createdAt: {
type: Sequelize.DATE,
allowNull: false,
},
updatedAt: {
type: Sequelize.DATE,
allowNull: false,
},
});
// Add indexes
await queryInterface.addIndex("ImageMetadata", ["s3Key"], {
unique: true,
name: "image_metadata_s3_key_unique",
});
await queryInterface.addIndex("ImageMetadata", ["latitude", "longitude"], {
name: "image_metadata_geo",
});
await queryInterface.addIndex("ImageMetadata", ["processingStatus"], {
name: "image_metadata_processing_status",
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable("ImageMetadata");
},
};

View File

@@ -0,0 +1,41 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// isBanned - boolean flag indicating if user is banned
await queryInterface.addColumn("Users", "isBanned", {
type: Sequelize.BOOLEAN,
defaultValue: false,
allowNull: false,
});
// bannedAt - timestamp when ban was applied
await queryInterface.addColumn("Users", "bannedAt", {
type: Sequelize.DATE,
allowNull: true,
});
// bannedBy - UUID of admin who applied the ban
await queryInterface.addColumn("Users", "bannedBy", {
type: Sequelize.UUID,
allowNull: true,
references: {
model: "Users",
key: "id",
},
});
// banReason - reason provided by admin for the ban
await queryInterface.addColumn("Users", "banReason", {
type: Sequelize.TEXT,
allowNull: true,
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn("Users", "banReason");
await queryInterface.removeColumn("Users", "bannedBy");
await queryInterface.removeColumn("Users", "bannedAt");
await queryInterface.removeColumn("Users", "isBanned");
},
};

View File

@@ -0,0 +1,18 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Add 'requires_action' to the paymentStatus enum
// This status is used when 3DS authentication is required for a payment
await queryInterface.sequelize.query(`
ALTER TYPE "enum_Rentals_paymentStatus" ADD VALUE IF NOT EXISTS 'requires_action';
`);
},
down: async (queryInterface, Sequelize) => {
console.log(
"PostgreSQL does not support removing ENUM values. " +
"'requires_action' will remain in the enum but will not be used.",
);
},
};

View File

@@ -0,0 +1,16 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Add 'on_hold' to the existing payoutStatus enum
await queryInterface.sequelize.query(`
ALTER TYPE "enum_Rentals_payoutStatus" ADD VALUE IF NOT EXISTS 'on_hold';
`);
},
down: async (queryInterface, Sequelize) => {
console.log(
"Cannot remove enum value - manual intervention required if rollback needed",
);
},
};

View File

@@ -0,0 +1,57 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn("Rentals", "stripeDisputeStatus", {
type: Sequelize.ENUM("open", "won", "lost", "warning_closed"),
allowNull: true,
});
await queryInterface.addColumn("Rentals", "stripeDisputeId", {
type: Sequelize.STRING,
allowNull: true,
});
await queryInterface.addColumn("Rentals", "stripeDisputeReason", {
type: Sequelize.STRING,
allowNull: true,
});
await queryInterface.addColumn("Rentals", "stripeDisputeAmount", {
type: Sequelize.INTEGER,
allowNull: true,
});
await queryInterface.addColumn("Rentals", "stripeDisputeCreatedAt", {
type: Sequelize.DATE,
allowNull: true,
});
await queryInterface.addColumn("Rentals", "stripeDisputeEvidenceDueBy", {
type: Sequelize.DATE,
allowNull: true,
});
await queryInterface.addColumn("Rentals", "stripeDisputeClosedAt", {
type: Sequelize.DATE,
allowNull: true,
});
await queryInterface.addColumn("Rentals", "stripeDisputeLost", {
type: Sequelize.BOOLEAN,
defaultValue: false,
});
await queryInterface.addColumn("Rentals", "stripeDisputeLostAmount", {
type: Sequelize.INTEGER,
allowNull: true,
});
},
down: async (queryInterface) => {
await queryInterface.removeColumn("Rentals", "stripeDisputeStatus");
await queryInterface.removeColumn("Rentals", "stripeDisputeId");
await queryInterface.removeColumn("Rentals", "stripeDisputeReason");
await queryInterface.removeColumn("Rentals", "stripeDisputeAmount");
await queryInterface.removeColumn("Rentals", "stripeDisputeCreatedAt");
await queryInterface.removeColumn("Rentals", "stripeDisputeEvidenceDueBy");
await queryInterface.removeColumn("Rentals", "stripeDisputeClosedAt");
await queryInterface.removeColumn("Rentals", "stripeDisputeLost");
await queryInterface.removeColumn("Rentals", "stripeDisputeLostAmount");
await queryInterface.sequelize.query(
'DROP TYPE IF EXISTS "enum_Rentals_stripeDisputeStatus";'
);
},
};

View File

@@ -0,0 +1,34 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn("Users", "stripeRequirementsCurrentlyDue", {
type: Sequelize.JSON,
defaultValue: [],
allowNull: true,
});
await queryInterface.addColumn("Users", "stripeRequirementsPastDue", {
type: Sequelize.JSON,
defaultValue: [],
allowNull: true,
});
await queryInterface.addColumn("Users", "stripeDisabledReason", {
type: Sequelize.STRING,
allowNull: true,
});
await queryInterface.addColumn("Users", "stripeRequirementsLastUpdated", {
type: Sequelize.DATE,
allowNull: true,
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn("Users", "stripeRequirementsCurrentlyDue");
await queryInterface.removeColumn("Users", "stripeRequirementsPastDue");
await queryInterface.removeColumn("Users", "stripeDisabledReason");
await queryInterface.removeColumn("Users", "stripeRequirementsLastUpdated");
},
};

View File

@@ -0,0 +1,15 @@
"use strict";
module.exports = {
up: async (queryInterface, Sequelize) => {
// Add paymentFailedReason - stores the user-friendly error message for payment failures
await queryInterface.addColumn("Rentals", "paymentFailedReason", {
type: Sequelize.TEXT,
allowNull: true,
});
},
down: async (queryInterface, Sequelize) => {
await queryInterface.removeColumn("Rentals", "paymentFailedReason");
},
};

View File

@@ -0,0 +1,67 @@
"use strict";
/**
* Replaces stripeDisputeStatus enum with all valid Stripe dispute statuses.
* Previous enum had: open, won, lost, warning_closed
* Stripe uses: needs_response, under_review, won, lost,
* warning_needs_response, warning_under_review, warning_closed
*/
module.exports = {
up: async (queryInterface) => {
// Create new enum type with correct Stripe statuses
await queryInterface.sequelize.query(`
CREATE TYPE "enum_Rentals_stripeDisputeStatus_new" AS ENUM (
'needs_response',
'under_review',
'won',
'lost',
'warning_needs_response',
'warning_under_review',
'warning_closed'
);
`);
// Alter column to use new type
await queryInterface.sequelize.query(`
ALTER TABLE "Rentals"
ALTER COLUMN "stripeDisputeStatus"
TYPE "enum_Rentals_stripeDisputeStatus_new"
USING "stripeDisputeStatus"::text::"enum_Rentals_stripeDisputeStatus_new";
`);
// Drop old enum type
await queryInterface.sequelize.query(`
DROP TYPE "enum_Rentals_stripeDisputeStatus";
`);
// Rename new type to original name
await queryInterface.sequelize.query(`
ALTER TYPE "enum_Rentals_stripeDisputeStatus_new"
RENAME TO "enum_Rentals_stripeDisputeStatus";
`);
},
down: async (queryInterface) => {
await queryInterface.sequelize.query(`
CREATE TYPE "enum_Rentals_stripeDisputeStatus_old" AS ENUM (
'open', 'won', 'lost', 'warning_closed'
);
`);
await queryInterface.sequelize.query(`
ALTER TABLE "Rentals"
ALTER COLUMN "stripeDisputeStatus"
TYPE "enum_Rentals_stripeDisputeStatus_old"
USING "stripeDisputeStatus"::text::"enum_Rentals_stripeDisputeStatus_old";
`);
await queryInterface.sequelize.query(`
DROP TYPE "enum_Rentals_stripeDisputeStatus";
`);
await queryInterface.sequelize.query(`
ALTER TYPE "enum_Rentals_stripeDisputeStatus_old"
RENAME TO "enum_Rentals_stripeDisputeStatus";
`);
},
};

View File

@@ -0,0 +1,107 @@
"use strict";
/** @type {import('sequelize-cli').Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
// Add TOTP configuration fields
await queryInterface.addColumn("Users", "twoFactorEnabled", {
type: Sequelize.BOOLEAN,
defaultValue: false,
allowNull: false,
});
await queryInterface.addColumn("Users", "twoFactorMethod", {
type: Sequelize.ENUM("totp", "email"),
allowNull: true,
});
await queryInterface.addColumn("Users", "totpSecret", {
type: Sequelize.STRING,
allowNull: true,
});
await queryInterface.addColumn("Users", "totpSecretIv", {
type: Sequelize.STRING,
allowNull: true,
});
await queryInterface.addColumn("Users", "twoFactorEnabledAt", {
type: Sequelize.DATE,
allowNull: true,
});
// Add Email OTP fields (backup method)
await queryInterface.addColumn("Users", "emailOtpCode", {
type: Sequelize.STRING,
allowNull: true,
});
await queryInterface.addColumn("Users", "emailOtpExpiry", {
type: Sequelize.DATE,
allowNull: true,
});
await queryInterface.addColumn("Users", "emailOtpAttempts", {
type: Sequelize.INTEGER,
defaultValue: 0,
allowNull: false,
});
// Add Recovery Codes fields
await queryInterface.addColumn("Users", "recoveryCodesHash", {
type: Sequelize.TEXT,
allowNull: true,
});
await queryInterface.addColumn("Users", "recoveryCodesGeneratedAt", {
type: Sequelize.DATE,
allowNull: true,
});
await queryInterface.addColumn("Users", "recoveryCodesUsedCount", {
type: Sequelize.INTEGER,
defaultValue: 0,
allowNull: false,
});
// Add Step-up session tracking
await queryInterface.addColumn("Users", "twoFactorVerifiedAt", {
type: Sequelize.DATE,
allowNull: true,
});
// Add temporary secret storage during setup
await queryInterface.addColumn("Users", "twoFactorSetupPendingSecret", {
type: Sequelize.STRING,
allowNull: true,
});
await queryInterface.addColumn("Users", "twoFactorSetupPendingSecretIv", {
type: Sequelize.STRING,
allowNull: true,
});
},
async down(queryInterface, Sequelize) {
// Remove all 2FA fields in reverse order
await queryInterface.removeColumn("Users", "twoFactorSetupPendingSecretIv");
await queryInterface.removeColumn("Users", "twoFactorSetupPendingSecret");
await queryInterface.removeColumn("Users", "twoFactorVerifiedAt");
await queryInterface.removeColumn("Users", "recoveryCodesUsedCount");
await queryInterface.removeColumn("Users", "recoveryCodesGeneratedAt");
await queryInterface.removeColumn("Users", "recoveryCodesHash");
await queryInterface.removeColumn("Users", "emailOtpAttempts");
await queryInterface.removeColumn("Users", "emailOtpExpiry");
await queryInterface.removeColumn("Users", "emailOtpCode");
await queryInterface.removeColumn("Users", "twoFactorEnabledAt");
await queryInterface.removeColumn("Users", "totpSecretIv");
await queryInterface.removeColumn("Users", "totpSecret");
await queryInterface.removeColumn("Users", "twoFactorMethod");
await queryInterface.removeColumn("Users", "twoFactorEnabled");
// Remove the ENUM type
await queryInterface.sequelize.query(
'DROP TYPE IF EXISTS "enum_Users_twoFactorMethod";'
);
},
};

View File

@@ -0,0 +1,32 @@
"use strict";
/** @type {import('sequelize-cli').Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
// Add recentTotpCodes field for TOTP replay protection
await queryInterface.addColumn("Users", "recentTotpCodes", {
type: Sequelize.TEXT,
allowNull: true,
comment: "JSON array of hashed recently used TOTP codes for replay protection",
});
// Remove deprecated columns (if they exist)
await queryInterface.removeColumn("Users", "twoFactorEnabledAt").catch(() => {});
await queryInterface.removeColumn("Users", "recoveryCodesUsedCount").catch(() => {});
},
async down(queryInterface, Sequelize) {
await queryInterface.removeColumn("Users", "recentTotpCodes");
// Re-add deprecated columns for rollback
await queryInterface.addColumn("Users", "twoFactorEnabledAt", {
type: Sequelize.DATE,
allowNull: true,
});
await queryInterface.addColumn("Users", "recoveryCodesUsedCount", {
type: Sequelize.INTEGER,
defaultValue: 0,
allowNull: false,
});
},
};

View File

@@ -0,0 +1,303 @@
# Database Migrations
This project uses Sequelize CLI for database migrations. Migrations provide version control for your database schema.
## Quick Reference
```bash
# Run pending migrations
npm run db:migrate
# Undo last migration
npm run db:migrate:undo
# Undo all migrations
npm run db:migrate:undo:all
# Check migration status
npm run db:migrate:status
# Test all migrations (up, down, up)
npm run test:migrations
```
## Available Commands
### `npm run db:migrate`
**Purpose:** Runs all pending migrations that haven't been applied yet.
- Checks the `SequelizeMeta` table to see which migrations have already run
- Executes the `up` function in each pending migration file in order
- Records each successful migration in the `SequelizeMeta` table
- **When to use:** Deploy new schema changes to your database
### `npm run db:migrate:undo`
**Purpose:** Rolls back the most recent migration.
- Executes the `down` function of the last applied migration
- Removes that migration's entry from `SequelizeMeta`
- **When to use:** Quickly revert the last change if something went wrong
### `npm run db:migrate:undo:all`
**Purpose:** Rolls back ALL migrations, returning to an empty database.
- Executes the `down` function of every migration in reverse order
- Clears the `SequelizeMeta` table
- **When to use:** Reset development database to start fresh, or in testing
### `npm run db:migrate:status`
**Purpose:** Shows the status of all migrations.
- Lists which migrations have been executed (with timestamps)
- Shows which migrations are pending
- **When to use:** Check what's been applied before deploying or debugging
### `npm run db:create`
**Purpose:** Creates the database specified in your config.
- Reads `DB_NAME` from environment variables
- Creates a new PostgreSQL database with that name
- **When to use:** Initial setup on a new environment
### `npm run test:migrations`
**Purpose:** Automated testing of migrations (to be implemented in Phase 4).
- Will create a fresh test database
- Run all migrations up, then down, then up again
- Verify migrations are reversible and idempotent
- **When to use:** In CI/CD pipeline before merging migration changes
## Environment Configuration
All commands use the environment specified by `NODE_ENV` (dev, test, qa, prod) and load the corresponding `.env` file automatically.
Examples:
```bash
# Run migrations in development
NODE_ENV=dev npm run db:migrate
# Check status in QA environment
NODE_ENV=qa npm run db:migrate:status
# Run migrations in production
NODE_ENV=prod npm run db:migrate
```
## Creating a New Migration
```bash
# Generate a new migration file
npx sequelize-cli migration:generate --name description-of-change
```
This creates a timestamped file in `backend/migrations/`:
```
20241125123456-description-of-change.js
```
### Migration File Structure
```javascript
module.exports = {
up: async (queryInterface, Sequelize) => {
// Schema changes to apply
},
down: async (queryInterface, Sequelize) => {
// How to revert the changes
},
};
```
## Naming Conventions
Use descriptive names that indicate the action:
- `create-users` - Creating a new table
- `add-email-to-users` - Adding a column
- `remove-legacy-field-from-items` - Removing a column
- `add-index-on-users-email` - Adding an index
- `change-status-enum-in-rentals` - Modifying a column
## Zero-Downtime Patterns
### Adding a Column (Safe)
```javascript
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn("users", "newField", {
type: Sequelize.STRING,
allowNull: true, // Must be nullable or have default
});
};
```
### Adding a NOT NULL Column
```javascript
// Step 1: Add as nullable
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn("users", "newField", {
type: Sequelize.STRING,
allowNull: true,
});
};
// Step 2: Backfill data (separate migration)
up: async (queryInterface, Sequelize) => {
await queryInterface.sequelize.query(
`UPDATE users SET "newField" = 'default_value' WHERE "newField" IS NULL`
);
};
// Step 3: Add NOT NULL constraint (separate migration, after code deployed)
up: async (queryInterface, Sequelize) => {
await queryInterface.changeColumn("users", "newField", {
type: Sequelize.STRING,
allowNull: false,
});
};
```
### Removing a Column (3-Step Process)
1. **Deploy 1**: Update code to stop reading/writing the column
2. **Deploy 2**: Run migration to remove column
3. **Deploy 3**: Remove column references from model (cleanup)
```javascript
// Migration in Deploy 2
up: async (queryInterface) => {
await queryInterface.removeColumn('users', 'oldField');
},
down: async (queryInterface, Sequelize) => {
await queryInterface.addColumn('users', 'oldField', {
type: Sequelize.STRING,
allowNull: true
});
}
```
### Renaming a Column (3-Step Process)
```javascript
// Deploy 1: Add new column, copy data
up: async (queryInterface, Sequelize) => {
await queryInterface.addColumn("users", "newName", {
type: Sequelize.STRING,
});
await queryInterface.sequelize.query(
'UPDATE users SET "newName" = "oldName"'
);
};
// Deploy 2: Update code to use newName, keep oldName as fallback
// (no migration)
// Deploy 3: Remove old column
up: async (queryInterface) => {
await queryInterface.removeColumn("users", "oldName");
};
```
### Creating Indexes (Use CONCURRENTLY)
```javascript
up: async (queryInterface) => {
  await queryInterface.addIndex("users", ["email"], {
    unique: true,
    concurrently: true, // Builds the index without blocking writes
    name: "users_email_unique",
  });
};
```
**Note:** PostgreSQL's `CREATE INDEX CONCURRENTLY` cannot run inside a transaction, so this migration must not be wrapped in one (do not use `queryInterface.sequelize.transaction` around it).
## Testing Checklist
Before committing a migration:
- [ ] Migration has both `up` and `down` functions
- [ ] Tested locally: `npm run db:migrate`
- [ ] Tested rollback: `npm run db:migrate:undo`
- [ ] Tested re-apply: `npm run db:migrate`
- [ ] Full test: `npm run test:migrations`
- [ ] Application starts and works correctly
- [ ] No data loss in `down` migration (where possible)
## Deployment Checklist
Before deploying to production:
- [ ] Backup production database
- [ ] Test migration on copy of production data
- [ ] Review migration for safety (no destructive operations)
- [ ] Schedule during low-traffic window
- [ ] Have rollback plan ready
- [ ] Monitor logs after deployment
## Rollback Procedures
### Undo Last Migration
```bash
npm run db:migrate:undo
```
### Undo Multiple Migrations
```bash
# Undo last 3 migrations
npx sequelize-cli db:migrate:undo --step 3
```
### Undo to Specific Migration
```bash
npx sequelize-cli db:migrate:undo:all --to 20241124000005-create-rentals.js
```
## Common Issues
### "Migration file not found"
Ensure the migration filename in `SequelizeMeta` matches the file on disk.
### "Column already exists"
The migration may have partially run. Check the schema and either:
- Manually fix the schema
- Mark migration as complete: `INSERT INTO "SequelizeMeta" VALUES ('filename.js')`
### "Cannot drop column - dependent objects"
Drop dependent indexes/constraints first:
```javascript
await queryInterface.removeIndex("users", "index_name");
await queryInterface.removeColumn("users", "column_name");
```
### Foreign Key Constraint Failures
Ensure data integrity before adding constraints:
```javascript
// Clean orphaned records first
await queryInterface.sequelize.query(
'DELETE FROM rentals WHERE "itemId" NOT IN (SELECT id FROM items)'
);
// Then add constraint
await queryInterface.addConstraint("rentals", {
fields: ["itemId"],
type: "foreign key",
references: { table: "items", field: "id" },
});
```

View File

@@ -0,0 +1,65 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

/**
 * AlphaInvitation — an invitation code tied to a single email address.
 *
 * code   — must match ALPHA-XXXXXXXX (8 alphanumerics, case-insensitive).
 * email  — normalized to lowercase/trimmed on assignment.
 * usedBy / usedAt — who redeemed the invitation and when (nullable).
 * status — pending | active | revoked (defaults to pending).
 */
const AlphaInvitation = sequelize.define(
  "AlphaInvitation",
  {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
    },
    code: {
      type: DataTypes.STRING,
      unique: true,
      allowNull: false,
      validate: {
        is: /^ALPHA-[A-Z0-9]{8}$/i,
      },
    },
    email: {
      type: DataTypes.STRING,
      unique: true,
      allowNull: false,
      validate: {
        isEmail: true,
      },
      set(value) {
        // Normalize email to lowercase/trimmed. Non-string values (e.g. null)
        // are passed through untouched so the allowNull/isEmail validators can
        // report a proper validation error instead of this setter throwing a
        // TypeError on .toLowerCase().
        this.setDataValue(
          "email",
          typeof value === "string" ? value.toLowerCase().trim() : value
        );
      },
    },
    usedBy: {
      type: DataTypes.UUID,
      allowNull: true,
      references: {
        model: "Users",
        key: "id",
      },
    },
    usedAt: {
      type: DataTypes.DATE,
      allowNull: true,
    },
    status: {
      type: DataTypes.ENUM("pending", "active", "revoked"),
      defaultValue: "pending",
      allowNull: false,
    },
  },
  {
    indexes: [
      { fields: ["code"] },
      { fields: ["email"] },
      { fields: ["status"] },
    ],
  }
);

module.exports = AlphaInvitation;

View File

@@ -0,0 +1,49 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

// A condition check recorded against a rental: which lifecycle stage it was
// captured at, who submitted it, plus photo filenames and free-form notes.
const ConditionCheck = sequelize.define("ConditionCheck", {
  // Surrogate primary key.
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true,
  },
  // Rental this check belongs to.
  rentalId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: { model: "Rentals", key: "id" },
  },
  // Stage of the rental at which the check was taken.
  checkType: {
    type: DataTypes.ENUM(
      "pre_rental_owner",
      "rental_start_renter",
      "rental_end_renter",
      "post_rental_owner"
    ),
    allowNull: false,
  },
  // Filenames of the uploaded condition photos.
  imageFilenames: {
    type: DataTypes.ARRAY(DataTypes.TEXT),
    defaultValue: [],
  },
  // Optional free-form notes.
  notes: { type: DataTypes.TEXT },
  // User who submitted the check.
  submittedBy: {
    type: DataTypes.UUID,
    allowNull: false,
    references: { model: "Users", key: "id" },
  },
  // Submission timestamp; defaults to the current time.
  submittedAt: {
    type: DataTypes.DATE,
    allowNull: false,
    defaultValue: DataTypes.NOW,
  },
});

module.exports = ConditionCheck;

View File

@@ -0,0 +1,34 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

// User-submitted feedback, stored alongside the browser user agent and the
// page URL it was submitted from (both optional).
const Feedback = sequelize.define(
  "Feedback",
  {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
    },
    // Author of the feedback.
    userId: {
      type: DataTypes.UUID,
      allowNull: false,
      references: { model: "Users", key: "id" },
    },
    // The feedback body itself.
    feedbackText: {
      type: DataTypes.TEXT,
      allowNull: false,
    },
    // Browser user-agent string, if the client supplied one.
    userAgent: {
      type: DataTypes.STRING,
      allowNull: true,
    },
    // Page URL the feedback was sent from (capped at 500 chars).
    url: {
      type: DataTypes.STRING(500),
      allowNull: true,
    },
  },
  { timestamps: true }
);

module.exports = Feedback;

View File

@@ -0,0 +1,65 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../config/database');
const ForumComment = sequelize.define('ForumComment', {
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
postId: {
type: DataTypes.UUID,
allowNull: false,
references: {
model: 'ForumPosts',
key: 'id'
}
},
authorId: {
type: DataTypes.UUID,
allowNull: false,
references: {
model: 'Users',
key: 'id'
}
},
content: {
type: DataTypes.TEXT,
allowNull: false
},
parentCommentId: {
type: DataTypes.UUID,
allowNull: true,
references: {
model: 'ForumComments',
key: 'id'
}
},
isDeleted: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
imageFilenames: {
type: DataTypes.ARRAY(DataTypes.TEXT),
allowNull: true,
defaultValue: []
},
deletedBy: {
type: DataTypes.UUID,
allowNull: true,
references: {
model: 'Users',
key: 'id'
}
},
deletedAt: {
type: DataTypes.DATE,
allowNull: true
},
deletionReason: {
type: DataTypes.TEXT,
allowNull: true
}
});
module.exports = ForumComment;

106
backend/models/ForumPost.js Normal file
View File

@@ -0,0 +1,106 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

// Builds a foreign-key reference descriptor (fresh object per call).
const ref = (model) => ({ model, key: "id" });

// A forum post: title/content plus category and status, engagement counters,
// an optional accepted answer, an optional location, and audit fields for
// both soft deletion and closing.
const ForumPost = sequelize.define("ForumPost", {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true,
  },
  title: {
    type: DataTypes.STRING,
    allowNull: false,
  },
  content: {
    type: DataTypes.TEXT,
    allowNull: false,
  },
  // Post author.
  authorId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: ref("Users"),
  },
  category: {
    type: DataTypes.ENUM(
      "item_request",
      "technical_support",
      "community_resources",
      "general_discussion"
    ),
    allowNull: false,
    defaultValue: "general_discussion",
  },
  status: {
    type: DataTypes.ENUM("open", "answered", "closed"),
    defaultValue: "open",
  },
  // Engagement counters (default 0).
  viewCount: {
    type: DataTypes.INTEGER,
    defaultValue: 0,
  },
  commentCount: {
    type: DataTypes.INTEGER,
    defaultValue: 0,
  },
  isPinned: {
    type: DataTypes.BOOLEAN,
    defaultValue: false,
  },
  // Comment marked as the accepted answer, if any.
  acceptedAnswerId: {
    type: DataTypes.UUID,
    allowNull: true,
    references: ref("ForumComments"),
  },
  // Filenames of attached images.
  imageFilenames: {
    type: DataTypes.ARRAY(DataTypes.TEXT),
    allowNull: true,
    defaultValue: [],
  },
  // Optional location attached to the post.
  zipCode: {
    type: DataTypes.STRING,
    allowNull: true,
  },
  latitude: {
    type: DataTypes.DECIMAL(10, 8),
    allowNull: true,
  },
  longitude: {
    type: DataTypes.DECIMAL(11, 8),
    allowNull: true,
  },
  // Soft-delete flag and audit fields.
  isDeleted: {
    type: DataTypes.BOOLEAN,
    defaultValue: false,
  },
  deletedBy: {
    type: DataTypes.UUID,
    allowNull: true,
    references: ref("Users"),
  },
  deletedAt: {
    type: DataTypes.DATE,
    allowNull: true,
  },
  deletionReason: {
    type: DataTypes.TEXT,
    allowNull: true,
  },
  // Close audit fields.
  closedBy: {
    type: DataTypes.UUID,
    allowNull: true,
    references: ref("Users"),
  },
  closedAt: {
    type: DataTypes.DATE,
    allowNull: true,
  },
});

module.exports = ForumPost;

View File

@@ -0,0 +1,88 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

// Metadata extracted from an uploaded image, keyed by its S3 object key.
// processingStatus tracks the extraction pipeline state for each image.
const ImageMetadata = sequelize.define(
  "ImageMetadata",
  {
    id: {
      type: DataTypes.UUID,
      defaultValue: DataTypes.UUIDV4,
      primaryKey: true,
    },
    // S3 object key of the source image; one metadata row per object.
    s3Key: {
      type: DataTypes.TEXT,
      allowNull: false,
      unique: true,
    },
    // GPS coordinates, when available for the image.
    latitude: {
      type: DataTypes.DECIMAL(10, 8),
      allowNull: true,
    },
    longitude: {
      type: DataTypes.DECIMAL(11, 8),
      allowNull: true,
    },
    // Camera information fields (presumably EXIF-derived — confirm in the
    // processing pipeline), capped at 100 chars each.
    cameraMake: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    cameraModel: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    cameraSoftware: {
      type: DataTypes.STRING(100),
      allowNull: true,
    },
    dateTaken: {
      type: DataTypes.DATE,
      allowNull: true,
    },
    // Pixel dimensions and orientation value.
    width: {
      type: DataTypes.INTEGER,
      allowNull: true,
    },
    height: {
      type: DataTypes.INTEGER,
      allowNull: true,
    },
    orientation: {
      type: DataTypes.INTEGER,
      allowNull: true,
    },
    // NOTE(review): INTEGER caps near 2 GB — confirm uploads stay below that.
    fileSize: {
      type: DataTypes.INTEGER,
      allowNull: true,
    },
    // Extraction pipeline state.
    processingStatus: {
      type: DataTypes.ENUM("pending", "processing", "completed", "failed"),
      allowNull: false,
      defaultValue: "pending",
    },
    processedAt: {
      type: DataTypes.DATE,
      allowNull: true,
    },
    // Failure details recorded by the pipeline.
    errorMessage: {
      type: DataTypes.TEXT,
      allowNull: true,
    },
  },
  {
    timestamps: true,
    indexes: [
      { fields: ["s3Key"], unique: true },
      { fields: ["latitude", "longitude"] },
      { fields: ["processingStatus"] },
    ],
  }
);

module.exports = ImageMetadata;

View File

@@ -13,7 +13,7 @@ const Item = sequelize.define("Item", {
},
description: {
type: DataTypes.TEXT,
allowNull: false,
allowNull: true,
},
pickUpAvailable: {
type: DataTypes.BOOLEAN,
@@ -48,6 +48,12 @@ const Item = sequelize.define("Item", {
pricePerDay: {
type: DataTypes.DECIMAL(10, 2),
},
pricePerWeek: {
type: DataTypes.DECIMAL(10, 2),
},
pricePerMonth: {
type: DataTypes.DECIMAL(10, 2),
},
replacementCost: {
type: DataTypes.DECIMAL(10, 2),
allowNull: false,
@@ -76,40 +82,24 @@ const Item = sequelize.define("Item", {
longitude: {
type: DataTypes.DECIMAL(11, 8),
},
images: {
type: DataTypes.ARRAY(DataTypes.STRING),
imageFilenames: {
type: DataTypes.ARRAY(DataTypes.TEXT),
defaultValue: [],
},
availability: {
isAvailable: {
type: DataTypes.BOOLEAN,
defaultValue: true,
},
specifications: {
type: DataTypes.JSONB,
defaultValue: {},
},
rules: {
type: DataTypes.TEXT,
},
minimumRentalDays: {
type: DataTypes.INTEGER,
defaultValue: 1,
},
maximumRentalDays: {
type: DataTypes.INTEGER,
},
needsTraining: {
type: DataTypes.BOOLEAN,
allowNull: false,
defaultValue: false,
},
availableAfter: {
type: DataTypes.STRING,
defaultValue: "09:00",
defaultValue: "00:00",
},
availableBefore: {
type: DataTypes.STRING,
defaultValue: "17:00",
defaultValue: "23:00",
},
specifyTimesPerDay: {
type: DataTypes.BOOLEAN,
@@ -118,13 +108,13 @@ const Item = sequelize.define("Item", {
weeklyTimes: {
type: DataTypes.JSONB,
defaultValue: {
sunday: { availableAfter: "09:00", availableBefore: "17:00" },
monday: { availableAfter: "09:00", availableBefore: "17:00" },
tuesday: { availableAfter: "09:00", availableBefore: "17:00" },
wednesday: { availableAfter: "09:00", availableBefore: "17:00" },
thursday: { availableAfter: "09:00", availableBefore: "17:00" },
friday: { availableAfter: "09:00", availableBefore: "17:00" },
saturday: { availableAfter: "09:00", availableBefore: "17:00" },
sunday: { availableAfter: "00:00", availableBefore: "23:00" },
monday: { availableAfter: "00:00", availableBefore: "23:00" },
tuesday: { availableAfter: "00:00", availableBefore: "23:00" },
wednesday: { availableAfter: "00:00", availableBefore: "23:00" },
thursday: { availableAfter: "00:00", availableBefore: "23:00" },
friday: { availableAfter: "00:00", availableBefore: "23:00" },
saturday: { availableAfter: "00:00", availableBefore: "23:00" },
},
},
ownerId: {
@@ -135,6 +125,26 @@ const Item = sequelize.define("Item", {
key: "id",
},
},
isDeleted: {
type: DataTypes.BOOLEAN,
defaultValue: false,
},
deletedBy: {
type: DataTypes.UUID,
allowNull: true,
references: {
model: "Users",
key: "id",
},
},
deletedAt: {
type: DataTypes.DATE,
allowNull: true,
},
deletionReason: {
type: DataTypes.TEXT,
allowNull: true,
},
});
module.exports = Item;

View File

@@ -1,76 +0,0 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

// A request posted by a user looking for an item to rent, with an optional
// location, price ceilings, and a preferred date window.
const ItemRequest = sequelize.define("ItemRequest", {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true,
  },
  title: {
    type: DataTypes.STRING,
    allowNull: false,
  },
  description: {
    type: DataTypes.TEXT,
    allowNull: false,
  },
  // Optional location fields.
  address1: { type: DataTypes.STRING },
  address2: { type: DataTypes.STRING },
  city: { type: DataTypes.STRING },
  state: { type: DataTypes.STRING },
  zipCode: { type: DataTypes.STRING },
  country: { type: DataTypes.STRING },
  latitude: { type: DataTypes.DECIMAL(10, 8) },
  longitude: { type: DataTypes.DECIMAL(11, 8) },
  // Price ceilings the requester is willing to pay.
  maxPricePerHour: { type: DataTypes.DECIMAL(10, 2) },
  maxPricePerDay: { type: DataTypes.DECIMAL(10, 2) },
  // Preferred rental window; isFlexibleDates defaults to true.
  preferredStartDate: { type: DataTypes.DATE },
  preferredEndDate: { type: DataTypes.DATE },
  isFlexibleDates: {
    type: DataTypes.BOOLEAN,
    defaultValue: true,
  },
  status: {
    type: DataTypes.ENUM("open", "fulfilled", "closed"),
    defaultValue: "open",
  },
  // User who posted the request.
  requesterId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: { model: "Users", key: "id" },
  },
  // Count of responses (default 0; presumably maintained by application code).
  responseCount: {
    type: DataTypes.INTEGER,
    defaultValue: 0,
  },
});

module.exports = ItemRequest;

View File

@@ -1,59 +0,0 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

// A reply to an ItemRequest: the responder's message, optional price offer,
// an availability window, and an optional link to one of their existing items.
const ItemRequestResponse = sequelize.define("ItemRequestResponse", {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true,
  },
  // Request being answered.
  itemRequestId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: { model: "ItemRequests", key: "id" },
  },
  // User making the offer.
  responderId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: { model: "Users", key: "id" },
  },
  message: {
    type: DataTypes.TEXT,
    allowNull: false,
  },
  // Offered pricing (both optional).
  offerPricePerHour: { type: DataTypes.DECIMAL(10, 2) },
  offerPricePerDay: { type: DataTypes.DECIMAL(10, 2) },
  // Window during which the responder can provide the item.
  availableStartDate: { type: DataTypes.DATE },
  availableEndDate: { type: DataTypes.DATE },
  // Existing listing offered in response, if any.
  existingItemId: {
    type: DataTypes.UUID,
    allowNull: true,
    references: { model: "Items", key: "id" },
  },
  status: {
    type: DataTypes.ENUM("pending", "accepted", "declined", "expired"),
    defaultValue: "pending",
  },
  // Free-form contact details supplied by the responder.
  contactInfo: { type: DataTypes.STRING },
});

module.exports = ItemRequestResponse;

View File

@@ -23,28 +23,28 @@ const Message = sequelize.define('Message', {
key: 'id'
}
},
subject: {
type: DataTypes.STRING,
allowNull: false
},
content: {
type: DataTypes.TEXT,
allowNull: false
allowNull: true
},
isRead: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
parentMessageId: {
type: DataTypes.UUID,
allowNull: true,
references: {
model: 'Messages',
key: 'id'
}
imageFilename: {
type: DataTypes.TEXT,
allowNull: true
}
}, {
timestamps: true
timestamps: true,
validate: {
contentOrImage() {
const hasContent = this.content && this.content.trim().length > 0;
if (!hasContent && !this.imageFilename) {
throw new Error('Message must have content or an image');
}
}
}
});
module.exports = Message;

24
backend/models/PostTag.js Normal file
View File

@@ -0,0 +1,24 @@
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");

// Join row attaching a free-form tag name to a forum post.
const PostTag = sequelize.define("PostTag", {
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true,
  },
  // Tagged post.
  postId: {
    type: DataTypes.UUID,
    allowNull: false,
    references: { model: "ForumPosts", key: "id" },
  },
  // Tag text.
  tagName: {
    type: DataTypes.STRING,
    allowNull: false,
  },
});

module.exports = PostTag;

View File

@@ -55,19 +55,24 @@ const Rental = sequelize.define("Rental", {
type: DataTypes.ENUM(
"pending",
"confirmed",
"declined",
"active",
"completed",
"cancelled"
"cancelled",
"returned_late",
"returned_late_and_damaged",
"damaged",
"lost"
),
defaultValue: "pending",
allowNull: false,
},
paymentStatus: {
type: DataTypes.ENUM("pending", "paid", "refunded"),
defaultValue: "pending",
type: DataTypes.ENUM("pending", "paid", "refunded", "not_required", "requires_action"),
allowNull: false,
},
payoutStatus: {
type: DataTypes.ENUM("pending", "processing", "completed", "failed"),
defaultValue: "pending",
type: DataTypes.ENUM("pending", "completed", "failed", "on_hold"),
allowNull: true,
},
payoutProcessedAt: {
type: DataTypes.DATE,
@@ -75,6 +80,66 @@ const Rental = sequelize.define("Rental", {
stripeTransferId: {
type: DataTypes.STRING,
},
// Bank deposit tracking fields (for tracking when Stripe deposits to owner's bank)
bankDepositStatus: {
type: DataTypes.ENUM("pending", "in_transit", "paid", "failed", "canceled"),
allowNull: true,
},
bankDepositAt: {
type: DataTypes.DATE,
},
stripePayoutId: {
type: DataTypes.STRING,
},
bankDepositFailureCode: {
type: DataTypes.STRING,
},
// Dispute tracking fields (for tracking Stripe payment disputes/chargebacks)
// Stripe dispute statuses: https://docs.stripe.com/api/disputes/object#dispute_object-status
stripeDisputeStatus: {
type: DataTypes.ENUM(
"needs_response",
"under_review",
"won",
"lost",
"warning_needs_response",
"warning_under_review",
"warning_closed"
),
allowNull: true,
},
stripeDisputeId: {
type: DataTypes.STRING,
allowNull: true,
},
stripeDisputeReason: {
type: DataTypes.STRING,
allowNull: true,
},
stripeDisputeAmount: {
type: DataTypes.INTEGER,
allowNull: true,
},
stripeDisputeCreatedAt: {
type: DataTypes.DATE,
allowNull: true,
},
stripeDisputeEvidenceDueBy: {
type: DataTypes.DATE,
allowNull: true,
},
stripeDisputeClosedAt: {
type: DataTypes.DATE,
allowNull: true,
},
stripeDisputeLost: {
type: DataTypes.BOOLEAN,
defaultValue: false,
},
stripeDisputeLostAmount: {
type: DataTypes.INTEGER,
allowNull: true,
},
// Refund tracking fields
refundAmount: {
type: DataTypes.DECIMAL(10, 2),
@@ -94,12 +159,39 @@ const Rental = sequelize.define("Rental", {
cancelledAt: {
type: DataTypes.DATE,
},
declineReason: {
type: DataTypes.TEXT,
},
stripePaymentMethodId: {
type: DataTypes.STRING,
},
stripePaymentIntentId: {
type: DataTypes.STRING,
},
paymentMethodBrand: {
type: DataTypes.STRING,
},
paymentMethodLast4: {
type: DataTypes.STRING,
},
chargedAt: {
type: DataTypes.DATE,
},
// Payment failure notification tracking
paymentFailedNotifiedAt: {
type: DataTypes.DATE,
},
paymentFailedReason: {
type: DataTypes.TEXT,
},
// Payment method update rate limiting
paymentMethodUpdatedAt: {
type: DataTypes.DATE,
},
paymentMethodUpdateCount: {
type: DataTypes.INTEGER,
defaultValue: 0,
},
deliveryMethod: {
type: DataTypes.ENUM("pickup", "delivery"),
defaultValue: "pickup",
@@ -107,7 +199,7 @@ const Rental = sequelize.define("Rental", {
deliveryAddress: {
type: DataTypes.TEXT,
},
notes: {
intendedUse: {
type: DataTypes.TEXT,
},
// Renter's review of the item (existing fields renamed for clarity)
@@ -153,6 +245,29 @@ const Rental = sequelize.define("Rental", {
renterPrivateMessage: {
type: DataTypes.TEXT,
},
// Condition check and return handling fields
actualReturnDateTime: {
type: DataTypes.DATE,
},
lateFees: {
type: DataTypes.DECIMAL(10, 2),
defaultValue: 0.0,
},
damageFees: {
type: DataTypes.DECIMAL(10, 2),
defaultValue: 0.0,
},
replacementFees: {
type: DataTypes.DECIMAL(10, 2),
defaultValue: 0.0,
},
itemLostReportedAt: {
type: DataTypes.DATE,
},
damageAssessment: {
type: DataTypes.JSONB,
defaultValue: {},
},
});
module.exports = Rental;

View File

@@ -1,132 +1,715 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../config/database');
const bcrypt = require('bcryptjs');
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");
const bcrypt = require("bcryptjs");
const User = sequelize.define('User', {
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
username: {
type: DataTypes.STRING,
unique: true,
allowNull: true
},
email: {
type: DataTypes.STRING,
unique: true,
allowNull: true,
validate: {
isEmail: true
}
},
password: {
type: DataTypes.STRING,
allowNull: true
},
firstName: {
type: DataTypes.STRING,
allowNull: false
},
lastName: {
type: DataTypes.STRING,
allowNull: false
},
phone: {
type: DataTypes.STRING,
unique: true,
allowNull: true
},
phoneVerified: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
authProvider: {
type: DataTypes.ENUM('local', 'phone', 'google', 'apple', 'facebook'),
defaultValue: 'local'
},
providerId: {
type: DataTypes.STRING,
allowNull: true
},
address1: {
type: DataTypes.STRING
},
address2: {
type: DataTypes.STRING
},
city: {
type: DataTypes.STRING
},
state: {
type: DataTypes.STRING
},
zipCode: {
type: DataTypes.STRING
},
country: {
type: DataTypes.STRING
},
profileImage: {
type: DataTypes.STRING
},
isVerified: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
defaultAvailableAfter: {
type: DataTypes.STRING,
defaultValue: '09:00'
},
defaultAvailableBefore: {
type: DataTypes.STRING,
defaultValue: '17:00'
},
defaultSpecifyTimesPerDay: {
type: DataTypes.BOOLEAN,
defaultValue: false
},
defaultWeeklyTimes: {
type: DataTypes.JSONB,
defaultValue: {
sunday: { availableAfter: "09:00", availableBefore: "17:00" },
monday: { availableAfter: "09:00", availableBefore: "17:00" },
tuesday: { availableAfter: "09:00", availableBefore: "17:00" },
wednesday: { availableAfter: "09:00", availableBefore: "17:00" },
thursday: { availableAfter: "09:00", availableBefore: "17:00" },
friday: { availableAfter: "09:00", availableBefore: "17:00" },
saturday: { availableAfter: "09:00", availableBefore: "17:00" }
}
},
stripeConnectedAccountId: {
type: DataTypes.STRING,
allowNull: true
},
stripeCustomerId: {
type: DataTypes.STRING,
allowNull: true
}
}, {
hooks: {
beforeCreate: async (user) => {
if (user.password) {
user.password = await bcrypt.hash(user.password, 10);
}
const User = sequelize.define(
"User",
{
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true,
},
beforeUpdate: async (user) => {
if (user.changed('password') && user.password) {
user.password = await bcrypt.hash(user.password, 10);
}
}
}
});
email: {
type: DataTypes.STRING,
unique: true,
allowNull: false,
validate: {
isEmail: true,
},
},
password: {
type: DataTypes.STRING,
allowNull: true,
},
firstName: {
type: DataTypes.STRING,
allowNull: false,
},
lastName: {
type: DataTypes.STRING,
allowNull: false,
},
phone: {
type: DataTypes.STRING,
allowNull: true,
},
authProvider: {
type: DataTypes.ENUM("local", "google"),
defaultValue: "local",
},
providerId: {
type: DataTypes.STRING,
allowNull: true,
},
address1: {
type: DataTypes.STRING,
},
address2: {
type: DataTypes.STRING,
},
city: {
type: DataTypes.STRING,
},
state: {
type: DataTypes.STRING,
},
zipCode: {
type: DataTypes.STRING,
},
country: {
type: DataTypes.STRING,
},
imageFilename: {
type: DataTypes.TEXT,
},
isVerified: {
type: DataTypes.BOOLEAN,
defaultValue: false,
},
verificationToken: {
type: DataTypes.STRING,
allowNull: true,
},
verificationTokenExpiry: {
type: DataTypes.DATE,
allowNull: true,
},
verifiedAt: {
type: DataTypes.DATE,
allowNull: true,
},
passwordResetToken: {
type: DataTypes.STRING,
allowNull: true,
},
passwordResetTokenExpiry: {
type: DataTypes.DATE,
allowNull: true,
},
defaultAvailableAfter: {
type: DataTypes.STRING,
defaultValue: "00:00",
},
defaultAvailableBefore: {
type: DataTypes.STRING,
defaultValue: "23:00",
},
defaultSpecifyTimesPerDay: {
type: DataTypes.BOOLEAN,
defaultValue: false,
},
defaultWeeklyTimes: {
type: DataTypes.JSONB,
defaultValue: {
sunday: { availableAfter: "00:00", availableBefore: "23:00" },
monday: { availableAfter: "00:00", availableBefore: "23:00" },
tuesday: { availableAfter: "00:00", availableBefore: "23:00" },
wednesday: { availableAfter: "00:00", availableBefore: "23:00" },
thursday: { availableAfter: "00:00", availableBefore: "23:00" },
friday: { availableAfter: "00:00", availableBefore: "23:00" },
saturday: { availableAfter: "00:00", availableBefore: "23:00" },
},
},
stripeConnectedAccountId: {
type: DataTypes.STRING,
allowNull: true,
},
stripePayoutsEnabled: {
type: DataTypes.BOOLEAN,
defaultValue: false,
allowNull: true,
},
stripeCustomerId: {
type: DataTypes.STRING,
allowNull: true,
},
stripeRequirementsCurrentlyDue: {
type: DataTypes.JSON,
defaultValue: [],
allowNull: true,
},
stripeRequirementsPastDue: {
type: DataTypes.JSON,
defaultValue: [],
allowNull: true,
},
stripeDisabledReason: {
type: DataTypes.STRING,
allowNull: true,
},
stripeRequirementsLastUpdated: {
type: DataTypes.DATE,
allowNull: true,
},
loginAttempts: {
type: DataTypes.INTEGER,
defaultValue: 0,
},
lockUntil: {
type: DataTypes.DATE,
allowNull: true,
},
jwtVersion: {
type: DataTypes.INTEGER,
defaultValue: 0,
allowNull: false,
},
role: {
type: DataTypes.ENUM("user", "admin"),
defaultValue: "user",
allowNull: false,
},
isBanned: {
type: DataTypes.BOOLEAN,
defaultValue: false,
allowNull: false,
},
bannedAt: {
type: DataTypes.DATE,
allowNull: true,
},
bannedBy: {
type: DataTypes.UUID,
allowNull: true,
},
banReason: {
type: DataTypes.TEXT,
allowNull: true,
},
itemRequestNotificationRadius: {
type: DataTypes.INTEGER,
defaultValue: 10,
allowNull: true,
validate: {
min: 1,
max: 100,
},
},
verificationAttempts: {
type: DataTypes.INTEGER,
defaultValue: 0,
allowNull: true,
},
// Two-Factor Authentication fields
twoFactorEnabled: {
type: DataTypes.BOOLEAN,
defaultValue: false,
allowNull: false,
},
twoFactorMethod: {
type: DataTypes.ENUM("totp", "email"),
allowNull: true,
},
totpSecret: {
type: DataTypes.STRING,
allowNull: true,
},
totpSecretIv: {
type: DataTypes.STRING,
allowNull: true,
},
// Email OTP fields (backup method)
emailOtpCode: {
type: DataTypes.STRING,
allowNull: true,
},
emailOtpExpiry: {
type: DataTypes.DATE,
allowNull: true,
},
emailOtpAttempts: {
type: DataTypes.INTEGER,
defaultValue: 0,
allowNull: false,
},
// Recovery codes
recoveryCodesHash: {
type: DataTypes.TEXT,
allowNull: true,
},
recoveryCodesGeneratedAt: {
type: DataTypes.DATE,
allowNull: true,
},
// Step-up session tracking
twoFactorVerifiedAt: {
type: DataTypes.DATE,
allowNull: true,
},
// Temporary secret during setup
twoFactorSetupPendingSecret: {
type: DataTypes.STRING,
allowNull: true,
},
twoFactorSetupPendingSecretIv: {
type: DataTypes.STRING,
allowNull: true,
},
// TOTP replay protection
recentTotpCodes: {
type: DataTypes.TEXT,
allowNull: true,
},
},
{
hooks: {
beforeCreate: async (user) => {
if (user.password) {
user.password = await bcrypt.hash(user.password, 12);
}
},
beforeUpdate: async (user) => {
if (user.changed("password") && user.password) {
user.password = await bcrypt.hash(user.password, 12);
}
},
},
},
);
/**
 * Compare a plaintext password against the stored bcrypt hash.
 * @param {string} password - candidate plaintext password
 * @returns {Promise<boolean>} false for accounts with no local password
 *   (e.g. OAuth-only users), otherwise the bcrypt comparison result
 */
// Note: the previous text had this assignment line duplicated (a merge/diff
// artifact), leaving the outer function unterminated; a single definition
// restores valid syntax.
User.prototype.comparePassword = async function (password) {
  if (!this.password) {
    return false;
  }
  return bcrypt.compare(password, this.password);
};
module.exports = User;
// ----- Account lockout -----
const MAX_LOGIN_ATTEMPTS = 10;
const LOCK_TIME = 2 * 60 * 60 * 1000; // lockout duration: 2 hours

// True while a lock is in effect (lockUntil is set and still in the future).
User.prototype.isLocked = function () {
  return Boolean(this.lockUntil && this.lockUntil > Date.now());
};

// Record a failed login attempt. Restarts the counter at 1 once an old
// lock has expired, and sets a fresh lock when the attempt limit is hit.
User.prototype.incLoginAttempts = async function () {
  const now = Date.now();

  // A previous lock that has already expired: begin counting again at 1.
  if (this.lockUntil && this.lockUntil < now) {
    return this.update({ loginAttempts: 1, lockUntil: null });
  }

  const nextAttempts = this.loginAttempts + 1;
  const updates = { loginAttempts: nextAttempts };
  if (nextAttempts >= MAX_LOGIN_ATTEMPTS && !this.isLocked()) {
    updates.lockUntil = now + LOCK_TIME;
  }
  return this.update(updates);
};

// Clear the failure counter after a successful login.
User.prototype.resetLoginAttempts = async function () {
  return this.update({ loginAttempts: 0, lockUntil: null });
};
// Email verification methods
// Maximum verification attempts before requiring a new code
const MAX_VERIFICATION_ATTEMPTS = 5;
/**
 * Issue a fresh 6-digit email verification code valid for 24 hours.
 * Also resets the per-code attempt counter so the new code gets a clean
 * retry budget.
 * @returns {Promise<User>} the updated instance
 */
User.prototype.generateVerificationToken = async function () {
  const crypto = require("crypto");
  // crypto.randomInt(min, max) excludes max, so pass 1000000 to make the
  // full 100000-999999 range reachable (the previous bound of 999999
  // silently made that value unreachable).
  const code = crypto.randomInt(100000, 1000000).toString();
  const expiry = new Date(Date.now() + 24 * 60 * 60 * 1000); // 24 hours
  return this.update({
    verificationToken: code,
    verificationTokenExpiry: expiry,
    verificationAttempts: 0, // Reset attempts on new code
  });
};
/**
 * Check a user-supplied email verification code against the stored one.
 * Returns false when no code is outstanding, the stored code has expired,
 * the input is not exactly six digits, or the values differ.
 * @param {string} token - candidate 6-digit code from the user
 * @returns {boolean}
 */
User.prototype.isVerificationTokenValid = function (token) {
  const crypto = require("crypto");
  if (!this.verificationToken || !this.verificationTokenExpiry) {
    return false;
  }
  // Check if token is expired
  if (new Date() > new Date(this.verificationTokenExpiry)) {
    return false;
  }
  // Validate 6-digit format
  if (!/^\d{6}$/.test(token)) {
    return false;
  }
  // Use timing-safe comparison to prevent timing attacks
  try {
    const inputBuffer = Buffer.from(token);
    const storedBuffer = Buffer.from(this.verificationToken);
    // timingSafeEqual throws on length mismatch, so bail out first.
    // Length is not secret here: valid codes are always 6 digits.
    if (inputBuffer.length !== storedBuffer.length) {
      return false;
    }
    return crypto.timingSafeEqual(inputBuffer, storedBuffer);
  } catch {
    return false;
  }
};
// True once the per-code attempt budget (MAX_VERIFICATION_ATTEMPTS) is
// spent; the caller must then issue a new code.
User.prototype.isVerificationLocked = function () {
  return (this.verificationAttempts || 0) >= MAX_VERIFICATION_ATTEMPTS;
};
// Persist and return the incremented per-code failure count.
User.prototype.incrementVerificationAttempts = async function () {
  const newAttempts = (this.verificationAttempts || 0) + 1;
  await this.update({ verificationAttempts: newAttempts });
  return newAttempts;
};
// Mark the account's email as verified and clear all verification state
// so the used code cannot be replayed.
User.prototype.verifyEmail = async function () {
  return this.update({
    isVerified: true,
    verifiedAt: new Date(),
    verificationToken: null,
    verificationTokenExpiry: null,
    verificationAttempts: 0,
  });
};
// Password reset methods
/**
 * Create a password-reset token: a random 32-byte hex string whose SHA-256
 * hash is stored (so a database leak does not expose usable tokens), with a
 * 1-hour expiry.
 * @returns {Promise<string>} the plain token, for inclusion in the reset URL
 */
User.prototype.generatePasswordResetToken = async function () {
  const crypto = require("crypto");
  // Generate random token for email URL
  const token = crypto.randomBytes(32).toString("hex");
  // Hash token before storing in database (SHA-256)
  const hashedToken = crypto.createHash("sha256").update(token).digest("hex");
  const expiry = new Date(Date.now() + 60 * 60 * 1000); // 1 hour
  await this.update({
    passwordResetToken: hashedToken,
    passwordResetTokenExpiry: expiry,
  });
  // Return plain token for email URL (not stored in DB)
  return token;
};
/**
 * Validate a plain reset token against the stored SHA-256 hash.
 * @param {string} token - plain token from the reset URL
 * @returns {boolean} false when no token is outstanding, it has expired,
 *   or the hashes differ
 */
User.prototype.isPasswordResetTokenValid = function (token) {
  if (!this.passwordResetToken || !this.passwordResetTokenExpiry) {
    return false;
  }
  // Check if token is expired first
  if (new Date() > new Date(this.passwordResetTokenExpiry)) {
    return false;
  }
  const crypto = require("crypto");
  // Hash the incoming token to compare with stored hash
  const hashedToken = crypto.createHash("sha256").update(token).digest("hex");
  // Use timing-safe comparison to prevent timing attacks.
  // Both values are hex-encoded SHA-256 digests, so both decode to 32 bytes.
  const storedTokenBuffer = Buffer.from(this.passwordResetToken, "hex");
  const hashedTokenBuffer = Buffer.from(hashedToken, "hex");
  // Ensure buffers are same length for timingSafeEqual (it throws otherwise)
  if (storedTokenBuffer.length !== hashedTokenBuffer.length) {
    return false;
  }
  return crypto.timingSafeEqual(storedTokenBuffer, hashedTokenBuffer);
};
/**
 * Set a new password and clear reset state. The plaintext is hashed by the
 * model's beforeUpdate hook, which bcrypt-hashes any changed password.
 * @param {string} newPassword - new plaintext password
 */
User.prototype.resetPassword = async function (newPassword) {
  return this.update({
    password: newPassword,
    passwordResetToken: null,
    passwordResetTokenExpiry: null,
    // Increment JWT version to invalidate all existing sessions
    jwtVersion: this.jwtVersion + 1,
  });
};
// Ban a user: record who banned them, when, and why, and bump jwtVersion
// so every outstanding session token becomes invalid immediately.
User.prototype.banUser = async function (adminId, reason) {
  const banFields = {
    isBanned: true,
    bannedAt: new Date(),
    bannedBy: adminId,
    banReason: reason,
    // Increment JWT version to immediately invalidate all sessions.
    jwtVersion: this.jwtVersion + 1,
  };
  return this.update(banFields);
};

// Lift a ban by clearing all ban fields.
User.prototype.unbanUser = async function () {
  const clearedFields = {
    isBanned: false,
    bannedAt: null,
    bannedBy: null,
    banReason: null,
  };
  // jwtVersion is intentionally untouched on unban: the user will simply
  // need to log in fresh.
  return this.update(clearedFields);
};
// Two-Factor Authentication methods
const TwoFactorService = require("../services/TwoFactorService");
// Hold the encrypted TOTP secret (and its IV) in the pending slots until
// the user proves they can produce a valid code; only then is it promoted
// to the active slots by enableTotp().
User.prototype.storePendingTotpSecret = async function (
  encryptedSecret,
  encryptedSecretIv,
) {
  return this.update({
    twoFactorSetupPendingSecret: encryptedSecret,
    twoFactorSetupPendingSecretIv: encryptedSecretIv,
  });
};
/**
 * Enable TOTP 2FA after the pending secret has been verified.
 * Promotes the pending secret/IV to the active columns, clears the pending
 * slots, and stores bcrypt-hashed recovery codes in the versioned
 * { version: 1, codes: [{ hash, used }] } format.
 * @param {string[]} recoveryCodes - plaintext recovery codes (never stored)
 */
User.prototype.enableTotp = async function (recoveryCodes) {
  const hashedCodes = await Promise.all(
    recoveryCodes.map((code) => bcrypt.hash(code, 12)),
  );
  // Store in structured format
  const recoveryData = {
    version: 1,
    codes: hashedCodes.map((hash) => ({
      hash,
      used: false,
    })),
  };
  return this.update({
    twoFactorEnabled: true,
    twoFactorMethod: "totp",
    totpSecret: this.twoFactorSetupPendingSecret,
    totpSecretIv: this.twoFactorSetupPendingSecretIv,
    twoFactorSetupPendingSecret: null,
    twoFactorSetupPendingSecretIv: null,
    recoveryCodesHash: JSON.stringify(recoveryData),
    recoveryCodesGeneratedAt: new Date(),
    twoFactorVerifiedAt: new Date(), // Consider setup as verification
  });
};
/**
 * Enable email-OTP 2FA. Same recovery-code handling as enableTotp, but no
 * TOTP secret is involved.
 * @param {string[]} recoveryCodes - plaintext recovery codes (never stored)
 */
User.prototype.enableEmailTwoFactor = async function (recoveryCodes) {
  const hashedCodes = await Promise.all(
    recoveryCodes.map((code) => bcrypt.hash(code, 12)),
  );
  // Store in structured format
  const recoveryData = {
    version: 1,
    codes: hashedCodes.map((hash) => ({
      hash,
      used: false,
    })),
  };
  return this.update({
    twoFactorEnabled: true,
    twoFactorMethod: "email",
    recoveryCodesHash: JSON.stringify(recoveryData),
    recoveryCodesGeneratedAt: new Date(),
    twoFactorVerifiedAt: new Date(),
  });
};
// Disable 2FA: wipe every 2FA-related column (method, secrets, email-OTP
// state, recovery codes, pending-setup secrets) in a single update.
User.prototype.disableTwoFactor = async function () {
  return this.update({
    twoFactorEnabled: false,
    twoFactorMethod: null,
    totpSecret: null,
    totpSecretIv: null,
    emailOtpCode: null,
    emailOtpExpiry: null,
    emailOtpAttempts: 0,
    recoveryCodesHash: null,
    recoveryCodesGeneratedAt: null,
    twoFactorVerifiedAt: null,
    twoFactorSetupPendingSecret: null,
    twoFactorSetupPendingSecretIv: null,
  });
};
// ----- Email one-time password (backup 2FA method) -----

// Create a fresh email OTP. The hash and expiry are persisted; the plain
// code is returned so the caller can deliver it via email.
User.prototype.generateEmailOtp = async function () {
  const otp = TwoFactorService.generateEmailOtp();
  await this.update({
    emailOtpCode: otp.hashedCode,
    emailOtpExpiry: otp.expiry,
    emailOtpAttempts: 0,
  });
  return otp.code;
};

// Check a submitted code against the stored hash and expiry.
User.prototype.verifyEmailOtp = function (inputCode) {
  const { emailOtpCode, emailOtpExpiry } = this;
  return TwoFactorService.verifyEmailOtp(inputCode, emailOtpCode, emailOtpExpiry);
};

// Record one more failed OTP attempt and return the new total.
User.prototype.incrementEmailOtpAttempts = async function () {
  const attempts = (this.emailOtpAttempts || 0) + 1;
  await this.update({ emailOtpAttempts: attempts });
  return attempts;
};

// Whether the email-OTP attempt budget has been exhausted.
User.prototype.isEmailOtpLocked = function () {
  const attempts = this.emailOtpAttempts || 0;
  return TwoFactorService.isEmailOtpLocked(attempts);
};

// Drop all OTP state once a code has been used successfully.
User.prototype.clearEmailOtp = async function () {
  const cleared = {
    emailOtpCode: null,
    emailOtpExpiry: null,
    emailOtpAttempts: 0,
  };
  return this.update(cleared);
};
// ----- TOTP replay protection -----

// True when this exact code was already accepted within the retained window.
User.prototype.hasUsedTotpCode = function (code) {
  const crypto = require("crypto");
  const digest = crypto.createHash("sha256").update(code).digest("hex");
  const recent = JSON.parse(this.recentTotpCodes || "[]");
  return recent.includes(digest);
};

// Remember an accepted code so it cannot be replayed. Only the 5 newest
// hashes are kept (covers roughly 2.5 minutes of 30-second TOTP windows).
User.prototype.markTotpCodeUsed = async function (code) {
  const crypto = require("crypto");
  const digest = crypto.createHash("sha256").update(code).digest("hex");
  const recent = [digest, ...JSON.parse(this.recentTotpCodes || "[]")];
  await this.update({
    recentTotpCodes: JSON.stringify(recent.slice(0, 5)),
  });
};
// Verify a TOTP code against the active secret, rejecting replays of
// recently accepted codes.
User.prototype.verifyTotpCode = function (code) {
  const hasSecret = this.totpSecret && this.totpSecretIv;
  if (!hasSecret || this.hasUsedTotpCode(code)) {
    return false;
  }
  return TwoFactorService.verifyTotpCode(this.totpSecret, this.totpSecretIv, code);
};

// Verify a code against the *pending* secret captured during setup.
// Unlike verifyTotpCode, no replay check is performed here.
User.prototype.verifyPendingTotpCode = function (code) {
  if (!this.twoFactorSetupPendingSecret) {
    return false;
  }
  if (!this.twoFactorSetupPendingSecretIv) {
    return false;
  }
  return TwoFactorService.verifyTotpCode(
    this.twoFactorSetupPendingSecret,
    this.twoFactorSetupPendingSecretIv,
    code,
  );
};
/**
 * Attempt to consume a recovery code.
 * Supports two stored formats: the versioned
 * { version: 1, codes: [{ hash, used }] } shape (the matched entry is
 * marked used with a timestamp) and the legacy flat array of hashes (the
 * matched slot is nulled out). On success the step-up timestamp is
 * refreshed.
 * @param {string} inputCode - plaintext recovery code
 * @returns {Promise<{valid: boolean, remainingCodes?: number}>}
 *   remainingCodes is omitted when no codes are stored at all
 */
User.prototype.useRecoveryCode = async function (inputCode) {
  if (!this.recoveryCodesHash) {
    return { valid: false };
  }
  const recoveryData = JSON.parse(this.recoveryCodesHash);
  const { valid, index } = await TwoFactorService.verifyRecoveryCode(
    inputCode,
    recoveryData,
  );
  if (valid) {
    // Handle both old and new format
    if (recoveryData.version) {
      // New structured format - mark as used with timestamp
      recoveryData.codes[index].used = true;
      recoveryData.codes[index].usedAt = new Date().toISOString();
    } else {
      // Legacy format - set to null
      recoveryData[index] = null;
    }
    await this.update({
      recoveryCodesHash: JSON.stringify(recoveryData),
      twoFactorVerifiedAt: new Date(),
    });
  }
  return {
    valid,
    remainingCodes:
      TwoFactorService.getRemainingRecoveryCodesCount(recoveryData),
  };
};
// Count the recovery codes that are still usable (0 when none are stored).
User.prototype.getRemainingRecoveryCodes = function () {
  if (!this.recoveryCodesHash) {
    return 0;
  }
  const recoveryData = JSON.parse(this.recoveryCodesHash);
  return TwoFactorService.getRemainingRecoveryCodesCount(recoveryData);
};
// Replace the user's recovery codes with a freshly generated set. Hashes
// are persisted in the versioned format; the plaintext codes are returned
// once for display and never stored.
User.prototype.regenerateRecoveryCodes = async function () {
  const generated = await TwoFactorService.generateRecoveryCodes();
  const codes = generated.hashedCodes.map((hash) => ({ hash, used: false }));
  await this.update({
    recoveryCodesHash: JSON.stringify({ version: 1, codes }),
    recoveryCodesGeneratedAt: new Date(),
  });
  return generated.codes;
};

// Refresh the step-up (recent 2FA verification) timestamp.
User.prototype.updateStepUpSession = async function () {
  return this.update({ twoFactorVerifiedAt: new Date() });
};
module.exports = User;

View File

@@ -1,54 +1,54 @@
const { DataTypes } = require('sequelize');
const sequelize = require('../config/database');
const { DataTypes } = require("sequelize");
const sequelize = require("../config/database");
const UserAddress = sequelize.define('UserAddress', {
const UserAddress = sequelize.define("UserAddress", {
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
primaryKey: true,
},
userId: {
type: DataTypes.UUID,
allowNull: false,
references: {
model: 'Users',
key: 'id'
}
model: "Users",
key: "id",
},
},
address1: {
type: DataTypes.STRING,
allowNull: false
allowNull: false,
},
address2: {
type: DataTypes.STRING
type: DataTypes.STRING,
},
city: {
type: DataTypes.STRING,
allowNull: false
allowNull: false,
},
state: {
type: DataTypes.STRING,
allowNull: false
allowNull: false,
},
zipCode: {
type: DataTypes.STRING,
allowNull: false
allowNull: false,
},
country: {
type: DataTypes.STRING,
allowNull: false,
defaultValue: 'US'
defaultValue: "US",
},
latitude: {
type: DataTypes.DECIMAL(10, 8)
type: DataTypes.DECIMAL(10, 8),
},
longitude: {
type: DataTypes.DECIMAL(11, 8)
type: DataTypes.DECIMAL(11, 8),
},
isPrimary: {
type: DataTypes.BOOLEAN,
defaultValue: false
}
defaultValue: false,
},
});
module.exports = UserAddress;
module.exports = UserAddress;

View File

@@ -1,41 +1,83 @@
const sequelize = require('../config/database');
const User = require('./User');
const Item = require('./Item');
const Rental = require('./Rental');
const Message = require('./Message');
const ItemRequest = require('./ItemRequest');
const ItemRequestResponse = require('./ItemRequestResponse');
const UserAddress = require('./UserAddress');
const sequelize = require("../config/database");
const User = require("./User");
const Item = require("./Item");
const Rental = require("./Rental");
const Message = require("./Message");
const ForumPost = require("./ForumPost");
const ForumComment = require("./ForumComment");
const PostTag = require("./PostTag");
const UserAddress = require("./UserAddress");
const ConditionCheck = require("./ConditionCheck");
const AlphaInvitation = require("./AlphaInvitation");
const Feedback = require("./Feedback");
const ImageMetadata = require("./ImageMetadata");
User.hasMany(Item, { as: 'ownedItems', foreignKey: 'ownerId' });
Item.belongsTo(User, { as: 'owner', foreignKey: 'ownerId' });
User.hasMany(Item, { as: "ownedItems", foreignKey: "ownerId" });
Item.belongsTo(User, { as: "owner", foreignKey: "ownerId" });
Item.belongsTo(User, { as: "deleter", foreignKey: "deletedBy" });
User.hasMany(Rental, { as: 'rentalsAsRenter', foreignKey: 'renterId' });
User.hasMany(Rental, { as: 'rentalsAsOwner', foreignKey: 'ownerId' });
User.hasMany(Rental, { as: "rentalsAsRenter", foreignKey: "renterId" });
User.hasMany(Rental, { as: "rentalsAsOwner", foreignKey: "ownerId" });
Item.hasMany(Rental, { as: 'rentals', foreignKey: 'itemId' });
Rental.belongsTo(Item, { as: 'item', foreignKey: 'itemId' });
Rental.belongsTo(User, { as: 'renter', foreignKey: 'renterId' });
Rental.belongsTo(User, { as: 'owner', foreignKey: 'ownerId' });
Item.hasMany(Rental, { as: "rentals", foreignKey: "itemId" });
Rental.belongsTo(Item, { as: "item", foreignKey: "itemId" });
Rental.belongsTo(User, { as: "renter", foreignKey: "renterId" });
Rental.belongsTo(User, { as: "owner", foreignKey: "ownerId" });
User.hasMany(Message, { as: 'sentMessages', foreignKey: 'senderId' });
User.hasMany(Message, { as: 'receivedMessages', foreignKey: 'receiverId' });
Message.belongsTo(User, { as: 'sender', foreignKey: 'senderId' });
Message.belongsTo(User, { as: 'receiver', foreignKey: 'receiverId' });
Message.hasMany(Message, { as: 'replies', foreignKey: 'parentMessageId' });
Message.belongsTo(Message, { as: 'parentMessage', foreignKey: 'parentMessageId' });
User.hasMany(Message, { as: "sentMessages", foreignKey: "senderId" });
User.hasMany(Message, { as: "receivedMessages", foreignKey: "receiverId" });
Message.belongsTo(User, { as: "sender", foreignKey: "senderId" });
Message.belongsTo(User, { as: "receiver", foreignKey: "receiverId" });
User.hasMany(ItemRequest, { as: 'itemRequests', foreignKey: 'requesterId' });
ItemRequest.belongsTo(User, { as: 'requester', foreignKey: 'requesterId' });
// Forum associations
User.hasMany(ForumPost, { as: "forumPosts", foreignKey: "authorId" });
ForumPost.belongsTo(User, { as: "author", foreignKey: "authorId" });
ForumPost.belongsTo(User, { as: "closer", foreignKey: "closedBy" });
User.hasMany(ItemRequestResponse, { as: 'itemRequestResponses', foreignKey: 'responderId' });
ItemRequest.hasMany(ItemRequestResponse, { as: 'responses', foreignKey: 'itemRequestId' });
ItemRequestResponse.belongsTo(User, { as: 'responder', foreignKey: 'responderId' });
ItemRequestResponse.belongsTo(ItemRequest, { as: 'itemRequest', foreignKey: 'itemRequestId' });
ItemRequestResponse.belongsTo(Item, { as: 'existingItem', foreignKey: 'existingItemId' });
User.hasMany(ForumComment, { as: "forumComments", foreignKey: "authorId" });
ForumComment.belongsTo(User, { as: "author", foreignKey: "authorId" });
User.hasMany(UserAddress, { as: 'addresses', foreignKey: 'userId' });
UserAddress.belongsTo(User, { as: 'user', foreignKey: 'userId' });
ForumPost.hasMany(ForumComment, { as: "comments", foreignKey: "postId" });
ForumComment.belongsTo(ForumPost, { as: "post", foreignKey: "postId" });
// Self-referential association for nested comments
ForumComment.hasMany(ForumComment, { as: "replies", foreignKey: "parentCommentId" });
ForumComment.belongsTo(ForumComment, { as: "parentComment", foreignKey: "parentCommentId" });
ForumPost.hasMany(PostTag, { as: "tags", foreignKey: "postId" });
PostTag.belongsTo(ForumPost, { as: "post", foreignKey: "postId" });
User.hasMany(UserAddress, { as: "addresses", foreignKey: "userId" });
UserAddress.belongsTo(User, { as: "user", foreignKey: "userId" });
// ConditionCheck associations
Rental.hasMany(ConditionCheck, {
as: "conditionChecks",
foreignKey: "rentalId",
});
ConditionCheck.belongsTo(Rental, { as: "rental", foreignKey: "rentalId" });
User.hasMany(ConditionCheck, {
as: "conditionChecks",
foreignKey: "submittedBy",
});
ConditionCheck.belongsTo(User, {
as: "submittedByUser",
foreignKey: "submittedBy",
});
// AlphaInvitation associations
AlphaInvitation.belongsTo(User, {
as: "user",
foreignKey: "usedBy",
});
User.hasMany(AlphaInvitation, {
as: "alphaInvitations",
foreignKey: "usedBy",
});
// Feedback associations
User.hasMany(Feedback, { as: "feedbacks", foreignKey: "userId" });
Feedback.belongsTo(User, { as: "user", foreignKey: "userId" });
module.exports = {
sequelize,
@@ -43,7 +85,12 @@ module.exports = {
Item,
Rental,
Message,
ItemRequest,
ItemRequestResponse,
UserAddress
};
ForumPost,
ForumComment,
PostTag,
UserAddress,
ConditionCheck,
AlphaInvitation,
Feedback,
ImageMetadata,
};

11789
backend/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -10,27 +10,72 @@
"start:prod": "NODE_ENV=prod node -r dotenv/config server.js dotenv_config_path=.env.prod",
"dev": "NODE_ENV=dev nodemon -r dotenv/config server.js dotenv_config_path=.env.dev",
"dev:qa": "NODE_ENV=qa nodemon -r dotenv/config server.js dotenv_config_path=.env.qa",
"test": "echo \"Error: no test specified\" && exit 1"
"test": "NODE_ENV=test jest",
"test:watch": "NODE_ENV=test jest --watch",
"test:coverage": "jest --coverage --maxWorkers=1",
"test:unit": "NODE_ENV=test jest tests/unit",
"test:integration": "NODE_ENV=test jest tests/integration",
"test:ci": "NODE_ENV=test jest --ci --coverage --maxWorkers=1",
"db:migrate": "sequelize-cli db:migrate",
"db:migrate:undo": "sequelize-cli db:migrate:undo",
"db:migrate:undo:all": "sequelize-cli db:migrate:undo:all",
"db:migrate:status": "sequelize-cli db:migrate:status",
"db:create": "sequelize-cli db:create",
"test:migrations": "NODE_ENV=test node scripts/test-migrations.js",
"alpha:add": "NODE_ENV=dev node scripts/manageAlphaInvitations.js add",
"alpha:list": "NODE_ENV=dev node scripts/manageAlphaInvitations.js list",
"alpha:revoke": "NODE_ENV=dev node scripts/manageAlphaInvitations.js revoke",
"alpha:restore": "NODE_ENV=dev node scripts/manageAlphaInvitations.js restore",
"alpha:resend": "NODE_ENV=dev node scripts/manageAlphaInvitations.js resend",
"alpha:bulk": "NODE_ENV=dev node scripts/manageAlphaInvitations.js bulk",
"alpha:help": "node scripts/manageAlphaInvitations.js help"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@aws-sdk/client-s3": "^3.940.0",
"@aws-sdk/client-scheduler": "^3.896.0",
"@aws-sdk/client-ses": "^3.896.0",
"@aws-sdk/credential-providers": "^3.901.0",
"@aws-sdk/s3-request-presigner": "^3.940.0",
"@googlemaps/google-maps-services-js": "^3.4.2",
"bcryptjs": "^3.0.2",
"body-parser": "^2.2.0",
"cookie-parser": "^1.4.7",
"cors": "^2.8.5",
"csrf": "^3.1.0",
"dompurify": "^3.2.6",
"dotenv": "^17.2.0",
"express": "^5.1.0",
"express-rate-limit": "^8.1.0",
"express-validator": "^7.2.1",
"google-auth-library": "^10.3.0",
"helmet": "^8.1.0",
"jsdom": "^27.0.0",
"jsonwebtoken": "^9.0.2",
"multer": "^2.0.2",
"node-cron": "^3.0.3",
"morgan": "^1.10.1",
"otplib": "^13.1.1",
"pg": "^8.16.3",
"qrcode": "^1.5.4",
"sequelize": "^6.37.7",
"sequelize-cli": "^6.6.3",
"socket.io": "^4.8.1",
"stripe": "^18.4.0",
"uuid": "^11.1.0"
"uuid": "^11.1.0",
"winston": "^3.17.0",
"winston-daily-rotate-file": "^5.0.0"
},
"devDependencies": {
"nodemon": "^3.1.10"
"@babel/core": "^7.28.6",
"@babel/preset-env": "^7.28.6",
"@types/jest": "^30.0.0",
"babel-jest": "^30.2.0",
"jest": "^30.1.3",
"nodemon": "^3.1.10",
"sequelize-mock": "^0.10.2",
"sinon": "^21.0.0",
"socket.io-client": "^4.8.1",
"supertest": "^7.1.4"
}
}

132
backend/routes/alpha.js Normal file
View File

@@ -0,0 +1,132 @@
const express = require("express");
const { AlphaInvitation, User } = require("../models");
const { authenticateToken, optionalAuth } = require("../middleware/auth");
const { alphaCodeValidationLimiter } = require("../middleware/rateLimiter");
const logger = require("../utils/logger");
const crypto = require("crypto");
const router = express.Router();
/**
 * Determine whether this request may use alpha-gated features.
 * Access is granted when (checked in order):
 *   1. alpha gating is disabled via ALPHA_TESTING_ENABLED,
 *   2. the request carries an alphaAccessCode cookie whose code matches a
 *      pending/active invitation, or
 *   3. the authenticated user has previously redeemed an invitation.
 * @param {object} req - Express request (cookies and optional user attached)
 * @returns {Promise<boolean>}
 */
async function checkAlphaAccess(req) {
  // Bypass alpha access check if feature is disabled
  if (process.env.ALPHA_TESTING_ENABLED !== 'true') {
    return true;
  }
  // Check 1: Valid alpha access cookie
  if (req.cookies && req.cookies.alphaAccessCode) {
    // NOTE(review): assumes cookie-parser deserialized the cookie into an
    // object ({ code, validatedAt }); if it arrives as a plain string this
    // destructuring yields undefined — confirm cookie parsing configuration.
    const { code } = req.cookies.alphaAccessCode;
    const invitation = await AlphaInvitation.findOne({
      where: { code, status: ["pending", "active"] },
    });
    if (invitation) {
      return true;
    }
  }
  // Check 2: Authenticated user who has used an invitation
  if (req.user && req.user.id) {
    const invitation = await AlphaInvitation.findOne({
      where: { usedBy: req.user.id },
    });
    if (invitation) {
      return true;
    }
  }
  return false;
}
/**
 * POST /api/alpha/validate-code
 * Validates an alpha invitation code and, on success, grants access by
 * setting an httpOnly "alphaAccessCode" cookie. All failure modes return
 * the same generic message to prevent code enumeration.
 */
router.post("/validate-code", alphaCodeValidationLimiter, async (req, res) => {
  try {
    const { code } = req.body;
    // Reject missing or non-string payloads up front; calling .trim() on a
    // non-string would otherwise throw and surface as a 500.
    if (!code || typeof code !== "string") {
      return res.status(400).json({
        error: "Code is required",
      });
    }
    // Normalize code (uppercase, trim)
    const normalizedCode = code.trim().toUpperCase();
    // Short digest for log correlation: alpha codes are secrets and raw
    // user input in logs enables log injection, so never log either
    // value directly.
    const codeRef = crypto
      .createHash("sha256")
      .update(normalizedCode)
      .digest("hex")
      .slice(0, 8);
    // Validate code format before touching the database
    if (!/^ALPHA-[A-Z0-9]{8}$/.test(normalizedCode)) {
      logger.warn(`Invalid alpha code format attempted (ref ${codeRef})`);
      return res.status(400).json({
        error: "Invalid alpha code",
      });
    }
    // Find invitation in database
    const invitation = await AlphaInvitation.findOne({
      where: { code: normalizedCode },
    });
    // Generic error for invalid code (prevent enumeration)
    if (!invitation) {
      logger.warn(`Alpha code not found (ref ${codeRef})`);
      return res.status(400).json({
        error: "Invalid alpha code",
      });
    }
    // Check if code is revoked
    if (invitation.status === "revoked") {
      logger.warn(`Revoked alpha code attempted (ref ${codeRef})`);
      return res.status(400).json({
        error: "Invalid alpha code",
      });
    }
    // Set httpOnly cookie for alpha access
    const cookieData = {
      code: normalizedCode,
      validatedAt: new Date().toISOString(),
    };
    res.cookie("alphaAccessCode", cookieData, {
      httpOnly: true,
      secure: ["production", "prod", "qa"].includes(process.env.NODE_ENV),
      sameSite: "strict",
      maxAge: 30 * 24 * 60 * 60 * 1000, // 30 days
    });
    logger.info(`Alpha code validated successfully (ref ${codeRef})`);
    res.json({
      success: true,
      message: "Access granted",
    });
  } catch (error) {
    logger.error(`Error validating alpha code: ${error.message}`, { error });
    res.status(500).json({
      error: "Server error",
    });
  }
});
/**
 * GET /api/alpha/verify-session
 * Reports whether the current session (access cookie or authenticated
 * user) has alpha access.
 */
router.get("/verify-session", optionalAuth, async (req, res) => {
  try {
    const hasAccess = await checkAlphaAccess(req);
    return res.json({ hasAccess });
  } catch (error) {
    logger.error(`Error verifying alpha session: ${error.message}`, { error });
    return res.status(500).json({ error: "Server error" });
  }
});
module.exports = { router, checkAlphaAccess };

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +0,0 @@
const express = require('express');
const router = express.Router();
const { verifyBetaPassword } = require('../middleware/betaAuth');
// Beta verification endpoint
router.get('/verify', verifyBetaPassword, (req, res) => {
res.json({ success: true, message: 'Beta access granted' });
});
module.exports = router;

View File

@@ -0,0 +1,150 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const ConditionCheckService = require("../services/conditionCheckService");
const logger = require("../utils/logger");
const { validateS3Keys } = require("../utils/s3KeyValidator");
const { IMAGE_LIMITS } = require("../config/imageLimits");
const router = express.Router();
// Batch-fetch condition checks for a comma-separated list of rental ids.
// An absent or empty list short-circuits to an empty result.
router.get("/batch", authenticateToken, async (req, res) => {
  try {
    const { rentalIds } = req.query;
    const ids = rentalIds
      ? rentalIds.split(",").filter((id) => id.trim())
      : [];
    if (ids.length === 0) {
      return res.json({ success: true, conditionChecks: [] });
    }
    const conditionChecks =
      await ConditionCheckService.getConditionChecksForRentals(ids);
    return res.json({ success: true, conditionChecks });
  } catch (error) {
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.error("Error fetching batch condition checks", {
      error: error.message,
      stack: error.stack,
      rentalIds: req.query.rentalIds,
    });
    return res.status(500).json({
      success: false,
      error: "Failed to fetch condition checks",
    });
  }
});
/**
 * POST /:rentalId — submit a condition check for a rental.
 * Body: { checkType, notes, imageFilenames } where imageFilenames are S3
 * keys that must live under the "condition-checks" folder and respect
 * IMAGE_LIMITS.conditionChecks.
 */
router.post("/:rentalId", authenticateToken, async (req, res) => {
  try {
    const { rentalId } = req.params;
    const { checkType, notes, imageFilenames: rawImageFilenames } = req.body;
    const userId = req.user.id;
    // Ensure imageFilenames is an array (S3 keys); any non-array payload
    // is treated as "no images" rather than rejected.
    const imageFilenamesArray = Array.isArray(rawImageFilenames)
      ? rawImageFilenames
      : [];
    // Validate S3 keys format and folder
    const keyValidation = validateS3Keys(
      imageFilenamesArray,
      "condition-checks",
      {
        maxKeys: IMAGE_LIMITS.conditionChecks,
      }
    );
    if (!keyValidation.valid) {
      return res.status(400).json({
        success: false,
        error: keyValidation.error,
        details: keyValidation.invalidKeys,
      });
    }
    const imageFilenames = imageFilenamesArray;
    const conditionCheck = await ConditionCheckService.submitConditionCheck(
      rentalId,
      checkType,
      userId,
      imageFilenames,
      notes
    );
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.info("Condition check submitted", {
      rentalId,
      checkType,
      userId,
      photoCount: imageFilenames.length,
    });
    res.status(201).json({
      success: true,
      conditionCheck,
    });
  } catch (error) {
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.error("Error submitting condition check", {
      error: error.message,
      stack: error.stack,
      rentalId: req.params.rentalId,
      userId: req.user?.id,
    });
    // NOTE(review): service errors are echoed verbatim to the client as a
    // 400; confirm submitConditionCheck only throws user-safe messages.
    res.status(400).json({
      success: false,
      error: error.message,
    });
  }
});
// Get available condition checks for current user
router.get("/", authenticateToken, async (req, res) => {
  try {
    const userId = req.user.id;
    const { rentalIds } = req.query;

    // rentalIds is an optional comma-separated list; blank segments are dropped.
    let ids = [];
    if (rentalIds) {
      ids = rentalIds.split(",").filter((id) => id.trim());
    }

    const availableChecks = await ConditionCheckService.getAvailableChecks(
      userId,
      ids
    );

    res.json({
      success: true,
      availableChecks,
    });
  } catch (error) {
    logger.withRequestId(req.id).error("Error fetching available checks", {
      error: error.message,
      stack: error.stack,
      userId: req.user?.id,
    });
    // Generic message only; details stay in the server log.
    res.status(500).json({
      success: false,
      error: "Failed to fetch available checks",
    });
  }
});
module.exports = router;

View File

@@ -0,0 +1,68 @@
const express = require('express');
const { Feedback, User } = require('../models');
const { authenticateToken } = require('../middleware/auth');
const { validateFeedback, sanitizeInput } = require('../middleware/validation');
const logger = require('../utils/logger');
const emailServices = require('../services/email');
const router = express.Router();
// Submit new feedback
router.post('/', authenticateToken, sanitizeInput, validateFeedback, async (req, res, next) => {
  try {
    const { feedbackText, url } = req.body;
    const reqLogger = logger.withRequestId(req.id);

    const feedback = await Feedback.create({
      userId: req.user.id,
      feedbackText,
      url: url || null,
      // Stored for debugging context; may be absent on unusual clients.
      userAgent: req.headers['user-agent'] || null
    });

    reqLogger.info("Feedback submitted", {
      userId: req.user.id,
      feedbackId: feedback.id
    });

    // Both emails are best-effort: a failure is logged but never fails the request.
    const sendSafely = async (failureMessage, sendFn) => {
      try {
        await sendFn();
      } catch (emailError) {
        reqLogger.error(failureMessage, {
          error: emailError.message,
          stack: emailError.stack,
          userId: req.user.id,
          feedbackId: feedback.id
        });
      }
    };

    // Confirmation to the submitter, then notification to the admin.
    await sendSafely("Failed to send feedback confirmation email", () =>
      emailServices.feedback.sendFeedbackConfirmation(req.user, feedback)
    );
    await sendSafely("Failed to send feedback notification to admin", () =>
      emailServices.feedback.sendFeedbackNotificationToAdmin(req.user, feedback)
    );

    res.status(201).json(feedback);
  } catch (error) {
    logger.withRequestId(req.id).error("Feedback submission failed", {
      error: error.message,
      stack: error.stack,
      userId: req.user.id
    });
    next(error);
  }
});
module.exports = router;

1750
backend/routes/forum.js Normal file

File diff suppressed because it is too large Load Diff

121
backend/routes/health.js Normal file
View File

@@ -0,0 +1,121 @@
const express = require("express");
const router = express.Router();
const { sequelize } = require("../models");
const s3Service = require("../services/s3Service");
const logger = require("../utils/logger");
/**
 * Health check endpoint for load balancers and monitoring
 * GET /health
 *
 * Returns:
 * - 200: All services healthy
 * - 503: One or more services unhealthy
 */
router.get("/", async (req, res) => {
  const startTime = Date.now();
  let allHealthy = true;
  const checks = {
    database: { status: "unknown", latency: null },
    s3: { status: "unknown", latency: null },
  };

  // Database: a successful authenticate() round-trip counts as healthy.
  try {
    const dbStart = Date.now();
    await sequelize.authenticate();
    checks.database = {
      status: "healthy",
      latency: Date.now() - dbStart,
    };
  } catch (error) {
    allHealthy = false;
    checks.database = {
      status: "unhealthy",
      error: error.message,
      latency: Date.now() - startTime,
    };
    logger.error("Health check: Database connection failed", {
      error: error.message,
    });
  }

  // S3 is reported healthy whenever the service is initialized; a deeper
  // check (e.g. listing bucket contents) would add latency to every probe.
  if (!s3Service.isEnabled()) {
    checks.s3 = {
      status: "disabled",
      latency: 0,
    };
  } else {
    try {
      const s3Start = Date.now();
      checks.s3 = {
        status: "healthy",
        latency: Date.now() - s3Start,
        bucket: process.env.S3_BUCKET,
      };
    } catch (error) {
      allHealthy = false;
      checks.s3 = {
        status: "unhealthy",
        error: error.message,
        latency: Date.now() - startTime,
      };
      logger.error("Health check: S3 check failed", {
        error: error.message,
      });
    }
  }

  // Per-service details are logged, not returned, keeping the public
  // response minimal.
  if (!allHealthy) {
    logger.warn("Health check failed", { checks });
  }
  res.status(allHealthy ? 200 : 503).json({
    status: allHealthy ? "healthy" : "unhealthy",
  });
});
/**
 * Liveness probe - simple check that the process is running
 * GET /health/live
 *
 * Used by Kubernetes/ECS for liveness probes
 * Returns 200 if the process is alive
 */
router.get("/live", (req, res) => {
  // No dependency checks here: reaching this handler proves the process runs.
  const payload = {
    status: "alive",
    timestamp: new Date().toISOString(),
  };
  res.status(200).json(payload);
});
/**
 * Readiness probe - check if the service is ready to accept traffic
 * GET /health/ready
 *
 * Used by load balancers to determine if instance should receive traffic
 * Checks critical dependencies (database)
 */
router.get("/ready", async (req, res) => {
  try {
    // The database is the hard dependency gating readiness.
    await sequelize.authenticate();
    res.status(200).json({
      status: "ready",
      timestamp: new Date().toISOString(),
    });
  } catch (error) {
    logger.error("Readiness check failed", {
      error: error.message,
      stack: error.stack,
    });
    // Generic reason only; connection details stay in the server log.
    res.status(503).json({
      status: "not_ready",
      timestamp: new Date().toISOString(),
      error: "Database connection failed",
    });
  }
});
module.exports = router;

View File

@@ -1,286 +0,0 @@
const express = require('express');
const { Op } = require('sequelize');
const { ItemRequest, ItemRequestResponse, User, Item } = require('../models');
const { authenticateToken } = require('../middleware/auth');
const router = express.Router();
// List item requests with optional text search, status filter, and pagination.
// Responds with { requests, totalPages, currentPage, totalRequests }.
router.get('/', async (req, res) => {
  try {
    const {
      search,
      status = 'open',
      page = 1,
      limit = 20
    } = req.query;

    // Clamp pagination inputs so malformed query strings cannot produce
    // NaN offsets or unbounded page sizes.
    const pageNum = Math.max(1, Number.parseInt(page, 10) || 1);
    const limitNum = Math.min(100, Math.max(1, Number.parseInt(limit, 10) || 20));

    const where = { status };
    if (search) {
      // Case-insensitive match on title or description.
      where[Op.or] = [
        { title: { [Op.iLike]: `%${search}%` } },
        { description: { [Op.iLike]: `%${search}%` } }
      ];
    }

    const offset = (pageNum - 1) * limitNum;
    const { count, rows } = await ItemRequest.findAndCountAll({
      where,
      include: [
        {
          model: User,
          as: 'requester',
          attributes: ['id', 'username', 'firstName', 'lastName']
        }
      ],
      limit: limitNum,
      offset,
      order: [['createdAt', 'DESC']]
    });

    res.json({
      requests: rows,
      totalPages: Math.ceil(count / limitNum),
      currentPage: pageNum,
      totalRequests: count
    });
  } catch (error) {
    // Log detail server-side; never echo internal error messages (which may
    // include SQL/ORM fragments) back to the client.
    console.error('Item request list failed:', error);
    res.status(500).json({ error: 'Failed to fetch item requests' });
  }
});
// List the authenticated user's own item requests, including each request's
// responses, the responders, and any existing item offered in a response.
router.get('/my-requests', authenticateToken, async (req, res) => {
  try {
    const requests = await ItemRequest.findAll({
      where: { requesterId: req.user.id },
      include: [
        {
          model: User,
          as: 'requester',
          attributes: ['id', 'username', 'firstName', 'lastName']
        },
        {
          model: ItemRequestResponse,
          as: 'responses',
          include: [
            {
              model: User,
              as: 'responder',
              attributes: ['id', 'username', 'firstName', 'lastName']
            },
            {
              model: Item,
              as: 'existingItem'
            }
          ]
        }
      ],
      order: [['createdAt', 'DESC']]
    });
    res.json(requests);
  } catch (error) {
    // Log detail server-side; return a generic message to avoid leaking internals.
    console.error('My item requests fetch failed:', error);
    res.status(500).json({ error: 'Failed to fetch item requests' });
  }
});
// Fetch a single item request by id, with requester, responses, responders,
// and any offered existing item. 404 when the id does not exist.
router.get('/:id', async (req, res) => {
  try {
    const request = await ItemRequest.findByPk(req.params.id, {
      include: [
        {
          model: User,
          as: 'requester',
          attributes: ['id', 'username', 'firstName', 'lastName']
        },
        {
          model: ItemRequestResponse,
          as: 'responses',
          include: [
            {
              model: User,
              as: 'responder',
              attributes: ['id', 'username', 'firstName', 'lastName']
            },
            {
              model: Item,
              as: 'existingItem'
            }
          ]
        }
      ]
    });
    if (!request) {
      return res.status(404).json({ error: 'Item request not found' });
    }
    res.json(request);
  } catch (error) {
    // Log detail server-side; return a generic message to avoid leaking internals.
    console.error('Item request fetch failed:', error);
    res.status(500).json({ error: 'Failed to fetch item request' });
  }
});
// Create a new item request owned by the authenticated user.
router.post('/', authenticateToken, async (req, res) => {
  try {
    // NOTE(review): spreading req.body lets clients set any model attribute
    // (mass assignment); consider whitelisting fields as done in items routes.
    const request = await ItemRequest.create({
      ...req.body,
      requesterId: req.user.id
    });
    // Reload with the requester association for the response payload.
    const requestWithRequester = await ItemRequest.findByPk(request.id, {
      include: [
        {
          model: User,
          as: 'requester',
          attributes: ['id', 'username', 'firstName', 'lastName']
        }
      ]
    });
    res.status(201).json(requestWithRequester);
  } catch (error) {
    // Model validation problems are the client's fault: report them as 400.
    if (error.name === 'SequelizeValidationError') {
      return res
        .status(400)
        .json({ error: error.errors.map((e) => e.message).join('; ') });
    }
    // Everything else: log detail server-side, return a generic 500.
    console.error('Item request creation failed:', error);
    res.status(500).json({ error: 'Failed to create item request' });
  }
});
// Update an item request; only its requester may modify it.
router.put('/:id', authenticateToken, async (req, res) => {
  try {
    const request = await ItemRequest.findByPk(req.params.id);
    if (!request) {
      return res.status(404).json({ error: 'Item request not found' });
    }
    if (request.requesterId !== req.user.id) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    // NOTE(review): req.body is applied unfiltered (mass assignment);
    // consider whitelisting updatable fields.
    await request.update(req.body);
    // Reload with the requester association for the response payload.
    const updatedRequest = await ItemRequest.findByPk(request.id, {
      include: [
        {
          model: User,
          as: 'requester',
          attributes: ['id', 'username', 'firstName', 'lastName']
        }
      ]
    });
    res.json(updatedRequest);
  } catch (error) {
    // Model validation problems are the client's fault: report them as 400.
    if (error.name === 'SequelizeValidationError') {
      return res
        .status(400)
        .json({ error: error.errors.map((e) => e.message).join('; ') });
    }
    console.error('Item request update failed:', error);
    res.status(500).json({ error: 'Failed to update item request' });
  }
});
// Delete an item request; only its requester may remove it.
router.delete('/:id', authenticateToken, async (req, res) => {
  try {
    const request = await ItemRequest.findByPk(req.params.id);
    if (!request) {
      return res.status(404).json({ error: 'Item request not found' });
    }
    if (request.requesterId !== req.user.id) {
      return res.status(403).json({ error: 'Unauthorized' });
    }
    await request.destroy();
    res.status(204).send();
  } catch (error) {
    // Log detail server-side; return a generic message to avoid leaking internals.
    console.error('Item request deletion failed:', error);
    res.status(500).json({ error: 'Failed to delete item request' });
  }
});
// Respond to an open item request. Requesters cannot respond to their own
// requests, and closed requests reject new responses.
router.post('/:id/responses', authenticateToken, async (req, res) => {
  try {
    const request = await ItemRequest.findByPk(req.params.id);
    if (!request) {
      return res.status(404).json({ error: 'Item request not found' });
    }
    if (request.requesterId === req.user.id) {
      return res.status(400).json({ error: 'Cannot respond to your own request' });
    }
    if (request.status !== 'open') {
      return res.status(400).json({ error: 'Cannot respond to closed request' });
    }
    // NOTE(review): req.body is spread unfiltered (mass assignment);
    // consider whitelisting response fields.
    const response = await ItemRequestResponse.create({
      ...req.body,
      itemRequestId: req.params.id,
      responderId: req.user.id
    });
    // Atomic DB-side increment avoids read-modify-write races on the counter.
    await request.increment('responseCount');
    // Reload with responder and offered-item associations for the payload.
    const responseWithDetails = await ItemRequestResponse.findByPk(response.id, {
      include: [
        {
          model: User,
          as: 'responder',
          attributes: ['id', 'username', 'firstName', 'lastName']
        },
        {
          model: Item,
          as: 'existingItem'
        }
      ]
    });
    res.status(201).json(responseWithDetails);
  } catch (error) {
    // Model validation problems are the client's fault: report them as 400.
    if (error.name === 'SequelizeValidationError') {
      return res
        .status(400)
        .json({ error: error.errors.map((e) => e.message).join('; ') });
    }
    console.error('Item request response creation failed:', error);
    res.status(500).json({ error: 'Failed to create response' });
  }
});
// Update a response's status; only the parent request's owner may do so.
// Accepting a response also marks the parent request fulfilled.
router.put('/responses/:responseId/status', authenticateToken, async (req, res) => {
  try {
    const { status } = req.body;
    const response = await ItemRequestResponse.findByPk(req.params.responseId, {
      include: [
        {
          model: ItemRequest,
          as: 'itemRequest'
        }
      ]
    });
    if (!response) {
      return res.status(404).json({ error: 'Response not found' });
    }
    if (response.itemRequest.requesterId !== req.user.id) {
      return res.status(403).json({ error: 'Only the requester can update response status' });
    }
    // NOTE(review): status is not validated against an allowed set here; the
    // model/DB constraint is the only guard. Confirm valid values upstream.
    await response.update({ status });
    if (status === 'accepted') {
      await response.itemRequest.update({ status: 'fulfilled' });
    }
    // Reload with responder and offered-item associations for the payload.
    const updatedResponse = await ItemRequestResponse.findByPk(response.id, {
      include: [
        {
          model: User,
          as: 'responder',
          attributes: ['id', 'username', 'firstName', 'lastName']
        },
        {
          model: Item,
          as: 'existingItem'
        }
      ]
    });
    res.json(updatedResponse);
  } catch (error) {
    // Log detail server-side; return a generic message to avoid leaking internals.
    console.error('Response status update failed:', error);
    res.status(500).json({ error: 'Failed to update response status' });
  }
});
module.exports = router;

View File

@@ -1,10 +1,60 @@
const express = require("express");
const { Op } = require("sequelize");
const { Item, User, Rental } = require("../models"); // Import from models/index.js to get models with associations
const { authenticateToken } = require("../middleware/auth");
const { Op, Sequelize } = require("sequelize");
const { Item, User, Rental, sequelize } = require("../models"); // Import from models/index.js to get models with associations
const { authenticateToken, requireVerifiedEmail, requireAdmin, optionalAuth } = require("../middleware/auth");
const { validateCoordinatesQuery, validateCoordinatesBody, handleValidationErrors } = require("../middleware/validation");
const logger = require("../utils/logger");
const { validateS3Keys } = require("../utils/s3KeyValidator");
const { IMAGE_LIMITS } = require("../config/imageLimits");
const router = express.Router();
router.get("/", async (req, res) => {
// Allowed fields for item create/update (prevents mass assignment)
const ALLOWED_ITEM_FIELDS = [
'name',
'description',
'pickUpAvailable',
'localDeliveryAvailable',
'localDeliveryRadius',
'shippingAvailable',
'inPlaceUseAvailable',
'pricePerHour',
'pricePerDay',
'pricePerWeek',
'pricePerMonth',
'replacementCost',
'address1',
'address2',
'city',
'state',
'zipCode',
'country',
'latitude',
'longitude',
'imageFilenames',
'isAvailable',
'rules',
'availableAfter',
'availableBefore',
'specifyTimesPerDay',
'weeklyTimes',
];
/**
* Extract only allowed fields from request body
* @param {Object} body - Request body
* @returns {Object} - Object with only allowed fields
*/
function extractAllowedFields(body) {
const result = {};
for (const field of ALLOWED_ITEM_FIELDS) {
if (body[field] !== undefined) {
result[field] = body[field];
}
}
return result;
}
router.get("/", validateCoordinatesQuery, async (req, res, next) => {
try {
const {
minPrice,
@@ -12,19 +62,66 @@ router.get("/", async (req, res) => {
city,
zipCode,
search,
lat,
lng,
radius = 25,
page = 1,
limit = 20,
} = req.query;
const where = {};
const where = {
isDeleted: false // Always exclude soft-deleted items from public browse
};
if (minPrice || maxPrice) {
where.pricePerDay = {};
if (minPrice) where.pricePerDay[Op.gte] = minPrice;
if (maxPrice) where.pricePerDay[Op.lte] = maxPrice;
}
if (city) where.city = { [Op.iLike]: `%${city}%` };
if (zipCode) where.zipCode = { [Op.iLike]: `%${zipCode}%` };
// Location filtering: Radius search OR city/ZIP fallback
if (lat && lng) {
// Parse and validate coordinates
const latNum = parseFloat(lat);
const lngNum = parseFloat(lng);
const radiusNum = parseFloat(radius);
if (!isNaN(latNum) && !isNaN(lngNum) && !isNaN(radiusNum)) {
// Bounding box pre-filter (fast, uses indexes)
// ~69 miles per degree latitude, longitude varies by latitude
const latDelta = radiusNum / 69;
const lngDelta = radiusNum / (69 * Math.cos(latNum * Math.PI / 180));
where.latitude = {
[Op.and]: [
{ [Op.gte]: latNum - latDelta },
{ [Op.lte]: latNum + latDelta },
{ [Op.ne]: null }
]
};
where.longitude = {
[Op.and]: [
{ [Op.gte]: lngNum - lngDelta },
{ [Op.lte]: lngNum + lngDelta },
{ [Op.ne]: null }
]
};
// Haversine formula for exact distance (applied after bounding box)
// 3959 = Earth's radius in miles
where[Op.and] = sequelize.literal(`
(3959 * acos(
cos(radians(${latNum})) * cos(radians("Item"."latitude")) *
cos(radians("Item"."longitude") - radians(${lngNum})) +
sin(radians(${latNum})) * sin(radians("Item"."latitude"))
)) <= ${radiusNum}
`);
}
} else {
// Fallback to city/ZIP string matching
if (city) where.city = { [Op.iLike]: `%${city}%` };
if (zipCode) where.zipCode = { [Op.iLike]: `%${zipCode}%` };
}
if (search) {
where[Op.or] = [
{ name: { [Op.iLike]: `%${search}%` } },
@@ -40,7 +137,11 @@ router.get("/", async (req, res) => {
{
model: User,
as: "owner",
attributes: ["id", "username", "firstName", "lastName"],
attributes: ["id", "firstName", "lastName", "imageFilename"],
where: {
isBanned: { [Op.ne]: true }
},
required: true,
},
],
limit: parseInt(limit),
@@ -48,18 +149,44 @@ router.get("/", async (req, res) => {
order: [["createdAt", "DESC"]],
});
// Round coordinates to 2 decimal places for map display while keeping precise values in database
const itemsWithRoundedCoords = rows.map(item => {
const itemData = item.toJSON();
if (itemData.latitude !== null && itemData.latitude !== undefined) {
itemData.latitude = Math.round(parseFloat(itemData.latitude) * 100) / 100;
}
if (itemData.longitude !== null && itemData.longitude !== undefined) {
itemData.longitude = Math.round(parseFloat(itemData.longitude) * 100) / 100;
}
return itemData;
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Items search completed", {
filters: { minPrice, maxPrice, city, zipCode, search, lat, lng, radius },
resultsCount: count,
page: parseInt(page),
limit: parseInt(limit)
});
res.json({
items: rows,
items: itemsWithRoundedCoords,
totalPages: Math.ceil(count / limit),
currentPage: parseInt(page),
totalItems: count,
});
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Items search failed", {
error: error.message,
stack: error.stack,
query: req.query
});
next(error);
}
});
router.get("/recommendations", authenticateToken, async (req, res) => {
router.get("/recommendations", authenticateToken, async (req, res, next) => {
try {
const userRentals = await Rental.findAll({
where: { renterId: req.user.id },
@@ -69,62 +196,94 @@ router.get("/recommendations", authenticateToken, async (req, res) => {
// For now, just return random available items as recommendations
const recommendations = await Item.findAll({
where: {
availability: true,
isAvailable: true,
isDeleted: false,
},
limit: 10,
order: [["createdAt", "DESC"]],
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Recommendations fetched", {
userId: req.user.id,
recommendationsCount: recommendations.length
});
res.json(recommendations);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Recommendations fetch failed", {
error: error.message,
stack: error.stack,
userId: req.user.id
});
next(error);
}
});
// Public endpoint to get reviews for a specific item (must come before /:id route)
router.get('/:id/reviews', async (req, res) => {
router.get('/:id/reviews', async (req, res, next) => {
try {
const { Rental, User } = require('../models');
const reviews = await Rental.findAll({
where: {
itemId: req.params.id,
status: 'completed',
rating: { [Op.not]: null },
review: { [Op.not]: null }
itemRating: { [Op.not]: null },
itemReview: { [Op.not]: null },
itemReviewVisible: true
},
include: [
{
model: User,
as: 'renter',
attributes: ['id', 'firstName', 'lastName']
{
model: User,
as: 'renter',
attributes: ['id', 'firstName', 'lastName', 'imageFilename']
}
],
order: [['createdAt', 'DESC']]
});
const averageRating = reviews.length > 0
? reviews.reduce((sum, review) => sum + review.rating, 0) / reviews.length
? reviews.reduce((sum, review) => sum + review.itemRating, 0) / reviews.length
: 0;
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Item reviews fetched", {
itemId: req.params.id,
reviewsCount: reviews.length,
averageRating
});
res.json({
reviews,
averageRating,
totalReviews: reviews.length
});
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Item reviews fetch failed", {
error: error.message,
stack: error.stack,
itemId: req.params.id
});
next(error);
}
});
router.get("/:id", async (req, res) => {
router.get("/:id", optionalAuth, async (req, res, next) => {
try {
const item = await Item.findByPk(req.params.id, {
include: [
{
model: User,
as: "owner",
attributes: ["id", "username", "firstName", "lastName"],
attributes: ["id", "firstName", "lastName", "imageFilename"],
},
{
model: User,
as: "deleter",
attributes: ["id", "firstName", "lastName"],
},
],
});
@@ -133,16 +292,89 @@ router.get("/:id", async (req, res) => {
return res.status(404).json({ error: "Item not found" });
}
res.json(item);
// Check if item is deleted - only allow admins to view
if (item.isDeleted) {
const isAdmin = req.user?.role === 'admin';
if (!isAdmin) {
return res.status(404).json({ error: "Item not found" });
}
}
// Round coordinates to 2 decimal places for map display while keeping precise values in database
const itemResponse = item.toJSON();
if (itemResponse.latitude !== null && itemResponse.latitude !== undefined) {
itemResponse.latitude = Math.round(parseFloat(itemResponse.latitude) * 100) / 100;
}
if (itemResponse.longitude !== null && itemResponse.longitude !== undefined) {
itemResponse.longitude = Math.round(parseFloat(itemResponse.longitude) * 100) / 100;
}
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Item fetched", {
itemId: req.params.id,
ownerId: item.ownerId
});
res.json(itemResponse);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Item fetch failed", {
error: error.message,
stack: error.stack,
itemId: req.params.id
});
next(error);
}
});
router.post("/", authenticateToken, async (req, res) => {
router.post("/", authenticateToken, requireVerifiedEmail, ...validateCoordinatesBody, handleValidationErrors, async (req, res, next) => {
try {
// Extract only allowed fields (prevents mass assignment)
const allowedData = extractAllowedFields(req.body);
// Validate imageFilenames - at least one image is required
const imageFilenames = Array.isArray(allowedData.imageFilenames)
? allowedData.imageFilenames
: [];
if (imageFilenames.length === 0) {
return res.status(400).json({
error: "At least one image is required to create a listing"
});
}
// Validate required fields
if (!allowedData.name || !allowedData.name.trim()) {
return res.status(400).json({ error: "Item name is required" });
}
if (!allowedData.address1 || !allowedData.address1.trim()) {
return res.status(400).json({ error: "Address is required" });
}
if (!allowedData.city || !allowedData.city.trim()) {
return res.status(400).json({ error: "City is required" });
}
if (!allowedData.state || !allowedData.state.trim()) {
return res.status(400).json({ error: "State is required" });
}
if (!allowedData.zipCode || !allowedData.zipCode.trim()) {
return res.status(400).json({ error: "ZIP code is required" });
}
if (!allowedData.replacementCost || Number(allowedData.replacementCost) <= 0) {
return res.status(400).json({ error: "Replacement cost is required" });
}
const keyValidation = validateS3Keys(imageFilenames, 'items', { maxKeys: IMAGE_LIMITS.items });
if (!keyValidation.valid) {
return res.status(400).json({
error: keyValidation.error,
details: keyValidation.invalidKeys
});
}
allowedData.imageFilenames = imageFilenames;
const item = await Item.create({
...req.body,
...allowedData,
ownerId: req.user.id,
});
@@ -151,18 +383,60 @@ router.post("/", authenticateToken, async (req, res) => {
{
model: User,
as: "owner",
attributes: ["id", "username", "firstName", "lastName"],
attributes: ["id", "firstName", "lastName", "email", "stripeConnectedAccountId"],
},
],
});
// Check if this is the owner's first listing
const ownerItemCount = await Item.count({
where: { ownerId: req.user.id }
});
// If first listing, send celebration email
if (ownerItemCount === 1) {
try {
const emailServices = require("../services/email");
await emailServices.userEngagement.sendFirstListingCelebrationEmail(
itemWithOwner.owner,
itemWithOwner
);
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("First listing celebration email sent", { ownerId: req.user.id });
} catch (emailError) {
// Log but don't fail the item creation
const reqLogger = logger.withRequestId(req.id);
reqLogger.error('Failed to send first listing celebration email', {
error: emailError.message,
stack: emailError.stack,
ownerId: req.user.id,
itemId: item.id
});
}
}
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Item created", {
itemId: item.id,
ownerId: req.user.id,
itemName: req.body.name,
isFirstListing: ownerItemCount === 1
});
res.status(201).json(itemWithOwner);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Item creation failed", {
error: error.message,
stack: error.stack,
ownerId: req.user.id,
itemData: logger.sanitize(req.body)
});
next(error);
}
});
router.put("/:id", authenticateToken, async (req, res) => {
router.put("/:id", authenticateToken, ...validateCoordinatesBody, handleValidationErrors, async (req, res, next) => {
try {
const item = await Item.findByPk(req.params.id);
@@ -174,25 +448,84 @@ router.put("/:id", authenticateToken, async (req, res) => {
return res.status(403).json({ error: "Unauthorized" });
}
await item.update(req.body);
// Extract only allowed fields (prevents mass assignment)
const allowedData = extractAllowedFields(req.body);
// Validate imageFilenames if provided
if (allowedData.imageFilenames !== undefined) {
const imageFilenames = Array.isArray(allowedData.imageFilenames)
? allowedData.imageFilenames
: [];
// Require at least one image
if (imageFilenames.length === 0) {
return res.status(400).json({
error: "At least one image is required for a listing"
});
}
const keyValidation = validateS3Keys(imageFilenames, 'items', { maxKeys: IMAGE_LIMITS.items });
if (!keyValidation.valid) {
return res.status(400).json({
error: keyValidation.error,
details: keyValidation.invalidKeys
});
}
allowedData.imageFilenames = imageFilenames;
}
// Validate required fields if they are being updated
if (allowedData.name !== undefined && (!allowedData.name || !allowedData.name.trim())) {
return res.status(400).json({ error: "Item name is required" });
}
if (allowedData.address1 !== undefined && (!allowedData.address1 || !allowedData.address1.trim())) {
return res.status(400).json({ error: "Address is required" });
}
if (allowedData.city !== undefined && (!allowedData.city || !allowedData.city.trim())) {
return res.status(400).json({ error: "City is required" });
}
if (allowedData.state !== undefined && (!allowedData.state || !allowedData.state.trim())) {
return res.status(400).json({ error: "State is required" });
}
if (allowedData.zipCode !== undefined && (!allowedData.zipCode || !allowedData.zipCode.trim())) {
return res.status(400).json({ error: "ZIP code is required" });
}
if (allowedData.replacementCost !== undefined && (!allowedData.replacementCost || Number(allowedData.replacementCost) <= 0)) {
return res.status(400).json({ error: "Replacement cost is required" });
}
await item.update(allowedData);
const updatedItem = await Item.findByPk(item.id, {
include: [
{
model: User,
as: "owner",
attributes: ["id", "username", "firstName", "lastName"],
attributes: ["id", "firstName", "lastName"],
},
],
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Item updated", {
itemId: req.params.id,
ownerId: req.user.id
});
res.json(updatedItem);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Item update failed", {
error: error.message,
stack: error.stack,
itemId: req.params.id,
ownerId: req.user.id
});
next(error);
}
});
router.delete("/:id", authenticateToken, async (req, res) => {
router.delete("/:id", authenticateToken, async (req, res, next) => {
try {
const item = await Item.findByPk(req.params.id);
@@ -205,9 +538,181 @@ router.delete("/:id", authenticateToken, async (req, res) => {
}
await item.destroy();
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Item deleted", {
itemId: req.params.id,
ownerId: req.user.id
});
res.status(204).send();
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Item deletion failed", {
error: error.message,
stack: error.stack,
itemId: req.params.id,
ownerId: req.user.id
});
next(error);
}
});
// Admin endpoints
// Soft-delete an item on behalf of an admin. Requires a deletion reason,
// refuses when active/upcoming rentals exist, and notifies the owner by email.
router.delete("/admin/:id", authenticateToken, requireAdmin, async (req, res, next) => {
  // One request-scoped logger for the whole handler (previously the email
  // paths used the bare logger, losing the request id).
  const reqLogger = logger.withRequestId(req.id);
  try {
    const { reason } = req.body;
    if (!reason || !reason.trim()) {
      return res.status(400).json({ error: "Deletion reason is required" });
    }
    const item = await Item.findByPk(req.params.id, {
      include: [
        {
          model: User,
          as: "owner",
          attributes: ["id", "firstName", "lastName", "email"],
        },
      ],
    });
    if (!item) {
      return res.status(404).json({ error: "Item not found" });
    }
    if (item.isDeleted) {
      return res.status(400).json({ error: "Item is already deleted" });
    }
    // Refuse deletion while the item still has pending/confirmed/active rentals.
    const activeRentals = await Rental.count({
      where: {
        itemId: req.params.id,
        status: {
          [Op.in]: ['pending', 'confirmed', 'active']
        }
      }
    });
    if (activeRentals > 0) {
      return res.status(400).json({
        error: "Cannot delete item with active or upcoming rentals",
        code: "ACTIVE_RENTALS_EXIST",
        activeRentalsCount: activeRentals
      });
    }
    // Soft delete: keep the row for audit/restore, recording who/when/why.
    await item.update({
      isDeleted: true,
      deletedBy: req.user.id,
      deletedAt: new Date(),
      deletionReason: reason.trim()
    });
    const updatedItem = await Item.findByPk(item.id, {
      include: [
        {
          model: User,
          as: "owner",
          attributes: ["id", "firstName", "lastName"],
        },
        {
          model: User,
          as: "deleter",
          attributes: ["id", "firstName", "lastName"],
        }
      ],
    });
    // Owner notification is best-effort: failures are logged, never surfaced.
    try {
      const emailServices = require("../services/email");
      await emailServices.userEngagement.sendItemDeletionNotificationToOwner(
        item.owner,
        item,
        reason.trim()
      );
      reqLogger.info("Item deletion notification email sent", { ownerId: item.ownerId, itemId: item.id });
    } catch (emailError) {
      // Log but don't fail the deletion
      reqLogger.error('Failed to send item deletion notification email', {
        error: emailError.message,
        stack: emailError.stack,
        ownerId: item.ownerId,
        itemId: item.id
      });
    }
    reqLogger.info("Item soft deleted by admin", {
      itemId: req.params.id,
      deletedBy: req.user.id,
      ownerId: item.ownerId,
      reason: reason.trim()
    });
    res.json(updatedItem);
  } catch (error) {
    reqLogger.error("Admin item soft delete failed", {
      error: error.message,
      stack: error.stack,
      itemId: req.params.id,
      adminId: req.user.id
    });
    next(error);
  }
});
// Restore a previously soft-deleted item (admin only).
router.patch("/admin/:id/restore", authenticateToken, requireAdmin, async (req, res, next) => {
  try {
    const item = await Item.findByPk(req.params.id);
    if (!item) {
      return res.status(404).json({ error: "Item not found" });
    }
    if (!item.isDeleted) {
      return res.status(400).json({ error: "Item is not deleted" });
    }

    // Clear all soft-delete bookkeeping in a single update.
    await item.update({
      isDeleted: false,
      deletedBy: null,
      deletedAt: null,
      deletionReason: null
    });

    // Reload with the owner association for the response payload.
    const restored = await Item.findByPk(item.id, {
      include: [
        {
          model: User,
          as: "owner",
          attributes: ["id", "firstName", "lastName"],
        }
      ],
    });

    logger.withRequestId(req.id).info("Item restored by admin", {
      itemId: req.params.id,
      restoredBy: req.user.id,
      ownerId: item.ownerId
    });

    res.json(restored);
  } catch (error) {
    logger.withRequestId(req.id).error("Admin item restore failed", {
      error: error.message,
      stack: error.stack,
      itemId: req.params.id,
      adminId: req.user.id
    });
    next(error);
  }
});

199
backend/routes/maps.js Normal file
View File

@@ -0,0 +1,199 @@
const express = require("express");
const router = express.Router();
const { authenticateToken } = require("../middleware/auth");
const rateLimiter = require("../middleware/rateLimiter");
const googleMapsService = require("../services/googleMapsService");
const logger = require("../utils/logger");
// Input validation middleware: trims and length-caps free-text fields and
// restricts place IDs to a safe character set before hitting Google APIs.
const validateInput = (req, res, next) => {
  const { body } = req;

  // Autocomplete query text: trim, then cap length.
  if (body.input) {
    body.input = String(body.input).trim();
    if (body.input.length > 500) {
      return res.status(400).json({ error: "Input too long" });
    }
  }

  // Place IDs may only contain alphanumerics, underscore, and hyphen.
  if (body.placeId) {
    body.placeId = String(body.placeId).trim();
    if (!/^[A-Za-z0-9_-]+$/.test(body.placeId)) {
      return res.status(400).json({ error: "Invalid place ID format" });
    }
  }

  // Geocoding addresses: trim, then cap length.
  if (body.address) {
    body.address = String(body.address).trim();
    if (body.address.length > 500) {
      return res.status(400).json({ error: "Address too long" });
    }
  }

  next();
};
// Error handling helper: logs a maps-service failure and maps known error
// message patterns onto appropriate HTTP statuses.
const handleServiceError = (error, res, req) => {
  logger.withRequestId(req?.id).error("Maps service error", {
    error: error.message,
    stack: error.stack
  });

  const message = error.message;

  // Misconfiguration is surfaced as a temporary outage, not a client error.
  if (message.includes("API key not configured")) {
    return res.status(503).json({
      error: "Maps service temporarily unavailable",
      details: "Configuration issue",
    });
  }

  // Upstream quota exhaustion maps to 429 so clients back off.
  if (message.includes("quota exceeded")) {
    return res.status(429).json({
      error: "Service temporarily unavailable due to high demand",
      details: "Please try again later",
    });
  }

  return res.status(500).json({
    error: "Failed to process request",
    details: message,
  });
};
/**
 * POST /api/maps/places/autocomplete
 * Proxy for Google Places Autocomplete API
 */
router.post(
  "/places/autocomplete",
  authenticateToken,
  rateLimiter.placesAutocomplete,
  validateInput,
  async (req, res) => {
    try {
      const { input, types, componentRestrictions, sessionToken } = req.body;

      // Queries shorter than two characters return an empty prediction set
      // without an upstream call.
      if (!input || input.length < 2) {
        return res.json({ predictions: [] });
      }

      const result = await googleMapsService.getPlacesAutocomplete(input, {
        types: types || ["address"],
        componentRestrictions,
        sessionToken,
      });

      // Monitoring log records only sizes, never the query text itself.
      logger.withRequestId(req.id).info("Places Autocomplete request", {
        userId: req.user?.id || "anonymous",
        queryLength: input.length,
        resultsCount: result.predictions?.length || 0
      });

      res.json(result);
    } catch (error) {
      handleServiceError(error, res, req);
    }
  }
);
/**
 * POST /api/maps/places/details
 * Proxy for Google Places Details API.
 * Auth + per-route rate limiting applied; placeId format is checked by
 * validateInput.
 */
router.post(
  "/places/details",
  authenticateToken,
  rateLimiter.placeDetails,
  validateInput,
  async (req, res) => {
    try {
      const { placeId, sessionToken } = req.body;

      if (!placeId) {
        return res.status(400).json({ error: "Place ID is required" });
      }

      const result = await googleMapsService.getPlaceDetails(placeId, {
        sessionToken,
      });

      // Log request for monitoring — only a prefix of the place ID.
      logger.withRequestId(req.id).info("Place Details request", {
        userId: req.user?.id || "anonymous",
        placeIdPrefix: placeId.substring(0, 10) + "..."
      });

      res.json(result);
    } catch (error) {
      handleServiceError(error, res, req);
    }
  }
);
/**
 * POST /api/maps/geocode
 * Proxy for Google Geocoding API.
 * Auth + per-route rate limiting applied; address length is bounded by
 * validateInput.
 */
router.post(
  "/geocode",
  authenticateToken,
  rateLimiter.geocoding,
  validateInput,
  async (req, res) => {
    try {
      const { address, componentRestrictions } = req.body;

      if (!address) {
        return res.status(400).json({ error: "Address is required" });
      }

      const result = await googleMapsService.geocodeAddress(address, {
        componentRestrictions,
      });

      // Log request for monitoring (address length only — no address text).
      logger.withRequestId(req.id).info("Geocoding request", {
        userId: req.user?.id || "anonymous",
        addressLength: address.length
      });

      res.json(result);
    } catch (error) {
      handleServiceError(error, res, req);
    }
  }
);
/**
 * GET /api/maps/health
 * Health check for the Maps proxy: 200 when an API key is configured,
 * 503 otherwise. Unauthenticated by design (used by monitoring).
 */
router.get("/health", (req, res) => {
  const apiKeyConfigured = googleMapsService.isConfigured();
  const statusLabel = apiKeyConfigured ? "healthy" : "unavailable";
  const httpStatus = apiKeyConfigured ? 200 : 503;
  res.status(httpStatus).json({
    status: statusLabel,
    service: "Google Maps API Proxy",
    timestamp: new Date().toISOString(),
    configuration: {
      apiKeyConfigured,
    },
  });
});
module.exports = router;

View File

@@ -1,10 +1,16 @@
const express = require('express');
const { Message, User } = require('../models');
const { authenticateToken } = require('../middleware/auth');
const logger = require('../utils/logger');
const { emitNewMessage, emitMessageRead } = require('../sockets/messageSocket');
const { Op } = require('sequelize');
const emailServices = require('../services/email');
const { validateS3Keys } = require('../utils/s3KeyValidator');
const { IMAGE_LIMITS } = require('../config/imageLimits');
const router = express.Router();
// Get all messages for the current user (inbox)
router.get('/', authenticateToken, async (req, res) => {
router.get('/', authenticateToken, async (req, res, next) => {
try {
const messages = await Message.findAll({
where: { receiverId: req.user.id },
@@ -12,19 +18,126 @@ router.get('/', authenticateToken, async (req, res) => {
{
model: User,
as: 'sender',
attributes: ['id', 'firstName', 'lastName', 'profileImage']
attributes: ['id', 'firstName', 'lastName', 'imageFilename']
}
],
order: [['createdAt', 'DESC']]
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Messages inbox fetched", {
userId: req.user.id,
messageCount: messages.length
});
res.json(messages);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Messages inbox fetch failed", {
error: error.message,
stack: error.stack,
userId: req.user.id
});
next(error);
}
});
// Get conversations grouped by user pairs.
// Returns one entry per conversation partner with the partner's public
// profile fields, the most recent message, and the unread count for the
// current user, sorted most-recent-first.
router.get('/conversations', authenticateToken, async (req, res, next) => {
  try {
    const userId = req.user.id;
    // Fetch all messages where user is sender or receiver
    const allMessages = await Message.findAll({
      where: {
        [Op.or]: [
          { senderId: userId },
          { receiverId: userId }
        ]
      },
      include: [
        {
          model: User,
          as: 'sender',
          attributes: ['id', 'firstName', 'lastName', 'imageFilename']
        },
        {
          model: User,
          as: 'receiver',
          attributes: ['id', 'firstName', 'lastName', 'imageFilename']
        }
      ],
      order: [['createdAt', 'DESC']]
    });
    // Group messages by conversation partner
    const conversationsMap = new Map();
    allMessages.forEach(message => {
      // Determine the conversation partner
      const partnerId = message.senderId === userId ? message.receiverId : message.senderId;
      const partner = message.senderId === userId ? message.receiver : message.sender;
      if (!conversationsMap.has(partnerId)) {
        conversationsMap.set(partnerId, {
          partnerId,
          partner: partner ? {
            id: partner.id,
            firstName: partner.firstName,
            lastName: partner.lastName,
            // Fix: the includes above select `imageFilename`, not
            // `profileImage`, so `partner.profileImage` was always
            // undefined. Expose the attribute that was actually fetched.
            imageFilename: partner.imageFilename
          } : null,
          lastMessage: null,
          lastMessageAt: null,
          unreadCount: 0
        });
      }
      const conversation = conversationsMap.get(partnerId);
      // Count unread messages (only those received by current user)
      if (message.receiverId === userId && !message.isRead) {
        conversation.unreadCount++;
      }
      // Keep the most recent message (messages are already sorted DESC)
      if (!conversation.lastMessage) {
        conversation.lastMessage = {
          id: message.id,
          content: message.content,
          senderId: message.senderId,
          createdAt: message.createdAt,
          isRead: message.isRead
        };
        conversation.lastMessageAt = message.createdAt;
      }
    });
    // Convert to array and sort by most recent message first
    const conversations = Array.from(conversationsMap.values())
      .filter(conv => conv.partner !== null) // Filter out conversations with deleted users
      .sort((a, b) => new Date(b.lastMessageAt) - new Date(a.lastMessageAt));
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.info("Conversations fetched", {
      userId: req.user.id,
      conversationCount: conversations.length
    });
    res.json(conversations);
  } catch (error) {
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.error("Conversations fetch failed", {
      error: error.message,
      stack: error.stack,
      userId: req.user.id
    });
    next(error);
  }
});
// Get sent messages
router.get('/sent', authenticateToken, async (req, res) => {
router.get('/sent', authenticateToken, async (req, res, next) => {
try {
const messages = await Message.findAll({
where: { senderId: req.user.id },
@@ -32,22 +145,35 @@ router.get('/sent', authenticateToken, async (req, res) => {
{
model: User,
as: 'receiver',
attributes: ['id', 'firstName', 'lastName', 'profileImage']
attributes: ['id', 'firstName', 'lastName', 'imageFilename']
}
],
order: [['createdAt', 'DESC']]
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Sent messages fetched", {
userId: req.user.id,
messageCount: messages.length
});
res.json(messages);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Sent messages fetch failed", {
error: error.message,
stack: error.stack,
userId: req.user.id
});
next(error);
}
});
// Get a single message with replies
router.get('/:id', authenticateToken, async (req, res) => {
// Get a single message
router.get('/:id', authenticateToken, async (req, res, next) => {
try {
const message = await Message.findOne({
where: {
where: {
id: req.params.id,
[require('sequelize').Op.or]: [
{ senderId: req.user.id },
@@ -58,21 +184,12 @@ router.get('/:id', authenticateToken, async (req, res) => {
{
model: User,
as: 'sender',
attributes: ['id', 'firstName', 'lastName', 'profileImage']
attributes: ['id', 'firstName', 'lastName', 'imageFilename']
},
{
model: User,
as: 'receiver',
attributes: ['id', 'firstName', 'lastName', 'profileImage']
},
{
model: Message,
as: 'replies',
include: [{
model: User,
as: 'sender',
attributes: ['id', 'firstName', 'lastName', 'profileImage']
}]
attributes: ['id', 'firstName', 'lastName', 'imageFilename']
}
]
});
@@ -82,20 +199,56 @@ router.get('/:id', authenticateToken, async (req, res) => {
}
// Mark as read if user is the receiver
if (message.receiverId === req.user.id && !message.isRead) {
const wasUnread = message.receiverId === req.user.id && !message.isRead;
if (wasUnread) {
await message.update({ isRead: true });
// Emit socket event to sender for real-time read receipt
const io = req.app.get('io');
if (io) {
emitMessageRead(io, message.senderId, {
messageId: message.id,
readAt: new Date().toISOString(),
readBy: req.user.id
});
}
}
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Message fetched", {
userId: req.user.id,
messageId: req.params.id,
markedAsRead: wasUnread
});
res.json(message);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Message fetch failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
messageId: req.params.id
});
next(error);
}
});
// Send a new message
router.post('/', authenticateToken, async (req, res) => {
router.post('/', authenticateToken, async (req, res, next) => {
try {
const { receiverId, subject, content, parentMessageId } = req.body;
const { receiverId, content, imageFilename } = req.body;
// Validate imageFilename if provided
if (imageFilename) {
const keyValidation = validateS3Keys([imageFilename], 'messages', { maxKeys: IMAGE_LIMITS.messages });
if (!keyValidation.valid) {
return res.status(400).json({
error: keyValidation.error,
details: keyValidation.invalidKeys
});
}
}
// Check if receiver exists
const receiver = await User.findByPk(receiverId);
@@ -111,27 +264,64 @@ router.post('/', authenticateToken, async (req, res) => {
const message = await Message.create({
senderId: req.user.id,
receiverId,
subject,
content,
parentMessageId
imageFilename: imageFilename || null
});
const messageWithSender = await Message.findByPk(message.id, {
include: [{
model: User,
as: 'sender',
attributes: ['id', 'firstName', 'lastName', 'profileImage']
attributes: ['id', 'firstName', 'lastName', 'imageFilename']
}]
});
// Emit socket event to receiver for real-time notification
const io = req.app.get('io');
if (io) {
emitNewMessage(io, receiverId, messageWithSender.toJSON());
}
// Send email notification to receiver
try {
const sender = await User.findByPk(req.user.id, {
attributes: ['id', 'firstName', 'lastName', 'email']
});
await emailServices.messaging.sendNewMessageNotification(receiver, sender, message);
} catch (emailError) {
// Log email error but don't block the message send
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Failed to send message notification email", {
error: emailError.message,
stack: emailError.stack,
messageId: message.id,
receiverId: receiverId
});
}
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Message sent", {
senderId: req.user.id,
receiverId: receiverId,
messageId: message.id
});
res.status(201).json(messageWithSender);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Message send failed", {
error: error.message,
stack: error.stack,
senderId: req.user.id,
receiverId: req.body?.receiverId
});
next(error);
}
});
// Mark message as read
router.put('/:id/read', authenticateToken, async (req, res) => {
router.put('/:id/read', authenticateToken, async (req, res, next) => {
try {
const message = await Message.findOne({
where: {
@@ -145,14 +335,38 @@ router.put('/:id/read', authenticateToken, async (req, res) => {
}
await message.update({ isRead: true });
// Emit socket event to sender for real-time read receipt
const io = req.app.get('io');
if (io) {
emitMessageRead(io, message.senderId, {
messageId: message.id,
readAt: new Date().toISOString(),
readBy: req.user.id
});
}
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Message marked as read", {
userId: req.user.id,
messageId: req.params.id
});
res.json(message);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Message mark as read failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
messageId: req.params.id
});
next(error);
}
});
// Get unread message count
router.get('/unread/count', authenticateToken, async (req, res) => {
router.get('/unread/count', authenticateToken, async (req, res, next) => {
try {
const count = await Message.count({
where: {
@@ -160,9 +374,21 @@ router.get('/unread/count', authenticateToken, async (req, res) => {
isRead: false
}
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Unread message count fetched", {
userId: req.user.id,
unreadCount: count
});
res.json({ count });
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Unread message count fetch failed", {
error: error.message,
stack: error.stack,
userId: req.user.id
});
next(error);
}
});

View File

@@ -1,145 +0,0 @@
const express = require("express");
const router = express.Router();
const jwt = require("jsonwebtoken");
const { User } = require("../models");
// Temporary in-memory storage for verification codes
// In production, use Redis or a database
const verificationCodes = new Map();
// Generate random 6-digit code
const generateVerificationCode = () => {
return Math.floor(100000 + Math.random() * 900000).toString();
};
// Send verification code for phone-number auth.
// Stores the code in the in-memory map keyed by phone number; delivery is
// TODO (SMS provider integration).
router.post("/send-code", async (req, res) => {
  try {
    const { phoneNumber } = req.body;

    if (!phoneNumber) {
      return res.status(400).json({ message: "Phone number is required" });
    }

    // Generate and store verification code
    const code = generateVerificationCode();
    verificationCodes.set(phoneNumber, {
      code,
      createdAt: Date.now(),
      attempts: 0,
    });

    // TODO: Integrate with SMS service (Twilio, AWS SNS, etc.)
    // SECURITY: never include the code in the HTTP response — returning
    // it (the old `devCode` field) lets any caller verify an arbitrary
    // phone number. Log it locally in non-production only.
    if (process.env.NODE_ENV !== "production") {
      console.log(`Verification code for ${phoneNumber}: ${code}`);
    }

    res.json({ message: "Verification code sent" });
  } catch (error) {
    console.error("Error sending verification code:", error);
    res.status(500).json({ message: "Failed to send verification code" });
  }
});
// Verify code and create/login user.
// Flow: validate input -> look up stored code -> expiry check -> attempt
// limit -> constant comparison -> consume code -> find-or-create user ->
// issue JWT. The order of the checks is security-relevant: expiry and
// attempt limits are enforced before the code is compared, and failed
// comparisons increment the attempt counter.
router.post("/verify-code", async (req, res) => {
  try {
    const { phoneNumber, code, firstName, lastName } = req.body;
    if (!phoneNumber || !code) {
      return res
        .status(400)
        .json({ message: "Phone number and code are required" });
    }
    // Check verification code (in-memory store; lost on restart)
    const storedData = verificationCodes.get(phoneNumber);
    if (!storedData) {
      return res.status(400).json({
        message: "No verification code found. Please request a new one.",
      });
    }
    // Check if code expired (10 minutes)
    if (Date.now() - storedData.createdAt > 10 * 60 * 1000) {
      verificationCodes.delete(phoneNumber);
      return res.status(400).json({
        message: "Verification code expired. Please request a new one.",
      });
    }
    // Check attempts — the code is invalidated after 3 failures
    if (storedData.attempts >= 3) {
      verificationCodes.delete(phoneNumber);
      return res.status(400).json({
        message: "Too many failed attempts. Please request a new code.",
      });
    }
    // NOTE(review): plain !== comparison is not constant-time; with the
    // 3-attempt cap the practical risk is low, but crypto.timingSafeEqual
    // would be stricter — confirm before reuse.
    if (storedData.code !== code) {
      storedData.attempts++;
      return res.status(400).json({ message: "Invalid verification code" });
    }
    // Code is valid, remove it (single-use)
    verificationCodes.delete(phoneNumber);
    // Find or create user
    let user = await User.findOne({ where: { phone: phoneNumber } });
    if (!user) {
      // New user - require firstName and lastName
      if (!firstName || !lastName) {
        return res.status(400).json({
          message: "First name and last name are required for new users",
          isNewUser: true,
        });
      }
      user = await User.create({
        phone: phoneNumber,
        phoneVerified: true,
        firstName,
        lastName,
        authProvider: "phone",
        // Generate a unique username from phone
        // (last 6 digits + base36 timestamp for uniqueness)
        username: `user_${phoneNumber
          .replace(/\D/g, "")
          .slice(-6)}_${Date.now().toString(36)}`,
      });
    } else {
      // Existing user - update phone verification
      await user.update({ phoneVerified: true });
    }
    // Generate JWT token (7-day expiry, signed with JWT_SECRET)
    const token = jwt.sign(
      { id: user.id, phone: user.phone },
      process.env.JWT_SECRET,
      { expiresIn: "7d" }
    );
    // Return a sanitized user object — no password/secret fields
    res.json({
      message: "Phone verified successfully",
      token,
      user: {
        id: user.id,
        username: user.username,
        firstName: user.firstName,
        lastName: user.lastName,
        phone: user.phone,
        email: user.email,
        phoneVerified: user.phoneVerified,
      },
    });
  } catch (error) {
    console.error("Error verifying code:", error);
    res.status(500).json({ message: "Failed to verify code" });
  }
});
module.exports = router;

File diff suppressed because it is too large Load Diff

View File

@@ -1,16 +1,27 @@
const express = require("express");
const { authenticateToken } = require("../middleware/auth");
const { authenticateToken, requireVerifiedEmail } = require("../middleware/auth");
const { User, Item } = require("../models");
const StripeService = require("../services/stripeService");
const StripeWebhookService = require("../services/stripeWebhookService");
const emailServices = require("../services/email");
const logger = require("../utils/logger");
const router = express.Router();
// Get checkout session status
router.get("/checkout-session/:sessionId", async (req, res) => {
router.get("/checkout-session/:sessionId", async (req, res, next) => {
try {
const { sessionId } = req.params;
const session = await StripeService.getCheckoutSession(sessionId);
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Stripe checkout session retrieved", {
sessionId: sessionId,
status: session.status,
payment_status: session.payment_status,
metadata: session.metadata,
});
res.json({
status: session.status,
payment_status: session.payment_status,
@@ -19,13 +30,18 @@ router.get("/checkout-session/:sessionId", async (req, res) => {
metadata: session.metadata,
});
} catch (error) {
console.error("Error retrieving checkout session:", error);
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Stripe checkout session retrieval failed", {
error: error.message,
stack: error.stack,
sessionId: req.params.sessionId,
});
next(error);
}
});
// Create connected account
router.post("/accounts", authenticateToken, async (req, res) => {
router.post("/accounts", authenticateToken, requireVerifiedEmail, async (req, res, next) => {
try {
const user = await User.findByPk(req.user.id);
@@ -51,20 +67,32 @@ router.post("/accounts", authenticateToken, async (req, res) => {
stripeConnectedAccountId: account.id,
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Stripe connected account created", {
userId: req.user.id,
stripeConnectedAccountId: account.id,
});
res.json({
stripeConnectedAccountId: account.id,
success: true,
});
} catch (error) {
console.error("Error creating connected account:", error);
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Stripe connected account creation failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
});
next(error);
}
});
// Generate onboarding link
router.post("/account-links", authenticateToken, async (req, res) => {
router.post("/account-links", authenticateToken, requireVerifiedEmail, async (req, res, next) => {
let user = null;
try {
const user = await User.findByPk(req.user.id);
user = await User.findByPk(req.user.id);
if (!user || !user.stripeConnectedAccountId) {
return res.status(400).json({ error: "No connected account found" });
@@ -84,20 +112,69 @@ router.post("/account-links", authenticateToken, async (req, res) => {
returnUrl
);
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Stripe account link created", {
userId: req.user.id,
stripeConnectedAccountId: user.stripeConnectedAccountId,
expiresAt: accountLink.expires_at,
});
res.json({
url: accountLink.url,
expiresAt: accountLink.expires_at,
});
} catch (error) {
console.error("Error creating account link:", error);
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Stripe account link creation failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
stripeConnectedAccountId: user?.stripeConnectedAccountId,
});
next(error);
}
});
// Get account status
router.get("/account-status", authenticateToken, async (req, res) => {
// Create account session for embedded onboarding
router.post("/account-sessions", authenticateToken, requireVerifiedEmail, async (req, res, next) => {
let user = null;
try {
const user = await User.findByPk(req.user.id);
user = await User.findByPk(req.user.id);
if (!user || !user.stripeConnectedAccountId) {
return res.status(400).json({ error: "No connected account found" });
}
const accountSession = await StripeService.createAccountSession(
user.stripeConnectedAccountId
);
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Stripe account session created", {
userId: req.user.id,
stripeConnectedAccountId: user.stripeConnectedAccountId,
});
res.json({
clientSecret: accountSession.client_secret,
});
} catch (error) {
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Stripe account session creation failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
stripeConnectedAccountId: user?.stripeConnectedAccountId,
});
next(error);
}
});
// Get account status with reconciliation
router.get("/account-status", authenticateToken, async (req, res, next) => {
let user = null;
try {
user = await User.findByPk(req.user.id);
if (!user || !user.stripeConnectedAccountId) {
return res.status(400).json({ error: "No connected account found" });
@@ -107,6 +184,72 @@ router.get("/account-status", authenticateToken, async (req, res) => {
user.stripeConnectedAccountId
);
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Stripe account status retrieved", {
userId: req.user.id,
stripeConnectedAccountId: user.stripeConnectedAccountId,
detailsSubmitted: accountStatus.details_submitted,
payoutsEnabled: accountStatus.payouts_enabled,
});
// Reconciliation: Compare fetched status with stored User fields
const previousPayoutsEnabled = user.stripePayoutsEnabled;
const currentPayoutsEnabled = accountStatus.payouts_enabled;
const requirements = accountStatus.requirements || {};
// Check if status has changed and needs updating
const statusChanged =
previousPayoutsEnabled !== currentPayoutsEnabled ||
JSON.stringify(user.stripeRequirementsCurrentlyDue || []) !==
JSON.stringify(requirements.currently_due || []);
if (statusChanged) {
reqLogger.info("Reconciling account status from API call", {
userId: req.user.id,
previousPayoutsEnabled,
currentPayoutsEnabled,
previousCurrentlyDue: user.stripeRequirementsCurrentlyDue?.length || 0,
newCurrentlyDue: requirements.currently_due?.length || 0,
});
// Update user with current status
await user.update({
stripePayoutsEnabled: currentPayoutsEnabled,
stripeRequirementsCurrentlyDue: requirements.currently_due || [],
stripeRequirementsPastDue: requirements.past_due || [],
stripeDisabledReason: requirements.disabled_reason || null,
stripeRequirementsLastUpdated: new Date(),
});
// If payouts just became disabled (true -> false), send notification
if (!currentPayoutsEnabled && previousPayoutsEnabled) {
reqLogger.warn("Payouts disabled detected during reconciliation", {
userId: req.user.id,
disabledReason: requirements.disabled_reason,
});
try {
const disabledReason = StripeWebhookService.formatDisabledReason(
requirements.disabled_reason
);
await emailServices.payment.sendPayoutsDisabledEmail(user.email, {
ownerName: user.firstName || user.lastName,
disabledReason,
});
reqLogger.info("Sent payouts disabled email during reconciliation", {
userId: req.user.id,
});
} catch (emailError) {
reqLogger.error("Failed to send payouts disabled email", {
userId: req.user.id,
error: emailError.message,
});
}
}
}
res.json({
accountId: accountStatus.id,
detailsSubmitted: accountStatus.details_submitted,
@@ -115,59 +258,87 @@ router.get("/account-status", authenticateToken, async (req, res) => {
requirements: accountStatus.requirements,
});
} catch (error) {
console.error("Error getting account status:", error);
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Stripe account status retrieval failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
stripeConnectedAccountId: user?.stripeConnectedAccountId,
});
next(error);
}
});
// Create embedded setup checkout session for collecting payment method
router.post("/create-setup-checkout-session", authenticateToken, async (req, res) => {
try {
const { rentalData } = req.body;
router.post(
"/create-setup-checkout-session",
authenticateToken,
requireVerifiedEmail,
async (req, res, next) => {
let user = null;
try {
const { rentalData } = req.body;
const user = await User.findByPk(req.user.id);
if (!user) {
return res.status(404).json({ error: "User not found" });
}
user = await User.findByPk(req.user.id);
// Create or get Stripe customer
let stripeCustomerId = user.stripeCustomerId;
if (!stripeCustomerId) {
// Create new Stripe customer
const customer = await StripeService.createCustomer({
email: user.email,
name: `${user.firstName} ${user.lastName}`,
metadata: {
userId: user.id.toString()
}
if (!user) {
return res.status(404).json({ error: "User not found" });
}
// Create or get Stripe customer
let stripeCustomerId = user.stripeCustomerId;
if (!stripeCustomerId) {
// Create new Stripe customer
const customer = await StripeService.createCustomer({
email: user.email,
name: `${user.firstName} ${user.lastName}`,
metadata: {
userId: user.id.toString(),
},
});
stripeCustomerId = customer.id;
// Save customer ID to user record
await user.update({ stripeCustomerId });
}
// Add rental data to metadata if provided
const metadata = rentalData
? {
rentalData: JSON.stringify(rentalData),
}
: {};
const session = await StripeService.createSetupCheckoutSession({
customerId: stripeCustomerId,
metadata,
});
stripeCustomerId = customer.id;
// Save customer ID to user record
await user.update({ stripeCustomerId });
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Stripe setup checkout session created", {
userId: req.user.id,
stripeCustomerId: stripeCustomerId,
sessionId: session.id,
hasRentalData: !!rentalData,
});
res.json({
clientSecret: session.client_secret,
sessionId: session.id,
});
} catch (error) {
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Stripe setup checkout session creation failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
stripeCustomerId: user?.stripeCustomerId,
});
next(error);
}
// Add rental data to metadata if provided
const metadata = rentalData ? {
rentalData: JSON.stringify(rentalData)
} : {};
const session = await StripeService.createSetupCheckoutSession({
customerId: stripeCustomerId,
metadata
});
res.json({
clientSecret: session.client_secret,
sessionId: session.id
});
} catch (error) {
console.error("Error creating setup checkout session:", error);
res.status(500).json({ error: error.message });
}
});
);
module.exports = router;

View File

@@ -0,0 +1,119 @@
const express = require("express");
const StripeWebhookService = require("../services/stripeWebhookService");
const DisputeService = require("../services/disputeService");
const logger = require("../utils/logger");
const router = express.Router();
const WEBHOOK_SECRET = process.env.STRIPE_WEBHOOK_SECRET;
/**
 * POST /stripe/webhooks
 * Stripe webhook endpoint - receives events from Stripe.
 * Must use raw body for signature verification (the parsed JSON body
 * cannot be re-serialized byte-identically, so server.js stores rawBody).
 *
 * Response-code contract: 400 only for signature/config failures so
 * Stripe retries them; processing errors still return 200 to stop
 * retries (see the catch block below).
 */
router.post("/", async (req, res) => {
  const signature = req.headers["stripe-signature"];
  if (!signature) {
    logger.warn("Webhook request missing stripe-signature header");
    return res.status(400).json({ error: "Missing signature" });
  }
  if (!WEBHOOK_SECRET) {
    logger.error("STRIPE_WEBHOOK_SECRET not configured");
    return res.status(500).json({ error: "Webhook not configured" });
  }
  let event;
  try {
    // Use rawBody stored by bodyParser in server.js
    event = StripeWebhookService.constructEvent(
      req.rawBody,
      signature,
      WEBHOOK_SECRET
    );
  } catch (err) {
    // Invalid signature: reject so the sender knows the payload was
    // not accepted (also guards against forged events).
    logger.error("Webhook signature verification failed", {
      error: err.message,
    });
    return res.status(400).json({ error: "Invalid signature" });
  }
  // Log event receipt for debugging
  // For Connect account events, event.account contains the connected account ID
  logger.info("Stripe webhook received", {
    eventId: event.id,
    eventType: event.type,
    connectedAccount: event.account || null,
  });
  try {
    // Dispatch on event type; unhandled types are logged and ACKed.
    switch (event.type) {
      case "account.updated":
        await StripeWebhookService.handleAccountUpdated(event.data.object);
        break;
      case "payout.paid":
        // Payout to connected account's bank succeeded
        await StripeWebhookService.handlePayoutPaid(
          event.data.object,
          event.account
        );
        break;
      case "payout.failed":
        // Payout to connected account's bank failed
        await StripeWebhookService.handlePayoutFailed(
          event.data.object,
          event.account
        );
        break;
      case "payout.canceled":
        // Payout was canceled before being deposited
        await StripeWebhookService.handlePayoutCanceled(
          event.data.object,
          event.account
        );
        break;
      case "account.application.deauthorized":
        // Owner disconnected their Stripe account from our platform
        await StripeWebhookService.handleAccountDeauthorized(event.account);
        break;
      case "charge.dispute.created":
        // Renter disputed a charge with their bank
        await DisputeService.handleDisputeCreated(event.data.object);
        break;
      case "charge.dispute.closed":
      case "charge.dispute.funds_reinstated":
      case "charge.dispute.funds_withdrawn":
        // Dispute was resolved (won, lost, or warning closed)
        await DisputeService.handleDisputeClosed(event.data.object);
        break;
      default:
        logger.info("Unhandled webhook event type", { type: event.type });
    }
    // Always return 200 to acknowledge receipt
    res.json({ received: true, eventId: event.id });
  } catch (error) {
    logger.error("Error processing webhook", {
      eventId: event.id,
      eventType: event.type,
      error: error.message,
      stack: error.stack,
    });
    // Still return 200 to prevent Stripe retries for processing errors
    // Failed payouts will be handled by retry job
    res.json({ received: true, eventId: event.id });
  }
});
module.exports = router;

627
backend/routes/twoFactor.js Normal file
View File

@@ -0,0 +1,627 @@
const express = require("express");
const { User } = require("../models");
const TwoFactorService = require("../services/TwoFactorService");
const emailServices = require("../services/email");
const logger = require("../utils/logger");
const { authenticateToken } = require("../middleware/auth");
const { requireStepUpAuth } = require("../middleware/stepUpAuth");
const { csrfProtection } = require("../middleware/csrf");
const {
sanitizeInput,
validateTotpCode,
validateEmailOtp,
validateRecoveryCode,
} = require("../middleware/validation");
const {
twoFactorVerificationLimiter,
twoFactorSetupLimiter,
recoveryCodeLimiter,
emailOtpSendLimiter,
} = require("../middleware/rateLimiter");
const router = express.Router();
// Helper for structured security audit logging.
// Emits a single structured log entry tagged `security_audit`; any keys
// in `details` override the base fields.
const auditLog = (req, action, userId, details = {}) => {
  const baseEntry = {
    type: 'security_audit',
    action,
    userId,
    ip: req.ip,
    userAgent: req.get('User-Agent'),
  };
  logger.info(Object.assign(baseEntry, details));
};
// All routes require authentication
router.use(authenticateToken);
// ============================================
// SETUP ENDPOINTS
// ============================================
/**
 * POST /api/2fa/setup/totp/init
 * Initialize TOTP setup - generate secret and QR code.
 * The secret is stored as "pending" until the user proves possession of
 * it via /setup/totp/verify; 2FA is not enabled by this endpoint.
 */
router.post(
  "/setup/totp/init",
  twoFactorSetupLimiter,
  csrfProtection,
  async (req, res) => {
    try {
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (user.twoFactorEnabled) {
        return res.status(400).json({
          error: "Multi-factor authentication is already enabled",
        });
      }

      // Generate TOTP secret and QR code, then stash the encrypted
      // secret for the verification step.
      const setup = await TwoFactorService.generateTotpSecret(user.email);
      await user.storePendingTotpSecret(
        setup.encryptedSecret,
        setup.encryptedSecretIv
      );

      auditLog(req, '2fa.setup.initiated', user.id, { method: 'totp' });

      res.json({
        qrCodeDataUrl: setup.qrCodeDataUrl,
        message: "Scan the QR code with your authenticator app",
      });
    } catch (error) {
      logger.error("TOTP setup init error:", error);
      res.status(500).json({ error: "Failed to initialize TOTP setup" });
    }
  }
);
/**
 * POST /api/2fa/setup/totp/verify
 * Verify TOTP code and enable 2FA.
 *
 * Checks the submitted code against the pending secret stored by
 * /setup/totp/init; on success enables TOTP, generates single-use
 * recovery codes, and returns them to the client exactly once.
 */
router.post(
  "/setup/totp/verify",
  twoFactorSetupLimiter,
  csrfProtection,
  sanitizeInput,
  validateTotpCode,
  async (req, res) => {
    try {
      const { code } = req.body;
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (user.twoFactorEnabled) {
        return res.status(400).json({
          error: "Multi-factor authentication is already enabled",
        });
      }
      // A pending secret must exist from the /setup/totp/init step.
      if (!user.twoFactorSetupPendingSecret) {
        return res.status(400).json({
          error: "No pending TOTP setup. Please start the setup process again.",
        });
      }
      // Verify the code against the pending secret
      const isValid = user.verifyPendingTotpCode(code);
      if (!isValid) {
        return res.status(400).json({
          error: "Invalid verification code. Please try again.",
        });
      }
      // Generate recovery codes
      const { codes: recoveryCodes } =
        await TwoFactorService.generateRecoveryCodes();
      // Enable TOTP
      await user.enableTotp(recoveryCodes);
      // Send confirmation email
      try {
        await emailServices.auth.sendTwoFactorEnabledEmail(user);
      } catch (emailError) {
        logger.error("Failed to send 2FA enabled email:", emailError);
        // Don't fail the request if email fails
      }
      auditLog(req, '2fa.setup.completed', user.id, { method: 'totp' });
      res.json({
        message: "Multi-factor authentication enabled successfully",
        recoveryCodes,
        warning:
          "Save these recovery codes in a secure location. You will not be able to see them again.",
      });
    } catch (error) {
      logger.error("TOTP setup verify error:", error);
      res.status(500).json({ error: "Failed to enable multi-factor authentication" });
    }
  }
);
/**
 * POST /api/2fa/setup/email/init
 * Initialize email 2FA setup - send verification code.
 *
 * Sends a one-time code to the user's registered email; email 2FA is
 * only enabled after the code is confirmed via /setup/email/verify.
 */
router.post(
  "/setup/email/init",
  twoFactorSetupLimiter,
  emailOtpSendLimiter,
  csrfProtection,
  async (req, res) => {
    try {
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (user.twoFactorEnabled) {
        return res.status(400).json({
          error: "Multi-factor authentication is already enabled",
        });
      }
      // Generate and send email OTP
      const otpCode = await user.generateEmailOtp();
      try {
        await emailServices.auth.sendTwoFactorOtpEmail(user, otpCode);
      } catch (emailError) {
        // Delivery failure is fatal here: the user cannot complete setup
        // without receiving the code.
        logger.error("Failed to send 2FA setup OTP email:", emailError);
        return res.status(500).json({ error: "Failed to send verification email" });
      }
      auditLog(req, '2fa.setup.initiated', user.id, { method: 'email' });
      res.json({
        message: "Verification code sent to your email",
      });
    } catch (error) {
      logger.error("Email 2FA setup init error:", error);
      res.status(500).json({ error: "Failed to initialize email 2FA setup" });
    }
  }
);
/**
 * POST /api/2fa/setup/email/verify
 * Verify email OTP and enable email 2FA.
 *
 * Confirms the code sent by /setup/email/init. Failed attempts are
 * counted and can lock the OTP; on success email 2FA is enabled and
 * single-use recovery codes are returned exactly once.
 */
router.post(
  "/setup/email/verify",
  twoFactorSetupLimiter,
  csrfProtection,
  sanitizeInput,
  validateEmailOtp,
  async (req, res) => {
    try {
      const { code } = req.body;
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (user.twoFactorEnabled) {
        return res.status(400).json({
          error: "Multi-factor authentication is already enabled",
        });
      }
      // Too many wrong codes locks this OTP until a new one is requested.
      if (user.isEmailOtpLocked()) {
        return res.status(429).json({
          error: "Too many failed attempts. Please request a new code.",
        });
      }
      // Verify the OTP
      const isValid = user.verifyEmailOtp(code);
      if (!isValid) {
        await user.incrementEmailOtpAttempts();
        return res.status(400).json({
          error: "Invalid or expired verification code",
        });
      }
      // Generate recovery codes
      const { codes: recoveryCodes } =
        await TwoFactorService.generateRecoveryCodes();
      // Enable email 2FA
      await user.enableEmailTwoFactor(recoveryCodes);
      await user.clearEmailOtp();
      // Send confirmation email
      try {
        await emailServices.auth.sendTwoFactorEnabledEmail(user);
      } catch (emailError) {
        // Best-effort notification; don't fail the request on email errors.
        logger.error("Failed to send 2FA enabled email:", emailError);
      }
      auditLog(req, '2fa.setup.completed', user.id, { method: 'email' });
      res.json({
        message: "Multi-factor authentication enabled successfully",
        recoveryCodes,
        warning:
          "Save these recovery codes in a secure location. You will not be able to see them again.",
      });
    } catch (error) {
      logger.error("Email 2FA setup verify error:", error);
      res.status(500).json({ error: "Failed to enable multi-factor authentication" });
    }
  }
);
// ============================================
// VERIFICATION ENDPOINTS (Step-up auth)
// ============================================
/**
 * POST /api/2fa/verify/totp
 * Verify TOTP code for step-up authentication.
 *
 * On success the code is marked used (replay protection) and the user's
 * step-up session is refreshed. Method/enablement mismatches return a
 * generic message so the response does not reveal the configured method.
 */
router.post(
  "/verify/totp",
  twoFactorVerificationLimiter,
  csrfProtection,
  sanitizeInput,
  validateTotpCode,
  async (req, res) => {
    try {
      const { code } = req.body;
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (!user.twoFactorEnabled || user.twoFactorMethod !== "totp") {
        // Specifics stay in the server log; client sees a generic error.
        logger.warn(`2FA verify failed for user ${user.id}: TOTP not enabled or wrong method`);
        return res.status(400).json({
          error: "Verification failed",
        });
      }
      const isValid = user.verifyTotpCode(code);
      if (!isValid) {
        auditLog(req, '2fa.verify.failed', user.id, { method: 'totp' });
        return res.status(400).json({
          error: "Invalid verification code",
        });
      }
      // Mark code as used for replay protection
      await user.markTotpCodeUsed(code);
      // Update step-up session
      await user.updateStepUpSession();
      auditLog(req, '2fa.verify.success', user.id, { method: 'totp' });
      res.json({
        message: "Verification successful",
        verified: true,
      });
    } catch (error) {
      logger.error("TOTP verification error:", error);
      res.status(500).json({ error: "Verification failed" });
    }
  }
);
/**
 * POST /api/2fa/verify/email/send
 * Send email OTP for step-up authentication.
 *
 * Generates a fresh OTP and emails it to the user; the code is consumed
 * by POST /api/2fa/verify/email.
 */
router.post(
  "/verify/email/send",
  emailOtpSendLimiter,
  csrfProtection,
  async (req, res) => {
    try {
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (!user.twoFactorEnabled) {
        // Specifics stay in the server log; client sees a generic error.
        logger.warn(`2FA verify failed for user ${user.id}: 2FA not enabled`);
        return res.status(400).json({
          error: "Verification failed",
        });
      }
      // Generate and send email OTP
      const otpCode = await user.generateEmailOtp();
      try {
        await emailServices.auth.sendTwoFactorOtpEmail(user, otpCode);
      } catch (emailError) {
        // Delivery failure is fatal: verification is impossible without it.
        logger.error("Failed to send 2FA OTP email:", emailError);
        return res.status(500).json({ error: "Failed to send verification email" });
      }
      auditLog(req, '2fa.otp.sent', user.id, { method: 'email' });
      res.json({
        message: "Verification code sent to your email",
      });
    } catch (error) {
      logger.error("Email OTP send error:", error);
      res.status(500).json({ error: "Failed to send verification code" });
    }
  }
);
/**
 * POST /api/2fa/verify/email
 * Verify email OTP for step-up authentication.
 *
 * Failed attempts are counted and can lock the OTP; on success the
 * step-up session is refreshed and the OTP is cleared so it cannot be
 * replayed.
 */
router.post(
  "/verify/email",
  twoFactorVerificationLimiter,
  csrfProtection,
  sanitizeInput,
  validateEmailOtp,
  async (req, res) => {
    try {
      const { code } = req.body;
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (!user.twoFactorEnabled) {
        // Specifics stay in the server log; client sees a generic error.
        logger.warn(`2FA verify failed for user ${user.id}: 2FA not enabled`);
        return res.status(400).json({
          error: "Verification failed",
        });
      }
      if (user.isEmailOtpLocked()) {
        return res.status(429).json({
          error: "Too many failed attempts. Please request a new code.",
        });
      }
      const isValid = user.verifyEmailOtp(code);
      if (!isValid) {
        await user.incrementEmailOtpAttempts();
        auditLog(req, '2fa.verify.failed', user.id, { method: 'email' });
        return res.status(400).json({
          error: "Invalid or expired verification code",
        });
      }
      // Update step-up session and clear OTP
      await user.updateStepUpSession();
      await user.clearEmailOtp();
      auditLog(req, '2fa.verify.success', user.id, { method: 'email' });
      res.json({
        message: "Verification successful",
        verified: true,
      });
    } catch (error) {
      logger.error("Email OTP verification error:", error);
      res.status(500).json({ error: "Verification failed" });
    }
  }
);
/**
 * POST /api/2fa/verify/recovery
 * Use recovery code for step-up authentication.
 *
 * Each recovery code is single-use. Successful use refreshes the
 * step-up session, triggers an alert email, and warns the user when
 * they are running low on remaining codes.
 */
router.post(
  "/verify/recovery",
  recoveryCodeLimiter,
  csrfProtection,
  sanitizeInput,
  validateRecoveryCode,
  async (req, res) => {
    try {
      const { code } = req.body;
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (!user.twoFactorEnabled) {
        // Specifics stay in the server log; client sees a generic error.
        logger.warn(`2FA verify failed for user ${user.id}: 2FA not enabled`);
        return res.status(400).json({
          error: "Verification failed",
        });
      }
      // Consumes the code if valid and reports how many codes remain.
      const { valid, remainingCodes } = await user.useRecoveryCode(code);
      if (!valid) {
        auditLog(req, '2fa.verify.failed', user.id, { method: 'recovery' });
        return res.status(400).json({
          error: "Invalid recovery code",
        });
      }
      // FIX: refresh the step-up session like the TOTP/email verify
      // endpoints do; without this the successful recovery verification
      // did not actually grant step-up access.
      await user.updateStepUpSession();
      // Send alert email about recovery code usage
      try {
        await emailServices.auth.sendRecoveryCodeUsedEmail(user, remainingCodes);
      } catch (emailError) {
        // Best-effort alert; don't fail verification on email errors.
        logger.error("Failed to send recovery code used email:", emailError);
      }
      auditLog(req, '2fa.recovery.used', user.id, { lowCodes: remainingCodes <= 2 });
      res.json({
        message: "Verification successful",
        verified: true,
        remainingCodes,
        warning:
          remainingCodes <= 2
            ? "You are running low on recovery codes. Please generate new ones."
            : null,
      });
    } catch (error) {
      logger.error("Recovery code verification error:", error);
      res.status(500).json({ error: "Verification failed" });
    }
  }
);
// ============================================
// MANAGEMENT ENDPOINTS
// ============================================
/**
 * GET /api/2fa/status
 * Get current 2FA status for the user.
 *
 * Returns whether 2FA is enabled, the active method, and coarse
 * recovery-code flags (the exact count is deliberately not exposed).
 */
router.get("/status", async (req, res) => {
  try {
    const user = await User.findByPk(req.user.id);
    if (!user) {
      return res.status(404).json({ error: "User not found" });
    }
    // Hoist so the accessor runs once instead of twice (consistent with
    // the /recovery/remaining handler below).
    const remaining = user.getRemainingRecoveryCodes();
    res.json({
      enabled: user.twoFactorEnabled,
      method: user.twoFactorMethod,
      hasRecoveryCodes: remaining > 0,
      lowRecoveryCodes: remaining <= 2,
    });
  } catch (error) {
    logger.error("2FA status error:", error);
    res.status(500).json({ error: "Failed to get 2FA status" });
  }
});
/**
 * POST /api/2fa/disable
 * Disable 2FA (requires step-up authentication).
 *
 * Guarded by a recent step-up verification; a notification email is
 * sent best-effort after the state change.
 */
router.post(
  "/disable",
  csrfProtection,
  requireStepUpAuth("2fa_disable"),
  async (req, res) => {
    try {
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (!user.twoFactorEnabled) {
        // Generic client message; detail stays in the server log.
        logger.warn(`2FA disable failed for user ${user.id}: 2FA not enabled`);
        return res.status(400).json({
          error: "Operation failed",
        });
      }
      await user.disableTwoFactor();
      // Send notification email
      try {
        await emailServices.auth.sendTwoFactorDisabledEmail(user);
      } catch (emailError) {
        // Best-effort; the 2FA state change has already been committed.
        logger.error("Failed to send 2FA disabled email:", emailError);
      }
      auditLog(req, '2fa.disabled', user.id);
      res.json({
        message: "Multi-factor authentication has been disabled",
      });
    } catch (error) {
      logger.error("2FA disable error:", error);
      res.status(500).json({ error: "Failed to disable multi-factor authentication" });
    }
  }
);
/**
 * POST /api/2fa/recovery/regenerate
 * Generate new recovery codes (requires step-up authentication).
 *
 * Replaces all existing recovery codes; the new set is shown exactly once.
 */
router.post(
  "/recovery/regenerate",
  csrfProtection,
  requireStepUpAuth("recovery_regenerate"),
  async (req, res) => {
    try {
      const user = await User.findByPk(req.user.id);
      if (!user) {
        return res.status(404).json({ error: "User not found" });
      }
      if (!user.twoFactorEnabled) {
        // Generic client message; detail stays in the server log.
        logger.warn(`Recovery regenerate failed for user ${user.id}: 2FA not enabled`);
        return res.status(400).json({
          error: "Operation failed",
        });
      }
      const recoveryCodes = await user.regenerateRecoveryCodes();
      auditLog(req, '2fa.recovery.regenerated', user.id);
      res.json({
        recoveryCodes,
        warning:
          "Save these recovery codes in a secure location. Your previous codes are now invalid.",
      });
    } catch (error) {
      logger.error("Recovery code regeneration error:", error);
      res.status(500).json({ error: "Failed to regenerate recovery codes" });
    }
  }
);
/**
 * GET /api/2fa/recovery/remaining
 * Get recovery codes status (not exact count for security).
 *
 * Only coarse boolean flags are returned so the exact number of unused
 * codes is never exposed to the client.
 */
router.get("/recovery/remaining", async (req, res) => {
  try {
    const user = await User.findByPk(req.user.id);
    if (!user) {
      return res.status(404).json({ error: "User not found" });
    }
    const remaining = user.getRemainingRecoveryCodes();
    const payload = {
      hasRecoveryCodes: remaining > 0,
      lowRecoveryCodes: remaining <= 2,
    };
    res.json(payload);
  } catch (error) {
    logger.error("Recovery codes remaining error:", error);
    res.status(500).json({ error: "Failed to get recovery codes status" });
  }
});
module.exports = router;

230
backend/routes/upload.js Normal file
View File

@@ -0,0 +1,230 @@
const express = require("express");
const router = express.Router();
const { authenticateToken } = require("../middleware/auth");
const { uploadPresignLimiter } = require("../middleware/rateLimiter");
const s3Service = require("../services/s3Service");
const S3OwnershipService = require("../services/s3OwnershipService");
const { v4: uuidv4 } = require("uuid");
const logger = require("../utils/logger");
const MAX_BATCH_SIZE = 20;
/**
 * Middleware gate for upload routes: passes control onward when the S3
 * integration is configured, otherwise short-circuits with 503.
 */
const requireS3Enabled = (req, res, next) => {
  if (s3Service.isEnabled()) {
    next();
    return;
  }
  res.status(503).json({
    error: "File upload service is not available",
  });
};
/**
 * POST /api/upload/presign
 * Get a presigned URL for uploading a single file to S3.
 *
 * Body: { uploadType, contentType, fileName, fileSize } — all required.
 * Type/size validation is delegated to s3Service; its "Invalid …"
 * errors surface as 400s, anything else goes to the global handler.
 */
router.post(
  "/presign",
  authenticateToken,
  requireS3Enabled,
  uploadPresignLimiter,
  async (req, res, next) => {
    try {
      const { uploadType, contentType, fileName, fileSize } = req.body;
      // Validate required fields
      // NOTE(review): the truthiness check also rejects fileSize === 0 —
      // presumably intentional (zero-byte uploads are useless); confirm.
      if (!uploadType || !contentType || !fileName || !fileSize) {
        return res.status(400).json({ error: "Missing required fields" });
      }
      const result = await s3Service.getPresignedUploadUrl(
        uploadType,
        contentType,
        fileName,
        fileSize
      );
      logger.info("Presigned URL generated", {
        userId: req.user.id,
        uploadType,
        key: result.key,
      });
      res.json(result);
    } catch (error) {
      // Validation failures from s3Service are client errors.
      if (error.message.includes("Invalid")) {
        return res.status(400).json({ error: error.message });
      }
      next(error);
    }
  }
);
/**
 * POST /api/upload/presign-batch
 * Get presigned URLs for uploading multiple files to S3.
 * All files in a batch share the same UUID base for coordinated variant uploads.
 *
 * Body: { uploadType, files: [{ contentType, fileName, fileSize }, ...] }.
 * At most MAX_BATCH_SIZE files per request.
 */
router.post(
  "/presign-batch",
  authenticateToken,
  requireS3Enabled,
  uploadPresignLimiter,
  async (req, res, next) => {
    try {
      const { uploadType, files } = req.body;
      if (!uploadType || !files || !Array.isArray(files)) {
        return res.status(400).json({ error: "Missing required fields" });
      }
      if (files.length === 0) {
        return res.status(400).json({ error: "No files specified" });
      }
      if (files.length > MAX_BATCH_SIZE) {
        // BUG FIX: this was a double-quoted string, so "${MAX_BATCH_SIZE}"
        // was sent to clients literally; a template literal interpolates it.
        return res
          .status(400)
          .json({ error: `Maximum ${MAX_BATCH_SIZE} files per batch` });
      }
      // Validate each file has required fields
      for (const file of files) {
        if (!file.contentType || !file.fileName || !file.fileSize) {
          return res.status(400).json({
            error: "Each file must have contentType, fileName, and fileSize",
          });
        }
      }
      // Generate one shared UUID for all files in this batch
      const sharedBaseKey = uuidv4();
      // Request all presigned URLs in parallel.
      const results = await Promise.all(
        files.map((f) =>
          s3Service.getPresignedUploadUrl(
            uploadType,
            f.contentType,
            f.fileName,
            f.fileSize,
            sharedBaseKey
          )
        )
      );
      logger.info("Batch presigned URLs generated", {
        userId: req.user.id,
        uploadType,
        count: results.length,
        baseKey: sharedBaseKey,
      });
      res.json({ uploads: results, baseKey: sharedBaseKey });
    } catch (error) {
      // Validation failures from s3Service are client errors.
      if (error.message.includes("Invalid")) {
        return res.status(400).json({ error: error.message });
      }
      next(error);
    }
  }
);
/**
 * POST /api/upload/confirm
 * Confirm that files have been uploaded to S3.
 *
 * Body: { keys: string[] }. Each key's existence is checked in S3 and
 * only the confirmed keys are echoed back.
 */
router.post(
  "/confirm",
  authenticateToken,
  requireS3Enabled,
  async (req, res, next) => {
    try {
      const { keys } = req.body;
      if (!keys || !Array.isArray(keys)) {
        return res.status(400).json({ error: "Missing keys array" });
      }
      if (keys.length === 0) {
        return res.status(400).json({ error: "No keys specified" });
      }
      // Check all keys in parallel.
      const results = await Promise.all(
        keys.map(async (key) => ({
          key,
          exists: await s3Service.verifyUpload(key),
        }))
      );
      const confirmed = results.filter((r) => r.exists).map((r) => r.key);
      logger.info("Upload confirmation", {
        userId: req.user.id,
        confirmed: confirmed.length,
        total: keys.length,
      });
      // Only return confirmed keys, not which ones failed (prevents file existence probing)
      res.json({ confirmed, total: keys.length });
    } catch (error) {
      next(error);
    }
  }
);
/**
 * GET /api/upload/signed-url/*key
 * Get a signed URL for accessing private content (messages, condition-checks).
 * The key is the full path after /signed-url/ (e.g., "messages/uuid.jpg").
 *
 * Only private prefixes are eligible, and ownership is verified via
 * S3OwnershipService before a short-lived download URL is issued.
 */
router.get(
  "/signed-url/*key",
  authenticateToken,
  requireS3Enabled,
  async (req, res, next) => {
    try {
      // Express wildcard params may be string or array - handle both
      let key = req.params.key;
      if (Array.isArray(key)) {
        key = key.join("/");
      }
      if (!key || typeof key !== "string") {
        return res.status(400).json({ error: "Invalid key parameter" });
      }
      // Decode URL-encoded characters (e.g., %2F -> /).
      // FIX: malformed percent-encoding makes decodeURIComponent throw
      // URIError, which previously reached next(error) as a 500; treat it
      // as a client error instead.
      try {
        key = decodeURIComponent(key);
      } catch {
        return res.status(400).json({ error: "Invalid key parameter" });
      }
      // Defense in depth: stored keys never contain "..", so reject any
      // traversal-looking input before the prefix/ownership checks.
      if (key.includes("..")) {
        return res.status(400).json({ error: "Invalid key parameter" });
      }
      // Only allow private folders to use signed URLs
      const isPrivate =
        key.startsWith("messages/") || key.startsWith("condition-checks/");
      if (!isPrivate) {
        return res
          .status(400)
          .json({ error: "Signed URLs only for private content" });
      }
      // Verify user is authorized to access this file
      const authResult = await S3OwnershipService.canAccessFile(
        key,
        req.user.id
      );
      if (!authResult.authorized) {
        logger.warn("Unauthorized signed URL request", {
          userId: req.user.id,
          key,
          reason: authResult.reason,
        });
        return res.status(403).json({ error: "Access denied" });
      }
      const url = await s3Service.getPresignedDownloadUrl(key);
      res.json({ url, expiresIn: 3600 });
    } catch (error) {
      next(error);
    }
  }
);
module.exports = router;

View File

@@ -1,87 +1,185 @@
const express = require('express');
const { User, UserAddress } = require('../models'); // Import from models/index.js to get models with associations
const { authenticateToken } = require('../middleware/auth');
const { uploadProfileImage } = require('../middleware/upload');
const fs = require('fs').promises;
const path = require('path');
const { authenticateToken, optionalAuth, requireAdmin } = require('../middleware/auth');
const { validateCoordinatesBody, validatePasswordChange, handleValidationErrors, sanitizeInput } = require('../middleware/validation');
const { requireStepUpAuth } = require('../middleware/stepUpAuth');
const { csrfProtection } = require('../middleware/csrf');
const logger = require('../utils/logger');
const userService = require('../services/UserService');
const emailServices = require('../services/email');
const { validateS3Keys } = require('../utils/s3KeyValidator');
const { IMAGE_LIMITS } = require('../config/imageLimits');
const router = express.Router();
router.get('/profile', authenticateToken, async (req, res) => {
// Allowed fields for profile update (prevents mass assignment)
const ALLOWED_PROFILE_FIELDS = [
  'firstName',
  'lastName',
  'email',
  'phone',
  'address1',
  'address2',
  'city',
  'state',
  'zipCode',
  'country',
  'imageFilename',
  'itemRequestNotificationRadius',
];
// Allowed fields for user address create/update (prevents mass assignment)
const ALLOWED_ADDRESS_FIELDS = [
  'address1',
  'address2',
  'city',
  'state',
  'zipCode',
  'country',
  'latitude',
  'longitude',
];
/**
 * Build a new object containing only the whitelisted profile fields
 * that are present (non-undefined) on the request body.
 */
function extractAllowedProfileFields(body) {
  return Object.fromEntries(
    ALLOWED_PROFILE_FIELDS
      .filter((field) => body[field] !== undefined)
      .map((field) => [field, body[field]])
  );
}
/**
 * Build a new object containing only the whitelisted address fields
 * that are present (non-undefined) on the request body.
 */
function extractAllowedAddressFields(body) {
  return Object.fromEntries(
    ALLOWED_ADDRESS_FIELDS
      .filter((field) => body[field] !== undefined)
      .map((field) => [field, body[field]])
  );
}
router.get('/profile', authenticateToken, async (req, res, next) => {
try {
const user = await User.findByPk(req.user.id, {
attributes: { exclude: ['password'] }
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("User profile fetched", {
userId: req.user.id
});
res.json(user);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("User profile fetch failed", {
error: error.message,
stack: error.stack,
userId: req.user.id
});
next(error);
}
});
// Address routes (must come before /:id route)
router.get('/addresses', authenticateToken, async (req, res) => {
router.get('/addresses', authenticateToken, async (req, res, next) => {
try {
const addresses = await UserAddress.findAll({
where: { userId: req.user.id },
order: [['isPrimary', 'DESC'], ['createdAt', 'ASC']]
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("User addresses fetched", {
userId: req.user.id,
addressCount: addresses.length
});
res.json(addresses);
} catch (error) {
res.status(500).json({ error: error.message });
}
});
router.post('/addresses', authenticateToken, async (req, res) => {
try {
const address = await UserAddress.create({
...req.body,
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("User addresses fetch failed", {
error: error.message,
stack: error.stack,
userId: req.user.id
});
next(error);
}
});
router.post('/addresses', authenticateToken, ...validateCoordinatesBody, handleValidationErrors, async (req, res, next) => {
try {
// Extract only allowed fields (prevents mass assignment)
const allowedData = extractAllowedAddressFields(req.body);
const address = await userService.createUserAddress(req.user.id, allowedData);
res.status(201).json(address);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("User address creation failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
addressData: logger.sanitize(req.body)
});
next(error);
}
});
router.put('/addresses/:id', authenticateToken, async (req, res) => {
router.put('/addresses/:id', authenticateToken, ...validateCoordinatesBody, handleValidationErrors, async (req, res, next) => {
try {
const address = await UserAddress.findByPk(req.params.id);
if (!address) {
return res.status(404).json({ error: 'Address not found' });
}
if (address.userId !== req.user.id) {
return res.status(403).json({ error: 'Unauthorized' });
}
await address.update(req.body);
// Extract only allowed fields (prevents mass assignment)
const allowedData = extractAllowedAddressFields(req.body);
const address = await userService.updateUserAddress(req.user.id, req.params.id, allowedData);
res.json(address);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("User address update failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
addressId: req.params.id
});
if (error.message === 'Address not found') {
return res.status(404).json({ error: 'Address not found' });
}
next(error);
}
});
router.delete('/addresses/:id', authenticateToken, async (req, res) => {
router.delete('/addresses/:id', authenticateToken, async (req, res, next) => {
try {
const address = await UserAddress.findByPk(req.params.id);
if (!address) {
return res.status(404).json({ error: 'Address not found' });
}
if (address.userId !== req.user.id) {
return res.status(403).json({ error: 'Unauthorized' });
}
await address.destroy();
await userService.deleteUserAddress(req.user.id, req.params.id);
res.status(204).send();
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("User address deletion failed", {
error: error.message,
stack: error.stack,
userId: req.user.id,
addressId: req.params.id
});
if (error.message === 'Address not found') {
return res.status(404).json({ error: 'Address not found' });
}
next(error);
}
});
// User availability routes (must come before /:id route)
router.get('/availability', authenticateToken, async (req, res) => {
router.get('/availability', authenticateToken, async (req, res, next) => {
try {
const user = await User.findByPk(req.user.id, {
attributes: ['defaultAvailableAfter', 'defaultAvailableBefore', 'defaultSpecifyTimesPerDay', 'defaultWeeklyTimes']
@@ -93,11 +191,11 @@ router.get('/availability', authenticateToken, async (req, res) => {
weeklyTimes: user.defaultWeeklyTimes
});
} catch (error) {
res.status(500).json({ error: error.message });
next(error);
}
});
router.put('/availability', authenticateToken, async (req, res) => {
router.put('/availability', authenticateToken, async (req, res, next) => {
try {
const { generalAvailableAfter, generalAvailableBefore, specifyTimesPerDay, weeklyTimes } = req.body;
@@ -112,114 +210,257 @@ router.put('/availability', authenticateToken, async (req, res) => {
res.json({ message: 'Availability updated successfully' });
} catch (error) {
res.status(500).json({ error: error.message });
next(error);
}
});
router.get('/:id', async (req, res) => {
router.get('/:id', optionalAuth, async (req, res, next) => {
try {
const isAdmin = req.user?.role === 'admin';
// Base attributes to exclude
const excludedAttributes = ['password', 'email', 'phone', 'address', 'verificationToken', 'passwordResetToken'];
// If not admin, also exclude ban-related fields
if (!isAdmin) {
excludedAttributes.push('isBanned', 'bannedAt', 'bannedBy', 'banReason');
}
const user = await User.findByPk(req.params.id, {
attributes: { exclude: ['password', 'email', 'phone', 'address'] }
attributes: { exclude: excludedAttributes }
});
if (!user) {
return res.status(404).json({ error: 'User not found' });
}
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("Public user profile fetched", {
requestedUserId: req.params.id,
viewerIsAdmin: isAdmin
});
res.json(user);
} catch (error) {
res.status(500).json({ error: error.message });
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Public user profile fetch failed", {
error: error.message,
stack: error.stack,
requestedUserId: req.params.id
});
next(error);
}
});
router.put('/profile', authenticateToken, async (req, res) => {
router.put('/profile', authenticateToken, async (req, res, next) => {
try {
const {
firstName,
lastName,
email,
phone,
address1,
address2,
city,
state,
zipCode,
country
} = req.body;
// Build update object, excluding empty email
const updateData = {
firstName,
lastName,
phone,
address1,
address2,
city,
state,
zipCode,
country
};
// Only include email if it's not empty
if (email && email.trim() !== '') {
updateData.email = email;
}
await req.user.update(updateData);
// Extract only allowed fields (prevents mass assignment)
const allowedData = extractAllowedProfileFields(req.body);
const updatedUser = await User.findByPk(req.user.id, {
attributes: { exclude: ['password'] }
});
// Validate imageFilename if provided
if (allowedData.imageFilename !== undefined && allowedData.imageFilename !== null) {
const keyValidation = validateS3Keys([allowedData.imageFilename], 'profiles', { maxKeys: IMAGE_LIMITS.profile });
if (!keyValidation.valid) {
return res.status(400).json({
error: keyValidation.error,
details: keyValidation.invalidKeys
});
}
}
// Use UserService to handle update and email notification
const updatedUser = await userService.updateProfile(req.user.id, allowedData);
res.json(updatedUser);
} catch (error) {
console.error('Profile update error:', error);
res.status(500).json({
error: error.message,
details: error.errors ? error.errors.map(e => ({ field: e.path, message: e.message })) : undefined
});
logger.error('Profile update error', { error });
next(error);
}
});
// Upload profile image endpoint
router.post('/profile/image', authenticateToken, (req, res) => {
uploadProfileImage(req, res, async (err) => {
if (err) {
console.error('Upload error:', err);
return res.status(400).json({ error: err.message });
// Admin: Ban a user
router.post('/admin/:id/ban', authenticateToken, requireAdmin, async (req, res, next) => {
try {
const { reason } = req.body;
const targetUserId = req.params.id;
// Validate reason is provided
if (!reason || !reason.trim()) {
return res.status(400).json({ error: "Ban reason is required" });
}
if (!req.file) {
return res.status(400).json({ error: 'No file uploaded' });
// Prevent banning yourself
if (targetUserId === req.user.id) {
return res.status(400).json({ error: "You cannot ban yourself" });
}
const targetUser = await User.findByPk(targetUserId);
if (!targetUser) {
return res.status(404).json({ error: "User not found" });
}
// Prevent banning other admins
if (targetUser.role === 'admin') {
return res.status(403).json({ error: "Cannot ban admin users" });
}
// Check if already banned
if (targetUser.isBanned) {
return res.status(400).json({ error: "User is already banned" });
}
// Ban the user (this also invalidates sessions via jwtVersion increment)
await targetUser.banUser(req.user.id, reason.trim());
// Send ban notification email
try {
// Delete old profile image if exists
const user = await User.findByPk(req.user.id);
if (user.profileImage) {
const oldImagePath = path.join(__dirname, '../uploads/profiles', user.profileImage);
try {
await fs.unlink(oldImagePath);
} catch (unlinkErr) {
console.error('Error deleting old image:', unlinkErr);
}
}
// Update user with new image filename
await user.update({
profileImage: req.file.filename
const emailServices = require("../services/email");
await emailServices.userEngagement.sendUserBannedNotification(
targetUser,
req.user,
reason.trim()
);
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("User ban notification email sent", {
bannedUserId: targetUserId,
adminId: req.user.id
});
res.json({
message: 'Profile image uploaded successfully',
filename: req.file.filename,
imageUrl: `/uploads/profiles/${req.file.filename}`
} catch (emailError) {
// Log but don't fail the ban operation
const reqLogger = logger.withRequestId(req.id);
reqLogger.error('Failed to send user ban notification email', {
error: emailError.message,
stack: emailError.stack,
bannedUserId: targetUserId,
adminId: req.user.id
});
} catch (error) {
console.error('Database update error:', error);
res.status(500).json({ error: 'Failed to update profile image' });
}
});
const reqLogger = logger.withRequestId(req.id);
reqLogger.info("User banned by admin", {
targetUserId,
adminId: req.user.id,
reason: reason.trim()
});
// Return updated user data (excluding sensitive fields)
const updatedUser = await User.findByPk(targetUserId, {
attributes: { exclude: ['password', 'verificationToken', 'passwordResetToken'] }
});
res.json({
message: "User has been banned successfully",
user: updatedUser
});
} catch (error) {
const reqLogger = logger.withRequestId(req.id);
reqLogger.error("Admin ban user failed", {
error: error.message,
stack: error.stack,
targetUserId: req.params.id,
adminId: req.user.id
});
next(error);
}
});
// Change password (requires step-up auth if 2FA is enabled)
// Verifies the current password before resetting; Google-linked accounts
// without a local password are rejected. A notification email is sent
// best-effort after the change succeeds.
router.put('/password', authenticateToken, csrfProtection, requireStepUpAuth('password_change'), sanitizeInput, validatePasswordChange, async (req, res, next) => {
  try {
    const { currentPassword, newPassword } = req.body;
    const user = await User.findByPk(req.user.id);
    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }
    // Google OAuth users can't change password
    if (user.authProvider === 'google' && !user.password) {
      return res.status(400).json({
        error: 'Cannot change password for accounts linked with Google'
      });
    }
    // Verify current password
    const isValid = await user.comparePassword(currentPassword);
    if (!isValid) {
      return res.status(400).json({ error: 'Current password is incorrect' });
    }
    // Update password (this increments jwtVersion to invalidate other sessions)
    await user.resetPassword(newPassword);
    // Send password changed notification
    try {
      await emailServices.auth.sendPasswordChangedEmail(user);
    } catch (emailError) {
      // Best-effort notification; the password change already succeeded.
      const reqLogger = logger.withRequestId(req.id);
      reqLogger.error('Failed to send password changed email', {
        error: emailError.message,
        userId: req.user.id
      });
    }
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.info('Password changed successfully', { userId: req.user.id });
    res.json({ message: 'Password changed successfully' });
  } catch (error) {
    const reqLogger = logger.withRequestId(req.id);
    reqLogger.error('Password change failed', {
      error: error.message,
      stack: error.stack,
      userId: req.user.id
    });
    next(error);
  }
});
// Admin: Unban a user
// Requires an authenticated admin; returns the refreshed user record
// (minus sensitive columns) on success.
router.post('/admin/:id/unban', authenticateToken, requireAdmin, async (req, res, next) => {
  try {
    const userId = req.params.id;
    const user = await User.findByPk(userId);
    // Guard clauses: the target must exist and currently be banned.
    if (!user) {
      return res.status(404).json({ error: "User not found" });
    }
    if (!user.isBanned) {
      return res.status(400).json({ error: "User is not banned" });
    }
    await user.unbanUser();
    logger.withRequestId(req.id).info("User unbanned by admin", {
      targetUserId: userId,
      adminId: req.user.id
    });
    // Re-fetch without sensitive columns for the response payload.
    const updatedUser = await User.findByPk(userId, {
      attributes: { exclude: ['password', 'verificationToken', 'passwordResetToken'] }
    });
    res.json({
      message: "User has been unbanned successfully",
      user: updatedUser
    });
  } catch (error) {
    logger.withRequestId(req.id).error("Admin unban user failed", {
      error: error.message,
      stack: error.stack,
      targetUserId: req.params.id,
      adminId: req.user.id
    });
    next(error);
  }
});
module.exports = router;

View File

@@ -0,0 +1,487 @@
// Load environment config
const env = process.env.NODE_ENV;
const envFile = `.env.${env}`;
require("dotenv").config({ path: envFile });
const crypto = require("crypto");
const fs = require("fs");
const path = require("path");
const { AlphaInvitation, User, sequelize } = require("../models");
const emailServices = require("../services/email");
const logger = require("../utils/logger");
// Generate a unique invitation code of the form ALPHA-XXXXXXXX.
// The alphabet deliberately omits "O" and "0" to avoid visual ambiguity.
// Loops until a code not already present in the database is found.
async function generateUniqueAlphaCode() {
  const ALPHABET = "ABCDEFGHIJKLMNPQRSTUVWXYZ123456789";
  let candidate;
  let taken;
  do {
    // Build an 8-character random suffix using a CSPRNG.
    const suffix = Array.from(
      { length: 8 },
      () => ALPHABET[crypto.randomInt(0, ALPHABET.length)],
    ).join("");
    candidate = `ALPHA-${suffix}`;
    // Retry on the (unlikely) chance of a collision with an existing code.
    const existing = await AlphaInvitation.findOne({
      where: { code: candidate },
    });
    taken = Boolean(existing);
  } while (taken);
  return candidate;
}
// Canonicalize an email address for storage and lookup: strip surrounding
// whitespace and lower-case the whole address.
function normalizeEmail(email) {
  const trimmed = email.trim();
  return trimmed.toLowerCase();
}
// Create a pending alpha invitation for an email address and send the
// invitation email. Returns the created invitation, null when an
// invitation already exists for the address, and rethrows on DB errors.
async function addInvitation(email, notes = "") {
  try {
    email = normalizeEmail(email);
    // Check if invitation already exists for this email (one per address)
    const existing = await AlphaInvitation.findOne({ where: { email } });
    if (existing) {
      console.log(`\n❌ Invitation already exists for ${email}`);
      console.log(` Code: ${existing.code}`);
      console.log(` Status: ${existing.status}`);
      console.log(` Created: ${existing.createdAt}`);
      return null;
    }
    // Generate unique code
    const code = await generateUniqueAlphaCode();
    // Create invitation
    const invitation = await AlphaInvitation.create({
      code,
      email,
      status: "pending",
      notes,
    });
    // Send invitation email; a delivery failure is reported but does not
    // roll back the invitation record (it can be resent later).
    let emailSent = false;
    try {
      await emailServices.alphaInvitation.sendAlphaInvitation(email, code);
      emailSent = true;
    } catch (emailError) {
      console.log(`\n⚠️ Warning: Failed to send email to ${email}`);
      console.log(` Error: ${emailError.message}`);
      console.log(` Invitation created but email not sent.`);
    }
    console.log(`\n✅ Alpha invitation created successfully!`);
    console.log(` Email: ${email}`);
    console.log(` Code: ${code}`);
    console.log(` Email sent: ${emailSent ? "Yes" : "No"}`);
    if (notes) {
      console.log(` Notes: ${notes}`);
    }
    return invitation;
  } catch (error) {
    console.error(`\n❌ Error creating invitation: ${error.message}`);
    throw error;
  }
}
// Resend an existing invitation's email. Accepts either the invitation
// code (ALPHA-XXXXXXXX) or the invitee's email address. Returns the
// invitation on success, null when not found or revoked; rethrows email
// delivery errors.
async function resendInvitation(emailOrCode) {
  try {
    const input = emailOrCode.trim();
    let invitation;
    // Try to find by code first (if it looks like a code), otherwise by email
    if (input.toUpperCase().startsWith("ALPHA-")) {
      invitation = await AlphaInvitation.findOne({
        where: { code: input.toUpperCase() },
      });
    } else {
      invitation = await AlphaInvitation.findOne({
        where: { email: normalizeEmail(input) },
      });
    }
    if (!invitation) {
      console.log(`\n❌ Invitation not found: ${input}`);
      return null;
    }
    // Check if revoked — revoked invitations may not be resent
    if (invitation.status === "revoked") {
      console.log(`\n❌ Cannot resend revoked invitation`);
      console.log(` Code: ${invitation.code}`);
      console.log(` Email: ${invitation.email}`);
      return null;
    }
    // Warn if already used (resend proceeds anyway)
    if (invitation.usedBy) {
      console.log(`\n⚠️ Warning: This invitation has already been used`);
      console.log(` Used by user ID: ${invitation.usedBy}`);
      console.log(` Continuing with resend...\n`);
    }
    // Resend the email
    try {
      await emailServices.alphaInvitation.sendAlphaInvitation(
        invitation.email,
        invitation.code,
      );
      console.log(`\n✅ Alpha invitation resent successfully!`);
      console.log(` Email: ${invitation.email}`);
      console.log(` Code: ${invitation.code}`);
      console.log(` Status: ${invitation.status}`);
      return invitation;
    } catch (emailError) {
      console.error(`\n❌ Error sending email: ${emailError.message}`);
      throw emailError;
    }
  } catch (error) {
    console.error(`\n❌ Error resending invitation: ${error.message}`);
    throw error;
  }
}
// List invitations as a fixed-width console table, optionally filtered.
// filter: "all" (default) | "pending" | "active" | "revoked" | "unused".
// Returns the fetched invitation rows.
async function listInvitations(filter = "all") {
  try {
    // Translate the filter keyword into a Sequelize where-clause;
    // unknown filters fall through to an empty clause ("all").
    let where = {};
    if (filter === "pending") {
      where.status = "pending";
      where.usedBy = null;
    } else if (filter === "active") {
      where.status = "active";
    } else if (filter === "revoked") {
      where.status = "revoked";
    } else if (filter === "unused") {
      where.usedBy = null;
    }
    // Join the redeeming user (if any) so the table can show who used it.
    const invitations = await AlphaInvitation.findAll({
      where,
      include: [
        {
          model: User,
          as: "user",
          attributes: ["id", "email", "firstName", "lastName"],
        },
      ],
      order: [["createdAt", "DESC"]],
    });
    console.log(
      `\n📋 Alpha Invitations (${invitations.length} total, filter: ${filter})\n`,
    );
    console.log("─".repeat(100));
    console.log(
      "CODE".padEnd(15) +
        "EMAIL".padEnd(30) +
        "STATUS".padEnd(10) +
        "USED BY".padEnd(25) +
        "CREATED",
    );
    console.log("─".repeat(100));
    if (invitations.length === 0) {
      console.log("No invitations found.");
    } else {
      invitations.forEach((inv) => {
        const usedBy = inv.user
          ? `${inv.user.firstName} ${inv.user.lastName}`
          : "-";
        const created = new Date(inv.createdAt).toLocaleDateString();
        console.log(
          inv.code.padEnd(15) +
            inv.email.padEnd(30) +
            inv.status.padEnd(10) +
            usedBy.padEnd(25) +
            created,
        );
      });
    }
    console.log("─".repeat(100));
    // Summary counts computed over the filtered result set
    const stats = {
      total: invitations.length,
      pending: invitations.filter((i) => i.status === "pending" && !i.usedBy)
        .length,
      active: invitations.filter((i) => i.status === "active" && i.usedBy)
        .length,
      revoked: invitations.filter((i) => i.status === "revoked").length,
    };
    console.log(
      `\nSummary: ${stats.pending} pending | ${stats.active} active | ${stats.revoked} revoked\n`,
    );
    return invitations;
  } catch (error) {
    console.error(`\n❌ Error listing invitations: ${error.message}`);
    throw error;
  }
}
// Mark an invitation code as revoked so it can no longer be redeemed.
// Returns the invitation (even when already revoked) or null if the
// code is unknown; rethrows database errors.
async function revokeInvitation(code) {
  try {
    const lookupCode = code.trim().toUpperCase();
    const invitation = await AlphaInvitation.findOne({
      where: { code: lookupCode },
    });
    if (!invitation) {
      console.log(`\n❌ Invitation not found with code: ${lookupCode}`);
      return null;
    }
    // Idempotent: revoking twice is reported but not an error.
    if (invitation.status === "revoked") {
      console.log(`\n⚠️ Invitation is already revoked: ${lookupCode}`);
      return invitation;
    }
    await invitation.update({ status: "revoked" });
    console.log(`\n✅ Invitation revoked successfully!`);
    console.log(` Code: ${lookupCode}`);
    console.log(` Email: ${invitation.email}`);
    return invitation;
  } catch (error) {
    console.error(`\n❌ Error revoking invitation: ${error.message}`);
    throw error;
  }
}
// Restore a previously revoked invitation. The status is set back to
// "active" if the code was already redeemed (usedBy set), otherwise
// "pending". Returns the invitation, or null when the code is unknown.
async function restoreInvitation(code) {
  try {
    code = code.trim().toUpperCase();
    const invitation = await AlphaInvitation.findOne({ where: { code } });
    if (!invitation) {
      console.log(`\n❌ Invitation not found with code: ${code}`);
      return null;
    }
    // Only revoked invitations can be restored; otherwise report and return.
    if (invitation.status !== "revoked") {
      console.log(
        `\n⚠️ Invitation is not revoked (current status: ${invitation.status})`,
      );
      console.log(` Code: ${code}`);
      console.log(` Email: ${invitation.email}`);
      return invitation;
    }
    // Determine the appropriate status to restore to
    const newStatus = invitation.usedBy ? "active" : "pending";
    await invitation.update({ status: newStatus });
    console.log(`\n✅ Invitation restored successfully!`);
    console.log(` Code: ${code}`);
    console.log(` Email: ${invitation.email}`);
    console.log(
      ` Status: ${newStatus} (${invitation.usedBy ? "was previously used" : "never used"})`,
    );
    return invitation;
  } catch (error) {
    console.error(`\n❌ Error restoring invitation: ${error.message}`);
    throw error;
  }
}
// Bulk import invitations from a CSV file with lines of "email,notes".
// Rows are processed best-effort: a failure on one row is reported and
// counted, and the import continues with the next row.
async function bulkImport(csvPath) {
  try {
    if (!fs.existsSync(csvPath)) {
      console.log(`\n❌ File not found: ${csvPath}`);
      return;
    }
    const csvContent = fs.readFileSync(csvPath, "utf-8");
    // Splitting on "\n" also copes with CRLF files because every field is
    // trimmed below; blank/whitespace-only lines are dropped here.
    const lines = csvContent.split("\n").filter((line) => line.trim());
    // Guard: an empty (or whitespace-only) file would otherwise crash on
    // lines[0].toLowerCase() below.
    if (lines.length === 0) {
      console.log(`\n❌ File is empty: ${csvPath}`);
      return;
    }
    // Skip header if present (detected by an "email" column name)
    const hasHeader = lines[0].toLowerCase().includes("email");
    const dataLines = hasHeader ? lines.slice(1) : lines;
    console.log(
      `\n📥 Importing ${dataLines.length} invitations from ${csvPath}...\n`,
    );
    let successCount = 0;
    let failCount = 0;
    for (const line of dataLines) {
      const [email, notes] = line.split(",").map((s) => s.trim());
      if (!email) {
        console.log(`⚠️ Skipping empty line`);
        failCount++;
        continue;
      }
      try {
        await addInvitation(email, notes || "");
        successCount++;
      } catch (error) {
        console.log(`❌ Failed to add ${email}: ${error.message}`);
        failCount++;
      }
    }
    console.log(`\n✅ Bulk import completed!`);
    console.log(` Success: ${successCount}`);
    console.log(` Failed: ${failCount}`);
    console.log(` Total: ${dataLines.length}\n`);
  } catch (error) {
    console.error(`\n❌ Error during bulk import: ${error.message}`);
    throw error;
  }
}
// Main CLI handler: parses process.argv and dispatches to the command
// implementations above (add/list/revoke/restore/resend/bulk).
// Exits 0 on success, 1 on usage error or any failure.
async function main() {
  const args = process.argv.slice(2);
  const command = args[0];
  try {
    // Verify database connection before doing any work
    await sequelize.authenticate();
    if (!command || command === "help") {
      console.log(`
Alpha Invitation Management CLI
Usage:
node scripts/manageAlphaInvitations.js <command> [options]
Commands:
add <email> [notes] Add a new alpha invitation
list [filter] List all invitations (filter: all|pending|active|revoked|unused)
revoke <code> Revoke an invitation code
restore <code> Restore a revoked invitation
resend <email|code> Resend an invitation email
bulk <csvPath> Bulk import invitations from CSV file
Examples:
node scripts/manageAlphaInvitations.js add alice@example.com "Product team"
node scripts/manageAlphaInvitations.js list pending
node scripts/manageAlphaInvitations.js revoke ALPHA-ABC12345
node scripts/manageAlphaInvitations.js restore ALPHA-ABC12345
node scripts/manageAlphaInvitations.js resend alice@example.com
node scripts/manageAlphaInvitations.js bulk invitations.csv
CSV Format:
email,notes
alice@example.com,Product team
bob@example.com,Early adopter
`);
    } else if (command === "add") {
      const email = args[1];
      // Remaining args are joined into a free-form notes string
      const notes = args.slice(2).join(" ");
      if (!email) {
        console.log("\n❌ Error: Email is required");
        console.log(
          "Usage: node scripts/manageAlphaInvitations.js add <email> [notes]\n",
        );
        process.exit(1);
      }
      await addInvitation(email, notes);
    } else if (command === "list") {
      const filter = args[1] || "all";
      await listInvitations(filter);
    } else if (command === "revoke") {
      const code = args[1];
      if (!code) {
        console.log("\n❌ Error: Code is required");
        console.log(
          "Usage: node scripts/manageAlphaInvitations.js revoke <code>\n",
        );
        process.exit(1);
      }
      await revokeInvitation(code);
    } else if (command === "resend") {
      const emailOrCode = args[1];
      if (!emailOrCode) {
        console.log("\n❌ Error: Email or code is required");
        console.log(
          "Usage: node scripts/manageAlphaInvitations.js resend <email|code>\n",
        );
        process.exit(1);
      }
      await resendInvitation(emailOrCode);
    } else if (command === "restore") {
      const code = args[1];
      if (!code) {
        console.log("\n❌ Error: Code is required");
        console.log(
          "Usage: node scripts/manageAlphaInvitations.js restore <code>\n",
        );
        process.exit(1);
      }
      await restoreInvitation(code);
    } else if (command === "bulk") {
      const csvPath = args[1];
      if (!csvPath) {
        console.log("\n❌ Error: CSV path is required");
        console.log(
          "Usage: node scripts/manageAlphaInvitations.js bulk <csvPath>\n",
        );
        process.exit(1);
      }
      // Resolve relative paths against the current working directory
      await bulkImport(path.resolve(csvPath));
    } else {
      console.log(`\n❌ Unknown command: ${command}`);
      console.log(
        "Run 'node scripts/manageAlphaInvitations.js help' for usage information\n",
      );
      process.exit(1);
    }
    process.exit(0);
  } catch (error) {
    console.error(`\n❌ Fatal error: ${error.message}`);
    console.error(error.stack);
    process.exit(1);
  }
}
// Run if called directly
if (require.main === module) {
main();
}
module.exports = {
addInvitation,
listInvitations,
revokeInvitation,
restoreInvitation,
resendInvitation,
bulkImport,
generateUniqueAlphaCode,
};

View File

@@ -0,0 +1,225 @@
#!/usr/bin/env node
/**
* Migration Test Script
*
* Tests that all migrations can run successfully up and down.
* This script:
* 1. Connects to a test database
* 2. Runs all migrations down (clean slate)
* 3. Runs all migrations up
* 4. Verifies tables were created
* 5. Runs all migrations down (test rollback)
* 6. Runs all migrations up again (test idempotency)
* 7. Reports results
*
* Usage:
* NODE_ENV=test npm run test:migrations
*
* Requires:
* - Test database to exist (create with: npm run db:create)
* - Environment variables set for test database connection
*/
const { execSync } = require("child_process");
const path = require("path");
// Colors for console output
const colors = {
reset: "\x1b[0m",
green: "\x1b[32m",
red: "\x1b[31m",
yellow: "\x1b[33m",
blue: "\x1b[34m",
};
// Print a message wrapped in the given ANSI color code, resetting the
// terminal color afterwards. Defaults to no coloring (reset).
function log(message, color = colors.reset) {
  console.log(color + message + colors.reset);
}
// Print a blue "[n] message" banner marking the start of a test step.
function logStep(step, message) {
  log(`\n[${step}] ${message}`, colors.blue);
}
// Green success line.
function logSuccess(message) {
  log(`${message}`, colors.green);
}
// Red failure line.
function logError(message) {
  log(`${message}`, colors.red);
}
// Yellow warning line.
function logWarning(message) {
  log(`${message}`, colors.yellow);
}
// Run a shell command synchronously from the backend root, echoing any
// captured stdout. Returns { success: true, output } on success or
// { success: false, error } when the command exits non-zero.
function runCommand(command, description) {
  // Execute from the project root (one level up from this script),
  // defaulting NODE_ENV to "test" for child processes.
  const execOptions = {
    cwd: path.resolve(__dirname, ".."),
    encoding: "utf-8",
    stdio: ["pipe", "pipe", "pipe"],
    env: { ...process.env, NODE_ENV: process.env.NODE_ENV || "test" },
  };
  try {
    log(` Running: ${command}`, colors.yellow);
    const output = execSync(command, execOptions);
    // Only echo command output when there is something to show.
    if (output.trim()) {
      console.log(output);
    }
    logSuccess(description);
    return { success: true, output };
  } catch (error) {
    logError(`${description} failed`);
    // execSync errors carry captured stderr when stdio is piped.
    console.error(error.stderr || error.message);
    return { success: false, error };
  }
}
/**
 * Run the full migration test sequence: status check, undo-all (clean
 * slate), migrate up, verify, rollback, re-migrate (idempotency), and a
 * final status check. Records each step's outcome and exits non-zero on
 * the first unrecoverable failure.
 */
async function main() {
  log("\n========================================", colors.blue);
  log(" Migration Test Suite", colors.blue);
  log("========================================\n", colors.blue);
  const env = process.env.NODE_ENV;
  // Safety checks - only allow running against test database
  if (!env) {
    logError("NODE_ENV is not set!");
    logError("This script will DELETE ALL DATA in the target database.");
    logError("You must explicitly set NODE_ENV=test to run this script.");
    log("\nUsage: NODE_ENV=test npm run test:migrations\n");
    process.exit(1);
  }
  if (env.toLowerCase() !== "test") {
    // Non-"test" environments get a loud warning but are not blocked.
    logWarning(`Unrecognized NODE_ENV: ${env}`);
    logWarning("This script will DELETE ALL DATA in the target database.");
    logWarning("Recommended: NODE_ENV=test npm run test:migrations");
    log("");
  }
  log(`Environment: ${env}`);
  const results = {
    steps: [],
    passed: 0,
    failed: 0,
  };
  // Track each step's outcome for the final summary table.
  function recordResult(step, success) {
    results.steps.push({ step, success });
    if (success) {
      results.passed++;
    } else {
      results.failed++;
    }
  }
  // Step 1: Check migration status
  logStep(1, "Checking current migration status");
  const statusResult = runCommand(
    "npx sequelize-cli db:migrate:status",
    "Migration status check"
  );
  recordResult("Status check", statusResult.success);
  // Step 2: Undo all migrations (clean slate)
  logStep(2, "Undoing all migrations (clean slate)");
  const undoAllResult = runCommand(
    "npx sequelize-cli db:migrate:undo:all",
    "Undo all migrations"
  );
  recordResult("Undo all migrations", undoAllResult.success);
  if (!undoAllResult.success) {
    // Not fatal: an empty database makes undo fail legitimately.
    logWarning("Undo failed - database may already be empty, continuing...");
  }
  // Step 3: Run all migrations up
  logStep(3, "Running all migrations up");
  const migrateUpResult = runCommand(
    "npx sequelize-cli db:migrate",
    "Run all migrations"
  );
  recordResult("Migrate up", migrateUpResult.success);
  if (!migrateUpResult.success) {
    logError("Migration up failed - cannot continue");
    printSummary(results);
    process.exit(1);
  }
  // Step 4: Verify migration status shows all executed
  logStep(4, "Verifying all migrations executed");
  const verifyResult = runCommand(
    "npx sequelize-cli db:migrate:status",
    "Verify migration status"
  );
  recordResult("Verify status", verifyResult.success);
  // Step 5: Test rollback - undo all migrations
  logStep(5, "Testing rollback - undoing all migrations");
  const rollbackResult = runCommand(
    "npx sequelize-cli db:migrate:undo:all",
    "Rollback all migrations"
  );
  recordResult("Rollback", rollbackResult.success);
  if (!rollbackResult.success) {
    logError("Rollback failed - down migrations have issues");
    printSummary(results);
    process.exit(1);
  }
  // Step 6: Test idempotency - run migrations up again
  logStep(6, "Testing idempotency - running migrations up again");
  const idempotencyResult = runCommand(
    "npx sequelize-cli db:migrate",
    "Re-run all migrations"
  );
  recordResult("Idempotency test", idempotencyResult.success);
  if (!idempotencyResult.success) {
    logError("Idempotency test failed - migrations may not be repeatable");
    printSummary(results);
    process.exit(1);
  }
  // Step 7: Final status check
  logStep(7, "Final migration status");
  const finalStatusResult = runCommand(
    "npx sequelize-cli db:migrate:status",
    "Final status check"
  );
  recordResult("Final status", finalStatusResult.success);
  printSummary(results);
  if (results.failed > 0) {
    process.exit(1);
  }
  log("\nMigration tests completed successfully!", colors.green);
  process.exit(0);
}
// Print the final pass/fail table for all recorded steps, followed by
// aggregate pass/fail counts.
function printSummary(results) {
  log("\n========================================", colors.blue);
  log(" Test Summary", colors.blue);
  log("========================================\n", colors.blue);
  for (const { step, success } of results.steps) {
    // Green for passed steps, red for failed ones.
    (success ? logSuccess : logError)(step);
  }
  log(`\nTotal: ${results.passed} passed, ${results.failed} failed`);
}
main().catch((error) => {
logError("Unexpected error:");
console.error(error);
process.exit(1);
});

View File

@@ -1,67 +1,243 @@
// Load environment-specific config
const env = process.env.NODE_ENV || "dev";
const env = process.env.NODE_ENV;
const envFile = `.env.${env}`;
require("dotenv").config({
path: envFile,
});
const express = require("express");
const http = require("http");
const { Server } = require("socket.io");
const cors = require("cors");
const bodyParser = require("body-parser");
const path = require("path");
const helmet = require("helmet");
const { sequelize } = require("./models"); // Import from models/index.js to ensure associations are loaded
const { cookieParser } = require("./middleware/csrf");
const logger = require("./utils/logger");
const morgan = require("morgan");
const authRoutes = require("./routes/auth");
const phoneAuthRoutes = require("./routes/phone-auth");
const { router: alphaRoutes } = require("./routes/alpha");
const userRoutes = require("./routes/users");
const itemRoutes = require("./routes/items");
const rentalRoutes = require("./routes/rentals");
const messageRoutes = require("./routes/messages");
const betaRoutes = require("./routes/beta");
const itemRequestRoutes = require("./routes/itemRequests");
const forumRoutes = require("./routes/forum");
const stripeRoutes = require("./routes/stripe");
const stripeWebhookRoutes = require("./routes/stripeWebhooks");
const mapsRoutes = require("./routes/maps");
const conditionCheckRoutes = require("./routes/conditionChecks");
const feedbackRoutes = require("./routes/feedback");
const uploadRoutes = require("./routes/upload");
const healthRoutes = require("./routes/health");
const twoFactorRoutes = require("./routes/twoFactor");
const PayoutProcessor = require("./jobs/payoutProcessor");
const emailServices = require("./services/email");
const s3Service = require("./services/s3Service");
// Socket.io setup
const { authenticateSocket } = require("./sockets/socketAuth");
const { initializeMessageSocket } = require("./sockets/messageSocket");
const app = express();
const server = http.createServer(app);
app.use(cors());
app.use(bodyParser.json({ limit: "5mb" }));
app.use(bodyParser.urlencoded({ extended: true, limit: "5mb" }));
// Serve static files from uploads directory
app.use("/uploads", express.static(path.join(__dirname, "uploads")));
// Beta verification route (doesn't require auth)
app.use("/api/beta", betaRoutes);
app.use("/api/auth", authRoutes);
app.use("/api/auth/phone", phoneAuthRoutes);
app.use("/api/users", userRoutes);
app.use("/api/items", itemRoutes);
app.use("/api/rentals", rentalRoutes);
app.use("/api/messages", messageRoutes);
app.use("/api/item-requests", itemRequestRoutes);
app.use("/api/stripe", stripeRoutes);
app.get("/", (req, res) => {
res.json({ message: "CommunityRentals.App API is running!" });
// Initialize Socket.io with CORS
const io = new Server(server, {
cors: {
origin: process.env.FRONTEND_URL,
credentials: true,
methods: ["GET", "POST"],
},
});
const PORT = process.env.PORT || 5000;
// Apply socket authentication middleware
io.use(authenticateSocket);
// Initialize message socket handlers
initializeMessageSocket(io);
// Store io instance in app for use in routes
app.set("io", io);
// Import security middleware
const {
enforceHTTPS,
securityHeaders,
addRequestId,
sanitizeError,
} = require("./middleware/security");
const { sanitizeInput } = require("./middleware/validation");
const { generalLimiter } = require("./middleware/rateLimiter");
const errorLogger = require("./middleware/errorLogger");
const apiLogger = require("./middleware/apiLogger");
const { requireAlphaAccess } = require("./middleware/alphaAccess");
// Apply security middleware
app.use(enforceHTTPS);
app.use(addRequestId);
app.use(securityHeaders);
// Security headers with Helmet
app.use(
helmet({
contentSecurityPolicy: {
directives: {
defaultSrc: ["'self'"],
styleSrc: ["'self'", "https://cdn.jsdelivr.net"],
fontSrc: ["'self'"],
scriptSrc: ["'self'", "https://accounts.google.com"],
imgSrc: ["'self'"],
connectSrc: ["'self'"],
frameSrc: ["'self'", "https://accounts.google.com"],
},
},
}),
);
// Cookie parser for CSRF
app.use(cookieParser);
// HTTP request logging
app.use(morgan("combined", { stream: logger.stream }));
// API request/response logging
app.use("/api/", apiLogger);
// CORS with security settings (must come BEFORE rate limiter to ensure headers on all responses)
app.use(
cors({
origin: process.env.FRONTEND_URL,
credentials: true,
optionsSuccessStatus: 200,
exposedHeaders: ["X-CSRF-Token"],
}),
);
// General rate limiting for all routes
app.use("/api/", generalLimiter);
// Body parsing with size limits
app.use(
bodyParser.json({
limit: "1mb",
verify: (req, res, buf) => {
// Store raw body for webhook verification
req.rawBody = buf;
},
}),
);
app.use(
bodyParser.urlencoded({
extended: true,
limit: "1mb",
parameterLimit: 100, // Limit number of parameters
}),
);
// Apply input sanitization to all API routes (XSS prevention)
app.use("/api/", sanitizeInput);
// Health check endpoints (no auth, no rate limiting)
app.use("/health", healthRoutes);
// Stripe webhooks (no auth, uses signature verification instead)
app.use("/api/stripe/webhooks", stripeWebhookRoutes);
// Root endpoint
app.get("/", (req, res) => {
res.json({ message: "Village Share API is running!" });
});
// Public routes (no alpha access required)
app.use("/api/alpha", alphaRoutes);
app.use("/api/auth", authRoutes); // Auth has its own alpha checks in registration
app.use("/api/2fa", twoFactorRoutes); // 2FA routes require authentication (handled in router)
// Protected routes (require alpha access)
app.use("/api/users", requireAlphaAccess, userRoutes);
app.use("/api/items", requireAlphaAccess, itemRoutes);
app.use("/api/rentals", requireAlphaAccess, rentalRoutes);
app.use("/api/messages", requireAlphaAccess, messageRoutes);
app.use("/api/forum", requireAlphaAccess, forumRoutes);
app.use("/api/stripe", requireAlphaAccess, stripeRoutes);
app.use("/api/maps", requireAlphaAccess, mapsRoutes);
app.use("/api/condition-checks", requireAlphaAccess, conditionCheckRoutes);
app.use("/api/feedback", requireAlphaAccess, feedbackRoutes);
app.use("/api/upload", requireAlphaAccess, uploadRoutes);
// Error handling middleware (must be last)
app.use(errorLogger);
app.use(sanitizeError);
const PORT = process.env.PORT;
const { checkPendingMigrations } = require("./utils/checkMigrations");
sequelize
.sync({ alter: true })
.then(() => {
console.log("Database synced");
// Start the payout processor
const payoutJobs = PayoutProcessor.startScheduledPayouts();
app.listen(PORT, () => {
console.log(`Server is running on port ${PORT}`);
.authenticate()
.then(async () => {
logger.info("Database connection established successfully");
// Check for pending migrations
const pendingMigrations = await checkPendingMigrations(sequelize);
if (pendingMigrations.length > 0) {
logger.error(
`Found ${pendingMigrations.length} pending migration(s). Please run 'npm run db:migrate'`,
{ pendingMigrations },
);
process.exit(1);
}
logger.info("All migrations are up to date");
// Initialize email services and load templates
try {
await emailServices.initialize();
logger.info("Email services initialized successfully");
} catch (err) {
logger.error("Failed to initialize email services", {
error: err.message,
stack: err.stack,
});
// Fail fast - don't start server if email templates can't load
if (env === "prod" || env === "production") {
logger.error(
"Cannot start server without email services in production",
);
process.exit(1);
} else {
logger.warn(
"Email services failed to initialize - continuing in dev mode",
);
}
}
// Initialize S3 service for image uploads
try {
s3Service.initialize();
logger.info("S3 service initialized successfully");
} catch (err) {
logger.error("Failed to initialize S3 service", {
error: err.message,
stack: err.stack,
});
logger.error("Cannot start server without S3 service in production");
process.exit(1);
}
server.listen(PORT, () => {
logger.info(`Server is running on port ${PORT}`, {
port: PORT,
environment: env,
});
logger.info("Socket.io server initialized");
});
})
.catch((err) => {
console.error("Unable to sync database:", err);
logger.error("Unable to connect to database", {
error: err.message,
stack: err.stack,
});
process.exit(1);
});

View File

@@ -0,0 +1,305 @@
const crypto = require("crypto");
const { authenticator } = require("otplib");
const QRCode = require("qrcode");
const bcrypt = require("bcryptjs");
const logger = require("../utils/logger");
// Configuration
const TOTP_ISSUER = process.env.TOTP_ISSUER;
const EMAIL_OTP_EXPIRY_MINUTES = parseInt(
process.env.TWO_FACTOR_EMAIL_OTP_EXPIRY_MINUTES,
10,
);
const STEP_UP_VALIDITY_MINUTES = parseInt(
process.env.TWO_FACTOR_STEP_UP_VALIDITY_MINUTES,
10,
);
const MAX_EMAIL_OTP_ATTEMPTS = 3;
const RECOVERY_CODE_COUNT = 10;
const BCRYPT_ROUNDS = 12;
// Characters for recovery codes (excludes confusing chars: 0, O, 1, I, L)
const RECOVERY_CODE_CHARS = "ABCDEFGHJKMNPQRSTUVWXYZ23456789";
class TwoFactorService {
/**
* Generate a new TOTP secret and QR code for setup
* @param {string} email - User's email address
* @returns {Promise<{qrCodeDataUrl: string, encryptedSecret: string, encryptedSecretIv: string}>}
*/
static async generateTotpSecret(email) {
const secret = authenticator.generateSecret();
const otpAuthUrl = authenticator.keyuri(email, TOTP_ISSUER, secret);
// Generate QR code as data URL
const qrCodeDataUrl = await QRCode.toDataURL(otpAuthUrl);
// Encrypt the secret for storage
const { encrypted, iv } = this._encryptSecret(secret);
return {
qrCodeDataUrl,
encryptedSecret: encrypted,
encryptedSecretIv: iv,
};
}
  /**
   * Verify a TOTP code against a user's secret
   * @param {string} encryptedSecret - Encrypted TOTP secret
   * @param {string} iv - Initialization vector for decryption
   * @param {string} code - 6-digit TOTP code to verify
   * @returns {boolean} True only when the code is well-formed and matches
   *   the current time window.
   */
  static verifyTotpCode(encryptedSecret, iv, code) {
    try {
      // Validate code format before any crypto work
      if (!/^\d{6}$/.test(code)) {
        return false;
      }
      // Decrypt the secret
      const secret = this._decryptSecret(encryptedSecret, iv);
      // Verify with window 0 (only current 30-second period) to prevent replay attacks
      return authenticator.verify({ token: code, secret, window: 0 });
    } catch (error) {
      // Decryption/verification failures are treated as an invalid code
      // rather than surfacing error details to the caller.
      logger.error("TOTP verification error:", error);
      return false;
    }
  }
/**
* Generate a 6-digit email OTP code
* @returns {{code: string, hashedCode: string, expiry: Date}}
*/
static generateEmailOtp() {
// Generate 6-digit numeric code
const code = crypto.randomInt(100000, 999999).toString();
// Hash the code for storage (SHA-256)
const hashedCode = crypto.createHash("sha256").update(code).digest("hex");
// Calculate expiry
const expiry = new Date(Date.now() + EMAIL_OTP_EXPIRY_MINUTES * 60 * 1000);
return { code, hashedCode, expiry };
}
  /**
   * Verify an email OTP code using timing-safe comparison
   * @param {string} inputCode - Code entered by user
   * @param {string} storedHash - Hashed code stored in database (hex SHA-256)
   * @param {Date} expiry - Expiry timestamp
   * @returns {boolean} True only for a well-formed, unexpired, matching code.
   */
  static verifyEmailOtp(inputCode, storedHash, expiry) {
    try {
      // Validate code format before hashing
      if (!/^\d{6}$/.test(inputCode)) {
        return false;
      }
      // Check expiry — a missing or past expiry rejects the code
      if (!expiry || new Date() > new Date(expiry)) {
        return false;
      }
      // Hash the input code the same way the stored code was hashed
      const inputHash = crypto
        .createHash("sha256")
        .update(inputCode)
        .digest("hex");
      // Timing-safe comparison of the two digests
      const inputBuffer = Buffer.from(inputHash, "hex");
      const storedBuffer = Buffer.from(storedHash, "hex");
      // timingSafeEqual throws on length mismatch, so guard first
      if (inputBuffer.length !== storedBuffer.length) {
        return false;
      }
      return crypto.timingSafeEqual(inputBuffer, storedBuffer);
    } catch (error) {
      // Malformed input (e.g. non-hex storedHash) counts as a failed check
      logger.error("Email OTP verification error:", error);
      return false;
    }
  }
  /**
   * Generate recovery codes (10 codes in XXXX-XXXX format).
   * Plain codes are returned once for display to the user; only the bcrypt
   * hashes should be persisted.
   * @returns {Promise<{codes: string[], hashedCodes: string[]}>}
   */
  static async generateRecoveryCodes() {
    const codes = [];
    const hashedCodes = [];
    for (let i = 0; i < RECOVERY_CODE_COUNT; i++) {
      // Generate code in XXXX-XXXX format from the ambiguity-free alphabet
      let code = "";
      for (let j = 0; j < 8; j++) {
        // Insert the separator before the 5th character
        if (j === 4) code += "-";
        code +=
          RECOVERY_CODE_CHARS[crypto.randomInt(RECOVERY_CODE_CHARS.length)];
      }
      codes.push(code);
      // Hash the code for storage
      const hashedCode = await bcrypt.hash(code, BCRYPT_ROUNDS);
      hashedCodes.push(hashedCode);
    }
    return { codes, hashedCodes };
  }
  /**
   * Verify a recovery code and return the index if valid
   * @param {string} inputCode - Recovery code entered by user
   * @param {Object|Array} recoveryData - Recovery codes data; either the new
   *   structured format ({version, codes: [{hash, used}]}) or the legacy
   *   array of bcrypt hashes where a used code is stored as null.
   * @returns {Promise<{valid: boolean, index: number}>} index is -1 when invalid.
   */
  static async verifyRecoveryCode(inputCode, recoveryData) {
    // Normalize input (uppercase, ensure format)
    const normalizedCode = inputCode.toUpperCase().trim();
    // Validate format before any bcrypt work
    if (!/^[A-Z0-9]{4}-[A-Z0-9]{4}$/.test(normalizedCode)) {
      return { valid: false, index: -1 };
    }
    // Handle both old format (array) and new format (structured object);
    // legacy entries are adapted into the structured shape on the fly.
    const codes = recoveryData.version
      ? recoveryData.codes
      : recoveryData.map((hash, i) => ({
          hash,
          used: hash === null,
          index: i,
        }));
    // Check each code against the input
    for (let i = 0; i < codes.length; i++) {
      const codeEntry = codes[i];
      // Skip already used codes
      if (codeEntry.used || !codeEntry.hash) continue;
      const isMatch = await bcrypt.compare(normalizedCode, codeEntry.hash);
      if (isMatch) {
        return { valid: true, index: i };
      }
    }
    return { valid: false, index: -1 };
  }
/**
* Validate if a step-up session is still valid
* @param {Object} user - User object with twoFactorVerifiedAt field
* @param {number} maxAgeMinutes - Maximum age in minutes (default: 15)
* @returns {boolean}
*/
static validateStepUpSession(user, maxAgeMinutes = STEP_UP_VALIDITY_MINUTES) {
if (!user.twoFactorVerifiedAt) {
return false;
}
const verifiedAt = new Date(user.twoFactorVerifiedAt);
const maxAge = maxAgeMinutes * 60 * 1000;
const now = Date.now();
return now - verifiedAt.getTime() < maxAge;
}
/**
* Get the count of remaining recovery codes
* @param {Object|Array} recoveryData - Recovery codes data (structured or legacy format)
* @returns {number}
*/
static getRemainingRecoveryCodesCount(recoveryData) {
if (!recoveryData) {
return 0;
}
// Handle new structured format
if (recoveryData.version) {
return recoveryData.codes.filter((code) => !code.used).length;
}
// Handle legacy array format
if (Array.isArray(recoveryData)) {
return recoveryData.filter((code) => code !== null && code !== "").length;
}
return 0;
}
/**
* Encrypt a TOTP secret using AES-256-GCM
* @param {string} secret - Plain text secret
* @returns {{encrypted: string, iv: string}}
* @private
*/
static _encryptSecret(secret) {
const encryptionKey = process.env.TOTP_ENCRYPTION_KEY;
if (!encryptionKey || encryptionKey.length !== 64) {
throw new Error(
"TOTP_ENCRYPTION_KEY must be a 64-character hex string (32 bytes)",
);
}
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(
"aes-256-gcm",
Buffer.from(encryptionKey, "hex"),
iv,
);
let encrypted = cipher.update(secret, "utf8", "hex");
encrypted += cipher.final("hex");
const authTag = cipher.getAuthTag().toString("hex");
return {
encrypted: encrypted + ":" + authTag,
iv: iv.toString("hex"),
};
}
/**
* Decrypt a TOTP secret using AES-256-GCM
* @param {string} encryptedData - Encrypted data with auth tag
* @param {string} iv - Initialization vector (hex)
* @returns {string} - Decrypted secret
* @private
*/
static _decryptSecret(encryptedData, iv) {
const encryptionKey = process.env.TOTP_ENCRYPTION_KEY;
if (!encryptionKey || encryptionKey.length !== 64) {
throw new Error(
"TOTP_ENCRYPTION_KEY must be a 64-character hex string (32 bytes)",
);
}
const [ciphertext, authTag] = encryptedData.split(":");
const decipher = crypto.createDecipheriv(
"aes-256-gcm",
Buffer.from(encryptionKey, "hex"),
Buffer.from(iv, "hex"),
);
decipher.setAuthTag(Buffer.from(authTag, "hex"));
let decrypted = decipher.update(ciphertext, "hex", "utf8");
decrypted += decipher.final("utf8");
return decrypted;
}
/**
 * Check if email OTP attempts are locked
 * @param {number} attempts - Current attempt count
 * @returns {boolean} true once the attempt count reaches the configured maximum
 */
static isEmailOtpLocked(attempts) {
  // Lockout applies at exactly MAX_EMAIL_OTP_ATTEMPTS, not one past it.
  return attempts >= MAX_EMAIL_OTP_ATTEMPTS;
}
}
module.exports = TwoFactorService;

View File

@@ -0,0 +1,238 @@
const { User, UserAddress } = require("../models");
const emailServices = require("./email");
const logger = require("../utils/logger");
/**
* UserService handles user-related business logic
* Including profile updates and associated notifications
*/
class UserService {
  /**
   * Update user profile and send notification if personal info changed
   * @param {string} userId - User ID
   * @param {Object} rawUpdateData - Data to update
   * @param {Object} options - Optional transaction or other options
   * @returns {Promise<User>} Updated user (without password field)
   * @throws {Error} "User not found" when no user exists for userId
   */
  async updateProfile(userId, rawUpdateData, options = {}) {
    const user = await User.findByPk(userId);
    if (!user) {
      throw new Error("User not found");
    }
    // Store original values for comparison — snapshot taken BEFORE the
    // update so we can detect which personal-info fields actually changed.
    const originalValues = {
      email: user.email,
      firstName: user.firstName,
      lastName: user.lastName,
      address1: user.address1,
      address2: user.address2,
      city: user.city,
      state: user.state,
      zipCode: user.zipCode,
      country: user.country,
    };
    // Prepare update data with preprocessing; copied so the caller's
    // object is never mutated.
    const updateData = { ...rawUpdateData };
    // Only include email if it's not empty
    if (updateData.email !== undefined) {
      if (updateData.email && updateData.email.trim() !== "") {
        updateData.email = updateData.email.trim();
      } else {
        delete updateData.email; // Don't update if empty
      }
    }
    // Handle phone: convert empty strings to null to avoid unique constraint issues
    if (updateData.phone !== undefined) {
      updateData.phone =
        updateData.phone && updateData.phone.trim() !== ""
          ? updateData.phone.trim()
          : null;
    }
    // Perform the update (options may carry e.g. a transaction).
    await user.update(updateData, options);
    // Check if personal information changed
    const personalInfoFields = [
      "email",
      "firstName",
      "lastName",
      "address1",
      "address2",
      "city",
      "state",
      "zipCode",
      "country",
    ];
    // A field counts as changed only when it was present in the update AND
    // differs from the pre-update snapshot.
    const changedFields = personalInfoFields.filter(
      (field) =>
        updateData[field] !== undefined &&
        originalValues[field] !== updateData[field]
    );
    // Send notification email if personal info changed (skipped under test).
    if (changedFields.length > 0 && process.env.NODE_ENV !== "test") {
      try {
        await emailServices.auth.sendPersonalInfoChangedEmail(user);
        logger.info("Personal information changed notification sent", {
          userId: user.id,
          email: user.email,
          changedFields,
        });
      } catch (emailError) {
        logger.error(
          "Failed to send personal information changed notification",
          {
            error: emailError.message,
            stack: emailError.stack,
            userId: user.id,
            email: user.email,
            changedFields,
          }
        );
        // Don't throw - email failure shouldn't fail the update
      }
    }
    // Return user without password — re-fetched so the exclusion is applied
    // by the query rather than stripping the field by hand.
    const updatedUser = await User.findByPk(user.id, {
      attributes: { exclude: ["password"] },
    });
    return updatedUser;
  }
  /**
   * Create a new address for a user and send notification
   * @param {string} userId - User ID
   * @param {Object} addressData - Address data
   * @returns {Promise<UserAddress>} Created address
   * @throws {Error} "User not found" when no user exists for userId
   */
  async createUserAddress(userId, addressData) {
    const user = await User.findByPk(userId);
    if (!user) {
      throw new Error("User not found");
    }
    // userId is set last so addressData cannot attach the address to
    // another user.
    const address = await UserAddress.create({
      ...addressData,
      userId,
    });
    // Send notification for address creation (best-effort; skipped in test).
    if (process.env.NODE_ENV !== "test") {
      try {
        await emailServices.auth.sendPersonalInfoChangedEmail(user);
        logger.info(
          "Personal information changed notification sent (address created)",
          {
            userId: user.id,
            email: user.email,
            addressId: address.id,
          }
        );
      } catch (emailError) {
        // Email failure is logged but never fails the address creation.
        logger.error("Failed to send notification for address creation", {
          error: emailError.message,
          stack: emailError.stack,
          userId: user.id,
          addressId: address.id,
        });
      }
    }
    return address;
  }
  /**
   * Update a user address and send notification
   * @param {string} userId - User ID
   * @param {string} addressId - Address ID
   * @param {Object} updateData - Data to update
   * @returns {Promise<UserAddress>} Updated address
   * @throws {Error} "Address not found" when the address does not exist or
   *   does not belong to userId (the ownership check is part of the query)
   */
  async updateUserAddress(userId, addressId, updateData) {
    const address = await UserAddress.findOne({
      where: { id: addressId, userId },
    });
    if (!address) {
      throw new Error("Address not found");
    }
    await address.update(updateData);
    // Send notification for address update (best-effort; skipped in test).
    if (process.env.NODE_ENV !== "test") {
      try {
        const user = await User.findByPk(userId);
        await emailServices.auth.sendPersonalInfoChangedEmail(user);
        logger.info(
          "Personal information changed notification sent (address updated)",
          {
            userId: user.id,
            email: user.email,
            addressId: address.id,
          }
        );
      } catch (emailError) {
        logger.error("Failed to send notification for address update", {
          error: emailError.message,
          stack: emailError.stack,
          userId,
          addressId: address.id,
        });
      }
    }
    return address;
  }
  /**
   * Delete a user address and send notification
   * @param {string} userId - User ID
   * @param {string} addressId - Address ID
   * @returns {Promise<void>}
   * @throws {Error} "Address not found" when the address does not exist or
   *   does not belong to userId
   */
  async deleteUserAddress(userId, addressId) {
    const address = await UserAddress.findOne({
      where: { id: addressId, userId },
    });
    if (!address) {
      throw new Error("Address not found");
    }
    await address.destroy();
    // Send notification for address deletion (best-effort; skipped in test).
    if (process.env.NODE_ENV !== "test") {
      try {
        const user = await User.findByPk(userId);
        await emailServices.auth.sendPersonalInfoChangedEmail(user);
        logger.info(
          "Personal information changed notification sent (address deleted)",
          {
            userId: user.id,
            email: user.email,
            addressId,
          }
        );
      } catch (emailError) {
        logger.error("Failed to send notification for address deletion", {
          error: emailError.message,
          stack: emailError.stack,
          userId,
          addressId,
        });
      }
    }
  }
}
// Exported as a singleton instance, not the class.
module.exports = new UserService();

View File

@@ -0,0 +1,265 @@
const { ConditionCheck, Rental, User } = require("../models");
const { Op } = require("sequelize");
const { isActive } = require("../utils/rentalStatus");
class ConditionCheckService {
  /**
   * Validate if a condition check can be submitted
   * @param {string} rentalId - Rental ID
   * @param {string} checkType - Type of check (pre_rental_owner, rental_start_renter, etc.)
   * @param {string} userId - User attempting to submit
   * @returns {Object} - { canSubmit, reason, timeWindow }
   */
  static async validateConditionCheck(rentalId, checkType, userId) {
    const rental = await Rental.findByPk(rentalId);
    if (!rental) {
      return { canSubmit: false, reason: "Rental not found" };
    }
    // Check user permissions — the required role is inferred from the
    // checkType string ("...owner..." needs the owner, "...renter..." the
    // renter).
    const isOwner = rental.ownerId === userId;
    const isRenter = rental.renterId === userId;
    if (checkType.includes("owner") && !isOwner) {
      return {
        canSubmit: false,
        reason: "Only the item owner can submit owner condition checks",
      };
    }
    if (checkType.includes("renter") && !isRenter) {
      return {
        canSubmit: false,
        reason: "Only the renter can submit renter condition checks",
      };
    }
    // Check if already submitted — only one check per (rental, type).
    const existingCheck = await ConditionCheck.findOne({
      where: { rentalId, checkType },
    });
    if (existingCheck) {
      return {
        canSubmit: false,
        reason: "Condition check already submitted for this type",
      };
    }
    // Check time windows (24 hour windows)
    const now = new Date();
    const startDate = new Date(rental.startDateTime);
    const endDate = new Date(rental.endDateTime);
    const twentyFourHours = 24 * 60 * 60 * 1000;
    let timeWindow = {};
    let canSubmit = false;
    switch (checkType) {
      case "pre_rental_owner":
        // 24 hours before rental starts
        timeWindow.start = new Date(startDate.getTime() - twentyFourHours);
        timeWindow.end = startDate;
        canSubmit = now >= timeWindow.start && now <= timeWindow.end;
        break;
      case "rental_start_renter":
        // 24 hours after rental starts; rental must also still be active
        timeWindow.start = startDate;
        timeWindow.end = new Date(startDate.getTime() + twentyFourHours);
        canSubmit =
          now >= timeWindow.start &&
          now <= timeWindow.end &&
          isActive(rental);
        break;
      case "rental_end_renter":
        // 24 hours before rental ends; rental must also still be active
        timeWindow.start = new Date(endDate.getTime() - twentyFourHours);
        timeWindow.end = endDate;
        canSubmit =
          now >= timeWindow.start &&
          now <= timeWindow.end &&
          isActive(rental);
        break;
      case "post_rental_owner":
        // Can be submitted anytime (integrated into return flow)
        timeWindow.start = endDate;
        timeWindow.end = null; // No time limit
        canSubmit = true; // Always allowed when owner marks return
        break;
      default:
        return { canSubmit: false, reason: "Invalid check type" };
    }
    if (!canSubmit) {
      // timeWindow.end is only null for post_rental_owner, and that case
      // sets canSubmit = true, so the comparison below never sees null.
      const isBeforeWindow = now < timeWindow.start;
      const isAfterWindow = now > timeWindow.end;
      let reason = "Outside of allowed time window";
      if (isBeforeWindow) {
        reason = `Too early. Check can be submitted starting ${timeWindow.start.toLocaleString()}`;
      } else if (isAfterWindow) {
        // NOTE(review): this message is specific to the pre-rental check but
        // is returned for every check type that is past its window — confirm
        // whether it should vary per checkType.
        reason = `Pre-Rental Condition can only be submitted before start of rental period`;
      }
      return { canSubmit: false, reason, timeWindow };
    }
    return { canSubmit: true, timeWindow };
  }
  /**
   * Submit a condition check with photos
   * @param {string} rentalId - Rental ID
   * @param {string} checkType - Type of check
   * @param {string} userId - User submitting the check
   * @param {Array} imageFilenames - Array of image filenames
   * @param {string} notes - Optional notes
   * @returns {Object} - Created condition check
   * @throws {Error} With the validation reason when the check is not allowed,
   *   or when more than 20 photos are supplied
   */
  static async submitConditionCheck(
    rentalId,
    checkType,
    userId,
    imageFilenames = [],
    notes = null
  ) {
    // Validate the check (permissions, duplicates, time window).
    const validation = await this.validateConditionCheck(
      rentalId,
      checkType,
      userId
    );
    if (!validation.canSubmit) {
      throw new Error(validation.reason);
    }
    // Validate photos (basic validation)
    if (imageFilenames.length > 20) {
      throw new Error("Maximum 20 photos allowed per condition check");
    }
    const conditionCheck = await ConditionCheck.create({
      rentalId,
      checkType,
      submittedBy: userId,
      imageFilenames,
      notes,
    });
    return conditionCheck;
  }
  /**
   * Get all condition checks for multiple rentals (batch)
   * @param {Array<string>} rentalIds - Array of Rental IDs
   * @returns {Array} - Array of condition checks with user info
   */
  static async getConditionChecksForRentals(rentalIds) {
    // Empty/missing input short-circuits without a DB round trip.
    if (!rentalIds || rentalIds.length === 0) {
      return [];
    }
    const checks = await ConditionCheck.findAll({
      where: {
        rentalId: {
          [Op.in]: rentalIds,
        },
      },
      include: [
        {
          model: User,
          as: "submittedByUser",
          attributes: ["id", "firstName", "lastName"],
        },
      ],
      order: [["submittedAt", "ASC"]],
    });
    return checks;
  }
  /**
   * Get available condition checks for a user
   * @param {string} userId - User ID
   * @param {Array<string>} rentalIds - Array of rental IDs to check
   * @returns {Array} - Array of available condition checks
   */
  static async getAvailableChecks(userId, rentalIds) {
    if (!rentalIds || rentalIds.length === 0) {
      return [];
    }
    // NOTE(review): now/twentyFourHours (and startDate/endDate below) are
    // declared but never read in this method — candidates for removal.
    const now = new Date();
    const twentyFourHours = 24 * 60 * 60 * 1000;
    // Find specified rentals where user is owner or renter
    const rentals = await Rental.findAll({
      where: {
        id: { [Op.in]: rentalIds },
        [Op.or]: [{ ownerId: userId }, { renterId: userId }],
        status: {
          [Op.in]: ["confirmed", "active", "completed"],
        },
      },
    });
    const availableChecks = [];
    // NOTE(review): this loop issues one findOne per (rental, checkType) and
    // validateConditionCheck re-queries both — an N+1 pattern that could be
    // batched if rental lists grow large.
    for (const rental of rentals) {
      const isOwner = rental.ownerId === userId;
      const isRenter = rental.renterId === userId;
      const startDate = new Date(rental.startDateTime);
      const endDate = new Date(rental.endDateTime);
      // Check each type of condition check
      const checkTypes = [];
      if (isOwner) {
        // Only include pre_rental_owner; post_rental is now part of return flow
        checkTypes.push("pre_rental_owner");
      }
      if (isRenter) {
        checkTypes.push("rental_start_renter", "rental_end_renter");
      }
      for (const checkType of checkTypes) {
        // Check if already submitted
        const existing = await ConditionCheck.findOne({
          where: { rentalId: rental.id, checkType },
        });
        if (!existing) {
          const validation = await this.validateConditionCheck(
            rental.id,
            checkType,
            userId
          );
          if (validation.canSubmit) {
            availableChecks.push({
              rentalId: rental.id,
              checkType,
              rental: {
                id: rental.id,
                itemId: rental.itemId,
                startDateTime: rental.startDateTime,
                endDateTime: rental.endDateTime,
              },
              timeWindow: validation.timeWindow,
            });
          }
        }
      }
    }
    return availableChecks;
  }
}
module.exports = ConditionCheckService;

View File

@@ -0,0 +1,145 @@
const { Rental, Item, ConditionCheck, User } = require("../models");
const LateReturnService = require("./lateReturnService");
const emailServices = require("./email");
const { isActive } = require("../utils/rentalStatus");
class DamageAssessmentService {
  /**
   * Process damage assessment and calculate fees
   * @param {string} rentalId - Rental ID
   * @param {Object} damageInfo - Damage assessment information
   * @param {string} userId - Owner reporting the damage
   * @returns {Object} - Updated rental with damage fees
   * @throws {Error} When the rental is missing, the caller is not the owner,
   *   the rental is not active, or required assessment fields are invalid
   */
  static async processDamageAssessment(rentalId, damageInfo, userId) {
    const {
      description,
      canBeFixed,
      repairCost,
      needsReplacement,
      replacementCost,
      proofOfOwnership,
      actualReturnDateTime,
      imageFilenames = [],
    } = damageInfo;
    const rental = await Rental.findByPk(rentalId, {
      include: [{ model: Item, as: "item" }],
    });
    if (!rental) {
      throw new Error("Rental not found");
    }
    if (rental.ownerId !== userId) {
      throw new Error("Only the item owner can report damage");
    }
    if (!isActive(rental)) {
      throw new Error("Can only assess damage for active rentals");
    }
    // Validate required fields
    if (!description || description.trim().length === 0) {
      throw new Error("Damage description is required");
    }
    if (canBeFixed && (!repairCost || repairCost <= 0)) {
      throw new Error("Repair cost is required when item can be fixed");
    }
    if (needsReplacement && (!replacementCost || replacementCost <= 0)) {
      throw new Error(
        "Replacement cost is required when item needs replacement"
      );
    }
    // Calculate damage fees — replacement takes precedence over repair when
    // both flags are set.
    let damageFees = 0;
    let feeCalculation = {};
    if (needsReplacement) {
      // Full replacement cost
      damageFees = parseFloat(replacementCost);
      feeCalculation = {
        type: "replacement",
        amount: damageFees,
        originalCost: replacementCost,
        depreciation: 0,
      };
    } else if (canBeFixed && repairCost > 0) {
      // Repair cost
      damageFees = parseFloat(repairCost);
      feeCalculation = {
        type: "repair",
        amount: damageFees,
        repairCost: repairCost,
      };
    }
    // Process late return if applicable — delegates fee math to
    // LateReturnService and records why the return was late.
    let lateFees = 0;
    let lateCalculation = null;
    if (actualReturnDateTime) {
      const lateReturn = await LateReturnService.processLateReturn(
        rentalId,
        actualReturnDateTime,
        `Item returned damaged: ${description}`
      );
      lateFees = lateReturn.lateCalculation.lateFee;
      lateCalculation = lateReturn.lateCalculation;
    }
    // Create damage assessment record as metadata (stored on the rental row,
    // not as a separate table row).
    const damageAssessment = {
      description,
      canBeFixed,
      repairCost: canBeFixed ? parseFloat(repairCost) : null,
      needsReplacement,
      replacementCost: needsReplacement ? parseFloat(replacementCost) : null,
      proofOfOwnership: proofOfOwnership || [],
      imageFilenames,
      assessedAt: new Date(),
      assessedBy: userId,
      feeCalculation,
    };
    // Update rental
    const updates = {
      status: "damaged",
      damageFees: damageFees,
      damageAssessment: damageAssessment,
    };
    // Add late fees if applicable
    if (lateFees > 0) {
      updates.lateFees = lateFees;
      updates.actualReturnDateTime = new Date(actualReturnDateTime);
    }
    const updatedRental = await rental.update(updates);
    // Fetch owner and renter user data for email
    const owner = await User.findByPk(updatedRental.ownerId);
    const renter = await User.findByPk(updatedRental.renterId);
    // Send damage report to customer service for review.
    // NOTE(review): an email failure here rejects the whole call even though
    // the rental was already updated — confirm whether that is intended.
    await emailServices.customerService.sendDamageReportToCustomerService(
      updatedRental,
      owner,
      renter,
      damageAssessment,
      lateCalculation
    );
    return {
      rental: updatedRental,
      damageAssessment,
      lateCalculation,
      totalAdditionalFees: damageFees + lateFees,
    };
  }
}
module.exports = DamageAssessmentService;

View File

@@ -0,0 +1,162 @@
const { Rental, User, Item } = require("../models");
const emailServices = require("./email");
const logger = require("../utils/logger");
class DisputeService {
  /**
   * Handle charge.dispute.created webhook
   * Called when a renter disputes a charge with their bank
   * @param {Object} dispute - The Stripe dispute object from the webhook
   * @returns {Object} Processing result
   */
  static async handleDisputeCreated(dispute) {
    const paymentIntentId = dispute.payment_intent;
    logger.info("Processing dispute.created webhook", {
      disputeId: dispute.id,
      paymentIntentId,
      reason: dispute.reason,
      amount: dispute.amount,
    });
    // The payment intent is the only link between the Stripe dispute and
    // our rental record.
    const rental = await Rental.findOne({
      where: { stripePaymentIntentId: paymentIntentId },
      include: [
        { model: User, as: "owner" },
        { model: User, as: "renter" },
        { model: Item, as: "item" },
      ],
    });
    if (!rental) {
      // Unknown rental: log and acknowledge so the webhook isn't retried
      // forever, but mark the event as unprocessed.
      logger.warn("Dispute received for unknown rental", {
        paymentIntentId,
        disputeId: dispute.id,
      });
      return { processed: false, reason: "rental_not_found" };
    }
    // Update rental with dispute info. Stripe timestamps are Unix seconds,
    // hence the * 1000 conversions.
    await rental.update({
      stripeDisputeStatus: dispute.status,
      stripeDisputeId: dispute.id,
      stripeDisputeReason: dispute.reason,
      stripeDisputeAmount: dispute.amount,
      stripeDisputeCreatedAt: new Date(dispute.created * 1000),
      stripeDisputeEvidenceDueBy: new Date(
        dispute.evidence_details.due_by * 1000
      ),
    });
    // Pause payout if not yet deposited to owner's bank
    if (rental.bankDepositStatus !== "paid") {
      await rental.update({ payoutStatus: "on_hold" });
      logger.info("Payout placed on hold due to dispute", {
        rentalId: rental.id,
      });
    }
    // Send admin notification (dispute.amount is in cents, email shows dollars)
    await emailServices.payment.sendDisputeAlertEmail({
      rentalId: rental.id,
      amount: dispute.amount / 100,
      reason: dispute.reason,
      evidenceDueBy: new Date(dispute.evidence_details.due_by * 1000),
      renterEmail: rental.renter?.email,
      renterName: rental.renter?.firstName,
      ownerEmail: rental.owner?.email,
      ownerName: rental.owner?.firstName,
      itemName: rental.item?.name,
    });
    logger.warn("Dispute created for rental", {
      rentalId: rental.id,
      disputeId: dispute.id,
      reason: dispute.reason,
      evidenceDueBy: dispute.evidence_details.due_by,
    });
    return { processed: true, rentalId: rental.id };
  }
  /**
   * Handle dispute closed events (won, lost, or warning_closed)
   * Called for: charge.dispute.closed, charge.dispute.funds_reinstated, charge.dispute.funds_withdrawn
   * @param {Object} dispute - The Stripe dispute object from the webhook
   * @returns {Object} Processing result
   */
  static async handleDisputeClosed(dispute) {
    logger.info("Processing dispute closed webhook", {
      disputeId: dispute.id,
      status: dispute.status,
    });
    // Closed events are matched by the dispute id recorded in
    // handleDisputeCreated, not by payment intent.
    const rental = await Rental.findOne({
      where: { stripeDisputeId: dispute.id },
      include: [{ model: User, as: "owner" }],
    });
    if (!rental) {
      logger.warn("Dispute closed for unknown rental", {
        disputeId: dispute.id,
      });
      return { processed: false, reason: "rental_not_found" };
    }
    const won = dispute.status === "won";
    await rental.update({
      stripeDisputeStatus: dispute.status,
      stripeDisputeClosedAt: new Date(),
    });
    // If we won the dispute, resume payout if it was on hold
    if (won && rental.payoutStatus === "on_hold") {
      await rental.update({ payoutStatus: "pending" });
      logger.info("Payout resumed after winning dispute", {
        rentalId: rental.id,
      });
    }
    // If we lost, record the loss amount
    if (!won && dispute.status === "lost") {
      await rental.update({
        stripeDisputeLost: true,
        stripeDisputeLostAmount: dispute.amount,
      });
      logger.warn("Dispute lost", {
        rentalId: rental.id,
        amount: dispute.amount,
      });
      // If owner was already paid, flag for manual review
      if (rental.bankDepositStatus === "paid") {
        await emailServices.payment.sendDisputeLostAlertEmail({
          rentalId: rental.id,
          amount: dispute.amount / 100,
          ownerAlreadyPaid: true,
          ownerPayoutAmount: rental.payoutAmount,
          ownerEmail: rental.owner?.email,
          ownerName: rental.owner?.firstName,
        });
        logger.warn(
          "Dispute lost - owner already paid, flagged for manual review",
          {
            rentalId: rental.id,
            payoutAmount: rental.payoutAmount,
          }
        );
      }
    }
    logger.info("Dispute closed", {
      rentalId: rental.id,
      disputeId: dispute.id,
      outcome: dispute.status,
    });
    return { processed: true, won, rentalId: rental.id };
  }
}
module.exports = DisputeService;

View File

@@ -0,0 +1,128 @@
const { SESClient, SendEmailCommand } = require("@aws-sdk/client-ses");
const { getAWSConfig } = require("../../../config/aws");
const { htmlToPlainText } = require("./emailUtils");
const logger = require("../../../utils/logger");
/**
* EmailClient handles AWS SES configuration and core email sending functionality
* This class is responsible for:
* - Initializing the AWS SES client
* - Sending emails with HTML and plain text content
* - Managing email sending state (enabled/disabled via environment)
*/
class EmailClient {
  constructor() {
    // Singleton pattern - return existing instance if already created
    if (EmailClient.instance) {
      return EmailClient.instance;
    }
    this.sesClient = null; // lazily created SESClient
    this.initialized = false; // true once the SES client is ready
    this.initializationPromise = null; // in-flight initialize() call, if any
    EmailClient.instance = this;
  }
  /**
   * Initialize the AWS SES client.
   * Concurrent callers share the same in-flight initialization promise.
   * Bug fix: a failed initialization previously left the rejected promise
   * cached forever, so every subsequent initialize()/sendEmail() call
   * re-returned the same rejection with no way to recover; the cached
   * promise is now cleared on failure so the next call can retry.
   * @returns {Promise<void>}
   * @throws Propagates any error from getAWSConfig()/SESClient construction
   */
  async initialize() {
    // If already initialized, return immediately
    if (this.initialized) return;
    // If initialization is in progress, wait for it
    if (this.initializationPromise) {
      return this.initializationPromise;
    }
    // Start initialization and store the promise
    this.initializationPromise = (async () => {
      try {
        // Use centralized AWS configuration with credential profiles
        const awsConfig = getAWSConfig();
        this.sesClient = new SESClient(awsConfig);
        this.initialized = true;
        logger.info("AWS SES Email Client initialized successfully");
      } catch (error) {
        // Clear the cached promise so a later call can retry instead of
        // being stuck on this rejection forever.
        this.initializationPromise = null;
        logger.error("Failed to initialize AWS SES Email Client", { error });
        throw error;
      }
    })();
    return this.initializationPromise;
  }
  /**
   * Send an email using AWS SES
   * @param {string|string[]} to - Email address(es) to send to
   * @param {string} subject - Email subject line
   * @param {string} htmlContent - HTML content of the email
   * @param {string|null} textContent - Plain text content (auto-generated from HTML if not provided)
   * @returns {Promise<{success: boolean, messageId?: string, error?: string}>}
   */
  async sendEmail(to, subject, htmlContent, textContent = null) {
    if (!this.initialized) {
      await this.initialize();
    }
    // Check if email sending is enabled in the environment. Anything other
    // than the literal string "true" (including unset) disables sending;
    // disabled sends report success with a sentinel message id.
    if (process.env.EMAIL_ENABLED !== "true") {
      logger.debug("Email sending disabled in environment");
      return { success: true, messageId: "disabled" };
    }
    // Auto-generate plain text from HTML if not provided
    if (!textContent) {
      textContent = htmlToPlainText(htmlContent);
    }
    // Use friendly sender name format for better recognition
    const fromName = process.env.SES_FROM_NAME || "Village Share";
    const fromEmail = process.env.SES_FROM_EMAIL;
    const source = `${fromName} <${fromEmail}>`;
    const params = {
      Source: source,
      Destination: {
        ToAddresses: Array.isArray(to) ? to : [to],
      },
      Message: {
        Subject: {
          Data: subject,
          Charset: "UTF-8",
        },
        Body: {
          Html: {
            Data: htmlContent,
            Charset: "UTF-8",
          },
          Text: {
            Data: textContent,
            Charset: "UTF-8",
          },
        },
      },
    };
    // Add reply-to address if configured
    if (process.env.SES_REPLY_TO_EMAIL) {
      params.ReplyToAddresses = [process.env.SES_REPLY_TO_EMAIL];
    }
    try {
      const command = new SendEmailCommand(params);
      const result = await this.sesClient.send(command);
      logger.info("Email sent successfully", { to, messageId: result.MessageId });
      return { success: true, messageId: result.MessageId };
    } catch (error) {
      // Email delivery is best-effort at this layer: report the failure to
      // the caller rather than throwing.
      logger.error("Failed to send email", { error, to });
      return { success: false, error: error.message };
    }
  }
}
module.exports = EmailClient;

View File

@@ -0,0 +1,243 @@
const fs = require("fs").promises;
const path = require("path");
const logger = require("../../../utils/logger");
const { escapeHtml } = require("./emailUtils");
/**
* TemplateManager handles loading, caching, and rendering email templates
* This class is responsible for:
* - Loading HTML email templates from disk
* - Caching templates in memory for performance
* - Rendering templates with variable substitution
* - Providing fallback templates when files can't be loaded
*/
// Critical templates that must be preloaded at startup for auth flows.
// initialize() throws if any of these is missing from the templates dir.
const CRITICAL_TEMPLATES = [
  "emailVerificationToUser",
  "passwordResetToUser",
  "passwordChangedToUser",
  "personalInfoChangedToUser",
];
class TemplateManager {
  constructor() {
    // Singleton pattern - return existing instance if already created
    if (TemplateManager.instance) {
      return TemplateManager.instance;
    }
    this.templates = new Map(); // Cached template content
    this.templateNames = new Set(); // Discovered template names
    this.initialized = false;
    this.initializationPromise = null;
    // Templates live at <project>/templates/emails relative to this file.
    this.templatesDir = path.join(
      __dirname,
      "..",
      "..",
      "..",
      "templates",
      "emails"
    );
    TemplateManager.instance = this;
  }
  /**
   * Discover all available templates by scanning the templates directory
   * Only reads filenames, not content (for fast startup)
   * @returns {Promise<void>}
   * @throws When the templates directory cannot be read
   */
  async discoverTemplates() {
    try {
      const files = await fs.readdir(this.templatesDir);
      for (const file of files) {
        if (file.endsWith(".html")) {
          this.templateNames.add(file.replace(".html", ""));
        }
      }
      logger.info("Discovered email templates", {
        count: this.templateNames.size,
      });
    } catch (error) {
      logger.error("Failed to discover email templates", {
        templatesDir: this.templatesDir,
        error,
      });
      throw error;
    }
  }
  /**
   * Load a single template from disk (lazy loading)
   * @param {string} templateName - Name of the template (without .html extension)
   * @returns {Promise<string>} Template content
   * @throws When the template file cannot be read
   */
  async loadTemplate(templateName) {
    // Return cached template if already loaded
    if (this.templates.has(templateName)) {
      return this.templates.get(templateName);
    }
    const templatePath = path.join(this.templatesDir, `${templateName}.html`);
    try {
      const content = await fs.readFile(templatePath, "utf-8");
      this.templates.set(templateName, content);
      logger.debug("Loaded template", { templateName });
      return content;
    } catch (error) {
      logger.error("Failed to load template", {
        templateName,
        templatePath,
        error,
      });
      throw error;
    }
  }
  /**
   * Initialize the template manager by discovering templates and preloading critical ones
   * @returns {Promise<void>}
   * @throws When discovery fails or any CRITICAL_TEMPLATES entry is missing
   */
  async initialize() {
    // If already initialized, return immediately
    if (this.initialized) return;
    // If initialization is in progress, wait for it
    if (this.initializationPromise) {
      return this.initializationPromise;
    }
    // NOTE(review): if this initialization rejects, the rejected promise
    // stays cached and no later call can retry — confirm whether that is
    // acceptable (process likely exits at startup anyway).
    // Start initialization and store the promise
    this.initializationPromise = (async () => {
      // Discover all available templates (fast - only reads filenames)
      await this.discoverTemplates();
      // Preload critical templates for auth flows
      const missingCritical = [];
      for (const templateName of CRITICAL_TEMPLATES) {
        if (!this.templateNames.has(templateName)) {
          missingCritical.push(templateName);
        } else {
          await this.loadTemplate(templateName);
        }
      }
      if (missingCritical.length > 0) {
        const error = new Error(
          `Critical email templates not found: ${missingCritical.join(", ")}`
        );
        error.missingTemplates = missingCritical;
        throw error;
      }
      this.initialized = true;
      logger.info("Email Template Manager initialized successfully", {
        discovered: this.templateNames.size,
        preloaded: CRITICAL_TEMPLATES.length,
      });
    })();
    return this.initializationPromise;
  }
  /**
   * Render a template with the provided variables
   * @param {string} templateName - Name of the template to render
   * @param {Object} variables - Variables to substitute in the template
   * @returns {Promise<string>} Rendered HTML
   */
  async renderTemplate(templateName, variables = {}) {
    // Ensure service is initialized before rendering
    if (!this.initialized) {
      logger.debug("Template manager not initialized yet, initializing now...");
      await this.initialize();
    }
    let template;
    // Check if template exists in our discovered templates
    if (this.templateNames.has(templateName)) {
      // Lazy load the template if not already cached
      template = await this.loadTemplate(templateName);
    } else {
      // Unknown template: fall back to a generic layout rather than failing.
      logger.error("Template not found, using fallback", {
        templateName,
        discoveredTemplates: Array.from(this.templateNames),
      });
      template = this.getFallbackTemplate(templateName);
    }
    let rendered = template;
    try {
      Object.keys(variables).forEach((key) => {
        // Variables ending in 'Html' or 'Section' contain trusted HTML content
        // (e.g., refundSection, stripeSection, earningsSection) - don't escape these
        const isTrustedHtml = key.endsWith("Html") || key.endsWith("Section");
        // NOTE(review): `|| ""` blanks out falsy values like 0 and false —
        // confirm whether numeric/boolean variables should render literally
        // (`?? ""` would preserve them).
        let value = variables[key] || "";
        // Escape HTML in user-provided values to prevent XSS
        if (!isTrustedHtml && typeof value === "string") {
          value = escapeHtml(value);
        }
        // NOTE(review): `key` is interpolated into a RegExp unescaped; safe
        // while keys are code-controlled identifiers, but would misbehave
        // for keys containing regex metacharacters.
        const regex = new RegExp(`{{${key}}}`, "g");
        rendered = rendered.replace(regex, value);
      });
    } catch (error) {
      // Rendering errors are logged and the partially-substituted template
      // is returned rather than throwing.
      logger.error("Error rendering template", {
        templateName,
        variableKeys: Object.keys(variables),
        error,
      });
    }
    return rendered;
  }
  /**
   * Get a generic fallback template when the HTML file is not available
   * This is used as a last resort when a template cannot be loaded
   * @param {string} templateName - Name of the template (for logging)
   * @returns {string} Generic fallback HTML template
   */
  getFallbackTemplate(templateName) {
    logger.warn("Using generic fallback template", { templateName });
    return `
      <!DOCTYPE html>
      <html>
      <head>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1.0">
        <title>Village Share</title>
        <style>
          body { font-family: Arial, sans-serif; margin: 0; padding: 20px; background-color: #f5f5f5; }
          .container { max-width: 600px; margin: 0 auto; background-color: white; padding: 30px; border-radius: 8px; box-shadow: 0 2px 4px rgba(0,0,0,0.1); }
          .header { text-align: center; border-bottom: 2px solid #e9ecef; padding-bottom: 20px; margin-bottom: 30px; }
          .logo { font-size: 24px; font-weight: bold; color: #333; }
          .content { line-height: 1.6; color: #555; }
          .footer { margin-top: 30px; padding-top: 20px; border-top: 1px solid #e9ecef; text-align: center; font-size: 12px; color: #6c757d; }
        </style>
      </head>
      <body>
        <div class="container">
          <div class="header">
            <div class="logo">Village Share</div>
          </div>
          <div class="content">
            <p>Hi {{recipientName}},</p>
            <h2>{{title}}</h2>
            <p>{{message}}</p>
          </div>
          <div class="footer">
            <p>This email was sent from Village Share. If you have any questions, please contact support.</p>
          </div>
        </div>
      </body>
      </html>
    `;
  }
}
module.exports = TemplateManager;

View File

@@ -0,0 +1,115 @@
/**
* Email utility functions shared across all email services
*/
/**
 * Convert HTML to plain text for email fallback
 * Strips HTML tags and formats content for plain text email clients
 * @param {string} html - HTML content to convert
 * @returns {string} Plain text version of the HTML
 */
function htmlToPlainText(html) {
  return (
    html
      // Remove style and script tags and their content
      .replace(/<style[^>]*>[\s\S]*?<\/style>/gi, "")
      .replace(/<script[^>]*>[\s\S]*?<\/script>/gi, "")
      // Convert common HTML elements to text equivalents
      .replace(/<br\s*\/?>/gi, "\n")
      .replace(/<\/p>/gi, "\n\n")
      .replace(/<\/div>/gi, "\n")
      .replace(/<\/li>/gi, "\n")
      .replace(/<\/h[1-6]>/gi, "\n\n")
      .replace(/<li>/gi, "• ")
      // Remove remaining HTML tags
      .replace(/<[^>]+>/g, "")
      // Decode HTML entities. `&amp;` MUST be decoded LAST: decoding it
      // first would turn a literal "&amp;lt;" into "&lt;" and then into
      // "<" (double-unescaping).
      .replace(/&nbsp;/g, " ")
      .replace(/&lt;/g, "<")
      .replace(/&gt;/g, ">")
      .replace(/&quot;/g, '"')
      .replace(/&#39;/g, "'")
      .replace(/&amp;/g, "&")
      // Remove emojis and special characters that don't render well in plain text
      .replace(/[\u{1F600}-\u{1F64F}]/gu, "") // Emoticons
      .replace(/[\u{1F300}-\u{1F5FF}]/gu, "") // Misc Symbols and Pictographs
      .replace(/[\u{1F680}-\u{1F6FF}]/gu, "") // Transport and Map
      .replace(/[\u{2600}-\u{26FF}]/gu, "") // Misc symbols
      .replace(/[\u{2700}-\u{27BF}]/gu, "") // Dingbats
      .replace(/[\u{FE00}-\u{FE0F}]/gu, "") // Variation Selectors
      .replace(/[\u{1F900}-\u{1F9FF}]/gu, "") // Supplemental Symbols and Pictographs
      .replace(/[\u{1FA70}-\u{1FAFF}]/gu, "") // Symbols and Pictographs Extended-A
      // Clean up excessive whitespace
      .replace(/\n\s*\n\s*\n/g, "\n\n")
      .trim()
  );
}
/**
 * Format a date consistently for email display.
 * Produces a long, human-friendly string with weekday and 12-hour time.
 * @param {Date|string} date - Date to format (Date object or date string)
 * @returns {string} Formatted date string
 */
function formatEmailDate(date) {
  const LONG_FORMAT = {
    weekday: "long",
    year: "numeric",
    month: "long",
    day: "numeric",
    hour: "numeric",
    minute: "2-digit",
    hour12: true,
  };
  // Strings are parsed into Date objects; anything else is passed through
  // unchanged, exactly as before.
  const source = typeof date === "string" ? new Date(date) : date;
  return source.toLocaleString("en-US", LONG_FORMAT);
}
/**
 * Format a date as a short date (no time), e.g. "January 15, 2024".
 * @param {Date|string} date - Date to format (Date object or date string)
 * @returns {string} Formatted date string
 */
function formatShortDate(date) {
  // Strings are parsed; non-strings are assumed to already be Dates.
  const source = typeof date === "string" ? new Date(date) : date;
  return source.toLocaleString("en-US", { year: "numeric", month: "long", day: "numeric" });
}
/**
 * Format currency for email display.
 * The amount is expected in the smallest currency unit (e.g. cents) and
 * is divided by 100 before formatting.
 * @param {number} amount - Amount in cents or smallest currency unit
 * @param {string} currency - Currency code (default: USD)
 * @returns {string} Formatted currency string, e.g. "$123.45"
 */
function formatCurrency(amount, currency = "USD") {
  const formatter = new Intl.NumberFormat("en-US", { style: "currency", currency });
  return formatter.format(amount / 100);
}
/**
 * Escape HTML special characters to prevent XSS attacks
 * Converts characters that could be interpreted as HTML into safe entities
 * @param {*} str - Value to escape (will be converted to string)
 * @returns {string} HTML-escaped string safe for insertion into HTML
 */
function escapeHtml(str) {
  // null/undefined become the empty string; everything else is stringified.
  if (str === null || str === undefined) return "";
  const entities = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
    '"': "&quot;",
    "'": "&#039;",
  };
  // Single-pass replacement cannot double-escape already-produced entities.
  return String(str).replace(/[&<>"']/g, (ch) => entities[ch]);
}
module.exports = {
htmlToPlainText,
formatEmailDate,
formatShortDate,
formatCurrency,
escapeHtml,
};

View File

@@ -0,0 +1,72 @@
const EmailClient = require("../core/EmailClient");
const TemplateManager = require("../core/TemplateManager");
const logger = require("../../../utils/logger");
/**
 * AlphaInvitationEmailService handles alpha program invitation emails
 * This service is responsible for:
 * - Sending alpha access invitation codes to new testers
 */
class AlphaInvitationEmailService {
  constructor() {
    this.emailClient = new EmailClient();
    this.templateManager = new TemplateManager();
    this.initialized = false;
  }
  /**
   * Initialize the alpha invitation email service.
   * Safe to call multiple times; subsequent calls are no-ops.
   * @returns {Promise<void>}
   */
  async initialize() {
    if (this.initialized) return;
    // Client and template manager have no interdependency, so init in parallel.
    await Promise.all([
      this.emailClient.initialize(),
      this.templateManager.initialize(),
    ]);
    this.initialized = true;
    logger.info("Alpha Invitation Email Service initialized successfully");
  }
  /**
   * Send alpha invitation email
   * @param {string} email - Recipient's email address
   * @param {string} code - Alpha access code
   * @returns {Promise<{success: boolean, messageId?: string, error?: string}>}
   */
  async sendAlphaInvitation(email, code) {
    if (!this.initialized) {
      await this.initialize();
    }
    try {
      // NOTE(review): FRONTEND_URL may be undefined, in which case the
      // {{frontendUrl}} placeholder renders as an empty string — confirm
      // the environment always sets it.
      const frontendUrl = process.env.FRONTEND_URL;
      const variables = {
        code,
        email,
        frontendUrl,
        title: "Welcome to Alpha Testing!",
        // Keep this value plain text: TemplateManager.renderTemplate
        // HTML-escapes every variable whose key does not end in
        // "Html"/"Section", so raw markup (e.g. "<strong>") placed here
        // would be rendered as literal text in the email.
        message: `You've been invited to join our exclusive alpha testing program. Use the code ${code} to unlock access and be among the first to experience our platform.`,
      };
      const htmlContent = await this.templateManager.renderTemplate(
        "alphaInvitationToUser",
        variables,
      );
      return await this.emailClient.sendEmail(
        email,
        "Your Alpha Access Code - Village Share",
        htmlContent,
      );
    } catch (error) {
      // Swallow and report failure as a result object so callers can
      // decide how to react without a try/catch of their own.
      logger.error("Failed to send alpha invitation email", { error });
      return { success: false, error: error.message };
    }
  }
}
module.exports = AlphaInvitationEmailService;

Some files were not shown because too many files have changed in this diff Show More