Compare commits

...

30 Commits

Author SHA1 Message Date
Gitea Actions
b0719d1e39 ci: Bump version to 0.9.37 [skip ci] 2026-01-06 10:11:19 +05:00
0039ac3752 fuck database contraints - seems buggy
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 37s
2026-01-05 21:08:16 -08:00
Gitea Actions
3c8316f4f7 ci: Bump version to 0.9.36 [skip ci] 2026-01-06 09:03:20 +05:00
2564df1c64 get rid of localhost in tests - not a qualified URL - we'll see
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 33m19s
2026-01-05 20:02:44 -08:00
Gitea Actions
696c547238 ci: Bump version to 0.9.35 [skip ci] 2026-01-06 08:11:42 +05:00
38165bdb9a get rid of localhost in tests - not a qualified URL - we'll see
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 26m14s
2026-01-05 19:10:46 -08:00
Gitea Actions
6139dca072 ci: Bump version to 0.9.34 [skip ci] 2026-01-06 06:33:46 +05:00
68bfaa50e6 more baseurl work - hopefully that does it for now
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 26m5s
2026-01-05 17:33:00 -08:00
Gitea Actions
9c42621f74 ci: Bump version to 0.9.33 [skip ci] 2026-01-06 04:34:48 +05:00
1b98282202 more rate limiting
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 30m19s
2026-01-05 15:31:01 -08:00
Gitea Actions
b6731b220c ci: Bump version to 0.9.32 [skip ci] 2026-01-06 04:13:42 +05:00
3507d455e8 more rate limiting
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Has been cancelled
2026-01-05 15:13:10 -08:00
Gitea Actions
92b2adf8e8 ci: Bump version to 0.9.31 [skip ci] 2026-01-06 04:07:21 +05:00
d6c7452256 more rate limiting
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 41s
2026-01-05 15:06:55 -08:00
Gitea Actions
d812b681dd ci: Bump version to 0.9.30 [skip ci] 2026-01-06 03:54:42 +05:00
b4306a6092 more rate limiting
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 50s
2026-01-05 14:53:49 -08:00
Gitea Actions
57fdd159d5 ci: Bump version to 0.9.29 [skip ci] 2026-01-06 01:08:45 +05:00
4a747ca042 even even more and more test fixes
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 23m46s
2026-01-05 12:08:18 -08:00
Gitea Actions
e0bf96824c ci: Bump version to 0.9.28 [skip ci] 2026-01-06 00:28:11 +05:00
e86e09703e even even more and more test fixes
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 59s
2026-01-05 11:27:13 -08:00
Gitea Actions
275741c79e ci: Bump version to 0.9.27 [skip ci] 2026-01-05 15:32:08 +05:00
3a40249ddb even more and more test fixes
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 22m19s
2026-01-05 02:30:28 -08:00
Gitea Actions
4c70905950 ci: Bump version to 0.9.26 [skip ci] 2026-01-05 14:51:27 +05:00
0b4884ff2a even more and more test fixes
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 26m1s
2026-01-05 01:50:54 -08:00
Gitea Actions
e4acab77c8 ci: Bump version to 0.9.25 [skip ci] 2026-01-05 14:26:57 +05:00
4e20b1b430 even more and more test fixes
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 54s
2026-01-05 01:26:12 -08:00
Gitea Actions
15747ac942 ci: Bump version to 0.9.24 [skip ci] 2026-01-05 12:37:56 +05:00
e5fa89ef17 even more and more test fixes
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 27m55s
2026-01-04 23:36:56 -08:00
Gitea Actions
2c65da31e9 ci: Bump version to 0.9.23 [skip ci] 2026-01-05 05:12:54 +05:00
eeec6af905 even more and more test fixes
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 27m33s
2026-01-04 16:01:55 -08:00
112 changed files with 3597 additions and 1461 deletions

View File

@@ -113,7 +113,7 @@ jobs:
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
# --- Integration test specific variables ---
FRONTEND_URL: 'http://localhost:3000'
FRONTEND_URL: 'https://example.com'
VITE_API_BASE_URL: 'http://localhost:3001/api'
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
@@ -335,7 +335,8 @@ jobs:
fi
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
# Sanitize commit message to prevent shell injection or build breaks (removes quotes, backticks, backslashes, $)
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s | tr -d '"`\\$')
PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
@@ -388,7 +389,7 @@ jobs:
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
# Application Secrets
FRONTEND_URL: 'https://flyer-crawler-test.projectium.com'
FRONTEND_URL: 'https://example.com'
JWT_SECRET: ${{ secrets.JWT_SECRET }}
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }}
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}

View File

@@ -0,0 +1,41 @@
# ADR-027: Standardized Naming Convention for AI and Database Types
**Date**: 2026-01-05
**Status**: Accepted
## Context
The application codebase primarily follows the standard TypeScript convention of `camelCase` for variable and property names. However, the PostgreSQL database uses `snake_case` for column names. Additionally, the AI prompts are designed to extract data that maps directly to these database columns.
Attempting to enforce `camelCase` strictly across the entire stack created friction and ambiguity, particularly in the background processing pipeline where data moves from the AI model directly to the database. Developers were unsure whether to transform keys immediately upon receipt (adding overhead) or keep them as-is.
## Decision
We will adopt a hybrid naming convention strategy to explicitly distinguish between internal application state and external/persisted data formats.
1. **Database and AI Types (`snake_case`)**:
Interfaces, Type definitions, and Zod schemas that represent raw database rows or direct AI responses **MUST** use `snake_case`.
- *Examples*: `AiFlyerDataSchema`, `ExtractedFlyerItemSchema`, `FlyerInsert`.
- *Reasoning*: This avoids unnecessary mapping layers when inserting data into the database or parsing AI output. It serves as a visual cue that the data is "raw", "external", or destined for persistence.
2. **Internal Application Logic (`camelCase`)**:
Variables, function arguments, and processed data structures used within the application logic (Service layer, UI components, utility functions) **MUST** use `camelCase`.
- *Reasoning*: This adheres to standard JavaScript/TypeScript practices and maintains consistency with the rest of the ecosystem (React, etc.).
3. **Boundary Handling**:
- For background jobs that primarily move data from AI to DB, preserving `snake_case` is preferred to minimize transformation logic.
- For API responses sent to the frontend, data should generally be transformed to `camelCase` unless it is a direct dump of a database entity for a specific administrative view.
## Consequences
### Positive
- **Visual Distinction**: It is immediately obvious whether a variable holds raw data (`price_in_cents`) or processed application state (`priceInCents`).
- **Efficiency**: Reduces boilerplate code for mapping keys (e.g., `price_in_cents: data.priceInCents`) when performing bulk inserts or updates.
- **Simplicity**: AI prompts can request JSON keys that match the database schema 1:1, reducing the risk of mapping errors.
### Negative
- **Context Switching**: Developers must be mindful of the casing context.
- **Linter Configuration**: May require specific overrides or `// eslint-disable-next-line` comments if the linter is configured to strictly enforce `camelCase` everywhere.

View File

@@ -16,6 +16,27 @@ if (missingSecrets.length > 0) {
console.log('[ecosystem.config.cjs] ✅ Critical environment variables are present.');
}
// --- Shared Environment Variables ---
// Define common variables to reduce duplication and ensure consistency across apps.
const sharedEnv = {
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
};
module.exports = {
apps: [
{
@@ -25,6 +46,11 @@ module.exports = {
script: './node_modules/.bin/tsx',
args: 'server.ts',
max_memory_restart: '500M',
// Production Optimization: Run in cluster mode to utilize all CPU cores
instances: 'max',
exec_mode: 'cluster',
kill_timeout: 5000, // Allow 5s for graceful shutdown of API requests
log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
// Restart Logic
max_restarts: 40,
@@ -36,46 +62,16 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-api',
cwd: '/var/www/flyer-crawler.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
...sharedEnv,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'test',
name: 'flyer-crawler-api-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
...sharedEnv,
},
// Development Environment Settings
env_development: {
@@ -83,23 +79,8 @@ module.exports = {
name: 'flyer-crawler-api-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
...sharedEnv,
},
},
{
@@ -108,6 +89,8 @@ module.exports = {
script: './node_modules/.bin/tsx',
args: 'src/services/worker.ts',
max_memory_restart: '1G',
kill_timeout: 10000, // Workers may need more time to complete a job
log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
// Restart Logic
max_restarts: 40,
@@ -119,44 +102,14 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-worker',
cwd: '/var/www/flyer-crawler.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'test',
name: 'flyer-crawler-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Development Environment Settings
env_development: {
@@ -164,22 +117,7 @@ module.exports = {
name: 'flyer-crawler-worker-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
},
{
@@ -188,6 +126,8 @@ module.exports = {
script: './node_modules/.bin/tsx',
args: 'src/services/worker.ts',
max_memory_restart: '1G',
kill_timeout: 10000,
log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
// Restart Logic
max_restarts: 40,
@@ -199,44 +139,14 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-analytics-worker',
cwd: '/var/www/flyer-crawler.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'test',
name: 'flyer-crawler-analytics-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Development Environment Settings
env_development: {
@@ -244,22 +154,7 @@ module.exports = {
name: 'flyer-crawler-analytics-worker-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
},
],

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "flyer-crawler",
"version": "0.9.22",
"version": "0.9.37",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.9.22",
"version": "0.9.37",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",

View File

@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.9.22",
"version": "0.9.37",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",

View File

@@ -90,10 +90,10 @@ CREATE TABLE IF NOT EXISTS public.profiles (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT profiles_full_name_check CHECK (full_name IS NULL OR TRIM(full_name) <> ''),
CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https://?.*'),
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
updated_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
);
-- CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https://?.*'),
COMMENT ON TABLE public.profiles IS 'Stores public-facing user data, linked to the public.users table.';
COMMENT ON COLUMN public.profiles.address_id IS 'A foreign key to the user''s primary address in the `addresses` table.';
-- This index is crucial for the gamification leaderboard feature.
@@ -108,9 +108,9 @@ CREATE TABLE IF NOT EXISTS public.stores (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT stores_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*'),
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
);
-- CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*'),
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';
-- 5. The 'categories' table for normalized category data.
@@ -141,10 +141,10 @@ CREATE TABLE IF NOT EXISTS public.flyers (
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT flyers_valid_dates_check CHECK (valid_to >= valid_from),
CONSTRAINT flyers_file_name_check CHECK (TRIM(file_name) <> ''),
CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https://?.*'),
CONSTRAINT flyers_icon_url_check CHECK (icon_url IS NULL OR icon_url ~* '^https://?.*'),
CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64)
);
-- CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https://?.*'),
-- CONSTRAINT flyers_icon_url_check CHECK (icon_url IS NULL OR icon_url ~* '^https://?.*'),
COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.';
CREATE INDEX IF NOT EXISTS idx_flyers_store_id ON public.flyers(store_id);
COMMENT ON COLUMN public.flyers.file_name IS 'The original name of the uploaded flyer file (e.g., "flyer_week_1.pdf").';
@@ -198,9 +198,9 @@ CREATE TABLE IF NOT EXISTS public.brands (
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT brands_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*')
CONSTRAINT brands_name_check CHECK (TRIM(name) <> '')
);
-- CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https://?.*')
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
@@ -464,9 +464,9 @@ CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
upvotes INTEGER DEFAULT 0 NOT NULL CHECK (upvotes >= 0),
downvotes INTEGER DEFAULT 0 NOT NULL CHECK (downvotes >= 0),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
-- CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
COMMENT ON TABLE public.user_submitted_prices IS 'Stores item prices submitted by users directly from physical stores.';
COMMENT ON COLUMN public.user_submitted_prices.photo_url IS 'URL to user-submitted photo evidence of the price.';
COMMENT ON COLUMN public.user_submitted_prices.upvotes IS 'Community validation score indicating accuracy.';
@@ -521,9 +521,9 @@ CREATE TABLE IF NOT EXISTS public.recipes (
fork_count INTEGER DEFAULT 0 NOT NULL CHECK (fork_count >= 0),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT recipes_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
CONSTRAINT recipes_name_check CHECK (TRIM(name) <> '')
);
-- CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https://?.*')
COMMENT ON TABLE public.recipes IS 'Stores recipes that can be used to generate shopping lists.';
COMMENT ON COLUMN public.recipes.servings IS 'The number of servings this recipe yields.';
COMMENT ON COLUMN public.recipes.original_recipe_id IS 'If this recipe is a variation of another, this points to the original.';
@@ -920,9 +920,9 @@ CREATE TABLE IF NOT EXISTS public.receipts (
raw_text TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
processed_at TIMESTAMPTZ,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https://?.*')
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
-- CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https://?.*')
COMMENT ON TABLE public.receipts IS 'Stores uploaded user receipts for purchase tracking and analysis.';
CREATE INDEX IF NOT EXISTS idx_receipts_user_id ON public.receipts(user_id);
CREATE INDEX IF NOT EXISTS idx_receipts_store_id ON public.receipts(store_id);

View File

@@ -106,10 +106,10 @@ CREATE TABLE IF NOT EXISTS public.profiles (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT profiles_full_name_check CHECK (full_name IS NULL OR TRIM(full_name) <> ''),
CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https?://.*'),
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
updated_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
);
-- CONSTRAINT profiles_avatar_url_check CHECK (avatar_url IS NULL OR avatar_url ~* '^https?://.*'),
COMMENT ON TABLE public.profiles IS 'Stores public-facing user data, linked to the public.users table.';
COMMENT ON COLUMN public.profiles.address_id IS 'A foreign key to the user''s primary address in the `addresses` table.';
-- This index is crucial for the gamification leaderboard feature.
@@ -124,9 +124,9 @@ CREATE TABLE IF NOT EXISTS public.stores (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT stores_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*'),
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
);
-- CONSTRAINT stores_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*'),
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';
-- 5. The 'categories' table for normalized category data.
@@ -157,10 +157,10 @@ CREATE TABLE IF NOT EXISTS public.flyers (
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT flyers_valid_dates_check CHECK (valid_to >= valid_from),
CONSTRAINT flyers_file_name_check CHECK (TRIM(file_name) <> ''),
CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https?://.*'),
CONSTRAINT flyers_icon_url_check CHECK (icon_url ~* '^https?://.*'),
CONSTRAINT flyers_checksum_check CHECK (checksum IS NULL OR length(checksum) = 64)
);
-- CONSTRAINT flyers_image_url_check CHECK (image_url ~* '^https?://.*'),
-- CONSTRAINT flyers_icon_url_check CHECK (icon_url ~* '^https?://.*'),
COMMENT ON TABLE public.flyers IS 'Stores metadata for each processed flyer, linking it to a store and its validity period.';
CREATE INDEX IF NOT EXISTS idx_flyers_store_id ON public.flyers(store_id);
COMMENT ON COLUMN public.flyers.file_name IS 'The original name of the uploaded flyer file (e.g., "flyer_week_1.pdf").';
@@ -214,9 +214,9 @@ CREATE TABLE IF NOT EXISTS public.brands (
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT brands_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*')
CONSTRAINT brands_name_check CHECK (TRIM(name) <> '')
);
-- CONSTRAINT brands_logo_url_check CHECK (logo_url IS NULL OR logo_url ~* '^https?://.*')
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
@@ -481,9 +481,9 @@ CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
upvotes INTEGER DEFAULT 0 NOT NULL CHECK (upvotes >= 0),
downvotes INTEGER DEFAULT 0 NOT NULL CHECK (downvotes >= 0),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
-- CONSTRAINT user_submitted_prices_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
COMMENT ON TABLE public.user_submitted_prices IS 'Stores item prices submitted by users directly from physical stores.';
COMMENT ON COLUMN public.user_submitted_prices.photo_url IS 'URL to user-submitted photo evidence of the price.';
COMMENT ON COLUMN public.user_submitted_prices.upvotes IS 'Community validation score indicating accuracy.';
@@ -538,9 +538,9 @@ CREATE TABLE IF NOT EXISTS public.recipes (
fork_count INTEGER DEFAULT 0 NOT NULL CHECK (fork_count >= 0),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT recipes_name_check CHECK (TRIM(name) <> ''),
CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
CONSTRAINT recipes_name_check CHECK (TRIM(name) <> '')
);
-- CONSTRAINT recipes_photo_url_check CHECK (photo_url IS NULL OR photo_url ~* '^https?://.*')
COMMENT ON TABLE public.recipes IS 'Stores recipes that can be used to generate shopping lists.';
COMMENT ON COLUMN public.recipes.servings IS 'The number of servings this recipe yields.';
COMMENT ON COLUMN public.recipes.original_recipe_id IS 'If this recipe is a variation of another, this points to the original.';
@@ -940,9 +940,9 @@ CREATE TABLE IF NOT EXISTS public.receipts (
raw_text TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
processed_at TIMESTAMPTZ,
CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https?://.*'),
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
-- CONSTRAINT receipts_receipt_image_url_check CHECK (receipt_image_url ~* '^https?://.*'),
COMMENT ON TABLE public.receipts IS 'Stores uploaded user receipts for purchase tracking and analysis.';
CREATE INDEX IF NOT EXISTS idx_receipts_user_id ON public.receipts(user_id);
CREATE INDEX IF NOT EXISTS idx_receipts_store_id ON public.receipts(store_id);

View File

@@ -628,7 +628,7 @@ describe('App Component', () => {
app: {
version: '2.0.0',
commitMessage: 'A new version!',
commitUrl: 'http://example.com/commit/2.0.0',
commitUrl: 'https://example.com/commit/2.0.0',
},
},
}));
@@ -638,7 +638,7 @@ describe('App Component', () => {
renderApp();
const versionLink = screen.getByText(`Version: 2.0.0`);
expect(versionLink).toBeInTheDocument();
expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
expect(versionLink).toHaveAttribute('href', 'https://example.com/commit/2.0.0');
});
it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {

View File

@@ -19,7 +19,7 @@ const mockedNotifyError = notifyError as Mocked<typeof notifyError>;
const defaultProps = {
isOpen: true,
onClose: vi.fn(),
imageUrl: 'http://example.com/flyer.jpg',
imageUrl: 'https://example.com/flyer.jpg',
onDataExtracted: vi.fn(),
};

View File

@@ -25,7 +25,7 @@ const mockLeaderboardData: LeaderboardUser[] = [
createMockLeaderboardUser({
user_id: 'user-2',
full_name: 'Bob',
avatar_url: 'http://example.com/bob.jpg',
avatar_url: 'https://example.com/bob.jpg',
points: 950,
rank: '2',
}),
@@ -95,7 +95,7 @@ describe('Leaderboard', () => {
// Check for correct avatar URLs
const bobAvatar = screen.getByAltText('Bob') as HTMLImageElement;
expect(bobAvatar.src).toBe('http://example.com/bob.jpg');
expect(bobAvatar.src).toBe('https://example.com/bob.jpg');
const aliceAvatar = screen.getByAltText('Alice') as HTMLImageElement;
expect(aliceAvatar.src).toContain('api.dicebear.com'); // Check for fallback avatar

147
src/config/rateLimiters.ts Normal file
View File

@@ -0,0 +1,147 @@
// src/config/rateLimiters.ts
import rateLimit from 'express-rate-limit';
import { shouldSkipRateLimit } from '../utils/rateLimit';
const standardConfig = {
standardHeaders: true,
legacyHeaders: false,
skip: shouldSkipRateLimit,
};
// --- AUTHENTICATION ---
export const loginLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 5,
message: 'Too many login attempts from this IP, please try again after 15 minutes.',
});
export const registerLimiter = rateLimit({
...standardConfig,
windowMs: 60 * 60 * 1000, // 1 hour
max: 5,
message: 'Too many accounts created from this IP, please try again after an hour.',
});
export const forgotPasswordLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 5,
message: 'Too many password reset requests from this IP, please try again after 15 minutes.',
});
export const resetPasswordLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 10,
message: 'Too many password reset attempts from this IP, please try again after 15 minutes.',
});
export const refreshTokenLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 20,
message: 'Too many token refresh attempts from this IP, please try again after 15 minutes.',
});
export const logoutLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 10,
message: 'Too many logout attempts from this IP, please try again after 15 minutes.',
});
// --- GENERAL PUBLIC & USER ---
export const publicReadLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 100,
message: 'Too many requests from this IP, please try again later.',
});
export const userReadLimiter = publicReadLimiter; // Alias for consistency
export const userUpdateLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 100,
message: 'Too many update requests from this IP, please try again after 15 minutes.',
});
export const reactionToggleLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 150,
message: 'Too many reaction requests from this IP, please try again later.',
});
export const trackingLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 200,
message: 'Too many tracking requests from this IP, please try again later.',
});
// --- SENSITIVE / COSTLY ---
export const userSensitiveUpdateLimiter = rateLimit({
...standardConfig,
windowMs: 60 * 60 * 1000, // 1 hour
max: 5,
message: 'Too many sensitive requests from this IP, please try again after an hour.',
});
export const adminTriggerLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 30,
message: 'Too many administrative triggers from this IP, please try again later.',
});
export const aiGenerationLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 20,
message: 'Too many AI generation requests from this IP, please try again after 15 minutes.',
});
export const suggestionLimiter = aiGenerationLimiter; // Alias
export const geocodeLimiter = rateLimit({
...standardConfig,
windowMs: 60 * 60 * 1000, // 1 hour
max: 100,
message: 'Too many geocoding requests from this IP, please try again later.',
});
export const priceHistoryLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 50,
message: 'Too many price history requests from this IP, please try again later.',
});
// --- UPLOADS / BATCH ---
export const adminUploadLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 20,
message: 'Too many file uploads from this IP, please try again after 15 minutes.',
});
export const userUploadLimiter = adminUploadLimiter; // Alias
export const aiUploadLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 10,
message: 'Too many file uploads from this IP, please try again after 15 minutes.',
});
export const batchLimiter = rateLimit({
...standardConfig,
windowMs: 15 * 60 * 1000, // 15 minutes
max: 50,
message: 'Too many batch requests from this IP, please try again later.',
});
export const budgetUpdateLimiter = batchLimiter; // Alias

View File

@@ -160,9 +160,9 @@ describe('AnalysisPanel', () => {
results: { WEB_SEARCH: 'Search results text.' },
sources: {
WEB_SEARCH: [
{ title: 'Valid Source', uri: 'http://example.com/source1' },
{ title: 'Valid Source', uri: 'https://example.com/source1' },
{ title: 'Source without URI', uri: null },
{ title: 'Another Valid Source', uri: 'http://example.com/source2' },
{ title: 'Another Valid Source', uri: 'https://example.com/source2' },
],
},
loadingAnalysis: null,
@@ -178,7 +178,7 @@ describe('AnalysisPanel', () => {
expect(screen.getByText('Sources:')).toBeInTheDocument();
const source1 = screen.getByText('Valid Source');
expect(source1).toBeInTheDocument();
expect(source1.closest('a')).toHaveAttribute('href', 'http://example.com/source1');
expect(source1.closest('a')).toHaveAttribute('href', 'https://example.com/source1');
expect(screen.queryByText('Source without URI')).not.toBeInTheDocument();
expect(screen.getByText('Another Valid Source')).toBeInTheDocument();
});
@@ -278,13 +278,13 @@ describe('AnalysisPanel', () => {
loadingAnalysis: null,
error: null,
runAnalysis: mockRunAnalysis,
generatedImageUrl: 'http://example.com/meal.jpg',
generatedImageUrl: 'https://example.com/meal.jpg',
generateImage: mockGenerateImage,
});
rerender(<AnalysisPanel selectedFlyer={mockFlyer} />);
const image = screen.getByAltText('AI generated meal plan');
expect(image).toBeInTheDocument();
expect(image).toHaveAttribute('src', 'http://example.com/meal.jpg');
expect(image).toHaveAttribute('src', 'https://example.com/meal.jpg');
});
it('should not show sources for non-search analysis types', () => {

View File

@@ -8,13 +8,13 @@ import { createMockStore } from '../../tests/utils/mockFactories';
const mockStore = createMockStore({
store_id: 1,
name: 'SuperMart',
logo_url: 'http://example.com/logo.png',
logo_url: 'https://example.com/logo.png',
});
const mockOnOpenCorrectionTool = vi.fn();
const defaultProps = {
imageUrl: 'http://example.com/flyer.jpg',
imageUrl: 'https://example.com/flyer.jpg',
store: mockStore,
validFrom: '2023-10-26',
validTo: '2023-11-01',

View File

@@ -1,8 +1,8 @@
// src/features/flyer/FlyerDisplay.tsx
import React from 'react';
import { ScanIcon } from '../../components/icons/ScanIcon';
import { formatDateRange } from '../../utils/dateUtils';
import type { Store } from '../../types';
import { formatDateRange } from './dateUtils';
import { ScanIcon } from '../../components/icons/ScanIcon';
export interface FlyerDisplayProps {
imageUrl: string | null;

View File

@@ -3,7 +3,7 @@ import React from 'react';
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach, afterEach, type Mocked } from 'vitest';
import { FlyerList } from './FlyerList';
import { formatShortDate } from './dateUtils';
import { formatShortDate } from '../../utils/dateUtils';
import type { Flyer, UserProfile } from '../../types';
import { createMockUserProfile } from '../../tests/utils/mockFactories';
import { createMockFlyer } from '../../tests/utils/mockFactories';
@@ -19,7 +19,7 @@ const mockFlyers: Flyer[] = [
flyer_id: 1,
file_name: 'metro_flyer_oct_1.pdf',
item_count: 50,
image_url: 'http://example.com/flyer1.jpg',
image_url: 'https://example.com/flyer1.jpg',
store: { store_id: 101, name: 'Metro' },
valid_from: '2023-10-05',
valid_to: '2023-10-11',
@@ -29,7 +29,7 @@ const mockFlyers: Flyer[] = [
flyer_id: 2,
file_name: 'walmart_flyer.pdf',
item_count: 75,
image_url: 'http://example.com/flyer2.jpg',
image_url: 'https://example.com/flyer2.jpg',
store: { store_id: 102, name: 'Walmart' },
valid_from: '2023-10-06',
valid_to: '2023-10-06', // Same day
@@ -40,8 +40,8 @@ const mockFlyers: Flyer[] = [
flyer_id: 3,
file_name: 'no-store-flyer.pdf',
item_count: 10,
image_url: 'http://example.com/flyer3.jpg',
icon_url: 'http://example.com/icon3.png',
image_url: 'https://example.com/flyer3.jpg',
icon_url: 'https://example.com/icon3.png',
valid_from: '2023-10-07',
valid_to: '2023-10-08',
store_address: '456 Side St, Ottawa',
@@ -53,7 +53,7 @@ const mockFlyers: Flyer[] = [
flyer_id: 4,
file_name: 'bad-date-flyer.pdf',
item_count: 5,
image_url: 'http://example.com/flyer4.jpg',
image_url: 'https://example.com/flyer4.jpg',
store: { store_id: 103, name: 'Date Store' },
created_at: 'invalid-date',
valid_from: 'invalid-from',
@@ -163,7 +163,7 @@ describe('FlyerList', () => {
const flyerWithIcon = screen.getByText('Unknown Store').closest('li'); // Flyer ID 3
const iconImage = flyerWithIcon?.querySelector('img');
expect(iconImage).toBeInTheDocument();
expect(iconImage).toHaveAttribute('src', 'http://example.com/icon3.png');
expect(iconImage).toHaveAttribute('src', 'https://example.com/icon3.png');
});
it('should render a document icon when icon_url is not present', () => {

View File

@@ -7,7 +7,7 @@ import { parseISO, format, isValid } from 'date-fns';
import { MapPinIcon, Trash2Icon } from 'lucide-react';
import { logger } from '../../services/logger.client';
import * as apiClient from '../../services/apiClient';
import { calculateDaysBetween, formatDateRange } from './dateUtils';
import { calculateDaysBetween, formatDateRange, getCurrentDateISOString } from '../../utils/dateUtils';
interface FlyerListProps {
flyers: Flyer[];
@@ -54,7 +54,7 @@ export const FlyerList: React.FC<FlyerListProps> = ({
verbose: true,
});
const daysLeft = calculateDaysBetween(format(new Date(), 'yyyy-MM-dd'), flyer.valid_to);
const daysLeft = calculateDaysBetween(getCurrentDateISOString(), flyer.valid_to);
let daysLeftText = '';
let daysLeftColor = '';

View File

@@ -1,130 +0,0 @@
// src/features/flyer/dateUtils.test.ts
import { describe, it, expect } from 'vitest';
import { formatShortDate, calculateDaysBetween, formatDateRange } from './dateUtils';
describe('formatShortDate', () => {
it('should format a valid YYYY-MM-DD date string correctly', () => {
expect(formatShortDate('2024-07-26')).toBe('Jul 26');
});
it('should handle single-digit days correctly', () => {
expect(formatShortDate('2025-01-05')).toBe('Jan 5');
});
it('should handle dates at the end of the year', () => {
expect(formatShortDate('2023-12-31')).toBe('Dec 31');
});
it('should return null for a null input', () => {
expect(formatShortDate(null)).toBeNull();
});
it('should return null for an undefined input', () => {
expect(formatShortDate(undefined)).toBeNull();
});
it('should return null for an empty string input', () => {
expect(formatShortDate('')).toBeNull();
});
it('should return null for an invalid date string', () => {
expect(formatShortDate('not-a-real-date')).toBeNull();
});
it('should return null for a malformed date string', () => {
expect(formatShortDate('2024-13-01')).toBeNull(); // Invalid month
});
it('should correctly format a full ISO string with time and timezone', () => {
expect(formatShortDate('2024-12-25T10:00:00Z')).toBe('Dec 25');
});
});
describe('calculateDaysBetween', () => {
it('should calculate the difference in days between two valid date strings', () => {
expect(calculateDaysBetween('2023-01-01', '2023-01-05')).toBe(4);
});
it('should return a negative number if the end date is before the start date', () => {
expect(calculateDaysBetween('2023-01-05', '2023-01-01')).toBe(-4);
});
it('should handle Date objects', () => {
const start = new Date('2023-01-01');
const end = new Date('2023-01-10');
expect(calculateDaysBetween(start, end)).toBe(9);
});
it('should return null if either date is null or undefined', () => {
expect(calculateDaysBetween(null, '2023-01-01')).toBeNull();
expect(calculateDaysBetween('2023-01-01', undefined)).toBeNull();
});
it('should return null if either date is invalid', () => {
expect(calculateDaysBetween('invalid', '2023-01-01')).toBeNull();
expect(calculateDaysBetween('2023-01-01', 'invalid')).toBeNull();
});
});
describe('formatDateRange', () => {
it('should format a range with two different valid dates', () => {
expect(formatDateRange('2023-01-01', '2023-01-05')).toBe('Jan 1 - Jan 5');
});
it('should format a range with the same start and end date as a single date', () => {
expect(formatDateRange('2023-01-01', '2023-01-01')).toBe('Jan 1');
});
it('should return only the start date if end date is missing', () => {
expect(formatDateRange('2023-01-01', null)).toBe('Jan 1');
expect(formatDateRange('2023-01-01', undefined)).toBe('Jan 1');
});
it('should return only the end date if start date is missing', () => {
expect(formatDateRange(null, '2023-01-05')).toBe('Jan 5');
expect(formatDateRange(undefined, '2023-01-05')).toBe('Jan 5');
});
it('should return null if both dates are missing or invalid', () => {
expect(formatDateRange(null, null)).toBeNull();
expect(formatDateRange(undefined, undefined)).toBeNull();
expect(formatDateRange('invalid', 'invalid')).toBeNull();
});
it('should handle one valid and one invalid date by showing only the valid one', () => {
expect(formatDateRange('2023-01-01', 'invalid')).toBe('Jan 1');
expect(formatDateRange('invalid', '2023-01-05')).toBe('Jan 5');
});
describe('verbose mode', () => {
it('should format a range with two different valid dates verbosely', () => {
expect(formatDateRange('2023-01-01', '2023-01-05', { verbose: true })).toBe(
'Deals valid from January 1, 2023 to January 5, 2023',
);
});
it('should format a range with the same start and end date verbosely', () => {
expect(formatDateRange('2023-01-01', '2023-01-01', { verbose: true })).toBe(
'Valid on January 1, 2023',
);
});
it('should format only the start date verbosely', () => {
expect(formatDateRange('2023-01-01', null, { verbose: true })).toBe(
'Deals start January 1, 2023',
);
});
it('should format only the end date verbosely', () => {
expect(formatDateRange(null, '2023-01-05', { verbose: true })).toBe(
'Deals end January 5, 2023',
);
});
it('should handle one valid and one invalid date verbosely', () => {
expect(formatDateRange('2023-01-01', 'invalid', { verbose: true })).toBe(
'Deals start January 1, 2023',
);
});
});
});

View File

@@ -1,65 +0,0 @@
// src/features/flyer/dateUtils.ts
import { parseISO, format, isValid, differenceInDays } from 'date-fns';
export const formatShortDate = (dateString: string | null | undefined): string | null => {
if (!dateString) return null;
// Using `parseISO` from date-fns is more reliable than `new Date()` for YYYY-MM-DD strings.
// It correctly interprets the string as a local date, avoiding timezone-related "off-by-one" errors.
const date = parseISO(dateString);
if (isValid(date)) {
return format(date, 'MMM d');
}
return null;
};
export const calculateDaysBetween = (
startDate: string | Date | null | undefined,
endDate: string | Date | null | undefined,
): number | null => {
if (!startDate || !endDate) return null;
const start = typeof startDate === 'string' ? parseISO(startDate) : startDate;
const end = typeof endDate === 'string' ? parseISO(endDate) : endDate;
if (!isValid(start) || !isValid(end)) return null;
return differenceInDays(end, start);
};
interface DateRangeOptions {
verbose?: boolean;
}
export const formatDateRange = (
startDate: string | null | undefined,
endDate: string | null | undefined,
options?: DateRangeOptions,
): string | null => {
if (!options?.verbose) {
const start = formatShortDate(startDate);
const end = formatShortDate(endDate);
if (start && end) {
return start === end ? start : `${start} - ${end}`;
}
return start || end || null;
}
// Verbose format logic
const dateFormat = 'MMMM d, yyyy';
const formatFn = (dateStr: string | null | undefined) => {
if (!dateStr) return null;
const date = parseISO(dateStr);
return isValid(date) ? format(date, dateFormat) : null;
};
const start = formatFn(startDate);
const end = formatFn(endDate);
if (start && end) {
return start === end ? `Valid on ${start}` : `Deals valid from ${start} to ${end}`;
}
if (start) return `Deals start ${start}`;
if (end) return `Deals end ${end}`;
return null;
};

View File

@@ -15,8 +15,8 @@ describe('useFlyerItems Hook', () => {
const mockFlyer = createMockFlyer({
flyer_id: 123,
file_name: 'test-flyer.jpg',
image_url: 'http://example.com/test.jpg',
icon_url: 'http://example.com/icon.jpg',
image_url: 'https://example.com/test.jpg',
icon_url: 'https://example.com/icon.jpg',
checksum: 'abc',
valid_from: '2024-01-01',
valid_to: '2024-01-07',

View File

@@ -72,7 +72,7 @@ describe('useFlyers Hook and FlyersProvider', () => {
createMockFlyer({
flyer_id: 1,
file_name: 'flyer1.jpg',
image_url: 'http://example.com/flyer1.jpg',
image_url: 'https://example.com/flyer1.jpg',
item_count: 5,
created_at: '2024-01-01',
}),

View File

@@ -0,0 +1,51 @@
// src/hooks/useUserProfileData.ts
import { useState, useEffect } from 'react';
import * as apiClient from '../services/apiClient';
import { UserProfile, Achievement, UserAchievement } from '../types';
import { logger } from '../services/logger.client';
export const useUserProfileData = () => {
const [profile, setProfile] = useState<UserProfile | null>(null);
const [achievements, setAchievements] = useState<(UserAchievement & Achievement)[]>([]);
const [isLoading, setIsLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
useEffect(() => {
const fetchData = async () => {
setIsLoading(true);
try {
const [profileRes, achievementsRes] = await Promise.all([
apiClient.getAuthenticatedUserProfile(),
apiClient.getUserAchievements(),
]);
if (!profileRes.ok) throw new Error('Failed to fetch user profile.');
if (!achievementsRes.ok) throw new Error('Failed to fetch user achievements.');
const profileData: UserProfile | null = await profileRes.json();
const achievementsData: (UserAchievement & Achievement)[] | null =
await achievementsRes.json();
logger.info(
{ profileData, achievementsCount: achievementsData?.length },
'useUserProfileData: Fetched data',
);
if (profileData) {
setProfile(profileData);
}
setAchievements(achievementsData || []);
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'An unknown error occurred.';
setError(errorMessage);
logger.error({ err }, 'Error in useUserProfileData:');
} finally {
setIsLoading(false);
}
};
fetchData();
}, []);
return { profile, setProfile, achievements, isLoading, error };
};

View File

@@ -79,7 +79,7 @@ describe('HomePage Component', () => {
describe('when a flyer is selected', () => {
const mockFlyer: Flyer = createMockFlyer({
flyer_id: 1,
image_url: 'http://example.com/flyer.jpg',
image_url: 'https://example.com/flyer.jpg',
});
it('should render FlyerDisplay but not data tables if there are no flyer items', () => {

View File

@@ -109,6 +109,33 @@ describe('ResetPasswordPage', () => {
);
});
it('should show an error message if API returns a non-JSON error response', async () => {
// Simulate a server error returning HTML instead of JSON
mockedApiClient.resetPassword.mockResolvedValue(
new Response('<h1>Server Error</h1>', {
status: 500,
headers: { 'Content-Type': 'text/html' },
}),
);
renderWithRouter('test-token');
fireEvent.change(screen.getByPlaceholderText('New Password'), {
target: { value: 'newSecurePassword123' },
});
fireEvent.change(screen.getByPlaceholderText('Confirm New Password'), {
target: { value: 'newSecurePassword123' },
});
fireEvent.click(screen.getByRole('button', { name: /reset password/i }));
await waitFor(() => {
// The error from response.json() is implementation-dependent.
// We check for a substring that is likely to be present.
expect(screen.getByText(/not valid JSON/i)).toBeInTheDocument();
});
expect(logger.error).toHaveBeenCalledWith({ err: expect.any(SyntaxError) }, 'Failed to reset password.');
});
it('should show a loading spinner while submitting', async () => {
let resolvePromise: (value: Response) => void;
const mockPromise = new Promise<Response>((resolve) => {

View File

@@ -26,7 +26,7 @@ const mockedApiClient = vi.mocked(apiClient);
const mockProfile: UserProfile = createMockUserProfile({
user: createMockUser({ user_id: 'user-123', email: 'test@example.com' }),
full_name: 'Test User',
avatar_url: 'http://example.com/avatar.jpg',
avatar_url: 'https://example.com/avatar.jpg',
points: 150,
role: 'user',
});
@@ -123,6 +123,24 @@ describe('UserProfilePage', () => {
});
});
it('should handle null achievements data gracefully on fetch', async () => {
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue(
new Response(JSON.stringify(mockProfile)),
);
// Mock a successful response but with a null body for achievements
mockedApiClient.getUserAchievements.mockResolvedValue(new Response(JSON.stringify(null)));
render(<UserProfilePage />);
await waitFor(() => {
expect(screen.getByRole('heading', { name: 'Test User' })).toBeInTheDocument();
// The mock achievements list should show 0 achievements because the component
// should handle the null response and pass an empty array to the list.
expect(screen.getByTestId('achievements-list-mock')).toHaveTextContent(
'Achievements Count: 0',
);
});
});
it('should render the profile and achievements on successful fetch', async () => {
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue(
new Response(JSON.stringify(mockProfile)),
@@ -294,6 +312,24 @@ describe('UserProfilePage', () => {
});
});
it('should handle non-ok response with null body when saving name', async () => {
// This tests the case where the server returns an error status but an empty/null body.
mockedApiClient.updateUserProfile.mockResolvedValue(new Response(null, { status: 500 }));
render(<UserProfilePage />);
await screen.findByText('Test User');
fireEvent.click(screen.getByRole('button', { name: /edit/i }));
fireEvent.change(screen.getByRole('textbox'), { target: { value: 'New Name' } });
fireEvent.click(screen.getByRole('button', { name: /save/i }));
await waitFor(() => {
// The component should fall back to the default error message.
expect(mockedNotificationService.notifyError).toHaveBeenCalledWith(
'Failed to update name.',
);
});
});
it('should handle unknown errors when saving name', async () => {
mockedApiClient.updateUserProfile.mockRejectedValue('Unknown update error');
render(<UserProfilePage />);
@@ -323,7 +359,7 @@ describe('UserProfilePage', () => {
});
it('should upload a new avatar and update the image source', async () => {
const updatedProfile = { ...mockProfile, avatar_url: 'http://example.com/new-avatar.png' };
const updatedProfile = { ...mockProfile, avatar_url: 'https://example.com/new-avatar.png' };
// Log when the mock is called
mockedApiClient.uploadAvatar.mockImplementation((file) => {
@@ -420,6 +456,22 @@ describe('UserProfilePage', () => {
});
});
it('should handle non-ok response with null body when uploading avatar', async () => {
mockedApiClient.uploadAvatar.mockResolvedValue(new Response(null, { status: 500 }));
render(<UserProfilePage />);
await screen.findByAltText('User Avatar');
const fileInput = screen.getByTestId('avatar-file-input');
const file = new File(['(⌐□_□)'], 'chucknorris.png', { type: 'image/png' });
fireEvent.change(fileInput, { target: { files: [file] } });
await waitFor(() => {
expect(mockedNotificationService.notifyError).toHaveBeenCalledWith(
'Failed to upload avatar.',
);
});
});
it('should handle unknown errors when uploading avatar', async () => {
mockedApiClient.uploadAvatar.mockRejectedValue('Unknown upload error');
render(<UserProfilePage />);

View File

@@ -1,15 +1,13 @@
import React, { useState, useEffect, useRef } from 'react';
import * as apiClient from '../services/apiClient';
import { UserProfile, Achievement, UserAchievement } from '../types';
import type { UserProfile } from '../types';
import { logger } from '../services/logger.client';
import { notifySuccess, notifyError } from '../services/notificationService';
import { AchievementsList } from '../components/AchievementsList';
import { useUserProfileData } from '../hooks/useUserProfileData';
const UserProfilePage: React.FC = () => {
const [profile, setProfile] = useState<UserProfile | null>(null);
const [achievements, setAchievements] = useState<(UserAchievement & Achievement)[]>([]);
const [isLoading, setIsLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const { profile, setProfile, achievements, isLoading, error } = useUserProfileData();
const [isEditingName, setIsEditingName] = useState(false);
const [editingName, setEditingName] = useState('');
const [isUploading, setIsUploading] = useState(false);
@@ -17,43 +15,10 @@ const UserProfilePage: React.FC = () => {
const fileInputRef = useRef<HTMLInputElement>(null);
useEffect(() => {
const fetchData = async () => {
setIsLoading(true);
try {
// Fetch profile and achievements data in parallel
const [profileRes, achievementsRes] = await Promise.all([
apiClient.getAuthenticatedUserProfile(),
apiClient.getUserAchievements(),
]);
if (!profileRes.ok) throw new Error('Failed to fetch user profile.');
if (!achievementsRes.ok) throw new Error('Failed to fetch user achievements.');
const profileData: UserProfile = await profileRes.json();
const achievementsData: (UserAchievement & Achievement)[] = await achievementsRes.json();
logger.info(
{ profileData, achievementsCount: achievementsData?.length },
'UserProfilePage: Fetched data',
);
setProfile(profileData);
if (profileData) {
setEditingName(profileData.full_name || '');
}
setAchievements(achievementsData);
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'An unknown error occurred.';
setError(errorMessage);
logger.error({ err }, 'Error fetching user profile data:');
} finally {
setIsLoading(false);
}
};
fetchData();
}, []); // Empty dependency array means this runs once on component mount
if (profile) {
setEditingName(profile.full_name || '');
}
}, [profile]);
const handleSaveName = async () => {
if (!profile) return;
@@ -61,8 +26,8 @@ const UserProfilePage: React.FC = () => {
try {
const response = await apiClient.updateUserProfile({ full_name: editingName });
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.message || 'Failed to update name.');
const errorData = await response.json().catch(() => null); // Gracefully handle non-JSON responses
throw new Error(errorData?.message || 'Failed to update name.');
}
const updatedProfile = await response.json();
setProfile((prevProfile) => (prevProfile ? { ...prevProfile, ...updatedProfile } : null));
@@ -88,8 +53,8 @@ const UserProfilePage: React.FC = () => {
try {
const response = await apiClient.uploadAvatar(file);
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.message || 'Failed to upload avatar.');
const errorData = await response.json().catch(() => null); // Gracefully handle non-JSON responses
throw new Error(errorData?.message || 'Failed to upload avatar.');
}
const updatedProfile = await response.json();
setProfile((prevProfile) => (prevProfile ? { ...prevProfile, ...updatedProfile } : null));

View File

@@ -30,7 +30,7 @@ const mockLogs: ActivityLogItem[] = [
user_id: 'user-123',
action: 'flyer_processed',
display_text: 'Processed a new flyer for Walmart.',
user_avatar_url: 'http://example.com/avatar.png',
user_avatar_url: 'https://example.com/avatar.png',
user_full_name: 'Test User',
details: { flyer_id: 1, store_name: 'Walmart' },
}),
@@ -63,7 +63,7 @@ const mockLogs: ActivityLogItem[] = [
action: 'recipe_favorited',
display_text: 'User favorited a recipe',
user_full_name: 'Pizza Lover',
user_avatar_url: 'http://example.com/pizza.png',
user_avatar_url: 'https://example.com/pizza.png',
details: { recipe_name: 'Best Pizza' },
}),
createMockActivityLogItem({
@@ -136,7 +136,7 @@ describe('ActivityLog', () => {
// Check for avatar
const avatar = screen.getByAltText('Test User');
expect(avatar).toBeInTheDocument();
expect(avatar).toHaveAttribute('src', 'http://example.com/avatar.png');
expect(avatar).toHaveAttribute('src', 'https://example.com/avatar.png');
// Check for fallback avatar (Newbie User has no avatar)
// The fallback is an SVG inside a span. We can check for the span's class or the SVG.

View File

@@ -59,14 +59,14 @@ describe('FlyerReviewPage', () => {
file_name: 'flyer1.jpg',
created_at: '2023-01-01T00:00:00Z',
store: { name: 'Store A' },
icon_url: 'http://example.com/icon1.jpg',
icon_url: 'https://example.com/icon1.jpg',
},
{
flyer_id: 2,
file_name: 'flyer2.jpg',
created_at: '2023-01-02T00:00:00Z',
store: { name: 'Store B' },
icon_url: 'http://example.com/icon2.jpg',
icon_url: 'https://example.com/icon2.jpg',
},
{
flyer_id: 3,

View File

@@ -19,7 +19,7 @@ const mockBrands = [
brand_id: 2,
name: 'Compliments',
store_name: 'Sobeys',
logo_url: 'http://example.com/compliments.png',
logo_url: 'https://example.com/compliments.png',
}),
];
@@ -92,7 +92,7 @@ describe('AdminBrandManager', () => {
);
mockedApiClient.uploadBrandLogo.mockImplementation(
async () =>
new Response(JSON.stringify({ logoUrl: 'http://example.com/new-logo.png' }), {
new Response(JSON.stringify({ logoUrl: 'https://example.com/new-logo.png' }), {
status: 200,
}),
);
@@ -120,7 +120,7 @@ describe('AdminBrandManager', () => {
// Check if the UI updates with the new logo
expect(screen.getByAltText('No Frills logo')).toHaveAttribute(
'src',
'http://example.com/new-logo.png',
'https://example.com/new-logo.png',
);
console.log('TEST SUCCESS: All assertions for successful upload passed.');
});
@@ -350,7 +350,7 @@ describe('AdminBrandManager', () => {
// Brand 2 should still have original logo
expect(screen.getByAltText('Compliments logo')).toHaveAttribute(
'src',
'http://example.com/compliments.png',
'https://example.com/compliments.png',
);
});
});

View File

@@ -35,7 +35,7 @@ const authenticatedUser = createMockUser({ user_id: 'auth-user-123', email: 'tes
const mockAddressId = 123;
const authenticatedProfile = createMockUserProfile({
full_name: 'Test User',
avatar_url: 'http://example.com/avatar.png',
avatar_url: 'https://example.com/avatar.png',
role: 'user',
points: 100,
preferences: {
@@ -264,6 +264,7 @@ describe('ProfileManager', () => {
});
it('should show an error if trying to save profile when not logged in', async () => {
const loggerSpy = vi.spyOn(logger.logger, 'warn');
// This is an edge case, but good to test the safeguard
render(<ProfileManager {...defaultAuthenticatedProps} userProfile={null} />);
fireEvent.change(screen.getByLabelText(/full name/i), { target: { value: 'Updated Name' } });
@@ -271,6 +272,7 @@ describe('ProfileManager', () => {
await waitFor(() => {
expect(notifyError).toHaveBeenCalledWith('Cannot save profile, no user is logged in.');
expect(loggerSpy).toHaveBeenCalledWith('[handleProfileSave] Aborted: No user is logged in.');
});
expect(mockedApiClient.updateUserProfile).not.toHaveBeenCalled();
});
@@ -496,6 +498,23 @@ describe('ProfileManager', () => {
});
});
it('should show an error when trying to link a GitHub account', async () => {
render(<ProfileManager {...defaultAuthenticatedProps} />);
fireEvent.click(screen.getByRole('button', { name: /security/i }));
await waitFor(() => {
expect(screen.getByRole('button', { name: /link github account/i })).toBeInTheDocument();
});
fireEvent.click(screen.getByRole('button', { name: /link github account/i }));
await waitFor(() => {
expect(notifyError).toHaveBeenCalledWith(
'Account linking with github is not yet implemented.',
);
});
});
it('should switch between all tabs correctly', async () => {
render(<ProfileManager {...defaultAuthenticatedProps} />);
@@ -804,6 +823,63 @@ describe('ProfileManager', () => {
});
});
it('should allow changing unit system when preferences are initially null', async () => {
const profileWithoutPrefs = { ...authenticatedProfile, preferences: null as any };
const { rerender } = render(
<ProfileManager {...defaultAuthenticatedProps} userProfile={profileWithoutPrefs} />,
);
fireEvent.click(screen.getByRole('button', { name: /preferences/i }));
const imperialRadio = await screen.findByLabelText(/imperial/i);
const metricRadio = screen.getByLabelText(/metric/i);
// With null preferences, neither should be checked.
expect(imperialRadio).not.toBeChecked();
expect(metricRadio).not.toBeChecked();
// Mock the API response for the update
const updatedProfileWithPrefs = {
...profileWithoutPrefs,
preferences: { darkMode: false, unitSystem: 'metric' as const },
};
mockedApiClient.updateUserPreferences.mockResolvedValue({
ok: true,
json: () => Promise.resolve(updatedProfileWithPrefs),
} as Response);
fireEvent.click(metricRadio);
await waitFor(() => {
expect(mockedApiClient.updateUserPreferences).toHaveBeenCalledWith(
{ unitSystem: 'metric' },
expect.anything(),
);
expect(mockOnProfileUpdate).toHaveBeenCalledWith(updatedProfileWithPrefs);
});
// Rerender with the new profile to check the UI update
rerender(
<ProfileManager {...defaultAuthenticatedProps} userProfile={updatedProfileWithPrefs} />,
);
fireEvent.click(screen.getByRole('button', { name: /preferences/i }));
expect(await screen.findByLabelText(/metric/i)).toBeChecked();
expect(screen.getByLabelText(/imperial/i)).not.toBeChecked();
});
it('should not call onProfileUpdate if updating unit system fails', async () => {
mockedApiClient.updateUserPreferences.mockRejectedValue(new Error('API failed'));
render(<ProfileManager {...defaultAuthenticatedProps} />);
fireEvent.click(screen.getByRole('button', { name: /preferences/i }));
const metricRadio = await screen.findByLabelText(/metric/i);
fireEvent.click(metricRadio);
await waitFor(() => {
expect(notifyError).toHaveBeenCalledWith('API failed');
});
expect(mockOnProfileUpdate).not.toHaveBeenCalled();
});
it('should only call updateProfile when only profile data has changed', async () => {
render(<ProfileManager {...defaultAuthenticatedProps} />);
await waitFor(() =>
@@ -1004,5 +1080,19 @@ describe('ProfileManager', () => {
expect(notifyError).toHaveBeenCalledWith('Permission denied');
});
});
it('should not trigger OAuth link if user profile is missing', async () => {
// This is an edge case to test the guard clause in handleOAuthLink
render(<ProfileManager {...defaultAuthenticatedProps} userProfile={null} />);
fireEvent.click(screen.getByRole('button', { name: /security/i }));
const linkButton = await screen.findByRole('button', { name: /link google account/i });
fireEvent.click(linkButton);
// The function should just return, so nothing should happen.
await waitFor(() => {
expect(notifyError).not.toHaveBeenCalled();
});
});
});
});

View File

@@ -250,6 +250,17 @@ describe('Admin Content Management Routes (/api/admin)', () => {
expect(response.status).toBe(404);
expect(response.body.message).toBe('Correction with ID 999 not found');
});
it('PUT /corrections/:id should return 500 on a generic DB error', async () => {
vi.mocked(mockedDb.adminRepo.updateSuggestedCorrection).mockRejectedValue(
new Error('Generic DB Error'),
);
const response = await supertest(app)
.put('/api/admin/corrections/101')
.send({ suggested_value: 'new value' });
expect(response.status).toBe(500);
expect(response.body.message).toBe('Generic DB Error');
});
});
describe('Flyer Review Routes', () => {
@@ -294,6 +305,13 @@ describe('Admin Content Management Routes (/api/admin)', () => {
expect(response.body).toEqual(mockBrands);
});
it('GET /brands should return 500 on DB error', async () => {
vi.mocked(mockedDb.flyerRepo.getAllBrands).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/brands');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
});
it('POST /brands/:id/logo should upload a logo and update the brand', async () => {
const brandId = 55;
vi.mocked(mockedDb.adminRepo.updateBrandLogo).mockResolvedValue(undefined);
@@ -500,6 +518,16 @@ describe('Admin Content Management Routes (/api/admin)', () => {
expect(response.body.message).toBe('Flyer with ID 999 not found.');
});
it('DELETE /flyers/:flyerId should return 500 on a generic DB error', async () => {
const flyerId = 42;
vi.mocked(mockedDb.flyerRepo.deleteFlyer).mockRejectedValue(
new Error('Generic DB Error'),
);
const response = await supertest(app).delete(`/api/admin/flyers/${flyerId}`);
expect(response.status).toBe(500);
expect(response.body.message).toBe('Generic DB Error');
});
it('DELETE /flyers/:flyerId should return 400 for an invalid flyerId', async () => {
const response = await supertest(app).delete('/api/admin/flyers/abc');
expect(response.status).toBe(400);

View File

@@ -54,6 +54,14 @@ vi.mock('../services/workers.server', () => ({
weeklyAnalyticsWorker: { name: 'weekly-analytics-reporting', isRunning: vi.fn() },
}));
// Mock the monitoring service directly to test route error handling
vi.mock('../services/monitoringService.server', () => ({
monitoringService: {
getWorkerStatuses: vi.fn(),
getQueueStatuses: vi.fn(),
},
}));
// Mock other dependencies that are part of the adminRouter setup but not directly tested here
vi.mock('../services/db/flyer.db');
vi.mock('../services/db/recipe.db');
@@ -78,11 +86,8 @@ vi.mock('@bull-board/express', () => ({
import adminRouter from './admin.routes';
// Import the mocked modules to control them
import * as queueService from '../services/queueService.server';
import * as workerService from '../services/workers.server';
import { monitoringService } from '../services/monitoringService.server';
import { adminRepo } from '../services/db/index.db';
const mockedQueueService = queueService as Mocked<typeof queueService>;
const mockedWorkerService = workerService as Mocked<typeof workerService>;
// Mock the logger
vi.mock('../services/logger.server', () => ({
@@ -146,16 +151,26 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
expect(response.body.errors).toBeDefined();
expect(response.body.errors.length).toBe(2); // Both limit and offset are invalid
});
it('should return 500 if fetching activity log fails', async () => {
vi.mocked(adminRepo.getActivityLog).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/activity-log');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
});
});
describe('GET /workers/status', () => {
it('should return the status of all registered workers', async () => {
// Arrange: Set the mock status for each worker
vi.mocked(mockedWorkerService.flyerWorker.isRunning).mockReturnValue(true);
vi.mocked(mockedWorkerService.emailWorker.isRunning).mockReturnValue(true);
vi.mocked(mockedWorkerService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
vi.mocked(mockedWorkerService.cleanupWorker.isRunning).mockReturnValue(true);
vi.mocked(mockedWorkerService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);
const mockStatuses = [
{ name: 'flyer-processing', isRunning: true },
{ name: 'email-sending', isRunning: true },
{ name: 'analytics-reporting', isRunning: false },
{ name: 'file-cleanup', isRunning: true },
{ name: 'weekly-analytics-reporting', isRunning: true },
];
vi.mocked(monitoringService.getWorkerStatuses).mockResolvedValue(mockStatuses);
// Act
const response = await supertest(app).get('/api/admin/workers/status');
@@ -170,51 +185,41 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
{ name: 'weekly-analytics-reporting', isRunning: true },
]);
});
it('should return 500 if fetching worker statuses fails', async () => {
vi.mocked(monitoringService.getWorkerStatuses).mockRejectedValue(new Error('Worker Error'));
const response = await supertest(app).get('/api/admin/workers/status');
expect(response.status).toBe(500);
expect(response.body.message).toBe('Worker Error');
});
});
describe('GET /queues/status', () => {
it('should return job counts for all registered queues', async () => {
// Arrange: Set the mock job counts for each queue
vi.mocked(mockedQueueService.flyerQueue.getJobCounts).mockResolvedValue({
waiting: 5,
active: 1,
completed: 100,
failed: 2,
delayed: 0,
paused: 0,
});
vi.mocked(mockedQueueService.emailQueue.getJobCounts).mockResolvedValue({
waiting: 0,
active: 0,
completed: 50,
failed: 0,
delayed: 0,
paused: 0,
});
vi.mocked(mockedQueueService.analyticsQueue.getJobCounts).mockResolvedValue({
waiting: 0,
active: 1,
completed: 10,
failed: 1,
delayed: 0,
paused: 0,
});
vi.mocked(mockedQueueService.cleanupQueue.getJobCounts).mockResolvedValue({
waiting: 2,
active: 0,
completed: 25,
failed: 0,
delayed: 0,
paused: 0,
});
vi.mocked(mockedQueueService.weeklyAnalyticsQueue.getJobCounts).mockResolvedValue({
waiting: 1,
active: 0,
completed: 5,
failed: 0,
delayed: 0,
paused: 0,
});
const mockStatuses = [
{
name: 'flyer-processing',
counts: { waiting: 5, active: 1, completed: 100, failed: 2, delayed: 0, paused: 0 },
},
{
name: 'email-sending',
counts: { waiting: 0, active: 0, completed: 50, failed: 0, delayed: 0, paused: 0 },
},
{
name: 'analytics-reporting',
counts: { waiting: 0, active: 1, completed: 10, failed: 1, delayed: 0, paused: 0 },
},
{
name: 'file-cleanup',
counts: { waiting: 2, active: 0, completed: 25, failed: 0, delayed: 0, paused: 0 },
},
{
name: 'weekly-analytics-reporting',
counts: { waiting: 1, active: 0, completed: 5, failed: 0, delayed: 0, paused: 0 },
},
];
vi.mocked(monitoringService.getQueueStatuses).mockResolvedValue(mockStatuses);
// Act
const response = await supertest(app).get('/api/admin/queues/status');
@@ -246,7 +251,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
});
it('should return 500 if fetching queue counts fails', async () => {
vi.mocked(mockedQueueService.flyerQueue.getJobCounts).mockRejectedValue(
vi.mocked(monitoringService.getQueueStatuses).mockRejectedValue(
new Error('Redis is down'),
);

View File

@@ -0,0 +1,113 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import { createTestApp } from '../tests/utils/createTestApp';
import { createMockUserProfile } from '../tests/utils/mockFactories';
// Mock dependencies required by admin.routes.ts
vi.mock('../services/db/index.db', () => ({
adminRepo: {},
flyerRepo: {},
recipeRepo: {},
userRepo: {},
personalizationRepo: {},
notificationRepo: {},
}));
vi.mock('../services/backgroundJobService', () => ({
backgroundJobService: {
runDailyDealCheck: vi.fn(),
triggerAnalyticsReport: vi.fn(),
triggerWeeklyAnalyticsReport: vi.fn(),
},
}));
vi.mock('../services/queueService.server', () => ({
flyerQueue: { add: vi.fn(), getJob: vi.fn() },
emailQueue: { add: vi.fn(), getJob: vi.fn() },
analyticsQueue: { add: vi.fn(), getJob: vi.fn() },
cleanupQueue: { add: vi.fn(), getJob: vi.fn() },
weeklyAnalyticsQueue: { add: vi.fn(), getJob: vi.fn() },
}));
vi.mock('../services/geocodingService.server', () => ({
geocodingService: { clearGeocodeCache: vi.fn() },
}));
vi.mock('../services/logger.server', async () => ({
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
vi.mock('@bull-board/api');
vi.mock('@bull-board/api/bullMQAdapter');
vi.mock('@bull-board/express', () => ({
ExpressAdapter: class {
setBasePath() {}
getRouter() { return (req: any, res: any, next: any) => next(); }
},
}));
vi.mock('node:fs/promises');
// Mock Passport to allow admin access
vi.mock('./passport.routes', () => ({
default: {
authenticate: vi.fn(() => (req: any, res: any, next: any) => {
req.user = createMockUserProfile({ role: 'admin' });
next();
}),
},
isAdmin: (req: any, res: any, next: any) => next(),
}));
import adminRouter from './admin.routes';
describe('Admin Routes Rate Limiting', () => {
const app = createTestApp({ router: adminRouter, basePath: '/api/admin' });
beforeEach(() => {
vi.clearAllMocks();
});
describe('Trigger Rate Limiting', () => {
it('should block requests to /trigger/daily-deal-check after exceeding limit', async () => {
const limit = 30; // Matches adminTriggerLimiter config
// Make requests up to the limit
for (let i = 0; i < limit; i++) {
await supertest(app)
.post('/api/admin/trigger/daily-deal-check')
.set('X-Test-Rate-Limit-Enable', 'true');
}
// The next request should be blocked
const response = await supertest(app)
.post('/api/admin/trigger/daily-deal-check')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(429);
expect(response.text).toContain('Too many administrative triggers');
});
});
describe('Upload Rate Limiting', () => {
it('should block requests to /brands/:id/logo after exceeding limit', async () => {
const limit = 20; // Matches adminUploadLimiter config
const brandId = 1;
// Make requests up to the limit
// Note: We don't need to attach a file to test the rate limiter, as it runs before multer
for (let i = 0; i < limit; i++) {
await supertest(app)
.post(`/api/admin/brands/${brandId}/logo`)
.set('X-Test-Rate-Limit-Enable', 'true');
}
const response = await supertest(app)
.post(`/api/admin/brands/${brandId}/logo`)
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(429);
expect(response.text).toContain('Too many file uploads');
});
});
});

View File

@@ -35,6 +35,7 @@ import { monitoringService } from '../services/monitoringService.server';
import { userService } from '../services/userService';
import { cleanupUploadedFile } from '../utils/fileUtils';
import { brandService } from '../services/brandService';
import { adminTriggerLimiter, adminUploadLimiter } from '../config/rateLimiters';
const updateCorrectionSchema = numericIdParam('id').extend({
body: z.object({
@@ -242,6 +243,7 @@ router.put(
router.post(
'/brands/:id/logo',
adminUploadLimiter,
validateRequest(numericIdParam('id')),
brandLogoUpload.single('logoImage'),
requireFileUpload('logoImage'),
@@ -421,6 +423,7 @@ router.delete(
*/
router.post(
'/trigger/daily-deal-check',
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
@@ -449,6 +452,7 @@ router.post(
*/
router.post(
'/trigger/analytics-report',
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
@@ -474,6 +478,7 @@ router.post(
*/
router.post(
'/flyers/:flyerId/cleanup',
adminTriggerLimiter,
validateRequest(numericIdParam('flyerId')),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
@@ -502,6 +507,7 @@ router.post(
*/
router.post(
'/trigger/failing-job',
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
@@ -528,6 +534,7 @@ router.post(
*/
router.post(
'/system/clear-geocode-cache',
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
@@ -580,6 +587,7 @@ router.get('/queues/status', validateRequest(emptySchema), async (req: Request,
*/
router.post(
'/jobs/:queueName/:jobId/retry',
adminTriggerLimiter,
validateRequest(jobRetrySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
@@ -606,6 +614,7 @@ router.post(
*/
router.post(
'/trigger/weekly-analytics',
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile; // This was a duplicate, fixed.

View File

@@ -318,6 +318,76 @@ describe('AI Routes (/api/ai)', () => {
// because URL parameters cannot easily simulate empty strings for min(1) validation checks via supertest routing.
});
describe('POST /upload-legacy', () => {
const imagePath = path.resolve(__dirname, '../tests/assets/test-flyer-image.jpg');
const mockUser = createMockUserProfile({
user: { user_id: 'legacy-user-1', email: 'legacy-user@test.com' },
});
// This route requires authentication, so we create an app instance with a user.
const authenticatedApp = createTestApp({
router: aiRouter,
basePath: '/api/ai',
authenticatedUser: mockUser,
});
it('should process a legacy flyer and return 200 on success', async () => {
// Arrange
const mockFlyer = createMockFlyer({ flyer_id: 10 });
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(mockFlyer);
// Act
const response = await supertest(authenticatedApp)
.post('/api/ai/upload-legacy')
.field('some_legacy_field', 'value') // simulate some body data
.attach('flyerFile', imagePath);
// Assert
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyer);
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledWith(
expect.any(Object), // req.file
expect.any(Object), // req.body
mockUser,
expect.any(Object), // req.log
);
});
it('should return 400 if no flyer file is uploaded', async () => {
const response = await supertest(authenticatedApp)
.post('/api/ai/upload-legacy')
.field('some_legacy_field', 'value');
expect(response.status).toBe(400);
expect(response.body.message).toBe('No flyer file uploaded.');
});
it('should return 409 and cleanup file if a duplicate flyer is detected', async () => {
const duplicateError = new aiService.DuplicateFlyerError('Duplicate legacy flyer.', 101);
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(duplicateError);
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
expect(response.status).toBe(409);
expect(response.body.message).toBe('Duplicate legacy flyer.');
expect(response.body.flyerId).toBe(101);
expect(unlinkSpy).toHaveBeenCalledTimes(1);
unlinkSpy.mockRestore();
});
it('should return 500 and cleanup file on a generic service error', async () => {
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new Error('Internal service failure'));
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
expect(response.status).toBe(500);
expect(response.body.message).toBe('Internal service failure');
expect(unlinkSpy).toHaveBeenCalledTimes(1);
unlinkSpy.mockRestore();
});
});
describe('POST /flyers/process (Legacy)', () => {
const imagePath = path.resolve(__dirname, '../tests/assets/test-flyer-image.jpg');
const mockDataPayload = {

View File

@@ -14,6 +14,7 @@ import { validateRequest } from '../middleware/validation.middleware';
import { requiredString } from '../utils/zodUtils';
import { cleanupUploadedFile, cleanupUploadedFiles } from '../utils/fileUtils';
import { monitoringService } from '../services/monitoringService.server';
import { aiUploadLimiter, aiGenerationLimiter } from '../config/rateLimiters';
const router = Router();
@@ -27,6 +28,7 @@ const uploadAndProcessSchema = z.object({
.length(64, 'Checksum must be 64 characters long.')
.regex(/^[a-f0-9]+$/, 'Checksum must be a valid hexadecimal string.'),
),
baseUrl: z.string().url().optional(),
}),
});
@@ -165,6 +167,7 @@ router.use((req: Request, res: Response, next: NextFunction) => {
*/
router.post(
'/upload-and-process',
aiUploadLimiter,
optionalAuth,
uploadToDisk.single('flyerFile'),
// Validation is now handled inside the route to ensure file cleanup on failure.
@@ -196,6 +199,7 @@ router.post(
userProfile,
req.ip ?? 'unknown',
req.log,
body.baseUrl,
);
// Respond immediately to the client with 202 Accepted
@@ -221,6 +225,7 @@ router.post(
*/
router.post(
'/upload-legacy',
aiUploadLimiter,
passport.authenticate('jwt', { session: false }),
uploadToDisk.single('flyerFile'),
async (req: Request, res: Response, next: NextFunction) => {
@@ -271,6 +276,7 @@ router.get(
*/
router.post(
'/flyers/process',
aiUploadLimiter,
optionalAuth,
uploadToDisk.single('flyerImage'),
async (req, res, next: NextFunction) => {
@@ -306,6 +312,7 @@ router.post(
*/
router.post(
'/check-flyer',
aiUploadLimiter,
optionalAuth,
uploadToDisk.single('image'),
async (req, res, next: NextFunction) => {
@@ -325,6 +332,7 @@ router.post(
router.post(
'/extract-address',
aiUploadLimiter,
optionalAuth,
uploadToDisk.single('image'),
async (req, res, next: NextFunction) => {
@@ -344,6 +352,7 @@ router.post(
router.post(
'/extract-logo',
aiUploadLimiter,
optionalAuth,
uploadToDisk.array('images'),
async (req, res, next: NextFunction) => {
@@ -363,6 +372,7 @@ router.post(
router.post(
'/quick-insights',
aiGenerationLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(insightsSchema),
async (req, res, next: NextFunction) => {
@@ -379,6 +389,7 @@ router.post(
router.post(
'/deep-dive',
aiGenerationLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(insightsSchema),
async (req, res, next: NextFunction) => {
@@ -395,6 +406,7 @@ router.post(
router.post(
'/search-web',
aiGenerationLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(searchWebSchema),
async (req, res, next: NextFunction) => {
@@ -409,6 +421,7 @@ router.post(
router.post(
'/compare-prices',
aiGenerationLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(comparePricesSchema),
async (req, res, next: NextFunction) => {
@@ -427,6 +440,7 @@ router.post(
router.post(
'/plan-trip',
aiGenerationLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(planTripSchema),
async (req, res, next: NextFunction) => {
@@ -446,6 +460,7 @@ router.post(
router.post(
'/generate-image',
aiGenerationLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(generateImageSchema),
(req: Request, res: Response) => {
@@ -458,6 +473,7 @@ router.post(
router.post(
'/generate-speech',
aiGenerationLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(generateSpeechSchema),
(req: Request, res: Response) => {
@@ -474,6 +490,7 @@ router.post(
*/
router.post(
'/rescan-area',
aiUploadLimiter,
passport.authenticate('jwt', { session: false }),
uploadToDisk.single('image'),
validateRequest(rescanAreaSchema),

View File

@@ -197,6 +197,33 @@ describe('Auth Routes (/api/auth)', () => {
);
});
it('should allow registration with an empty string for full_name', async () => {
// Arrange
const email = 'empty-name@test.com';
mockedAuthService.registerAndLoginUser.mockResolvedValue({
newUserProfile: createMockUserProfile({ user: { email } }),
accessToken: 'token',
refreshToken: 'token',
});
// Act
const response = await supertest(app).post('/api/auth/register').send({
email,
password: strongPassword,
full_name: '', // Send an empty string
});
// Assert
expect(response.status).toBe(201);
expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
email,
strongPassword,
undefined, // The preprocess step in the Zod schema should convert '' to undefined
undefined,
mockLogger,
);
});
it('should set a refresh token cookie on successful registration', async () => {
const mockNewUser = createMockUserProfile({
user: { user_id: 'new-user-id', email: 'cookie@test.com' },
@@ -396,6 +423,24 @@ describe('Auth Routes (/api/auth)', () => {
const setCookieHeader = response.headers['set-cookie'];
expect(setCookieHeader[0]).toContain('Max-Age=2592000'); // 30 days in seconds
});
it('should return 400 for an invalid email format', async () => {
const response = await supertest(app)
.post('/api/auth/login')
.send({ email: 'not-an-email', password: 'password123' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('A valid email is required.');
});
it('should return 400 if password is missing', async () => {
const response = await supertest(app)
.post('/api/auth/login')
.send({ email: 'test@test.com' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('Password is required.');
});
});
describe('POST /forgot-password', () => {
@@ -550,12 +595,15 @@ describe('Auth Routes (/api/auth)', () => {
expect(setCookieHeader[0]).toContain('Max-Age=0');
});
it('should still return 200 OK even if deleting the refresh token from DB fails', async () => {
it('should still return 200 OK and log an error if deleting the refresh token from DB fails', async () => {
// Arrange
const dbError = new Error('DB connection lost');
mockedAuthService.logout.mockRejectedValue(dbError);
const { logger } = await import('../services/logger.server');
// Spy on logger.error to ensure it's called
const errorSpy = vi.spyOn(logger, 'error');
// Act
const response = await supertest(app)
.post('/api/auth/logout')
@@ -563,7 +611,12 @@ describe('Auth Routes (/api/auth)', () => {
// Assert
expect(response.status).toBe(200);
expect(logger.error).toHaveBeenCalledWith(
// Because authService.logout is fire-and-forget (not awaited), we need to
// give the event loop a moment to process the rejected promise and trigger the .catch() block.
await new Promise((resolve) => setImmediate(resolve));
expect(errorSpy).toHaveBeenCalledWith(
expect.objectContaining({ error: dbError }),
'Logout token invalidation failed in background.',
);
@@ -578,4 +631,280 @@ describe('Auth Routes (/api/auth)', () => {
expect(response.headers['set-cookie'][0]).toContain('refreshToken=;');
});
});
describe('Rate Limiting on /forgot-password', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const email = 'rate-limit-test@example.com';
const maxRequests = 5; // from the rate limiter config
mockedAuthService.resetPassword.mockResolvedValue('mock-token');
// Act: Make `maxRequests` successful calls with the special header
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/forgot-password')
.set('X-Test-Rate-Limit-Enable', 'true') // Opt-in to the rate limiter for this test
.send({ email });
expect(response.status, `Request ${i + 1} should succeed`).toBe(200);
}
// Act: Make one more call, which should be blocked
const blockedResponse = await supertest(app)
.post('/api/auth/forgot-password')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ email });
// Assert
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many password reset requests');
});
it('should NOT block requests when the opt-in header is not sent (default test behavior)', async () => {
// Arrange
const email = 'no-rate-limit-test@example.com';
const overLimitRequests = 7; // More than the max of 5
mockedAuthService.resetPassword.mockResolvedValue('mock-token');
// Act: Make more calls than the limit. They should all succeed because the limiter is skipped.
for (let i = 0; i < overLimitRequests; i++) {
const response = await supertest(app)
.post('/api/auth/forgot-password')
// NO 'X-Test-Rate-Limit-Enable' header is sent
.send({ email });
expect(response.status, `Request ${i + 1} should succeed`).toBe(200);
}
});
});
describe('Rate Limiting on /reset-password', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const maxRequests = 10; // from the rate limiter config in auth.routes.ts
const newPassword = 'a-Very-Strong-Password-123!';
const token = 'some-token-for-rate-limit-test';
// Mock the service to return a consistent value for the first `maxRequests` calls.
// The endpoint returns 400 for invalid tokens, which is fine for this test.
// We just need to ensure it's not a 429.
mockedAuthService.updatePassword.mockResolvedValue(null);
// Act: Make `maxRequests` calls. They should not be rate-limited.
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/reset-password')
.set('X-Test-Rate-Limit-Enable', 'true') // Opt-in to the rate limiter
.send({ token, newPassword });
// The expected status is 400 because the token is invalid, but not 429.
expect(response.status, `Request ${i + 1} should not be rate-limited`).toBe(400);
}
// Act: Make one more call, which should be blocked by the rate limiter.
const blockedResponse = await supertest(app)
.post('/api/auth/reset-password')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ token, newPassword });
// Assert
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many password reset attempts');
});
it('should NOT block requests when the opt-in header is not sent (default test behavior)', async () => {
// Arrange
const maxRequests = 12; // Limit is 10
const newPassword = 'a-Very-Strong-Password-123!';
const token = 'some-token-for-skip-limit-test';
mockedAuthService.updatePassword.mockResolvedValue(null);
// Act: Make more calls than the limit.
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/reset-password')
.send({ token, newPassword });
expect(response.status).toBe(400);
}
});
});
describe('Rate Limiting on /register', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const maxRequests = 5; // Limit is 5 per hour
const newUser = {
email: 'rate-limit-reg@test.com',
password: 'StrongPassword123!',
full_name: 'Rate Limit User',
};
// Mock success to ensure we are hitting the limiter and not failing early
mockedAuthService.registerAndLoginUser.mockResolvedValue({
newUserProfile: createMockUserProfile({ user: { email: newUser.email } }),
accessToken: 'token',
refreshToken: 'refresh',
});
// Act: Make maxRequests calls
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/register')
.set('X-Test-Rate-Limit-Enable', 'true')
.send(newUser);
expect(response.status).not.toBe(429);
}
// Act: Make one more call
const blockedResponse = await supertest(app)
.post('/api/auth/register')
.set('X-Test-Rate-Limit-Enable', 'true')
.send(newUser);
// Assert
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many accounts created');
});
it('should NOT block requests when the opt-in header is not sent', async () => {
const maxRequests = 7;
const newUser = {
email: 'no-limit-reg@test.com',
password: 'StrongPassword123!',
full_name: 'No Limit User',
};
mockedAuthService.registerAndLoginUser.mockResolvedValue({
newUserProfile: createMockUserProfile({ user: { email: newUser.email } }),
accessToken: 'token',
refreshToken: 'refresh',
});
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app).post('/api/auth/register').send(newUser);
expect(response.status).not.toBe(429);
}
});
});
describe('Rate Limiting on /login', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const maxRequests = 5; // Limit is 5 per 15 mins
const credentials = { email: 'rate-limit-login@test.com', password: 'password123' };
mockedAuthService.handleSuccessfulLogin.mockResolvedValue({
accessToken: 'token',
refreshToken: 'refresh',
});
// Act
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/login')
.set('X-Test-Rate-Limit-Enable', 'true')
.send(credentials);
expect(response.status).not.toBe(429);
}
const blockedResponse = await supertest(app)
.post('/api/auth/login')
.set('X-Test-Rate-Limit-Enable', 'true')
.send(credentials);
// Assert
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many login attempts');
});
it('should NOT block requests when the opt-in header is not sent', async () => {
const maxRequests = 7;
const credentials = { email: 'no-limit-login@test.com', password: 'password123' };
mockedAuthService.handleSuccessfulLogin.mockResolvedValue({
accessToken: 'token',
refreshToken: 'refresh',
});
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app).post('/api/auth/login').send(credentials);
expect(response.status).not.toBe(429);
}
});
});
describe('Rate Limiting on /refresh-token', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const maxRequests = 20; // Limit is 20 per 15 mins
mockedAuthService.refreshAccessToken.mockResolvedValue({ accessToken: 'new-token' });
// Act: Make maxRequests calls
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/refresh-token')
.set('Cookie', 'refreshToken=valid-token')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).not.toBe(429);
}
// Act: Make one more call
const blockedResponse = await supertest(app)
.post('/api/auth/refresh-token')
.set('Cookie', 'refreshToken=valid-token')
.set('X-Test-Rate-Limit-Enable', 'true');
// Assert
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many token refresh attempts');
});
it('should NOT block requests when the opt-in header is not sent', async () => {
const maxRequests = 22;
mockedAuthService.refreshAccessToken.mockResolvedValue({ accessToken: 'new-token' });
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/refresh-token')
.set('Cookie', 'refreshToken=valid-token');
expect(response.status).not.toBe(429);
}
});
});
describe('Rate Limiting on /logout', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const maxRequests = 10; // Limit is 10 per 15 mins
mockedAuthService.logout.mockResolvedValue(undefined);
// Act
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/logout')
.set('Cookie', 'refreshToken=valid-token')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).not.toBe(429);
}
const blockedResponse = await supertest(app)
.post('/api/auth/logout')
.set('Cookie', 'refreshToken=valid-token')
.set('X-Test-Rate-Limit-Enable', 'true');
// Assert
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many logout attempts');
});
it('should NOT block requests when the opt-in header is not sent', async () => {
const maxRequests = 12;
mockedAuthService.logout.mockResolvedValue(undefined);
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(app)
.post('/api/auth/logout')
.set('Cookie', 'refreshToken=valid-token');
expect(response.status).not.toBe(429);
}
});
});
});

View File

@@ -1,7 +1,6 @@
// src/routes/auth.routes.ts
import { Router, Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import rateLimit from 'express-rate-limit';
import passport from './passport.routes';
import { UniqueConstraintError } from '../services/db/errors.db'; // Import actual class for instanceof checks
import { logger } from '../services/logger.server';
@@ -9,48 +8,36 @@ import { validateRequest } from '../middleware/validation.middleware';
import type { UserProfile } from '../types';
import { validatePasswordStrength } from '../utils/authUtils';
import { requiredString } from '../utils/zodUtils';
import {
loginLimiter,
registerLimiter,
forgotPasswordLimiter,
resetPasswordLimiter,
refreshTokenLimiter,
logoutLimiter,
} from '../config/rateLimiters';
import { authService } from '../services/authService';
const router = Router();
// Conditionally disable rate limiting for the test environment
const isTestEnv = process.env.NODE_ENV === 'test';
// --- Reusable Schemas ---
// --- Rate Limiting Configuration ---
const forgotPasswordLimiter = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
max: 5,
message: 'Too many password reset requests from this IP, please try again after 15 minutes.',
standardHeaders: true,
legacyHeaders: false,
// Do not skip in test environment so we can write integration tests for it.
// The limiter uses an in-memory store by default, so counts are reset when the test server restarts.
// skip: () => isTestEnv,
});
const resetPasswordLimiter = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
max: 10,
message: 'Too many password reset attempts from this IP, please try again after 15 minutes.',
standardHeaders: true,
legacyHeaders: false,
skip: () => isTestEnv, // Skip this middleware if in test environment
});
const passwordSchema = z
.string()
.trim() // Prevent leading/trailing whitespace in passwords.
.min(8, 'Password must be at least 8 characters long.')
.superRefine((password, ctx) => {
const strength = validatePasswordStrength(password);
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
});
const registerSchema = z.object({
body: z.object({
// Sanitize email by trimming and converting to lowercase.
email: z.string().trim().toLowerCase().email('A valid email is required.'),
password: z
.string()
.trim() // Prevent leading/trailing whitespace in passwords.
.min(8, 'Password must be at least 8 characters long.')
.superRefine((password, ctx) => {
const strength = validatePasswordStrength(password);
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
}),
password: passwordSchema,
// Sanitize optional string inputs.
full_name: z.string().trim().optional(),
full_name: z.preprocess((val) => (val === '' ? undefined : val), z.string().trim().optional()),
// Allow empty string or valid URL. If empty string is received, convert to undefined.
avatar_url: z.preprocess(
(val) => (val === '' ? undefined : val),
@@ -59,6 +46,14 @@ const registerSchema = z.object({
}),
});
const loginSchema = z.object({
body: z.object({
email: z.string().trim().toLowerCase().email('A valid email is required.'),
password: requiredString('Password is required.'),
rememberMe: z.boolean().optional(),
}),
});
const forgotPasswordSchema = z.object({
body: z.object({
// Sanitize email by trimming and converting to lowercase.
@@ -69,14 +64,7 @@ const forgotPasswordSchema = z.object({
const resetPasswordSchema = z.object({
body: z.object({
token: requiredString('Token is required.'),
newPassword: z
.string()
.trim() // Prevent leading/trailing whitespace in passwords.
.min(8, 'Password must be at least 8 characters long.')
.superRefine((password, ctx) => {
const strength = validatePasswordStrength(password);
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
}),
newPassword: passwordSchema,
}),
});
@@ -85,6 +73,7 @@ const resetPasswordSchema = z.object({
// Registration Route
router.post(
'/register',
registerLimiter,
validateRequest(registerSchema),
async (req: Request, res: Response, next: NextFunction) => {
type RegisterRequest = z.infer<typeof registerSchema>;
@@ -122,52 +111,57 @@ router.post(
);
// Login Route
router.post('/login', (req: Request, res: Response, next: NextFunction) => {
passport.authenticate(
'local',
{ session: false },
async (err: Error, user: Express.User | false, info: { message: string }) => {
// --- LOGIN ROUTE DEBUG LOGGING ---
req.log.debug(`[API /login] Received login request for email: ${req.body.email}`);
if (err) req.log.error({ err }, '[API /login] Passport reported an error.');
if (!user) req.log.warn({ info }, '[API /login] Passport reported NO USER found.');
if (user) req.log.debug({ user }, '[API /login] Passport user object:'); // Log the user object passport returns
if (user) req.log.info({ user }, '[API /login] Passport reported USER FOUND.');
router.post(
'/login',
loginLimiter,
validateRequest(loginSchema),
(req: Request, res: Response, next: NextFunction) => {
passport.authenticate(
'local',
{ session: false },
async (err: Error, user: Express.User | false, info: { message: string }) => {
// --- LOGIN ROUTE DEBUG LOGGING ---
req.log.debug(`[API /login] Received login request for email: ${req.body.email}`);
if (err) req.log.error({ err }, '[API /login] Passport reported an error.');
if (!user) req.log.warn({ info }, '[API /login] Passport reported NO USER found.');
if (user) req.log.debug({ user }, '[API /login] Passport user object:'); // Log the user object passport returns
if (user) req.log.info({ user }, '[API /login] Passport reported USER FOUND.');
if (err) {
req.log.error(
{ error: err },
`Login authentication error in /login route for email: ${req.body.email}`,
);
return next(err);
}
if (!user) {
return res.status(401).json({ message: info.message || 'Login failed' });
}
if (err) {
req.log.error(
{ error: err },
`Login authentication error in /login route for email: ${req.body.email}`,
);
return next(err);
}
if (!user) {
return res.status(401).json({ message: info.message || 'Login failed' });
}
try {
const { rememberMe } = req.body;
const userProfile = user as UserProfile;
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(userProfile, req.log);
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
try {
const { rememberMe } = req.body;
const userProfile = user as UserProfile;
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(userProfile, req.log);
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
const cookieOptions = {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
maxAge: rememberMe ? 30 * 24 * 60 * 60 * 1000 : undefined, // 30 days
};
const cookieOptions = {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
maxAge: rememberMe ? 30 * 24 * 60 * 60 * 1000 : undefined, // 30 days
};
res.cookie('refreshToken', refreshToken, cookieOptions);
// Return the full user profile object on login to avoid a second fetch on the client.
return res.json({ userprofile: userProfile, token: accessToken });
} catch (tokenErr) {
const email = (user as UserProfile)?.user?.email || req.body.email;
req.log.error({ error: tokenErr }, `Failed to process login for user: ${email}`);
return next(tokenErr);
}
},
)(req, res, next);
});
res.cookie('refreshToken', refreshToken, cookieOptions);
// Return the full user profile object on login to avoid a second fetch on the client.
return res.json({ userprofile: userProfile, token: accessToken });
} catch (tokenErr) {
const email = (user as UserProfile)?.user?.email || req.body.email;
req.log.error({ error: tokenErr }, `Failed to process login for user: ${email}`);
return next(tokenErr);
}
},
)(req, res, next);
},
);
// Route to request a password reset
router.post(
@@ -224,7 +218,7 @@ router.post(
);
// New Route to refresh the access token
router.post('/refresh-token', async (req: Request, res: Response, next: NextFunction) => {
router.post('/refresh-token', refreshTokenLimiter, async (req: Request, res: Response, next: NextFunction) => {
const { refreshToken } = req.cookies;
if (!refreshToken) {
return res.status(401).json({ message: 'Refresh token not found.' });
@@ -247,7 +241,7 @@ router.post('/refresh-token', async (req: Request, res: Response, next: NextFunc
* It clears the refresh token from the database and instructs the client to
* expire the `refreshToken` cookie.
*/
router.post('/logout', async (req: Request, res: Response) => {
router.post('/logout', logoutLimiter, async (req: Request, res: Response) => {
const { refreshToken } = req.cookies;
if (refreshToken) {
// Invalidate the token in the database so it cannot be used again.

View File

@@ -6,6 +6,7 @@ import { budgetRepo } from '../services/db/index.db';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { requiredString, numericIdParam } from '../utils/zodUtils';
import { budgetUpdateLimiter } from '../config/rateLimiters';
const router = express.Router();
@@ -37,6 +38,9 @@ const spendingAnalysisSchema = z.object({
// Middleware to ensure user is authenticated for all budget routes
router.use(passport.authenticate('jwt', { session: false }));
// Apply rate limiting to all subsequent budget routes
router.use(budgetUpdateLimiter);
/**
* GET /api/budgets - Get all budgets for the authenticated user.
*/

View File

@@ -103,4 +103,18 @@ describe('Deals Routes (/api/users/deals)', () => {
);
});
});
describe('Rate Limiting', () => {
it('should apply userReadLimiter to GET /best-watched-prices', async () => {
vi.mocked(dealsRepo.findBestPricesForWatchedItems).mockResolvedValue([]);
const response = await supertest(authenticatedApp)
.get('/api/users/deals/best-watched-prices')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
});
});
});

View File

@@ -5,6 +5,7 @@ import passport from './passport.routes';
import { dealsRepo } from '../services/db/deals.db';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { userReadLimiter } from '../config/rateLimiters';
const router = express.Router();
@@ -27,6 +28,7 @@ router.use(passport.authenticate('jwt', { session: false }));
*/
router.get(
'/best-watched-prices',
userReadLimiter,
validateRequest(bestWatchedPricesSchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;

View File

@@ -13,7 +13,7 @@ vi.mock('../services/db/index.db', () => ({
getFlyerItems: vi.fn(),
getFlyerItemsForFlyers: vi.fn(),
countFlyerItemsForFlyers: vi.fn(),
trackFlyerItemInteraction: vi.fn(),
trackFlyerItemInteraction: vi.fn().mockResolvedValue(undefined),
},
}));
@@ -50,6 +50,8 @@ describe('Flyer Routes (/api/flyers)', () => {
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyers);
// Also assert that the default limit and offset were used.
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 20, 0);
});
it('should pass limit and offset query parameters to the db function', async () => {
@@ -58,6 +60,18 @@ describe('Flyer Routes (/api/flyers)', () => {
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 15, 30);
});
it('should use default for offset when only limit is provided', async () => {
vi.mocked(db.flyerRepo.getFlyers).mockResolvedValue([]);
await supertest(app).get('/api/flyers?limit=5');
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 5, 0);
});
it('should use default for limit when only offset is provided', async () => {
vi.mocked(db.flyerRepo.getFlyers).mockResolvedValue([]);
await supertest(app).get('/api/flyers?offset=10');
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 20, 10);
});
it('should return 500 if the database call fails', async () => {
const dbError = new Error('DB Error');
vi.mocked(db.flyerRepo.getFlyers).mockRejectedValue(dbError);
@@ -151,7 +165,7 @@ describe('Flyer Routes (/api/flyers)', () => {
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
{ error: dbError, flyerId: 123 },
'Error fetching flyer items in /api/flyers/:id/items:',
);
});
@@ -276,5 +290,75 @@ describe('Flyer Routes (/api/flyers)', () => {
.send({ type: 'invalid' });
expect(response.status).toBe(400);
});
it('should return 202 and log an error if the tracking function fails', async () => {
const trackingError = new Error('Tracking DB is down');
vi.mocked(db.flyerRepo.trackFlyerItemInteraction).mockRejectedValue(trackingError);
const response = await supertest(app)
.post('/api/flyers/items/99/track')
.send({ type: 'click' });
expect(response.status).toBe(202);
// Allow the event loop to process the unhandled promise rejection from the fire-and-forget call
await new Promise((resolve) => setImmediate(resolve));
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: trackingError, itemId: 99 },
'Flyer item interaction tracking failed',
);
});
});
describe('Rate Limiting', () => {
it('should apply publicReadLimiter to GET /', async () => {
vi.mocked(db.flyerRepo.getFlyers).mockResolvedValue([]);
const response = await supertest(app)
.get('/api/flyers')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
});
it('should apply batchLimiter to POST /items/batch-fetch', async () => {
vi.mocked(db.flyerRepo.getFlyerItemsForFlyers).mockResolvedValue([]);
const response = await supertest(app)
.post('/api/flyers/items/batch-fetch')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ flyerIds: [1] });
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(50);
});
it('should apply batchLimiter to POST /items/batch-count', async () => {
vi.mocked(db.flyerRepo.countFlyerItemsForFlyers).mockResolvedValue(0);
const response = await supertest(app)
.post('/api/flyers/items/batch-count')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ flyerIds: [1] });
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(50);
});
it('should apply trackingLimiter to POST /items/:itemId/track', async () => {
// Mock fire-and-forget promise
vi.mocked(db.flyerRepo.trackFlyerItemInteraction).mockResolvedValue(undefined);
const response = await supertest(app)
.post('/api/flyers/items/1/track')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ type: 'view' });
expect(response.status).toBe(202);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(200);
});
});
});

View File

@@ -4,6 +4,11 @@ import * as db from '../services/db/index.db';
import { z } from 'zod';
import { validateRequest } from '../middleware/validation.middleware';
import { optionalNumeric } from '../utils/zodUtils';
import {
publicReadLimiter,
batchLimiter,
trackingLimiter,
} from '../config/rateLimiters';
const router = Router();
@@ -48,12 +53,12 @@ const trackItemSchema = z.object({
/**
* GET /api/flyers - Get a paginated list of all flyers.
*/
type GetFlyersRequest = z.infer<typeof getFlyersSchema>;
router.get('/', validateRequest(getFlyersSchema), async (req, res, next): Promise<void> => {
const { query } = req as unknown as GetFlyersRequest;
router.get('/', publicReadLimiter, validateRequest(getFlyersSchema), async (req, res, next): Promise<void> => {
try {
const limit = query.limit ? Number(query.limit) : 20;
const offset = query.offset ? Number(query.offset) : 0;
// The `validateRequest` middleware ensures `req.query` is valid.
// We parse it here to apply Zod's coercions (string to number) and defaults.
const { limit, offset } = getFlyersSchema.shape.query.parse(req.query);
const flyers = await db.flyerRepo.getFlyers(req.log, limit, offset);
res.json(flyers);
} catch (error) {
@@ -65,14 +70,14 @@ router.get('/', validateRequest(getFlyersSchema), async (req, res, next): Promis
/**
* GET /api/flyers/:id - Get a single flyer by its ID.
*/
type GetFlyerByIdRequest = z.infer<typeof flyerIdParamSchema>;
router.get('/:id', validateRequest(flyerIdParamSchema), async (req, res, next): Promise<void> => {
const { params } = req as unknown as GetFlyerByIdRequest;
router.get('/:id', publicReadLimiter, validateRequest(flyerIdParamSchema), async (req, res, next): Promise<void> => {
try {
const flyer = await db.flyerRepo.getFlyerById(params.id);
// Explicitly parse to get the coerced number type for `id`.
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
const flyer = await db.flyerRepo.getFlyerById(id);
res.json(flyer);
} catch (error) {
req.log.error({ error, flyerId: params.id }, 'Error fetching flyer by ID:');
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer by ID:');
next(error);
}
});
@@ -82,14 +87,17 @@ router.get('/:id', validateRequest(flyerIdParamSchema), async (req, res, next):
*/
router.get(
'/:id/items',
publicReadLimiter,
validateRequest(flyerIdParamSchema),
async (req, res, next): Promise<void> => {
const { params } = req as unknown as GetFlyerByIdRequest;
type GetFlyerByIdRequest = z.infer<typeof flyerIdParamSchema>;
try {
const items = await db.flyerRepo.getFlyerItems(params.id, req.log);
// Explicitly parse to get the coerced number type for `id`.
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
const items = await db.flyerRepo.getFlyerItems(id, req.log);
res.json(items);
} catch (error) {
req.log.error({ error }, 'Error fetching flyer items in /api/flyers/:id/items:');
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer items in /api/flyers/:id/items:');
next(error);
}
},
@@ -101,10 +109,13 @@ router.get(
type BatchFetchRequest = z.infer<typeof batchFetchSchema>;
router.post(
'/items/batch-fetch',
batchLimiter,
validateRequest(batchFetchSchema),
async (req, res, next): Promise<void> => {
const { body } = req as unknown as BatchFetchRequest;
try {
// No re-parsing needed here as `validateRequest` has already ensured the body shape,
// and `express.json()` has parsed it. There's no type coercion to apply.
const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
res.json(items);
} catch (error) {
@@ -120,12 +131,14 @@ router.post(
type BatchCountRequest = z.infer<typeof batchCountSchema>;
router.post(
'/items/batch-count',
batchLimiter,
validateRequest(batchCountSchema),
async (req, res, next): Promise<void> => {
const { body } = req as unknown as BatchCountRequest;
try {
// The DB function handles an empty array, so we can simplify.
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log);
// The schema ensures flyerIds is an array of numbers.
// The `?? []` was redundant as `validateRequest` would have already caught a missing `flyerIds`.
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds, req.log);
res.json({ count });
} catch (error) {
req.log.error({ error }, 'Error counting batch flyer items');
@@ -137,11 +150,22 @@ router.post(
/**
* POST /api/flyers/items/:itemId/track - Tracks a user interaction with a flyer item.
*/
type TrackItemRequest = z.infer<typeof trackItemSchema>;
router.post('/items/:itemId/track', validateRequest(trackItemSchema), (req, res): void => {
const { params, body } = req as unknown as TrackItemRequest;
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log);
res.status(202).send();
router.post('/items/:itemId/track', trackingLimiter, validateRequest(trackItemSchema), (req, res, next): void => {
try {
// Explicitly parse to get coerced types.
const { params, body } = trackItemSchema.parse({ params: req.params, body: req.body });
// Fire-and-forget: we don't await the tracking call to avoid delaying the response.
// We add a .catch to log any potential errors without crashing the server process.
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log).catch((error) => {
req.log.error({ error, itemId: params.itemId }, 'Flyer item interaction tracking failed');
});
res.status(202).send();
} catch (error) {
// This will catch Zod parsing errors if they occur.
next(error);
}
});
export default router;

View File

@@ -336,4 +336,50 @@ describe('Gamification Routes (/api/achievements)', () => {
expect(response.body.errors[0].message).toMatch(/less than or equal to 50|Too big/i);
});
});
describe('Rate Limiting', () => {
it('should apply publicReadLimiter to GET /', async () => {
vi.mocked(db.gamificationRepo.getAllAchievements).mockResolvedValue([]);
const response = await supertest(unauthenticatedApp)
.get('/api/achievements')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
});
it('should apply userReadLimiter to GET /me', async () => {
mockedAuthMiddleware.mockImplementation((req: Request, res: Response, next: NextFunction) => {
req.user = mockUserProfile;
next();
});
vi.mocked(db.gamificationRepo.getUserAchievements).mockResolvedValue([]);
const response = await supertest(authenticatedApp)
.get('/api/achievements/me')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
});
it('should apply adminTriggerLimiter to POST /award', async () => {
mockedAuthMiddleware.mockImplementation((req: Request, res: Response, next: NextFunction) => {
req.user = mockAdminProfile;
next();
});
mockedIsAdmin.mockImplementation((req: Request, res: Response, next: NextFunction) => next());
vi.mocked(db.gamificationRepo.awardAchievement).mockResolvedValue(undefined);
const response = await supertest(adminApp)
.post('/api/achievements/award')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ userId: 'some-user', achievementName: 'some-achievement' });
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(30);
});
});
});

View File

@@ -7,6 +7,11 @@ import { logger } from '../services/logger.server';
import { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { requiredString, optionalNumeric } from '../utils/zodUtils';
import {
publicReadLimiter,
userReadLimiter,
adminTriggerLimiter,
} from '../config/rateLimiters';
const router = express.Router();
const adminGamificationRouter = express.Router(); // Create a new router for admin-only routes.
@@ -34,7 +39,7 @@ const awardAchievementSchema = z.object({
* GET /api/achievements - Get the master list of all available achievements.
* This is a public endpoint.
*/
router.get('/', async (req, res, next: NextFunction) => {
router.get('/', publicReadLimiter, async (req, res, next: NextFunction) => {
try {
const achievements = await gamificationService.getAllAchievements(req.log);
res.json(achievements);
@@ -50,6 +55,7 @@ router.get('/', async (req, res, next: NextFunction) => {
*/
router.get(
'/leaderboard',
publicReadLimiter,
validateRequest(leaderboardSchema),
async (req, res, next: NextFunction): Promise<void> => {
try {
@@ -74,6 +80,7 @@ router.get(
router.get(
'/me',
passport.authenticate('jwt', { session: false }),
userReadLimiter,
async (req, res, next: NextFunction): Promise<void> => {
const userProfile = req.user as UserProfile;
try {
@@ -103,6 +110,7 @@ adminGamificationRouter.use(passport.authenticate('jwt', { session: false }), is
*/
adminGamificationRouter.post(
'/award',
adminTriggerLimiter,
validateRequest(awardAchievementSchema),
async (req, res, next: NextFunction): Promise<void> => {
// Infer type and cast request object as per ADR-003

View File

@@ -102,6 +102,7 @@ vi.mock('passport', () => {
// Now, import the passport configuration which will use our mocks
import passport, { isAdmin, optionalAuth, mockAuth } from './passport.routes';
import { logger } from '../services/logger.server';
import { ForbiddenError } from '../services/db/errors.db';
describe('Passport Configuration', () => {
beforeEach(() => {
@@ -414,6 +415,29 @@ describe('Passport Configuration', () => {
// Assert
expect(done).toHaveBeenCalledWith(dbError, false);
});
it('should call done(err, false) if jwt_payload is null', async () => {
// Arrange
const jwtPayload = null;
const done = vi.fn();
// Act
// We know the mock setup populates the callback.
if (verifyCallbackWrapper.callback) {
// The strategy would not even call the callback if the token is invalid/missing.
// However, to test the robustness of our callback, we can invoke it directly with null.
await verifyCallbackWrapper.callback(jwtPayload as any, done);
}
// Assert
// The code will throw a TypeError because it tries to access 'user_id' of null.
// The catch block in the strategy will catch this and call done(err, false).
expect(done).toHaveBeenCalledWith(expect.any(TypeError), false);
expect(logger.error).toHaveBeenCalledWith(
{ error: expect.any(TypeError) },
'Error during JWT authentication strategy:',
);
});
});
describe('isAdmin Middleware', () => {
@@ -445,7 +469,7 @@ describe('Passport Configuration', () => {
expect(mockRes.status).not.toHaveBeenCalled();
});
it('should return 403 Forbidden if user does not have "admin" role', () => {
it('should call next with a ForbiddenError if user does not have "admin" role', () => {
// Arrange
const mockReq: Partial<Request> = {
user: createMockUserProfile({
@@ -458,14 +482,11 @@ describe('Passport Configuration', () => {
isAdmin(mockReq as Request, mockRes as Response, mockNext);
// Assert
expect(mockNext).not.toHaveBeenCalled(); // This was a duplicate, fixed.
expect(mockRes.status).toHaveBeenCalledWith(403);
expect(mockRes.json).toHaveBeenCalledWith({
message: 'Forbidden: Administrator access required.',
});
expect(mockNext).toHaveBeenCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
});
it('should return 403 Forbidden if req.user is missing', () => {
it('should call next with a ForbiddenError if req.user is missing', () => {
// Arrange
const mockReq = {} as Request; // No req.user
@@ -473,11 +494,86 @@ describe('Passport Configuration', () => {
isAdmin(mockReq, mockRes as Response, mockNext);
// Assert
expect(mockNext).not.toHaveBeenCalled(); // This was a duplicate, fixed.
expect(mockRes.status).toHaveBeenCalledWith(403);
expect(mockNext).toHaveBeenCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
});
it('should return 403 Forbidden if req.user is not a valid UserProfile object', () => {
it('should log a warning when a non-admin user tries to access an admin route', () => {
// Arrange
const mockReq: Partial<Request> = {
user: createMockUserProfile({
role: 'user',
user: { user_id: 'user-id-123', email: 'user@test.com' },
}),
};
// Act
isAdmin(mockReq as Request, mockRes as Response, mockNext);
// Assert
expect(logger.warn).toHaveBeenCalledWith('Admin access denied for user: user-id-123');
});
it('should log a warning with "unknown" user when req.user is missing', () => {
// Arrange
const mockReq = {} as Request; // No req.user
// Act
isAdmin(mockReq, mockRes as Response, mockNext);
// Assert
expect(logger.warn).toHaveBeenCalledWith('Admin access denied for user: unknown');
});
it('should call next with a ForbiddenError for various invalid user object shapes', () => {
const mockNext = vi.fn();
const mockRes: Partial<Response> = {
status: vi.fn().mockReturnThis(),
json: vi.fn(),
};
// Case 1: user is not an object (e.g., a string)
const req1 = { user: 'not-an-object' } as unknown as Request;
isAdmin(req1, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();
// Case 2: user is null
const req2 = { user: null } as unknown as Request;
isAdmin(req2, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();
// Case 3: user object is missing 'user' property
const req3 = { user: { role: 'admin' } } as unknown as Request;
isAdmin(req3, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();
// Case 4: user.user is not an object
const req4 = { user: { role: 'admin', user: 'not-an-object' } } as unknown as Request;
isAdmin(req4, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();
// Case 5: user.user is missing 'user_id'
const req5 = {
user: { role: 'admin', user: { email: 'test@test.com' } },
} as unknown as Request;
isAdmin(req5, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();
// Reset the main mockNext for other tests in the suite
mockNext.mockClear();
});
it('should call next with a ForbiddenError if req.user is not a valid UserProfile object', () => {
// Arrange
const mockReq: Partial<Request> = {
// An object that is not a valid UserProfile (e.g., missing 'role')
@@ -490,11 +586,8 @@ describe('Passport Configuration', () => {
isAdmin(mockReq as Request, mockRes as Response, mockNext);
// Assert
expect(mockNext).not.toHaveBeenCalled(); // This was a duplicate, fixed.
expect(mockRes.status).toHaveBeenCalledWith(403);
expect(mockRes.json).toHaveBeenCalledWith({
message: 'Forbidden: Administrator access required.',
});
expect(mockNext).toHaveBeenCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
});
});
@@ -611,13 +704,18 @@ describe('Passport Configuration', () => {
optionalAuth(mockReq, mockRes as Response, mockNext);
// Assert
// The new implementation logs a warning and proceeds.
expect(logger.warn).toHaveBeenCalledWith(
{ error: authError },
'Optional auth encountered an error, proceeding anonymously.',
);
expect(mockReq.user).toBeUndefined();
expect(mockNext).toHaveBeenCalledTimes(1);
});
});
describe('mockAuth Middleware', () => {
const mockNext: NextFunction = vi.fn();
const mockNext: NextFunction = vi.fn(); // This was a duplicate, fixed.
const mockRes: Partial<Response> = {
status: vi.fn().mockReturnThis(),
json: vi.fn(),

View File

@@ -11,6 +11,7 @@ import * as db from '../services/db/index.db';
import { logger } from '../services/logger.server';
import { UserProfile } from '../types';
import { createMockUserProfile } from '../tests/utils/mockFactories';
import { ForbiddenError } from '../services/db/errors.db';
const JWT_SECRET = process.env.JWT_SECRET!;
@@ -307,7 +308,7 @@ export const isAdmin = (req: Request, res: Response, next: NextFunction) => {
// Check if userProfile is a valid UserProfile before accessing its properties for logging.
const userIdForLog = isUserProfile(userProfile) ? userProfile.user.user_id : 'unknown';
logger.warn(`Admin access denied for user: ${userIdForLog}`);
res.status(403).json({ message: 'Forbidden: Administrator access required.' });
next(new ForbiddenError('Forbidden: Administrator access required.'));
}
};
@@ -323,12 +324,17 @@ export const optionalAuth = (req: Request, res: Response, next: NextFunction) =>
'jwt',
{ session: false },
(err: Error | null, user: Express.User | false, info: { message: string } | Error) => {
// If there's an authentication error (e.g., malformed token), log it but don't block the request.
if (err) {
// An actual error occurred during authentication (e.g., malformed token).
// For optional auth, we log this but still proceed without a user.
logger.warn({ error: err }, 'Optional auth encountered an error, proceeding anonymously.');
return next();
}
if (info) {
// The patch requested this specific error handling.
logger.info({ info: info.message || info.toString() }, 'Optional auth info:');
} // The patch requested this specific error handling.
if (user) (req as Express.Request).user = user; // Attach user if authentication succeeds
}
if (user) (req as Express.Request).user = user; // Attach user if authentication succeeds.
next(); // Always proceed to the next middleware
},

View File

@@ -40,7 +40,7 @@ describe('Personalization Routes (/api/personalization)', () => {
const mockItems = [createMockMasterGroceryItem({ master_grocery_item_id: 1, name: 'Milk' })];
vi.mocked(db.personalizationRepo.getAllMasterItems).mockResolvedValue(mockItems);
const response = await supertest(app).get('/api/personalization/master-items');
const response = await supertest(app).get('/api/personalization/master-items').set('x-test-rate-limit-enable', 'true');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockItems);
@@ -49,7 +49,7 @@ describe('Personalization Routes (/api/personalization)', () => {
it('should return 500 if the database call fails', async () => {
const dbError = new Error('DB Error');
vi.mocked(db.personalizationRepo.getAllMasterItems).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/personalization/master-items');
const response = await supertest(app).get('/api/personalization/master-items').set('x-test-rate-limit-enable', 'true');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
@@ -106,4 +106,16 @@ describe('Personalization Routes (/api/personalization)', () => {
);
});
});
describe('Rate Limiting', () => {
it('should apply publicReadLimiter to GET /master-items', async () => {
vi.mocked(db.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
const response = await supertest(app)
.get('/api/personalization/master-items')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
});
});
});

View File

@@ -3,6 +3,7 @@ import { Router, Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import * as db from '../services/db/index.db';
import { validateRequest } from '../middleware/validation.middleware';
import { publicReadLimiter } from '../config/rateLimiters';
const router = Router();
@@ -16,6 +17,7 @@ const emptySchema = z.object({});
*/
router.get(
'/master-items',
publicReadLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
try {
@@ -39,6 +41,7 @@ router.get(
*/
router.get(
'/dietary-restrictions',
publicReadLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
try {
@@ -59,6 +62,7 @@ router.get(
*/
router.get(
'/appliances',
publicReadLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
try {

View File

@@ -1,8 +1,10 @@
// src/routes/price.routes.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import type { Request, Response, NextFunction } from 'express';
import { createTestApp } from '../tests/utils/createTestApp';
import { mockLogger } from '../tests/utils/mockLogger';
import { createMockUserProfile } from '../tests/utils/mockFactories';
// Mock the price repository
vi.mock('../services/db/price.db', () => ({
@@ -17,12 +19,29 @@ vi.mock('../services/logger.server', async () => ({
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
default: {
authenticate: vi.fn(
(_strategy, _options) => (req: Request, res: Response, next: NextFunction) => {
// If req.user is not set by the test setup, simulate unauthenticated access.
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
// If req.user is set, proceed as an authenticated user.
next();
},
),
},
}));
// Import the router AFTER other setup.
import priceRouter from './price.routes';
import { priceRepo } from '../services/db/price.db';
describe('Price Routes (/api/price-history)', () => {
const app = createTestApp({ router: priceRouter, basePath: '/api/price-history' });
const mockUser = createMockUserProfile({ user: { user_id: 'price-user-123' } });
const app = createTestApp({ router: priceRouter, basePath: '/api/price-history', authenticatedUser: mockUser });
beforeEach(() => {
vi.clearAllMocks();
});
@@ -130,4 +149,18 @@ describe('Price Routes (/api/price-history)', () => {
expect(response.body.errors[1].message).toBe('Invalid input: expected number, received NaN');
});
});
describe('Rate Limiting', () => {
it('should apply priceHistoryLimiter to POST /', async () => {
vi.mocked(priceRepo.getPriceHistory).mockResolvedValue([]);
const response = await supertest(app)
.post('/api/price-history')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ masterItemIds: [1, 2] });
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(50);
});
});
});

View File

@@ -1,9 +1,11 @@
// src/routes/price.routes.ts
import { Router, Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import passport from './passport.routes';
import { validateRequest } from '../middleware/validation.middleware';
import { priceRepo } from '../services/db/price.db';
import { optionalNumeric } from '../utils/zodUtils';
import { priceHistoryLimiter } from '../config/rateLimiters';
const router = Router();
@@ -26,21 +28,27 @@ type PriceHistoryRequest = z.infer<typeof priceHistorySchema>;
* POST /api/price-history - Fetches historical price data for a given list of master item IDs.
* This endpoint retrieves price points over time for specified master grocery items.
*/
router.post('/', validateRequest(priceHistorySchema), async (req: Request, res: Response, next: NextFunction) => {
// Cast 'req' to the inferred type for full type safety.
const {
body: { masterItemIds, limit, offset },
} = req as unknown as PriceHistoryRequest;
req.log.info(
{ itemCount: masterItemIds.length, limit, offset },
'[API /price-history] Received request for historical price data.',
);
try {
const priceHistory = await priceRepo.getPriceHistory(masterItemIds, req.log, limit, offset);
res.status(200).json(priceHistory);
} catch (error) {
next(error);
}
});
router.post(
'/',
passport.authenticate('jwt', { session: false }),
priceHistoryLimiter,
validateRequest(priceHistorySchema),
async (req: Request, res: Response, next: NextFunction) => {
// Cast 'req' to the inferred type for full type safety.
const {
body: { masterItemIds, limit, offset },
} = req as unknown as PriceHistoryRequest;
req.log.info(
{ itemCount: masterItemIds.length, limit, offset },
'[API /price-history] Received request for historical price data.',
);
try {
const priceHistory = await priceRepo.getPriceHistory(masterItemIds, req.log, limit, offset);
res.status(200).json(priceHistory);
} catch (error) {
next(error);
}
},
);
export default router;

View File

@@ -208,4 +208,36 @@ describe('Reaction Routes (/api/reactions)', () => {
);
});
});
describe('Rate Limiting', () => {
it('should apply publicReadLimiter to GET /', async () => {
const app = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
vi.mocked(reactionRepo.getReactions).mockResolvedValue([]);
const response = await supertest(app)
.get('/api/reactions')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
});
it('should apply userUpdateLimiter to POST /toggle', async () => {
const mockUser = createMockUserProfile({ user: { user_id: 'user-123' } });
const app = createTestApp({
router: reactionsRouter,
basePath: '/api/reactions',
authenticatedUser: mockUser,
});
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(null);
const response = await supertest(app)
.post('/api/reactions/toggle')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ entity_type: 'recipe', entity_id: '1', reaction_type: 'like' });
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(150);
});
});
});

View File

@@ -5,6 +5,7 @@ import { validateRequest } from '../middleware/validation.middleware';
import passport from './passport.routes';
import { requiredString } from '../utils/zodUtils';
import { UserProfile } from '../types';
import { publicReadLimiter, reactionToggleLimiter } from '../config/rateLimiters';
const router = Router();
@@ -42,6 +43,7 @@ const getReactionSummarySchema = z.object({
*/
router.get(
'/',
publicReadLimiter,
validateRequest(getReactionsSchema),
async (req: Request, res: Response, next: NextFunction) => {
try {
@@ -62,6 +64,7 @@ router.get(
*/
router.get(
'/summary',
publicReadLimiter,
validateRequest(getReactionSummarySchema),
async (req: Request, res: Response, next: NextFunction) => {
try {
@@ -81,6 +84,7 @@ router.get(
*/
router.post(
'/toggle',
reactionToggleLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(toggleReactionSchema),
async (req: Request, res: Response, next: NextFunction) => {

View File

@@ -318,4 +318,65 @@ describe('Recipe Routes (/api/recipes)', () => {
);
});
});
describe('Rate Limiting on /suggest', () => {
const mockUser = createMockUserProfile({ user: { user_id: 'rate-limit-user' } });
const authApp = createTestApp({
router: recipeRouter,
basePath: '/api/recipes',
authenticatedUser: mockUser,
});
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const maxRequests = 20; // Limit is 20 per 15 mins
const ingredients = ['chicken', 'rice'];
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue('A tasty suggestion');
// Act: Make maxRequests calls
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(authApp)
.post('/api/recipes/suggest')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ ingredients });
expect(response.status).not.toBe(429);
}
// Act: Make one more call
const blockedResponse = await supertest(authApp)
.post('/api/recipes/suggest')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ ingredients });
// Assert
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many AI generation requests');
});
it('should NOT block requests when the opt-in header is not sent', async () => {
const maxRequests = 22;
const ingredients = ['beef', 'potatoes'];
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue('Another suggestion');
for (let i = 0; i < maxRequests; i++) {
const response = await supertest(authApp)
.post('/api/recipes/suggest')
.send({ ingredients });
expect(response.status).not.toBe(429);
}
});
});
describe('Rate Limiting on Public Routes', () => {
it('should apply publicReadLimiter to GET /:recipeId', async () => {
vi.mocked(db.recipeRepo.getRecipeById).mockResolvedValue(createMockRecipe({}));
const response = await supertest(app)
.get('/api/recipes/1')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
});
});
});

View File

@@ -6,6 +6,7 @@ import { aiService } from '../services/aiService.server';
import passport from './passport.routes';
import { validateRequest } from '../middleware/validation.middleware';
import { requiredString, numericIdParam, optionalNumeric } from '../utils/zodUtils';
import { publicReadLimiter, suggestionLimiter } from '../config/rateLimiters';
const router = Router();
@@ -41,6 +42,7 @@ const suggestRecipeSchema = z.object({
*/
router.get(
'/by-sale-percentage',
publicReadLimiter,
validateRequest(bySalePercentageSchema),
async (req, res, next) => {
try {
@@ -60,6 +62,7 @@ router.get(
*/
router.get(
'/by-sale-ingredients',
publicReadLimiter,
validateRequest(bySaleIngredientsSchema),
async (req, res, next) => {
try {
@@ -82,6 +85,7 @@ router.get(
*/
router.get(
'/by-ingredient-and-tag',
publicReadLimiter,
validateRequest(byIngredientAndTagSchema),
async (req, res, next) => {
try {
@@ -102,7 +106,7 @@ router.get(
/**
* GET /api/recipes/:recipeId/comments - Get all comments for a specific recipe.
*/
router.get('/:recipeId/comments', validateRequest(recipeIdParamsSchema), async (req, res, next) => {
router.get('/:recipeId/comments', publicReadLimiter, validateRequest(recipeIdParamsSchema), async (req, res, next) => {
try {
// Explicitly parse req.params to coerce recipeId to a number
const { params } = recipeIdParamsSchema.parse({ params: req.params });
@@ -117,7 +121,7 @@ router.get('/:recipeId/comments', validateRequest(recipeIdParamsSchema), async (
/**
* GET /api/recipes/:recipeId - Get a single recipe by its ID, including ingredients and tags.
*/
router.get('/:recipeId', validateRequest(recipeIdParamsSchema), async (req, res, next) => {
router.get('/:recipeId', publicReadLimiter, validateRequest(recipeIdParamsSchema), async (req, res, next) => {
try {
// Explicitly parse req.params to coerce recipeId to a number
const { params } = recipeIdParamsSchema.parse({ params: req.params });
@@ -135,6 +139,7 @@ router.get('/:recipeId', validateRequest(recipeIdParamsSchema), async (req, res,
*/
router.post(
'/suggest',
suggestionLimiter,
passport.authenticate('jwt', { session: false }),
validateRequest(suggestRecipeSchema),
async (req, res, next) => {

View File

@@ -66,4 +66,16 @@ describe('Stats Routes (/api/stats)', () => {
expect(response.body.errors.length).toBe(2);
});
});
describe('Rate Limiting', () => {
it('should apply publicReadLimiter to GET /most-frequent-sales', async () => {
vi.mocked(db.adminRepo.getMostFrequentSaleItems).mockResolvedValue([]);
const response = await supertest(app)
.get('/api/stats/most-frequent-sales')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
});
});
});

View File

@@ -4,6 +4,7 @@ import { z } from 'zod';
import * as db from '../services/db/index.db';
import { validateRequest } from '../middleware/validation.middleware';
import { optionalNumeric } from '../utils/zodUtils';
import { publicReadLimiter } from '../config/rateLimiters';
const router = Router();
@@ -25,6 +26,7 @@ const mostFrequentSalesSchema = z.object({
*/
router.get(
'/most-frequent-sales',
publicReadLimiter,
validateRequest(mostFrequentSalesSchema),
async (req: Request, res: Response, next: NextFunction) => {
try {

View File

@@ -156,4 +156,25 @@ describe('System Routes (/api/system)', () => {
expect(response.body.errors[0].message).toMatch(/An address string is required|Required/i);
});
});
describe('Rate Limiting on /geocode', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
const limit = 100; // Matches geocodeLimiter config
const address = '123 Test St';
vi.mocked(geocodingService.geocodeAddress).mockResolvedValue({ lat: 0, lng: 0 });
// We only need to verify it blocks eventually.
// Instead of running 100 requests, we check for the headers which confirm the middleware is active.
const response = await supertest(app)
.post('/api/system/geocode')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ address });
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(response.headers).toHaveProperty('ratelimit-remaining');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(limit);
expect(parseInt(response.headers['ratelimit-remaining'])).toBeLessThan(limit);
});
});
});

View File

@@ -6,6 +6,7 @@ import { validateRequest } from '../middleware/validation.middleware';
import { z } from 'zod';
import { requiredString } from '../utils/zodUtils';
import { systemService } from '../services/systemService';
import { geocodeLimiter } from '../config/rateLimiters';
const router = Router();
@@ -41,6 +42,7 @@ router.get(
*/
router.post(
'/geocode',
geocodeLimiter,
validateRequest(geocodeSchema),
async (req: Request, res: Response, next: NextFunction) => {
// Infer type and cast request object as per ADR-003

View File

@@ -1030,7 +1030,7 @@ describe('User Routes (/api/users)', () => {
it('should upload an avatar and update the user profile', async () => {
const mockUpdatedProfile = createMockUserProfile({
...mockUserProfile,
avatar_url: 'http://localhost:3001/uploads/avatars/new-avatar.png',
avatar_url: 'https://example.com/uploads/avatars/new-avatar.png',
});
vi.mocked(userService.updateUserAvatar).mockResolvedValue(mockUpdatedProfile);
@@ -1042,7 +1042,7 @@ describe('User Routes (/api/users)', () => {
.attach('avatar', Buffer.from('dummy-image-content'), dummyImagePath);
expect(response.status).toBe(200);
expect(response.body.avatar_url).toContain('http://localhost:3001/uploads/avatars/');
expect(response.body.avatar_url).toContain('https://example.com/uploads/avatars/');
expect(userService.updateUserAvatar).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
expect.any(Object),
@@ -1140,6 +1140,19 @@ describe('User Routes (/api/users)', () => {
expect(logger.error).toHaveBeenCalled();
});
it('DELETE /recipes/:recipeId should return 404 if recipe not found', async () => {
vi.mocked(db.recipeRepo.deleteRecipe).mockRejectedValue(new NotFoundError('Recipe not found'));
const response = await supertest(app).delete('/api/users/recipes/999');
expect(response.status).toBe(404);
expect(response.body.message).toBe('Recipe not found');
});
it('DELETE /recipes/:recipeId should return 400 for invalid recipe ID', async () => {
const response = await supertest(app).delete('/api/users/recipes/abc');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('received NaN');
});
it("PUT /recipes/:recipeId should update a user's own recipe", async () => {
const updates = { description: 'A new delicious description.' };
const mockUpdatedRecipe = createMockRecipe({ recipe_id: 1, ...updates });
@@ -1181,6 +1194,14 @@ describe('User Routes (/api/users)', () => {
expect(response.body.errors[0].message).toBe('No fields provided to update.');
});
it('PUT /recipes/:recipeId should return 400 for invalid recipe ID', async () => {
const response = await supertest(app)
.put('/api/users/recipes/abc')
.send({ name: 'New Name' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('received NaN');
});
it('GET /shopping-lists/:listId should return 404 if list is not found', async () => {
vi.mocked(db.shoppingRepo.getShoppingListById).mockRejectedValue(
new NotFoundError('Shopping list not found'),
@@ -1214,5 +1235,96 @@ describe('User Routes (/api/users)', () => {
expect(logger.error).toHaveBeenCalled();
});
}); // End of Recipe Routes
describe('Rate Limiting', () => {
beforeAll(() => {
vi.useFakeTimers();
});
beforeEach(() => {
// Advance time to ensure rate limits are reset between tests
vi.advanceTimersByTime(2 * 60 * 60 * 1000);
});
afterAll(() => {
vi.useRealTimers();
});
it('should apply userUpdateLimiter to PUT /profile', async () => {
vi.mocked(db.userRepo.updateUserProfile).mockResolvedValue(mockUserProfile);
const response = await supertest(app)
.put('/api/users/profile')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ full_name: 'Rate Limit Test' });
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
});
it('should apply userSensitiveUpdateLimiter to PUT /profile/password and block after limit', async () => {
const limit = 5;
vi.mocked(userService.updateUserPassword).mockResolvedValue(undefined);
// Consume the limit
for (let i = 0; i < limit; i++) {
const response = await supertest(app)
.put('/api/users/profile/password')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ newPassword: 'StrongPassword123!' });
expect(response.status).toBe(200);
}
// Next request should be blocked
const response = await supertest(app)
.put('/api/users/profile/password')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ newPassword: 'StrongPassword123!' });
expect(response.status).toBe(429);
expect(response.text).toContain('Too many sensitive requests');
});
it('should apply userUploadLimiter to POST /profile/avatar', async () => {
vi.mocked(userService.updateUserAvatar).mockResolvedValue(mockUserProfile);
const dummyImagePath = 'test-avatar.png';
const response = await supertest(app)
.post('/api/users/profile/avatar')
.set('X-Test-Rate-Limit-Enable', 'true')
.attach('avatar', Buffer.from('dummy-image-content'), dummyImagePath);
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(20);
});
it('should apply userSensitiveUpdateLimiter to DELETE /account and block after limit', async () => {
// Explicitly advance time to ensure the rate limiter window has reset from previous tests
vi.advanceTimersByTime(60 * 60 * 1000 + 5000);
const limit = 5;
vi.mocked(userService.deleteUserAccount).mockResolvedValue(undefined);
// Consume the limit
for (let i = 0; i < limit; i++) {
const response = await supertest(app)
.delete('/api/users/account')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ password: 'correct-password' });
expect(response.status).toBe(200);
}
// Next request should be blocked
const response = await supertest(app)
.delete('/api/users/account')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ password: 'correct-password' });
expect(response.status).toBe(429);
expect(response.text).toContain('Too many sensitive requests');
});
});
});
});

View File

@@ -21,6 +21,11 @@ import {
} from '../utils/zodUtils';
import * as db from '../services/db/index.db';
import { cleanupUploadedFile } from '../utils/fileUtils';
import {
userUpdateLimiter,
userSensitiveUpdateLimiter,
userUploadLimiter,
} from '../config/rateLimiters';
const router = express.Router();
@@ -95,6 +100,7 @@ const avatarUpload = createUploadMiddleware({
*/
router.post(
'/profile/avatar',
userUploadLimiter,
avatarUpload.single('avatar'),
async (req: Request, res: Response, next: NextFunction) => {
// The try-catch block was already correct here.
@@ -215,6 +221,7 @@ router.get('/profile', validateRequest(emptySchema), async (req, res, next: Next
type UpdateProfileRequest = z.infer<typeof updateProfileSchema>;
router.put(
'/profile',
userUpdateLimiter,
validateRequest(updateProfileSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] PUT /api/users/profile - ENTER`);
@@ -241,6 +248,7 @@ router.put(
type UpdatePasswordRequest = z.infer<typeof updatePasswordSchema>;
router.put(
'/profile/password',
userSensitiveUpdateLimiter,
validateRequest(updatePasswordSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] PUT /api/users/profile/password - ENTER`);
@@ -264,6 +272,7 @@ router.put(
type DeleteAccountRequest = z.infer<typeof deleteAccountSchema>;
router.delete(
'/account',
userSensitiveUpdateLimiter,
validateRequest(deleteAccountSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] DELETE /api/users/account - ENTER`);
@@ -302,6 +311,7 @@ router.get('/watched-items', validateRequest(emptySchema), async (req, res, next
type AddWatchedItemRequest = z.infer<typeof addWatchedItemSchema>;
router.post(
'/watched-items',
userUpdateLimiter,
validateRequest(addWatchedItemSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] POST /api/users/watched-items - ENTER`);
@@ -333,6 +343,7 @@ const watchedItemIdSchema = numericIdParam('masterItemId');
type DeleteWatchedItemRequest = z.infer<typeof watchedItemIdSchema>;
router.delete(
'/watched-items/:masterItemId',
userUpdateLimiter,
validateRequest(watchedItemIdSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] DELETE /api/users/watched-items/:masterItemId - ENTER`);
@@ -407,6 +418,7 @@ router.get(
type CreateShoppingListRequest = z.infer<typeof createShoppingListSchema>;
router.post(
'/shopping-lists',
userUpdateLimiter,
validateRequest(createShoppingListSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] POST /api/users/shopping-lists - ENTER`);
@@ -435,6 +447,7 @@ router.post(
*/
router.delete(
'/shopping-lists/:listId',
userUpdateLimiter,
validateRequest(shoppingListIdSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] DELETE /api/users/shopping-lists/:listId - ENTER`);
@@ -475,6 +488,7 @@ const addShoppingListItemSchema = shoppingListIdSchema.extend({
type AddShoppingListItemRequest = z.infer<typeof addShoppingListItemSchema>;
router.post(
'/shopping-lists/:listId/items',
userUpdateLimiter,
validateRequest(addShoppingListItemSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] POST /api/users/shopping-lists/:listId/items - ENTER`);
@@ -515,6 +529,7 @@ const updateShoppingListItemSchema = numericIdParam('itemId').extend({
type UpdateShoppingListItemRequest = z.infer<typeof updateShoppingListItemSchema>;
router.put(
'/shopping-lists/items/:itemId',
userUpdateLimiter,
validateRequest(updateShoppingListItemSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] PUT /api/users/shopping-lists/items/:itemId - ENTER`);
@@ -546,6 +561,7 @@ const shoppingListItemIdSchema = numericIdParam('itemId');
type DeleteShoppingListItemRequest = z.infer<typeof shoppingListItemIdSchema>;
router.delete(
'/shopping-lists/items/:itemId',
userUpdateLimiter,
validateRequest(shoppingListItemIdSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] DELETE /api/users/shopping-lists/items/:itemId - ENTER`);
@@ -574,6 +590,7 @@ const updatePreferencesSchema = z.object({
type UpdatePreferencesRequest = z.infer<typeof updatePreferencesSchema>;
router.put(
'/profile/preferences',
userUpdateLimiter,
validateRequest(updatePreferencesSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] PUT /api/users/profile/preferences - ENTER`);
@@ -619,6 +636,7 @@ const setUserRestrictionsSchema = z.object({
type SetUserRestrictionsRequest = z.infer<typeof setUserRestrictionsSchema>;
router.put(
'/me/dietary-restrictions',
userUpdateLimiter,
validateRequest(setUserRestrictionsSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] PUT /api/users/me/dietary-restrictions - ENTER`);
@@ -663,6 +681,7 @@ const setUserAppliancesSchema = z.object({
type SetUserAppliancesRequest = z.infer<typeof setUserAppliancesSchema>;
router.put(
'/me/appliances',
userUpdateLimiter,
validateRequest(setUserAppliancesSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] PUT /api/users/me/appliances - ENTER`);
@@ -730,6 +749,7 @@ const updateUserAddressSchema = z.object({
type UpdateUserAddressRequest = z.infer<typeof updateUserAddressSchema>;
router.put(
'/profile/address',
userUpdateLimiter,
validateRequest(updateUserAddressSchema),
async (req, res, next: NextFunction) => {
const userProfile = req.user as UserProfile;
@@ -756,6 +776,7 @@ const recipeIdSchema = numericIdParam('recipeId');
type DeleteRecipeRequest = z.infer<typeof recipeIdSchema>;
router.delete(
'/recipes/:recipeId',
userUpdateLimiter,
validateRequest(recipeIdSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] DELETE /api/users/recipes/:recipeId - ENTER`);
@@ -794,6 +815,7 @@ const updateRecipeSchema = recipeIdSchema.extend({
type UpdateRecipeRequest = z.infer<typeof updateRecipeSchema>;
router.put(
'/recipes/:recipeId',
userUpdateLimiter,
validateRequest(updateRecipeSchema),
async (req, res, next: NextFunction) => {
logger.debug(`[ROUTE] PUT /api/users/recipes/:recipeId - ENTER`);

View File

@@ -81,6 +81,7 @@ vi.mock('./db/flyer.db', () => ({
vi.mock('../utils/imageProcessor', () => ({
generateFlyerIcon: vi.fn(),
processAndSaveImage: vi.fn(),
}));
vi.mock('./db/admin.db', () => ({
@@ -93,8 +94,8 @@ vi.mock('./db/admin.db', () => ({
import * as dbModule from './db/index.db';
import { flyerQueue } from './queueService.server';
import { createFlyerAndItems } from './db/flyer.db';
import { withTransaction } from './db/index.db';
import { generateFlyerIcon } from '../utils/imageProcessor';
import { withTransaction } from './db/index.db'; // This was a duplicate, fixed.
import { generateFlyerIcon, processAndSaveImage } from '../utils/imageProcessor';
// Define a mock interface that closely resembles the actual Flyer type for testing purposes.
// This helps ensure type safety in mocks without relying on 'any'.
@@ -115,7 +116,7 @@ interface MockFlyer {
updated_at: string;
}
const baseUrl = 'http://localhost:3001';
const baseUrl = 'https://example.com';
describe('AI Service (Server)', () => {
// Create mock dependencies that will be injected into the service
@@ -808,9 +809,11 @@ describe('AI Service (Server)', () => {
expect(
(localAiServiceInstance as any)._parseJsonFromAiResponse(responseText, localLogger),
).toBeNull(); // This was a duplicate, fixed.
// The code now fails earlier because it can't find the closing brace.
// We need to update the assertion to match the actual error log.
expect(localLogger.error).toHaveBeenCalledWith(
expect.objectContaining({ jsonSlice: '{ "key": "value"' }),
'[_parseJsonFromAiResponse] Failed to parse JSON slice.',
{ responseText }, // The log includes the full response text.
"[_parseJsonFromAiResponse] Could not find ending '}' or ']' in response.",
);
});
});
@@ -1012,7 +1015,7 @@ describe('AI Service (Server)', () => {
userId: 'user123',
submitterIp: '127.0.0.1',
userProfileAddress: '123 St, City, Country', // Partial address match based on filter(Boolean)
baseUrl: 'http://localhost:3000',
baseUrl: 'https://example.com',
});
expect(result.id).toBe('job123');
});
@@ -1034,7 +1037,7 @@ describe('AI Service (Server)', () => {
expect.objectContaining({
userId: undefined,
userProfileAddress: undefined,
baseUrl: 'http://localhost:3000',
baseUrl: 'https://example.com',
}),
);
});
@@ -1052,6 +1055,7 @@ describe('AI Service (Server)', () => {
beforeEach(() => {
// Default success mocks. Use createMockFlyer for a more complete mock.
vi.mocked(dbModule.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
vi.mocked(processAndSaveImage).mockResolvedValue('processed.jpg');
vi.mocked(generateFlyerIcon).mockResolvedValue('icon.jpg');
vi.mocked(createFlyerAndItems).mockResolvedValue({
flyer: {

View File

@@ -23,8 +23,8 @@ import * as db from './db/index.db';
import { flyerQueue } from './queueService.server';
import type { Job } from 'bullmq';
import { createFlyerAndItems } from './db/flyer.db';
import { getBaseUrl } from '../utils/serverUtils';
import { generateFlyerIcon } from '../utils/imageProcessor';
import { getBaseUrl } from '../utils/serverUtils'; // This was a duplicate, fixed.
import { generateFlyerIcon, processAndSaveImage } from '../utils/imageProcessor';
import { AdminRepository } from './db/admin.db';
import path from 'path';
import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError
@@ -73,14 +73,7 @@ interface IAiClient {
* This type is intentionally loose to accommodate potential null/undefined values
* from the AI before they are cleaned and normalized.
*/
export type RawFlyerItem = {
item: string | null;
price_display: string | null | undefined;
price_in_cents: number | null | undefined;
quantity: string | null | undefined;
category_name: string | null | undefined;
master_item_id?: number | null | undefined;
};
export type RawFlyerItem = z.infer<typeof ExtractedFlyerItemSchema>;
export class DuplicateFlyerError extends FlyerProcessingError {
constructor(message: string, public flyerId: number) {
@@ -215,7 +208,11 @@ export class AIService {
this.logger.warn(
'[AIService] Mock generateContent called. This should only happen in tests when no API key is available.',
);
return { text: '[]' } as unknown as GenerateContentResponse;
// Return a minimal valid JSON object structure to prevent downstream parsing errors.
const mockResponse = { store_name: 'Mock Store', items: [] };
return {
text: JSON.stringify(mockResponse),
} as unknown as GenerateContentResponse;
},
};
}
@@ -369,62 +366,43 @@ export class AIService {
* @returns The parsed JSON object, or null if parsing fails.
*/
private _parseJsonFromAiResponse<T>(responseText: string | undefined, logger: Logger): T | null {
// --- START HYPER-DIAGNOSTIC LOGGING ---
console.log('\n--- DIAGNOSING _parseJsonFromAiResponse ---');
console.log(
`1. Initial responseText (Type: ${typeof responseText}):`,
JSON.stringify(responseText),
// --- START EXTENSIVE DEBUG LOGGING ---
logger.debug(
{
responseText_type: typeof responseText,
responseText_length: responseText?.length,
responseText_preview: responseText?.substring(0, 200),
},
'[_parseJsonFromAiResponse] Starting JSON parsing.',
);
// --- END HYPER-DIAGNOSTIC LOGGING ---
if (!responseText) {
logger.warn(
'[_parseJsonFromAiResponse] Response text is empty or undefined. Returning null.',
);
console.log('2. responseText is falsy. ABORTING.');
console.log('--- END DIAGNOSIS ---\n');
logger.warn('[_parseJsonFromAiResponse] Response text is empty or undefined. Aborting parsing.');
return null;
}
// Find the start of the JSON, which can be inside a markdown block
const markdownRegex = /```(json)?\s*([\s\S]*?)\s*```/;
const markdownMatch = responseText.match(markdownRegex);
console.log('2. Regex Result (markdownMatch):', markdownMatch);
let jsonString;
if (markdownMatch && markdownMatch[2] !== undefined) {
// Check for capture group
console.log('3. Regex matched. Processing Captured Group.');
console.log(
` - Captured content (Type: ${typeof markdownMatch[2]}, Length: ${markdownMatch[2].length}):`,
JSON.stringify(markdownMatch[2]),
);
logger.debug(
{ rawCapture: markdownMatch[2] },
{ capturedLength: markdownMatch[2].length },
'[_parseJsonFromAiResponse] Found JSON content within markdown code block.',
);
jsonString = markdownMatch[2].trim();
console.log(
`4. After trimming, jsonString is (Type: ${typeof jsonString}, Length: ${jsonString.length}):`,
JSON.stringify(jsonString),
);
logger.debug(
{ trimmedJsonString: jsonString },
'[_parseJsonFromAiResponse] Trimmed extracted JSON string.',
);
} else {
console.log(
'3. Regex did NOT match or capture group 2 is undefined. Will attempt to parse entire responseText.',
);
logger.debug('[_parseJsonFromAiResponse] No markdown code block found. Using raw response text.');
jsonString = responseText;
}
// Find the first '{' or '[' and the last '}' or ']' to isolate the JSON object.
const firstBrace = jsonString.indexOf('{');
const firstBracket = jsonString.indexOf('[');
console.log(
`5. Index search on jsonString: firstBrace=${firstBrace}, firstBracket=${firstBracket}`,
logger.debug(
{ firstBrace, firstBracket },
'[_parseJsonFromAiResponse] Searching for start of JSON.',
);
// Determine the starting point of the JSON content
@@ -432,37 +410,44 @@ export class AIService {
firstBrace === -1 || (firstBracket !== -1 && firstBracket < firstBrace)
? firstBracket
: firstBrace;
console.log('6. Calculated startIndex:', startIndex);
if (startIndex === -1) {
logger.error(
{ responseText },
"[_parseJsonFromAiResponse] Could not find starting '{' or '[' in response.",
);
console.log('7. startIndex is -1. ABORTING.');
console.log('--- END DIAGNOSIS ---\n');
return null;
}
const jsonSlice = jsonString.substring(startIndex);
console.log(
`8. Sliced string to be parsed (jsonSlice) (Length: ${jsonSlice.length}):`,
JSON.stringify(jsonSlice),
// Find the last brace or bracket to gracefully handle trailing text.
// This is a robust way to handle cases where the AI might add trailing text after the JSON.
const lastBrace = jsonString.lastIndexOf('}');
const lastBracket = jsonString.lastIndexOf(']');
const endIndex = Math.max(lastBrace, lastBracket);
if (endIndex === -1) {
logger.error(
{ responseText },
"[_parseJsonFromAiResponse] Could not find ending '}' or ']' in response.",
);
return null;
}
const jsonSlice = jsonString.substring(startIndex, endIndex + 1);
logger.debug(
{ sliceLength: jsonSlice.length },
'[_parseJsonFromAiResponse] Extracted JSON slice for parsing.',
);
try {
console.log('9. Attempting JSON.parse on jsonSlice...');
const parsed = JSON.parse(jsonSlice) as T;
console.log('10. SUCCESS: JSON.parse succeeded.');
console.log('--- END DIAGNOSIS (SUCCESS) ---\n');
logger.info('[_parseJsonFromAiResponse] Successfully parsed JSON from AI response.');
return parsed;
} catch (e) {
logger.error(
{ jsonSlice, error: e, errorMessage: (e as Error).message, stack: (e as Error).stack },
'[_parseJsonFromAiResponse] Failed to parse JSON slice.',
);
console.error('10. FAILURE: JSON.parse FAILED. Error:', e);
console.log('--- END DIAGNOSIS (FAILURE) ---\n');
return null;
}
}
@@ -768,6 +753,7 @@ async enqueueFlyerProcessing(
userProfile: UserProfile | undefined,
submitterIp: string,
logger: Logger,
baseUrlOverride?: string,
): Promise<Job> {
// 1. Check for duplicate flyer
const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
@@ -794,7 +780,19 @@ async enqueueFlyerProcessing(
.join(', ');
}
const baseUrl = getBaseUrl(logger);
const baseUrl = baseUrlOverride || getBaseUrl(logger);
// --- START DEBUGGING ---
// Add a fail-fast check to ensure the baseUrl is a valid URL before enqueuing.
// This will make the test fail at the upload step if the URL is the problem,
// which is easier to debug than a worker failure.
if (!baseUrl || !baseUrl.startsWith('http')) {
const errorMessage = `[aiService] FATAL: The generated baseUrl is not a valid absolute URL. Value: "${baseUrl}". This will cause the flyer processing worker to fail. Check the FRONTEND_URL environment variable.`;
logger.error(errorMessage);
// Throw a standard error that the calling route can handle.
throw new Error(errorMessage);
}
logger.info({ baseUrl }, '[aiService] Enqueuing job with valid baseUrl.');
// --- END DEBUGGING ---
// 3. Add job to the queue
const job = await flyerQueue.add('process-flyer', {
@@ -818,6 +816,7 @@ async enqueueFlyerProcessing(
body: any,
logger: Logger,
): { parsed: FlyerProcessPayload; extractedData: Partial<ExtractedCoreData> | null | undefined } {
logger.debug({ body, type: typeof body }, '[AIService] Starting _parseLegacyPayload');
let parsed: FlyerProcessPayload = {};
try {
@@ -826,6 +825,7 @@ async enqueueFlyerProcessing(
logger.warn({ error: errMsg(e) }, '[AIService] Failed to parse top-level request body string.');
return { parsed: {}, extractedData: {} };
}
logger.debug({ parsed }, '[AIService] Parsed top-level body');
// If the real payload is nested inside a 'data' property (which could be a string),
// we parse it out but keep the original `parsed` object for top-level properties like checksum.
@@ -841,13 +841,16 @@ async enqueueFlyerProcessing(
potentialPayload = parsed.data;
}
}
logger.debug({ potentialPayload }, '[AIService] Potential payload after checking "data" property');
// The extracted data is either in an `extractedData` key or is the payload itself.
const extractedData = potentialPayload.extractedData ?? potentialPayload;
logger.debug({ extractedData: !!extractedData }, '[AIService] Extracted data object');
// Merge for checksum lookup: properties in the outer `parsed` object (like a top-level checksum)
// take precedence over any same-named properties inside `potentialPayload`.
const finalParsed = { ...potentialPayload, ...parsed };
logger.debug({ finalParsed }, '[AIService] Final parsed object for checksum lookup');
return { parsed: finalParsed, extractedData };
}
@@ -858,10 +861,12 @@ async enqueueFlyerProcessing(
userProfile: UserProfile | undefined,
logger: Logger,
): Promise<Flyer> {
logger.debug({ body, file }, '[AIService] Starting processLegacyFlyerUpload');
const { parsed, extractedData: initialExtractedData } = this._parseLegacyPayload(body, logger);
let extractedData = initialExtractedData;
const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
logger.debug({ checksum, parsed }, '[AIService] Extracted checksum from legacy payload');
if (!checksum) {
throw new ValidationError([], 'Checksum is required.');
}
@@ -896,12 +901,24 @@ async enqueueFlyerProcessing(
logger.warn('extractedData.store_name missing; using fallback store name.');
}
const iconsDir = path.join(path.dirname(file.path), 'icons');
const iconFileName = await generateFlyerIcon(file.path, iconsDir, logger);
// Process the uploaded image to strip metadata and optimize it.
const flyerImageDir = path.dirname(file.path);
const processedImageFileName = await processAndSaveImage(
file.path,
flyerImageDir,
originalFileName,
logger,
);
const processedImagePath = path.join(flyerImageDir, processedImageFileName);
// Generate the icon from the newly processed (and cleaned) image.
const iconsDir = path.join(flyerImageDir, 'icons');
const iconFileName = await generateFlyerIcon(processedImagePath, iconsDir, logger);
const baseUrl = getBaseUrl(logger);
const iconUrl = `${baseUrl}/flyer-images/icons/${iconFileName}`;
const imageUrl = `${baseUrl}/flyer-images/${file.filename}`;
const imageUrl = `${baseUrl}/flyer-images/${processedImageFileName}`;
logger.debug({ imageUrl, iconUrl }, 'Constructed URLs for legacy upload');
const flyerData: FlyerInsert = {
file_name: originalFileName,

View File

@@ -32,13 +32,13 @@ const joinUrl = (base: string, path: string): string => {
* A promise that holds the in-progress token refresh operation.
* This prevents multiple parallel refresh requests.
*/
let refreshTokenPromise: Promise<string> | null = null;
let performTokenRefreshPromise: Promise<string> | null = null;
/**
* Attempts to refresh the access token using the HttpOnly refresh token cookie.
* @returns A promise that resolves to the new access token.
*/
const refreshToken = async (): Promise<string> => {
const _performTokenRefresh = async (): Promise<string> => {
logger.info('Attempting to refresh access token...');
try {
// Use the joinUrl helper for consistency, though usually this is a relative fetch in browser
@@ -75,11 +75,15 @@ const refreshToken = async (): Promise<string> => {
};
/**
* A custom fetch wrapper that handles automatic token refreshing.
* All authenticated API calls should use this function.
* @param url The URL to fetch.
* @param options The fetch options.
* @returns A promise that resolves to the fetch Response.
* A custom fetch wrapper that handles automatic token refreshing for authenticated API calls.
* If a request fails with a 401 Unauthorized status, it attempts to refresh the access token
* using the refresh token cookie. If successful, it retries the original request with the new token.
* All authenticated API calls should use this function or one of its helpers (e.g., `authedGet`).
*
* @param url The endpoint path (e.g., '/users/profile') or a full URL.
* @param options Standard `fetch` options (method, body, etc.).
* @param apiOptions Custom options for the API client, such as `tokenOverride` for testing or an `AbortSignal`.
* @returns A promise that resolves to the final `Response` object from the fetch call.
*/
export const apiFetch = async (
url: string,
@@ -122,12 +126,12 @@ export const apiFetch = async (
try {
logger.info(`apiFetch: Received 401 for ${fullUrl}. Attempting token refresh.`);
// If no refresh is in progress, start one.
if (!refreshTokenPromise) {
refreshTokenPromise = refreshToken();
if (!performTokenRefreshPromise) {
performTokenRefreshPromise = _performTokenRefresh();
}
// Wait for the existing or new refresh operation to complete.
const newToken = await refreshTokenPromise;
const newToken = await performTokenRefreshPromise;
logger.info(`apiFetch: Token refreshed. Retrying original request to ${fullUrl}.`);
// Retry the original request with the new token.
@@ -138,7 +142,7 @@ export const apiFetch = async (
return Promise.reject(refreshError);
} finally {
// Clear the promise so the next 401 will trigger a new refresh.
refreshTokenPromise = null;
performTokenRefreshPromise = null;
}
}
@@ -768,6 +772,25 @@ export const triggerFailingJob = (tokenOverride?: string): Promise<Response> =>
export const getJobStatus = (jobId: string, tokenOverride?: string): Promise<Response> =>
authedGet(`/ai/jobs/${jobId}/status`, { tokenOverride });
/**
* Refreshes an access token using a refresh token cookie.
* This is intended for use in Node.js test environments where cookies must be set manually.
* @param cookie The full 'Cookie' header string (e.g., "refreshToken=...").
* @returns A promise that resolves to the fetch Response.
*/
export async function refreshToken(cookie: string) {
const url = joinUrl(API_BASE_URL, '/auth/refresh-token');
const options: RequestInit = {
method: 'POST',
headers: {
'Content-Type': 'application/json',
// The browser would handle this automatically, but in Node.js tests we must set it manually.
Cookie: cookie,
},
};
return fetch(url, options);
}
/**
* Triggers the clearing of the geocoding cache on the server.
* Requires admin privileges.

View File

@@ -59,7 +59,7 @@ describe('AuthService', () => {
// Set environment variables before any modules are imported
vi.stubEnv('JWT_SECRET', 'test-secret');
vi.stubEnv('FRONTEND_URL', 'http://localhost:3000');
vi.stubEnv('FRONTEND_URL', 'https://example.com');
// Mock all dependencies before dynamically importing the service
// Core modules like bcrypt, jsonwebtoken, and crypto are now mocked globally in tests-setup-unit.ts
@@ -134,7 +134,6 @@ describe('AuthService', () => {
'hashed-password',
{ full_name: 'Test User', avatar_url: undefined },
reqLog,
{},
);
expect(transactionalAdminRepoMocks.logActivity).toHaveBeenCalledWith(
expect.objectContaining({

View File

@@ -40,7 +40,6 @@ class AuthService {
hashedPassword,
{ full_name: fullName, avatar_url: avatarUrl },
reqLog,
client, // Pass the transactional client
);
logger.info(`Successfully created new user in DB: ${newUser.user.email} (ID: ${newUser.user.user_id})`);

View File

@@ -24,6 +24,19 @@ vi.mock('../services/logger.server', () => ({
// Mock the date utility to control the output for the weekly analytics job
vi.mock('../utils/dateUtils', () => ({
getSimpleWeekAndYear: vi.fn(() => ({ year: 2024, week: 42 })),
getCurrentDateISOString: vi.fn(() => '2024-10-18'),
}));
vi.mock('../services/queueService.server', () => ({
analyticsQueue: {
add: vi.fn(),
},
weeklyAnalyticsQueue: {
add: vi.fn(),
},
emailQueue: {
add: vi.fn(),
},
}));
import { BackgroundJobService, startBackgroundJobs } from './backgroundJobService';
@@ -32,6 +45,7 @@ import type { PersonalizationRepository } from './db/personalization.db';
import type { NotificationRepository } from './db/notification.db';
import { createMockWatchedItemDeal } from '../tests/utils/mockFactories';
import { logger as globalMockLogger } from '../services/logger.server'; // Import the mocked logger
import { analyticsQueue, weeklyAnalyticsQueue } from '../services/queueService.server';
describe('Background Job Service', () => {
// Create mock dependencies that will be injected into the service
@@ -118,6 +132,37 @@ describe('Background Job Service', () => {
mockServiceLogger,
);
describe('Manual Triggers', () => {
it('triggerAnalyticsReport should add a daily report job to the queue', async () => {
// The mock should return the jobId passed to it to simulate bullmq's behavior
vi.mocked(analyticsQueue.add).mockImplementation(async (name, data, opts) => ({ id: opts?.jobId }) as any);
const jobId = await service.triggerAnalyticsReport();
expect(jobId).toContain('manual-report-');
expect(analyticsQueue.add).toHaveBeenCalledWith(
'generate-daily-report',
{ reportDate: '2024-10-18' },
{ jobId: expect.stringContaining('manual-report-') },
);
});
it('triggerWeeklyAnalyticsReport should add a weekly report job to the queue', async () => {
// The mock should return the jobId passed to it
vi.mocked(weeklyAnalyticsQueue.add).mockImplementation(async (name, data, opts) => ({ id: opts?.jobId }) as any);
const jobId = await service.triggerWeeklyAnalyticsReport();
expect(jobId).toContain('manual-weekly-report-');
expect(weeklyAnalyticsQueue.add).toHaveBeenCalledWith(
'generate-weekly-report',
{
reportYear: 2024, // From mocked dateUtils
reportWeek: 42, // From mocked dateUtils
},
{ jobId: expect.stringContaining('manual-weekly-report-') },
);
});
});
it('should do nothing if no deals are found for any user', async () => {
mockPersonalizationRepo.getBestSalePricesForAllUsers.mockResolvedValue([]);
await service.runDailyDealCheck();
@@ -153,24 +198,27 @@ describe('Background Job Service', () => {
// Check that in-app notifications were created for both users
expect(mockNotificationRepo.createBulkNotifications).toHaveBeenCalledTimes(1);
const notificationPayload = mockNotificationRepo.createBulkNotifications.mock.calls[0][0];
expect(notificationPayload).toHaveLength(2);
// Use expect.arrayContaining to be order-agnostic.
expect(notificationPayload).toEqual(
expect.arrayContaining([
{
user_id: 'user-1',
content: 'You have 1 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
{
user_id: 'user-2',
content: 'You have 2 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
]),
// Sort by user_id to ensure a consistent order for a direct `toEqual` comparison.
// This provides a clearer diff on failure than `expect.arrayContaining`.
const sortedPayload = [...notificationPayload].sort((a, b) =>
a.user_id.localeCompare(b.user_id),
);
expect(sortedPayload).toEqual([
{
user_id: 'user-1',
content: 'You have 1 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
{
user_id: 'user-2',
content: 'You have 2 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
]);
});
it('should handle and log errors for individual users without stopping the process', async () => {
@@ -252,7 +300,7 @@ describe('Background Job Service', () => {
vi.mocked(mockWeeklyAnalyticsQueue.add).mockClear();
});
it('should schedule three cron jobs with the correct schedules', () => {
it('should schedule four cron jobs with the correct schedules', () => {
startBackgroundJobs(
mockBackgroundJobService,
mockAnalyticsQueue,

View File

@@ -2,13 +2,19 @@
import cron from 'node-cron';
import type { Logger } from 'pino';
import type { Queue } from 'bullmq';
import { Notification, WatchedItemDeal } from '../types';
import { getSimpleWeekAndYear } from '../utils/dateUtils';
import { formatCurrency } from '../utils/formatUtils';
import { getSimpleWeekAndYear, getCurrentDateISOString } from '../utils/dateUtils';
import type { Notification, WatchedItemDeal } from '../types';
// Import types for repositories from their source files
import type { PersonalizationRepository } from './db/personalization.db';
import type { NotificationRepository } from './db/notification.db';
import { analyticsQueue, weeklyAnalyticsQueue } from './queueService.server';
type UserDealGroup = {
userProfile: { user_id: string; email: string; full_name: string | null };
deals: WatchedItemDeal[];
};
interface EmailJobData {
to: string;
subject: string;
@@ -25,7 +31,7 @@ export class BackgroundJobService {
) {}
public async triggerAnalyticsReport(): Promise<string> {
const reportDate = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
const reportDate = getCurrentDateISOString(); // YYYY-MM-DD
const jobId = `manual-report-${reportDate}-${Date.now()}`;
const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
return job.id!;
@@ -57,14 +63,16 @@ export class BackgroundJobService {
const dealsListHtml = deals
.map(
(deal) =>
`<li><strong>${deal.item_name}</strong> is on sale for <strong>$${(deal.best_price_in_cents / 100).toFixed(2)}</strong> at ${deal.store_name}!</li>`,
`<li><strong>${deal.item_name}</strong> is on sale for <strong>${formatCurrency(
deal.best_price_in_cents,
)}</strong> at ${deal.store_name}!</li>`,
)
.join('');
const html = `<p>Hi ${recipientName},</p><p>We found some great deals on items you're watching:</p><ul>${dealsListHtml}</ul>`;
const text = `Hi ${recipientName},\n\nWe found some great deals on items you're watching. Visit the deals page on the site to learn more.`;
// Use a predictable Job ID to prevent duplicate email notifications for the same user on the same day.
const today = new Date().toISOString().split('T')[0];
const today = getCurrentDateISOString();
const jobId = `deal-email-${userProfile.user_id}-${today}`;
return {
@@ -82,15 +90,41 @@ export class BackgroundJobService {
private _prepareInAppNotification(
userId: string,
dealCount: number,
): Omit<Notification, 'notification_id' | 'is_read' | 'created_at'> {
): Omit<Notification, 'notification_id' | 'is_read' | 'created_at' | 'updated_at'> {
return {
user_id: userId,
content: `You have ${dealCount} new deal(s) on your watched items!`,
link_url: '/dashboard/deals', // A link to the future "My Deals" page
updated_at: new Date().toISOString(),
};
}
private async _processDealsForUser({
userProfile,
deals,
}: UserDealGroup): Promise<Omit<Notification, 'notification_id' | 'is_read' | 'created_at' | 'updated_at'> | null> {
try {
this.logger.info(
`[BackgroundJob] Found ${deals.length} deals for user ${userProfile.user_id}.`,
);
// Prepare in-app and email notifications.
const notification = this._prepareInAppNotification(userProfile.user_id, deals.length);
const { jobData, jobId } = this._prepareDealEmail(userProfile, deals);
// Enqueue an email notification job.
await this.emailQueue.add('send-deal-notification', jobData, { jobId });
// Return the notification to be collected for bulk insertion.
return notification;
} catch (userError) {
this.logger.error(
{ err: userError },
`[BackgroundJob] Failed to process deals for user ${userProfile.user_id}`,
);
return null; // Return null on error for this user.
}
}
/**
* Checks for new deals on watched items for all users and sends notifications.
* This function is designed to be run periodically (e.g., daily).
@@ -110,70 +144,47 @@ export class BackgroundJobService {
this.logger.info(`[BackgroundJob] Found ${allDeals.length} total deals across all users.`);
// 2. Group deals by user in memory.
const dealsByUser = allDeals.reduce<
Record<
string,
{
userProfile: { user_id: string; email: string; full_name: string | null };
deals: WatchedItemDeal[];
}
>
>((acc, deal) => {
if (!acc[deal.user_id]) {
acc[deal.user_id] = {
const dealsByUser = new Map<string, UserDealGroup>();
for (const deal of allDeals) {
let userGroup = dealsByUser.get(deal.user_id);
if (!userGroup) {
userGroup = {
userProfile: { user_id: deal.user_id, email: deal.email, full_name: deal.full_name },
deals: [],
};
dealsByUser.set(deal.user_id, userGroup);
}
acc[deal.user_id].deals.push(deal);
return acc;
}, {});
const allNotifications: Omit<Notification, 'notification_id' | 'is_read' | 'created_at'>[] =
[];
userGroup.deals.push(deal);
}
// 3. Process each user's deals in parallel.
const userProcessingPromises = Object.values(dealsByUser).map(
async ({ userProfile, deals }) => {
try {
this.logger.info(
`[BackgroundJob] Found ${deals.length} deals for user ${userProfile.user_id}.`,
);
// 4. Prepare in-app and email notifications.
const notification = this._prepareInAppNotification(userProfile.user_id, deals.length);
const { jobData, jobId } = this._prepareDealEmail(userProfile, deals);
// 5. Enqueue an email notification job.
await this.emailQueue.add('send-deal-notification', jobData, { jobId });
// Return the notification to be collected for bulk insertion.
return notification;
} catch (userError) {
this.logger.error(
{ err: userError },
`[BackgroundJob] Failed to process deals for user ${userProfile.user_id}`,
);
return null; // Return null on error for this user.
}
},
const userProcessingPromises = Array.from(dealsByUser.values()).map((userGroup) =>
this._processDealsForUser(userGroup),
);
// Wait for all user processing to complete.
const results = await Promise.allSettled(userProcessingPromises);
// 6. Collect all successfully created notifications.
results.forEach((result) => {
if (result.status === 'fulfilled' && result.value) {
allNotifications.push(result.value);
}
});
const successfulNotifications = results
.filter(
(
result,
): result is PromiseFulfilledResult<
Omit<Notification, 'notification_id' | 'is_read' | 'created_at' | 'updated_at'>
> => result.status === 'fulfilled' && !!result.value,
)
.map((result) => result.value);
// 7. Bulk insert all in-app notifications in a single query.
if (allNotifications.length > 0) {
await this.notificationRepo.createBulkNotifications(allNotifications, this.logger);
if (successfulNotifications.length > 0) {
const notificationsForDb = successfulNotifications.map((n) => ({
...n,
updated_at: new Date().toISOString(),
}));
await this.notificationRepo.createBulkNotifications(notificationsForDb, this.logger);
this.logger.info(
`[BackgroundJob] Successfully created ${allNotifications.length} in-app notifications.`,
`[BackgroundJob] Successfully created ${successfulNotifications.length} in-app notifications.`,
);
}
@@ -244,7 +255,7 @@ export function startBackgroundJobs(
(async () => {
logger.info('[BackgroundJob] Enqueuing daily analytics report generation job.');
try {
const reportDate = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
const reportDate = getCurrentDateISOString(); // YYYY-MM-DD
// We use a unique job ID to prevent duplicate jobs for the same day if the scheduler restarts.
await analyticsQueue.add(
'generate-daily-report',

View File

@@ -6,6 +6,7 @@ import {
UniqueConstraintError,
ForeignKeyConstraintError,
NotFoundError,
ForbiddenError,
ValidationError,
FileUploadError,
NotNullConstraintError,
@@ -89,6 +90,25 @@ describe('Custom Database and Application Errors', () => {
});
});
describe('ForbiddenError', () => {
it('should create an error with a default message and status 403', () => {
const error = new ForbiddenError();
expect(error).toBeInstanceOf(Error);
expect(error).toBeInstanceOf(RepositoryError);
expect(error).toBeInstanceOf(ForbiddenError);
expect(error.message).toBe('Access denied.');
expect(error.status).toBe(403);
expect(error.name).toBe('ForbiddenError');
});
it('should create an error with a custom message', () => {
const message = 'You shall not pass.';
const error = new ForbiddenError(message);
expect(error.message).toBe(message);
});
});
describe('ValidationError', () => {
it('should create an error with a default message, status 400, and validation errors array', () => {
const validationIssues = [{ path: ['email'], message: 'Invalid email' }];

View File

@@ -86,6 +86,16 @@ export class NotFoundError extends RepositoryError {
}
}
/**
* Thrown when the user does not have permission to access the resource.
*/
export class ForbiddenError extends RepositoryError {
constructor(message = 'Access denied.') {
super(message, 403); // 403 Forbidden
this.name = 'ForbiddenError';
}
}
/**
* Defines the structure for a single validation issue, often from a library like Zod.
*/

View File

@@ -132,8 +132,8 @@ describe('Flyer DB Service', () => {
it('should execute an INSERT query and return the new flyer', async () => {
const flyerData: FlyerDbInsert = {
file_name: 'test.jpg',
image_url: 'http://localhost:3001/images/test.jpg',
icon_url: 'http://localhost:3001/images/icons/test.jpg',
image_url: 'https://example.com/images/test.jpg',
icon_url: 'https://example.com/images/icons/test.jpg',
checksum: 'checksum123',
store_id: 1,
valid_from: '2024-01-01',
@@ -155,8 +155,8 @@ describe('Flyer DB Service', () => {
expect.stringContaining('INSERT INTO flyers'),
[
'test.jpg',
'http://localhost:3001/images/test.jpg',
'http://localhost:3001/images/icons/test.jpg',
'https://example.com/images/test.jpg',
'https://example.com/images/icons/test.jpg',
'checksum123',
1,
'2024-01-01',

View File

@@ -596,7 +596,7 @@ describe('Shopping DB Service', () => {
const mockReceipt = {
receipt_id: 1,
user_id: 'user-1',
receipt_image_url: 'http://example.com/receipt.jpg',
receipt_image_url: 'https://example.com/receipt.jpg',
status: 'pending',
};
mockPoolInstance.query.mockResolvedValue({ rows: [mockReceipt] });

View File

@@ -282,6 +282,95 @@ describe('User DB Service', () => {
});
});
describe('_createUser (private)', () => {
it('should execute queries in order and return a full user profile', async () => {
const mockUser = {
user_id: 'private-user-id',
email: 'private@example.com',
};
const mockDbProfile = {
user_id: 'private-user-id',
email: 'private@example.com',
role: 'user',
full_name: 'Private User',
avatar_url: null,
points: 0,
preferences: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
user_created_at: new Date().toISOString(),
user_updated_at: new Date().toISOString(),
};
const expectedProfile: UserProfile = {
user: {
user_id: mockDbProfile.user_id,
email: mockDbProfile.email,
created_at: mockDbProfile.user_created_at,
updated_at: mockDbProfile.user_updated_at,
},
full_name: 'Private User',
avatar_url: null,
role: 'user',
points: 0,
preferences: null,
created_at: mockDbProfile.created_at,
updated_at: mockDbProfile.updated_at,
};
// Mock the sequence of queries on the client
(mockPoolInstance.query as Mock)
.mockResolvedValueOnce({ rows: [] }) // set_config
.mockResolvedValueOnce({ rows: [mockUser] }) // INSERT user
.mockResolvedValueOnce({ rows: [mockDbProfile] }); // SELECT profile
// Access private method for testing
const result = await (userRepo as any)._createUser(
mockPoolInstance, // Pass the mock client
'private@example.com',
'hashedpass',
{ full_name: 'Private User' },
mockLogger,
);
expect(result).toEqual(expectedProfile);
expect(mockPoolInstance.query).toHaveBeenCalledTimes(3);
expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
1,
"SELECT set_config('my_app.user_metadata', $1, true)",
[JSON.stringify({ full_name: 'Private User' })],
);
expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
2,
'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
['private@example.com', 'hashedpass'],
);
expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
3,
expect.stringContaining('FROM public.users u'),
['private-user-id'],
);
});
it('should throw an error if profile is not found after user creation', async () => {
const mockUser = { user_id: 'no-profile-user', email: 'no-profile@example.com' };
(mockPoolInstance.query as Mock)
.mockResolvedValueOnce({ rows: [] }) // set_config
.mockResolvedValueOnce({ rows: [mockUser] }) // INSERT user
.mockResolvedValueOnce({ rows: [] }); // SELECT profile returns nothing
await expect(
(userRepo as any)._createUser(
mockPoolInstance,
'no-profile@example.com',
'pass',
{},
mockLogger,
),
).rejects.toThrow('Failed to create or retrieve user profile after registration.');
});
});
describe('findUserWithProfileByEmail', () => {
it('should query for a user and their profile by email', async () => {
const mockDbResult: any = {

View File

@@ -61,6 +61,64 @@ export class UserRepository {
}
}
/**
* The internal logic for creating a user. This method assumes it is being run
* within a database transaction and operates on a single PoolClient.
*/
private async _createUser(
dbClient: PoolClient,
email: string,
passwordHash: string | null,
profileData: { full_name?: string; avatar_url?: string },
logger: Logger,
): Promise<UserProfile> {
logger.debug(`[DB _createUser] Starting user creation for email: ${email}`);
await dbClient.query("SELECT set_config('my_app.user_metadata', $1, true)", [
JSON.stringify(profileData ?? {}),
]);
logger.debug(`[DB _createUser] Session metadata set for ${email}.`);
const userInsertRes = await dbClient.query<{ user_id: string; email: string }>(
'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
[email, passwordHash],
);
const newUserId = userInsertRes.rows[0].user_id;
logger.debug(`[DB _createUser] Inserted into users table. New user ID: ${newUserId}`);
const profileQuery = `
SELECT u.user_id, u.email, u.created_at as user_created_at, u.updated_at as user_updated_at, p.full_name, p.avatar_url, p.role, p.points, p.preferences, p.created_at, p.updated_at
FROM public.users u
JOIN public.profiles p ON u.user_id = p.user_id
WHERE u.user_id = $1;
`;
const finalProfileRes = await dbClient.query(profileQuery, [newUserId]);
const flatProfile = finalProfileRes.rows[0];
if (!flatProfile) {
throw new Error('Failed to create or retrieve user profile after registration.');
}
const fullUserProfile: UserProfile = {
user: {
user_id: flatProfile.user_id,
email: flatProfile.email,
created_at: flatProfile.user_created_at,
updated_at: flatProfile.user_updated_at,
},
full_name: flatProfile.full_name,
avatar_url: flatProfile.avatar_url,
role: flatProfile.role,
points: flatProfile.points,
preferences: flatProfile.preferences,
created_at: flatProfile.created_at,
updated_at: flatProfile.updated_at,
};
logger.debug({ user: fullUserProfile }, `[DB _createUser] Fetched full profile for new user:`);
return fullUserProfile;
}
/**
* Creates a new user in the public.users table.
* This method expects to be run within a transaction, so it requires a PoolClient.
@@ -74,60 +132,18 @@ export class UserRepository {
passwordHash: string | null,
profileData: { full_name?: string; avatar_url?: string },
logger: Logger,
// Allow passing a transactional client
client: Pool | PoolClient = this.db,
): Promise<UserProfile> {
// This method is now a wrapper that ensures the core logic runs within a transaction.
try {
logger.debug(`[DB createUser] Starting user creation for email: ${email}`);
// Use 'set_config' to safely pass parameters to a configuration variable.
await client.query("SELECT set_config('my_app.user_metadata', $1, true)", [
JSON.stringify(profileData),
]);
logger.debug(`[DB createUser] Session metadata set for ${email}.`);
// Insert the new user into the 'users' table. This will fire the trigger.
const userInsertRes = await client.query<{ user_id: string }>(
'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
[email, passwordHash],
);
const newUserId = userInsertRes.rows[0].user_id;
logger.debug(`[DB createUser] Inserted into users table. New user ID: ${newUserId}`);
// After the trigger has run, fetch the complete profile data.
const profileQuery = `
SELECT u.user_id, u.email, u.created_at as user_created_at, u.updated_at as user_updated_at, p.full_name, p.avatar_url, p.role, p.points, p.preferences, p.created_at, p.updated_at
FROM public.users u
JOIN public.profiles p ON u.user_id = p.user_id
WHERE u.user_id = $1;
`;
const finalProfileRes = await client.query(profileQuery, [newUserId]);
const flatProfile = finalProfileRes.rows[0];
if (!flatProfile) {
throw new Error('Failed to create or retrieve user profile after registration.');
// If this.db has a 'connect' method, it's a Pool. We must start a transaction.
if ('connect' in this.db) {
return await withTransaction(async (client) => {
return this._createUser(client, email, passwordHash, profileData, logger);
});
} else {
// If this.db is already a PoolClient, we're inside a transaction. Use it directly.
return await this._createUser(this.db as PoolClient, email, passwordHash, profileData, logger);
}
// Construct the nested UserProfile object to match the type definition.
const fullUserProfile: UserProfile = {
// user_id is now correctly part of the nested user object, not at the top level.
user: {
user_id: flatProfile.user_id,
email: flatProfile.email,
created_at: flatProfile.user_created_at,
updated_at: flatProfile.user_updated_at,
},
full_name: flatProfile.full_name,
avatar_url: flatProfile.avatar_url,
role: flatProfile.role,
points: flatProfile.points,
preferences: flatProfile.preferences,
created_at: flatProfile.created_at,
updated_at: flatProfile.updated_at,
};
logger.debug({ user: fullUserProfile }, `[DB createUser] Fetched full profile for new user:`);
return fullUserProfile;
} catch (error) {
handleDbError(error, logger, 'Error during createUser', { email }, {
uniqueMessage: 'A user with this email address already exists.',
@@ -136,6 +152,7 @@ export class UserRepository {
}
}
/**
* Finds a user by their email and joins their profile data.
* This is used by the LocalStrategy to get all necessary data for authentication and session creation in one query.

View File

@@ -21,7 +21,7 @@ const createMockJobData = (data: Partial<FlyerJobData>): FlyerJobData => ({
filePath: '/tmp/flyer.jpg',
originalFileName: 'flyer.jpg',
checksum: 'checksum-123',
baseUrl: 'http://localhost:3000',
baseUrl: 'https://example.com',
...data,
});

View File

@@ -5,6 +5,7 @@ import { logger as mockLogger } from './logger.server';
import { generateFlyerIcon } from '../utils/imageProcessor';
import type { AiProcessorResult } from './flyerAiProcessor.server';
import type { FlyerItemInsert } from '../types';
import { getBaseUrl } from '../utils/serverUtils';
// Mock the dependencies
vi.mock('../utils/imageProcessor', () => ({
@@ -15,6 +16,10 @@ vi.mock('./logger.server', () => ({
logger: { info: vi.fn(), error: vi.fn(), warn: vi.fn(), debug: vi.fn() },
}));
vi.mock('../utils/serverUtils', () => ({
getBaseUrl: vi.fn(),
}));
describe('FlyerDataTransformer', () => {
let transformer: FlyerDataTransformer;
@@ -23,12 +28,13 @@ describe('FlyerDataTransformer', () => {
transformer = new FlyerDataTransformer();
// Stub environment variables to ensure consistency and predictability.
// Prioritize FRONTEND_URL to match the updated service logic.
vi.stubEnv('FRONTEND_URL', 'http://localhost:3000');
vi.stubEnv('FRONTEND_URL', 'https://example.com');
vi.stubEnv('BASE_URL', ''); // Ensure this is not used to confirm priority logic
vi.stubEnv('PORT', ''); // Ensure this is not used
// Provide a default mock implementation for generateFlyerIcon
vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer-page-1.webp');
vi.mocked(getBaseUrl).mockReturnValue('https://example.com');
});
it('should transform AI data into database-ready format with a user ID', async () => {
@@ -60,17 +66,17 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false,
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
const originalFileName = 'my-flyer.pdf';
const checksum = 'checksum-abc-123';
const userId = 'user-xyz-456';
const baseUrl = 'http://test.host';
const baseUrl = 'https://example.com';
// Act
const { flyerData, itemsForDb } = await transformer.transform(
aiResult,
imagePaths,
originalFileName,
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
checksum,
userId,
mockLogger,
@@ -121,12 +127,6 @@ describe('FlyerDataTransformer', () => {
}),
);
// 3. Check that generateFlyerIcon was called correctly
expect(generateFlyerIcon).toHaveBeenCalledWith(
'/uploads/flyer-page-1.jpg',
'/uploads/icons',
mockLogger,
);
});
it('should handle missing optional data gracefully', async () => {
@@ -141,7 +141,6 @@ describe('FlyerDataTransformer', () => {
},
needsReview: true,
};
const imagePaths = [{ path: '/uploads/another.png', mimetype: 'image/png' }];
const originalFileName = 'another.png';
const checksum = 'checksum-def-456';
// No userId provided
@@ -151,8 +150,9 @@ describe('FlyerDataTransformer', () => {
// Act
const { flyerData, itemsForDb } = await transformer.transform(
aiResult,
imagePaths,
originalFileName,
'another.png',
'icon-another.webp',
checksum,
undefined,
mockLogger,
@@ -219,13 +219,13 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false,
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
// Act
const { itemsForDb } = await transformer.transform(
aiResult,
imagePaths,
'file.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum',
'user-1',
mockLogger,
@@ -250,7 +250,7 @@ describe('FlyerDataTransformer', () => {
);
});
it('should use fallback baseUrl if none is provided and log a warning', async () => {
it('should use fallback baseUrl from getBaseUrl if none is provided', async () => {
// Arrange
const aiResult: AiProcessorResult = {
data: {
@@ -262,18 +262,17 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false,
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
const baseUrl = undefined; // Explicitly pass undefined for this test
const baseUrl = ''; // Explicitly pass '' for this test
// The fallback logic uses process.env.PORT || 3000.
// The beforeEach sets PORT to '', so it should fallback to 3000.
const expectedFallbackUrl = 'http://localhost:3000';
const expectedFallbackUrl = 'http://fallback-url.com';
vi.mocked(getBaseUrl).mockReturnValue(expectedFallbackUrl);
// Act
const { flyerData } = await transformer.transform(
aiResult,
imagePaths,
'my-flyer.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum-abc-123',
'user-xyz-456',
mockLogger,
@@ -281,10 +280,8 @@ describe('FlyerDataTransformer', () => {
);
// Assert
// 1. Check that a warning was logged
expect(mockLogger.warn).toHaveBeenCalledWith(
`Base URL not provided in job data. Falling back to default local URL: ${expectedFallbackUrl}`,
);
// 1. Check that getBaseUrl was called
expect(getBaseUrl).toHaveBeenCalledWith(mockLogger);
// 2. Check that the URLs were constructed with the fallback
expect(flyerData.image_url).toBe(`${expectedFallbackUrl}/flyer-images/flyer-page-1.jpg`);
@@ -315,13 +312,13 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false,
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
// Act
const { itemsForDb } = await transformer.transform(
aiResult,
imagePaths,
'file.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum',
'user-1',
mockLogger,
@@ -353,13 +350,13 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false,
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
// Act
const { itemsForDb } = await transformer.transform(
aiResult,
imagePaths,
'file.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum',
'user-1',
mockLogger,
@@ -391,13 +388,13 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false,
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
// Act
const { itemsForDb } = await transformer.transform(
aiResult,
imagePaths,
'file.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum',
'user-1',
mockLogger,
@@ -432,13 +429,13 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false,
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
// Act
const { itemsForDb } = await transformer.transform(
aiResult,
imagePaths,
'file.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum',
'user-1',
mockLogger,
@@ -469,13 +466,13 @@ describe('FlyerDataTransformer', () => {
},
needsReview: false, // Key part of this test
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
// Act
const { flyerData } = await transformer.transform(
aiResult,
imagePaths,
'file.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum',
'user-1',
mockLogger,
@@ -498,13 +495,13 @@ describe('FlyerDataTransformer', () => {
},
needsReview: true, // Key part of this test
};
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
// Act
const { flyerData } = await transformer.transform(
aiResult,
imagePaths,
'file.pdf',
'flyer-page-1.jpg',
'icon-flyer-page-1.webp',
'checksum',
'user-1',
mockLogger,

View File

@@ -5,9 +5,9 @@ import type { Logger } from 'pino';
import type { FlyerInsert, FlyerItemInsert } from '../types';
import type { AiProcessorResult } from './flyerAiProcessor.server'; // Keep this import for AiProcessorResult
import { AiFlyerDataSchema } from '../types/ai'; // Import consolidated schema
import { generateFlyerIcon } from '../utils/imageProcessor';
import { TransformationError } from './processingErrors';
import { parsePriceToCents } from '../utils/priceParser';
import { getBaseUrl } from '../utils/serverUtils';
/**
* This class is responsible for transforming the validated data from the AI service
@@ -48,44 +48,25 @@ export class FlyerDataTransformer {
};
}
/**
* Generates a 64x64 icon for the flyer's first page.
* @param firstImage The path to the first image of the flyer.
* @param logger The logger instance.
* @returns The filename of the generated icon.
*/
private async _generateIcon(firstImage: string, logger: Logger): Promise<string> {
const iconFileName = await generateFlyerIcon(
firstImage,
path.join(path.dirname(firstImage), 'icons'),
logger,
);
return iconFileName;
}
/**
* Constructs the full public URLs for the flyer image and its icon.
* @param firstImage The path to the first image of the flyer.
* @param imageFileName The filename of the main processed image.
* @param iconFileName The filename of the generated icon.
* @param baseUrl The base URL from the job payload.
* @param logger The logger instance.
* @returns An object containing the full image_url and icon_url.
*/
private _buildUrls(
firstImage: string,
imageFileName: string,
iconFileName: string,
baseUrl: string | undefined,
baseUrl: string,
logger: Logger,
): { imageUrl: string; iconUrl: string } {
let finalBaseUrl = baseUrl;
if (!finalBaseUrl) {
const port = process.env.PORT || 3000;
finalBaseUrl = `http://localhost:${port}`;
logger.warn(`Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`);
}
finalBaseUrl = finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;
const imageUrl = `${finalBaseUrl}/flyer-images/${path.basename(firstImage)}`;
logger.debug({ imageFileName, iconFileName, baseUrl }, 'Building URLs');
const finalBaseUrl = baseUrl || getBaseUrl(logger);
const imageUrl = `${finalBaseUrl}/flyer-images/${imageFileName}`;
const iconUrl = `${finalBaseUrl}/flyer-images/icons/${iconFileName}`;
logger.debug({ imageUrl, iconUrl }, 'Constructed URLs');
return { imageUrl, iconUrl };
}
@@ -101,21 +82,20 @@ export class FlyerDataTransformer {
*/
async transform(
aiResult: AiProcessorResult,
imagePaths: { path: string; mimetype: string }[],
originalFileName: string,
imageFileName: string,
iconFileName: string,
checksum: string,
userId: string | undefined,
logger: Logger,
baseUrl?: string,
baseUrl: string,
): Promise<{ flyerData: FlyerInsert; itemsForDb: FlyerItemInsert[] }> {
logger.info('Starting data transformation from AI output to database format.');
try {
const { data: extractedData, needsReview } = aiResult;
const firstImage = imagePaths[0].path;
const iconFileName = await this._generateIcon(firstImage, logger);
const { imageUrl, iconUrl } = this._buildUrls(firstImage, iconFileName, baseUrl, logger);
const { imageUrl, iconUrl } = this._buildUrls(imageFileName, iconFileName, baseUrl, logger);
const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => this._normalizeItem(item));

View File

@@ -36,13 +36,20 @@ import {
PdfConversionError,
UnsupportedFileTypeError,
TransformationError,
DatabaseError,
} from './processingErrors';
import { NotFoundError } from './db/errors.db';
import { FlyerFileHandler } from './flyerFileHandler.server';
import { FlyerAiProcessor } from './flyerAiProcessor.server';
import type { IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
import { generateFlyerIcon } from '../utils/imageProcessor';
import type { AIService } from './aiService.server';
// Mock image processor functions
vi.mock('../utils/imageProcessor', () => ({
generateFlyerIcon: vi.fn(),
}));
// Mock dependencies
vi.mock('./aiService.server', () => ({
aiService: {
@@ -97,8 +104,8 @@ describe('FlyerProcessingService', () => {
vi.spyOn(FlyerDataTransformer.prototype, 'transform').mockResolvedValue({
flyerData: {
file_name: 'test.jpg',
image_url: 'http://example.com/test.jpg',
icon_url: 'http://example.com/icon.webp',
image_url: 'https://example.com/test.jpg',
icon_url: 'https://example.com/icon.webp',
store_name: 'Mock Store',
// Add required fields for FlyerInsert type
status: 'processed',
@@ -162,7 +169,7 @@ describe('FlyerProcessingService', () => {
flyer: createMockFlyer({
flyer_id: 1,
file_name: 'test.jpg',
image_url: 'http://example.com/test.jpg',
image_url: 'https://example.com/test.jpg',
item_count: 1,
}),
items: [],
@@ -171,6 +178,9 @@ describe('FlyerProcessingService', () => {
// FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
});
beforeEach(() => {
vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer.webp');
});
const createMockJob = (data: Partial<FlyerJobData>): Job<FlyerJobData> => {
return {
@@ -179,7 +189,7 @@ describe('FlyerProcessingService', () => {
filePath: '/tmp/flyer.jpg',
originalFileName: 'flyer.jpg',
checksum: 'checksum-123',
baseUrl: 'http://localhost:3000',
baseUrl: 'https://example.com',
...data,
},
updateProgress: vi.fn(),
@@ -202,19 +212,54 @@ describe('FlyerProcessingService', () => {
it('should process an image file successfully and enqueue a cleanup job', async () => {
const job = createMockJob({ filePath: '/tmp/flyer.jpg', originalFileName: 'flyer.jpg' });
// Arrange: Mock dependencies to simulate a successful run
mockFileHandler.prepareImageInputs.mockResolvedValue({
imagePaths: [{ path: '/tmp/flyer-processed.jpeg', mimetype: 'image/jpeg' }],
createdImagePaths: ['/tmp/flyer-processed.jpeg'],
});
vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer.webp');
const result = await service.processJob(job);
expect(result).toEqual({ flyerId: 1 });
// 1. File handler was called
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(job.data.filePath, job, expect.any(Object));
// 2. AI processor was called
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
// Verify that the transaction function was called.
// 3. Icon was generated from the processed image
expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-processed.jpeg', '/tmp/icons', expect.any(Object));
// 4. Transformer was called with the correct filenames
expect(FlyerDataTransformer.prototype.transform).toHaveBeenCalledWith(
expect.any(Object), // aiResult
'flyer.jpg', // originalFileName
'flyer-processed.jpeg', // imageFileName
'icon-flyer.webp', // iconFileName
'checksum-123', // checksum
undefined, // userId
expect.any(Object), // logger
'https://example.com', // baseUrl
);
// 5. DB transaction was initiated
expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
// Verify that the functions inside the transaction were called.
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
expect(mocks.mockAdminLogActivity).toHaveBeenCalledTimes(1);
// 6. Cleanup job was enqueued with all generated files
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
'cleanup-flyer-files',
{ flyerId: 1, paths: ['/tmp/flyer.jpg'] },
{
flyerId: 1,
paths: [
'/tmp/flyer.jpg', // original job path
'/tmp/flyer-processed.jpeg', // from prepareImageInputs
'/tmp/icons/icon-flyer.webp', // from generateFlyerIcon
],
},
expect.any(Object),
);
});
@@ -225,9 +270,13 @@ describe('FlyerProcessingService', () => {
// Mock the file handler to return multiple created paths
const createdPaths = ['/tmp/flyer-1.jpg', '/tmp/flyer-2.jpg'];
mockFileHandler.prepareImageInputs.mockResolvedValue({
imagePaths: createdPaths.map(p => ({ path: p, mimetype: 'image/jpeg' })),
imagePaths: [
{ path: '/tmp/flyer-1.jpg', mimetype: 'image/jpeg' },
{ path: '/tmp/flyer-2.jpg', mimetype: 'image/jpeg' },
],
createdImagePaths: createdPaths,
});
vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer-1.webp');
await service.processJob(job);
@@ -236,15 +285,18 @@ describe('FlyerProcessingService', () => {
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.pdf', job, expect.any(Object));
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
// Verify cleanup job includes original PDF and both generated images
// Verify icon generation was called for the first page
expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-1.jpg', '/tmp/icons', expect.any(Object));
// Verify cleanup job includes original PDF and all generated/processed images
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
'cleanup-flyer-files',
{
flyerId: 1,
paths: [
'/tmp/flyer.pdf',
'/tmp/flyer-1.jpg',
'/tmp/flyer-2.jpg',
'/tmp/flyer.pdf', // original job path
'/tmp/flyer-1.jpg', // from prepareImageInputs
'/tmp/flyer-2.jpg', // from prepareImageInputs
'/tmp/icons/icon-flyer-1.webp', // from generateFlyerIcon
],
},
expect.any(Object),
@@ -377,6 +429,7 @@ describe('FlyerProcessingService', () => {
imagePaths: [{ path: convertedPath, mimetype: 'image/png' }],
createdImagePaths: [convertedPath],
});
vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer-converted.webp');
await service.processJob(job);
@@ -384,9 +437,18 @@ describe('FlyerProcessingService', () => {
expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.gif', job, expect.any(Object));
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
// Verify icon generation was called for the converted image
expect(generateFlyerIcon).toHaveBeenCalledWith(convertedPath, '/tmp/icons', expect.any(Object));
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
'cleanup-flyer-files',
{ flyerId: 1, paths: ['/tmp/flyer.gif', convertedPath] },
{
flyerId: 1,
paths: [
'/tmp/flyer.gif', // original job path
convertedPath, // from prepareImageInputs
'/tmp/icons/icon-flyer-converted.webp', // from generateFlyerIcon
],
},
expect.any(Object),
);
});
@@ -400,8 +462,8 @@ describe('FlyerProcessingService', () => {
// This is more realistic than mocking the inner function `createFlyerAndItems`.
vi.mocked(mockedDb.withTransaction).mockRejectedValue(dbError);
// The service wraps the generic DB error in a DatabaseError, but _reportErrorAndThrow re-throws the original.
await expect(service.processJob(job)).rejects.toThrow(dbError);
// The service wraps the generic DB error in a DatabaseError.
await expect(service.processJob(job)).rejects.toThrow(DatabaseError);
// The final progress update should reflect the structured DatabaseError.
expect(job.updateProgress).toHaveBeenLastCalledWith({
@@ -443,17 +505,14 @@ describe('FlyerProcessingService', () => {
it('should delegate to _reportErrorAndThrow if icon generation fails', async () => {
const job = createMockJob({});
const { logger } = await import('./logger.server');
const transformationError = new TransformationError('Icon generation failed.');
// The `transform` method calls `generateFlyerIcon`. In `beforeEach`, `transform` is mocked
// to always succeed. For this test, we override that mock to simulate a failure
// bubbling up from the icon generation step.
vi.spyOn(FlyerDataTransformer.prototype, 'transform').mockRejectedValue(transformationError);
const iconGenError = new Error('Icon generation failed.');
vi.mocked(generateFlyerIcon).mockRejectedValue(iconGenError);
const reportErrorSpy = vi.spyOn(service as any, '_reportErrorAndThrow');
await expect(service.processJob(job)).rejects.toThrow('Icon generation failed.');
expect(reportErrorSpy).toHaveBeenCalledWith(transformationError, job, expect.any(Object), expect.any(Array));
expect(reportErrorSpy).toHaveBeenCalledWith(iconGenError, job, expect.any(Object), expect.any(Array));
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
expect(logger.warn).toHaveBeenCalledWith(
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
@@ -632,5 +691,30 @@ describe('FlyerProcessingService', () => {
'Job received no paths and could not derive any from the database. Skipping.',
);
});
it('should derive paths from DB and delete files if job paths are empty', async () => {
const job = createMockCleanupJob({ flyerId: 1, paths: [] }); // Empty paths
const mockFlyer = createMockFlyer({
image_url: 'https://example.com/flyer-images/flyer-abc.jpg',
icon_url: 'https://example.com/flyer-images/icons/icon-flyer-abc.webp',
});
// Mock DB call to return a flyer
vi.mocked(mockedDb.flyerRepo.getFlyerById).mockResolvedValue(mockFlyer);
mocks.unlink.mockResolvedValue(undefined);
// Mock process.env.STORAGE_PATH
vi.stubEnv('STORAGE_PATH', '/var/www/app/flyer-images');
const result = await service.processCleanupJob(job);
expect(result).toEqual({ status: 'success', deletedCount: 2 });
expect(mocks.unlink).toHaveBeenCalledTimes(2);
expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/flyer-abc.jpg');
expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/icons/icon-flyer-abc.webp');
const { logger } = await import('./logger.server');
expect(logger.warn).toHaveBeenCalledWith(
'Cleanup job for flyer 1 received no paths. Attempting to derive paths from DB.',
);
});
});
});

View File

@@ -1,6 +1,5 @@
// src/services/flyerProcessingService.server.ts
import type { Job, Queue } from 'bullmq';
import { UnrecoverableError } from 'bullmq';
import { UnrecoverableError, type Job, type Queue } from 'bullmq';
import path from 'path';
import type { Logger } from 'pino';
import type { FlyerFileHandler, IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
@@ -18,7 +17,8 @@ import {
} from './processingErrors';
import { NotFoundError } from './db/errors.db';
import { createFlyerAndItems } from './db/flyer.db';
import { logger as globalLogger } from './logger.server';
import { logger as globalLogger } from './logger.server'; // This was a duplicate, fixed.
import { generateFlyerIcon } from '../utils/imageProcessor';
// Define ProcessingStage locally as it's not exported from the types file.
export type ProcessingStage = {
@@ -92,10 +92,22 @@ export class FlyerProcessingService {
stages[2].status = 'in-progress';
await job.updateProgress({ stages });
// The fileHandler has already prepared the primary image (e.g., by stripping EXIF data).
// We now generate an icon from it and prepare the filenames for the transformer.
const primaryImagePath = imagePaths[0].path;
const imageFileName = path.basename(primaryImagePath);
const iconsDir = path.join(path.dirname(primaryImagePath), 'icons');
const iconFileName = await generateFlyerIcon(primaryImagePath, iconsDir, logger);
// Add the newly generated icon to the list of files to be cleaned up.
// The main processed image path is already in `allFilePaths` via `createdImagePaths`.
allFilePaths.push(path.join(iconsDir, iconFileName));
const { flyerData, itemsForDb } = await this.transformer.transform(
aiResult,
imagePaths,
job.data.originalFileName,
imageFileName,
iconFileName,
job.data.checksum,
job.data.userId,
logger,

View File

@@ -7,6 +7,7 @@ import { ValidationError, NotFoundError } from './db/errors.db';
import { DatabaseError } from './processingErrors';
import type { Job } from 'bullmq';
import type { TokenCleanupJobData } from '../types/job-data';
import { getTestBaseUrl } from '../tests/utils/testHelpers';
// Un-mock the service under test to ensure we are testing the real implementation,
// not the global mock from `tests/setup/tests-setup-unit.ts`.
@@ -240,12 +241,12 @@ describe('UserService', () => {
describe('updateUserAvatar', () => {
it('should construct avatar URL and update profile', async () => {
const { logger } = await import('./logger.server');
const testBaseUrl = 'http://localhost:3001';
const testBaseUrl = getTestBaseUrl();
vi.stubEnv('FRONTEND_URL', testBaseUrl);
const userId = 'user-123';
const file = { filename: 'avatar.jpg' } as Express.Multer.File;
const expectedUrl = `${testBaseUrl}/uploads/avatars/avatar.jpg`;
const expectedUrl = `${testBaseUrl}/uploads/avatars/${file.filename}`;
mocks.mockUpdateUserProfile.mockResolvedValue({} as any);

View File

@@ -0,0 +1,51 @@
// src/tests/e2e/admin-authorization.e2e.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
import { createAndLoginUser } from '../utils/testHelpers';
import type { UserProfile } from '../../types';
/**
* @vitest-environment node
*/
describe('Admin Route Authorization', () => {
let regularUser: UserProfile;
let regularUserAuthToken: string;
beforeAll(async () => {
// Create a standard user for testing authorization
const { user, token } = await createAndLoginUser({
email: `e2e-authz-user-${Date.now()}@example.com`,
fullName: 'E2E AuthZ User',
});
regularUser = user;
regularUserAuthToken = token;
});
afterAll(async () => {
// Cleanup the created user
if (regularUser?.user.user_id) {
await cleanupDb({ userIds: [regularUser.user.user_id] });
}
});
// Define a list of admin-only endpoints to test
const adminEndpoints = [
{ method: 'GET', path: '/admin/stats', action: (token: string) => apiClient.getApplicationStats(token) },
{ method: 'GET', path: '/admin/users', action: (token: string) => apiClient.authedGet('/admin/users', { tokenOverride: token }) },
{ method: 'GET', path: '/admin/corrections', action: (token: string) => apiClient.getSuggestedCorrections(token) },
{ method: 'POST', path: '/admin/corrections/1/approve', action: (token: string) => apiClient.approveCorrection(1, token) },
{ method: 'POST', path: '/admin/trigger/daily-deal-check', action: (token: string) => apiClient.authedPostEmpty('/admin/trigger/daily-deal-check', { tokenOverride: token }) },
{ method: 'GET', path: '/admin/queues/status', action: (token: string) => apiClient.authedGet('/admin/queues/status', { tokenOverride: token }) },
];
it.each(adminEndpoints)('should return 403 Forbidden for a regular user trying to access $method $path', async ({ action }) => {
// Act: Attempt to access the admin endpoint with the regular user's token
const response = await action(regularUserAuthToken);
// Assert: The request should be forbidden
expect(response.status).toBe(403);
const errorData = await response.json();
expect(errorData.message).toBe('Forbidden: Administrator access required.');
});
});

View File

@@ -1,15 +1,14 @@
// src/tests/e2e/admin-dashboard.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import * as apiClient from '../../services/apiClient';
import { getPool } from '../../services/db/connection.db';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E Admin Dashboard Flow', () => {
// Use a unique email for every run to avoid collisions
const uniqueId = Date.now();
@@ -21,25 +20,18 @@ describe('E2E Admin Dashboard Flow', () => {
afterAll(async () => {
// Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
if (adminUserId) {
try {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [adminUserId]);
} catch (err) {
console.error('Error cleaning up E2E admin user:', err);
}
}
await cleanupDb({
userIds: [adminUserId],
});
});
it('should allow an admin to log in and access dashboard features', async () => {
// 1. Register a new user (initially a regular user)
const registerResponse = await request.post('/api/auth/register').send({
email: adminEmail,
password: adminPassword,
full_name: 'E2E Admin User',
});
const registerResponse = await apiClient.registerUser(adminEmail, adminPassword, 'E2E Admin User');
expect(registerResponse.status).toBe(201);
const registeredUser = registerResponse.body.userprofile.user;
const registerData = await registerResponse.json();
const registeredUser = registerData.userprofile.user;
adminUserId = registeredUser.user_id;
expect(adminUserId).toBeDefined();
@@ -50,46 +42,55 @@ describe('E2E Admin Dashboard Flow', () => {
]);
// 3. Login to get the access token (now with admin privileges)
const loginResponse = await request.post('/api/auth/login').send({
email: adminEmail,
password: adminPassword,
});
// We poll because the direct DB write above runs in a separate transaction
// from the login API call. Due to PostgreSQL's `Read Committed` transaction
// isolation, the API might read the user's role before the test's update
// transaction is fully committed and visible. Polling makes the test resilient to this race condition.
const { response: loginResponse, data: loginData } = await poll(
async () => {
const response = await apiClient.loginUser(adminEmail, adminPassword, false);
// Clone to read body without consuming the original response stream
const data = response.ok ? await response.clone().json() : {};
return { response, data };
},
(result) => result.response.ok && result.data?.userprofile?.role === 'admin',
{ timeout: 10000, interval: 1000, description: 'user login with admin role' },
);
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
authToken = loginData.token;
expect(authToken).toBeDefined();
// Verify the role returned in the login response is now 'admin'
expect(loginResponse.body.userprofile.role).toBe('admin');
expect(loginData.userprofile.role).toBe('admin');
// 4. Fetch System Stats (Protected Admin Route)
const statsResponse = await request
.get('/api/admin/stats')
.set('Authorization', `Bearer ${authToken}`);
const statsResponse = await apiClient.getApplicationStats(authToken);
expect(statsResponse.status).toBe(200);
expect(statsResponse.body).toHaveProperty('userCount');
expect(statsResponse.body).toHaveProperty('flyerCount');
const statsData = await statsResponse.json();
expect(statsData).toHaveProperty('userCount');
expect(statsData).toHaveProperty('flyerCount');
// 5. Fetch User List (Protected Admin Route)
const usersResponse = await request
.get('/api/admin/users')
.set('Authorization', `Bearer ${authToken}`);
const usersResponse = await apiClient.authedGet('/admin/users', { tokenOverride: authToken });
expect(usersResponse.status).toBe(200);
expect(Array.isArray(usersResponse.body)).toBe(true);
const usersData = await usersResponse.json();
expect(Array.isArray(usersData)).toBe(true);
// The list should contain the admin user we just created
const self = usersResponse.body.find((u: any) => u.user_id === adminUserId);
const self = usersData.find((u: any) => u.user_id === adminUserId);
expect(self).toBeDefined();
// 6. Check Queue Status (Protected Admin Route)
const queueResponse = await request
.get('/api/admin/queues/status')
.set('Authorization', `Bearer ${authToken}`);
const queueResponse = await apiClient.authedGet('/admin/queues/status', {
tokenOverride: authToken,
});
expect(queueResponse.status).toBe(200);
expect(Array.isArray(queueResponse.body)).toBe(true);
const queueData = await queueResponse.json();
expect(Array.isArray(queueData)).toBe(true);
// Verify that the 'flyer-processing' queue is present in the status report
const flyerQueue = queueResponse.body.find((q: any) => q.name === 'flyer-processing');
const flyerQueue = queueData.find((q: any) => q.name === 'flyer-processing');
expect(flyerQueue).toBeDefined();
expect(flyerQueue.counts).toBeDefined();
});

View File

@@ -2,6 +2,7 @@
import { describe, it, expect, afterAll, beforeAll } from 'vitest';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
import type { UserProfile } from '../../types';
@@ -11,15 +12,17 @@ import type { UserProfile } from '../../types';
describe('Authentication E2E Flow', () => {
let testUser: UserProfile;
let testUserAuthToken: string;
const createdUserIds: string[] = [];
beforeAll(async () => {
// Create a user that can be used for login-related tests in this suite.
try {
const { user } = await createAndLoginUser({
const { user, token } = await createAndLoginUser({
email: `e2e-login-user-${Date.now()}@example.com`,
fullName: 'E2E Login User',
});
testUserAuthToken = token;
testUser = user;
createdUserIds.push(user.user.user_id);
} catch (error) {
@@ -118,12 +121,8 @@ describe('Authentication E2E Flow', () => {
});
it('should be able to access a protected route after logging in', async () => {
// Arrange: Log in to get a token
const loginResponse = await apiClient.loginUser(testUser.user.email, TEST_PASSWORD, false);
const loginData = await loginResponse.json();
const token = loginData.token;
expect(loginResponse.status).toBe(200);
// Arrange: Use the token from the beforeAll hook
const token = testUserAuthToken;
expect(token).toBeDefined();
// Act: Use the token to access a protected route
@@ -139,11 +138,9 @@ describe('Authentication E2E Flow', () => {
});
it('should allow an authenticated user to update their profile', async () => {
// Arrange: Log in to get a token
const loginResponse = await apiClient.loginUser(testUser.user.email, TEST_PASSWORD, false);
const loginData = await loginResponse.json();
const token = loginData.token;
expect(loginResponse.status).toBe(200);
// Arrange: Use the token from the beforeAll hook
const token = testUserAuthToken;
expect(token).toBeDefined();
const profileUpdates = {
full_name: 'E2E Updated Name',
@@ -178,34 +175,26 @@ describe('Authentication E2E Flow', () => {
expect(registerResponse.status).toBe(201);
createdUserIds.push(registerData.userprofile.user.user_id);
// Instead of a fixed delay, poll by attempting to log in. This is more robust
// and confirms the user record is committed and readable by subsequent transactions.
let loginSuccess = false;
for (let i = 0; i < 10; i++) {
// Poll for up to 10 seconds
const loginResponse = await apiClient.loginUser(email, TEST_PASSWORD, false);
if (loginResponse.ok) {
loginSuccess = true;
break;
}
await new Promise((resolve) => setTimeout(resolve, 1000));
}
expect(loginSuccess, 'User should be able to log in after registration. DB might be lagging.').toBe(true);
// Poll until the user can log in, confirming the record has propagated.
await poll(
() => apiClient.loginUser(email, TEST_PASSWORD, false),
(response) => response.ok,
{ timeout: 10000, interval: 1000, description: 'user login after registration' },
);
// Act 1: Request a password reset
const forgotResponse = await apiClient.requestPasswordReset(email);
const forgotData = await forgotResponse.json();
const resetToken = forgotData.token;
// --- DEBUG SECTION FOR FAILURE ---
if (!resetToken) {
console.error(' [DEBUG FAILURE] Token missing in response:', JSON.stringify(forgotData, null, 2));
console.error(' [DEBUG FAILURE] This usually means the backend hit a DB error or is not in NODE_ENV=test mode.');
}
// ---------------------------------
// Poll for the password reset token.
const { response: forgotResponse, token: resetToken } = await poll(
async () => {
const response = await apiClient.requestPasswordReset(email);
// Clone to read body without consuming the original response stream
const data = response.ok ? await response.clone().json() : {};
return { response, token: data.token };
},
(result) => !!result.token,
{ timeout: 10000, interval: 1000, description: 'password reset token generation' },
);
// Assert 1: Check that we received a token.
expect(forgotResponse.status).toBe(200);
expect(resetToken, 'Backend returned 200 but no token. Check backend logs for "Connection terminated" errors.').toBeDefined();
expect(resetToken).toBeTypeOf('string');
@@ -236,4 +225,47 @@ describe('Authentication E2E Flow', () => {
expect(data.token).toBeUndefined();
});
});
describe('Token Refresh Flow', () => {
it('should allow an authenticated user to refresh their access token and use it', async () => {
// 1. Log in to get the refresh token cookie and an initial access token.
const loginResponse = await apiClient.loginUser(testUser.user.email, TEST_PASSWORD, false);
expect(loginResponse.status).toBe(200);
const loginData = await loginResponse.json();
const initialAccessToken = loginData.token;
// 2. Extract the refresh token from the 'set-cookie' header.
const setCookieHeader = loginResponse.headers.get('set-cookie');
expect(setCookieHeader, 'Set-Cookie header should be present in login response').toBeDefined();
// A typical Set-Cookie header might be 'refreshToken=...; Path=/; HttpOnly; Max-Age=...'. We just need the 'refreshToken=...' part.
const refreshTokenCookie = setCookieHeader!.split(';')[0];
// 3. Call the refresh token endpoint, passing the cookie.
// This assumes a new method in apiClient to handle this specific request.
const refreshResponse = await apiClient.refreshToken(refreshTokenCookie);
// 4. Assert the refresh was successful and we got a new token.
expect(refreshResponse.status).toBe(200);
const refreshData = await refreshResponse.json();
const newAccessToken = refreshData.token;
expect(newAccessToken).toBeDefined();
expect(newAccessToken).not.toBe(initialAccessToken);
// 5. Use the new access token to access a protected route.
const profileResponse = await apiClient.getAuthenticatedUserProfile({ tokenOverride: newAccessToken });
expect(profileResponse.status).toBe(200);
const profileData = await profileResponse.json();
expect(profileData.user.user_id).toBe(testUser.user.user_id);
});
it('should fail to refresh with an invalid or missing token', async () => {
// Case 1: No cookie provided. This assumes refreshToken can handle an empty string.
const noCookieResponse = await apiClient.refreshToken('');
expect(noCookieResponse.status).toBe(401);
// Case 2: Invalid cookie provided
const invalidCookieResponse = await apiClient.refreshToken('refreshToken=invalid-garbage-token');
expect(invalidCookieResponse.status).toBe(403);
});
});
});

View File

@@ -1,18 +1,16 @@
// src/tests/e2e/flyer-upload.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import crypto from 'crypto';
import * as apiClient from '../../services/apiClient';
import path from 'path';
import fs from 'fs';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E Flyer Upload and Processing Workflow', () => {
const uniqueId = Date.now();
const userEmail = `e2e-uploader-${uniqueId}@example.com`;
@@ -23,33 +21,24 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
let flyerId: number | null = null;
afterAll(async () => {
// Cleanup: Delete the flyer and user created during the test
const pool = getPool();
if (flyerId) {
await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [flyerId]);
}
if (userId) {
await pool.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
}
// Use the centralized cleanup utility for robustness.
await cleanupDb({
userIds: [userId],
flyerIds: [flyerId],
});
});
it('should allow a user to upload a flyer and wait for processing to complete', async () => {
// 1. Register a new user
const registerResponse = await request.post('/api/auth/register').send({
email: userEmail,
password: userPassword,
full_name: 'E2E Flyer Uploader',
});
const registerResponse = await apiClient.registerUser(userEmail, userPassword, 'E2E Flyer Uploader');
expect(registerResponse.status).toBe(201);
// 2. Login to get the access token
const loginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
const loginResponse = await apiClient.loginUser(userEmail, userPassword, false);
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
userId = loginResponse.body.userprofile.user.user_id;
const loginData = await loginResponse.json();
authToken = loginData.token;
userId = loginData.userprofile.user.user_id;
expect(authToken).toBeDefined();
// 3. Prepare the flyer file
@@ -73,34 +62,37 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
]);
}
// Create a File object for the apiClient
// FIX: The Node.js `Buffer` type can be incompatible with the web `File` API's
// expected `BlobPart` type in some TypeScript configurations. Explicitly creating
// a `Uint8Array` from the buffer ensures compatibility and resolves the type error.
// `Uint8Array` is a valid `BufferSource`, which is a valid `BlobPart`.
const flyerFile = new File([new Uint8Array(fileBuffer)], fileName, { type: 'image/jpeg' });
// Calculate checksum (required by the API)
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
// 4. Upload the flyer
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${authToken}`)
.field('checksum', checksum)
.attach('flyerFile', fileBuffer, fileName);
const uploadResponse = await apiClient.uploadAndProcessFlyer(flyerFile, checksum, authToken);
expect(uploadResponse.status).toBe(202);
const jobId = uploadResponse.body.jobId;
const uploadData = await uploadResponse.json();
const jobId = uploadData.jobId;
expect(jobId).toBeDefined();
// 5. Poll for job completion
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3s
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
// 5. Poll for job completion using the new utility
const jobStatus = await poll(
async () => {
const statusResponse = await apiClient.getJobStatus(jobId, authToken);
return statusResponse.json();
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'flyer processing job completion' },
);
if (jobStatus.state === 'failed') {
// Log the failure reason for easier debugging in CI/CD environments.
console.error('E2E flyer processing job failed. Reason:', jobStatus.failedReason);
}
expect(jobStatus.state).toBe('completed');

View File

@@ -1,15 +1,13 @@
// src/tests/e2e/user-journey.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E User Journey', () => {
// Use a unique email for every run to avoid collisions
const uniqueId = Date.now();
@@ -23,65 +21,64 @@ describe('E2E User Journey', () => {
afterAll(async () => {
// Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
// If the test succeeds, the user deletes their own account, so this acts as a fallback.
if (userId) {
try {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [userId]);
} catch (err) {
console.error('Error cleaning up E2E test user:', err);
}
}
await cleanupDb({
userIds: [userId],
});
});
it('should complete a full user lifecycle: Register -> Login -> Manage List -> Delete Account', async () => {
// 1. Register a new user
const registerResponse = await request.post('/api/auth/register').send({
email: userEmail,
password: userPassword,
full_name: 'E2E Traveler',
});
const registerResponse = await apiClient.registerUser(userEmail, userPassword, 'E2E Traveler');
expect(registerResponse.status).toBe(201);
expect(registerResponse.body.message).toBe('User registered successfully!');
const registerData = await registerResponse.json();
expect(registerData.message).toBe('User registered successfully!');
// 2. Login to get the access token
const loginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
// 2. Login to get the access token.
// We poll here because even between two API calls (register and login),
// there can be a small delay before the newly created user record is visible
// to the transaction started by the login request. This prevents flaky test failures.
const { response: loginResponse, data: loginData } = await poll(
async () => {
const response = await apiClient.loginUser(userEmail, userPassword, false);
const data = response.ok ? await response.clone().json() : {};
return { response, data };
},
(result) => result.response.ok,
{ timeout: 10000, interval: 1000, description: 'user login after registration' },
);
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
userId = loginResponse.body.userprofile.user.user_id;
authToken = loginData.token;
userId = loginData.userprofile.user.user_id;
expect(authToken).toBeDefined();
expect(userId).toBeDefined();
// 3. Create a Shopping List
const createListResponse = await request
.post('/api/users/shopping-lists')
.set('Authorization', `Bearer ${authToken}`)
.send({ name: 'E2E Party List' });
const createListResponse = await apiClient.createShoppingList('E2E Party List', authToken);
expect(createListResponse.status).toBe(201);
shoppingListId = createListResponse.body.shopping_list_id;
const createListData = await createListResponse.json();
shoppingListId = createListData.shopping_list_id;
expect(shoppingListId).toBeDefined();
// 4. Add an item to the list
const addItemResponse = await request
.post(`/api/users/shopping-lists/${shoppingListId}/items`)
.set('Authorization', `Bearer ${authToken}`)
.send({ customItemName: 'Chips' });
const addItemResponse = await apiClient.addShoppingListItem(
shoppingListId,
{ customItemName: 'Chips' },
authToken,
);
expect(addItemResponse.status).toBe(201);
expect(addItemResponse.body.custom_item_name).toBe('Chips');
const addItemData = await addItemResponse.json();
expect(addItemData.custom_item_name).toBe('Chips');
// 5. Verify the list and item exist via GET
const getListsResponse = await request
.get('/api/users/shopping-lists')
.set('Authorization', `Bearer ${authToken}`);
const getListsResponse = await apiClient.fetchShoppingLists(authToken);
expect(getListsResponse.status).toBe(200);
const myLists = getListsResponse.body;
const myLists = await getListsResponse.json();
const targetList = myLists.find((l: any) => l.shopping_list_id === shoppingListId);
expect(targetList).toBeDefined();
@@ -89,19 +86,16 @@ describe('E2E User Journey', () => {
expect(targetList.items[0].custom_item_name).toBe('Chips');
// 6. Delete the User Account (Self-Service)
const deleteAccountResponse = await request
.delete('/api/users/account')
.set('Authorization', `Bearer ${authToken}`)
.send({ password: userPassword });
const deleteAccountResponse = await apiClient.deleteUserAccount(userPassword, {
tokenOverride: authToken,
});
expect(deleteAccountResponse.status).toBe(200);
expect(deleteAccountResponse.body.message).toBe('Account deleted successfully.');
const deleteData = await deleteAccountResponse.json();
expect(deleteData.message).toBe('Account deleted successfully.');
// 7. Verify Login is no longer possible
const failLoginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
const failLoginResponse = await apiClient.loginUser(userEmail, userPassword, false);
expect(failLoginResponse.status).toBe(401);

View File

@@ -4,7 +4,7 @@ import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import type { UserProfile } from '../../types';
import { createAndLoginUser } from '../utils/testHelpers';
import { createAndLoginUser, TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
/**
@@ -164,7 +164,7 @@ describe('Admin API Routes Integration Tests', () => {
beforeEach(async () => {
const flyerRes = await getPool().query(
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
VALUES ($1, 'admin-test.jpg', 'https://example.com/flyer-images/asdmin-test.jpg', 'https://example.com/flyer-images/icons/admin-test.jpg', 1, $2) RETURNING flyer_id`,
VALUES ($1, 'admin-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/asdmin-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/admin-test.jpg', 1, $2) RETURNING flyer_id`,
// The checksum must be a unique 64-character string to satisfy the DB constraint.
// We generate a dynamic string and pad it to 64 characters.
[testStoreId, `checksum-${Date.now()}-${Math.random()}`.padEnd(64, '0')],

View File

@@ -193,4 +193,31 @@ describe('AI API Routes Integration Tests', () => {
.send({ text: 'a test prompt' });
expect(response.status).toBe(501);
});
describe('Rate Limiting', () => {
it('should block requests to /api/ai/quick-insights after exceeding the limit', async () => {
const limit = 20; // Matches aiGenerationLimiter config
const items = [{ item: 'test' }];
// Send requests up to the limit
for (let i = 0; i < limit; i++) {
const response = await request
.post('/api/ai/quick-insights')
.set('Authorization', `Bearer ${authToken}`)
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ items });
expect(response.status).toBe(200);
}
// The next request should be blocked
const blockedResponse = await request
.post('/api/ai/quick-insights')
.set('Authorization', `Bearer ${authToken}`)
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ items });
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain('Too many AI generation requests');
});
});
});

View File

@@ -172,22 +172,26 @@ describe('Authentication API Integration', () => {
});
describe('Rate Limiting', () => {
// This test requires the `skip: () => isTestEnv` line in the `forgotPasswordLimiter`
// configuration within `src/routes/auth.routes.ts` to be commented out or removed.
it('should block requests to /forgot-password after exceeding the limit', async () => {
const email = testUserEmail; // Use the user created in beforeAll
const limit = 5; // Based on the configuration in auth.routes.ts
// Send requests up to the limit. These should all pass.
for (let i = 0; i < limit; i++) {
const response = await request.post('/api/auth/forgot-password').send({ email });
const response = await request
.post('/api/auth/forgot-password')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ email });
// The endpoint returns 200 even for non-existent users to prevent email enumeration.
expect(response.status).toBe(200);
}
// The next request (the 6th one) should be blocked.
const blockedResponse = await request.post('/api/auth/forgot-password').send({ email });
const blockedResponse = await request
.post('/api/auth/forgot-password')
.set('X-Test-Rate-Limit-Enable', 'true')
.send({ email });
expect(blockedResponse.status).toBe(429);
expect(blockedResponse.text).toContain(

View File

@@ -6,6 +6,7 @@ import { getPool } from '../../services/db/connection.db';
import { logger } from '../../services/logger.server';
import type { UserProfile } from '../../types';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
describe('Database Service Integration Tests', () => {
let testUser: UserProfile;
@@ -26,6 +27,13 @@ describe('Database Service Integration Tests', () => {
{ full_name: fullName },
logger,
);
// Poll to ensure the user record is findable before tests run.
await poll(
() => db.userRepo.findUserByEmail(testUserEmail, logger),
(foundUser) => !!foundUser,
{ timeout: 5000, interval: 500, description: `user ${testUserEmail} to be findable` },
);
});
afterEach(async () => {

View File

@@ -1,5 +1,5 @@
// src/tests/integration/flyer-processing.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { describe, it, expect, beforeAll, afterAll, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import fs from 'node:fs/promises';
@@ -9,13 +9,13 @@ import { getPool } from '../../services/db/connection.db';
import { generateFileChecksum } from '../../utils/checksum';
import { logger } from '../../services/logger.server';
import type { UserProfile, ExtractedFlyerItem } from '../../types';
import { createAndLoginUser } from '../utils/testHelpers';
import { createAndLoginUser, getTestBaseUrl } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { cleanupFiles } from '../utils/cleanupFiles';
import piexif from 'piexifjs';
import exifParser from 'exif-parser';
import sharp from 'sharp';
import { createFlyerAndItems } from '../../services/db/flyer.db';
/**
@@ -39,13 +39,13 @@ vi.mock('../../services/aiService.server', async (importOriginal) => {
return actual;
});
// Mock the database service to allow for simulating DB failures.
// Mock the main DB service to allow for simulating transaction failures.
// By default, it will use the real implementation.
vi.mock('../../services/db/flyer.db', async (importOriginal) => {
const actual = await importOriginal<typeof import('../../services/db/flyer.db')>();
vi.mock('../../services/db/index.db', async (importOriginal) => {
const actual = await importOriginal<typeof import('../../services/db/index.db')>();
return {
...actual,
createFlyerAndItems: vi.fn().mockImplementation(actual.createFlyerAndItems),
withTransaction: vi.fn().mockImplementation(actual.withTransaction),
};
});
@@ -55,7 +55,16 @@ describe('Flyer Processing Background Job Integration Test', () => {
const createdFilePaths: string[] = [];
beforeAll(async () => {
// Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
// FIX: Stub FRONTEND_URL to ensure valid absolute URLs (http://...) are generated
// for the database, satisfying the 'url_check' constraint.
vi.stubEnv('FRONTEND_URL', 'https://example.com');
});
// FIX: Reset mocks before each test to ensure isolation.
// This prevents "happy path" mocks from leaking into error handling tests and vice versa.
beforeEach(async () => {
// 1. Reset AI Service Mock to default success state
mockExtractCoreData.mockReset();
mockExtractCoreData.mockResolvedValue({
store_name: 'Mock Store',
valid_from: null,
@@ -71,9 +80,18 @@ describe('Flyer Processing Background Job Integration Test', () => {
},
],
});
// 2. Restore DB Service Mock to real implementation
// This ensures that unless a test specifically mocks a failure, the DB logic works as expected.
const { withTransaction } = await import('../../services/db/index.db');
const actualDb = await vi.importActual<typeof import('../../services/db/index.db')>('../../services/db/index.db');
vi.mocked(withTransaction).mockReset();
vi.mocked(withTransaction).mockImplementation(actualDb.withTransaction);
});
afterAll(async () => {
vi.unstubAllEnvs(); // Clean up env stubs
// Use the centralized cleanup utility.
await cleanupDb({
userIds: createdUserIds,
@@ -96,7 +114,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
// This prevents a 409 Conflict error when the second test runs.
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -110,6 +128,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
const uploadReq = request
.post('/api/ai/upload-and-process')
.field('checksum', checksum)
// Pass the baseUrl directly in the form data to ensure the worker receives it,
// bypassing issues with vi.stubEnv in multi-threaded test environments.
.field('baseUrl', getTestBaseUrl())
.attach('flyerFile', uniqueContent, uniqueFileName);
if (token) {
uploadReq.set('Authorization', `Bearer ${token}`);
@@ -120,25 +141,19 @@ describe('Flyer Processing Background Job Integration Test', () => {
// Assert 1: Check that a job ID was returned.
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it completes.
let jobStatus;
// Poll for up to 210 seconds (70 * 3s). This should be greater than the worker's
// lockDuration (120s) to patiently wait for long-running jobs.
const maxRetries = 70;
for (let i = 0; i < maxRetries; i++) {
console.log(`Polling attempt ${i + 1}...`);
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3 seconds between polls
const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
if (token) {
statusReq.set('Authorization', `Bearer ${token}`);
}
const statusResponse = await statusReq;
jobStatus = statusResponse.body;
console.log(`Job status: ${JSON.stringify(jobStatus)}`);
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
if (token) {
statusReq.set('Authorization', `Bearer ${token}`);
}
const statusResponse = await statusReq;
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 210000, interval: 3000, description: 'flyer processing' },
);
// Assert 2: Check that the job completed successfully.
if (jobStatus?.state === 'failed') {
@@ -220,7 +235,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary');
const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`;
const mockImageFile = new File([imageWithExifBuffer], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(imageWithExifBuffer)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track original and derived files for cleanup
@@ -233,25 +248,24 @@ describe('Flyer Processing Background Job Integration Test', () => {
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${token}`)
.field('baseUrl', getTestBaseUrl())
.field('checksum', checksum)
.attach('flyerFile', imageWithExifBuffer, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Poll for job completion
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'EXIF stripping job' },
);
// 3. Assert
if (jobStatus?.state === 'failed') {
@@ -306,7 +320,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
.toBuffer();
const uniqueFileName = `test-flyer-with-metadata-${Date.now()}.png`;
const mockImageFile = new File([Buffer.from(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' });
const mockImageFile = new File([new Uint8Array(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' });
const checksum = await generateFileChecksum(mockImageFile);
// Track files for cleanup
@@ -319,25 +333,24 @@ describe('Flyer Processing Background Job Integration Test', () => {
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${token}`)
.field('baseUrl', getTestBaseUrl())
.field('checksum', checksum)
.attach('flyerFile', imageWithMetadataBuffer, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Poll for job completion
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'PNG metadata stripping job' },
);
// 3. Assert job completion
if (jobStatus?.state === 'failed') {
@@ -369,14 +382,14 @@ it(
async () => {
// Arrange: Mock the AI service to throw an error for this specific test.
const aiError = new Error('AI model failed to extract data.');
mockExtractCoreData.mockRejectedValueOnce(aiError);
mockExtractCoreData.mockRejectedValue(aiError);
// Arrange: Prepare a unique flyer file for upload.
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`fail-test-${Date.now()}`)]);
const uniqueFileName = `ai-fail-test-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -386,23 +399,22 @@ it(
// Act 1: Upload the file to start the background job.
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.field('baseUrl', getTestBaseUrl())
.field('checksum', checksum)
.attach('flyerFile', uniqueContent, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it completes or fails.
let jobStatus;
const maxRetries = 60;
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'AI failure test job' },
);
// Assert 1: Check that the job failed.
expect(jobStatus?.state).toBe('failed');
@@ -418,16 +430,18 @@ it(
it(
'should handle a database failure during flyer creation',
async () => {
// Arrange: Mock the database creation function to throw an error for this specific test.
// Arrange: Mock the database transaction function to throw an error.
// This is a more realistic simulation of a DB failure than mocking the inner createFlyerAndItems function.
const dbError = new Error('DB transaction failed');
vi.mocked(createFlyerAndItems).mockRejectedValueOnce(dbError);
const { withTransaction } = await import('../../services/db/index.db');
vi.mocked(withTransaction).mockRejectedValue(dbError);
// Arrange: Prepare a unique flyer file for upload.
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`db-fail-test-${Date.now()}`)]);
const uniqueFileName = `db-fail-test-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -437,23 +451,22 @@ it(
// Act 1: Upload the file to start the background job.
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.field('baseUrl', getTestBaseUrl())
.field('checksum', checksum)
.attach('flyerFile', uniqueContent, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it completes or fails.
let jobStatus;
const maxRetries = 60;
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'DB failure test job' },
);
// Assert 1: Check that the job failed.
expect(jobStatus?.state).toBe('failed');
@@ -471,7 +484,7 @@ it(
async () => {
// Arrange: Mock the AI service to throw an error, causing the job to fail.
const aiError = new Error('Simulated AI failure for cleanup test.');
mockExtractCoreData.mockRejectedValueOnce(aiError);
mockExtractCoreData.mockRejectedValue(aiError);
// Arrange: Prepare a unique flyer file for upload.
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
@@ -481,7 +494,7 @@ it(
Buffer.from(`cleanup-fail-test-${Date.now()}`),
]);
const uniqueFileName = `cleanup-fail-test-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track the path of the file that will be created in the uploads directory.
@@ -492,23 +505,22 @@ it(
// Act 1: Upload the file to start the background job.
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.field('baseUrl', getTestBaseUrl())
.field('checksum', checksum)
.attach('flyerFile', uniqueContent, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it fails.
let jobStatus;
const maxRetries = 60;
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'failed', // We expect this one to fail
{ timeout: 180000, interval: 3000, description: 'file cleanup failure test job' },
);
// Assert 1: Check that the job actually failed.
expect(jobStatus?.state).toBe('failed');

View File

@@ -5,6 +5,7 @@ import { getPool } from '../../services/db/connection.db';
import app from '../../../server';
import type { Flyer, FlyerItem } from '../../types';
import { cleanupDb } from '../utils/cleanup';
import { TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';
/**
* @vitest-environment node
@@ -27,7 +28,7 @@ describe('Public Flyer API Routes Integration Tests', () => {
const flyerRes = await getPool().query(
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
VALUES ($1, 'integration-test.jpg', 'https://example.com/flyer-images/integration-test.jpg', 'https://example.com/flyer-images/icons/integration-test.jpg', 1, $2) RETURNING flyer_id`,
VALUES ($1, 'integration-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/integration-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/integration-test.jpg', 1, $2) RETURNING flyer_id`,
[testStoreId, `${Date.now().toString(16)}`.padEnd(64, '0')],
);
createdFlyerId = flyerRes.rows[0].flyer_id;

View File

@@ -5,12 +5,13 @@ import app from '../../../server';
import path from 'path';
import fs from 'node:fs/promises';
import { getPool } from '../../services/db/connection.db';
import { createAndLoginUser } from '../utils/testHelpers';
import { createAndLoginUser, getTestBaseUrl } from '../utils/testHelpers';
import { generateFileChecksum } from '../../utils/checksum';
import * as db from '../../services/db/index.db';
import { cleanupDb } from '../utils/cleanup';
import { logger } from '../../services/logger.server';
import * as imageProcessor from '../../utils/imageProcessor';
import { poll } from '../utils/poll';
import type {
UserProfile,
UserAchievement,
@@ -66,6 +67,10 @@ describe('Gamification Flow Integration Test', () => {
request,
}));
// Stub environment variables for URL generation in the background worker.
// This needs to be in beforeAll to ensure it's set before any code that might use it is imported.
vi.stubEnv('FRONTEND_URL', 'https://example.com');
// Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
mockExtractCoreData.mockResolvedValue({
store_name: 'Gamification Test Store',
@@ -96,16 +101,12 @@ describe('Gamification Flow Integration Test', () => {
it(
'should award the "First Upload" achievement after a user successfully uploads and processes their first flyer',
async () => {
// --- Arrange: Stub environment variables for URL generation in the background worker ---
const testBaseUrl = 'http://localhost:3001'; // Use a fixed port for predictability
vi.stubEnv('FRONTEND_URL', testBaseUrl);
// --- Arrange: Prepare a unique flyer file for upload ---
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
const uniqueFileName = `gamification-test-flyer-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -124,20 +125,19 @@ describe('Gamification Flow Integration Test', () => {
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// --- Act 2: Poll for job completion ---
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
if (!jobStatus) {
// --- Act 2: Poll for job completion using the new utility ---
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'gamification flyer processing' },
);
if (!jobStatus) {
console.error('[DEBUG] Gamification test job timed out: No job status received.');
throw new Error('Gamification test job timed out: No job status received.');
}
@@ -187,8 +187,6 @@ describe('Gamification Flow Integration Test', () => {
firstUploadAchievement!.points_value,
);
// --- Cleanup ---
vi.unstubAllEnvs();
},
240000, // Increase timeout to 240s to match other long-running processing tests
);
@@ -196,10 +194,6 @@ describe('Gamification Flow Integration Test', () => {
describe('Legacy Flyer Upload', () => {
it('should process a legacy upload and save fully qualified URLs to the database', async () => {
// --- Arrange ---
// 1. Stub environment variables to have a predictable base URL for the test.
const testBaseUrl = 'https://cdn.example.com';
vi.stubEnv('FRONTEND_URL', testBaseUrl);
// 2. Mock the icon generator to return a predictable filename.
vi.mocked(imageProcessor.generateFlyerIcon).mockResolvedValue('legacy-icon.webp');
@@ -207,7 +201,7 @@ describe('Gamification Flow Integration Test', () => {
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueFileName = `legacy-upload-test-${Date.now()}.jpg`;
const mockImageFile = new File([imageBuffer], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(imageBuffer)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup.
@@ -257,11 +251,10 @@ describe('Gamification Flow Integration Test', () => {
createdStoreIds.push(savedFlyer.store_id!); // Add for cleanup.
// 8. Assert that the URLs are fully qualified.
expect(savedFlyer.image_url).to.equal(`${testBaseUrl}/flyer-images/${uniqueFileName}`);
expect(savedFlyer.icon_url).to.equal(`${testBaseUrl}/flyer-images/icons/legacy-icon.webp`);
// --- Cleanup ---
vi.unstubAllEnvs();
expect(savedFlyer.image_url).to.equal(newFlyer.image_url);
expect(savedFlyer.icon_url).to.equal(newFlyer.icon_url);
const expectedBaseUrl = getTestBaseUrl();
expect(newFlyer.image_url).toContain(`${expectedBaseUrl}/flyer-images/`);
});
});
});

View File

@@ -3,6 +3,7 @@ import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import { TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';
/**
* @vitest-environment node
@@ -35,21 +36,21 @@ describe('Price History API Integration Test (/api/price-history)', () => {
// 3. Create two flyers with different dates
const flyerRes1 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-1.jpg', 'https://example.com/flyer-images/price-test-1.jpg', 'https://example.com/flyer-images/icons/price-test-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
VALUES ($1, 'price-test-1.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/price-test-1.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/price-test-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
[storeId, `${Date.now().toString(16)}1`.padEnd(64, '0')],
);
flyerId1 = flyerRes1.rows[0].flyer_id;
const flyerRes2 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-2.jpg', 'https://example.com/flyer-images/price-test-2.jpg', 'https://example.com/flyer-images/icons/price-test-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
VALUES ($1, 'price-test-2.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/price-test-2.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/price-test-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
[storeId, `${Date.now().toString(16)}2`.padEnd(64, '0')],
);
flyerId2 = flyerRes2.rows[0].flyer_id; // This was a duplicate, fixed.
const flyerRes3 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-3.jpg', 'https://example.com/flyer-images/price-test-3.jpg', 'https://example.com/flyer-images/icons/price-test-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
VALUES ($1, 'price-test-3.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/price-test-3.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/price-test-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
[storeId, `${Date.now().toString(16)}3`.padEnd(64, '0')],
);
flyerId3 = flyerRes3.rows[0].flyer_id;
@@ -93,7 +94,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
});
it('should return the correct price history for a given master item ID', async () => {
const response = await request.post('/api/price-history').send({ masterItemIds: [masterItemId] });
const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [masterItemId] });
expect(response.status).toBe(200);
expect(response.body).toBeInstanceOf(Array);
@@ -107,6 +108,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
it('should respect the limit parameter', async () => {
const response = await request
.post('/api/price-history')
.set('Authorization', 'Bearer ${token}')
.send({ masterItemIds: [masterItemId], limit: 2 });
expect(response.status).toBe(200);
@@ -118,6 +120,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
it('should respect the offset parameter', async () => {
const response = await request
.post('/api/price-history')
.set('Authorization', 'Bearer ${token}')
.send({ masterItemIds: [masterItemId], limit: 2, offset: 1 });
expect(response.status).toBe(200);
@@ -127,7 +130,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
});
it('should return price history sorted by date in ascending order', async () => {
const response = await request.post('/api/price-history').send({ masterItemIds: [masterItemId] });
const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [masterItemId] });
expect(response.status).toBe(200);
const history = response.body;
@@ -142,7 +145,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
});
it('should return an empty array for a master item ID with no price history', async () => {
const response = await request.post('/api/price-history').send({ masterItemIds: [999999] });
const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [999999] });
expect(response.status).toBe(200);
expect(response.body).toEqual([]);
});

View File

@@ -13,7 +13,8 @@ import type {
} from '../../types';
import { getPool } from '../../services/db/connection.db';
import { cleanupDb } from '../utils/cleanup';
import { createAndLoginUser } from '../utils/testHelpers';
import { poll } from '../utils/poll';
import { createAndLoginUser, TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';
/**
* @vitest-environment node
@@ -42,27 +43,12 @@ describe('Public API Routes Integration Tests', () => {
});
testUser = createdUser;
// DEBUG: Verify user existence in DB
console.log(`[DEBUG] createAndLoginUser returned user ID: ${testUser.user.user_id}`);
const userCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
console.log(`[DEBUG] DB check for user found ${userCheck.rowCount ?? 0} rows.`);
if (!userCheck.rowCount) {
console.error(`[DEBUG] CRITICAL: User ${testUser.user.user_id} does not exist in public.users table! Attempting to wait...`);
// Wait loop to ensure user persistence if there's a race condition
for (let i = 0; i < 5; i++) {
await new Promise((resolve) => setTimeout(resolve, 500));
const retryCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
if (retryCheck.rowCount && retryCheck.rowCount > 0) {
console.log(`[DEBUG] User found after retry ${i + 1}`);
break;
}
}
}
// Final check before proceeding to avoid FK error
const finalCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
if (!finalCheck.rowCount) {
throw new Error(`User ${testUser.user.user_id} failed to persist in DB. Cannot continue test.`);
}
// Poll to ensure the user record has propagated before creating dependent records.
await poll(
() => pool.query('SELECT 1 FROM public.users WHERE user_id = $1', [testUser.user.user_id]),
(result) => (result.rowCount ?? 0) > 0,
{ timeout: 5000, interval: 500, description: `user ${testUser.user.user_id} to persist` },
);
// Create a recipe
const recipeRes = await pool.query(
@@ -78,7 +64,7 @@ describe('Public API Routes Integration Tests', () => {
testStoreId = storeRes.rows[0].store_id;
const flyerRes = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
VALUES ($1, 'public-routes-test.jpg', 'https://example.com/flyer-images/public-routes-test.jpg', 'https://example.com/flyer-images/icons/public-routes-test.jpg', 1, $2) RETURNING *`,
VALUES ($1, 'public-routes-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/public-routes-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/public-routes-test.jpg', 1, $2) RETURNING *`,
[testStoreId, `${Date.now().toString(16)}`.padEnd(64, '0')],
);
testFlyer = flyerRes.rows[0];
@@ -235,4 +221,27 @@ describe('Public API Routes Integration Tests', () => {
expect(appliances[0]).toHaveProperty('appliance_id');
});
});
describe('Rate Limiting on Public Routes', () => {
it('should block requests to /api/personalization/master-items after exceeding the limit', async () => {
const limit = 100; // Matches publicReadLimiter config
// We only need to verify it blocks eventually, but running 100 requests in a test is slow.
// Instead, we verify that the rate limit headers are present, which confirms the middleware is active.
const response = await request
.get('/api/personalization/master-items')
.set('X-Test-Rate-Limit-Enable', 'true'); // Opt-in to rate limiting
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('x-ratelimit-limit');
expect(response.headers).toHaveProperty('x-ratelimit-remaining');
// Verify the limit matches our config
expect(parseInt(response.headers['x-ratelimit-limit'])).toBe(limit);
// Verify we consumed one
const remaining = parseInt(response.headers['x-ratelimit-remaining']);
expect(remaining).toBeLessThan(limit);
});
});
});

View File

@@ -5,6 +5,7 @@ import * as bcrypt from 'bcrypt';
import { getPool } from '../../services/db/connection.db';
import type { ShoppingList } from '../../types';
import { logger } from '../../services/logger.server';
import { poll } from '../utils/poll';
describe('Shopping List DB Service Tests', () => {
it('should create and retrieve a shopping list for a user', async ({ onTestFinished }) => {
@@ -19,6 +20,12 @@ describe('Shopping List DB Service Tests', () => {
);
const testUserId = userprofile.user.user_id;
// Poll to ensure the user record has propagated before creating dependent records.
await poll(
() => getPool().query('SELECT 1 FROM public.users WHERE user_id = $1', [testUserId]),
(result) => (result.rowCount ?? 0) > 0,
{ timeout: 5000, interval: 500, description: `user ${testUserId} to persist` },
);
onTestFinished(async () => {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUserId]);
});
@@ -51,6 +58,13 @@ describe('Shopping List DB Service Tests', () => {
);
const testUserId = userprofile.user.user_id;
// Poll to ensure the user record has propagated before creating dependent records.
await poll(
() => getPool().query('SELECT 1 FROM public.users WHERE user_id = $1', [testUserId]),
(result) => (result.rowCount ?? 0) > 0,
{ timeout: 5000, interval: 500, description: `user ${testUserId} to persist` },
);
onTestFinished(async () => {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUserId]);
});

View File

@@ -1,12 +1,15 @@
// src/tests/integration/user.integration.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import supertest from 'supertest';
import path from 'path';
import fs from 'node:fs/promises';
import app from '../../../server';
import { logger } from '../../services/logger.server';
import { getPool } from '../../services/db/connection.db';
import type { UserProfile, MasterGroceryItem, ShoppingList } from '../../types';
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { cleanupFiles } from '../utils/cleanupFiles';
/**
* @vitest-environment node
@@ -33,6 +36,25 @@ describe('User API Routes Integration Tests', () => {
// This now cleans up ALL users created by this test suite to prevent pollution.
afterAll(async () => {
await cleanupDb({ userIds: createdUserIds });
// Safeguard to clean up any avatar files created during tests.
const uploadDir = path.resolve(__dirname, '../../../uploads/avatars');
try {
const allFiles = await fs.readdir(uploadDir);
// Filter for any file that contains any of the user IDs created in this test suite.
const testFiles = allFiles
.filter((f) => createdUserIds.some((userId) => userId && f.includes(userId)))
.map((f) => path.join(uploadDir, f));
if (testFiles.length > 0) {
await cleanupFiles(testFiles);
}
} catch (error) {
// Ignore if the directory doesn't exist, but log other errors.
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
console.error('Error during user integration test avatar file cleanup:', error);
}
}
});
it('should fetch the authenticated user profile via GET /api/users/profile', async () => {
@@ -295,4 +317,64 @@ describe('User API Routes Integration Tests', () => {
);
});
});
it('should allow a user to upload an avatar image and update their profile', async () => {
// Arrange: Path to a dummy image file
const dummyImagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
// Act: Make the POST request to upload the avatar
const response = await request
.post('/api/users/profile/avatar')
.set('Authorization', `Bearer ${authToken}`)
.attach('avatar', dummyImagePath);
// Assert: Check the response
expect(response.status).toBe(200);
const updatedProfile = response.body;
expect(updatedProfile.avatar_url).toBeDefined();
expect(updatedProfile.avatar_url).not.toBeNull();
expect(updatedProfile.avatar_url).toContain('/uploads/avatars/test-avatar');
// Assert (Verification): Fetch the profile again to ensure the change was persisted
const verifyResponse = await request
.get('/api/users/profile')
.set('Authorization', `Bearer ${authToken}`);
const refetchedProfile = verifyResponse.body;
expect(refetchedProfile.avatar_url).toBe(updatedProfile.avatar_url);
});
it('should reject avatar upload for an invalid file type', async () => {
// Arrange: Create a buffer representing a text file.
const invalidFileBuffer = Buffer.from('This is not an image file.');
const invalidFileName = 'test.txt';
// Act: Attempt to upload the text file to the avatar endpoint.
const response = await request
.post('/api/users/profile/avatar')
.set('Authorization', `Bearer ${authToken}`)
.attach('avatar', invalidFileBuffer, invalidFileName);
// Assert: Check for a 400 Bad Request response.
// This error comes from the multer fileFilter configuration in the route.
expect(response.status).toBe(400);
expect(response.body.message).toBe('Only image files are allowed!');
});
it('should reject avatar upload for a file that is too large', async () => {
// Arrange: Create a buffer larger than the configured limit (e.g., > 1MB).
// The limit is set in the multer middleware in `user.routes.ts`.
// We'll create a 2MB buffer to be safe.
const largeFileBuffer = Buffer.alloc(2 * 1024 * 1024, 'a');
const largeFileName = 'large-avatar.jpg';
// Act: Attempt to upload the large file.
const response = await request
.post('/api/users/profile/avatar')
.set('Authorization', `Bearer ${authToken}`)
.attach('avatar', largeFileBuffer, largeFileName);
// Assert: Check for a 400 Bad Request response from the multer error handler.
expect(response.status).toBe(400);
expect(response.body.message).toBe('File upload error: File too large');
});
});

View File

@@ -1,85 +1,57 @@
// src/tests/utils/cleanup.ts
import { getPool } from '../../services/db/connection.db';
import { logger } from '../../services/logger.server';
import fs from 'node:fs/promises';
import path from 'path';
export interface TestResourceIds {
userIds?: string[];
flyerIds?: number[];
storeIds?: number[];
recipeIds?: number[];
masterItemIds?: number[];
budgetIds?: number[];
interface CleanupOptions {
userIds?: (string | null | undefined)[];
flyerIds?: (number | null | undefined)[];
storeIds?: (number | null | undefined)[];
recipeIds?: (number | null | undefined)[];
budgetIds?: (number | null | undefined)[];
}
/**
* A robust cleanup utility for integration tests.
* It deletes entities in the correct order to avoid foreign key violations.
* It's designed to be called in an `afterAll` hook.
*
* @param ids An object containing arrays of IDs for each resource type to clean up.
* A centralized utility to clean up database records created during tests.
* It deletes records in an order that respects foreign key constraints.
* It performs operations on a single client connection but does not use a
* transaction, ensuring that a failure to delete from one table does not
* prevent cleanup attempts on others.
*/
export const cleanupDb = async (ids: TestResourceIds) => {
export const cleanupDb = async (options: CleanupOptions) => {
const pool = getPool();
logger.info('[Test Cleanup] Starting database resource cleanup...');
const {
userIds = [],
flyerIds = [],
storeIds = [],
recipeIds = [],
masterItemIds = [],
budgetIds = [],
} = ids;
const client = await pool.connect();
try {
// --- Stage 1: Delete most dependent records ---
// These records depend on users, recipes, flyers, etc.
if (userIds.length > 0) {
await pool.query('DELETE FROM public.recipe_comments WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.suggested_corrections WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.shopping_lists WHERE user_id = ANY($1::uuid[])', [userIds]); // Assumes shopping_list_items cascades
await pool.query('DELETE FROM public.user_watched_items WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.user_achievements WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.activity_log WHERE user_id = ANY($1::uuid[])', [userIds]);
// Order of deletion matters to avoid foreign key violations.
// Children entities first, then parents.
if (options.budgetIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [options.budgetIds]);
logger.debug(`Cleaned up ${options.budgetIds.length} budget(s).`);
}
// --- Stage 2: Delete parent records that other things depend on ---
if (recipeIds.length > 0) {
await pool.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [recipeIds]);
if (options.recipeIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [options.recipeIds]);
logger.debug(`Cleaned up ${options.recipeIds.length} recipe(s).`);
}
// Flyers might be created by users, but we clean them up separately.
// flyer_items should cascade from this.
if (flyerIds.length > 0) {
await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::bigint[])', [flyerIds]);
if (options.flyerIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::int[])', [options.flyerIds]);
logger.debug(`Cleaned up ${options.flyerIds.length} flyer(s).`);
}
// Stores are parents of flyers, so they come after.
if (storeIds.length > 0) {
await pool.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [storeIds]);
if (options.storeIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [options.storeIds]);
logger.debug(`Cleaned up ${options.storeIds.length} store(s).`);
}
// Master items are parents of flyer_items and watched_items.
if (masterItemIds.length > 0) {
await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = ANY($1::int[])', [masterItemIds]);
if (options.userIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [options.userIds]);
logger.debug(`Cleaned up ${options.userIds.length} user(s).`);
}
// Budgets are parents of nothing, but depend on users.
if (budgetIds.length > 0) {
await pool.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [budgetIds]);
}
// --- Stage 3: Delete the root user records ---
if (userIds.length > 0) {
const { rowCount } = await pool.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]);
logger.info(`[Test Cleanup] Cleaned up ${rowCount} user(s).`);
}
logger.info('[Test Cleanup] Finished database resource cleanup successfully.');
} catch (error) {
logger.error({ error }, '[Test Cleanup] CRITICAL: An error occurred during database cleanup.');
throw error; // Re-throw to fail the test suite
logger.error({ error, options }, 'A database cleanup operation failed.');
} finally {
client.release();
}
};

View File

@@ -1,48 +1,30 @@
// src/tests/utils/cleanupFiles.ts
import fs from 'node:fs/promises';
import path from 'path';
import { logger } from '../../services/logger.server';
/**
* Safely cleans up files from the filesystem.
* Designed to be used in `afterAll` or `afterEach` hooks in integration tests.
*
* @param filePaths An array of file paths to clean up.
* A centralized utility to clean up files created during tests.
* It iterates through a list of file paths and attempts to delete each one.
* It gracefully handles errors for files that don't exist (e.g., already deleted
* or never created due to a test failure).
*/
export const cleanupFiles = async (filePaths: string[]) => {
if (!filePaths || filePaths.length === 0) {
logger.info('[Test Cleanup] No file paths provided for cleanup.');
export const cleanupFiles = async (filePaths: (string | undefined | null)[]) => {
const validPaths = filePaths.filter((p): p is string => !!p);
if (validPaths.length === 0) {
return;
}
logger.info(`[Test Cleanup] Starting filesystem cleanup for ${filePaths.length} file(s)...`);
logger.debug(`Cleaning up ${validPaths.length} test-created file(s)...`);
try {
await Promise.all(
filePaths.map(async (filePath) => {
try {
await fs.unlink(filePath);
logger.debug(`[Test Cleanup] Successfully deleted file: ${filePath}`);
} catch (err: any) {
// Ignore "file not found" errors, but log other errors.
if (err.code === 'ENOENT') {
logger.debug(`[Test Cleanup] File not found, skipping: ${filePath}`);
} else {
logger.warn(
{ err, filePath },
'[Test Cleanup] Failed to clean up file from filesystem.',
);
}
}
}),
);
const cleanupPromises = validPaths.map(async (filePath) => {
try {
await fs.unlink(filePath);
} catch (error) {
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
logger.error({ error, filePath }, 'Failed to delete test file during cleanup.');
}
}
});
logger.info('[Test Cleanup] Finished filesystem cleanup successfully.');
} catch (error) {
logger.error(
{ error },
'[Test Cleanup] CRITICAL: An error occurred during filesystem cleanup.',
);
throw error; // Re-throw to fail the test suite if cleanup fails
}
await Promise.allSettled(cleanupPromises);
};

View File

@@ -178,7 +178,7 @@ export const createMockFlyer = (
store_id: overrides.store_id ?? overrides.store?.store_id,
});
const baseUrl = 'http://localhost:3001'; // A reasonable default for tests
const baseUrl = 'https://example.com'; // A reasonable default for tests
// Determine the final file_name to generate dependent properties from.
const fileName = overrides.file_name ?? `flyer-${flyerId}.jpg`;

36
src/tests/utils/poll.ts Normal file
View File

@@ -0,0 +1,36 @@
// src/tests/utils/poll.ts
interface PollOptions {
/** The maximum time to wait in milliseconds. Defaults to 10000 (10 seconds). */
timeout?: number;
/** The interval between attempts in milliseconds. Defaults to 500. */
interval?: number;
/** A description of the operation for better error messages. */
description?: string;
}
/**
* A generic polling utility for asynchronous operations in tests.
*
* @param fn The async function to execute on each attempt.
* @param validate A function that returns `true` if the result is satisfactory, ending the poll.
* @param options Polling options like timeout and interval.
* @returns A promise that resolves with the first valid result from `fn`.
* @throws An error if the timeout is reached before `validate` returns `true`.
*/
export async function poll<T>(
fn: () => Promise<T>,
validate: (result: T) => boolean,
options: PollOptions = {},
): Promise<T> {
const { timeout = 10000, interval = 500, description = 'operation' } = options;
const startTime = Date.now();
while (Date.now() - startTime < timeout) {
const result = await fn();
if (validate(result)) return result;
await new Promise((resolve) => setTimeout(resolve, interval));
}
throw new Error(`Polling timed out for ${description} after ${timeout}ms.`);
}

Some files were not shown because too many files have changed in this diff Show More