Compare commits

...

20 Commits

Author SHA1 Message Date
Gitea Actions
c46efe1474 ci: Bump version to 0.9.76 [skip ci] 2026-01-10 06:59:56 +05:00
25d6b76f6d ADR-026: Client-Side Logging + linting fixes
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Has been cancelled
2026-01-09 17:58:21 -08:00
Gitea Actions
9ffcc9d65d ci: Bump version to 0.9.75 [skip ci] 2026-01-10 03:25:25 +05:00
1285702210 adr-028 fixes for tests
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 15m38s
2026-01-09 14:24:20 -08:00
Gitea Actions
d38b751b40 ci: Bump version to 0.9.74 [skip ci] 2026-01-10 03:14:12 +05:00
e122d55ced adr-028 fixes for tests
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 1m1s
2026-01-09 14:12:48 -08:00
Gitea Actions
af9992f773 ci: Bump version to 0.9.73 [skip ci] 2026-01-10 01:54:56 +05:00
3912139273 adr-028 and int tests
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 16m24s
2026-01-09 12:47:41 -08:00
b5f7f5e4d1 adr-0028 and int test fixes 2026-01-09 12:35:55 -08:00
Gitea Actions
5173059621 ci: Bump version to 0.9.72 [skip ci] 2026-01-10 00:46:09 +05:00
ebceb0e2e3 just work
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 20m51s
2026-01-09 11:45:03 -08:00
e75054b1ab ADR work, dockerfile work, integration test fixes 2026-01-09 11:45:00 -08:00
Gitea Actions
639313485a ci: Bump version to 0.9.71 [skip ci] 2026-01-09 19:00:01 +05:00
4a04e478c4 integration test fixes - claude for the win? try 4 - i have a good feeling
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 16m58s
2026-01-09 05:56:19 -08:00
Gitea Actions
1814469eb4 ci: Bump version to 0.9.70 [skip ci] 2026-01-09 18:19:13 +05:00
b777430ff7 integration test fixes - claude for the win? try 4 - i have a good feeling
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Has been cancelled
2026-01-09 05:18:19 -08:00
Gitea Actions
23830c0d4e ci: Bump version to 0.9.69 [skip ci] 2026-01-09 17:24:00 +05:00
ef42fee982 integration test fixes - claude for the win? try 3
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 32m3s
2026-01-09 04:23:23 -08:00
Gitea Actions
65cb54500c ci: Bump version to 0.9.68 [skip ci] 2026-01-09 16:42:51 +05:00
664ad291be integration test fixes - claude for the win? try 3
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 30m3s
2026-01-09 03:41:57 -08:00
137 changed files with 10537 additions and 1755 deletions

View File

@@ -56,7 +56,30 @@
"mcp__memory__delete_entities",
"mcp__sequential-thinking__sequentialthinking",
"mcp__filesystem__list_directory",
"mcp__filesystem__read_multiple_files"
"mcp__filesystem__read_multiple_files",
"mcp__filesystem__directory_tree",
"mcp__filesystem__read_text_file",
"Bash(wc:*)",
"Bash(npm install:*)",
"Bash(git grep:*)",
"Bash(findstr:*)",
"Bash(git add:*)",
"mcp__filesystem__write_file",
"mcp__podman__container_list",
"Bash(podman cp:*)",
"mcp__podman__container_inspect",
"mcp__podman__network_list",
"Bash(podman network connect:*)",
"Bash(npm run build:*)",
"Bash(set NODE_ENV=test)",
"Bash(podman-compose:*)",
"Bash(timeout 60 podman machine start:*)",
"Bash(podman build:*)",
"Bash(podman network rm:*)",
"Bash(npm run lint)",
"Bash(npm run typecheck:*)",
"Bash(npm run type-check:*)",
"Bash(npm run test:unit:*)"
]
}
}

View File

@@ -1,18 +1,96 @@
{
// ============================================================================
// VS CODE DEV CONTAINER CONFIGURATION
// ============================================================================
// This file configures VS Code's Dev Containers extension to provide a
// consistent, fully-configured development environment.
//
// Features:
// - Automatic PostgreSQL + Redis startup with healthchecks
// - Automatic npm install
// - Automatic database schema initialization and seeding
// - Pre-configured VS Code extensions (ESLint, Prettier)
// - Podman support for Windows users
//
// Usage:
// 1. Install the "Dev Containers" extension in VS Code
// 2. Open this project folder
// 3. Click "Reopen in Container" when prompted (or use Command Palette)
// 4. Wait for container build and initialization
// 5. Development server starts automatically
// ============================================================================
"name": "Flyer Crawler Dev (Ubuntu 22.04)",
// Use Docker Compose for multi-container setup
"dockerComposeFile": ["../compose.dev.yml"],
"service": "app",
"workspaceFolder": "/app",
// VS Code customizations
"customizations": {
"vscode": {
"extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
"extensions": [
// Code quality
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
// TypeScript
"ms-vscode.vscode-typescript-next",
// Database
"mtxr.sqltools",
"mtxr.sqltools-driver-pg",
// Utilities
"eamodio.gitlens",
"streetsidesoftware.code-spell-checker"
],
"settings": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"typescript.preferences.importModuleSpecifier": "relative"
}
}
},
// Run as root (required for npm global installs)
"remoteUser": "root",
// Automatically install dependencies when the container is created.
// This runs inside the container, populating the isolated node_modules volume.
"postCreateCommand": "npm install",
// ============================================================================
// Lifecycle Commands
// ============================================================================
// initializeCommand: Runs on the HOST before the container is created.
// Starts Podman machine on Windows (no-op if already running or using Docker).
"initializeCommand": "powershell -Command \"podman machine start; exit 0\"",
// postCreateCommand: Runs ONCE when the container is first created.
// This is where we do full initialization: npm install + database setup.
"postCreateCommand": "chmod +x scripts/docker-init.sh && ./scripts/docker-init.sh",
// postAttachCommand: Runs EVERY TIME VS Code attaches to the container.
// Starts the development server automatically.
"postAttachCommand": "npm run dev:container",
// Try to start podman machine, but exit with success (0) even if it's already running
"initializeCommand": "powershell -Command \"podman machine start; exit 0\""
// ============================================================================
// Port Forwarding
// ============================================================================
// Automatically forward these ports from the container to the host
"forwardPorts": [3000, 3001],
// Labels for forwarded ports in VS Code's Ports panel
"portsAttributes": {
"3000": {
"label": "Frontend (Vite)",
"onAutoForward": "notify"
},
"3001": {
"label": "Backend API",
"onAutoForward": "notify"
}
},
// ============================================================================
// Features
// ============================================================================
// Additional dev container features (optional)
"features": {}
}

77
.env.example Normal file
View File

@@ -0,0 +1,77 @@
# .env.example
# ============================================================================
# ENVIRONMENT VARIABLES TEMPLATE
# ============================================================================
# Copy this file to .env and fill in your values.
# For local development with Docker/Podman, these defaults should work out of the box.
#
# IMPORTANT: Never commit .env files with real credentials to version control!
# ============================================================================
# ===================
# Database Configuration
# ===================
# PostgreSQL connection settings
# For container development, use the service name "postgres"
DB_HOST=postgres
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=flyer_crawler_dev
# ===================
# Redis Configuration
# ===================
# Redis URL for caching and job queues
# For container development, use the service name "redis"
REDIS_URL=redis://redis:6379
# Optional: Redis password (leave empty if not required)
REDIS_PASSWORD=
# ===================
# Application Settings
# ===================
NODE_ENV=development
# Frontend URL for CORS and email links
FRONTEND_URL=http://localhost:3000
# ===================
# Authentication
# ===================
# REQUIRED: Secret key for signing JWT tokens (generate a random 64+ character string)
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production
# ===================
# AI/ML Services
# ===================
# REQUIRED: Google Gemini API key for flyer OCR processing
GEMINI_API_KEY=your-gemini-api-key
# ===================
# External APIs
# ===================
# Optional: Google Maps API key for geocoding store addresses
GOOGLE_MAPS_API_KEY=
# ===================
# Email Configuration (Optional)
# ===================
# SMTP settings for sending emails (deal notifications, password reset)
SMTP_HOST=
SMTP_PORT=587
SMTP_SECURE=false
SMTP_USER=
SMTP_PASS=
SMTP_FROM_EMAIL=noreply@example.com
# ===================
# Worker Configuration (Optional)
# ===================
# Concurrency settings for background job workers
WORKER_CONCURRENCY=1
EMAIL_WORKER_CONCURRENCY=10
ANALYTICS_WORKER_CONCURRENCY=1
CLEANUP_WORKER_CONCURRENCY=10
# Worker lock duration in milliseconds (default: 2 minutes)
WORKER_LOCK_DURATION=120000

6
.env.test Normal file
View File

@@ -0,0 +1,6 @@
DB_HOST=10.89.0.4
DB_USER=flyer
DB_PASSWORD=flyer
DB_NAME=flyer_crawler_test
REDIS_URL=redis://redis:6379
NODE_ENV=test

View File

@@ -1,61 +1,66 @@
{
"mcpServers": {
"markitdown": {
"command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
"args": [
"markitdown-mcp"
]
},
"gitea-torbonium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.torbonium.com",
"GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
}
},
"gitea-lan": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.torbolan.com",
"GITEA_ACCESS_TOKEN": "REPLACE_WITH_NEW_TOKEN"
}
},
"gitea-projectium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.projectium.com",
"GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
}
},
"podman": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "podman-mcp-server@latest"],
"env": {
"DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
}
},
"filesystem": {
"command": "D:\\nodejs\\npx.cmd",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"D:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
]
},
"fetch": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "@modelcontextprotocol/server-fetch"]
},
"sequential-thinking": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
},
"memory": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "@modelcontextprotocol/server-memory"]
}
}
}
"mcpServers": {
"gitea-projectium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.projectium.com",
"GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
}
},
"gitea-torbonium": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.torbonium.com",
"GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
}
},
"gitea-lan": {
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
"args": ["run", "-t", "stdio"],
"env": {
"GITEA_HOST": "https://gitea.torbolan.com",
"GITEA_ACCESS_TOKEN": "YOUR_LAN_TOKEN_HERE"
},
"disabled": true
},
"podman": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "podman-mcp-server@latest"],
"env": {
"DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
}
},
"filesystem": {
"command": "d:\\nodejs\\node.exe",
"args": [
"c:\\Users\\games3\\AppData\\Roaming\\npm\\node_modules\\@modelcontextprotocol\\server-filesystem\\dist\\index.js",
"d:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
]
},
"fetch": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "@modelcontextprotocol/server-fetch"]
},
"io.github.ChromeDevTools/chrome-devtools-mcp": {
"type": "stdio",
"command": "npx",
"args": ["chrome-devtools-mcp@0.12.1"],
"gallery": "https://api.mcp.github.com",
"version": "0.12.1"
},
"markitdown": {
"command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
"args": ["markitdown-mcp"]
},
"sequential-thinking": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
},
"memory": {
"command": "D:\\nodejs\\npx.cmd",
"args": ["-y", "@modelcontextprotocol/server-memory"]
}
}
}

View File

@@ -117,7 +117,8 @@ jobs:
DB_USER: ${{ secrets.DB_USER }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
REDIS_URL: 'redis://localhost:6379'
# Explicitly use database 0 for production (test uses database 1)
REDIS_URL: 'redis://localhost:6379/0'
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
JWT_SECRET: ${{ secrets.JWT_SECRET }}

View File

@@ -96,6 +96,24 @@ jobs:
# It prevents the accumulation of duplicate processes from previous test runs.
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.name && p.name.endsWith('-test')) { console.log('Deleting test process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id, e.message); } } }); console.log('✅ Test process cleanup complete.'); } catch (e) { if (e.stdout.toString().includes('No process found')) { console.log('No PM2 processes running, cleanup not needed.'); } else { console.error('Error cleaning up test processes:', e.message); } }" || true
- name: Flush Redis Test Database Before Tests
# CRITICAL: Clear Redis database 1 (test database) to remove stale BullMQ jobs.
# This prevents old jobs with outdated error messages from polluting test results.
# NOTE: We use database 1 for tests to isolate from production (database 0).
env:
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
run: |
echo "--- Flushing Redis database 1 (test database) to remove stale jobs ---"
if [ -z "$REDIS_PASSWORD" ]; then
echo "⚠️ REDIS_PASSWORD_TEST not set, attempting flush without password..."
redis-cli -n 1 FLUSHDB || echo "Redis flush failed (no password)"
else
redis-cli -a "$REDIS_PASSWORD" -n 1 FLUSHDB 2>/dev/null && echo "✅ Redis database 1 (test) flushed successfully." || echo "⚠️ Redis flush failed"
fi
# Verify the flush worked by checking key count on database 1
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 1 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
echo "Redis database 1 key count after flush: $KEY_COUNT"
- name: Run All Tests and Generate Merged Coverage Report
# This single step runs both unit and integration tests, then merges their
# coverage data into a single report. It combines the environment variables
@@ -109,7 +127,9 @@ jobs:
DB_NAME: 'flyer-crawler-test' # Explicitly set for tests
# --- Redis credentials for the test suite ---
REDIS_URL: 'redis://localhost:6379'
# CRITICAL: Use Redis database 1 to isolate tests from production (which uses db 0).
# This prevents the production worker from picking up test jobs.
REDIS_URL: 'redis://localhost:6379/1'
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
# --- Integration test specific variables ---
@@ -117,6 +137,13 @@ jobs:
VITE_API_BASE_URL: 'http://localhost:3001/api'
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
# --- Storage path for flyer images ---
# CRITICAL: Use an absolute path in the test runner's working directory for file storage.
# This ensures tests can read processed files to verify their contents (e.g., EXIF stripping).
# Without this, multer and flyerProcessingService default to /var/www/.../flyer-images.
# NOTE: We use ${{ github.workspace }} which resolves to the checkout directory.
STORAGE_PATH: '${{ github.workspace }}/flyer-images'
# --- JWT Secret for Passport authentication in tests ---
JWT_SECRET: ${{ secrets.JWT_SECRET }}
@@ -384,8 +411,8 @@ jobs:
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
DB_NAME: ${{ secrets.DB_DATABASE_TEST }}
# Redis Credentials
REDIS_URL: 'redis://localhost:6379'
# Redis Credentials (use database 1 to isolate from production)
REDIS_URL: 'redis://localhost:6379/1'
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
# Application Secrets

View File

@@ -116,7 +116,8 @@ jobs:
DB_USER: ${{ secrets.DB_USER }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
REDIS_URL: 'redis://localhost:6379'
# Explicitly use database 0 for production (test uses database 1)
REDIS_URL: 'redis://localhost:6379/0'
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
JWT_SECRET: ${{ secrets.JWT_SECRET }}

View File

@@ -0,0 +1,167 @@
# .gitea/workflows/manual-redis-flush-prod.yml
#
# DANGER: This workflow is DESTRUCTIVE and intended for manual execution only.
# It will completely FLUSH the PRODUCTION Redis database (db 0).
# This will clear all BullMQ queues, sessions, caches, and any other Redis data.
#
name: Manual - Flush Production Redis
on:
workflow_dispatch:
inputs:
confirmation:
description: 'DANGER: This will FLUSH production Redis. Type "flush-production-redis" to confirm.'
required: true
default: 'do-not-run'
flush_type:
description: 'What to flush?'
required: true
type: choice
options:
- 'queues-only'
- 'entire-database'
default: 'queues-only'
jobs:
flush-redis:
runs-on: projectium.com # This job runs on your self-hosted Gitea runner.
env:
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
steps:
- name: Checkout Code
uses: actions/checkout@v3
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: '20'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install Dependencies
run: npm ci
- name: Validate Secrets
run: |
if [ -z "$REDIS_PASSWORD" ]; then
echo "ERROR: REDIS_PASSWORD_PROD secret is not set in Gitea repository settings."
exit 1
fi
echo "✅ Redis password secret is present."
- name: Verify Confirmation Phrase
run: |
if [ "${{ gitea.event.inputs.confirmation }}" != "flush-production-redis" ]; then
echo "ERROR: Confirmation phrase did not match. Aborting Redis flush."
exit 1
fi
echo "✅ Confirmation accepted. Proceeding with Redis flush."
- name: Show Current Redis State
run: |
echo "--- Current Redis Database 0 (Production) State ---"
redis-cli -a "$REDIS_PASSWORD" -n 0 INFO keyspace 2>/dev/null || echo "Could not get keyspace info"
echo ""
echo "--- Key Count ---"
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
echo "Production Redis (db 0) key count: $KEY_COUNT"
echo ""
echo "--- BullMQ Queue Keys ---"
redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | head -20 || echo "No BullMQ keys found"
- name: 🚨 FINAL WARNING & PAUSE 🚨
run: |
echo "*********************************************************************"
echo "WARNING: YOU ARE ABOUT TO FLUSH PRODUCTION REDIS DATA."
echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
echo ""
if [ "${{ gitea.event.inputs.flush_type }}" = "entire-database" ]; then
echo "This will DELETE ALL Redis data including sessions, caches, and queues!"
else
echo "This will DELETE ALL BullMQ queue data (pending jobs, failed jobs, etc.)"
fi
echo ""
echo "This action is IRREVERSIBLE. Press Ctrl+C in the runner terminal NOW to cancel."
echo "Sleeping for 10 seconds..."
echo "*********************************************************************"
sleep 10
- name: Flush BullMQ Queues Only
if: ${{ gitea.event.inputs.flush_type == 'queues-only' }}
env:
REDIS_URL: 'redis://localhost:6379/0'
run: |
echo "--- Obliterating BullMQ queues using Node.js ---"
node -e "
const { Queue } = require('bullmq');
const IORedis = require('ioredis');
const connection = new IORedis(process.env.REDIS_URL, {
maxRetriesPerRequest: null,
password: process.env.REDIS_PASSWORD,
});
const queueNames = [
'flyer-processing',
'email-sending',
'analytics-reporting',
'weekly-analytics-reporting',
'file-cleanup',
'token-cleanup'
];
(async () => {
for (const name of queueNames) {
try {
const queue = new Queue(name, { connection });
const counts = await queue.getJobCounts();
console.log('Queue \"' + name + '\" before obliterate:', JSON.stringify(counts));
await queue.obliterate({ force: true });
console.log('✅ Obliterated queue: ' + name);
await queue.close();
} catch (err) {
console.error('⚠️ Failed to obliterate queue ' + name + ':', err.message);
}
}
await connection.quit();
console.log('✅ All BullMQ queues obliterated.');
})();
"
- name: Flush Entire Redis Database
if: ${{ gitea.event.inputs.flush_type == 'entire-database' }}
run: |
echo "--- Flushing entire Redis database 0 (production) ---"
redis-cli -a "$REDIS_PASSWORD" -n 0 FLUSHDB 2>/dev/null && echo "✅ Redis database 0 flushed successfully." || echo "❌ Redis flush failed"
- name: Verify Flush Results
run: |
echo "--- Redis Database 0 (Production) State After Flush ---"
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
echo "Production Redis (db 0) key count after flush: $KEY_COUNT"
echo ""
echo "--- Remaining BullMQ Queue Keys ---"
BULL_KEYS=$(redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | wc -l || echo "0")
echo "BullMQ key count: $BULL_KEYS"
if [ "${{ gitea.event.inputs.flush_type }}" = "queues-only" ] && [ "$BULL_KEYS" -gt 0 ]; then
echo "⚠️ Warning: Some BullMQ keys may still exist. This can happen if new jobs were added during the flush."
fi
- name: Summary
run: |
echo ""
echo "=========================================="
echo "PRODUCTION REDIS FLUSH COMPLETE"
echo "=========================================="
echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
echo ""
echo "NOTE: If you flushed queues, any pending jobs (flyer processing,"
echo "emails, analytics, etc.) have been permanently deleted."
echo ""
echo "The production workers will automatically start processing"
echo "new jobs as they are added to the queues."
echo "=========================================="

1
.husky/pre-commit Normal file
View File

@@ -0,0 +1 @@
npx lint-staged

4
.lintstagedrc.json Normal file
View File

@@ -0,0 +1,4 @@
{
"*.{js,jsx,ts,tsx}": ["eslint --fix", "prettier --write"],
"*.{json,md,css,html,yml,yaml}": ["prettier --write"]
}

41
.prettierignore Normal file
View File

@@ -0,0 +1,41 @@
# Dependencies
node_modules/
# Build output
dist/
build/
.cache/
# Coverage reports
coverage/
.coverage/
# IDE and editor configs
.idea/
.vscode/
*.swp
*.swo
# Logs
*.log
logs/
# Environment files (may contain secrets)
.env*
!.env.example
# Lock files (managed by package managers)
package-lock.json
pnpm-lock.yaml
yarn.lock
# Generated files
*.min.js
*.min.css
# Git directory
.git/
.gitea/
# Test artifacts
__snapshots__/

View File

@@ -1,31 +1,60 @@
# Use Ubuntu 22.04 (LTS) as the base image to match production
# Dockerfile.dev
# ============================================================================
# DEVELOPMENT DOCKERFILE
# ============================================================================
# This Dockerfile creates a development environment that matches production
# as closely as possible while providing the tools needed for development.
#
# Base: Ubuntu 22.04 (LTS) - matches production server
# Node: v20.x (LTS) - matches production
# Includes: PostgreSQL client, Redis CLI, build tools
# ============================================================================
FROM ubuntu:22.04
# Set environment variables to non-interactive to avoid prompts during installation
ENV DEBIAN_FRONTEND=noninteractive
# Update package lists and install essential tools
# - curl: for downloading Node.js setup script
# ============================================================================
# Install System Dependencies
# ============================================================================
# - curl: for downloading Node.js setup script and health checks
# - git: for version control operations
# - build-essential: for compiling native Node.js modules (node-gyp)
# - python3: required by some Node.js build tools
# - postgresql-client: for psql CLI (database initialization)
# - redis-tools: for redis-cli (health checks)
RUN apt-get update && apt-get install -y \
curl \
git \
build-essential \
python3 \
postgresql-client \
redis-tools \
&& rm -rf /var/lib/apt/lists/*
# Install Node.js 20.x (LTS) from NodeSource
# ============================================================================
# Install Node.js 20.x (LTS)
# ============================================================================
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y nodejs
# Set the working directory inside the container
# ============================================================================
# Set Working Directory
# ============================================================================
WORKDIR /app
# Set default environment variables for development
# ============================================================================
# Environment Configuration
# ============================================================================
# Default environment variables for development
ENV NODE_ENV=development
# Increase Node.js memory limit for large builds
ENV NODE_OPTIONS='--max-old-space-size=8192'
# Default command keeps the container running so you can attach to it
CMD ["bash"]
# ============================================================================
# Default Command
# ============================================================================
# Keep container running so VS Code can attach.
# Actual commands (npm run dev, etc.) are run via devcontainer.json.
CMD ["bash"]

View File

@@ -1,8 +1,36 @@
# compose.dev.yml
# ============================================================================
# DEVELOPMENT DOCKER COMPOSE CONFIGURATION
# ============================================================================
# This file defines the local development environment using Docker/Podman.
#
# Services:
# - app: Node.js application (API + Frontend)
# - postgres: PostgreSQL 15 with PostGIS extension
# - redis: Redis for caching and job queues
#
# Usage:
# Start all services: podman-compose -f compose.dev.yml up -d
# Stop all services: podman-compose -f compose.dev.yml down
# View logs: podman-compose -f compose.dev.yml logs -f
# Reset everything: podman-compose -f compose.dev.yml down -v
#
# VS Code Dev Containers:
# This file is referenced by .devcontainer/devcontainer.json for seamless
# VS Code integration. Open the project in VS Code and use "Reopen in Container".
# ============================================================================
version: '3.8'
services:
# ===================
# Application Service
# ===================
app:
container_name: flyer-crawler-dev
# Use pre-built image if available, otherwise build from Dockerfile.dev
# To build: podman build -f Dockerfile.dev -t flyer-crawler-dev:latest .
image: localhost/flyer-crawler-dev:latest
build:
context: .
dockerfile: Dockerfile.dev
@@ -16,21 +44,44 @@ services:
- '3000:3000' # Frontend (Vite default)
- '3001:3001' # Backend API
environment:
# Core settings
- NODE_ENV=development
# Database - use service name for Docker networking
- DB_HOST=postgres
- DB_PORT=5432
- DB_USER=postgres
- DB_PASSWORD=postgres
- DB_NAME=flyer_crawler_dev
# Redis - use service name for Docker networking
- REDIS_URL=redis://redis:6379
# Add other secrets here or use a .env file
- REDIS_HOST=redis
- REDIS_PORT=6379
# Frontend URL for CORS
- FRONTEND_URL=http://localhost:3000
# Default JWT secret for development (override in production!)
- JWT_SECRET=dev-jwt-secret-change-in-production
# Worker settings
- WORKER_LOCK_DURATION=120000
depends_on:
- postgres
- redis
postgres:
condition: service_healthy
redis:
condition: service_healthy
# Keep container running so VS Code can attach
command: tail -f /dev/null
# Healthcheck for the app (once it's running)
healthcheck:
test: ['CMD', 'curl', '-f', 'http://localhost:3001/api/health', '||', 'exit', '0']
interval: 30s
timeout: 10s
retries: 3
start_period: 60s
# ===================
# PostgreSQL Database
# ===================
postgres:
image: docker.io/library/postgis/postgis:15-3.4
image: docker.io/postgis/postgis:15-3.4
container_name: flyer-crawler-postgres
ports:
- '5432:5432'
@@ -38,15 +89,54 @@ services:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: flyer_crawler_dev
# Optimize for development
POSTGRES_INITDB_ARGS: '--encoding=UTF8 --locale=C'
volumes:
- postgres_data:/var/lib/postgresql/data
# Mount the extensions init script to run on first database creation
# The 00- prefix ensures it runs before any other init scripts
- ./sql/00-init-extensions.sql:/docker-entrypoint-initdb.d/00-init-extensions.sql:ro
# Healthcheck ensures postgres is ready before app starts
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U postgres -d flyer_crawler_dev']
interval: 5s
timeout: 5s
retries: 10
start_period: 10s
# ===================
# Redis Cache/Queue
# ===================
redis:
image: docker.io/library/redis:alpine
container_name: flyer-crawler-redis
ports:
- '6379:6379'
volumes:
- redis_data:/data
# Healthcheck ensures redis is ready before app starts
healthcheck:
test: ['CMD', 'redis-cli', 'ping']
interval: 5s
timeout: 5s
retries: 10
start_period: 5s
# Enable persistence for development data
command: redis-server --appendonly yes
# ===================
# Named Volumes
# ===================
volumes:
postgres_data:
name: flyer-crawler-postgres-data
redis_data:
name: flyer-crawler-redis-data
node_modules_data:
name: flyer-crawler-node-modules
# ===================
# Network Configuration
# ===================
# All services are on the default bridge network.
# Use service names (postgres, redis) as hostnames.

View File

@@ -4,6 +4,8 @@
**Status**: Accepted
**Implemented**: 2026-01-07
## Context
Our application has experienced a recurring pattern of bugs and brittle tests related to error handling, specifically for "resource not found" scenarios. The root causes identified are:
@@ -41,3 +43,86 @@ We will adopt a strict, consistent error-handling contract for the service and r
**Initial Refactoring**: Requires a one-time effort to audit and refactor all existing repository methods to conform to this new standard.
**Convention Adherence**: Developers must be aware of and adhere to this convention. This ADR serves as the primary documentation for this pattern.
## Implementation Details
### Custom Error Types
All custom errors are defined in `src/services/db/errors.db.ts`:
| Error Class | HTTP Status | PostgreSQL Code | Use Case |
| -------------------------------- | ----------- | --------------- | ------------------------------- |
| `NotFoundError` | 404 | - | Resource not found |
| `UniqueConstraintError` | 409 | 23505 | Duplicate key violation |
| `ForeignKeyConstraintError` | 400 | 23503 | Referenced record doesn't exist |
| `NotNullConstraintError` | 400 | 23502 | Required field is null |
| `CheckConstraintError` | 400 | 23514 | Check constraint violated |
| `InvalidTextRepresentationError` | 400 | 22P02 | Invalid data type format |
| `NumericValueOutOfRangeError` | 400 | 22003 | Numeric overflow |
| `ValidationError` | 400 | - | Request validation failed |
| `ForbiddenError` | 403 | - | Access denied |
### Error Handler Middleware
The centralized error handler in `src/middleware/errorHandler.ts` (a sketch follows this list):
1. Catches all errors from route handlers
2. Maps custom error types to HTTP status codes
3. Logs errors with appropriate severity (warn for 4xx, error for 5xx)
4. Returns consistent JSON error responses
5. Includes error ID for server errors (for support correlation)
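A minimal sketch of such a handler is shown below. It is illustrative only: it assumes the custom error classes from the table above and the request-scoped `req.log` logger described in ADR-025, and is not the project's actual `errorHandler.ts`.

```typescript
import type { Request, Response, NextFunction } from 'express';
import { randomUUID } from 'crypto';
import { NotFoundError, ValidationError, ForbiddenError } from '../services/db/errors.db';

// 1. Registered as the last app.use(...) so every next(error) call lands here.
export function errorHandler(err: Error, req: Request, res: Response, _next: NextFunction) {
  // 2. Map custom error types to HTTP status codes (see the table above).
  let status = 500;
  if (err instanceof NotFoundError) status = 404;
  else if (err instanceof ValidationError) status = 400;
  else if (err instanceof ForbiddenError) status = 403;

  // 3. Log with severity matched to the failure class: warn for 4xx, error for 5xx.
  if (status >= 500) {
    // 5. Attach an error ID so users can reference the failure in support requests.
    const errorId = randomUUID();
    req.log.error({ err, errorId }, 'Request failed with server error');
    // 4. Consistent JSON error response.
    return res.status(500).json({ message: 'An internal server error occurred.', errorId });
  }

  req.log.warn({ err }, 'Request failed with client error');
  return res.status(status).json({ message: err.message });
}
```

Registering this after all routes (`app.use(errorHandler)`) funnels every `next(error)` call through a single mapping point.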
### Usage Pattern
```typescript
// In repository (throws NotFoundError)
async function getUserById(id: number): Promise<User> {
const result = await pool.query('SELECT * FROM users WHERE id = $1', [id]);
if (result.rows.length === 0) {
throw new NotFoundError(`User with ID ${id} not found.`);
}
return result.rows[0];
}
// In route handler (simple try/catch)
router.get('/:id', async (req, res, next) => {
try {
const user = await getUserById(Number(req.params.id));
res.json(user);
} catch (error) {
next(error); // errorHandler maps NotFoundError to 404
}
});
```
### Centralized Error Handler Helper
The `handleDbError` function in `src/services/db/errors.db.ts` provides centralized PostgreSQL error handling:
```typescript
import { handleDbError } from './errors.db';
try {
await pool.query('INSERT INTO users (email) VALUES ($1)', [email]);
} catch (error) {
handleDbError(
error,
logger,
'Failed to create user',
{ email },
{
uniqueMessage: 'A user with this email already exists.',
defaultMessage: 'Failed to create user.',
},
);
}
```
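The helper's internals are not reproduced in this ADR. The standalone sketch below illustrates the intended mapping from PostgreSQL SQLSTATE codes (see the table above) to the custom error classes; in the project, both the helper and the classes live together in `errors.db.ts`, and the constructor signatures are assumed here.

```typescript
import type { Logger } from 'pino';
// In the real project these classes are defined in the same file; imported here
// only to keep the sketch self-contained.
import { UniqueConstraintError, ForeignKeyConstraintError, NotNullConstraintError } from './errors.db';

interface HandleDbErrorOptions {
  uniqueMessage?: string;
  defaultMessage: string;
}

export function handleDbError(
  error: unknown,
  logger: Logger,
  logMessage: string,
  context: Record<string, unknown>,
  options: HandleDbErrorOptions,
): never {
  logger.error({ err: error, ...context }, logMessage);

  // node-postgres surfaces the SQLSTATE code on error.code (see the table above).
  const code = (error as { code?: string })?.code;
  if (code === '23505') throw new UniqueConstraintError(options.uniqueMessage ?? options.defaultMessage);
  if (code === '23503') throw new ForeignKeyConstraintError(options.defaultMessage);
  if (code === '23502') throw new NotNullConstraintError(options.defaultMessage);

  throw new Error(options.defaultMessage);
}
```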
## Key Files
- `src/services/db/errors.db.ts` - Custom error classes and `handleDbError` utility
- `src/middleware/errorHandler.ts` - Centralized Express error handling middleware
## Related ADRs
- [ADR-034](./0034-repository-pattern-standards.md) - Repository Pattern Standards (extends this ADR)

View File

@@ -60,3 +60,109 @@ async function registerUserAndCreateDefaultList(userData) {
**Learning Curve**: Developers will need to learn and adopt the `withTransaction` pattern for all transactional database work.
**Refactoring Effort**: Existing methods that manually manage transactions (`createUser`, `createBudget`, etc.) will need to be refactored to use the new pattern.
## Implementation Details
### The `withTransaction` Helper
Located in `src/services/db/connection.db.ts`:
```typescript
export async function withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T> {
const client = await getPool().connect();
try {
await client.query('BEGIN');
const result = await callback(client);
await client.query('COMMIT');
return result;
} catch (error) {
await client.query('ROLLBACK');
logger.error({ err: error }, 'Transaction failed, rolling back.');
throw error;
} finally {
client.release();
}
}
```
### Repository Pattern for Transaction Support
Repository methods accept an optional `PoolClient` parameter:
```typescript
// Function-based approach
export async function createUser(userData: CreateUserInput, client?: PoolClient): Promise<User> {
const queryable = client || getPool();
const result = await queryable.query<User>(
'INSERT INTO users (email, password_hash) VALUES ($1, $2) RETURNING *',
[userData.email, userData.passwordHash],
);
return result.rows[0];
}
```
### Transactional Service Example
```typescript
// src/services/authService.ts
import { withTransaction } from './db/connection.db';
import { createUser, createProfile } from './db';
export async function registerUserWithProfile(
email: string,
password: string,
profileData: ProfileInput,
): Promise<UserWithProfile> {
return withTransaction(async (client) => {
// All operations use the same transactional client
const user = await createUser({ email, password }, client);
const profile = await createProfile(
{
userId: user.user_id,
...profileData,
},
client,
);
return { user, profile };
});
}
```
### Services Using `withTransaction`
| Service | Function | Operations |
| ------------------------- | ----------------------- | ----------------------------------- |
| `authService` | `registerAndLoginUser` | Create user + profile + preferences |
| `userService` | `updateUserWithProfile` | Update user + profile atomically |
| `flyerPersistenceService` | `saveFlyer` | Create flyer + items + metadata |
| `shoppingService` | `createListWithItems` | Create list + initial items |
| `gamificationService` | `awardAchievement` | Create achievement + update points |
### Connection Pool Configuration
```typescript
const poolConfig: PoolConfig = {
max: 20, // Max clients in pool
idleTimeoutMillis: 30000, // Close idle clients after 30s
connectionTimeoutMillis: 2000, // Fail connect after 2s
};
```
### Pool Status Monitoring
```typescript
import { getPoolStatus } from './db/connection.db';
const status = getPoolStatus();
// { totalCount: 20, idleCount: 15, waitingCount: 0 }
```
## Key Files
- `src/services/db/connection.db.ts` - `getPool()`, `withTransaction()`, `getPoolStatus()`
## Related ADRs
- [ADR-001](./0001-standardized-error-handling.md) - Error handling within transactions
- [ADR-034](./0034-repository-pattern-standards.md) - Repository patterns for transaction participation

View File

@@ -79,3 +79,140 @@ router.get('/:id', validateRequest(getFlyerSchema), async (req, res, next) => {
**New Dependency**: Introduces `zod` as a new project dependency.
**Learning Curve**: Developers need to learn the `zod` schema definition syntax.
**Refactoring Effort**: Requires a one-time effort to create schemas and refactor all existing routes to use the `validateRequest` middleware.
## Implementation Details
### The `validateRequest` Middleware
Located in `src/middleware/validation.middleware.ts`:
```typescript
export const validateRequest =
(schema: ZodObject<z.ZodRawShape>) => async (req: Request, res: Response, next: NextFunction) => {
try {
const { params, query, body } = await schema.parseAsync({
params: req.params,
query: req.query,
body: req.body,
});
// Merge parsed data back into request
Object.keys(req.params).forEach((key) => delete req.params[key]);
Object.assign(req.params, params);
Object.keys(req.query).forEach((key) => delete req.query[key]);
Object.assign(req.query, query);
req.body = body;
return next();
} catch (error) {
if (error instanceof ZodError) {
const validationIssues = error.issues.map((issue) => ({
...issue,
path: issue.path.map((p) => String(p)),
}));
return next(new ValidationError(validationIssues));
}
return next(error);
}
};
```
### Common Zod Patterns
```typescript
import { z } from 'zod';
import { requiredString } from '../utils/zodUtils';
// String that coerces to positive integer (for ID params)
const idParam = z.string().pipe(z.coerce.number().int().positive());
// Pagination query params with defaults
const paginationQuery = z.object({
limit: z.coerce.number().int().positive().max(100).default(20),
offset: z.coerce.number().int().nonnegative().default(0),
});
// Email with sanitization
const emailSchema = z.string().trim().toLowerCase().email('A valid email is required.');
// Password with strength validation
const passwordSchema = z
.string()
.trim()
.min(8, 'Password must be at least 8 characters long.')
.superRefine((password, ctx) => {
const strength = validatePasswordStrength(password);
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
});
// Optional string that converts empty string to undefined
const optionalString = z.preprocess(
(val) => (val === '' ? undefined : val),
z.string().trim().optional(),
);
```
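Combining these patterns, a complete route schema might look like the following hypothetical example for `GET /api/flyers/:id/items` (the real schemas in `flyer.routes.ts` may differ). The top-level keys mirror what `validateRequest` destructures: `params`, `query`, and `body`.

```typescript
import { z } from 'zod';

export const getFlyerItemsSchema = z.object({
  params: z.object({
    // URL params arrive as strings, so coerce the ID to a positive integer.
    id: z.string().pipe(z.coerce.number().int().positive()),
  }),
  query: z.object({
    limit: z.coerce.number().int().positive().max(100).default(20),
    offset: z.coerce.number().int().nonnegative().default(0),
  }),
  // GET requests carry no meaningful body; accept whatever the body parser provides.
  body: z.unknown().optional(),
});

// Usage: router.get('/:id/items', validateRequest(getFlyerItemsSchema), handler);
```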
### Routes Using `validateRequest`
All API routes use the validation middleware:
| Router | Schemas Defined | Validated Endpoints |
| ------------------------ | --------------- | -------------------------------------------------------------------------------- |
| `auth.routes.ts` | 5 | `/register`, `/login`, `/forgot-password`, `/reset-password`, `/change-password` |
| `user.routes.ts` | 4 | `/profile`, `/address`, `/preferences`, `/notifications` |
| `flyer.routes.ts` | 6 | `GET /:id`, `GET /`, `GET /:id/items`, `DELETE /:id` |
| `budget.routes.ts` | 5 | `/`, `/:id`, `/batch`, `/categories` |
| `recipe.routes.ts` | 4 | `GET /`, `GET /:id`, `POST /`, `PATCH /:id` |
| `admin.routes.ts` | 8 | Various admin endpoints |
| `ai.routes.ts` | 3 | `/upload-and-process`, `/analyze`, `/jobs/:jobId/status` |
| `gamification.routes.ts` | 3 | `/achievements`, `/leaderboard`, `/points` |
### Validation Error Response Format
When validation fails, the `errorHandler` returns:
```json
{
"message": "The request data is invalid.",
"errors": [
{
"path": ["body", "email"],
"message": "A valid email is required."
},
{
"path": ["body", "password"],
"message": "Password must be at least 8 characters long."
}
]
}
```
HTTP Status: `400 Bad Request`
### Zod Utility Functions
Located in `src/utils/zodUtils.ts`:
```typescript
// String that rejects empty strings
export const requiredString = (message?: string) =>
z.string().min(1, message || 'This field is required.');
// Number from string with validation
export const numericString = z.string().pipe(z.coerce.number());
// Boolean from string ('true'/'false')
export const booleanString = z.enum(['true', 'false']).transform((v) => v === 'true');
```
## Key Files
- `src/middleware/validation.middleware.ts` - The `validateRequest` middleware
- `src/services/db/errors.db.ts` - `ValidationError` class definition
- `src/middleware/errorHandler.ts` - Error formatting for validation errors
- `src/utils/zodUtils.ts` - Reusable Zod schema utilities
## Related ADRs
- [ADR-001](./0001-standardized-error-handling.md) - Error handling for validation errors
- [ADR-032](./0032-rate-limiting-strategy.md) - Rate limiting applied alongside validation

View File

@@ -86,3 +86,219 @@ router.get('/:id', async (req, res, next) => {
**Refactoring Effort**: Requires adding the `requestLogger` middleware and refactoring all routes and services to use `req.log` instead of the global `logger`.
**Slight Performance Overhead**: Creating a child logger for every request adds a minor performance cost, though this is negligible for most modern logging libraries.
## Implementation Details
### Logger Configuration
Located in `src/services/logger.server.ts`:
```typescript
import pino from 'pino';
const isProduction = process.env.NODE_ENV === 'production';
const isTest = process.env.NODE_ENV === 'test';
export const logger = pino({
level: isProduction ? 'info' : 'debug',
transport:
isProduction || isTest
? undefined
: {
target: 'pino-pretty',
options: {
colorize: true,
translateTime: 'SYS:standard',
ignore: 'pid,hostname',
},
},
redact: {
paths: [
'req.headers.authorization',
'req.headers.cookie',
'*.body.password',
'*.body.newPassword',
'*.body.currentPassword',
'*.body.confirmPassword',
'*.body.refreshToken',
'*.body.token',
],
censor: '[REDACTED]',
},
});
```
### Request Logger Middleware
Located in `server.ts`:
```typescript
const requestLogger = (req: Request, res: Response, next: NextFunction) => {
const requestId = randomUUID();
const user = req.user as UserProfile | undefined;
const start = process.hrtime();
// Create request-scoped logger
req.log = logger.child({
request_id: requestId,
user_id: user?.user.user_id,
ip_address: req.ip,
});
req.log.debug({ method: req.method, originalUrl: req.originalUrl }, 'INCOMING');
res.on('finish', () => {
const duration = getDurationInMilliseconds(start);
const { statusCode, statusMessage } = res;
const logDetails: Record<string, unknown> = {
user_id: (req.user as UserProfile | undefined)?.user.user_id,
method: req.method,
originalUrl: req.originalUrl,
statusCode,
statusMessage,
duration: duration.toFixed(2),
};
// Include request details for failed requests (for debugging)
if (statusCode >= 400) {
logDetails.req = { headers: req.headers, body: req.body };
}
if (statusCode >= 500) req.log.error(logDetails, 'Request completed with server error');
else if (statusCode >= 400) req.log.warn(logDetails, 'Request completed with client error');
else req.log.info(logDetails, 'Request completed successfully');
});
next();
};
app.use(requestLogger);
```
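The `getDurationInMilliseconds` helper used above is not shown in this ADR; a minimal sketch of what it might look like, based on `process.hrtime()`, follows.

```typescript
// Convert a process.hrtime() start tuple into elapsed milliseconds.
function getDurationInMilliseconds(start: [number, number]): number {
  const NS_PER_SEC = 1e9;
  const NS_TO_MS = 1e-6;
  const [seconds, nanoseconds] = process.hrtime(start);
  return (seconds * NS_PER_SEC + nanoseconds) * NS_TO_MS;
}
```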
### TypeScript Support
The `req.log` property is typed via declaration merging in `src/types/express.d.ts`:
```typescript
import { Logger } from 'pino';
declare global {
namespace Express {
export interface Request {
log: Logger;
}
}
}
```
### Automatic Sensitive Data Redaction
The Pino logger automatically redacts sensitive fields:
```json
// Before redaction
{
"body": {
"email": "user@example.com",
"password": "secret123",
"newPassword": "newsecret456"
}
}
// After redaction (in logs)
{
"body": {
"email": "user@example.com",
"password": "[REDACTED]",
"newPassword": "[REDACTED]"
}
}
```
### Log Levels by Scenario
| Level | HTTP Status | Scenario |
| ----- | ----------- | -------------------------------------------------- |
| DEBUG | Any | Request incoming, internal state, development info |
| INFO | 2xx | Successful requests, business events |
| WARN | 4xx | Client errors, validation failures, not found |
| ERROR | 5xx | Server errors, unhandled exceptions |
### Service Layer Logging
Services accept the request-scoped logger as an optional parameter:
```typescript
export async function registerUser(email: string, password: string, reqLog?: Logger) {
const log = reqLog || logger; // Fall back to global logger
log.info({ email }, 'Registering new user');
// ... implementation
log.debug({ userId: user.user_id }, 'User created successfully');
return user;
}
// In route handler
router.post('/register', async (req, res, next) => {
await authService.registerUser(req.body.email, req.body.password, req.log);
});
```
### Log Output Format
**Development** (pino-pretty):
```text
[2026-01-09 12:34:56.789] INFO (request_id=abc123): Request completed successfully
method: "GET"
originalUrl: "/api/flyers"
statusCode: 200
duration: "45.23"
```
**Production** (JSON):
```json
{
"level": 30,
"time": 1704812096789,
"request_id": "abc123",
"user_id": "user_456",
"ip_address": "192.168.1.1",
"method": "GET",
"originalUrl": "/api/flyers",
"statusCode": 200,
"duration": "45.23",
"msg": "Request completed successfully"
}
```
### Routes Using `req.log`
All route files have been migrated to use the request-scoped logger:
- `src/routes/auth.routes.ts`
- `src/routes/user.routes.ts`
- `src/routes/flyer.routes.ts`
- `src/routes/ai.routes.ts`
- `src/routes/admin.routes.ts`
- `src/routes/budget.routes.ts`
- `src/routes/recipe.routes.ts`
- `src/routes/gamification.routes.ts`
- `src/routes/personalization.routes.ts`
- `src/routes/stats.routes.ts`
- `src/routes/health.routes.ts`
- `src/routes/system.routes.ts`
## Key Files
- `src/services/logger.server.ts` - Pino logger configuration
- `src/services/logger.client.ts` - Client-side logger (for frontend)
- `src/types/express.d.ts` - TypeScript declaration for `req.log`
- `server.ts` - Request logger middleware
## Related ADRs
- [ADR-001](./0001-standardized-error-handling.md) - Error handler uses `req.log` for error logging
- [ADR-026](./0026-standardized-client-side-structured-logging.md) - Client-side logging strategy

View File

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
## Context
@@ -16,3 +16,82 @@ We will implement a dedicated background job processing system using a task queu
**Positive**: Decouples the API from heavy processing, allows for retries on failure, and enables scaling the processing workers independently. Increases application reliability and resilience.
**Negative**: Introduces a new dependency (Redis) into the infrastructure. Requires refactoring of the flyer processing logic to work within a job queue structure.
## Implementation Details
### Queue Infrastructure
The implementation uses **BullMQ v5.65.1** with **ioredis v5.8.2** for Redis connectivity. Six distinct queues handle different job types:
| Queue Name | Purpose | Retry Attempts | Backoff Strategy |
| ---------------------------- | --------------------------- | -------------- | ---------------------- |
| `flyer-processing` | OCR/AI processing of flyers | 3 | Exponential (5s base) |
| `email-sending` | Email delivery | 5 | Exponential (10s base) |
| `analytics-reporting` | Daily report generation | 2 | Exponential (60s base) |
| `weekly-analytics-reporting` | Weekly report generation | 2 | Exponential (1h base) |
| `file-cleanup` | Temporary file cleanup | 3 | Exponential (30s base) |
| `token-cleanup` | Expired token removal | 2 | Exponential (1h base) |
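As an illustration of how the table above translates into BullMQ options, here is a sketch of one queue declaration. The actual definitions live in `src/services/queues.server.ts` and may differ; the `removeOnComplete` value is an assumption.

```typescript
import { Queue } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0', {
  maxRetriesPerRequest: null, // required by BullMQ
});

export const flyerProcessingQueue = new Queue('flyer-processing', {
  connection,
  defaultJobOptions: {
    attempts: 3, // retry attempts from the table above
    backoff: { type: 'exponential', delay: 5_000 }, // 5s base, exponential
    removeOnComplete: 1000, // bounded job history (assumed housekeeping value)
  },
});
```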
### Key Files
- `src/services/queues.server.ts` - Queue definitions and configuration
- `src/services/workers.server.ts` - Worker implementations with configurable concurrency
- `src/services/redis.server.ts` - Redis connection management
- `src/services/queueService.server.ts` - Queue lifecycle and graceful shutdown
- `src/services/flyerProcessingService.server.ts` - 5-stage flyer processing pipeline
- `src/types/job-data.ts` - TypeScript interfaces for all job data types
### API Design
Endpoints for long-running tasks return **202 Accepted** immediately with a job ID:
```text
POST /api/ai/upload-and-process → 202 { jobId: "..." }
GET /api/ai/jobs/:jobId/status → { state: "...", progress: ... }
```
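A client consuming this API follows a submit-then-poll flow. The sketch below is illustrative only; the upload field name (`flyer`) and response field names (`jobId`, `state`) are assumptions based on this ADR rather than the exact API contract.

```typescript
async function processFlyer(file: File, token: string): Promise<unknown> {
  const form = new FormData();
  form.append('flyer', file); // field name assumed

  const res = await fetch('/api/ai/upload-and-process', {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: form,
  });
  const { jobId } = await res.json(); // 202 Accepted with a job ID

  // Poll the status endpoint until the job settles (bounded to ~5 minutes).
  for (let attempt = 0; attempt < 150; attempt++) {
    const statusRes = await fetch(`/api/ai/jobs/${jobId}/status`);
    const status = await statusRes.json();
    if (status.state === 'completed') return status;
    if (status.state === 'failed') throw new Error('Flyer processing failed');
    await new Promise((resolve) => setTimeout(resolve, 2_000));
  }
  throw new Error('Timed out waiting for flyer processing');
}
```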
### Worker Configuration
Workers are configured via environment variables (a worker sketch follows this list):
- `WORKER_CONCURRENCY` - Flyer processing parallelism (default: 1)
- `EMAIL_WORKER_CONCURRENCY` - Email worker parallelism (default: 10)
- `ANALYTICS_WORKER_CONCURRENCY` - Analytics worker parallelism (default: 1)
- `CLEANUP_WORKER_CONCURRENCY` - Cleanup worker parallelism (default: 10)
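A worker wired to these variables might look like the following sketch; the real processors live in `src/services/workers.server.ts`, and the lock duration default here is taken from `.env.example`.

```typescript
import { Worker } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0', {
  maxRetriesPerRequest: null,
});

export const flyerWorker = new Worker(
  'flyer-processing',
  async (job) => {
    // Placeholder processor; the real 5-stage pipeline lives in flyerProcessingService.
    return { processed: job.id };
  },
  {
    connection,
    concurrency: Number(process.env.WORKER_CONCURRENCY ?? 1),
    lockDuration: Number(process.env.WORKER_LOCK_DURATION ?? 120_000), // 2-minute default
  },
);
```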
### Monitoring
- **Bull Board UI** available at `/api/admin/jobs` for admin users
- Worker status endpoint: `GET /api/admin/workers/status`
- Queue status endpoint: `GET /api/admin/queues/status`
### Graceful Shutdown
Both API and worker processes implement graceful shutdown with a 30-second timeout, ensuring in-flight jobs complete before process termination.
## Compliance Notes
### Deprecated Synchronous Endpoints
The following endpoints process flyers synchronously and are **deprecated**:
- `POST /api/ai/upload-legacy` - For integration testing only
- `POST /api/ai/flyers/process` - Legacy workflow, should migrate to queue-based approach
New integrations MUST use `POST /api/ai/upload-and-process` for queue-based processing.
### Email Handling
- **Bulk emails** (deal notifications): Enqueued via `emailQueue`
- **Transactional emails** (password reset): Sent synchronously for immediate user feedback
## Future Enhancements
Potential improvements for consideration:
1. **Dead Letter Queue (DLQ)**: Move permanently failed jobs to a dedicated queue for analysis
2. **Job Priority Levels**: Allow priority-based processing for different job types
3. **Real-time Progress**: WebSocket/SSE for live job progress updates to clients
4. **Per-Queue Rate Limiting**: Throttle job processing based on external API limits
5. **Job Dependencies**: Support for jobs that depend on completion of other jobs
6. **Prometheus Metrics**: Export queue metrics for observability dashboards

View File

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
@@ -16,3 +18,216 @@ We will introduce a centralized, schema-validated configuration service. We will
**Positive**: Improves application reliability and developer experience by catching configuration errors at startup rather than at runtime. Provides a single source of truth for all required configuration.
**Negative**: Adds a small amount of boilerplate for defining the configuration schema. Requires a one-time effort to refactor all `process.env` access points to use the new configuration service.
## Implementation Status
### What's Implemented
- **Centralized Configuration Schema** - Zod-based validation in `src/config/env.ts`
- **Type-Safe Access** - Full TypeScript types for all configuration
- **Fail-Fast Startup** - Clear error messages for missing/invalid config
- **Environment Helpers** - `isProduction`, `isTest`, `isDevelopment` exports
- **Service Configuration Helpers** - `isSmtpConfigured`, `isAiConfigured`, etc.
### Migration Status
- ⏳ Gradual migration of `process.env` access to `config.*` in progress
- Legacy `process.env` access still works during transition
## Implementation Details
### Configuration Schema
The configuration is organized into logical groups:
```typescript
import { config, isProduction, isTest } from './config/env';
// Database
config.database.host; // DB_HOST
config.database.port; // DB_PORT (default: 5432)
config.database.user; // DB_USER
config.database.password; // DB_PASSWORD
config.database.name; // DB_NAME
// Redis
config.redis.url; // REDIS_URL
config.redis.password; // REDIS_PASSWORD (optional)
// Authentication
config.auth.jwtSecret; // JWT_SECRET (min 32 chars)
config.auth.jwtSecretPrevious; // JWT_SECRET_PREVIOUS (for rotation)
// SMTP (all optional - email degrades gracefully)
config.smtp.host; // SMTP_HOST
config.smtp.port; // SMTP_PORT (default: 587)
config.smtp.user; // SMTP_USER
config.smtp.pass; // SMTP_PASS
config.smtp.secure; // SMTP_SECURE (default: false)
config.smtp.fromEmail; // SMTP_FROM_EMAIL
// AI Services
config.ai.geminiApiKey; // GEMINI_API_KEY
config.ai.geminiRpm; // GEMINI_RPM (default: 5)
config.ai.priceQualityThreshold; // AI_PRICE_QUALITY_THRESHOLD (default: 0.5)
// Google Services
config.google.mapsApiKey; // GOOGLE_MAPS_API_KEY (optional)
config.google.clientId; // GOOGLE_CLIENT_ID (optional)
config.google.clientSecret; // GOOGLE_CLIENT_SECRET (optional)
// Worker Configuration
config.worker.concurrency; // WORKER_CONCURRENCY (default: 1)
config.worker.lockDuration; // WORKER_LOCK_DURATION (default: 30000)
config.worker.emailConcurrency; // EMAIL_WORKER_CONCURRENCY (default: 10)
config.worker.analyticsConcurrency; // ANALYTICS_WORKER_CONCURRENCY (default: 1)
config.worker.cleanupConcurrency; // CLEANUP_WORKER_CONCURRENCY (default: 10)
config.worker.weeklyAnalyticsConcurrency; // WEEKLY_ANALYTICS_WORKER_CONCURRENCY (default: 1)
// Server
config.server.nodeEnv; // NODE_ENV (development/production/test)
config.server.port; // PORT (default: 3001)
config.server.frontendUrl; // FRONTEND_URL
config.server.baseUrl; // BASE_URL
config.server.storagePath; // STORAGE_PATH (default: /var/www/.../flyer-images)
```
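For context, the schema behind this access pattern could be defined roughly as follows. This is a sketch, not the contents of `src/config/env.ts`: it covers only a handful of variables and prints a simplified version of the fail-fast error shown below.

```typescript
import { z } from 'zod';

const envSchema = z.object({
  DB_HOST: z.string().min(1, 'DB_HOST is required'),
  DB_PORT: z.coerce.number().int().default(5432),
  DB_USER: z.string().min(1),
  DB_PASSWORD: z.string().min(1),
  DB_NAME: z.string().min(1),
  REDIS_URL: z.string().url(),
  JWT_SECRET: z.string().min(32, 'JWT_SECRET must be at least 32 characters for security'),
  NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
  PORT: z.coerce.number().int().default(3001),
});

const parsed = envSchema.safeParse(process.env);
if (!parsed.success) {
  console.error('CONFIGURATION ERROR - APPLICATION STARTUP');
  for (const issue of parsed.error.issues) {
    console.error(`  - ${issue.path.join('.')}: ${issue.message}`);
  }
  process.exit(1);
}

// Group the flat environment variables into the nested config object used throughout the app.
export const config = {
  database: {
    host: parsed.data.DB_HOST,
    port: parsed.data.DB_PORT,
    user: parsed.data.DB_USER,
    password: parsed.data.DB_PASSWORD,
    name: parsed.data.DB_NAME,
  },
  redis: { url: parsed.data.REDIS_URL },
  auth: { jwtSecret: parsed.data.JWT_SECRET },
  server: { nodeEnv: parsed.data.NODE_ENV, port: parsed.data.PORT },
};

export const isProduction = config.server.nodeEnv === 'production';
export const isTest = config.server.nodeEnv === 'test';
```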
### Convenience Helpers
```typescript
import { isProduction, isTest, isDevelopment, isSmtpConfigured } from './config/env';
// Environment checks
if (isProduction) {
// Production-only logic
}
// Service availability checks
if (isSmtpConfigured) {
await sendEmail(...);
} else {
logger.warn('Email not configured, skipping notification');
}
```
### Fail-Fast Error Messages
When configuration is invalid, the application exits with a clear error:
```text
╔════════════════════════════════════════════════════════════════╗
║ CONFIGURATION ERROR - APPLICATION STARTUP ║
╚════════════════════════════════════════════════════════════════╝
The following environment variables are missing or invalid:
- database.host: DB_HOST is required
- auth.jwtSecret: JWT_SECRET must be at least 32 characters for security
Please check your .env file or environment configuration.
See ADR-007 for the complete list of required environment variables.
```
### Usage Example
```typescript
// Before (direct process.env access)
const pool = new Pool({
host: process.env.DB_HOST,
port: parseInt(process.env.DB_PORT || '5432', 10),
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
});
// After (type-safe config access)
import { config } from './config/env';
const pool = new Pool({
host: config.database.host,
port: config.database.port,
user: config.database.user,
password: config.database.password,
database: config.database.name,
});
```
## Required Environment Variables
### Critical (Application will not start without these)
| Variable | Description |
| ------------- | ----------------------------------------------------- |
| `DB_HOST` | PostgreSQL database host |
| `DB_USER` | PostgreSQL database user |
| `DB_PASSWORD` | PostgreSQL database password |
| `DB_NAME` | PostgreSQL database name |
| `REDIS_URL` | Redis connection URL (e.g., `redis://localhost:6379`) |
| `JWT_SECRET` | JWT signing secret (minimum 32 characters) |
### Optional with Defaults
| Variable | Default | Description |
| ---------------------------- | ------------------------- | ------------------------------- |
| `DB_PORT` | 5432 | PostgreSQL port |
| `PORT` | 3001 | Server HTTP port |
| `NODE_ENV` | development | Environment mode |
| `STORAGE_PATH` | /var/www/.../flyer-images | File upload directory |
| `SMTP_PORT` | 587 | SMTP server port |
| `SMTP_SECURE` | false | Use TLS for SMTP |
| `GEMINI_RPM` | 5 | Gemini API requests per minute |
| `AI_PRICE_QUALITY_THRESHOLD` | 0.5 | AI extraction quality threshold |
| `WORKER_CONCURRENCY` | 1 | Flyer processing concurrency |
| `WORKER_LOCK_DURATION` | 30000 | Worker lock duration (ms) |
### Optional (Feature-specific)
| Variable | Description |
| --------------------- | ------------------------------------------- |
| `GEMINI_API_KEY` | Google Gemini API key (enables AI features) |
| `GOOGLE_MAPS_API_KEY` | Google Maps API key (enables geocoding) |
| `SMTP_HOST` | SMTP server (enables email notifications) |
| `SMTP_USER` | SMTP authentication username |
| `SMTP_PASS` | SMTP authentication password |
| `SMTP_FROM_EMAIL` | Sender email address |
| `FRONTEND_URL` | Frontend URL for email links |
| `JWT_SECRET_PREVIOUS` | Previous JWT secret for rotation (ADR-029) |
## Key Files
- `src/config/env.ts` - Configuration schema and validation
- `.env.example` - Template for required environment variables
## Migration Guide
To migrate existing `process.env` usage:
1. Import the config:
```typescript
import { config, isProduction } from '../config/env';
```
2. Replace direct access:
```typescript
// Before
process.env.DB_HOST;
process.env.NODE_ENV === 'production';
parseInt(process.env.PORT || '3001', 10);
// After
config.database.host;
isProduction;
config.server.port;
```
3. Use service helpers for optional features:
```typescript
import { isSmtpConfigured, isAiConfigured } from '../config/env';
if (isSmtpConfigured) {
// Email is available
}
```

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
## Context
@@ -20,3 +20,107 @@ We will implement a multi-layered caching strategy using an in-memory data store
**Positive**: Directly addresses application performance and scalability. Reduces database load and improves API response times for common requests.
**Negative**: Introduces Redis as a dependency if not already used. Adds complexity to the data-fetching logic and requires careful management of cache invalidation to prevent stale data.
## Implementation Details
### Cache Service
A centralized cache service (`src/services/cacheService.server.ts`) provides reusable caching functionality:
- **`getOrSet<T>(key, fetcher, options)`**: Cache-aside pattern implementation
- **`get<T>(key)`**: Retrieve cached value
- **`set<T>(key, value, ttl)`**: Store value with TTL
- **`del(key)`**: Delete specific key
- **`invalidatePattern(pattern)`**: Delete keys matching a pattern
All cache operations are fail-safe - cache failures do not break the application.
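A minimal sketch of the cache-aside helper, assuming an `ioredis`-style client (the real `cacheService.server.ts` adds logging, key prefixes, and option handling):

```typescript
import Redis from 'ioredis'; // assumed client; the actual service may use a different Redis library

const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

export async function getOrSet<T>(
  key: string,
  fetcher: () => Promise<T>,
  ttlSeconds: number,
): Promise<T> {
  try {
    const hit = await redis.get(key);
    if (hit !== null) return JSON.parse(hit) as T;
  } catch {
    // Fail-safe: a cache read error falls through to the fetcher.
  }
  const value = await fetcher();
  try {
    await redis.set(key, JSON.stringify(value), 'EX', ttlSeconds);
  } catch {
    // Fail-safe: a cache write error never breaks the request.
  }
  return value;
}

// e.g. getOrSet('cache:brands', fetchBrandsFromDb, 3600) - fetchBrandsFromDb is illustrative.
```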
### TTL Configuration
Different data types use different TTL values based on volatility:
| Data Type | TTL | Rationale |
| ------------------- | --------- | -------------------------------------- |
| Brands/Stores | 1 hour | Rarely changes, safe to cache longer |
| Flyer lists | 5 minutes | Changes when new flyers are added |
| Individual flyers | 10 minutes| Stable once created |
| Flyer items | 10 minutes| Stable once created |
| Statistics | 5 minutes | Can be slightly stale |
| Frequent sales | 15 minutes| Aggregated data, updated periodically |
| Categories | 1 hour | Rarely changes |
### Cache Key Strategy
Cache keys follow a consistent prefix pattern for pattern-based invalidation:
- `cache:brands` - All brands list
- `cache:flyers:{limit}:{offset}` - Paginated flyer lists
- `cache:flyer:{id}` - Individual flyer data
- `cache:flyer-items:{flyerId}` - Items for a specific flyer
- `cache:stats:*` - Statistics data
- `geocode:{address}` - Geocoding results (30-day TTL)
### Cached Endpoints
The following repository methods implement server-side caching:
| Method | Cache Key Pattern | TTL |
| ------ | ----------------- | --- |
| `FlyerRepository.getAllBrands()` | `cache:brands` | 1 hour |
| `FlyerRepository.getFlyers()` | `cache:flyers:{limit}:{offset}` | 5 minutes |
| `FlyerRepository.getFlyerItems()` | `cache:flyer-items:{flyerId}` | 10 minutes |
### Cache Invalidation
**Event-based invalidation** is triggered on write operations:
- **Flyer creation** (`FlyerPersistenceService.saveFlyer`): Invalidates all `cache:flyers*` keys
- **Flyer deletion** (`FlyerRepository.deleteFlyer`): Invalidates specific flyer and flyer items cache, plus flyer lists
**Manual invalidation** via admin endpoints:
- `POST /api/admin/system/clear-cache` - Clears all application cache (flyers, brands, stats)
- `POST /api/admin/system/clear-geocode-cache` - Clears geocoding cache
### Client-Side Caching
TanStack React Query provides client-side caching with configurable stale times:
| Query Type | Stale Time |
| ----------------- | ----------- |
| Categories | 1 hour |
| Master Items | 10 minutes |
| Flyer Items | 5 minutes |
| Flyers | 2 minutes |
| Shopping Lists | 1 minute |
| Activity Log | 30 seconds |
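A sketch of how these stale times map onto TanStack React Query configuration; the hook name and endpoint below are illustrative, and the real values live in `src/config/queryClient.ts`:

```typescript
import { QueryClient, useQuery } from '@tanstack/react-query';

export const queryClient = new QueryClient({
  defaultOptions: {
    queries: {
      staleTime: 5 * 60 * 1000, // assumed global default; per-query values override it
      refetchOnWindowFocus: false,
    },
  },
});

// A per-query override, e.g. categories are considered fresh for an hour.
export function useCategories() {
  return useQuery({
    queryKey: ['categories'],
    queryFn: () => fetch('/api/categories').then((r) => r.json()),
    staleTime: 60 * 60 * 1000,
  });
}
```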
### Multi-Layer Cache Architecture
```text
Client Request
      ↓
[TanStack React Query]    ← Client-side cache (staleTime-based)
      ↓
[Express API]
      ↓
[CacheService.getOrSet()]  ← Server-side Redis cache (TTL-based)
      ↓
[PostgreSQL Database]
```
## Key Files
- `src/services/cacheService.server.ts` - Centralized cache service
- `src/services/db/flyer.db.ts` - Repository with caching for brands, flyers, flyer items
- `src/services/flyerPersistenceService.server.ts` - Cache invalidation on flyer creation
- `src/routes/admin.routes.ts` - Admin cache management endpoints
- `src/config/queryClient.ts` - Client-side query cache configuration
## Future Enhancements
1. **Recipe caching**: Add caching to expensive recipe queries (by-sale-percentage, etc.)
2. **Cache warming**: Pre-populate cache on startup for frequently accessed static data
3. **Cache metrics**: Add hit/miss rate monitoring for observability
4. **Conditional caching**: Skip cache for authenticated user-specific data
5. **Cache compression**: Compress large cached payloads to reduce Redis memory usage

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
## Context
@@ -14,9 +14,305 @@ We will formalize the testing pyramid for the project, defining the role of each
1. **Unit Tests (Vitest)**: For isolated functions, components, and repository methods with mocked dependencies. High coverage is expected.
2. **Integration Tests (Supertest)**: For API routes, testing the interaction between controllers, services, and mocked database layers. Focus on contract and middleware correctness.
3. **End-to-End (E2E) Tests (Playwright/Cypress)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real browser and a test database to ensure the entire system works together.
3. **End-to-End (E2E) Tests (Vitest + Supertest)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real test server and database to ensure the entire system works together.
## Consequences
**Positive**: Ensures a consistent and comprehensive approach to quality assurance. Gives developers confidence when refactoring or adding new features. Clearly defines "done" for a new feature.
**Negative**: May require investment in setting up and maintaining the E2E testing environment. Can slightly increase the time required to develop a feature if all test layers are required.
## Implementation Details
### Testing Framework Stack
| Tool | Version | Purpose |
| ---- | ------- | ------- |
| Vitest | 4.0.15 | Test runner for all test types |
| @testing-library/react | 16.3.0 | React component testing |
| @testing-library/jest-dom | 6.9.1 | DOM assertion matchers |
| supertest | 7.1.4 | HTTP assertion library for API testing |
| msw | 2.12.3 | Mock Service Worker for network mocking |
| testcontainers | 11.8.1 | Database containerization (optional) |
| c8 + nyc | 10.1.3 / 17.1.0 | Coverage reporting |
### Test File Organization
```text
src/
├── components/
│   └── *.test.tsx          # Component unit tests (colocated)
├── hooks/
│   └── *.test.ts           # Hook unit tests (colocated)
├── services/
│   └── *.test.ts           # Service unit tests (colocated)
├── routes/
│   └── *.test.ts           # Route handler unit tests (colocated)
├── utils/
│   └── *.test.ts           # Utility function tests (colocated)
└── tests/
    ├── setup/              # Test configuration and setup files
    ├── utils/              # Test utilities, factories, helpers
    ├── assets/             # Test fixtures (images, files)
    ├── integration/        # Integration test files (*.test.ts)
    └── e2e/                # End-to-end test files (*.e2e.test.ts)
```
**Naming Convention**: `{filename}.test.ts` or `{filename}.test.tsx` for unit/integration, `{filename}.e2e.test.ts` for E2E.
### Configuration Files
| Config | Environment | Purpose |
| ------ | ----------- | ------- |
| `vite.config.ts` | jsdom | Unit tests (React components, hooks) |
| `vitest.config.integration.ts` | node | Integration tests (API routes) |
| `vitest.config.e2e.ts` | node | E2E tests (full user flows) |
| `vitest.workspace.ts` | - | Orchestrates all test projects |
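A hypothetical sketch of the workspace file (the actual `vitest.workspace.ts` may name or configure the projects differently); a workspace file can simply list the config files of each test project:

```typescript
// Hypothetical vitest.workspace.ts
export default [
  './vite.config.ts', // unit tests (jsdom)
  './vitest.config.integration.ts', // integration tests (node)
  './vitest.config.e2e.ts', // e2e tests (node)
];
```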
### Test Pyramid
```text
    ┌─────────────┐
    │     E2E     │  5 test files
    │    Tests    │  Critical user flows
    ├─────────────┤
    │ Integration │  17 test files
    │    Tests    │  API contracts + middleware
┌───┴─────────────┴───┐
│      Unit Tests     │  185 test files
│ Components, Hooks,  │  Isolated functions
│  Services, Utils    │  Mocked dependencies
└─────────────────────┘
```
### Unit Tests
**Purpose**: Test isolated functions, components, and modules with mocked dependencies.
**Environment**: jsdom (browser-like)
**Key Patterns**:
```typescript
// Component testing with providers
import { renderWithProviders, screen } from '@/tests/utils/renderWithProviders';
describe('MyComponent', () => {
it('renders correctly', () => {
renderWithProviders(<MyComponent />);
expect(screen.getByText('Hello')).toBeInTheDocument();
});
});
```
```typescript
// Hook testing
import { renderHook, waitFor } from '@testing-library/react';
import { useMyHook } from './useMyHook';
describe('useMyHook', () => {
it('returns expected value', async () => {
const { result } = renderHook(() => useMyHook());
await waitFor(() => expect(result.current.data).toBeDefined());
});
});
```
**Global Mocks** (automatically applied via `tests-setup-unit.ts`):
- Database connections (`pg.Pool`)
- AI services (`@google/genai`)
- Authentication (`jsonwebtoken`, `bcrypt`)
- Logging (`logger.server`, `logger.client`)
- Notifications (`notificationService`)
### Integration Tests
**Purpose**: Test API routes with real service interactions and database.
**Environment**: node
**Setup**: Real Express server on port 3001, real PostgreSQL database
```typescript
// API route testing pattern
import supertest from 'supertest';
import { createAndLoginUser } from '@/tests/utils/testHelpers';
describe('Auth API', () => {
let request: ReturnType<typeof supertest>;
let authToken: string;
beforeAll(async () => {
const app = (await import('../../../server')).default;
request = supertest(app);
const { token } = await createAndLoginUser(request);
authToken = token;
});
it('GET /api/auth/me returns user profile', async () => {
const response = await request
.get('/api/auth/me')
.set('Authorization', `Bearer ${authToken}`);
expect(response.status).toBe(200);
expect(response.body.user.email).toBeDefined();
});
});
```
**Database Cleanup**:
```typescript
import { cleanupDb } from '@/tests/utils/cleanup';
afterAll(async () => {
await cleanupDb({ users: [testUserId] });
});
```
### E2E Tests
**Purpose**: Test complete user journeys through the application.
**Timeout**: 120 seconds (for long-running flows)
**Current E2E Tests**:
- `auth.e2e.test.ts` - Registration, login, password reset
- `flyer-upload.e2e.test.ts` - Complete flyer upload pipeline
- `user-journey.e2e.test.ts` - Full user workflow
- `admin-authorization.e2e.test.ts` - Admin-specific flows
- `admin-dashboard.e2e.test.ts` - Admin dashboard functionality
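A hedged sketch of the E2E style using the shared test helpers described below; the flyer endpoint path is an assumption, and the real suites cover longer flows such as flyer upload and shopping lists:

```typescript
import supertest from 'supertest';
import { describe, it, beforeAll, expect } from 'vitest';
import { createAndLoginUser } from '@/tests/utils/testHelpers';

describe('User journey (E2E)', () => {
  let request: ReturnType<typeof supertest>;
  let authToken: string;

  beforeAll(async () => {
    const app = (await import('../../../server')).default;
    request = supertest(app);
    // createAndLoginUser comes from the shared test helpers.
    const { token } = await createAndLoginUser(request);
    authToken = token;
  });

  it('walks a core flow end to end', async () => {
    // '/api/flyers' is an assumed path used for illustration only.
    const flyers = await request.get('/api/flyers').set('Authorization', `Bearer ${authToken}`);
    expect(flyers.status).toBe(200);

    const me = await request.get('/api/auth/me').set('Authorization', `Bearer ${authToken}`);
    expect(me.body.user.email).toBeDefined();
  });
});
```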
### Mock Factories
The project uses comprehensive mock factories (`src/tests/utils/mockFactories.ts`, 1553 lines) for creating test data:
```typescript
import {
createMockUser,
createMockFlyer,
createMockFlyerItem,
createMockRecipe,
resetMockIds,
} from '@/tests/utils/mockFactories';
beforeEach(() => {
resetMockIds(); // Ensure deterministic IDs
});
it('creates flyer with items', () => {
const flyer = createMockFlyer({ store_name: 'TestMart' });
const items = [createMockFlyerItem({ flyer_id: flyer.flyer_id })];
// ...
});
```
**Factory Coverage**: 90+ factory functions for all domain entities including users, flyers, recipes, shopping lists, budgets, achievements, etc.
### Test Utilities
| Utility | Purpose |
| ------- | ------- |
| `renderWithProviders()` | Wrap components with AppProviders + Router |
| `createAndLoginUser()` | Create user and return auth token |
| `cleanupDb()` | Database cleanup respecting FK constraints |
| `createTestApp()` | Create Express app for route testing |
| `poll()` | Polling utility for async operations |
### Coverage Configuration
**Coverage Provider**: v8 (built-in Vitest)
**Report Directories**:
- `.coverage/unit/` - Unit test coverage
- `.coverage/integration/` - Integration test coverage
- `.coverage/e2e/` - E2E test coverage
**Excluded from Coverage**:
- `src/index.tsx`, `src/main.tsx` (entry points)
- `src/tests/**` (test files themselves)
- `src/**/*.d.ts` (type declarations)
- `src/components/icons/**` (icon components)
- `src/db/seed*.ts` (database seeding scripts)
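An illustrative coverage fragment for the unit config (the real settings live in `vite.config.ts` and the other Vitest configs):

```typescript
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    coverage: {
      provider: 'v8',
      reportsDirectory: '.coverage/unit',
      exclude: ['src/index.tsx', 'src/main.tsx', 'src/tests/**', 'src/**/*.d.ts'],
    },
  },
});
```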
### npm Scripts
```bash
# Run all tests
npm run test
# Run by level
npm run test:unit # Unit tests only (jsdom)
npm run test:integration # Integration tests only (node)
# With coverage
npm run test:coverage # Unit + Integration with reports
# Clean coverage directories
npm run clean
```
### Test Timeouts
| Test Type | Timeout | Rationale |
| --------- | ------- | --------- |
| Unit | 5 seconds | Fast, isolated tests |
| Integration | 60 seconds | AI service calls, DB operations |
| E2E | 120 seconds | Full user flow with multiple API calls |
## Best Practices
### When to Write Each Test Type
1. **Unit Tests** (required):
- Pure functions and utilities
- React components (rendering, user interactions)
- Custom hooks
- Service methods with mocked dependencies
- Repository methods
2. **Integration Tests** (required for API changes):
- New API endpoints
- Authentication/authorization flows
- Middleware behavior
- Database query correctness
3. **E2E Tests** (for critical paths):
- User registration and login
- Core business flows (flyer upload, shopping lists)
- Admin operations
### Test Isolation Guidelines
1. **Reset mock IDs**: Call `resetMockIds()` in `beforeEach()`
2. **Unique test data**: Use timestamps or UUIDs for emails/usernames
3. **Clean up after tests**: Use `cleanupDb()` in `afterAll()`
4. **Don't share state**: Each test should be independent
### Mocking Guidelines
1. **Unit tests**: Mock external dependencies (DB, APIs, services)
2. **Integration tests**: Mock only external APIs (AI services)
3. **E2E tests**: Minimal mocking, use real services where possible
## Key Files
- `vite.config.ts` - Unit test configuration
- `vitest.config.integration.ts` - Integration test configuration
- `vitest.config.e2e.ts` - E2E test configuration
- `vitest.workspace.ts` - Workspace orchestration
- `src/tests/setup/tests-setup-unit.ts` - Global mocks (488 lines)
- `src/tests/setup/integration-global-setup.ts` - Server + DB setup
- `src/tests/utils/mockFactories.ts` - Mock factories (1553 lines)
- `src/tests/utils/testHelpers.ts` - Test utilities
## Future Enhancements
1. **Browser E2E Tests**: Consider adding Playwright for actual browser testing
2. **Visual Regression**: Screenshot comparison for UI components
3. **Performance Testing**: Add benchmarks for critical paths
4. **Mutation Testing**: Verify test quality with mutation testing tools
5. **Coverage Thresholds**: Define minimum coverage requirements per module

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Partially Implemented
## Context
@@ -16,3 +16,255 @@ We will establish a formal Design System and Component Library. This will involv
- **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
- **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
## Implementation Status
### What's Implemented
The codebase has a solid foundation for a design system:
- **Tailwind CSS v4.1.17** as the styling solution
- **Dark mode** fully implemented with system preference detection
- **55 custom icon components** for consistent iconography
- **Component organization** with shared vs. feature-specific separation
- **Accessibility patterns** with ARIA attributes and focus management
### What's Not Yet Implemented
- **Storybook** is not yet installed or configured
- **Formal design token documentation** (colors, typography, spacing)
- **Visual regression testing** for component changes
## Implementation Details
### Component Library Structure
```text
src/
├── components/ # 30+ shared UI components
│ ├── icons/ # 55 SVG icon components
│ ├── Header.tsx
│ ├── Footer.tsx
│ ├── LoadingSpinner.tsx
│ ├── ErrorDisplay.tsx
│ ├── ConfirmationModal.tsx
│ ├── DarkModeToggle.tsx
│ ├── StatCard.tsx
│ ├── PasswordInput.tsx
│ └── ...
├── features/ # Feature-specific components
│ ├── charts/ # PriceChart, PriceHistoryChart
│ ├── flyer/ # FlyerDisplay, FlyerList, FlyerUploader
│ ├── shopping/ # ShoppingListComponent, WatchedItemsList
│ └── voice-assistant/ # VoiceAssistant
├── layouts/ # Page layouts
│ └── MainLayout.tsx
├── pages/ # Page components
│ └── admin/components/ # Admin-specific components
└── providers/ # Context providers
```
### Styling Approach
**Tailwind CSS** with utility-first classes:
```typescript
// Component example with consistent styling patterns
<button className="px-4 py-2 bg-brand-primary text-white rounded-lg
hover:bg-brand-dark transition-colors duration-200
focus:outline-none focus:ring-2 focus:ring-brand-primary
focus:ring-offset-2 dark:focus:ring-offset-gray-800">
Click me
</button>
```
**Common Utility Patterns**:
| Pattern | Classes |
| ------- | ------- |
| Card container | `bg-white dark:bg-gray-800 rounded-lg shadow-md p-6` |
| Primary button | `bg-brand-primary hover:bg-brand-dark text-white rounded-lg px-4 py-2` |
| Secondary button | `bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-200` |
| Input field | `border border-gray-300 dark:border-gray-600 rounded-md px-3 py-2` |
| Focus ring | `focus:outline-none focus:ring-2 focus:ring-brand-primary` |
### Color System
**Brand Colors** (Tailwind theme extensions):
- `brand-primary` - Primary brand color (blue/teal)
- `brand-light` - Lighter variant
- `brand-dark` - Darker variant for hover states
- `brand-secondary` - Secondary accent color
**Semantic Colors**:
- Gray scale: `gray-50` through `gray-950`
- Error: `red-500`, `red-600`
- Success: `green-500`, `green-600`
- Warning: `yellow-500`, `orange-500`
- Info: `blue-500`, `blue-600`
### Dark Mode Implementation
Dark mode is fully implemented using Tailwind's `dark:` variant:
```typescript
// Initialization in useAppInitialization hook
const initializeDarkMode = () => {
// Priority: user profile > localStorage > system preference
const stored = localStorage.getItem('darkMode');
const systemPreference = window.matchMedia('(prefers-color-scheme: dark)').matches;
const isDarkMode = stored ? stored === 'true' : systemPreference;
document.documentElement.classList.toggle('dark', isDarkMode);
return isDarkMode;
};
```
**Usage in components**:
```typescript
<div className="bg-white dark:bg-gray-800 text-gray-900 dark:text-white">
Content adapts to theme
</div>
```
### Icon System
**55 custom SVG icon components** in `src/components/icons/`:
```typescript
// Icon component pattern
interface IconProps extends React.SVGProps<SVGSVGElement> {
title?: string;
}
export const CheckCircleIcon: React.FC<IconProps> = ({ title, ...props }) => (
<svg {...props} fill="currentColor" viewBox="0 0 24 24">
{title && <title>{title}</title>}
<path d="..." />
</svg>
);
```
**Usage**:
```typescript
<CheckCircleIcon className="w-5 h-5 text-green-500" title="Success" />
```
**External icons**: Lucide React (`lucide-react` v0.555.0) used for additional icons.
### Accessibility Patterns
**ARIA Attributes**:
```typescript
// Modal pattern
<div role="dialog" aria-modal="true" aria-labelledby="modal-title">
<h2 id="modal-title">Modal Title</h2>
</div>
// Button with label
<button aria-label="Close modal">
<XMarkIcon aria-hidden="true" />
</button>
// Loading state
<div role="status" aria-live="polite">
<LoadingSpinner />
</div>
```
**Focus Management**:
- Consistent focus rings: `focus:ring-2 focus:ring-brand-primary focus:ring-offset-2`
- Dark mode offset: `dark:focus:ring-offset-gray-800`
- No outline: `focus:outline-none` (using ring instead)
### State Management
**Context Providers** (see ADR-005):
| Provider | Purpose |
| -------- | ------- |
| `AuthProvider` | Authentication state |
| `ModalProvider` | Modal open/close state |
| `FlyersProvider` | Flyer data |
| `MasterItemsProvider` | Grocery items |
| `UserDataProvider` | User-specific data |
**Provider Hierarchy** in `AppProviders.tsx`:
```typescript
<QueryClientProvider>
<ModalProvider>
<AuthProvider>
<FlyersProvider>
<MasterItemsProvider>
<UserDataProvider>
{children}
</UserDataProvider>
</MasterItemsProvider>
</FlyersProvider>
</AuthProvider>
</ModalProvider>
</QueryClientProvider>
```
## Key Files
- `tailwind.config.js` - Tailwind CSS configuration
- `src/index.css` - Tailwind CSS entry point
- `src/components/` - Shared UI components
- `src/components/icons/` - Icon component library (55 icons)
- `src/providers/AppProviders.tsx` - Context provider composition
- `src/hooks/useAppInitialization.ts` - Dark mode initialization
## Component Guidelines
### When to Create Shared Components
Create a shared component in `src/components/` when:
1. Used in 3+ places across the application
2. Represents a reusable UI pattern (buttons, cards, modals)
3. Has consistent styling/behavior requirements
### Naming Conventions
- **Components**: PascalCase (`LoadingSpinner.tsx`)
- **Icons**: PascalCase with `Icon` suffix (`CheckCircleIcon.tsx`)
- **Hooks**: camelCase with `use` prefix (`useModal.ts`)
- **Contexts**: PascalCase with `Context` suffix (`AuthContext.tsx`)
### Styling Guidelines
1. Use Tailwind utility classes exclusively
2. Include dark mode variants for all colors: `bg-white dark:bg-gray-800`
3. Add focus states for interactive elements
4. Use semantic color names from the design system
## Future Enhancements (Storybook Setup)
To complete ADR-012 implementation:
1. **Install Storybook**:
```bash
npx storybook@latest init
```
2. **Create stories for core components**:
- Button variants
- Form inputs (PasswordInput, etc.)
- Modal components
- Loading states
- Icon showcase
3. **Add visual regression testing** with Chromatic or Percy
4. **Document design tokens** formally in Storybook
5. **Create component composition guidelines**

@@ -2,17 +2,288 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Implemented
**Implemented**: 2026-01-09
## Context
The project is currently run using `pm2`, and the `README.md` contains manual setup instructions. While functional, this lacks the portability, scalability, and consistency of modern deployment practices.
The project is currently run using `pm2`, and the `README.md` contains manual setup instructions. While functional, this lacks the portability, scalability, and consistency of modern deployment practices. Local development environments also suffered from inconsistency issues.
## Decision
We will standardize the deployment process by containerizing the application using **Docker**. This will involve defining a `Dockerfile` for building a production-ready image and a `docker-compose.yml` file for orchestrating the application, database, and other services (like Redis) in a development environment.
We will standardize the deployment process using a hybrid approach:
1. **PM2 for Production**: Use PM2 cluster mode for process management, load balancing, and zero-downtime reloads.
2. **Docker/Podman for Development**: Provide a complete containerized development environment with automatic initialization.
3. **VS Code Dev Containers**: Enable one-click development environment setup.
4. **Gitea Actions for CI/CD**: Automated deployment pipelines handle builds and deployments.
## Consequences
- **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers. Improves portability and scalability of the application.
- **Negative**: Requires learning Docker and containerization concepts. Adds `Dockerfile` and `docker-compose.yml` to the project's configuration.
- **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers to a single "Reopen in Container" action. Improves portability and scalability of the application.
- **Negative**: Requires Docker/Podman installation. Container builds take time on first setup.
## Implementation Details
### Quick Start (Development)
```bash
# Prerequisites:
# - Docker Desktop or Podman installed
# - VS Code with "Dev Containers" extension
# Option 1: VS Code Dev Containers (Recommended)
# 1. Open project in VS Code
# 2. Click "Reopen in Container" when prompted
# 3. Wait for initialization to complete
# 4. Development server starts automatically
# Option 2: Manual Docker Compose
podman-compose -f compose.dev.yml up -d
podman exec -it flyer-crawler-dev bash
./scripts/docker-init.sh
npm run dev:container
```
### Container Services Architecture
```text
┌─────────────────────────────────────────────────────────────┐
│ Development Environment │
├─────────────────────────────────────────────────────────────┤
│ │
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
│ │ app │ │ postgres │ │ redis │ │
│ │ (Node.js) │───▶│ (PostGIS) │ │ (Cache) │ │
│ │ │───▶│ │ │ │ │
│ └─────────────┘ └─────────────┘ └─────────────┘ │
│ :3000/:3001 :5432 :6379 │
│ │
└─────────────────────────────────────────────────────────────┘
```
### compose.dev.yml Services
| Service | Image | Purpose | Healthcheck |
| ---------- | ----------------------- | ---------------------- | ---------------- |
| `app` | Custom (Dockerfile.dev) | Node.js application | HTTP /api/health |
| `postgres` | postgis/postgis:15-3.4 | Database with PostGIS | pg_isready |
| `redis` | redis:alpine | Caching and job queues | redis-cli ping |
### Automatic Initialization
The container initialization script (`scripts/docker-init.sh`) performs:
1. **npm install** - Installs dependencies into isolated volume
2. **Wait for PostgreSQL** - Polls until database is ready
3. **Wait for Redis** - Polls until Redis is responding
4. **Schema Check** - Detects if database needs initialization
5. **Database Setup** - Runs `npm run db:reset:dev` if needed (schema + seed data)
### Development Dockerfile
Located in `Dockerfile.dev`:
```dockerfile
FROM ubuntu:22.04
ENV DEBIAN_FRONTEND=noninteractive
# Install Node.js 20.x LTS + database clients
RUN apt-get update && apt-get install -y \
curl git build-essential python3 \
postgresql-client redis-tools \
&& rm -rf /var/lib/apt/lists/*
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y nodejs
WORKDIR /app
ENV NODE_ENV=development
ENV NODE_OPTIONS='--max-old-space-size=8192'
CMD ["bash"]
```
### Environment Configuration
Copy `.env.example` to `.env` for local overrides (optional for containers):
```bash
# Container defaults (set in compose.dev.yml)
DB_HOST=postgres # Use Docker service name, not IP
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=flyer_crawler_dev
REDIS_URL=redis://redis:6379
```
### VS Code Dev Container Configuration
Located in `.devcontainer/devcontainer.json`:
| Lifecycle Hook | Timing | Action |
| ------------------- | ----------------- | ------------------------------ |
| `initializeCommand` | Before container | Start Podman machine (Windows) |
| `postCreateCommand` | Container created | Run `docker-init.sh` |
| `postAttachCommand` | VS Code attached | Start dev server |
### Default Test Accounts
After initialization, these accounts are available:
| Role | Email | Password |
| ----- | ------------------- | --------- |
| Admin | `admin@example.com` | adminpass |
| User | `user@example.com` | userpass |
---
## Production Deployment (PM2)
### PM2 Ecosystem Configuration
Located in `ecosystem.config.cjs`:
```javascript
module.exports = {
  apps: [
    {
      // API Server - Cluster mode for load balancing
      name: 'flyer-crawler-api',
      script: './node_modules/.bin/tsx',
      args: 'server.ts',
      max_memory_restart: '500M',
      instances: 'max', // Use all CPU cores
      exec_mode: 'cluster', // Enable cluster mode
      kill_timeout: 5000, // Graceful shutdown timeout
      // Restart configuration
      max_restarts: 40,
      exp_backoff_restart_delay: 100,
      min_uptime: '10s',
      env_production: {
        NODE_ENV: 'production',
        cwd: '/var/www/flyer-crawler.projectium.com',
      },
      env_test: {
        NODE_ENV: 'test',
        cwd: '/var/www/flyer-crawler-test.projectium.com',
      },
    },
    {
      // Background Worker - Single instance
      name: 'flyer-crawler-worker',
      script: './node_modules/.bin/tsx',
      args: 'src/services/worker.ts',
      max_memory_restart: '1G',
      kill_timeout: 10000, // Workers need more time for jobs
      // ... similar config
    },
  ],
};
```
### Deployment Directory Structure
```text
/var/www/
├── flyer-crawler.projectium.com/ # Production
│ ├── server.ts
│ ├── ecosystem.config.cjs
│ ├── package.json
│ ├── flyer-images/
│ │ ├── icons/
│ │ └── archive/
│ └── ...
└── flyer-crawler-test.projectium.com/ # Test environment
└── ... (same structure)
```
### Environment-Specific Configuration
| Environment | Port | Redis DB | PM2 Process Suffix |
| ----------- | ---- | -------- | ------------------ |
| Production | 3000 | 0 | (none) |
| Test | 3001 | 1 | `-test` |
| Development | 3000 | 0 | `-dev` |
### PM2 Commands Reference
```bash
# Start/reload with environment
pm2 startOrReload ecosystem.config.cjs --env production --update-env
# Save process list for startup
pm2 save
# View logs
pm2 logs flyer-crawler-api --lines 50
# Monitor processes
pm2 monit
# List all processes
pm2 list
# Describe process details
pm2 describe flyer-crawler-api
```
### Resource Limits
| Process | Memory Limit | Restart Delay | Kill Timeout |
| ---------------- | ------------ | ------------------------ | ------------ |
| API Server | 500MB | Exponential (100ms base) | 5s |
| Worker | 1GB | Exponential (100ms base) | 10s |
| Analytics Worker | 1GB | Exponential (100ms base) | 10s |
---
## Troubleshooting
### Container Issues
```bash
# Reset everything and start fresh
podman-compose -f compose.dev.yml down -v
podman-compose -f compose.dev.yml up -d --build
# View container logs
podman-compose -f compose.dev.yml logs -f app
# Connect to database manually
podman exec -it flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev
# Rebuild just the app container
podman-compose -f compose.dev.yml build app
```
### Common Issues
| Issue | Solution |
| ------------------------ | --------------------------------------------------------------- |
| "Database not ready" | Wait for postgres healthcheck, or run `docker-init.sh` manually |
| "node_modules not found" | Run `npm install` inside container |
| "Permission denied" | Ensure scripts have execute permission: `chmod +x scripts/*.sh` |
| "Network unreachable" | Use service names (postgres, redis) not IPs |
## Key Files
- `compose.dev.yml` - Docker Compose configuration
- `Dockerfile.dev` - Development container definition
- `.devcontainer/devcontainer.json` - VS Code Dev Container config
- `scripts/docker-init.sh` - Container initialization script
- `.env.example` - Environment variable template
- `ecosystem.config.cjs` - PM2 production configuration
- `.gitea/workflows/deploy-to-prod.yml` - Production deployment pipeline
- `.gitea/workflows/deploy-to-test.yml` - Test deployment pipeline
## Related ADRs
- [ADR-017](./0017-ci-cd-and-branching-strategy.md) - CI/CD Strategy
- [ADR-038](./0038-graceful-shutdown-pattern.md) - Graceful Shutdown Pattern

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
## Context
@@ -20,3 +20,197 @@ We will implement a multi-layered security approach for the API:
- **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
- **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.
## Implementation Status
### What's Implemented
- **Helmet** - Security headers middleware with CSP, HSTS, and more
- **Rate Limiting** - Comprehensive implementation with 17+ specific limiters
- **Input Validation** - Zod-based request validation on all routes
- **File Upload Security** - MIME type validation, size limits, filename sanitization
- **Error Handling** - Production-safe error responses (no sensitive data leakage)
- **Request Timeout** - 5-minute timeout protection
- **Secure Cookies** - httpOnly and secure flags for authentication cookies
### Not Required
- **CORS** - Not needed (API and frontend are same-origin)
## Implementation Details
### Helmet Security Headers
Using **helmet v8.x** configured in `server.ts` as the first middleware after app initialization.
**Security Headers Applied**:
| Header | Configuration | Purpose |
| ------ | ------------- | ------- |
| Content-Security-Policy | Custom directives | Prevents XSS, code injection |
| Strict-Transport-Security | 1 year, includeSubDomains, preload | Forces HTTPS connections |
| X-Content-Type-Options | nosniff | Prevents MIME type sniffing |
| X-Frame-Options | DENY | Prevents clickjacking |
| X-XSS-Protection | 0 (disabled) | Deprecated, CSP preferred |
| Referrer-Policy | strict-origin-when-cross-origin | Controls referrer information |
| Cross-Origin-Resource-Policy | cross-origin | Allows external resource loading |
**Content Security Policy Directives**:
```typescript
contentSecurityPolicy: {
  directives: {
    defaultSrc: ["'self'"],
    scriptSrc: ["'self'", "'unsafe-inline'"], // React inline scripts
    styleSrc: ["'self'", "'unsafe-inline'"], // Tailwind inline styles
    imgSrc: ["'self'", 'data:', 'blob:', 'https:'], // External images
    fontSrc: ["'self'", 'https:', 'data:'],
    connectSrc: ["'self'", 'https:', 'wss:'], // API + WebSocket
    frameSrc: ["'none'"], // No iframes
    objectSrc: ["'none'"], // No plugins
    upgradeInsecureRequests: [], // Production only
  },
}
```
**HSTS Configuration**:
- Max-age: 1 year (31536000 seconds)
- Includes subdomains
- Preload-ready for browser HSTS lists
### Rate Limiting
Using **express-rate-limit v8.2.1** with a centralized configuration in `src/config/rateLimiters.ts`.
**Standard Configuration**:
```typescript
const standardConfig = {
standardHeaders: true, // Sends RateLimit-* headers
legacyHeaders: false,
skip: shouldSkipRateLimit, // Disabled in test environment
};
```
**Rate Limiters by Category**:
| Category | Limiter | Window | Max Requests |
| -------- | ------- | ------ | ------------ |
| **Authentication** | loginLimiter | 15 min | 5 |
| | registerLimiter | 1 hour | 5 |
| | forgotPasswordLimiter | 15 min | 5 |
| | resetPasswordLimiter | 15 min | 10 |
| | refreshTokenLimiter | 15 min | 20 |
| | logoutLimiter | 15 min | 10 |
| **Public/User Read** | publicReadLimiter | 15 min | 100 |
| | userReadLimiter | 15 min | 100 |
| | userUpdateLimiter | 15 min | 100 |
| **Sensitive Operations** | userSensitiveUpdateLimiter | 1 hour | 5 |
| | adminTriggerLimiter | 15 min | 30 |
| **AI/Costly** | aiGenerationLimiter | 15 min | 20 |
| | geocodeLimiter | 1 hour | 100 |
| | priceHistoryLimiter | 15 min | 50 |
| **Uploads** | adminUploadLimiter | 15 min | 20 |
| | aiUploadLimiter | 15 min | 10 |
| | batchLimiter | 15 min | 50 |
| **Tracking** | trackingLimiter | 15 min | 200 |
| | reactionToggleLimiter | 15 min | 150 |
**Test Environment Handling**:
Rate limiting is automatically disabled in test environment via `shouldSkipRateLimit` utility (`src/utils/rateLimit.ts`). Tests can opt-in to rate limiting by setting the `x-test-rate-limit-enable: true` header.
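A sketch of how one limiter from the table is defined, with values taken from the table above (the real definitions live in `src/config/rateLimiters.ts`):

```typescript
import rateLimit from 'express-rate-limit';
import { shouldSkipRateLimit } from '../utils/rateLimit';

export const loginLimiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  limit: 5, // 5 attempts per window
  standardHeaders: true, // Sends RateLimit-* headers
  legacyHeaders: false,
  skip: shouldSkipRateLimit, // disabled in the test environment
});

// Applied per route, e.g. router.post('/login', loginLimiter, loginHandler) - loginHandler is illustrative.
```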
### Input Validation
**Zod Schema Validation** (`src/middleware/validation.middleware.ts`):
- Type-safe parsing and coercion for params, query, and body
- Applied to all API routes via `validateRequest()` middleware
- Returns structured validation errors with field-level details
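A minimal sketch of the `validateRequest()` idea, assuming a Zod schema per route (the real middleware's parsing and error shape may differ):

```typescript
import type { Request, Response, NextFunction } from 'express';
import { z, ZodError } from 'zod';

export const validateRequest =
  (schema: z.ZodTypeAny) => (req: Request, res: Response, next: NextFunction) => {
    try {
      // Parse params, query, and body together so coercion and defaults are applied.
      schema.parse({ params: req.params, query: req.query, body: req.body });
      next();
    } catch (err) {
      if (err instanceof ZodError) {
        // Structured, field-level validation errors.
        return res.status(400).json({ errors: err.issues });
      }
      next(err);
    }
  };
```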
**Filename Sanitization** (`src/utils/stringUtils.ts`):
```typescript
// Removes dangerous characters from uploaded filenames
sanitizeFilename(filename: string): string
```
### File Upload Security
**Multer Configuration** (`src/middleware/multer.middleware.ts`):
- MIME type validation via `imageFileFilter` (only image/* allowed)
- File size limits (2MB for logos, configurable per upload type)
- Unique filenames using timestamps + random suffixes
- User-scoped storage paths
### Error Handling
**Production-Safe Responses** (`src/middleware/errorHandler.ts`):
- Production mode: Returns generic error message with tracking ID
- Development mode: Returns detailed error information
- Sensitive error details are logged but never exposed to clients
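A hedged sketch of this behaviour (the `errorId` field name and the logging call are illustrative; the real handler uses the structured logger from ADR-004):

```typescript
import type { Request, Response, NextFunction } from 'express';
import { randomUUID } from 'node:crypto';

export function errorHandler(err: Error, req: Request, res: Response, _next: NextFunction) {
  const errorId = randomUUID(); // tracking ID that is safe to show to the client

  // Full details go to the server logs only.
  console.error({ errorId, message: err.message, stack: err.stack, path: req.path });

  if (process.env.NODE_ENV === 'production') {
    return res.status(500).json({ error: 'Internal server error', errorId });
  }
  return res.status(500).json({ error: err.message, stack: err.stack, errorId });
}
```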
### Request Security
**Timeout Protection** (`server.ts`):
- 5-minute request timeout via `connect-timeout` middleware
- Prevents resource exhaustion from long-running requests
**Secure Cookies**:
```typescript
// Cookie configuration for auth tokens
{
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'strict',
maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days for refresh token
}
```
### Request Logging
Per-request structured logging (ADR-004):
- Request ID tracking
- User ID and IP address logging
- Failed request details (4xx+) logged with headers and body
- Unhandled errors assigned unique error IDs
## Key Files
- `server.ts` - Helmet middleware configuration (security headers)
- `src/config/rateLimiters.ts` - Rate limiter definitions (17+ limiters)
- `src/utils/rateLimit.ts` - Rate limit skip logic for testing
- `src/middleware/validation.middleware.ts` - Zod-based request validation
- `src/middleware/errorHandler.ts` - Production-safe error handling
- `src/middleware/multer.middleware.ts` - Secure file upload configuration
- `src/utils/stringUtils.ts` - Filename sanitization
## Future Enhancements
1. **Configure CORS** (if needed for cross-origin access):
```bash
npm install cors @types/cors
```
Add to `server.ts`:
```typescript
import cors from 'cors';
app.use(cors({
origin: process.env.ALLOWED_ORIGINS?.split(',') || 'http://localhost:3000',
credentials: true,
}));
```
2. **Redis-backed rate limiting**: For distributed deployments, use `rate-limit-redis` store
3. **CSP Nonce**: Generate per-request nonces for stricter script-src policy
4. **Report-Only CSP**: Add `Content-Security-Policy-Report-Only` header for testing policy changes

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
@@ -10,9 +12,186 @@ The project has Gitea workflows but lacks a documented standard for how code mov
## Decision
We will formalize the end-to-end CI/CD process. This ADR will define the project's **branching strategy** (e.g., GitFlow or Trunk-Based Development), establish mandatory checks in the pipeline (e.g., linting, unit tests, vulnerability scanning), and specify the process for building and publishing Docker images (`ADR-014`) to a registry.
We will formalize the end-to-end CI/CD process using:
1. **Trunk-Based Development**: All work is merged to `main` branch.
2. **Automated Test Deployment**: Every push to `main` triggers deployment to test environment.
3. **Manual Production Deployment**: Production deployments require explicit confirmation.
4. **Semantic Versioning**: Automated version bumping on deployments.
## Consequences
- **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
- **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.
## Implementation Details
### Branching Strategy
**Trunk-Based Development**:
```text
main ─────●─────●─────●─────●─────●─────▶
│ │ │ │ │
│ │ │ │ └── Deploy to Prod (manual)
│ │ │ └── v0.9.70 (patch bump)
│ │ └── Deploy to Test (auto)
│ └── v0.9.69 (patch bump)
└── Feature complete
```
- All development happens on `main` branch
- Feature branches are short-lived (< 1 day)
- Every merge to `main` triggers test deployment
- Production deploys are manual with confirmation
### Pipeline Stages
**Deploy to Test** (Automatic on push to `main`):
```yaml
jobs:
  deploy-to-test:
    steps:
      - Checkout code
      - Setup Node.js 20
      - Install dependencies (npm ci)
      - Bump patch version (npm version patch)
      - TypeScript type-check
      - Prettier check
      - ESLint check
      - Run unit tests with coverage
      - Run integration tests with coverage
      - Run E2E tests with coverage
      - Merge coverage reports
      - Check database schema hash
      - Build React application
      - Deploy to test server (rsync)
      - Install production dependencies
      - Reload PM2 processes
      - Update schema hash in database
```
**Deploy to Production** (Manual trigger):
```yaml
on:
  workflow_dispatch:
    inputs:
      confirmation:
        description: 'Type "deploy-to-prod" to confirm'
        required: true
jobs:
  deploy-production:
    steps:
      - Verify confirmation phrase
      - Checkout main branch
      - Install dependencies
      - Bump minor version (npm version minor)
      - Check production schema hash
      - Build React application
      - Deploy to production server
      - Reload PM2 processes
      - Update schema hash
```
### Version Bumping Strategy
| Trigger | Version Change | Example |
| -------------------------- | -------------- | --------------- |
| Push to main (test deploy) | Patch bump | 0.9.69 → 0.9.70 |
| Production deploy | Minor bump | 0.9.70 → 0.10.0 |
| Major release | Manual | 0.10.0 → 1.0.0 |
**Commit Message Format**:
```text
ci: Bump version to 0.9.70 [skip ci]
```
The `[skip ci]` tag prevents version bump commits from triggering another workflow.
### Database Schema Management
Schema changes are tracked via SHA-256 hash:
```sql
CREATE TABLE public.schema_info (
environment VARCHAR(50) PRIMARY KEY,
schema_hash VARCHAR(64) NOT NULL,
deployed_at TIMESTAMP DEFAULT NOW()
);
```
**Deployment Checks**:
1. Calculate hash of `sql/master_schema_rollup.sql`
2. Compare with hash in target database
3. If mismatch: **FAIL** deployment (manual migration required)
4. If match: Continue deployment
5. After deploy: Update hash in database
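A sketch of the hash comparison in TypeScript (the pipeline performs the equivalent check with shell commands inside the workflow; the table and file names come from above):

```typescript
import { createHash } from 'node:crypto';
import { readFileSync } from 'node:fs';
import { Pool } from 'pg';

export async function schemaMatches(pool: Pool, environment: string): Promise<boolean> {
  // Hash the rolled-up schema definition that ships with the repository.
  const sql = readFileSync('sql/master_schema_rollup.sql', 'utf8');
  const localHash = createHash('sha256').update(sql).digest('hex');

  // Compare against the hash recorded for this environment at the last deployment.
  const { rows } = await pool.query(
    'SELECT schema_hash FROM public.schema_info WHERE environment = $1',
    [environment],
  );
  return rows.length > 0 && rows[0].schema_hash === localHash;
}
```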
### Quality Gates
| Check | Required | Blocking |
| --------------------- | -------- | ---------------------- |
| TypeScript type-check | ✅ | No (continue-on-error) |
| Prettier formatting | ✅ | No |
| ESLint | ✅ | No |
| Unit tests | ✅ | No |
| Integration tests | ✅ | No |
| E2E tests | ✅ | No |
| Schema hash check | ✅ | **Yes** |
| Build | ✅ | **Yes** |
### Environment Variables
Secrets are injected from Gitea repository settings:
| Secret | Test | Production |
| -------------------------------------------------------------- | ------------------ | ------------- |
| `DB_DATABASE_TEST` / `DB_DATABASE_PROD` | flyer-crawler-test | flyer-crawler |
| `REDIS_PASSWORD_TEST` / `REDIS_PASSWORD_PROD` | \*\*\* | \*\*\* |
| `VITE_GOOGLE_GENAI_API_KEY_TEST` / `VITE_GOOGLE_GENAI_API_KEY` | \*\*\* | \*\*\* |
### Coverage Reporting
Coverage reports are generated and published:
```text
https://flyer-crawler-test.projectium.com/coverage/
```
Coverage merging combines:
- Unit test coverage (Vitest)
- Integration test coverage (Vitest)
- E2E test coverage (Vitest)
- Server V8 coverage (c8)
### Gitea Workflows
| Workflow | Trigger | Purpose |
| ----------------------------- | ------------ | ------------------------- |
| `deploy-to-test.yml` | Push to main | Automated test deployment |
| `deploy-to-prod.yml` | Manual | Production deployment |
| `manual-db-backup.yml` | Manual | Create database backup |
| `manual-db-restore.yml` | Manual | Restore from backup |
| `manual-db-reset-test.yml` | Manual | Reset test database |
| `manual-db-reset-prod.yml` | Manual | Reset production database |
| `manual-deploy-major.yml` | Manual | Major version release |
| `manual-redis-flush-prod.yml` | Manual | Flush Redis cache |
## Key Files
- `.gitea/workflows/deploy-to-test.yml` - Test deployment pipeline
- `.gitea/workflows/deploy-to-prod.yml` - Production deployment pipeline
- `.gitea/workflows/manual-db-backup.yml` - Database backup workflow
- `ecosystem.config.cjs` - PM2 configuration
## Related ADRs
- [ADR-014](./0014-containerization-and-deployment-strategy.md) - Containerization Strategy
- [ADR-010](./0010-testing-strategy-and-standards.md) - Testing Strategy
- [ADR-019](./0019-data-backup-and-recovery-strategy.md) - Backup Strategy

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
@@ -16,3 +18,210 @@ We will implement a formal data backup and recovery strategy. This will involve
- **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
- **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.
## Implementation Details
### Backup Workflow
Located in `.gitea/workflows/manual-db-backup.yml`:
```yaml
name: Manual - Backup Production Database
on:
  workflow_dispatch:
    inputs:
      confirmation:
        description: 'Type "backup-production-db" to confirm'
        required: true
jobs:
  backup-database:
    runs-on: projectium.com
    env:
      DB_HOST: ${{ secrets.DB_HOST }}
      DB_PORT: ${{ secrets.DB_PORT }}
      DB_USER: ${{ secrets.DB_USER }}
      DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
      DB_NAME: ${{ secrets.DB_NAME_PROD }}
    steps:
      - name: Validate Secrets
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ]; then
            echo "ERROR: Database secrets not configured."
            exit 1
          fi
      - name: Create Database Backup
        run: |
          TIMESTAMP=$(date +'%Y%m%d-%H%M%S')
          BACKUP_FILENAME="flyer-crawler-prod-backup-${TIMESTAMP}.sql.gz"
          # Create compressed backup
          PGPASSWORD="$DB_PASSWORD" pg_dump \
            -h "$DB_HOST" -p "$DB_PORT" \
            -U "$DB_USER" -d "$DB_NAME" \
            --clean --if-exists | gzip > "$BACKUP_FILENAME"
          echo "backup_filename=$BACKUP_FILENAME" >> $GITEA_ENV
      - name: Upload Backup as Artifact
        uses: actions/upload-artifact@v3
        with:
          name: database-backup
          path: ${{ env.backup_filename }}
```
### Restore Workflow
Located in `.gitea/workflows/manual-db-restore.yml`:
```yaml
name: Manual - Restore Database from Backup
on:
  workflow_dispatch:
    inputs:
      confirmation:
        description: 'Type "restore-from-backup" to confirm'
        required: true
      backup_file:
        description: 'Path to backup file on server'
        required: true
jobs:
  restore-database:
    steps:
      - name: Verify Confirmation
        run: |
          if [ "${{ inputs.confirmation }}" != "restore-from-backup" ]; then
            exit 1
          fi
      - name: Restore Database
        run: |
          # Decompress and restore
          gunzip -c "${{ inputs.backup_file }}" | \
            PGPASSWORD="$DB_PASSWORD" psql \
            -h "$DB_HOST" -p "$DB_PORT" \
            -U "$DB_USER" -d "$DB_NAME"
```
### Backup Command Reference
**Manual Backup**:
```bash
# Create compressed backup
PGPASSWORD="password" pg_dump \
-h localhost -p 5432 \
-U dbuser -d flyer-crawler \
--clean --if-exists | gzip > backup-$(date +%Y%m%d).sql.gz
# List backup contents (without restoring)
gunzip -c backup-20260109.sql.gz | head -100
```
**Manual Restore**:
```bash
# Restore from compressed backup
gunzip -c backup-20260109.sql.gz | \
PGPASSWORD="password" psql \
-h localhost -p 5432 \
-U dbuser -d flyer-crawler
```
### pg_dump Options
| Option | Purpose |
| ----------------- | ------------------------------ |
| `--clean` | Drop objects before recreating |
| `--if-exists` | Use IF EXISTS when dropping |
| `--no-owner` | Skip ownership commands |
| `--no-privileges` | Skip access privilege commands |
| `-F c` | Custom format (for pg_restore) |
| `-F p` | Plain text SQL (default) |
### Recovery Objectives
| Metric | Target | Current |
| ---------------------------------- | -------- | -------------- |
| **RPO** (Recovery Point Objective) | 24 hours | Manual trigger |
| **RTO** (Recovery Time Objective) | 1 hour | ~15 minutes |
### Backup Retention Policy
| Type | Retention | Storage |
| --------------- | --------- | ---------------- |
| Daily backups | 7 days | Gitea artifacts |
| Weekly backups | 4 weeks | Gitea artifacts |
| Monthly backups | 12 months | Off-site storage |
### Backup Verification
Periodically test backup integrity:
```bash
# Verify backup can be read
gunzip -t backup-20260109.sql.gz
# Test restore to a temporary database
createdb flyer-crawler-restore-test
gunzip -c backup-20260109.sql.gz | psql -d flyer-crawler-restore-test
# Verify data integrity...
dropdb flyer-crawler-restore-test
```
### Disaster Recovery Checklist
1. **Identify the Issue**
- Data corruption?
- Accidental deletion?
- Full database loss?
2. **Select Backup**
- Find most recent valid backup
- Download from Gitea artifacts or off-site storage
3. **Stop Application**
```bash
pm2 stop all
```
4. **Restore Database**
```bash
gunzip -c backup.sql.gz | psql -d flyer-crawler
```
5. **Verify Data**
- Check table row counts
- Verify recent data exists
- Test critical queries
6. **Restart Application**
```bash
pm2 start all
```
7. **Post-Mortem**
- Document incident
- Update procedures if needed
## Key Files
- `.gitea/workflows/manual-db-backup.yml` - Backup workflow
- `.gitea/workflows/manual-db-restore.yml` - Restore workflow
- `.gitea/workflows/manual-db-reset-test.yml` - Reset test database
- `.gitea/workflows/manual-db-reset-prod.yml` - Reset production database
- `sql/master_schema_rollup.sql` - Current schema definition
## Related ADRs
- [ADR-013](./0013-database-schema-migration-strategy.md) - Schema Migration Strategy
- [ADR-017](./0017-ci-cd-and-branching-strategy.md) - CI/CD Strategy

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
@@ -20,3 +22,195 @@ We will implement dedicated health check endpoints in the Express application.
- **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
- **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.
## Implementation Status
### What's Implemented
- **Liveness Probe** (`/api/health/live`) - Simple process health check
- **Readiness Probe** (`/api/health/ready`) - Comprehensive dependency health check
- **Startup Probe** (`/api/health/startup`) - Initial startup verification
- **Individual Service Checks** - Database, Redis, Storage endpoints
- **Detailed Health Response** - Service latency, status, and details
## Implementation Details
### Probe Endpoints
| Endpoint | Purpose | Checks | HTTP Status |
| --------------------- | --------------- | ------------------ | ----------------------------- |
| `/api/health/live` | Liveness probe | Process running | 200 = alive |
| `/api/health/ready` | Readiness probe | DB, Redis, Storage | 200 = ready, 503 = not ready |
| `/api/health/startup` | Startup probe | Database only | 200 = started, 503 = starting |
### Liveness Probe
The liveness probe is intentionally simple with no external dependencies:
```typescript
// GET /api/health/live
{
"status": "ok",
"timestamp": "2026-01-09T12:00:00.000Z"
}
```
**Usage**: If this endpoint fails to respond, the container should be restarted.
### Readiness Probe
The readiness probe checks all critical dependencies:
```typescript
// GET /api/health/ready
{
  "status": "healthy", // healthy | degraded | unhealthy
  "timestamp": "2026-01-09T12:00:00.000Z",
  "uptime": 3600.5,
  "services": {
    "database": {
      "status": "healthy",
      "latency": 5,
      "details": {
        "totalConnections": 10,
        "idleConnections": 8,
        "waitingConnections": 0
      }
    },
    "redis": {
      "status": "healthy",
      "latency": 2
    },
    "storage": {
      "status": "healthy",
      "latency": 1,
      "details": {
        "path": "/var/www/.../flyer-images"
      }
    }
  }
}
```
**Status Logic**:
- `healthy` - All critical services (database, Redis) are healthy
- `degraded` - Some non-critical issues (high connection wait, storage issues)
- `unhealthy` - Critical service unavailable (returns 503)
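A sketch of that aggregation logic (the real route in `src/routes/health.routes.ts` returns the fuller payload shown above; the check helpers here are stubs standing in for the actual database, Redis, and storage probes):

```typescript
import { Router } from 'express';

type ServiceStatus = 'healthy' | 'degraded' | 'unhealthy';
interface Check {
  status: ServiceStatus;
  latency?: number;
}

// Assumed helper stubs; the real checks time a trivial query, a Redis PING, and a storage write.
const checkDatabase = async (): Promise<Check> => ({ status: 'healthy', latency: 5 });
const checkRedis = async (): Promise<Check> => ({ status: 'healthy', latency: 2 });
const checkStorage = async (): Promise<Check> => ({ status: 'healthy', latency: 1 });

export const healthRouter = Router();

healthRouter.get('/ready', async (_req, res) => {
  const services = {
    database: await checkDatabase(),
    redis: await checkRedis(),
    storage: await checkStorage(),
  };

  // Critical services decide "unhealthy"; any other non-healthy check only degrades the status.
  const critical = [services.database.status, services.redis.status];
  let status: ServiceStatus = 'healthy';
  if (critical.includes('unhealthy')) {
    status = 'unhealthy';
  } else if (Object.values(services).some((s) => s.status !== 'healthy')) {
    status = 'degraded';
  }

  res.status(status === 'unhealthy' ? 503 : 200).json({
    status,
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
    services,
  });
});
```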
### Startup Probe
The startup probe is used during container initialization:
```typescript
// GET /api/health/startup
// Success (200):
{
  "status": "started",
  "timestamp": "2026-01-09T12:00:00.000Z",
  "database": { "status": "healthy", "latency": 5 }
}
// Still starting (503):
{
  "status": "starting",
  "message": "Waiting for database connection",
  "database": { "status": "unhealthy", "message": "..." }
}
```
### Individual Service Endpoints
For detailed diagnostics:
| Endpoint | Purpose |
| ----------------------- | ------------------------------- |
| `/api/health/ping` | Simple server responsiveness |
| `/api/health/db-schema` | Verify database tables exist |
| `/api/health/db-pool` | Database connection pool status |
| `/api/health/redis` | Redis connectivity |
| `/api/health/storage` | File storage accessibility |
| `/api/health/time` | Server time synchronization |
## Kubernetes Configuration Example
```yaml
apiVersion: v1
kind: Pod
spec:
  containers:
    - name: flyer-crawler
      livenessProbe:
        httpGet:
          path: /api/health/live
          port: 3001
        initialDelaySeconds: 10
        periodSeconds: 15
        failureThreshold: 3
      readinessProbe:
        httpGet:
          path: /api/health/ready
          port: 3001
        initialDelaySeconds: 5
        periodSeconds: 10
        failureThreshold: 3
      startupProbe:
        httpGet:
          path: /api/health/startup
          port: 3001
        initialDelaySeconds: 0
        periodSeconds: 5
        failureThreshold: 30 # Allow up to 150 seconds for startup
```
## Docker Compose Configuration Example
```yaml
services:
  api:
    image: flyer-crawler:latest
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:3001/api/health/ready']
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
```
## PM2 Configuration Example
For non-containerized deployments using PM2:
```javascript
// ecosystem.config.js
module.exports = {
apps: [
{
name: 'flyer-crawler',
script: 'dist/server.js',
// PM2 will check this endpoint
// and restart if it fails
health_check: {
url: 'http://localhost:3001/api/health/ready',
interval: 30000,
timeout: 10000,
},
},
],
};
```
## Key Files
- `src/routes/health.routes.ts` - Health check endpoint implementations
- `server.ts` - Health routes mounted at `/api/health`
## Service Health Thresholds
| Service | Healthy | Degraded | Unhealthy |
| -------- | ---------------------- | ----------------------- | ------------------- |
| Database | Responds to `SELECT 1` | > 3 waiting connections | Connection fails |
| Redis | `PING` returns `PONG` | N/A | Connection fails |
| Storage | Write access to path | N/A | Path not accessible |

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
@@ -10,10 +12,203 @@ The project contains both frontend (React) and backend (Node.js) code. While lin
## Decision
We will mandate the use of **Prettier** for automated code formatting and a unified **ESLint** configuration for code quality rules across both frontend and backend. This will be enforced automatically using a pre-commit hook managed by a tool like **Husky**.
We will mandate the use of **Prettier** for automated code formatting and a unified **ESLint** configuration for code quality rules across both frontend and backend. This will be enforced automatically using a pre-commit hook managed by **Husky** and **lint-staged**.
## Consequences
**Positive**: Improves developer experience and team velocity by automating code consistency. Reduces time spent on stylistic code review comments. Enhances code readability and maintainability.
**Negative**: Requires an initial setup and configuration of Prettier, ESLint, and Husky. May require a one-time reformatting of the entire codebase.
## Implementation Status
### What's Implemented
- **Prettier Configuration** - `.prettierrc` with consistent settings
- **Prettier Ignore** - `.prettierignore` to exclude generated files
- **ESLint Configuration** - `eslint.config.js` with TypeScript and React support
- **ESLint + Prettier Integration** - `eslint-config-prettier` to avoid conflicts
- **Husky Pre-commit Hooks** - Automatic enforcement on commit
- **lint-staged** - Run linters only on staged files for performance
## Implementation Details
### Prettier Configuration
The project uses a consistent Prettier configuration in `.prettierrc`:
```json
{
"semi": true,
"trailingComma": "all",
"singleQuote": true,
"printWidth": 100,
"tabWidth": 2,
"useTabs": false,
"endOfLine": "auto"
}
```
### ESLint Configuration
ESLint is configured with:
- TypeScript support via `typescript-eslint`
- React hooks rules via `eslint-plugin-react-hooks`
- React Refresh support for HMR
- Prettier compatibility via `eslint-config-prettier`
- **Relaxed rules for test files** (see below)
```javascript
// eslint.config.js (ESLint v9 flat config)
import globals from 'globals';
import tseslint from 'typescript-eslint';
import pluginReact from 'eslint-plugin-react';
import pluginReactHooks from 'eslint-plugin-react-hooks';
import pluginReactRefresh from 'eslint-plugin-react-refresh';
import eslintConfigPrettier from 'eslint-config-prettier';
export default tseslint.config(
// ... configurations
eslintConfigPrettier, // Must be last to override formatting rules
);
```
### Relaxed Linting Rules for Test Files
**Decision Date**: 2026-01-09
**Status**: Active (revisit when product nears final release)
The following ESLint rules are relaxed for test files (`*.test.ts`, `*.test.tsx`, `*.spec.ts`, `*.spec.tsx`):
| Rule | Setting | Rationale |
| ------------------------------------ | ------- | ---------------------------------------------------------------------------------------------------------- |
| `@typescript-eslint/no-explicit-any` | `off` | Mocking complexity often requires `any`; strict typing in tests adds friction without proportional benefit |
**Rationale**:
1. **Tests are not production code** - The primary goal of tests is verifying behavior, not type safety of the test code itself
2. **Mocking complexity** - Mocking libraries often require type gymnastics; `any` simplifies creating partial mocks and test doubles
3. **Testing edge cases** - Sometimes tests intentionally pass invalid types to verify error handling
4. **Development velocity** - Strict typing in tests slows down test writing without proportional benefit during active development
**Future Consideration**: This decision should be revisited when the product is nearing its final stages. At that point, stricter linting in tests may be warranted to ensure long-term maintainability.
```javascript
// eslint.config.js - Test file overrides
{
files: ['**/*.test.ts', '**/*.test.tsx', '**/*.spec.ts', '**/*.spec.tsx'],
rules: {
'@typescript-eslint/no-explicit-any': 'off',
},
}
```
### Pre-commit Hook
The pre-commit hook runs lint-staged automatically:
```bash
# .husky/pre-commit
npx lint-staged
```
### lint-staged Configuration
lint-staged runs appropriate tools based on file type:
```json
{
"*.{js,jsx,ts,tsx}": ["eslint --fix", "prettier --write"],
"*.{json,md,css,html,yml,yaml}": ["prettier --write"]
}
```
### NPM Scripts
| Script | Description |
| ------------------ | ---------------------------------------------- |
| `npm run format` | Format all files with Prettier |
| `npm run lint` | Run ESLint on all TypeScript/JavaScript files |
| `npm run validate` | Run Prettier check + TypeScript check + ESLint |
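For reference, a minimal sketch of how these scripts could be wired up in `package.json`; the exact commands are illustrative and may differ from the project's actual configuration:
```json
{
  "scripts": {
    "format": "prettier --write .",
    "lint": "eslint .",
    "validate": "prettier --check . && tsc --noEmit && eslint ."
  }
}
```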
## Key Files
| File | Purpose |
| -------------------- | -------------------------------- |
| `.prettierrc` | Prettier configuration |
| `.prettierignore` | Files to exclude from formatting |
| `eslint.config.js` | ESLint flat configuration (v9) |
| `.husky/pre-commit` | Pre-commit hook script |
| `.lintstagedrc.json` | lint-staged configuration |
## Developer Workflow
### Automatic Formatting on Commit
When you commit changes:
1. Husky intercepts the commit
2. lint-staged identifies staged files
3. ESLint fixes auto-fixable issues
4. Prettier formats the code
5. Changes are automatically staged
6. Commit proceeds if no errors
### Manual Formatting
```bash
# Format entire codebase
npm run format
# Check formatting without changes
npx prettier --check .
# Run ESLint
npm run lint
# Run all validation checks
npm run validate
```
### IDE Integration
For the best experience, configure your IDE:
**VS Code** - Install extensions:
- Prettier - Code formatter
- ESLint
Add to `.vscode/settings.json`:
```json
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
}
}
```
## Troubleshooting
### "eslint --fix failed"
ESLint may fail on unfixable errors. Review the output and manually fix the issues.
### "prettier --write failed"
Check for syntax errors in the file that prevent parsing.
### Bypassing Hooks (Emergency)
In rare cases, you may need to bypass hooks:
```bash
git commit --no-verify -m "emergency fix"
```
Use sparingly - the CI pipeline will still catch formatting issues.

View File

@@ -2,7 +2,7 @@
**Date**: 2025-12-14
**Status**: Proposed
**Status**: Adopted
## Context

View File

@@ -0,0 +1,177 @@
# ADR-028: API Response Standardization and Envelope Pattern
**Date**: 2026-01-09
**Status**: Implemented
## Context
The API currently has inconsistent response formats across different endpoints:
1. Some endpoints return raw data arrays (`[{...}, {...}]`)
2. Some return wrapped objects (`{ data: [...] }`)
3. Pagination is handled inconsistently (some use `page`/`limit`, others use `offset`/`count`)
4. Error responses vary in structure between middleware and route handlers
5. No standard for including metadata (pagination info, request timing, etc.)
This inconsistency creates friction for:
- Frontend developers who must handle multiple response formats
- API documentation and client SDK generation
- Implementing consistent error handling across the application
- Future API versioning transitions
## Decision
We will adopt a standardized response envelope pattern for all API responses.
### Success Response Format
```typescript
interface ApiSuccessResponse<T> {
success: true;
data: T;
meta?: {
// Pagination (when applicable)
pagination?: {
page: number;
limit: number;
total: number;
totalPages: number;
hasNextPage: boolean;
hasPrevPage: boolean;
};
// Timing
requestId?: string;
timestamp?: string;
duration?: number;
};
}
```
### Error Response Format
```typescript
interface ApiErrorResponse {
success: false;
error: {
code: string; // Machine-readable error code (e.g., 'VALIDATION_ERROR')
message: string; // Human-readable message
details?: unknown; // Additional context (validation errors, etc.)
};
meta?: {
requestId?: string;
timestamp?: string;
};
}
```
### Implementation Approach
1. **Response Helper Functions**: Create utility functions in `src/utils/apiResponse.ts` (a sketch follows after this list):
- `sendSuccess(res, data, meta?)`
- `sendPaginated(res, data, pagination)`
- `sendError(res, code, message, details?, statusCode?)`
2. **Error Handler Integration**: Update `errorHandler.ts` to use the standard error format
3. **Gradual Migration**: Apply to new endpoints immediately, migrate existing endpoints incrementally
4. **TypeScript Types**: Export response types for frontend consumption
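A minimal sketch of what the helpers from step 1 might look like; the actual implementations in `src/utils/apiResponse.ts` may differ in detail, and the import paths shown are assumptions:
```typescript
// Illustrative sketch only - mirrors the helpers listed in the Implementation Status section
import type { Response } from 'express';
import type { PaginationMeta } from '../types/api';

export function sendSuccess<T>(res: Response, data: T, meta?: Record<string, unknown>) {
  return res.status(200).json({ success: true, data, ...(meta ? { meta } : {}) });
}

export function calculatePagination(page: number, limit: number, total: number): PaginationMeta {
  const totalPages = Math.ceil(total / limit);
  return { page, limit, total, totalPages, hasNextPage: page < totalPages, hasPrevPage: page > 1 };
}

export function sendPaginated<T>(
  res: Response,
  data: T[],
  pagination: { page: number; limit: number; total: number },
) {
  const { page, limit, total } = pagination;
  return res.status(200).json({
    success: true,
    data,
    meta: { pagination: calculatePagination(page, limit, total) },
  });
}

export function sendError(
  res: Response,
  code: string,
  message: string,
  details?: unknown,
  statusCode = 500,
) {
  return res.status(statusCode).json({ success: false, error: { code, message, details } });
}
```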
## Consequences
### Positive
- **Consistency**: All responses follow a predictable structure
- **Type Safety**: Frontend can rely on consistent types
- **Debugging**: Request IDs and timestamps aid in issue investigation
- **Pagination**: Standardized pagination metadata reduces frontend complexity
- **API Evolution**: Envelope pattern makes it easier to add fields without breaking changes
### Negative
- **Verbosity**: Responses are slightly larger due to envelope overhead
- **Migration Effort**: Existing endpoints need updating
- **Learning Curve**: Developers must learn and use the helper functions
## Implementation Status
### What's Implemented
- ✅ Created `src/utils/apiResponse.ts` with helper functions (`sendSuccess`, `sendPaginated`, `sendError`, `sendNoContent`, `sendMessage`, `calculatePagination`)
- ✅ Created `src/types/api.ts` with response type definitions (`ApiSuccessResponse`, `ApiErrorResponse`, `PaginationMeta`, `ErrorCode`)
- ✅ Updated `src/middleware/errorHandler.ts` to use standard error format
- ✅ Migrated all route files to use standardized responses:
- `health.routes.ts`
- `flyer.routes.ts`
- `deals.routes.ts`
- `budget.routes.ts`
- `personalization.routes.ts`
- `price.routes.ts`
- `reactions.routes.ts`
- `stats.routes.ts`
- `system.routes.ts`
- `gamification.routes.ts`
- `recipe.routes.ts`
- `auth.routes.ts`
- `user.routes.ts`
- `admin.routes.ts`
- `ai.routes.ts`
### Error Codes
The following error codes are defined in `src/types/api.ts`:
| Code | HTTP Status | Description |
| ------------------------ | ----------- | ----------------------------------- |
| `VALIDATION_ERROR` | 400 | Request validation failed |
| `BAD_REQUEST` | 400 | Malformed request |
| `UNAUTHORIZED` | 401 | Authentication required |
| `FORBIDDEN` | 403 | Insufficient permissions |
| `NOT_FOUND` | 404 | Resource not found |
| `CONFLICT` | 409 | Resource conflict (e.g., duplicate) |
| `RATE_LIMITED` | 429 | Too many requests |
| `PAYLOAD_TOO_LARGE` | 413 | Request body too large |
| `INTERNAL_ERROR` | 500 | Server error |
| `NOT_IMPLEMENTED` | 501 | Feature not yet implemented |
| `SERVICE_UNAVAILABLE` | 503 | Service temporarily unavailable |
| `EXTERNAL_SERVICE_ERROR` | 502 | External service failure |
## Example Usage
```typescript
// In a route handler
router.get('/flyers', async (req, res, next) => {
try {
    // Query string values arrive as strings; coerce to numbers before use
    const page = Number(req.query.page ?? 1);
    const limit = Number(req.query.limit ?? 20);
    const { flyers, total } = await flyerService.getFlyers({ page, limit });
    return sendPaginated(res, flyers, { page, limit, total });
} catch (error) {
next(error);
}
});
// Response:
// {
// "success": true,
// "data": [...],
// "meta": {
// "pagination": {
// "page": 1,
// "limit": 20,
// "total": 150,
// "totalPages": 8,
// "hasNextPage": true,
// "hasPrevPage": false
// },
// "requestId": "abc-123",
// "timestamp": "2026-01-09T12:00:00.000Z"
// }
// }
```

View File

@@ -0,0 +1,147 @@
# ADR-029: Secret Rotation and Key Management Strategy
**Date**: 2026-01-09
**Status**: Proposed
## Context
While ADR-007 covers configuration validation at startup, it does not address the lifecycle management of secrets:
1. **JWT Secrets**: If the JWT_SECRET is rotated, all existing user sessions are immediately invalidated
2. **Database Credentials**: No documented procedure for rotating database passwords without downtime
3. **API Keys**: External service API keys (AI services, geocoding) have no rotation strategy
4. **Emergency Revocation**: No process for immediately invalidating compromised credentials
Current risks:
- Long-lived secrets that never change become high-value targets
- No ability to rotate secrets without application restart
- No audit trail of when secrets were last rotated
- Compromised keys could remain active indefinitely
## Decision
We will implement a comprehensive secret rotation and key management strategy.
### 1. JWT Secret Rotation with Dual-Key Support
Support multiple JWT secrets simultaneously to enable zero-downtime rotation:
```typescript
// Environment variables:
//   JWT_SECRET          - current secret
//   JWT_SECRET_PREVIOUS - old secret (optional, kept only for the transition period)
// Token verification tries current first, falls back to previous
const verifyToken = (token: string) => {
try {
return jwt.verify(token, process.env.JWT_SECRET);
} catch {
if (process.env.JWT_SECRET_PREVIOUS) {
return jwt.verify(token, process.env.JWT_SECRET_PREVIOUS);
}
throw new AuthenticationError('Invalid token');
}
};
```
### 2. Database Credential Rotation
Document and implement a procedure for PostgreSQL credential rotation:
1. Create new database user with identical permissions
2. Update application configuration to use new credentials
3. Restart application instances (rolling restart)
4. Remove old database user after all instances updated
5. Log rotation event for audit purposes
### 3. API Key Management
For external service API keys (Google AI, geocoding services):
1. **Naming Convention**: `{SERVICE}_API_KEY` and `{SERVICE}_API_KEY_PREVIOUS`
2. **Fallback Logic**: Try primary key, fall back to previous on 401/403 (see the sketch after this list)
3. **Health Checks**: Validate API keys on startup
4. **Usage Logging**: Track which key is being used for each request
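A hedged sketch of the fallback behaviour described in point 2; the wrapper name and error shape are assumptions for illustration, not existing project code:
```typescript
// Hypothetical helper - the name and the error's `status` field are assumptions
async function callWithKeyFallback<T>(
  callWithKey: (apiKey: string) => Promise<T>,
  primaryKey: string | undefined,
  previousKey: string | undefined,
): Promise<T> {
  if (!primaryKey) throw new Error('Primary API key is not configured');
  try {
    return await callWithKey(primaryKey);
  } catch (err: unknown) {
    const status = (err as { status?: number }).status;
    // Fall back only on auth failures, and only if a previous key is configured
    if ((status === 401 || status === 403) && previousKey) {
      return await callWithKey(previousKey);
    }
    throw err;
  }
}
```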
### 4. Emergency Revocation Procedures
Document emergency procedures for:
- **JWT Compromise**: Set new JWT_SECRET, clear all refresh tokens from database
- **Database Compromise**: Rotate credentials immediately, audit access logs
- **API Key Compromise**: Regenerate at provider, update environment, restart
### 5. Secret Audit Trail
Track secret lifecycle events:
- When secrets were last rotated
- Who initiated the rotation
- Which instances are using which secrets
## Implementation Approach
### Phase 1: Dual JWT Secret Support
- Modify token verification to support fallback secret
- Add JWT_SECRET_PREVIOUS to configuration schema
- Update documentation
### Phase 2: Rotation Scripts
- Create `scripts/rotate-jwt-secret.sh`
- Create `scripts/rotate-db-credentials.sh`
- Add rotation instructions to operations runbook
### Phase 3: API Key Fallback
- Wrap external API clients with fallback logic
- Add key validation to health checks
- Implement key usage logging
## Consequences
### Positive
- **Zero-Downtime Rotation**: Secrets can be rotated without invalidating all sessions
- **Reduced Risk**: Regular rotation limits exposure window for compromised credentials
- **Audit Trail**: Clear record of when secrets were changed
- **Emergency Response**: Documented procedures for security incidents
### Negative
- **Complexity**: Dual-key logic adds code complexity
- **Operations Overhead**: Regular rotation requires operational discipline
- **Testing**: Rotation procedures need to be tested periodically
## Implementation Status
### What's Implemented
- ❌ Not yet implemented
### What Needs To Be Done
1. Implement dual JWT secret verification
2. Create rotation scripts
3. Document emergency procedures
4. Add secret validation to health checks
5. Create rotation schedule recommendations
## Key Files (To Be Created)
- `src/utils/secretManager.ts` - Secret rotation utilities
- `scripts/rotate-jwt-secret.sh` - JWT rotation script
- `scripts/rotate-db-credentials.sh` - Database credential rotation
- `docs/operations/secret-rotation.md` - Operations runbook
## Rotation Schedule Recommendations
| Secret Type | Rotation Frequency | Grace Period |
| ------------------ | -------------------------- | ----------------- |
| JWT_SECRET | 90 days | 7 days (dual-key) |
| Database Passwords | 180 days | Rolling restart |
| AI API Keys | On suspicion of compromise | Immediate |
| Refresh Tokens | 7-day max age | N/A (per-token) |

View File

@@ -0,0 +1,150 @@
# ADR-030: Graceful Degradation and Circuit Breaker Pattern
**Date**: 2026-01-09
**Status**: Proposed
## Context
The application depends on several external services:
1. **AI Services** (Google Gemini) - For flyer item extraction
2. **Redis** - For caching, rate limiting, and job queues
3. **PostgreSQL** - Primary data store
4. **Geocoding APIs** - For location services
Currently, when these services fail:
- AI failures may cause the entire upload to fail
- Redis unavailability could crash the application or bypass rate limiting
- No circuit breakers prevent repeated calls to failing services
- No fallback behaviors are defined
This creates fragility where a single service outage can cascade into application-wide failures.
## Decision
We will implement a graceful degradation strategy with circuit breakers for external service dependencies.
### 1. Circuit Breaker Pattern
Implement circuit breakers for external service calls using a library like `opossum`:
```typescript
import CircuitBreaker from 'opossum';
const aiCircuitBreaker = new CircuitBreaker(callAiService, {
timeout: 30000, // 30 second timeout
errorThresholdPercentage: 50, // Open circuit at 50% failures
resetTimeout: 30000, // Try again after 30 seconds
volumeThreshold: 5, // Minimum calls before calculating error %
});
aiCircuitBreaker.on('open', () => {
logger.warn('AI service circuit breaker opened');
});
aiCircuitBreaker.on('halfOpen', () => {
logger.info('AI service circuit breaker half-open, testing...');
});
```
### 2. Fallback Behaviors by Service
| Service | Fallback Behavior |
| ---------------------- | ---------------------------------------- |
| **Redis (Cache)** | Skip cache, query database directly |
| **Redis (Rate Limit)** | Log warning, allow request (fail-open) |
| **Redis (Queues)** | Queue to memory, process synchronously |
| **AI Service** | Return partial results, queue for retry |
| **Geocoding** | Return null location, allow manual entry |
| **PostgreSQL** | No fallback - critical dependency |
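To illustrate the fail-open behaviour in the Redis (Rate Limit) row above, a minimal sketch; the Redis client and logger are passed in explicitly here because the concrete instances depend on the codebase:
```typescript
// Sketch only: allow the request when the rate-limit store is unreachable (fail-open)
async function checkRateLimitFailOpen(
  redis: { incr(key: string): Promise<number>; expire(key: string, seconds: number): Promise<unknown> },
  logger: { warn(obj: unknown, msg: string): void },
  key: string,
  limit: number,
  windowSeconds = 900,
): Promise<boolean> {
  try {
    const count = await redis.incr(key);
    if (count === 1) await redis.expire(key, windowSeconds);
    return count <= limit;
  } catch (err) {
    logger.warn({ err, key }, 'Rate limit store unavailable, failing open');
    return true; // degrade gracefully rather than rejecting traffic
  }
}
```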
### 3. Health Status Aggregation
Extend health checks (ADR-020) to report service-level health:
```typescript
// GET /api/health/ready response
{
"status": "degraded", // healthy | degraded | unhealthy
"services": {
"database": { "status": "healthy", "latency": 5 },
"redis": { "status": "healthy", "latency": 2 },
"ai": { "status": "degraded", "circuitState": "half-open" },
"geocoding": { "status": "healthy", "latency": 150 }
}
}
```
### 4. Retry Strategies
Define retry policies for transient failures:
```typescript
const retryConfig = {
ai: { maxRetries: 3, backoff: 'exponential', initialDelay: 1000 },
geocoding: { maxRetries: 2, backoff: 'linear', initialDelay: 500 },
database: { maxRetries: 3, backoff: 'exponential', initialDelay: 100 },
};
```
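A small sketch of how such a policy could be applied; the helper name is an assumption, not an existing utility:
```typescript
// Illustrative retry helper matching the config shape above
async function withRetry<T>(
  fn: () => Promise<T>,
  policy: { maxRetries: number; backoff: 'exponential' | 'linear'; initialDelay: number },
): Promise<T> {
  let attempt = 0;
  for (;;) {
    try {
      return await fn();
    } catch (err) {
      if (attempt >= policy.maxRetries) throw err;
      const delay =
        policy.backoff === 'exponential'
          ? policy.initialDelay * 2 ** attempt
          : policy.initialDelay * (attempt + 1);
      await new Promise((resolve) => setTimeout(resolve, delay));
      attempt += 1;
    }
  }
}
```
Usage would look like `withRetry(() => geocodeAddress(address), retryConfig.geocoding)`, where `geocodeAddress` stands in for whatever call is being protected.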
## Implementation Approach
### Phase 1: Redis Fallbacks
- Wrap cache operations with try-catch (already partially done in cacheService)
- Add fail-open for rate limiting when Redis is down
- Log degraded state
### Phase 2: AI Circuit Breaker
- Wrap AI service calls with circuit breaker
- Implement queue-for-retry on circuit open
- Add manual fallback UI for failed extractions
### Phase 3: Health Aggregation
- Update health endpoints with service status
- Add Prometheus-compatible metrics
- Create dashboard for service health
## Consequences
### Positive
- **Resilience**: Application continues functioning during partial outages
- **User Experience**: Degraded but functional is better than complete failure
- **Observability**: Clear visibility into service health
- **Protection**: Circuit breakers prevent cascading failures
### Negative
- **Complexity**: Additional code for fallback logic
- **Testing**: Requires testing failure scenarios
- **Consistency**: Some operations may have different results during degradation
## Implementation Status
### What's Implemented
- ✅ Cache operations fail gracefully (cacheService.server.ts)
- ❌ Circuit breakers for AI services
- ❌ Rate limit fail-open behavior
- ❌ Health aggregation endpoint
- ❌ Retry strategies with backoff
### What Needs To Be Done
1. Install and configure `opossum` circuit breaker library
2. Wrap AI service calls with circuit breaker
3. Add fail-open to rate limiting
4. Extend health endpoints with service status
5. Document degraded mode behaviors
## Key Files
- `src/utils/circuitBreaker.ts` - Circuit breaker configurations (to create)
- `src/services/cacheService.server.ts` - Already has graceful fallbacks
- `src/routes/health.routes.ts` - Health check endpoints (to extend)
- `src/services/aiService.server.ts` - AI service wrapper (to wrap)

View File

@@ -0,0 +1,199 @@
# ADR-031: Data Retention and Privacy Compliance (GDPR/CCPA)
**Date**: 2026-01-09
**Status**: Proposed
## Context
The application stores various types of user data:
1. **User Accounts**: Email, password hash, profile information
2. **Shopping Lists**: Personal shopping preferences and history
3. **Watch Lists**: Tracked items and price alerts
4. **Activity Logs**: User actions for analytics and debugging
5. **Tracking Data**: Page views, interactions, feature usage
Current gaps in privacy compliance:
- **No Data Retention Policies**: Activity logs accumulate indefinitely
- **No User Data Export**: Users cannot export their data (GDPR Article 20)
- **No User Data Deletion**: No self-service account deletion (GDPR Article 17)
- **No Cookie Consent**: Cookie usage not disclosed or consented
- **No Privacy Policy Enforcement**: Privacy commitments not enforced in code
These gaps create legal exposure for users in EU (GDPR) and California (CCPA).
## Decision
We will implement comprehensive data retention and privacy compliance features.
### 1. Data Retention Policies
| Data Type | Retention Period | Deletion Method |
| ------------------------- | ------------------------ | ------------------------ |
| **Activity Logs** | 90 days | Automated cleanup job |
| **Tracking Events** | 30 days | Automated cleanup job |
| **Deleted User Data** | 30 days (soft delete) | Hard delete after period |
| **Expired Sessions** | 7 days after expiry | Token cleanup job |
| **Failed Login Attempts** | 24 hours | Automated cleanup |
| **Flyer Data** | Indefinite (public data) | N/A |
| **User Shopping Lists** | Until account deletion | With account |
| **User Watch Lists** | Until account deletion | With account |
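A hedged sketch of the automated cleanup job implied by the first two rows; the `created_at` column name and the import paths are assumptions:
```typescript
// Sketch only - column names and module paths are illustrative
import { getPool } from './db/connection.db';
import { logger } from './logger.server';

export async function runRetentionCleanup(): Promise<void> {
  const pool = getPool();
  const activity = await pool.query(
    `DELETE FROM activity_log WHERE created_at < NOW() - INTERVAL '90 days'`,
  );
  const tracking = await pool.query(
    `DELETE FROM tracking_events WHERE created_at < NOW() - INTERVAL '30 days'`,
  );
  logger.info(
    { activityRows: activity.rowCount, trackingRows: tracking.rowCount },
    'Retention cleanup completed',
  );
}
```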
### 2. User Data Export (Right to Portability)
Implement `GET /api/users/me/export` endpoint:
```typescript
interface UserDataExport {
exportDate: string;
user: {
email: string;
created_at: string;
profile: ProfileData;
};
shoppingLists: ShoppingList[];
watchedItems: WatchedItem[];
priceAlerts: PriceAlert[];
achievements: Achievement[];
// Exclude: password hash, internal IDs, admin flags
}
```
Export formats: JSON (primary), CSV (optional)
### 3. User Data Deletion (Right to Erasure)
Implement `DELETE /api/users/me` endpoint:
1. **Soft Delete**: Mark account as deleted, anonymize PII
2. **Grace Period**: 30 days to restore account
3. **Hard Delete**: Permanently remove all user data after grace period
4. **Audit Log**: Record deletion request (anonymized)
Deletion cascade (an anonymization sketch follows the list):
- User account → Anonymize email/name
- Shopping lists → Delete
- Watch lists → Delete
- Achievements → Delete
- Activity logs → Anonymize user_id
- Sessions/tokens → Delete immediately
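A minimal sketch of the anonymization utilities this cascade implies (the planned `src/utils/anonymize.ts`); field names are illustrative assumptions:
```typescript
// Sketch only - field names are illustrative
import { createHash } from 'crypto';

// Replace an email with a stable, non-reversible placeholder so unique constraints still hold
export function anonymizeEmail(email: string): string {
  const digest = createHash('sha256').update(email.toLowerCase()).digest('hex').slice(0, 16);
  return `deleted-${digest}@anonymized.invalid`;
}

// Strip PII from a user row while keeping the record for referential integrity
export function anonymizeUser(user: { email: string; full_name: string | null }) {
  return { ...user, email: anonymizeEmail(user.email), full_name: null };
}
```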
### 4. Cookie Consent
Implement cookie consent banner:
```typescript
// Cookie categories
enum CookieCategory {
ESSENTIAL = 'essential', // Always allowed (auth, CSRF)
FUNCTIONAL = 'functional', // Dark mode, preferences
ANALYTICS = 'analytics', // Usage tracking
}
// Store consent in localStorage and server-side
interface CookieConsent {
essential: true; // Cannot be disabled
functional: boolean;
analytics: boolean;
consentDate: string;
consentVersion: string;
}
```
### 5. Privacy Policy Enforcement
Enforce privacy commitments in code:
- Email addresses never logged in plaintext
- Passwords never logged (already in pino redact config)
- IP addresses anonymized after 7 days
- Third-party data sharing requires explicit consent
## Implementation Approach
### Phase 1: Data Retention Jobs
- Create retention cleanup job in background job service
- Add activity_log retention (90 days)
- Add tracking_events retention (30 days)
### Phase 2: User Data Export
- Create export endpoint
- Implement data aggregation query
- Add rate limiting (1 export per 24h)
### Phase 3: Account Deletion
- Implement soft delete with anonymization
- Create hard delete cleanup job
- Add account recovery endpoint
### Phase 4: Cookie Consent
- Create consent banner component
- Store consent preferences
- Gate analytics based on consent
## Consequences
### Positive
- **Legal Compliance**: Meets GDPR and CCPA requirements
- **User Trust**: Demonstrates commitment to privacy
- **Data Hygiene**: Automatic cleanup prevents data bloat
- **Reduced Liability**: Less data = less risk
### Negative
- **Implementation Effort**: Significant feature development
- **Operational Complexity**: Deletion jobs need monitoring
- **Feature Limitations**: Some features may be limited without consent
## Implementation Status
### What's Implemented
- ✅ Token cleanup job exists (tokenCleanupQueue)
- ❌ Activity log retention
- ❌ User data export endpoint
- ❌ Account deletion endpoint
- ❌ Cookie consent banner
- ❌ Data anonymization functions
### What Needs To Be Done
1. Add activity_log cleanup to background jobs
2. Create `/api/users/me/export` endpoint
3. Create `/api/users/me` DELETE endpoint with soft delete
4. Implement cookie consent UI component
5. Document data retention in privacy policy
6. Add anonymization utility functions
## Key Files (To Be Created/Modified)
- `src/services/backgroundJobService.ts` - Add retention jobs
- `src/routes/user.routes.ts` - Add export/delete endpoints
- `src/services/privacyService.server.ts` - Data export/deletion logic
- `src/components/CookieConsent.tsx` - Consent banner
- `src/utils/anonymize.ts` - Data anonymization utilities
## Compliance Checklist
### GDPR Requirements
- [ ] Article 15: Right of Access (data export)
- [ ] Article 17: Right to Erasure (account deletion)
- [ ] Article 20: Right to Data Portability (JSON export)
- [ ] Article 7: Conditions for Consent (cookie consent)
- [ ] Article 13: Information to be Provided (privacy policy)
### CCPA Requirements
- [ ] Right to Know (data export)
- [ ] Right to Delete (account deletion)
- [ ] Right to Opt-Out (cookie consent for analytics)
- [ ] Non-Discrimination (no feature penalty for privacy choices)

View File

@@ -0,0 +1,147 @@
# ADR-032: Rate Limiting Strategy
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
Public-facing APIs are vulnerable to abuse through excessive requests, whether from malicious actors attempting denial-of-service attacks, automated scrapers, or accidental loops in client code. Without proper rate limiting, the application could:
1. **Experience degraded performance**: Excessive requests can overwhelm database connections and server resources
2. **Incur unexpected costs**: AI service calls (Gemini API) and external APIs (Google Maps) are billed per request
3. **Allow credential stuffing**: Login endpoints without limits enable brute-force attacks
4. **Suffer from data scraping**: Public endpoints could be scraped at high volume
## Decision
We will implement a tiered rate limiting strategy using `express-rate-limit` middleware, with different limits based on endpoint sensitivity and resource cost.
### Tier System
| Tier | Window | Max Requests | Use Case |
| --------------------------- | ------ | ------------ | -------------------------------- |
| **Authentication (Strict)** | 15 min | 5 | Login, registration |
| **Sensitive Operations** | 1 hour | 5 | Password changes, email updates |
| **AI/Costly Operations** | 15 min | 10-20 | Gemini API calls, geocoding |
| **File Uploads** | 15 min | 10-20 | Flyer uploads, avatar uploads |
| **Batch Operations** | 15 min | 50 | Bulk updates |
| **User Read** | 15 min | 100 | Standard authenticated endpoints |
| **Public Read** | 15 min | 100 | Public data endpoints |
| **Tracking/High-Volume** | 15 min | 150-200 | Analytics, reactions |
### Rate Limiter Configuration
All rate limiters share a standard configuration:
```typescript
const standardConfig = {
standardHeaders: true, // Return rate limit info in headers
legacyHeaders: false, // Disable deprecated X-RateLimit headers
skip: shouldSkipRateLimit, // Allow bypassing in test environment
};
```
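For illustration, a sketch of how an individual limiter such as `loginLimiter` might be composed from this shared configuration with `express-rate-limit`; the message text is taken from the example response below, other details are assumptions:
```typescript
import rateLimit from 'express-rate-limit';
import { shouldSkipRateLimit } from '../utils/rateLimit';

// Shared settings repeated here so the sketch is self-contained
const standardConfig = {
  standardHeaders: true,
  legacyHeaders: false,
  skip: shouldSkipRateLimit,
};

export const loginLimiter = rateLimit({
  ...standardConfig,
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 5,
  message: {
    message: 'Too many login attempts from this IP, please try again after 15 minutes.',
  },
});
```
The other limiters in the table above would follow the same shape with different `windowMs` and `max` values.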
### Test Environment Bypass
Rate limiting is bypassed during integration and E2E tests to avoid test flakiness:
```typescript
export const shouldSkipRateLimit = (req: Request): boolean => {
return process.env.NODE_ENV === 'test';
};
```
## Implementation Details
### Available Rate Limiters
| Limiter | Window | Max | Endpoint Examples |
| ---------------------------- | ------ | --- | --------------------------------- |
| `loginLimiter` | 15 min | 5 | POST /api/auth/login |
| `registerLimiter` | 1 hour | 5 | POST /api/auth/register |
| `forgotPasswordLimiter` | 15 min | 5 | POST /api/auth/forgot-password |
| `resetPasswordLimiter` | 15 min | 10 | POST /api/auth/reset-password |
| `refreshTokenLimiter` | 15 min | 20 | POST /api/auth/refresh |
| `logoutLimiter` | 15 min | 10 | POST /api/auth/logout |
| `publicReadLimiter` | 15 min | 100 | GET /api/flyers, GET /api/recipes |
| `userReadLimiter` | 15 min | 100 | GET /api/users/profile |
| `userUpdateLimiter` | 15 min | 100 | PUT /api/users/profile |
| `userSensitiveUpdateLimiter` | 1 hour | 5 | PUT /api/auth/change-password |
| `adminTriggerLimiter` | 15 min | 30 | POST /api/admin/jobs/\* |
| `aiGenerationLimiter` | 15 min | 20 | POST /api/ai/analyze |
| `aiUploadLimiter` | 15 min | 10 | POST /api/ai/upload-and-process |
| `geocodeLimiter` | 1 hour | 100 | GET /api/users/geocode |
| `priceHistoryLimiter` | 15 min | 50 | GET /api/price-history/\* |
| `reactionToggleLimiter` | 15 min | 150 | POST /api/reactions/toggle |
| `trackingLimiter` | 15 min | 200 | POST /api/personalization/track |
| `batchLimiter` | 15 min | 50 | PATCH /api/budgets/batch |
### Usage Pattern
```typescript
import { loginLimiter, userReadLimiter } from '../config/rateLimiters';
// Apply to individual routes
router.post('/login', loginLimiter, validateRequest(loginSchema), async (req, res, next) => {
// handler
});
// Or apply to entire router for consistent limits
router.use(userReadLimiter);
router.get('/me', async (req, res, next) => {
/* handler */
});
```
### Response Headers
When rate limiting is active, responses include standard headers:
```
RateLimit-Limit: 100
RateLimit-Remaining: 95
RateLimit-Reset: 900
```
### Rate Limit Exceeded Response
When a client exceeds their limit:
```json
{
"message": "Too many login attempts from this IP, please try again after 15 minutes."
}
```
HTTP Status: `429 Too Many Requests`
## Key Files
- `src/config/rateLimiters.ts` - Rate limiter definitions
- `src/utils/rateLimit.ts` - Helper functions (test bypass)
## Consequences
### Positive
- **Security**: Protects against brute-force and credential stuffing attacks
- **Cost Control**: Prevents runaway costs from AI/external API abuse
- **Fair Usage**: Ensures all users get reasonable service access
- **DDoS Mitigation**: Provides basic protection against request flooding
### Negative
- **Legitimate User Impact**: Aggressive users may hit limits during normal use
- **IP-Based Limitations**: Shared IPs (offices, VPNs) may cause false positives
- **No Distributed State**: Rate limits are per-instance, not cluster-wide (would need Redis store for that)
## Future Enhancements
1. **Redis Store**: Implement distributed rate limiting with Redis for multi-instance deployments
2. **User-Based Limits**: Track limits per authenticated user rather than just IP
3. **Dynamic Limits**: Adjust limits based on user tier (free vs premium)
4. **Monitoring Dashboard**: Track rate limit hits in admin dashboard
5. **Allowlisting**: Allow specific IPs (monitoring services) to bypass limits

View File

@@ -0,0 +1,196 @@
# ADR-033: File Upload and Storage Strategy
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
The application handles file uploads for flyer images and user avatars. Without a consistent strategy, file uploads can introduce security vulnerabilities (path traversal, malicious file types), performance issues (unbounded file sizes), and maintenance challenges (inconsistent storage locations).
Key concerns:
1. **Security**: Preventing malicious file uploads, path traversal attacks, and unsafe filenames
2. **Storage Organization**: Consistent directory structure for uploaded files
3. **Size Limits**: Preventing resource exhaustion from oversized uploads
4. **File Type Validation**: Ensuring only expected file types are accepted
5. **Cleanup**: Managing temporary and orphaned files
## Decision
We will implement a centralized file upload strategy using `multer` middleware with custom storage configurations, file type validation, and size limits.
### Storage Types
| Type | Directory | Purpose | Size Limit |
| -------- | ------------------------------ | ------------------------------ | ---------- |
| `flyer` | `$STORAGE_PATH` (configurable) | Flyer images for AI processing | 100MB |
| `avatar` | `public/uploads/avatars/` | User profile pictures | 5MB |
### Filename Strategy
All uploaded files are renamed to prevent:
- Path traversal attacks
- Filename collisions
- Problematic characters in filenames
**Pattern**: `{fieldname}-{timestamp}-{random}-{sanitized-original}`
Example: `flyer-1704825600000-829461742-grocery-flyer.jpg`
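A hedged sketch of how this pattern might be produced inside a multer `filename` callback; the real logic in `multer.middleware.ts` may differ:
```typescript
// Sketch only - mirrors the pattern {fieldname}-{timestamp}-{random}-{sanitized-original}
import path from 'path';
import { sanitizeFilename } from '../utils/stringUtils';

function buildStoredFilename(fieldname: string, originalname: string): string {
  const timestamp = Date.now();
  const random = Math.round(Math.random() * 1e9);
  const sanitized = sanitizeFilename(path.basename(originalname));
  return `${fieldname}-${timestamp}-${random}-${sanitized}`;
}
```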
### File Type Validation
Only image files (`image/*` MIME type) are accepted. Non-image uploads are rejected with a structured `ValidationError`.
## Implementation Details
### Multer Configuration Factory
```typescript
import { createUploadMiddleware } from '../middleware/multer.middleware';
// For flyer uploads (100MB limit)
const flyerUpload = createUploadMiddleware({
storageType: 'flyer',
fileSize: 100 * 1024 * 1024, // 100MB
fileFilter: 'image',
});
// For avatar uploads (5MB limit)
const avatarUpload = createUploadMiddleware({
storageType: 'avatar',
fileSize: 5 * 1024 * 1024, // 5MB
fileFilter: 'image',
});
```
### Storage Configuration
```typescript
// Configurable via environment variable
export const flyerStoragePath =
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
// Relative to project root
export const avatarStoragePath = path.join(process.cwd(), 'public', 'uploads', 'avatars');
```
### Filename Sanitization
The `sanitizeFilename` utility removes dangerous characters:
```typescript
// Removes: path separators, null bytes, special characters
// Keeps: alphanumeric, dots, hyphens, underscores
const sanitized = sanitizeFilename(file.originalname);
```
### Required File Validation Middleware
Ensures a file was uploaded before processing:
```typescript
import { requireFileUpload } from '../middleware/fileUpload.middleware';
router.post(
'/upload',
flyerUpload.single('flyerImage'),
requireFileUpload('flyerImage'), // 400 error if missing
handleMulterError,
async (req, res) => {
// req.file is guaranteed to exist
},
);
```
### Error Handling
```typescript
import { handleMulterError } from '../middleware/multer.middleware';
// Catches multer-specific errors (file too large, etc.)
router.use(handleMulterError);
```
### Directory Initialization
Storage directories are created automatically at application startup:
```typescript
import { promises as fs } from 'fs';

(async () => {
await fs.mkdir(flyerStoragePath, { recursive: true });
await fs.mkdir(avatarStoragePath, { recursive: true });
})();
```
### Test Environment Handling
In test environments, files use predictable names for easy cleanup:
```typescript
if (process.env.NODE_ENV === 'test') {
return cb(null, `test-avatar${path.extname(file.originalname) || '.png'}`);
}
```
## Usage Example
```typescript
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
import { requireFileUpload } from '../middleware/fileUpload.middleware';
import { validateRequest } from '../middleware/validation.middleware';
import { aiUploadLimiter } from '../config/rateLimiters';
const flyerUpload = createUploadMiddleware({
storageType: 'flyer',
fileSize: 100 * 1024 * 1024,
fileFilter: 'image',
});
router.post(
'/upload-and-process',
aiUploadLimiter,
validateRequest(uploadSchema),
flyerUpload.single('flyerImage'),
requireFileUpload('flyerImage'),
handleMulterError,
async (req, res, next) => {
const filePath = req.file!.path;
// Process the uploaded file...
},
);
```
## Key Files
- `src/middleware/multer.middleware.ts` - Multer configuration and storage handlers
- `src/middleware/fileUpload.middleware.ts` - File requirement validation
- `src/utils/stringUtils.ts` - Filename sanitization utilities
- `src/utils/fileUtils.ts` - File system utilities (deletion, etc.)
## Consequences
### Positive
- **Security**: Prevents path traversal and malicious uploads through sanitization and validation
- **Consistency**: All uploads follow the same patterns and storage organization
- **Predictability**: Test environments use predictable filenames for cleanup
- **Extensibility**: Factory pattern allows easy addition of new upload types
### Negative
- **Disk Storage**: Files stored on disk require backup and cleanup strategies
- **Single Server**: Current implementation doesn't support cloud storage (S3, etc.)
- **No Virus Scanning**: Files aren't scanned for malware before processing
## Future Enhancements
1. **Cloud Storage**: Support for S3/GCS as storage backend
2. **Virus Scanning**: Integrate ClamAV or cloud-based scanning
3. **Image Optimization**: Automatic resizing/compression before storage
4. **CDN Integration**: Serve uploaded files through CDN
5. **Cleanup Job**: Scheduled job to remove orphaned/temporary files
6. **Presigned URLs**: Direct upload to cloud storage to reduce server load

View File

@@ -0,0 +1,345 @@
# ADR-034: Repository Pattern Standards
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
The application uses a repository pattern to abstract database access from business logic. However, without clear standards, repository implementations can diverge in:
1. **Method naming**: Inconsistent verbs (get vs find vs fetch)
2. **Return types**: Some methods return `undefined`, others throw errors
3. **Error handling**: Varied approaches to database error handling
4. **Transaction participation**: Unclear how methods participate in transactions
5. **Logging patterns**: Inconsistent logging context and messages
This ADR establishes standards for all repository implementations, complementing ADR-001 (Error Handling) and ADR-002 (Transaction Management).
## Decision
All repository implementations MUST follow these standards:
### Method Naming Conventions
| Prefix | Returns | Behavior on Not Found |
| --------- | ---------------------- | ------------------------------------ |
| `get*` | Single entity | Throws `NotFoundError` |
| `find*` | Entity or `null` | Returns `null` |
| `list*` | Array (possibly empty) | Returns `[]` |
| `create*` | Created entity | Throws on constraint violation |
| `update*` | Updated entity | Throws `NotFoundError` if not exists |
| `delete*` | `void` or `boolean` | Throws `NotFoundError` if not exists |
| `exists*` | `boolean` | Returns true/false |
| `count*` | `number` | Returns count |
### Error Handling Pattern
All repository methods MUST use the centralized `handleDbError` function:
```typescript
import { handleDbError, NotFoundError } from './errors.db';
async getById(id: number): Promise<Entity> {
try {
const result = await this.pool.query('SELECT * FROM entities WHERE id = $1', [id]);
if (result.rows.length === 0) {
throw new NotFoundError(`Entity with ID ${id} not found.`);
}
return result.rows[0];
} catch (error) {
handleDbError(error, this.logger, 'Database error in getById', { id }, {
entityName: 'Entity',
defaultMessage: 'Failed to fetch entity.',
});
}
}
```
### Transaction Participation
Repository methods that need to participate in transactions MUST accept an optional `PoolClient`:
```typescript
class UserRepository {
  private pool?: Pool;
  private client?: PoolClient;
  constructor(poolOrClient?: Pool | PoolClient) {
    if (poolOrClient && 'release' in poolOrClient) {
      // It's a PoolClient checked out for a transaction (only clients have release())
      this.client = poolOrClient;
    } else {
      this.pool = (poolOrClient as Pool | undefined) ?? getPool();
    }
  }
  private get queryable() {
    return this.client ?? this.pool!;
}
}
```
Or using the function-based pattern:
```typescript
async function createUser(userData: CreateUserInput, client?: PoolClient): Promise<User> {
const queryable = client || getPool();
// ...
}
```
## Implementation Details
### Repository File Structure
```
src/services/db/
├── connection.db.ts # Pool management, withTransaction
├── errors.db.ts # Custom error types, handleDbError
├── index.db.ts # Barrel exports
├── user.db.ts # User repository
├── user.db.test.ts # User repository tests
├── flyer.db.ts # Flyer repository
├── flyer.db.test.ts # Flyer repository tests
└── ... # Other domain repositories
```
### Standard Repository Template
```typescript
// src/services/db/example.db.ts
import { Pool, PoolClient } from 'pg';
import { getPool } from './connection.db';
import { handleDbError, NotFoundError } from './errors.db';
import { logger } from '../logger.server';
import type { Example, CreateExampleInput, UpdateExampleInput } from '../../types';
const log = logger.child({ module: 'example.db' });
/**
* Gets an example by ID.
* @throws {NotFoundError} If the example doesn't exist.
*/
export async function getExampleById(id: number, client?: PoolClient): Promise<Example> {
const queryable = client || getPool();
try {
const result = await queryable.query<Example>('SELECT * FROM examples WHERE id = $1', [id]);
if (result.rows.length === 0) {
throw new NotFoundError(`Example with ID ${id} not found.`);
}
return result.rows[0];
} catch (error) {
handleDbError(
error,
log,
'Database error in getExampleById',
{ id },
{
entityName: 'Example',
defaultMessage: 'Failed to fetch example.',
},
);
}
}
/**
* Finds an example by slug, returns null if not found.
*/
export async function findExampleBySlug(
slug: string,
client?: PoolClient,
): Promise<Example | null> {
const queryable = client || getPool();
try {
const result = await queryable.query<Example>('SELECT * FROM examples WHERE slug = $1', [slug]);
return result.rows[0] || null;
} catch (error) {
handleDbError(
error,
log,
'Database error in findExampleBySlug',
{ slug },
{
entityName: 'Example',
defaultMessage: 'Failed to find example.',
},
);
}
}
/**
* Lists all examples with optional pagination.
*/
export async function listExamples(
options: { limit?: number; offset?: number } = {},
client?: PoolClient,
): Promise<Example[]> {
const queryable = client || getPool();
const { limit = 100, offset = 0 } = options;
try {
const result = await queryable.query<Example>(
'SELECT * FROM examples ORDER BY created_at DESC LIMIT $1 OFFSET $2',
[limit, offset],
);
return result.rows;
} catch (error) {
handleDbError(
error,
log,
'Database error in listExamples',
{ limit, offset },
{
entityName: 'Example',
defaultMessage: 'Failed to list examples.',
},
);
}
}
/**
* Creates a new example.
* @throws {UniqueConstraintError} If slug already exists.
*/
export async function createExample(
input: CreateExampleInput,
client?: PoolClient,
): Promise<Example> {
const queryable = client || getPool();
try {
const result = await queryable.query<Example>(
`INSERT INTO examples (name, slug, description)
VALUES ($1, $2, $3)
RETURNING *`,
[input.name, input.slug, input.description],
);
return result.rows[0];
} catch (error) {
handleDbError(
error,
log,
'Database error in createExample',
{ input },
{
entityName: 'Example',
uniqueMessage: 'An example with this slug already exists.',
defaultMessage: 'Failed to create example.',
},
);
}
}
/**
* Updates an existing example.
* @throws {NotFoundError} If the example doesn't exist.
*/
export async function updateExample(
id: number,
input: UpdateExampleInput,
client?: PoolClient,
): Promise<Example> {
const queryable = client || getPool();
try {
const result = await queryable.query<Example>(
`UPDATE examples
SET name = COALESCE($2, name), description = COALESCE($3, description)
WHERE id = $1
RETURNING *`,
[id, input.name, input.description],
);
if (result.rows.length === 0) {
throw new NotFoundError(`Example with ID ${id} not found.`);
}
return result.rows[0];
} catch (error) {
handleDbError(
error,
log,
'Database error in updateExample',
{ id, input },
{
entityName: 'Example',
defaultMessage: 'Failed to update example.',
},
);
}
}
/**
* Deletes an example.
* @throws {NotFoundError} If the example doesn't exist.
*/
export async function deleteExample(id: number, client?: PoolClient): Promise<void> {
const queryable = client || getPool();
try {
const result = await queryable.query('DELETE FROM examples WHERE id = $1', [id]);
if (result.rowCount === 0) {
throw new NotFoundError(`Example with ID ${id} not found.`);
}
} catch (error) {
handleDbError(
error,
log,
'Database error in deleteExample',
{ id },
{
entityName: 'Example',
defaultMessage: 'Failed to delete example.',
},
);
}
}
```
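The naming table also lists `exists*` and `count*` prefixes; a minimal sketch in the same style, reusing the template's imports and `handleDbError` conventions:
```typescript
/**
 * Checks whether an example with the given slug exists.
 */
export async function existsExampleBySlug(slug: string, client?: PoolClient): Promise<boolean> {
  const queryable = client || getPool();
  try {
    const result = await queryable.query('SELECT 1 FROM examples WHERE slug = $1 LIMIT 1', [slug]);
    return (result.rowCount ?? 0) > 0;
  } catch (error) {
    handleDbError(
      error,
      log,
      'Database error in existsExampleBySlug',
      { slug },
      { entityName: 'Example', defaultMessage: 'Failed to check example existence.' },
    );
  }
}

/**
 * Counts all examples.
 */
export async function countExamples(client?: PoolClient): Promise<number> {
  const queryable = client || getPool();
  try {
    const result = await queryable.query<{ count: string }>('SELECT COUNT(*) AS count FROM examples');
    return Number(result.rows[0].count);
  } catch (error) {
    handleDbError(
      error,
      log,
      'Database error in countExamples',
      {},
      { entityName: 'Example', defaultMessage: 'Failed to count examples.' },
    );
  }
}
```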
### Using with Transactions
```typescript
import { withTransaction } from './connection.db';
import { createExample, updateExample } from './example.db';
import { createRelated } from './related.db';
async function createExampleWithRelated(data: ComplexInput): Promise<Example> {
return withTransaction(async (client) => {
const example = await createExample(data.example, client);
await createRelated({ exampleId: example.id, ...data.related }, client);
return example;
});
}
```
## Key Files
- `src/services/db/connection.db.ts` - `getPool()`, `withTransaction()`
- `src/services/db/errors.db.ts` - `handleDbError()`, custom error classes
- `src/services/db/index.db.ts` - Barrel exports for all repositories
- `src/services/db/*.db.ts` - Individual domain repositories
## Consequences
### Positive
- **Consistency**: All repositories follow the same patterns
- **Predictability**: Method names clearly indicate behavior
- **Testability**: Consistent interfaces make mocking straightforward
- **Error Handling**: Centralized error handling prevents inconsistent responses
- **Transaction Safety**: Clear pattern for transaction participation
### Negative
- **Learning Curve**: Developers must learn and follow conventions
- **Boilerplate**: Each method requires similar error handling structure
- **Refactoring**: Existing repositories may need updates to conform
## Compliance Checklist
For new repository methods:
- [ ] Method name follows prefix convention (get/find/list/create/update/delete)
- [ ] Throws `NotFoundError` for `get*` methods when entity not found
- [ ] Returns `null` for `find*` methods when entity not found
- [ ] Uses `handleDbError` for database error handling
- [ ] Accepts optional `PoolClient` parameter for transaction support
- [ ] Includes JSDoc with `@throws` documentation
- [ ] Has corresponding unit tests

View File

@@ -0,0 +1,328 @@
# ADR-035: Service Layer Architecture
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
The application has evolved to include multiple service types:
1. **Repository services** (`*.db.ts`): Direct database access
2. **Business services** (`*Service.ts`): Business logic orchestration
3. **External services** (`*Service.server.ts`): Integration with external APIs
4. **Infrastructure services** (`logger`, `redis`, `queues`): Cross-cutting concerns
Without clear boundaries, business logic can leak into routes, repositories can contain business rules, and services can become tightly coupled.
## Decision
We will establish a clear layered architecture with defined responsibilities for each layer:
### Layer Responsibilities
```
┌─────────────────────────────────────────────────────────────────┐
│ Routes Layer │
│ - Request/response handling │
│ - Input validation (via middleware) │
│ - Authentication/authorization │
│ - Rate limiting │
│ - Response formatting │
└─────────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────────┐
│ Services Layer │
│ - Business logic orchestration │
│ - Transaction coordination │
│ - External API integration │
│ - Cross-repository operations │
│ - Event publishing │
└─────────────────────────────────────────────────────────────────┘
┌─────────────────────────────────────────────────────────────────┐
│ Repository Layer │
│ - Direct database access │
│ - Query construction │
│ - Entity mapping │
│ - Error translation │
└─────────────────────────────────────────────────────────────────┘
```
### Service Types and Naming
| Type | Pattern | Suffix | Example |
| ------------------- | ------------------------------- | ------------- | --------------------- |
| Business Service | Orchestrates business logic | `*Service.ts` | `authService.ts` |
| Server-Only Service | External APIs, server-side only | `*.server.ts` | `aiService.server.ts` |
| Database Repository | Direct DB access | `*.db.ts` | `user.db.ts` |
| Infrastructure | Cross-cutting concerns | Descriptive | `logger.server.ts` |
### Service Dependencies
```
Routes → Business Services → Repositories
                           → External Services
                           → Infrastructure (logger, redis, queues)
```
**Rules**:
- Routes MUST NOT directly access repositories (except simple CRUD)
- Repositories MUST NOT call other repositories (use services)
- Services MAY call other services
- Infrastructure services MAY be called from any layer
## Implementation Details
### Business Service Pattern
```typescript
// src/services/authService.ts
import { withTransaction } from './db/connection.db';
import * as userRepo from './db/user.db';
import * as profileRepo from './db/personalization.db';
import { emailService } from './emailService.server';
import { logger } from './logger.server';
const log = logger.child({ service: 'auth' });
interface LoginResult {
user: UserProfile;
accessToken: string;
refreshToken: string;
}
export const authService = {
/**
* Registers a new user and sends welcome email.
* Orchestrates multiple repositories in a transaction.
*/
async registerAndLoginUser(
email: string,
password: string,
fullName?: string,
avatarUrl?: string,
reqLog?: Logger,
): Promise<LoginResult> {
const log = reqLog || logger;
return withTransaction(async (client) => {
// 1. Create user (repository)
const user = await userRepo.createUser({ email, password }, client);
// 2. Create profile (repository)
await profileRepo.createProfile(
{
userId: user.user_id,
fullName,
avatarUrl,
},
client,
);
// 3. Generate tokens (business logic)
const { accessToken, refreshToken } = this.generateTokens(user);
// 4. Send welcome email (external service, non-blocking)
emailService.sendWelcomeEmail(email, fullName).catch((err) => {
log.warn({ err, email }, 'Failed to send welcome email');
});
log.info({ userId: user.user_id }, 'User registered successfully');
return {
user: await this.buildUserProfile(user.user_id, client),
accessToken,
refreshToken,
};
});
},
// ... other methods
};
```
### Server-Only Service Pattern
```typescript
// src/services/aiService.server.ts
// This file MUST only be imported by server-side code
import { GenAI } from '@google/genai';
import { config } from '../config/env';
import { logger } from './logger.server';
const log = logger.child({ service: 'ai' });
class AiService {
private client: GenAI;
constructor() {
this.client = new GenAI({ apiKey: config.ai.geminiApiKey });
}
async analyzeImage(imagePath: string): Promise<AnalysisResult> {
log.info({ imagePath }, 'Starting image analysis');
// ... implementation
}
}
export const aiService = new AiService();
```
### Route Handler Pattern
```typescript
// src/routes/auth.routes.ts
import { Router } from 'express';
import { validateRequest } from '../middleware/validation.middleware';
import { loginLimiter } from '../config/rateLimiters';
import { authService } from '../services/authService';
const router = Router();
// Route is thin - delegates to service
router.post(
'/register',
registerLimiter,
validateRequest(registerSchema),
async (req, res, next) => {
try {
const { email, password, full_name } = req.body;
// Delegate to service
const result = await authService.registerAndLoginUser(
email,
password,
full_name,
undefined,
req.log, // Pass request-scoped logger
);
// Format response
res.status(201).json({
message: 'Registration successful',
user: result.user,
accessToken: result.accessToken,
});
} catch (error) {
next(error); // Let error handler deal with it
}
},
);
```
### Service File Organization
```
src/services/
├── db/ # Repository layer
│ ├── connection.db.ts # Pool, transactions
│ ├── errors.db.ts # DB error types
│ ├── user.db.ts # User repository
│ ├── flyer.db.ts # Flyer repository
│ └── index.db.ts # Barrel exports
├── authService.ts # Authentication business logic
├── userService.ts # User management business logic
├── gamificationService.ts # Gamification business logic
├── aiService.server.ts # AI API integration (server-only)
├── emailService.server.ts # Email sending (server-only)
├── geocodingService.server.ts # Geocoding API (server-only)
├── cacheService.server.ts # Redis caching (server-only)
├── queueService.server.ts # BullMQ queues (server-only)
├── logger.server.ts # Pino logger (server-only)
└── logger.client.ts # Client-side logger
```
### Dependency Injection for Testing
Services should support dependency injection for easier testing:
```typescript
// Production: use singleton
export const authService = createAuthService();
// Testing: inject mocks
export function createAuthService(deps?: Partial<AuthServiceDeps>) {
const userRepo = deps?.userRepo || defaultUserRepo;
const emailService = deps?.emailService || defaultEmailService;
return {
async registerAndLoginUser(...) { /* ... */ },
};
}
```
## Key Files
### Infrastructure Services
- `src/services/logger.server.ts` - Server-side structured logging
- `src/services/logger.client.ts` - Client-side logging
- `src/services/redis.server.ts` - Redis connection management
- `src/services/queueService.server.ts` - BullMQ queue management
- `src/services/cacheService.server.ts` - Caching abstraction
### Business Services
- `src/services/authService.ts` - Authentication flows
- `src/services/userService.ts` - User management
- `src/services/gamificationService.ts` - Achievements, leaderboards
- `src/services/flyerProcessingService.server.ts` - Flyer pipeline
### External Integration Services
- `src/services/aiService.server.ts` - Gemini AI integration
- `src/services/emailService.server.ts` - Email sending
- `src/services/geocodingService.server.ts` - Address geocoding
## Consequences
### Positive
- **Separation of Concerns**: Clear boundaries between layers
- **Testability**: Services can be tested in isolation with mocked dependencies
- **Reusability**: Business logic in services can be used by multiple routes
- **Maintainability**: Changes to one layer don't ripple through others
- **Transaction Safety**: Services coordinate transactions across repositories
### Negative
- **Indirection**: More layers mean more code to navigate
- **Potential Over-Engineering**: Simple CRUD operations don't need full service layer
- **Coordination Overhead**: Team must agree on layer boundaries
## Guidelines
### When to Create a Service
Create a business service when:
- Logic spans multiple repositories
- External APIs need to be called
- Complex business rules exist
- The same logic is needed by multiple routes
- Transaction coordination is required
### When Direct Repository Access is OK
Routes can directly use repositories for:
- Simple single-entity CRUD operations
- Read-only queries with no business logic
- Operations that don't need transaction coordination
### Service Method Guidelines
- Accept a request-scoped logger as an optional parameter
- Return domain objects, not HTTP-specific responses
- Throw domain errors, let routes handle HTTP status codes
- Use `withTransaction` for multi-repository operations
- Log business events (user registered, order placed, etc.)

View File

@@ -0,0 +1,212 @@
# ADR-036: Event Bus and Pub/Sub Pattern
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
Modern web applications often need to handle cross-component communication without creating tight coupling between modules. In our application, several scenarios require broadcasting events across the system:
1. **Session Expiry**: When a user's session expires, multiple components need to respond (auth state, UI notifications, API client).
2. **Real-time Updates**: When data changes on the server, multiple UI components may need to update.
3. **Cross-Component Communication**: Independent components need to communicate without direct references to each other.
Traditional approaches like prop drilling or global state management can lead to tightly coupled code that is difficult to maintain and test.
## Decision
We will implement a lightweight, in-memory event bus pattern using a publish/subscribe (pub/sub) architecture. This provides:
1. **Decoupled Communication**: Publishers and subscribers don't need to know about each other.
2. **Event-Driven Architecture**: Components react to events rather than polling for changes.
3. **Testability**: Events can be easily mocked and verified in tests.
### Design Principles
- **Singleton Pattern**: A single event bus instance is shared across the application.
- **Type-Safe Events**: Event names are string constants to prevent typos.
- **Memory Management**: Subscribers must unsubscribe when components unmount to prevent memory leaks.
## Implementation Details
### EventBus Class
Located in `src/services/eventBus.ts`:
```typescript
type EventCallback = (data?: any) => void;
export class EventBus {
private listeners: { [key: string]: EventCallback[] } = {};
on(event: string, callback: EventCallback): void {
if (!this.listeners[event]) {
this.listeners[event] = [];
}
this.listeners[event].push(callback);
}
off(event: string, callback: EventCallback): void {
if (!this.listeners[event]) return;
this.listeners[event] = this.listeners[event].filter((l) => l !== callback);
}
dispatch(event: string, data?: any): void {
if (!this.listeners[event]) return;
this.listeners[event].forEach((callback) => callback(data));
}
}
// Singleton instance
export const eventBus = new EventBus();
```
### Event Constants
Define event names as constants to prevent typos:
```typescript
// src/constants/events.ts
export const EVENTS = {
SESSION_EXPIRED: 'session:expired',
SESSION_REFRESHED: 'session:refreshed',
USER_LOGGED_OUT: 'user:loggedOut',
DATA_UPDATED: 'data:updated',
NOTIFICATION_RECEIVED: 'notification:received',
} as const;
```
### React Hook for Event Subscription
```typescript
// src/hooks/useEventBus.ts
import { useEffect } from 'react';
import { eventBus } from '../services/eventBus';
export function useEventBus(event: string, callback: (data?: any) => void) {
useEffect(() => {
eventBus.on(event, callback);
// Cleanup on unmount
return () => {
eventBus.off(event, callback);
};
}, [event, callback]);
}
```
### Usage Examples
**Publishing Events**:
```typescript
import { eventBus } from '../services/eventBus';
import { EVENTS } from '../constants/events';
// In API client when session expires
function handleSessionExpiry() {
eventBus.dispatch(EVENTS.SESSION_EXPIRED, { reason: 'token_expired' });
}
```
**Subscribing in Components**:
```typescript
import { useCallback } from 'react';
import { useEventBus } from '../hooks/useEventBus';
import { EVENTS } from '../constants/events';
function AuthenticatedComponent() {
const handleSessionExpired = useCallback((data?: { reason?: string }) => {
console.log('Session expired:', data?.reason);
// Redirect to login, show notification, etc.
}, []);
useEventBus(EVENTS.SESSION_EXPIRED, handleSessionExpired);
return <div>Protected Content</div>;
}
```
**Subscribing in Non-React Code**:
```typescript
import { eventBus } from '../services/eventBus';
import { EVENTS } from '../constants/events';
// In API client
const handleLogout = () => {
clearAuthToken();
};
eventBus.on(EVENTS.USER_LOGGED_OUT, handleLogout);
```
### Testing
The EventBus is fully tested in `src/services/eventBus.test.ts`:
```typescript
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { EventBus } from './eventBus';
describe('EventBus', () => {
let bus: EventBus;
beforeEach(() => {
bus = new EventBus();
});
it('should call registered listeners when event is dispatched', () => {
const callback = vi.fn();
bus.on('test', callback);
bus.dispatch('test', { value: 42 });
expect(callback).toHaveBeenCalledWith({ value: 42 });
});
it('should unsubscribe listeners correctly', () => {
const callback = vi.fn();
bus.on('test', callback);
bus.off('test', callback);
bus.dispatch('test');
expect(callback).not.toHaveBeenCalled();
});
it('should handle multiple listeners for the same event', () => {
const callback1 = vi.fn();
const callback2 = vi.fn();
bus.on('test', callback1);
bus.on('test', callback2);
bus.dispatch('test');
expect(callback1).toHaveBeenCalled();
expect(callback2).toHaveBeenCalled();
});
});
```
## Consequences
### Positive
- **Loose Coupling**: Components don't need direct references to communicate.
- **Flexibility**: New subscribers can be added without modifying publishers.
- **Testability**: Easy to mock events and verify interactions.
- **Simplicity**: Minimal code footprint compared to full state management solutions.
### Negative
- **Debugging Complexity**: Event-driven flows can be harder to trace than direct function calls.
- **Memory Leaks**: Forgetting to unsubscribe can cause memory leaks (mitigated by the React hook).
- **No Type Safety for Payloads**: Event data is typed as `any`; this could be tightened with generics (a sketch follows below).
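A possible direction for tightening payload types, sketched here but not part of the current implementation, is to key the bus on an event-to-payload map so both `dispatch` and `on` are checked at compile time:
```typescript
// Illustrative event-to-payload map; real event names live in src/constants/events.ts.
type EventPayloads = {
  'session:expired': { reason: string };
  'user:loggedOut': undefined;
};

class TypedEventBus<Events extends Record<string, unknown>> {
  private listeners: { [K in keyof Events]?: Array<(data: Events[K]) => void> } = {};

  on<K extends keyof Events>(event: K, callback: (data: Events[K]) => void): void {
    (this.listeners[event] ??= []).push(callback);
  }

  off<K extends keyof Events>(event: K, callback: (data: Events[K]) => void): void {
    this.listeners[event] = (this.listeners[event] ?? []).filter((l) => l !== callback);
  }

  dispatch<K extends keyof Events>(event: K, data: Events[K]): void {
    (this.listeners[event] ?? []).forEach((callback) => callback(data));
  }
}

// Usage: payload types are checked at the call site.
const typedBus = new TypedEventBus<EventPayloads>();
typedBus.on('session:expired', (data) => console.log(data.reason));
typedBus.dispatch('session:expired', { reason: 'token_expired' });
```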
## Key Files
- `src/services/eventBus.ts` - EventBus implementation
- `src/services/eventBus.test.ts` - EventBus tests
## Related ADRs
- [ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md) - State Management Strategy
- [ADR-022](./0022-real-time-notification-system.md) - Real-time Notification System

View File

@@ -0,0 +1,265 @@
# ADR-037: Scheduled Jobs and Cron Pattern
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
Many business operations need to run on a recurring schedule without user intervention:
1. **Daily Deal Checks**: Scan watched items for price drops and notify users.
2. **Analytics Generation**: Compile daily and weekly statistics reports.
3. **Token Cleanup**: Remove expired password reset tokens from the database.
4. **Data Maintenance**: Archive old data, clean up temporary files.
These scheduled operations require:
- Reliable execution at specific times
- Protection against overlapping runs
- Graceful error handling that doesn't crash the server
- Integration with the existing job queue system (BullMQ)
## Decision
We will use `node-cron` for scheduling jobs and integrate with BullMQ for job execution. This provides:
1. **Cron Expressions**: Standard, well-understood scheduling syntax.
2. **Job Queue Integration**: Scheduled jobs enqueue work to BullMQ for reliable processing.
3. **Idempotency**: Jobs use predictable IDs to prevent duplicate runs.
4. **Overlap Protection**: In-memory locks prevent concurrent execution of the same job.
### Architecture
```text
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
│ node-cron │────▶│ BullMQ Queue │────▶│ Worker │
│ (Scheduler) │ │ (Job Store) │ │ (Processor) │
└─────────────────┘ └─────────────────┘ └─────────────────┘
┌─────────────────┐
│ Redis │
│ (Persistence) │
└─────────────────┘
```
## Implementation Details
### BackgroundJobService
Located in `src/services/backgroundJobService.ts`:
```typescript
import cron from 'node-cron';
import type { Logger } from 'pino';
import type { Queue } from 'bullmq';
export class BackgroundJobService {
constructor(
private personalizationRepo: PersonalizationRepository,
private notificationRepo: NotificationRepository,
private emailQueue: Queue<EmailJobData>,
private logger: Logger,
) {}
async runDailyDealCheck(): Promise<void> {
this.logger.info('[BackgroundJob] Starting daily deal check...');
// 1. Fetch all deals for all users in one efficient query
const allDeals = await this.personalizationRepo.getBestSalePricesForAllUsers(this.logger);
// 2. Group deals by user
const dealsByUser = this.groupDealsByUser(allDeals);
// 3. Process each user's deals in parallel
const results = await Promise.allSettled(
Array.from(dealsByUser.values()).map((userGroup) => this._processDealsForUser(userGroup)),
);
// 4. Bulk insert notifications
await this.bulkCreateNotifications(results);
this.logger.info('[BackgroundJob] Daily deal check completed.');
}
async triggerAnalyticsReport(): Promise<string> {
const reportDate = getCurrentDateISOString();
const jobId = `manual-report-${reportDate}-${Date.now()}`;
const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
return job.id;
}
}
```
### Cron Job Initialization
```typescript
// In-memory lock to prevent job overlap
let isDailyDealCheckRunning = false;
export function startBackgroundJobs(
backgroundJobService: BackgroundJobService,
analyticsQueue: Queue,
weeklyAnalyticsQueue: Queue,
tokenCleanupQueue: Queue,
logger: Logger,
): void {
// Daily deal check at 2:00 AM
cron.schedule('0 2 * * *', () => {
(async () => {
if (isDailyDealCheckRunning) {
logger.warn('[BackgroundJob] Daily deal check already running. Skipping.');
return;
}
isDailyDealCheckRunning = true;
try {
await backgroundJobService.runDailyDealCheck();
} catch (error) {
logger.error({ err: error }, '[BackgroundJob] Daily deal check failed.');
} finally {
isDailyDealCheckRunning = false;
}
})().catch((error) => {
logger.error({ err: error }, '[BackgroundJob] Unhandled rejection in cron wrapper.');
isDailyDealCheckRunning = false;
});
});
// Daily analytics at 3:00 AM
cron.schedule('0 3 * * *', () => {
(async () => {
const reportDate = getCurrentDateISOString();
await analyticsQueue.add(
'generate-daily-report',
{ reportDate },
{ jobId: `daily-report-${reportDate}` }, // Prevents duplicates
);
})().catch((error) => {
logger.error({ err: error }, '[BackgroundJob] Analytics job enqueue failed.');
});
});
// Weekly analytics at 4:00 AM on Sundays
cron.schedule('0 4 * * 0', () => {
(async () => {
const { year, week } = getSimpleWeekAndYear();
await weeklyAnalyticsQueue.add(
'generate-weekly-report',
{ reportYear: year, reportWeek: week },
{ jobId: `weekly-report-${year}-${week}` },
);
})().catch((error) => {
logger.error({ err: error }, '[BackgroundJob] Weekly analytics enqueue failed.');
});
});
// Token cleanup at 5:00 AM
cron.schedule('0 5 * * *', () => {
(async () => {
const timestamp = new Date().toISOString();
await tokenCleanupQueue.add(
'cleanup-tokens',
{ timestamp },
{ jobId: `token-cleanup-${timestamp.split('T')[0]}` },
);
})().catch((error) => {
logger.error({ err: error }, '[BackgroundJob] Token cleanup enqueue failed.');
});
});
logger.info('[BackgroundJob] All cron jobs scheduled successfully.');
}
```
### Job Schedule Reference
| Job | Schedule | Queue | Purpose |
| ---------------- | ---------------------------- | ---------------------- | --------------------------------- |
| Daily Deal Check | `0 2 * * *` (2:00 AM) | Direct execution | Find price drops on watched items |
| Daily Analytics | `0 3 * * *` (3:00 AM) | `analyticsQueue` | Generate daily statistics |
| Weekly Analytics | `0 4 * * 0` (4:00 AM Sunday) | `weeklyAnalyticsQueue` | Generate weekly reports |
| Token Cleanup | `0 5 * * *` (5:00 AM) | `tokenCleanupQueue` | Remove expired tokens |
### Cron Expression Reference
```text
┌───────────── minute (0 - 59)
│ ┌───────────── hour (0 - 23)
│ │ ┌───────────── day of month (1 - 31)
│ │ │ ┌───────────── month (1 - 12)
│ │ │ │ ┌───────────── day of week (0 - 7, Sun = 0 or 7)
│ │ │ │ │
* * * * *
Examples:
0 2 * * * = 2:00 AM every day
0 4 * * 0 = 4:00 AM every Sunday
*/15 * * * * = Every 15 minutes
0 0 1 * * = Midnight on the 1st of each month
```
### Error Handling Pattern
The async IIFE wrapper with `.catch()` ensures that:
1. Errors in the job don't crash the cron scheduler
2. Unhandled promise rejections are logged
3. The lock is always released in the `finally` block
```typescript
cron.schedule('0 2 * * *', () => {
(async () => {
// Job logic here
})().catch((error) => {
// Handle unhandled rejections from the async wrapper
logger.error({ err: error }, 'Unhandled rejection');
});
});
```
### Manual Trigger API
Admin endpoints allow manual triggering of scheduled jobs:
```typescript
// src/routes/admin.routes.ts
router.post('/jobs/daily-deals', isAdmin, async (req, res, next) => {
await backgroundJobService.runDailyDealCheck();
res.json({ message: 'Daily deal check triggered' });
});
router.post('/jobs/analytics', isAdmin, async (req, res, next) => {
const jobId = await backgroundJobService.triggerAnalyticsReport();
res.json({ message: 'Analytics report queued', jobId });
});
```
## Consequences
### Positive
- **Reliability**: Jobs run at predictable times without manual intervention.
- **Idempotency**: Duplicate job prevention via job IDs.
- **Observability**: All job activity is logged with structured logging.
- **Flexibility**: Jobs can be triggered manually for testing or urgent runs.
- **Separation**: Scheduling is decoupled from job execution (cron vs BullMQ).
### Negative
- **Single Server**: Cron runs on a single server instance. For multi-server deployments, consider distributed scheduling.
- **Time Zone Dependency**: Cron times are server-local; consider UTC for distributed systems.
- **In-Memory Locks**: Overlap protection is per-process, not cluster-wide.
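One possible mitigation, not implemented in this ADR, is to replace the per-process flag with a short-lived Redis lock so that only one instance runs a given job; the key name, TTL, and helper below are illustrative:
```typescript
import Redis from 'ioredis';

// Illustrative sketch: a cluster-wide lock on top of the existing Redis instance.
const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

async function runWithClusterLock(
  lockKey: string,
  ttlMs: number,
  job: () => Promise<void>,
): Promise<void> {
  // SET ... NX PX succeeds only for the first instance that asks within the TTL.
  const acquired = await redis.set(lockKey, String(process.pid), 'PX', ttlMs, 'NX');
  if (acquired !== 'OK') return; // Another instance holds the lock; skip this run.
  try {
    await job();
  } finally {
    // Simplistic release: a production version should verify lock ownership first.
    await redis.del(lockKey);
  }
}

// Usage inside the cron callback (illustrative):
// await runWithClusterLock('locks:daily-deal-check', 30 * 60 * 1000, () =>
//   backgroundJobService.runDailyDealCheck(),
// );
```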
## Key Files
- `src/services/backgroundJobService.ts` - BackgroundJobService class and `startBackgroundJobs`
- `src/services/queueService.server.ts` - BullMQ queue definitions
- `src/services/workers.server.ts` - BullMQ worker processors
## Related ADRs
- [ADR-006](./0006-background-job-processing-and-task-queues.md) - Background Job Processing
- [ADR-004](./0004-standardized-application-wide-structured-logging.md) - Structured Logging

View File

@@ -0,0 +1,290 @@
# ADR-038: Graceful Shutdown Pattern
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
When deploying or restarting the application, abrupt termination can cause:
1. **Lost Jobs**: BullMQ jobs in progress may be marked as failed or stalled.
2. **Connection Leaks**: Database and Redis connections may not be properly closed.
3. **Incomplete Requests**: HTTP requests in flight may receive no response.
4. **Data Corruption**: Transactions may be left in an inconsistent state.
Kubernetes and PM2 send termination signals (SIGTERM, SIGINT) to processes before forcefully killing them. The application must handle these signals to shut down gracefully.
## Decision
We will implement a coordinated graceful shutdown pattern that:
1. **Stops Accepting New Work**: Closes HTTP server, pauses job queues.
2. **Completes In-Flight Work**: Waits for active requests and jobs to finish.
3. **Releases Resources**: Closes database pools, Redis connections, and queues.
4. **Logs Shutdown Progress**: Provides visibility into the shutdown process.
### Signal Handling
| Signal | Source | Behavior |
| ------- | ------------------ | --------------------------------------- |
| SIGTERM | Kubernetes, PM2 | Graceful shutdown with resource cleanup |
| SIGINT | Ctrl+C in terminal | Same as SIGTERM |
| SIGKILL | Force kill | Cannot be caught; immediate termination |
## Implementation Details
### Queue and Worker Shutdown
Located in `src/services/queueService.server.ts`:
```typescript
import { logger } from './logger.server';
export const gracefulShutdown = async (signal: string): Promise<void> => {
logger.info(`[Shutdown] Received ${signal}. Closing all queues and workers...`);
const resources = [
{ name: 'flyerQueue', close: () => flyerQueue.close() },
{ name: 'emailQueue', close: () => emailQueue.close() },
{ name: 'analyticsQueue', close: () => analyticsQueue.close() },
{ name: 'weeklyAnalyticsQueue', close: () => weeklyAnalyticsQueue.close() },
{ name: 'cleanupQueue', close: () => cleanupQueue.close() },
{ name: 'tokenCleanupQueue', close: () => tokenCleanupQueue.close() },
{ name: 'redisConnection', close: () => connection.quit() },
];
const results = await Promise.allSettled(
resources.map(async (resource) => {
try {
await resource.close();
logger.info(`[Shutdown] ${resource.name} closed successfully.`);
} catch (error) {
logger.error({ err: error }, `[Shutdown] Error closing ${resource.name}`);
throw error;
}
}),
);
const failures = results.filter((r) => r.status === 'rejected');
if (failures.length > 0) {
logger.error(`[Shutdown] ${failures.length} resources failed to close.`);
}
logger.info('[Shutdown] All resources closed. Process can now exit.');
};
// Register signal handlers
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
```
### HTTP Server Shutdown
Located in `server.ts`:
```typescript
import { gracefulShutdown as shutdownQueues } from './src/services/queueService.server';
import { closePool } from './src/services/db/connection.db';
const server = app.listen(PORT, () => {
logger.info(`Server listening on port ${PORT}`);
});
const gracefulShutdown = async (signal: string): Promise<void> => {
logger.info(`[Shutdown] Received ${signal}. Starting graceful shutdown...`);
// 1. Stop accepting new connections
server.close((err) => {
if (err) {
logger.error({ err }, '[Shutdown] Error closing HTTP server');
} else {
logger.info('[Shutdown] HTTP server closed.');
}
});
// 2. Wait for in-flight requests (with timeout)
await new Promise((resolve) => setTimeout(resolve, 5000));
// 3. Close queues and workers
await shutdownQueues(signal);
// 4. Close database pool
await closePool();
logger.info('[Shutdown] Database pool closed.');
// 5. Exit process
process.exit(0);
};
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
```
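The fixed 5-second delay is a simple approximation. A variant, sketched here as an assumption rather than the current implementation, is to await `server.close()` itself (its callback fires once existing connections have ended) with a hard cap as a fallback:
```typescript
import type { Server } from 'http';

// Wait for in-flight requests to finish, but never longer than maxWaitMs.
function closeServer(server: Server, maxWaitMs = 10_000): Promise<void> {
  const closed = new Promise<void>((resolve, reject) => {
    server.close((err) => (err ? reject(err) : resolve()));
  });
  const timeout = new Promise<void>((resolve) => setTimeout(resolve, maxWaitMs));
  return Promise.race([closed, timeout]);
}

// In gracefulShutdown: await closeServer(server); then continue with queues and the pool.
```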
### Database Pool Shutdown
Located in `src/services/db/connection.db.ts`:
```typescript
let pool: Pool | null = null;
export function getPool(): Pool {
if (!pool) {
pool = new Pool({
max: 20,
idleTimeoutMillis: 30000,
connectionTimeoutMillis: 2000,
});
}
return pool;
}
export async function closePool(): Promise<void> {
if (pool) {
await pool.end();
pool = null;
logger.info('[Database] Connection pool closed.');
}
}
export function getPoolStatus(): { totalCount: number; idleCount: number; waitingCount: number } {
const p = getPool();
return {
totalCount: p.totalCount,
idleCount: p.idleCount,
waitingCount: p.waitingCount,
};
}
```
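`getPoolStatus` is mostly useful outside shutdown; for instance, a readiness probe (ADR-020) could report pool pressure. The route and import path below are a hypothetical sketch, not an existing endpoint:
```typescript
import { Router } from 'express';
import { getPoolStatus } from '../services/db/connection.db';

const healthRouter = Router();

healthRouter.get('/health/ready', (_req, res) => {
  // Expose pool saturation so orchestrators can see pressure before failures occur.
  const pool = getPoolStatus();
  res.json({ status: 'ok', pool });
});

export default healthRouter;
```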
### PM2 Ecosystem Configuration
Located in `ecosystem.config.cjs`:
```javascript
module.exports = {
apps: [
{
name: 'flyer-crawler-api',
script: 'server.ts',
interpreter: 'tsx',
// Graceful shutdown settings
kill_timeout: 10000, // 10 seconds to cleanup before SIGKILL
wait_ready: true, // Wait for 'ready' signal before considering app started
listen_timeout: 10000, // Timeout for ready signal
// Single fork instance; cluster mode would be needed for true zero-downtime reloads
instances: 1,
exec_mode: 'fork',
// Environment variables
env_production: {
NODE_ENV: 'production',
PORT: 3000,
},
env_test: {
NODE_ENV: 'test',
PORT: 3001,
},
},
],
};
```
### Worker Graceful Shutdown
BullMQ workers can be configured to wait for active jobs:
```typescript
import { Worker } from 'bullmq';
const worker = new Worker('flyerQueue', processor, {
connection,
// Graceful shutdown: worker.close() waits for the active job before resolving
lockDuration: 30000, // Time before an active job is considered stalled
stalledInterval: 5000, // Check for stalled jobs every 5s
});
// Workers auto-close when connection closes
worker.on('closing', () => {
logger.info('[Worker] flyerQueue worker is closing...');
});
worker.on('closed', () => {
logger.info('[Worker] flyerQueue worker closed.');
});
```
### Shutdown Sequence Diagram
```text
SIGTERM Received
┌──────────────────────┐
│ Stop HTTP Server │ ← No new connections accepted
│ (server.close()) │
└──────────────────────┘
┌──────────────────────┐
│ Wait for In-Flight │ ← 5-second grace period
│ Requests │
└──────────────────────┘
┌──────────────────────┐
│ Close BullMQ Queues │ ← Stop processing new jobs
│ and Workers │
└──────────────────────┘
┌──────────────────────┐
│ Close Redis │ ← Disconnect from Redis
│ Connection │
└──────────────────────┘
┌──────────────────────┐
│ Close Database Pool │ ← Release all DB connections
│ (pool.end()) │
└──────────────────────┘
┌──────────────────────┐
│ process.exit(0) │ ← Clean exit
└──────────────────────┘
```
## Consequences
### Positive
- **Zero Lost Work**: In-flight requests and jobs complete before shutdown.
- **Clean Resource Cleanup**: All connections are properly closed.
- **Zero-Downtime Deploys**: PM2 can reload without dropping requests.
- **Observability**: Shutdown progress is logged for debugging.
### Negative
- **Shutdown Delay**: Takes 5-15 seconds to shut down fully.
- **Complexity**: Multiple shutdown handlers must be coordinated.
- **Edge Cases**: Very long-running jobs may be killed if they exceed the grace period.
## Key Files
- `server.ts` - HTTP server shutdown and signal handling
- `src/services/queueService.server.ts` - Queue shutdown (`gracefulShutdown`)
- `src/services/db/connection.db.ts` - Database pool shutdown (`closePool`)
- `ecosystem.config.cjs` - PM2 configuration with `kill_timeout`
## Related ADRs
- [ADR-006](./0006-background-job-processing-and-task-queues.md) - Background Job Processing
- [ADR-020](./0020-health-checks-and-liveness-readiness-probes.md) - Health Checks
- [ADR-014](./0014-containerization-and-deployment-strategy.md) - Containerization

View File

@@ -0,0 +1,278 @@
# ADR-039: Dependency Injection Pattern
**Date**: 2026-01-09
**Status**: Accepted
**Implemented**: 2026-01-09
## Context
As the application grows, tightly coupled components become difficult to test and maintain. Common issues include:
1. **Hard-to-Test Code**: Components that instantiate their own dependencies cannot be easily unit tested with mocks.
2. **Rigid Architecture**: Changing one implementation requires modifying all consumers.
3. **Hidden Dependencies**: It's unclear what a component needs to function.
4. **Circular Dependencies**: Tight coupling can lead to circular import issues.
Dependency Injection (DI) addresses these issues by inverting the control of dependency creation.
## Decision
We will adopt a constructor-based dependency injection pattern for all services and repositories. This approach:
1. **Explicit Dependencies**: All dependencies are declared in the constructor.
2. **Default Values**: Production dependencies have sensible defaults.
3. **Testability**: Test code can inject mocks without modifying source code.
4. **Loose Coupling**: Components depend on interfaces, not implementations.
### Design Principles
- **Constructor Injection**: Dependencies are passed through constructors, not looked up globally.
- **Default Production Dependencies**: Use default parameter values for production instances.
- **Interface Segregation**: Depend on the minimal interface needed (e.g., `Pick<Pool, 'query'>`).
- **Composition Root**: Wire dependencies at the application entry point.
## Implementation Details
### Repository Pattern with DI
Located in `src/services/db/flyer.db.ts`:
```typescript
import { Pool, PoolClient } from 'pg';
import { getPool } from './connection.db';
export class FlyerRepository {
// Accept any object with a 'query' method - Pool or PoolClient
private db: Pick<Pool | PoolClient, 'query'>;
constructor(db: Pick<Pool | PoolClient, 'query'> = getPool()) {
this.db = db;
}
async getFlyerById(flyerId: number, logger: Logger): Promise<Flyer> {
const result = await this.db.query<Flyer>('SELECT * FROM flyers WHERE flyer_id = $1', [
flyerId,
]);
if (result.rows.length === 0) {
throw new NotFoundError(`Flyer with ID ${flyerId} not found.`);
}
return result.rows[0];
}
async insertFlyer(flyer: FlyerDbInsert, logger: Logger): Promise<Flyer> {
// Implementation
}
}
```
**Usage in Production**:
```typescript
// Uses default pool
const flyerRepo = new FlyerRepository();
```
**Usage in Tests**:
```typescript
const mockDb = {
query: vi.fn().mockResolvedValue({ rows: [mockFlyer] }),
};
const flyerRepo = new FlyerRepository(mockDb);
```
**Usage in Transactions**:
```typescript
import { withTransaction } from './connection.db';
await withTransaction(async (client) => {
// Pass transactional client to repository
const flyerRepo = new FlyerRepository(client);
const flyer = await flyerRepo.insertFlyer(flyerData, logger);
// ... more operations in the same transaction
});
```
### Service Layer with DI
Located in `src/services/backgroundJobService.ts`:
```typescript
export class BackgroundJobService {
constructor(
private personalizationRepo: PersonalizationRepository,
private notificationRepo: NotificationRepository,
private emailQueue: Queue<EmailJobData>,
private logger: Logger,
) {}
async runDailyDealCheck(): Promise<void> {
this.logger.info('[BackgroundJob] Starting daily deal check...');
const deals = await this.personalizationRepo.getBestSalePricesForAllUsers(this.logger);
// ... process deals
}
}
// Composition root - wire production dependencies
import { personalizationRepo, notificationRepo } from './db/index.db';
import { logger } from './logger.server';
import { emailQueue } from './queueService.server';
export const backgroundJobService = new BackgroundJobService(
personalizationRepo,
notificationRepo,
emailQueue,
logger,
);
```
**Testing with Mocks**:
```typescript
describe('BackgroundJobService', () => {
it('should process deals for all users', async () => {
const mockPersonalizationRepo = {
getBestSalePricesForAllUsers: vi.fn().mockResolvedValue([mockDeal]),
};
const mockNotificationRepo = {
createBulkNotifications: vi.fn().mockResolvedValue([]),
};
const mockEmailQueue = {
add: vi.fn().mockResolvedValue({ id: 'job-1' }),
};
const mockLogger = {
info: vi.fn(),
error: vi.fn(),
};
const service = new BackgroundJobService(
mockPersonalizationRepo as any,
mockNotificationRepo as any,
mockEmailQueue as any,
mockLogger as any,
);
await service.runDailyDealCheck();
expect(mockPersonalizationRepo.getBestSalePricesForAllUsers).toHaveBeenCalled();
expect(mockEmailQueue.add).toHaveBeenCalled();
});
});
```
### Processing Service with DI
Located in `src/services/flyer/flyerProcessingService.ts`:
```typescript
export class FlyerProcessingService {
constructor(
private fileHandler: FlyerFileHandler,
private aiProcessor: FlyerAiProcessor,
private fsAdapter: FileSystemAdapter,
private cleanupQueue: Queue<CleanupJobData>,
private dataTransformer: FlyerDataTransformer,
private persistenceService: FlyerPersistenceService,
) {}
async processFlyer(filePath: string, logger: Logger): Promise<ProcessedFlyer> {
// Use injected dependencies
const fileInfo = await this.fileHandler.extractMetadata(filePath);
const aiResult = await this.aiProcessor.analyze(filePath, logger);
const transformed = this.dataTransformer.transform(aiResult);
const saved = await this.persistenceService.save(transformed, logger);
// Queue cleanup
await this.cleanupQueue.add('cleanup', { filePath });
return saved;
}
}
// Composition root
const flyerProcessingService = new FlyerProcessingService(
new FlyerFileHandler(fsAdapter, execAsync),
new FlyerAiProcessor(aiService, db.personalizationRepo),
fsAdapter,
cleanupQueue,
new FlyerDataTransformer(),
new FlyerPersistenceService(),
);
```
### Interface Segregation
Use the minimum interface required:
```typescript
// Bad - depends on full Pool
constructor(pool: Pool) {}
// Good - depends only on what's needed
constructor(db: Pick<Pool | PoolClient, 'query'>) {}
```
This allows injecting either a `Pool`, `PoolClient` (for transactions), or a mock object with just a `query` method.
### Composition Root Pattern
Wire all dependencies at application startup:
```typescript
// src/services/db/index.db.ts - Composition root for repositories
import { getPool } from './connection.db';
export const userRepo = new UserRepository(getPool());
export const flyerRepo = new FlyerRepository(getPool());
export const adminRepo = new AdminRepository(getPool());
export const personalizationRepo = new PersonalizationRepository(getPool());
export const notificationRepo = new NotificationRepository(getPool());
export const db = {
userRepo,
flyerRepo,
adminRepo,
personalizationRepo,
notificationRepo,
};
```
## Consequences
### Positive
- **Testability**: Unit tests can inject mocks without modifying production code.
- **Flexibility**: Swap implementations (e.g., different database adapters) easily.
- **Explicit Dependencies**: Clear contract of what a component needs.
- **Transaction Support**: Repositories can participate in transactions by accepting a client.
### Negative
- **More Boilerplate**: Constructors become longer with many dependencies.
- **Composition Complexity**: Must wire dependencies somewhere (composition root).
- **No Runtime Type Checking**: TypeScript types are erased at runtime.
### Mitigation
For complex services with many dependencies, consider:
1. **Factory Functions**: Encapsulate construction logic.
2. **Dependency Groups**: Pass related dependencies as a single object.
3. **DI Containers**: For very large applications, consider a DI library like `tsyringe` or `inversify`.
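A hypothetical sketch of options 1 and 2 combined, grouping the `BackgroundJobService` dependencies into a single object and building the service through a small factory (imports are elided as in the snippets above):
```typescript
interface BackgroundJobDeps {
  personalizationRepo: PersonalizationRepository;
  notificationRepo: NotificationRepository;
  emailQueue: Queue<EmailJobData>;
  logger: Logger;
}

// Factory function: the only place that knows how to assemble the service.
export function createBackgroundJobService(deps: BackgroundJobDeps): BackgroundJobService {
  return new BackgroundJobService(
    deps.personalizationRepo,
    deps.notificationRepo,
    deps.emailQueue,
    deps.logger,
  );
}

// Production wiring stays in one call; tests pass an object of mocks instead.
export const backgroundJobService = createBackgroundJobService({
  personalizationRepo,
  notificationRepo,
  emailQueue,
  logger,
});
```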
## Key Files
- `src/services/db/*.db.ts` - Repository classes with constructor DI
- `src/services/db/index.db.ts` - Composition root for repositories
- `src/services/backgroundJobService.ts` - Service class with constructor DI
- `src/services/flyer/flyerProcessingService.ts` - Complex service with multiple dependencies
## Related ADRs
- [ADR-002](./0002-standardized-transaction-management.md) - Transaction Management
- [ADR-034](./0034-repository-pattern-standards.md) - Repository Pattern Standards
- [ADR-035](./0035-service-layer-architecture.md) - Service Layer Architecture

View File

@@ -0,0 +1,145 @@
# ADR Implementation Tracker
This document tracks the implementation status and estimated effort for all Architectural Decision Records (ADRs).
## Effort Estimation Guide
| Rating | Description | Typical Duration |
| ------ | ------------------------------------------- | ----------------- |
| S | Small - Simple, isolated changes | 1-2 hours |
| M | Medium - Multiple files, some testing | Half day to 1 day |
| L | Large - Significant refactoring, many files | 1-3 days |
| XL | Extra Large - Major architectural change | 1+ weeks |
## Implementation Status Overview
| Status | Count |
| ---------------------------- | ----- |
| Accepted (Fully Implemented) | 25    |
| Partially Implemented        | 2     |
| Proposed (Not Started)       | 12    |
---
## Detailed Implementation Status
### Category 1: Foundational / Core Infrastructure
| ADR | Title | Status | Effort | Notes |
| ---------------------------------------------------------------- | ----------------------- | -------- | ------ | ------------------------------ |
| [ADR-002](./0002-standardized-transaction-management.md) | Transaction Management | Accepted | - | Fully implemented |
| [ADR-007](./0007-configuration-and-secrets-management.md) | Configuration & Secrets | Accepted | - | Fully implemented |
| [ADR-020](./0020-health-checks-and-liveness-readiness-probes.md) | Health Checks | Accepted | - | Fully implemented |
| [ADR-030](./0030-graceful-degradation-and-circuit-breaker.md) | Circuit Breaker | Proposed | L | New resilience patterns needed |
### Category 2: Data Management
| ADR | Title | Status | Effort | Notes |
| --------------------------------------------------------------- | ------------------------ | -------- | ------ | ------------------------------ |
| [ADR-009](./0009-caching-strategy-for-read-heavy-operations.md) | Caching Strategy | Accepted | - | Fully implemented |
| [ADR-013](./0013-database-schema-migration-strategy.md) | Schema Migrations v1 | Proposed | M | Superseded by ADR-023 |
| [ADR-019](./0019-data-backup-and-recovery-strategy.md) | Backup & Recovery | Accepted | - | Fully implemented |
| [ADR-023](./0023-database-schema-migration-strategy.md) | Schema Migrations v2 | Proposed | L | Requires tooling setup |
| [ADR-031](./0031-data-retention-and-privacy-compliance.md) | Data Retention & Privacy | Proposed | XL | Legal/compliance review needed |
### Category 3: API & Integration
| ADR | Title | Status | Effort | Notes |
| ------------------------------------------------------------------- | ------------------------ | ----------- | ------ | ------------------------------------- |
| [ADR-003](./0003-standardized-input-validation-using-middleware.md) | Input Validation | Accepted | - | Fully implemented |
| [ADR-008](./0008-api-versioning-strategy.md) | API Versioning | Proposed | L | Major URL/routing changes |
| [ADR-018](./0018-api-documentation-strategy.md) | API Documentation | Proposed | M | OpenAPI/Swagger setup |
| [ADR-022](./0022-real-time-notification-system.md) | Real-time Notifications | Proposed | XL | WebSocket infrastructure |
| [ADR-028](./0028-api-response-standardization.md) | Response Standardization | Implemented | L | Completed (routes, middleware, tests) |
### Category 4: Security & Compliance
| ADR | Title | Status | Effort | Notes |
| ----------------------------------------------------------------------- | --------------------- | -------- | ------ | -------------------------------- |
| [ADR-001](./0001-standardized-error-handling.md) | Error Handling | Accepted | - | Fully implemented |
| [ADR-011](./0011-advanced-authorization-and-access-control-strategy.md) | Authorization & RBAC | Proposed | XL | Policy engine, permission system |
| [ADR-016](./0016-api-security-hardening.md) | Security Hardening | Accepted | - | Fully implemented |
| [ADR-029](./0029-secret-rotation-and-key-management.md) | Secret Rotation | Proposed | L | Infrastructure changes needed |
| [ADR-032](./0032-rate-limiting-strategy.md) | Rate Limiting | Accepted | - | Fully implemented |
| [ADR-033](./0033-file-upload-and-storage-strategy.md) | File Upload & Storage | Accepted | - | Fully implemented |
### Category 5: Observability & Monitoring
| ADR | Title | Status | Effort | Notes |
| -------------------------------------------------------------------------- | -------------------- | -------- | ------ | ----------------------- |
| [ADR-004](./0004-standardized-application-wide-structured-logging.md) | Structured Logging | Accepted | - | Fully implemented |
| [ADR-015](./0015-application-performance-monitoring-and-error-tracking.md) | APM & Error Tracking | Proposed | M | Third-party integration |
### Category 6: Deployment & Operations
| ADR | Title | Status | Effort | Notes |
| -------------------------------------------------------------- | ----------------- | -------- | ------ | -------------------------- |
| [ADR-006](./0006-background-job-processing-and-task-queues.md) | Background Jobs | Accepted | - | Fully implemented |
| [ADR-014](./0014-containerization-and-deployment-strategy.md) | Containerization | Partial | M | Docker done, K8s pending |
| [ADR-017](./0017-ci-cd-and-branching-strategy.md) | CI/CD & Branching | Accepted | - | Fully implemented |
| [ADR-024](./0024-feature-flagging-strategy.md) | Feature Flags | Proposed | M | New service/library needed |
| [ADR-037](./0037-scheduled-jobs-and-cron-pattern.md) | Scheduled Jobs | Accepted | - | Fully implemented |
| [ADR-038](./0038-graceful-shutdown-pattern.md) | Graceful Shutdown | Accepted | - | Fully implemented |
### Category 7: Frontend / User Interface
| ADR | Title | Status | Effort | Notes |
| ------------------------------------------------------------------------ | ------------------- | -------- | ------ | ------------------------------------------- |
| [ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md) | State Management | Accepted | - | Fully implemented |
| [ADR-012](./0012-frontend-component-library-and-design-system.md) | Component Library | Partial | L | Core components done, design tokens pending |
| [ADR-025](./0025-internationalization-and-localization-strategy.md) | i18n & l10n | Proposed | XL | All UI strings need extraction |
| [ADR-026](./0026-standardized-client-side-structured-logging.md) | Client-Side Logging | Accepted | - | Fully implemented |
### Category 8: Development Workflow & Quality
| ADR | Title | Status | Effort | Notes |
| ----------------------------------------------------------------------------- | -------------------- | -------- | ------ | ----------------- |
| [ADR-010](./0010-testing-strategy-and-standards.md) | Testing Strategy | Accepted | - | Fully implemented |
| [ADR-021](./0021-code-formatting-and-linting-unification.md) | Formatting & Linting | Accepted | - | Fully implemented |
| [ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md) | Naming Conventions | Accepted | - | Fully implemented |
### Category 9: Architecture Patterns
| ADR | Title | Status | Effort | Notes |
| -------------------------------------------------- | -------------------- | -------- | ------ | ----------------- |
| [ADR-034](./0034-repository-pattern-standards.md) | Repository Pattern | Accepted | - | Fully implemented |
| [ADR-035](./0035-service-layer-architecture.md) | Service Layer | Accepted | - | Fully implemented |
| [ADR-036](./0036-event-bus-and-pub-sub-pattern.md) | Event Bus | Accepted | - | Fully implemented |
| [ADR-039](./0039-dependency-injection-pattern.md) | Dependency Injection | Accepted | - | Fully implemented |
---
## Work Still To Be Completed (Priority Order)
These ADRs are proposed but not yet implemented, ordered by suggested implementation priority:
| Priority | ADR | Title | Effort | Rationale |
| -------- | ------- | ------------------------ | ------ | ----------------------------------------------------- |
| 1 | ADR-018 | API Documentation | M | Improves developer experience, enables SDK generation |
| 2 | ADR-015 | APM & Error Tracking | M | Production visibility, debugging |
| 3 | ADR-024 | Feature Flags | M | Safer deployments, A/B testing |
| 4 | ADR-023 | Schema Migrations v2 | L | Database evolution support |
| 5 | ADR-029 | Secret Rotation | L | Security improvement |
| 6 | ADR-008 | API Versioning | L | Future API evolution |
| 7 | ADR-030 | Circuit Breaker | L | Resilience improvement |
| 8 | ADR-022 | Real-time Notifications | XL | Major feature enhancement |
| 9 | ADR-011 | Authorization & RBAC | XL | Advanced permission system |
| 10 | ADR-025 | i18n & l10n | XL | Multi-language support |
| 11 | ADR-031 | Data Retention & Privacy | XL | Compliance requirements |
---
## Recent Implementation History
| Date | ADR | Change |
| ---------- | ------- | --------------------------------------------------------------------------------------------- |
| 2026-01-09 | ADR-026 | Fully implemented - all client-side components, hooks, and services now use the structured logger |
| 2026-01-09 | ADR-028 | Fully implemented - all routes, middleware, and tests updated |
---
## Notes
- **Effort estimates** are rough guidelines and may vary based on current codebase state
- **Dependencies** between ADRs should be considered when planning implementation order
- This document should be updated when ADRs are implemented or status changes

View File

@@ -4,49 +4,66 @@ This directory contains a log of the architectural decisions made for the Flyer
## 1. Foundational / Core Infrastructure
**[ADR-002](./0002-standardized-transaction-management.md)**: Standardized Transaction Management and Unit of Work Pattern (Proposed)
**[ADR-007](./0007-configuration-and-secrets-management.md)**: Configuration and Secrets Management (Proposed)
**[ADR-020](./0020-health-checks-and-liveness-readiness-probes.md)**: Health Checks and Liveness/Readiness Probes (Proposed)
**[ADR-002](./0002-standardized-transaction-management.md)**: Standardized Transaction Management and Unit of Work Pattern (Accepted)
**[ADR-007](./0007-configuration-and-secrets-management.md)**: Configuration and Secrets Management (Accepted)
**[ADR-020](./0020-health-checks-and-liveness-readiness-probes.md)**: Health Checks and Liveness/Readiness Probes (Accepted)
**[ADR-030](./0030-graceful-degradation-and-circuit-breaker.md)**: Graceful Degradation and Circuit Breaker Pattern (Proposed)
## 2. Data Management
**[ADR-009](./0009-caching-strategy-for-read-heavy-operations.md)**: Caching Strategy for Read-Heavy Operations (Proposed)
**[ADR-009](./0009-caching-strategy-for-read-heavy-operations.md)**: Caching Strategy for Read-Heavy Operations (Accepted)
**[ADR-013](./0013-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
**[ADR-019](./0019-data-backup-and-recovery-strategy.md)**: Data Backup and Recovery Strategy (Proposed)
**[ADR-019](./0019-data-backup-and-recovery-strategy.md)**: Data Backup and Recovery Strategy (Accepted)
**[ADR-023](./0023-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
**[ADR-031](./0031-data-retention-and-privacy-compliance.md)**: Data Retention and Privacy Compliance (Proposed)
## 3. API & Integration
**[ADR-003](./0003-standardized-input-validation-using-middleware.md)**: Standardized Input Validation using Middleware (Proposed)
**[ADR-003](./0003-standardized-input-validation-using-middleware.md)**: Standardized Input Validation using Middleware (Accepted)
**[ADR-008](./0008-api-versioning-strategy.md)**: API Versioning Strategy (Proposed)
**[ADR-018](./0018-api-documentation-strategy.md)**: API Documentation Strategy (Proposed)
**[ADR-022](./0022-real-time-notification-system.md)**: Real-time Notification System (Proposed)
**[ADR-028](./0028-api-response-standardization.md)**: API Response Standardization and Envelope Pattern (Implemented)
## 4. Security & Compliance
**[ADR-001](./0001-standardized-error-handling.md)**: Standardized Error Handling for Service and Repository Layers (Accepted)
**[ADR-011](./0011-advanced-authorization-and-access-control-strategy.md)**: Advanced Authorization and Access Control Strategy (Proposed)
**[ADR-016](./0016-api-security-hardening.md)**: API Security Hardening (Proposed)
**[ADR-016](./0016-api-security-hardening.md)**: API Security Hardening (Accepted)
**[ADR-029](./0029-secret-rotation-and-key-management.md)**: Secret Rotation and Key Management Strategy (Proposed)
**[ADR-032](./0032-rate-limiting-strategy.md)**: Rate Limiting Strategy (Accepted)
**[ADR-033](./0033-file-upload-and-storage-strategy.md)**: File Upload and Storage Strategy (Accepted)
## 5. Observability & Monitoring
**[ADR-004](./0004-standardized-application-wide-structured-logging.md)**: Standardized Application-Wide Structured Logging (Proposed)
**[ADR-004](./0004-standardized-application-wide-structured-logging.md)**: Standardized Application-Wide Structured Logging (Accepted)
**[ADR-015](./0015-application-performance-monitoring-and-error-tracking.md)**: Application Performance Monitoring (APM) and Error Tracking (Proposed)
## 6. Deployment & Operations
**[ADR-006](./0006-background-job-processing-and-task-queues.md)**: Background Job Processing and Task Queues (Proposed)
**[ADR-014](./0014-containerization-and-deployment-strategy.md)**: Containerization and Deployment Strategy (Proposed)
**[ADR-017](./0017-ci-cd-and-branching-strategy.md)**: CI/CD and Branching Strategy (Proposed)
**[ADR-006](./0006-background-job-processing-and-task-queues.md)**: Background Job Processing and Task Queues (Accepted)
**[ADR-014](./0014-containerization-and-deployment-strategy.md)**: Containerization and Deployment Strategy (Partially Implemented)
**[ADR-017](./0017-ci-cd-and-branching-strategy.md)**: CI/CD and Branching Strategy (Accepted)
**[ADR-024](./0024-feature-flagging-strategy.md)**: Feature Flagging Strategy (Proposed)
**[ADR-037](./0037-scheduled-jobs-and-cron-pattern.md)**: Scheduled Jobs and Cron Pattern (Accepted)
**[ADR-038](./0038-graceful-shutdown-pattern.md)**: Graceful Shutdown Pattern (Accepted)
## 7. Frontend / User Interface
**[ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md)**: Frontend State Management and Server Cache Strategy (Proposed)
**[ADR-012](./0012-frontend-component-library-and-design-system.md)**: Frontend Component Library and Design System (Proposed)
**[ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md)**: Frontend State Management and Server Cache Strategy (Accepted)
**[ADR-012](./0012-frontend-component-library-and-design-system.md)**: Frontend Component Library and Design System (Partially Implemented)
**[ADR-025](./0025-internationalization-and-localization-strategy.md)**: Internationalization (i18n) and Localization (l10n) Strategy (Proposed)
**[ADR-026](./0026-standardized-client-side-structured-logging.md)**: Standardized Client-Side Structured Logging (Proposed)
## 8. Development Workflow & Quality
**[ADR-010](./0010-testing-strategy-and-standards.md)**: Testing Strategy and Standards (Proposed)
**[ADR-021](./0021-code-formatting-and-linting-unification.md)**: Code Formatting and Linting Unification (Proposed)
**[ADR-010](./0010-testing-strategy-and-standards.md)**: Testing Strategy and Standards (Accepted)
**[ADR-021](./0021-code-formatting-and-linting-unification.md)**: Code Formatting and Linting Unification (Accepted)
**[ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md)**: Standardized Naming Convention for AI and Database Types (Accepted)
## 9. Architecture Patterns
**[ADR-034](./0034-repository-pattern-standards.md)**: Repository Pattern Standards (Accepted)
**[ADR-035](./0035-service-layer-architecture.md)**: Service Layer Architecture (Accepted)
**[ADR-036](./0036-event-bus-and-pub-sub-pattern.md)**: Event Bus and Pub/Sub Pattern (Accepted)
**[ADR-039](./0039-dependency-injection-pattern.md)**: Dependency Injection Pattern (Accepted)

View File

@@ -3,6 +3,7 @@ import tseslint from 'typescript-eslint';
import pluginReact from 'eslint-plugin-react';
import pluginReactHooks from 'eslint-plugin-react-hooks';
import pluginReactRefresh from 'eslint-plugin-react-refresh';
import eslintConfigPrettier from 'eslint-config-prettier';
export default tseslint.config(
{
@@ -29,4 +30,26 @@ export default tseslint.config(
},
// TypeScript files
...tseslint.configs.recommended,
// Allow underscore-prefixed variables to be unused (common convention for intentionally unused params)
{
files: ['**/*.{ts,tsx}'],
rules: {
'@typescript-eslint/no-unused-vars': [
'error',
{
argsIgnorePattern: '^_',
varsIgnorePattern: '^_',
},
],
},
},
// Relaxed rules for test files - see ADR-021 for rationale
{
files: ['**/*.test.ts', '**/*.test.tsx', '**/*.spec.ts', '**/*.spec.tsx'],
rules: {
'@typescript-eslint/no-explicit-any': 'off',
},
},
// Prettier compatibility - must be last to override other formatting rules
eslintConfigPrettier,
);

package-lock.json (generated; 607 changed lines)
View File

@@ -1,12 +1,12 @@
{
"name": "flyer-crawler",
"version": "0.9.67",
"version": "0.9.76",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.9.67",
"version": "0.9.76",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",
@@ -22,6 +22,7 @@
"express": "^5.1.0",
"express-list-endpoints": "^7.1.1",
"express-rate-limit": "^8.2.1",
"helmet": "^8.1.0",
"ioredis": "^5.8.2",
"jsonwebtoken": "^9.0.2",
"lucide-react": "^0.555.0",
@@ -92,8 +93,10 @@
"eslint-plugin-react-refresh": "^0.4.24",
"glob": "^13.0.0",
"globals": "16.5.0",
"husky": "^9.1.7",
"istanbul-reports": "^3.2.0",
"jsdom": "^27.2.0",
"lint-staged": "^16.2.7",
"msw": "^2.12.3",
"nyc": "^17.1.0",
"pino-pretty": "^13.1.3",
@@ -6144,6 +6147,22 @@
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/ansi-escapes": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz",
"integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"environment": "^1.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -6952,6 +6971,19 @@
"balanced-match": "^1.0.0"
}
},
"node_modules/braces": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/browserslist": {
"version": "4.28.1",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz",
@@ -7286,6 +7318,85 @@
"node": ">=6"
}
},
"node_modules/cli-cursor": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz",
"integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==",
"dev": true,
"license": "MIT",
"dependencies": {
"restore-cursor": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.1.1.tgz",
"integrity": "sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==",
"dev": true,
"license": "MIT",
"dependencies": {
"slice-ansi": "^7.1.0",
"string-width": "^8.0.0"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate/node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/cli-truncate/node_modules/string-width": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
"integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
"dev": true,
"license": "MIT",
"dependencies": {
"get-east-asian-width": "^1.3.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate/node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/cli-width": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz",
@@ -7394,6 +7505,16 @@
"node": ">= 0.8"
}
},
"node_modules/commander": {
"version": "14.0.2",
"resolved": "https://registry.npmjs.org/commander/-/commander-14.0.2.tgz",
"integrity": "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=20"
}
},
"node_modules/commondir": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
@@ -8344,6 +8465,19 @@
"url": "https://github.com/fb55/entities?sponsor=1"
}
},
"node_modules/environment": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz",
"integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/es-abstract": {
"version": "1.24.1",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.1.tgz",
@@ -9292,6 +9426,19 @@
"node": ">=10"
}
},
"node_modules/fill-range": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/finalhandler": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz",
@@ -9816,6 +9963,19 @@
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-east-asian-width": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
"integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
@@ -10193,6 +10353,15 @@
"dev": true,
"license": "MIT"
},
"node_modules/helmet": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/helmet/-/helmet-8.1.0.tgz",
"integrity": "sha512-jOiHyAZsmnr8LqoPGmCjYAaiuWwjAPLgY8ZX2XrmHawt99/u1y6RgrZMTeoPfpUbV96HOalYgz1qzkRbw54Pmg==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/help-me": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz",
@@ -10307,6 +10476,22 @@
"node": ">= 6"
}
},
"node_modules/husky": {
"version": "9.1.7",
"resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz",
"integrity": "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==",
"dev": true,
"license": "MIT",
"bin": {
"husky": "bin.js"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/typicode"
}
},
"node_modules/iconv-lite": {
"version": "0.7.1",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.1.tgz",
@@ -10720,6 +10905,16 @@
"dev": true,
"license": "MIT"
},
"node_modules/is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
},
"node_modules/is-number-object": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz",
@@ -11755,6 +11950,134 @@
"url": "https://opencollective.com/parcel"
}
},
"node_modules/lint-staged": {
"version": "16.2.7",
"resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-16.2.7.tgz",
"integrity": "sha512-lDIj4RnYmK7/kXMya+qJsmkRFkGolciXjrsZ6PC25GdTfWOAWetR0ZbsNXRAj1EHHImRSalc+whZFg56F5DVow==",
"dev": true,
"license": "MIT",
"dependencies": {
"commander": "^14.0.2",
"listr2": "^9.0.5",
"micromatch": "^4.0.8",
"nano-spawn": "^2.0.0",
"pidtree": "^0.6.0",
"string-argv": "^0.3.2",
"yaml": "^2.8.1"
},
"bin": {
"lint-staged": "bin/lint-staged.js"
},
"engines": {
"node": ">=20.17"
},
"funding": {
"url": "https://opencollective.com/lint-staged"
}
},
"node_modules/listr2": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.5.tgz",
"integrity": "sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==",
"dev": true,
"license": "MIT",
"dependencies": {
"cli-truncate": "^5.0.0",
"colorette": "^2.0.20",
"eventemitter3": "^5.0.1",
"log-update": "^6.1.0",
"rfdc": "^1.4.1",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=20.0.0"
}
},
"node_modules/listr2/node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/listr2/node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/listr2/node_modules/emoji-regex": {
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"dev": true,
"license": "MIT"
},
"node_modules/listr2/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/listr2/node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/listr2/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/locate-path": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
@@ -11852,6 +12175,111 @@
"integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==",
"license": "MIT"
},
"node_modules/log-update": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz",
"integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-escapes": "^7.0.0",
"cli-cursor": "^5.0.0",
"slice-ansi": "^7.1.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/log-update/node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/log-update/node_modules/emoji-regex": {
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"dev": true,
"license": "MIT"
},
"node_modules/log-update/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/log-update/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/long": {
"version": "5.3.2",
"resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz",
@@ -12004,6 +12432,33 @@
"node": ">= 0.6"
}
},
"node_modules/micromatch": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"
},
"engines": {
"node": ">=8.6"
}
},
"node_modules/micromatch/node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8.6"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/mime": {
"version": "2.6.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz",
@@ -12042,6 +12497,19 @@
"url": "https://opencollective.com/express"
}
},
"node_modules/mimic-function": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz",
"integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/min-indent": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz",
@@ -12320,6 +12788,19 @@
"license": "MIT",
"optional": true
},
"node_modules/nano-spawn": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-2.0.0.tgz",
"integrity": "sha512-tacvGzUY5o2D8CBh2rrwxyNojUsZNU2zjNTzKQrkgGJQTbGAfArVWXSKMBokBeeg6C7OLRGUEyoFlYbfeWQIqw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=20.17"
},
"funding": {
"url": "https://github.com/sindresorhus/nano-spawn?sponsor=1"
}
},
"node_modules/nanoid": {
"version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
@@ -12953,6 +13434,22 @@
"wrappy": "1"
}
},
"node_modules/onetime": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz",
"integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"mimic-function": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/optionator": {
"version": "0.9.4",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
@@ -13408,6 +13905,19 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/pidtree": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.6.0.tgz",
"integrity": "sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==",
"dev": true,
"license": "MIT",
"bin": {
"pidtree": "bin/pidtree.js"
},
"engines": {
"node": ">=0.10"
}
},
"node_modules/piexifjs": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/piexifjs/-/piexifjs-1.0.6.tgz",
@@ -14358,6 +14868,23 @@
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
},
"node_modules/restore-cursor": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz",
"integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==",
"dev": true,
"license": "MIT",
"dependencies": {
"onetime": "^7.0.0",
"signal-exit": "^4.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/retry": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz",
@@ -14375,6 +14902,13 @@
"dev": true,
"license": "MIT"
},
"node_modules/rfdc": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
"integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
"dev": true,
"license": "MIT"
},
"node_modules/rimraf": {
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.1.2.tgz",
@@ -14957,6 +15491,52 @@
"node": ">=18"
}
},
"node_modules/slice-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.2.tgz",
"integrity": "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"is-fullwidth-code-point": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
"node_modules/slice-ansi/node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/slice-ansi/node_modules/is-fullwidth-code-point": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz",
"integrity": "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"get-east-asian-width": "^1.3.1"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/sonic-boom": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz",
@@ -15230,6 +15810,16 @@
"safe-buffer": "~5.2.0"
}
},
"node_modules/string-argv": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz",
"integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.6.19"
}
},
"node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
@@ -15790,6 +16380,19 @@
"node": ">=14.14"
}
},
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/toidentifier": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",

View File

@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.9.67",
"version": "0.9.76",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -24,7 +24,8 @@
"start:test": "NODE_ENV=test NODE_V8_COVERAGE=.coverage/tmp/integration-server tsx server.ts",
"db:reset:dev": "NODE_ENV=development tsx src/db/seed.ts",
"db:reset:test": "NODE_ENV=test tsx src/db/seed.ts",
"worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts"
"worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts",
"prepare": "node -e \"try { require.resolve('husky') } catch (e) { process.exit(0) }\" && husky || true"
},
"dependencies": {
"@bull-board/api": "^6.14.2",
@@ -41,6 +42,7 @@
"express": "^5.1.0",
"express-list-endpoints": "^7.1.1",
"express-rate-limit": "^8.2.1",
"helmet": "^8.1.0",
"ioredis": "^5.8.2",
"jsonwebtoken": "^9.0.2",
"lucide-react": "^0.555.0",
@@ -111,8 +113,10 @@
"eslint-plugin-react-refresh": "^0.4.24",
"glob": "^13.0.0",
"globals": "16.5.0",
"husky": "^9.1.7",
"istanbul-reports": "^3.2.0",
"jsdom": "^27.2.0",
"lint-staged": "^16.2.7",
"msw": "^2.12.3",
"nyc": "^17.1.0",
"pino-pretty": "^13.1.3",

scripts/docker-init.sh (new file, 150 lines)
View File

@@ -0,0 +1,150 @@
#!/bin/bash
# scripts/docker-init.sh
# ============================================================================
# CONTAINER INITIALIZATION SCRIPT
# ============================================================================
# Purpose:
# This script is run when the dev container is created for the first time.
# It handles all first-run setup tasks to ensure a fully working environment.
#
# Tasks performed:
# 1. Install npm dependencies (if not already done)
# 2. Wait for PostgreSQL to be ready
# 3. Wait for Redis to be ready
# 4. Initialize the database schema
# 5. Seed the database with development data
#
# Usage:
# This script is called automatically by devcontainer.json's postCreateCommand.
# It can also be run manually: ./scripts/docker-init.sh
# ============================================================================
set -e # Exit immediately on error
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
log_info() {
echo -e "${BLUE}[INFO]${NC} $1"
}
log_success() {
echo -e "${GREEN}[SUCCESS]${NC} $1"
}
log_warning() {
echo -e "${YELLOW}[WARNING]${NC} $1"
}
log_error() {
echo -e "${RED}[ERROR]${NC} $1"
}
# ============================================================================
# 1. Install npm dependencies
# ============================================================================
log_info "Step 1/5: Installing npm dependencies..."
if [ -d "node_modules" ] && [ -f "node_modules/.package-lock.json" ]; then
log_info "node_modules exists, running npm install to sync..."
fi
npm install
log_success "npm dependencies installed."
# ============================================================================
# 2. Wait for PostgreSQL to be ready
# ============================================================================
log_info "Step 2/5: Waiting for PostgreSQL to be ready..."
POSTGRES_HOST="${DB_HOST:-postgres}"
POSTGRES_PORT="${DB_PORT:-5432}"
POSTGRES_USER="${DB_USER:-postgres}"
POSTGRES_DB="${DB_NAME:-flyer_crawler_dev}"
MAX_RETRIES=30
RETRY_COUNT=0
until PGPASSWORD="${DB_PASSWORD:-postgres}" psql -h "$POSTGRES_HOST" -p "$POSTGRES_PORT" -U "$POSTGRES_USER" -d "postgres" -c '\q' 2>/dev/null; do
RETRY_COUNT=$((RETRY_COUNT + 1))
if [ $RETRY_COUNT -ge $MAX_RETRIES ]; then
log_error "PostgreSQL did not become ready after $MAX_RETRIES attempts. Exiting."
exit 1
fi
log_warning "PostgreSQL is not ready yet (attempt $RETRY_COUNT/$MAX_RETRIES). Waiting 2 seconds..."
sleep 2
done
log_success "PostgreSQL is ready."
# ============================================================================
# 3. Wait for Redis to be ready
# ============================================================================
log_info "Step 3/5: Waiting for Redis to be ready..."
REDIS_HOST="${REDIS_HOST:-redis}"
REDIS_PORT="${REDIS_PORT:-6379}"
MAX_RETRIES=30
RETRY_COUNT=0
# Extract host from REDIS_URL if set
if [ -n "$REDIS_URL" ]; then
# Parse redis://host:port format
REDIS_HOST=$(echo "$REDIS_URL" | sed -E 's|redis://([^:]+):?.*|\1|')
fi
until redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" ping 2>/dev/null | grep -q PONG; do
RETRY_COUNT=$((RETRY_COUNT + 1))
if [ $RETRY_COUNT -ge $MAX_RETRIES ]; then
log_error "Redis did not become ready after $MAX_RETRIES attempts. Exiting."
exit 1
fi
log_warning "Redis is not ready yet (attempt $RETRY_COUNT/$MAX_RETRIES). Waiting 2 seconds..."
sleep 2
done
log_success "Redis is ready."
# ============================================================================
# 4. Check if database needs initialization
# ============================================================================
log_info "Step 4/5: Checking database state..."
# Check if the users table exists (indicator of initialized schema)
TABLE_EXISTS=$(PGPASSWORD="${DB_PASSWORD:-postgres}" psql -h "$POSTGRES_HOST" -p "$POSTGRES_PORT" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -t -c "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'users');" 2>/dev/null | tr -d '[:space:]' || echo "f")
if [ "$TABLE_EXISTS" = "t" ]; then
log_info "Database schema already exists. Skipping initialization."
log_info "To reset the database, run: npm run db:reset:dev"
else
log_info "Database schema not found. Initializing..."
# ============================================================================
# 5. Initialize and seed the database
# ============================================================================
log_info "Step 5/5: Running database initialization and seed..."
# The db:reset:dev script handles both schema creation and seeding
npm run db:reset:dev
log_success "Database initialized and seeded successfully."
fi
# ============================================================================
# Done!
# ============================================================================
echo ""
log_success "=========================================="
log_success "Container initialization complete!"
log_success "=========================================="
echo ""
log_info "Default test accounts:"
echo " Admin: admin@example.com / adminpass"
echo " User: user@example.com / userpass"
echo ""
log_info "To start the development server, run:"
echo " npm run dev:container"
echo ""

View File

@@ -1,6 +1,7 @@
// server.ts
import express, { Request, Response, NextFunction } from 'express';
import { randomUUID } from 'crypto';
import helmet from 'helmet';
import timeout from 'connect-timeout';
import cookieParser from 'cookie-parser';
import listEndpoints from 'express-list-endpoints';
@@ -62,6 +63,38 @@ logger.info('-----------------------------------------------\n');
const app = express();
// --- Security Headers Middleware (ADR-016) ---
// Helmet sets various HTTP headers to help protect the app from common web vulnerabilities.
// Must be applied early in the middleware chain, before any routes.
app.use(
helmet({
// Content Security Policy - configured for API + SPA frontend
contentSecurityPolicy: {
directives: {
defaultSrc: ["'self'"],
scriptSrc: ["'self'", "'unsafe-inline'"], // Allow inline scripts for React
styleSrc: ["'self'", "'unsafe-inline'"], // Allow inline styles for Tailwind
imgSrc: ["'self'", 'data:', 'blob:', 'https:'], // Allow images from various sources
fontSrc: ["'self'", 'https:', 'data:'],
connectSrc: ["'self'", 'https:', 'wss:'], // Allow API and WebSocket connections
frameSrc: ["'none'"], // Disallow iframes
objectSrc: ["'none'"], // Disallow plugins
upgradeInsecureRequests: process.env.NODE_ENV === 'production' ? [] : null,
},
},
// Cross-Origin settings for API
crossOriginEmbedderPolicy: false, // Disabled to allow loading external images
crossOriginResourcePolicy: { policy: 'cross-origin' }, // Allow cross-origin resource loading
// Additional security headers
hsts: {
maxAge: 31536000, // 1 year in seconds
includeSubDomains: true,
preload: true,
},
referrerPolicy: { policy: 'strict-origin-when-cross-origin' },
}),
);
// --- Core Middleware ---
// Increase the limit for JSON and URL-encoded bodies. This is crucial for handling large file uploads
// that are part of multipart/form-data requests, as the overall request size is checked.
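With helmet registered before any routes, every response from the API should now carry the configured headers. A quick manual check against a locally running server (the port comes from the default in src/config/env.ts; the root path is just a convenient target) is:

curl -sI http://localhost:3001/ | grep -iE 'content-security-policy|strict-transport-security|referrer-policy|x-content-type-options'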

View File

@@ -0,0 +1,24 @@
-- sql/00-init-extensions.sql
-- ============================================================================
-- DATABASE EXTENSIONS INITIALIZATION
-- ============================================================================
-- This script is automatically run by PostgreSQL on database creation
-- when placed in /docker-entrypoint-initdb.d/
--
-- It creates the required extensions before the schema is loaded.
-- ============================================================================
-- Enable UUID generation
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- Enable trigram fuzzy text search
CREATE EXTENSION IF NOT EXISTS pg_trgm;
-- Enable PostGIS for geographic queries (usually pre-installed in postgis image)
CREATE EXTENSION IF NOT EXISTS postgis;
-- Log completion
DO $$
BEGIN
RAISE NOTICE '✅ All required PostgreSQL extensions have been created';
END $$;
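Since the script only runs when it is mounted into /docker-entrypoint-initdb.d/ and the data directory is empty, it is worth verifying the extensions after the container first starts. Using the same connection defaults as scripts/docker-init.sh (host, user, and database name are assumptions carried over from that script):

PGPASSWORD=postgres psql -h postgres -U postgres -d flyer_crawler_dev -c "SELECT extname, extversion FROM pg_extension ORDER BY extname;"

The result should include pg_trgm, postgis, and uuid-ossp alongside the built-in plpgsql.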

View File

@@ -56,13 +56,16 @@ function App() {
// Debugging: Log renders to identify infinite loops
useEffect(() => {
if (process.env.NODE_ENV === 'test') {
console.log('[App] Render:', {
flyersCount: flyers.length,
selectedFlyerId: selectedFlyer?.flyer_id,
flyerIdFromUrl,
authStatus,
profileId: userProfile?.user.user_id,
});
logger.debug(
{
flyersCount: flyers.length,
selectedFlyerId: selectedFlyer?.flyer_id,
flyerIdFromUrl,
authStatus,
profileId: userProfile?.user.user_id,
},
'[App] Render',
);
}
});
@@ -76,7 +79,6 @@ function App() {
const handleCloseVoiceAssistant = useCallback(() => closeModal('voiceAssistant'), [closeModal]);
const handleOpenWhatsNew = useCallback(() => openModal('whatsNew'), [openModal]);
const handleCloseWhatsNew = useCallback(() => closeModal('whatsNew'), [closeModal]);
const handleOpenCorrectionTool = useCallback(() => openModal('correctionTool'), [openModal]);
const handleCloseCorrectionTool = useCallback(() => closeModal('correctionTool'), [closeModal]);
@@ -134,7 +136,7 @@ function App() {
useEffect(() => {
if (!selectedFlyer && flyers.length > 0) {
if (process.env.NODE_ENV === 'test') console.log('[App] Effect: Auto-selecting first flyer');
if (process.env.NODE_ENV === 'test') logger.debug('[App] Effect: Auto-selecting first flyer');
handleFlyerSelect(flyers[0]);
}
}, [flyers, selectedFlyer, handleFlyerSelect]);
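This hunk is part of the ADR-026 migration from raw console.* calls to the shared client logger in src/services/logger.client. That module is not shown in this comparison; the call sites (an object of context first, then a message) follow pino's signature, so a plausible minimal shape, purely a sketch with the Vite-specific level switch being an assumption, is:

// src/services/logger.client.ts (illustrative sketch; not the actual file from this change set)
import pino from 'pino';

export const logger = pino({
  browser: { asObject: true }, // keep structured context objects instead of flattening them to strings
  level: import.meta.env.DEV ? 'debug' : 'info',
});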

View File

@@ -34,17 +34,16 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
// Fetch the image and store it as a File object for API submission
useEffect(() => {
if (isOpen && imageUrl) {
console.debug('[DEBUG] FlyerCorrectionTool: isOpen is true, fetching image URL:', imageUrl);
logger.debug({ imageUrl }, '[FlyerCorrectionTool] isOpen is true, fetching image URL');
fetch(imageUrl)
.then((res) => res.blob())
.then((blob) => {
const file = new File([blob], 'flyer-image.jpg', { type: blob.type });
setImageFile(file);
console.debug('[DEBUG] FlyerCorrectionTool: Image fetched and stored as File object.');
logger.debug('[FlyerCorrectionTool] Image fetched and stored as File object');
})
.catch((err) => {
console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
logger.error({ error: err }, 'Failed to fetch image for correction tool');
logger.error({ err }, '[FlyerCorrectionTool] Failed to fetch image');
notifyError('Could not load the image for correction.');
});
}
@@ -112,26 +111,37 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
const handleMouseUp = () => {
setIsDrawing(false);
setStartPoint(null);
console.debug('[DEBUG] FlyerCorrectionTool: Mouse Up - selection complete.', { selectionRect });
logger.debug({ selectionRect }, '[FlyerCorrectionTool] Mouse Up - selection complete');
};
const handleRescan = async (type: ExtractionType) => {
console.debug(`[DEBUG] handleRescan triggered for type: ${type}`);
console.debug(
`[DEBUG] handleRescan state: selectionRect=${!!selectionRect}, imageRef=${!!imageRef.current}, imageFile=${!!imageFile}`,
logger.debug({ type }, '[FlyerCorrectionTool] handleRescan triggered');
logger.debug(
{
hasSelectionRect: !!selectionRect,
hasImageRef: !!imageRef.current,
hasImageFile: !!imageFile,
},
'[FlyerCorrectionTool] handleRescan state',
);
if (!selectionRect || !imageRef.current || !imageFile) {
console.warn('[DEBUG] handleRescan: Guard failed. Missing prerequisites.');
if (!selectionRect) console.warn('[DEBUG] Reason: No selectionRect');
if (!imageRef.current) console.warn('[DEBUG] Reason: No imageRef');
if (!imageFile) console.warn('[DEBUG] Reason: No imageFile');
logger.warn(
{
hasSelectionRect: !!selectionRect,
hasImageRef: !!imageRef.current,
hasImageFile: !!imageFile,
},
'[FlyerCorrectionTool] handleRescan: Guard failed. Missing prerequisites',
);
notifyError('Please select an area on the image first.');
return;
}
console.debug(`[DEBUG] handleRescan: Prerequisites met. Starting processing for "${type}".`);
logger.debug(
{ type },
'[FlyerCorrectionTool] handleRescan: Prerequisites met. Starting processing',
);
setIsProcessing(true);
try {
// Scale selection coordinates to the original image dimensions
@@ -145,38 +155,34 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
width: selectionRect.width * scaleX,
height: selectionRect.height * scaleY,
};
console.debug('[DEBUG] handleRescan: Calculated scaled cropArea:', cropArea);
logger.debug({ cropArea }, '[FlyerCorrectionTool] handleRescan: Calculated scaled cropArea');
console.debug('[DEBUG] handleRescan: Awaiting aiApiClient.rescanImageArea...');
logger.debug('[FlyerCorrectionTool] handleRescan: Awaiting aiApiClient.rescanImageArea');
const response = await aiApiClient.rescanImageArea(imageFile, cropArea, type);
console.debug('[DEBUG] handleRescan: API call returned. Response ok:', response.ok);
logger.debug({ ok: response.ok }, '[FlyerCorrectionTool] handleRescan: API call returned');
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.message || 'Failed to rescan area.');
}
const { text } = await response.json();
console.debug('[DEBUG] handleRescan: Successfully extracted text:', text);
logger.debug({ text }, '[FlyerCorrectionTool] handleRescan: Successfully extracted text');
notifySuccess(`Extracted: ${text}`);
onDataExtracted(type, text);
onClose(); // Close modal on success
} catch (err) {
const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
logger.error({ err }, '[FlyerCorrectionTool] handleRescan: Caught an error');
notifyError(msg);
logger.error({ error: err }, 'Error during rescan:');
} finally {
console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
logger.debug('[FlyerCorrectionTool] handleRescan: Finished. Setting isProcessing=false');
setIsProcessing(false);
}
};
if (!isOpen) return null;
console.debug('[DEBUG] FlyerCorrectionTool: Rendering with state:', {
isProcessing,
hasSelection: !!selectionRect,
});
logger.debug({ isProcessing, hasSelection: !!selectionRect }, '[FlyerCorrectionTool] Rendering');
return (
<div
className="fixed inset-0 bg-black bg-opacity-75 z-50 flex justify-center items-center p-4"

View File

@@ -1,12 +1,11 @@
// src/components/Leaderboard.test.tsx
import React from 'react';
import { screen, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import Leaderboard from './Leaderboard';
import * as apiClient from '../services/apiClient';
import { LeaderboardUser } from '../types';
import { createMockLeaderboardUser } from '../tests/utils/mockFactories';
import { createMockLogger } from '../tests/utils/mockLogger';
import { renderWithProviders } from '../tests/utils/renderWithProviders';
// The apiClient and logger are mocked globally.

src/config/env.ts (new file, 303 lines)
View File

@@ -0,0 +1,303 @@
// src/config/env.ts
/**
* @file Centralized, schema-validated configuration service.
* Implements ADR-007: Configuration and Secrets Management.
*
* This module parses and validates all environment variables at application startup.
* If any required configuration is missing or invalid, the application will fail fast
* with a clear error message.
*
* Usage:
* import { config } from './config/env';
* console.log(config.database.host);
*/
import { z } from 'zod';
// --- Schema Definitions ---
/**
* Helper to parse string to integer with default.
* Handles empty strings by treating them as undefined.
*/
const intWithDefault = (defaultValue: number) =>
z
.string()
.optional()
.transform((val) => (val && val.trim() !== '' ? parseInt(val, 10) : defaultValue))
.pipe(z.number().int());
/**
* Helper to parse string to float with default.
*/
const floatWithDefault = (defaultValue: number) =>
z
.string()
.optional()
.transform((val) => (val && val.trim() !== '' ? parseFloat(val) : defaultValue))
.pipe(z.number());
/**
* Helper to parse string 'true'/'false' to boolean.
*/
const booleanString = (defaultValue: boolean) =>
z
.string()
.optional()
.transform((val) => (val === undefined ? defaultValue : val === 'true'));
/**
* Database configuration schema.
*/
const databaseSchema = z.object({
host: z.string().min(1, 'DB_HOST is required'),
port: intWithDefault(5432),
user: z.string().min(1, 'DB_USER is required'),
password: z.string().min(1, 'DB_PASSWORD is required'),
name: z.string().min(1, 'DB_NAME is required'),
});
/**
* Redis configuration schema.
*/
const redisSchema = z.object({
url: z.string().url('REDIS_URL must be a valid URL'),
password: z.string().optional(),
});
/**
* Authentication configuration schema.
*/
const authSchema = z.object({
jwtSecret: z.string().min(32, 'JWT_SECRET must be at least 32 characters for security'),
jwtSecretPrevious: z.string().optional(), // For secret rotation (ADR-029)
});
/**
* SMTP/Email configuration schema.
* All fields are optional - email service degrades gracefully if not configured.
*/
const smtpSchema = z.object({
host: z.string().optional(),
port: intWithDefault(587),
user: z.string().optional(),
pass: z.string().optional(),
secure: booleanString(false),
fromEmail: z.string().email().optional(),
});
/**
* AI/Gemini configuration schema.
*/
const aiSchema = z.object({
geminiApiKey: z.string().optional(),
geminiRpm: intWithDefault(5),
priceQualityThreshold: floatWithDefault(0.5),
});
/**
* Google services configuration schema.
*/
const googleSchema = z.object({
mapsApiKey: z.string().optional(),
clientId: z.string().optional(),
clientSecret: z.string().optional(),
});
/**
* Worker concurrency configuration schema.
*/
const workerSchema = z.object({
concurrency: intWithDefault(1),
lockDuration: intWithDefault(30000),
emailConcurrency: intWithDefault(10),
analyticsConcurrency: intWithDefault(1),
cleanupConcurrency: intWithDefault(10),
weeklyAnalyticsConcurrency: intWithDefault(1),
});
/**
* Server configuration schema.
*/
const serverSchema = z.object({
nodeEnv: z.enum(['development', 'production', 'test']).default('development'),
port: intWithDefault(3001),
frontendUrl: z.string().url().optional(),
baseUrl: z.string().optional(),
storagePath: z.string().default('/var/www/flyer-crawler.projectium.com/flyer-images'),
});
/**
* Complete environment configuration schema.
*/
const envSchema = z.object({
database: databaseSchema,
redis: redisSchema,
auth: authSchema,
smtp: smtpSchema,
ai: aiSchema,
google: googleSchema,
worker: workerSchema,
server: serverSchema,
});
export type EnvConfig = z.infer<typeof envSchema>;
// --- Configuration Loading ---
/**
* Maps environment variables to the configuration structure.
* This is the single source of truth for which env vars map to which config keys.
*/
function loadEnvVars(): unknown {
return {
database: {
host: process.env.DB_HOST,
port: process.env.DB_PORT,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
name: process.env.DB_NAME,
},
redis: {
url: process.env.REDIS_URL,
password: process.env.REDIS_PASSWORD,
},
auth: {
jwtSecret: process.env.JWT_SECRET,
jwtSecretPrevious: process.env.JWT_SECRET_PREVIOUS,
},
smtp: {
host: process.env.SMTP_HOST,
port: process.env.SMTP_PORT,
user: process.env.SMTP_USER,
pass: process.env.SMTP_PASS,
secure: process.env.SMTP_SECURE,
fromEmail: process.env.SMTP_FROM_EMAIL,
},
ai: {
geminiApiKey: process.env.GEMINI_API_KEY,
geminiRpm: process.env.GEMINI_RPM,
priceQualityThreshold: process.env.AI_PRICE_QUALITY_THRESHOLD,
},
google: {
mapsApiKey: process.env.GOOGLE_MAPS_API_KEY,
clientId: process.env.GOOGLE_CLIENT_ID,
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
},
worker: {
concurrency: process.env.WORKER_CONCURRENCY,
lockDuration: process.env.WORKER_LOCK_DURATION,
emailConcurrency: process.env.EMAIL_WORKER_CONCURRENCY,
analyticsConcurrency: process.env.ANALYTICS_WORKER_CONCURRENCY,
cleanupConcurrency: process.env.CLEANUP_WORKER_CONCURRENCY,
weeklyAnalyticsConcurrency: process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY,
},
server: {
nodeEnv: process.env.NODE_ENV,
port: process.env.PORT,
frontendUrl: process.env.FRONTEND_URL,
baseUrl: process.env.BASE_URL,
storagePath: process.env.STORAGE_PATH,
},
};
}
/**
* Validates and parses environment configuration.
* Throws a descriptive error if validation fails.
*/
function parseConfig(): EnvConfig {
const rawConfig = loadEnvVars();
const result = envSchema.safeParse(rawConfig);
if (!result.success) {
const errors = result.error.issues.map((issue) => {
const path = issue.path.join('.');
return ` - ${path}: ${issue.message}`;
});
const errorMessage = [
'',
'╔════════════════════════════════════════════════════════════════╗',
'║ CONFIGURATION ERROR - APPLICATION STARTUP ║',
'╚════════════════════════════════════════════════════════════════╝',
'',
'The following environment variables are missing or invalid:',
'',
...errors,
'',
'Please check your .env file or environment configuration.',
'See ADR-007 for the complete list of required environment variables.',
'',
].join('\n');
// In test environment, throw instead of exiting to allow test frameworks to catch
if (process.env.NODE_ENV === 'test') {
throw new Error(errorMessage);
}
console.error(errorMessage);
process.exit(1);
}
return result.data;
}
// --- Exported Configuration ---
/**
* The validated application configuration.
* This is a singleton that is parsed once at module load time.
*
* @example
* ```typescript
* import { config } from './config/env';
*
* // Access database config
* const pool = new Pool({
* host: config.database.host,
* port: config.database.port,
* user: config.database.user,
* password: config.database.password,
* database: config.database.name,
* });
*
* // Check environment
 * if (config.server.nodeEnv === 'production') {
* // production-only logic
* }
* ```
*/
export const config: EnvConfig = parseConfig();
// --- Convenience Helpers ---
/**
* Returns true if running in production environment.
*/
export const isProduction = config.server.nodeEnv === 'production';
/**
* Returns true if running in test environment.
*/
export const isTest = config.server.nodeEnv === 'test';
/**
* Returns true if running in development environment.
*/
export const isDevelopment = config.server.nodeEnv === 'development';
/**
* Returns true if SMTP is configured (all required fields present).
*/
export const isSmtpConfigured =
!!config.smtp.host && !!config.smtp.user && !!config.smtp.pass && !!config.smtp.fromEmail;
/**
* Returns true if AI services are configured.
*/
export const isAiConfigured = !!config.ai.geminiApiKey;
/**
* Returns true if Google Maps is configured.
*/
export const isGoogleMapsConfigured = !!config.google.mapsApiKey;
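Taken together, loadEnvVars above defines the full set of environment variables the application reads. An illustrative .env for local development follows; only the variable names come from this file, every value is a placeholder, and optional groups (SMTP, Google, worker tuning) can be left out entirely:

# .env (example values only)
DB_HOST=postgres
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=flyer_crawler_dev
REDIS_URL=redis://redis:6379
JWT_SECRET=replace-with-a-random-string-of-at-least-32-characters
NODE_ENV=development
PORT=3001
# Optional: AI features degrade gracefully when unset
GEMINI_API_KEY=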

View File

@@ -2,15 +2,9 @@
// src/hooks/useFlyerUploader.ts
import { useState, useCallback, useMemo } from 'react';
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import {
uploadAndProcessFlyer,
getJobStatus,
type JobStatus,
JobFailedError,
} from '../services/aiApiClient';
import { uploadAndProcessFlyer, getJobStatus, type JobStatus } from '../services/aiApiClient';
import { logger } from '../services/logger.client';
import { generateFileChecksum } from '../utils/checksum';
import type { ProcessingStage } from '../types';
export type ProcessingState = 'idle' | 'uploading' | 'polling' | 'completed' | 'error';
@@ -105,7 +99,7 @@ export const useFlyerUploader = () => {
// Consolidate state derivation for the UI from the react-query hooks using useMemo.
// This improves performance by memoizing the derived state and makes the logic easier to follow.
const { processingState, errorMessage, duplicateFlyerId, flyerId, statusMessage } = useMemo(() => {
const { processingState, errorMessage, duplicateFlyerId, flyerId } = useMemo(() => {
// The order of these checks is critical. Errors must be checked first to override
// any stale `jobStatus` from a previous successful poll.
const state: ProcessingState = (() => {
@@ -150,7 +144,7 @@ export const useFlyerUploader = () => {
processingState: state,
errorMessage: msg,
duplicateFlyerId: dupId,
flyerId: jobStatus?.state === 'completed' ? jobStatus.returnValue?.flyerId ?? null : null,
flyerId: jobStatus?.state === 'completed' ? (jobStatus.returnValue?.flyerId ?? null) : null,
statusMessage: uploadMutation.isPending ? 'Uploading file...' : jobStatus?.progress?.message,
};
}, [uploadMutation, jobStatus, pollError]);

View File

@@ -9,6 +9,7 @@ import {
useUpdateShoppingListItemMutation,
useRemoveShoppingListItemMutation,
} from './mutations';
import { logger } from '../services/logger.client';
import type { ShoppingListItem } from '../types';
/**
@@ -84,7 +85,7 @@ const useShoppingListsHook = () => {
await createListMutation.mutateAsync({ name });
} catch (error) {
// Error is already handled by the mutation hook (notification shown)
console.error('useShoppingLists: Failed to create list', error);
logger.error({ err: error }, '[useShoppingLists] Failed to create list');
}
},
[userProfile, createListMutation],
@@ -102,7 +103,7 @@ const useShoppingListsHook = () => {
await deleteListMutation.mutateAsync({ listId });
} catch (error) {
// Error is already handled by the mutation hook (notification shown)
console.error('useShoppingLists: Failed to delete list', error);
logger.error({ err: error }, '[useShoppingLists] Failed to delete list');
}
},
[userProfile, deleteListMutation],
@@ -123,7 +124,7 @@ const useShoppingListsHook = () => {
await addItemMutation.mutateAsync({ listId, item });
} catch (error) {
// Error is already handled by the mutation hook (notification shown)
console.error('useShoppingLists: Failed to add item', error);
logger.error({ err: error }, '[useShoppingLists] Failed to add item');
}
},
[userProfile, addItemMutation],
@@ -141,7 +142,7 @@ const useShoppingListsHook = () => {
await updateItemMutation.mutateAsync({ itemId, updates });
} catch (error) {
// Error is already handled by the mutation hook (notification shown)
console.error('useShoppingLists: Failed to update item', error);
logger.error({ err: error }, '[useShoppingLists] Failed to update item');
}
},
[userProfile, updateItemMutation],
@@ -159,7 +160,7 @@ const useShoppingListsHook = () => {
await removeItemMutation.mutateAsync({ itemId });
} catch (error) {
// Error is already handled by the mutation hook (notification shown)
console.error('useShoppingLists: Failed to remove item', error);
logger.error({ err: error }, '[useShoppingLists] Failed to remove item');
}
},
[userProfile, removeItemMutation],

View File

@@ -3,6 +3,7 @@ import { useMemo, useCallback } from 'react';
import { useAuth } from '../hooks/useAuth';
import { useUserData } from '../hooks/useUserData';
import { useAddWatchedItemMutation, useRemoveWatchedItemMutation } from './mutations';
import { logger } from '../services/logger.client';
/**
* A custom hook to manage all state and logic related to a user's watched items.
@@ -43,7 +44,7 @@ const useWatchedItemsHook = () => {
} catch (error) {
// Error is already handled by the mutation hook (notification shown)
// Just log for debugging
console.error('useWatchedItems: Failed to add item', error);
logger.error({ err: error }, '[useWatchedItems] Failed to add item');
}
},
[userProfile, addWatchedItemMutation],
@@ -62,7 +63,7 @@ const useWatchedItemsHook = () => {
} catch (error) {
// Error is already handled by the mutation hook (notification shown)
// Just log for debugging
console.error('useWatchedItems: Failed to remove item', error);
logger.error({ err: error }, '[useWatchedItems] Failed to remove item');
}
},
[userProfile, removeWatchedItemMutation],

View File

@@ -114,10 +114,13 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/generic-error');
expect(response.status).toBe(500);
// In test/dev, we now expect a stack trace for 5xx errors.
expect(response.body.message).toBe('A generic server error occurred.');
expect(response.body.stack).toBeDefined();
expect(response.body.errorId).toEqual(expect.any(String));
console.log('[DEBUG] errorHandler.test.ts: Received 500 error response with ID:', response.body.errorId);
expect(response.body.error.message).toBe('A generic server error occurred.');
expect(response.body.error.details.stack).toBeDefined();
expect(response.body.meta.requestId).toEqual(expect.any(String));
console.log(
'[DEBUG] errorHandler.test.ts: Received 500 error response with ID:',
response.body.meta.requestId,
);
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(Error),
@@ -136,7 +139,10 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/http-error-404');
expect(response.status).toBe(404);
expect(response.body).toEqual({ message: 'Resource not found' });
expect(response.body).toEqual({
success: false,
error: { code: 'NOT_FOUND', message: 'Resource not found' },
});
expect(mockLogger.error).not.toHaveBeenCalled(); // 4xx errors are not logged as server errors
expect(mockLogger.warn).toHaveBeenCalledWith(
{
@@ -152,7 +158,10 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/not-found-error');
expect(response.status).toBe(404);
expect(response.body).toEqual({ message: 'Specific resource missing' });
expect(response.body).toEqual({
success: false,
error: { code: 'NOT_FOUND', message: 'Specific resource missing' },
});
expect(mockLogger.error).not.toHaveBeenCalled();
expect(mockLogger.warn).toHaveBeenCalledWith(
{
@@ -168,7 +177,10 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/fk-error');
expect(response.status).toBe(400);
expect(response.body).toEqual({ message: 'The referenced item does not exist.' });
expect(response.body).toEqual({
success: false,
error: { code: 'BAD_REQUEST', message: 'The referenced item does not exist.' },
});
expect(mockLogger.error).not.toHaveBeenCalled();
expect(mockLogger.warn).toHaveBeenCalledWith(
{
@@ -184,7 +196,10 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/unique-error');
expect(response.status).toBe(409); // 409 Conflict
expect(response.body).toEqual({ message: 'This item already exists.' });
expect(response.body).toEqual({
success: false,
error: { code: 'CONFLICT', message: 'This item already exists.' },
});
expect(mockLogger.error).not.toHaveBeenCalled();
expect(mockLogger.warn).toHaveBeenCalledWith(
{
@@ -200,9 +215,9 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/validation-error');
expect(response.status).toBe(400);
expect(response.body.message).toBe('Input validation failed');
expect(response.body.errors).toBeDefined();
expect(response.body.errors).toEqual([
expect(response.body.error.message).toBe('Input validation failed');
expect(response.body.error.details).toBeDefined();
expect(response.body.error.details).toEqual([
{ path: ['body', 'email'], message: 'Invalid email format' },
]);
expect(mockLogger.error).not.toHaveBeenCalled(); // 4xx errors are not logged as server errors
@@ -222,9 +237,9 @@ describe('errorHandler Middleware', () => {
expect(response.status).toBe(500);
// In test/dev, we now expect a stack trace for 5xx errors.
expect(response.body.message).toBe('A database connection issue occurred.');
expect(response.body.stack).toBeDefined();
expect(response.body.errorId).toEqual(expect.any(String));
expect(response.body.error.message).toBe('A database connection issue occurred.');
expect(response.body.error.details.stack).toBeDefined();
expect(response.body.meta.requestId).toEqual(expect.any(String));
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(DatabaseError),
@@ -243,7 +258,10 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/unauthorized-error-no-status');
expect(response.status).toBe(401);
expect(response.body).toEqual({ message: 'Invalid Token' });
expect(response.body).toEqual({
success: false,
error: { code: 'UNAUTHORIZED', message: 'Invalid Token' },
});
expect(mockLogger.warn).toHaveBeenCalledWith(
{
err: expect.any(Error),
@@ -258,7 +276,10 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/unauthorized-error-with-status');
expect(response.status).toBe(401);
expect(response.body).toEqual({ message: 'Invalid Token' });
expect(response.body).toEqual({
success: false,
error: { code: 'UNAUTHORIZED', message: 'Invalid Token' },
});
expect(mockLogger.warn).toHaveBeenCalledWith(
{
err: expect.any(Error),
@@ -304,17 +325,17 @@ describe('errorHandler Middleware', () => {
const response = await supertest(app).get('/generic-error');
expect(response.status).toBe(500);
expect(response.body.message).toMatch(
expect(response.body.error.message).toMatch(
/An unexpected server error occurred. Please reference error ID: \w+/,
);
expect(response.body.stack).toBeUndefined();
expect(response.body.error.details?.stack).toBeUndefined();
});
it('should return the actual error message for client errors (4xx) in production', async () => {
const response = await supertest(app).get('/http-error-404');
expect(response.status).toBe(404);
expect(response.body.message).toBe('Resource not found');
expect(response.body.error.message).toBe('Resource not found');
});
});
});

View File

@@ -1,4 +1,11 @@
// src/middleware/errorHandler.ts
// ============================================================================
// CENTRALIZED ERROR HANDLING MIDDLEWARE
// ============================================================================
// This middleware standardizes all error responses per ADR-028.
// It should be the LAST `app.use()` call to catch all errors.
// ============================================================================
import { Request, Response, NextFunction } from 'express';
import crypto from 'crypto';
import { ZodError } from 'zod';
@@ -9,12 +16,43 @@ import {
ValidationError,
} from '../services/db/errors.db';
import { logger } from '../services/logger.server';
import { ErrorCode, ApiErrorResponse } from '../types/api';
/**
* Helper to send standardized error responses.
*/
function sendErrorResponse(
res: Response,
statusCode: number,
code: string,
message: string,
details?: unknown,
meta?: { requestId?: string; timestamp?: string },
): Response<ApiErrorResponse> {
const response: ApiErrorResponse = {
success: false,
error: {
code,
message,
},
};
if (details !== undefined) {
response.error.details = details;
}
if (meta) {
response.meta = meta;
}
return res.status(statusCode).json(response);
}
/**
* A centralized error handling middleware for the Express application.
* This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
*
* It standardizes error responses and ensures consistent logging.
* It standardizes error responses per ADR-028 and ensures consistent logging per ADR-004.
*/
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
// If headers have already been sent, delegate to the default Express error handler.
@@ -29,16 +67,19 @@ export const errorHandler = (err: Error, req: Request, res: Response, next: Next
if (err instanceof ZodError) {
const statusCode = 400;
const message = 'The request data is invalid.';
const errors = err.issues.map((e) => ({ path: e.path, message: e.message }));
log.warn({ err, validationErrors: errors, statusCode }, `Client Error on ${req.method} ${req.path}: ${message}`);
return res.status(statusCode).json({ message, errors });
const details = err.issues.map((e) => ({ path: e.path, message: e.message }));
log.warn(
{ err, validationErrors: details, statusCode },
`Client Error on ${req.method} ${req.path}: ${message}`,
);
return sendErrorResponse(res, statusCode, ErrorCode.VALIDATION_ERROR, message, details);
}
// --- Handle Custom Operational Errors ---
if (err instanceof NotFoundError) {
const statusCode = 404;
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
return res.status(statusCode).json({ message: err.message });
return sendErrorResponse(res, statusCode, ErrorCode.NOT_FOUND, err.message);
}
if (err instanceof ValidationError) {
@@ -47,30 +88,66 @@ export const errorHandler = (err: Error, req: Request, res: Response, next: Next
{ err, validationErrors: err.validationErrors, statusCode },
`Client Error on ${req.method} ${req.path}: ${err.message}`,
);
return res.status(statusCode).json({ message: err.message, errors: err.validationErrors });
return sendErrorResponse(
res,
statusCode,
ErrorCode.VALIDATION_ERROR,
err.message,
err.validationErrors,
);
}
if (err instanceof UniqueConstraintError) {
const statusCode = 409;
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
return res.status(statusCode).json({ message: err.message }); // Use 409 Conflict for unique constraints
return sendErrorResponse(res, statusCode, ErrorCode.CONFLICT, err.message);
}
if (err instanceof ForeignKeyConstraintError) {
const statusCode = 400;
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
return res.status(statusCode).json({ message: err.message });
return sendErrorResponse(res, statusCode, ErrorCode.BAD_REQUEST, err.message);
}
// --- Handle Generic Client Errors (e.g., from express-jwt, or manual status setting) ---
let status = (err as any).status || (err as any).statusCode;
const errWithStatus = err as Error & { status?: number; statusCode?: number };
let status = errWithStatus.status || errWithStatus.statusCode;
// Default UnauthorizedError to 401 if no status is present, a common case for express-jwt.
if (err.name === 'UnauthorizedError' && !status) {
status = 401;
}
if (status && status >= 400 && status < 500) {
log.warn({ err, statusCode: status }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
return res.status(status).json({ message: err.message });
log.warn(
{ err, statusCode: status },
`Client Error on ${req.method} ${req.path}: ${err.message}`,
);
// Map status codes to error codes
let errorCode: string;
switch (status) {
case 400:
errorCode = ErrorCode.BAD_REQUEST;
break;
case 401:
errorCode = ErrorCode.UNAUTHORIZED;
break;
case 403:
errorCode = ErrorCode.FORBIDDEN;
break;
case 404:
errorCode = ErrorCode.NOT_FOUND;
break;
case 409:
errorCode = ErrorCode.CONFLICT;
break;
case 429:
errorCode = ErrorCode.RATE_LIMITED;
break;
default:
errorCode = ErrorCode.BAD_REQUEST;
}
return sendErrorResponse(res, status, errorCode, err.message);
}
// --- Handle All Other (500-level) Errors ---
@@ -91,11 +168,23 @@ export const errorHandler = (err: Error, req: Request, res: Response, next: Next
// In production, send a generic message to avoid leaking implementation details.
if (process.env.NODE_ENV === 'production') {
return res.status(500).json({
message: `An unexpected server error occurred. Please reference error ID: ${errorId}`,
});
return sendErrorResponse(
res,
500,
ErrorCode.INTERNAL_ERROR,
`An unexpected server error occurred. Please reference error ID: ${errorId}`,
undefined,
{ requestId: errorId },
);
}
// In non-production environments (dev, test, etc.), send more details for easier debugging.
return res.status(500).json({ message: err.message, stack: err.stack, errorId });
};
return sendErrorResponse(
res,
500,
ErrorCode.INTERNAL_ERROR,
err.message,
{ stack: err.stack },
{ requestId: errorId },
);
};
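The handler imports ErrorCode and ApiErrorResponse from src/types/api, which does not appear in this excerpt. Inferring from the call sites above and from the expectations in the updated errorHandler tests earlier in this diff, the definitions are roughly the following (a reconstruction for readability, not the authoritative file):

// src/types/api.ts (inferred sketch)
export enum ErrorCode {
  BAD_REQUEST = 'BAD_REQUEST',
  UNAUTHORIZED = 'UNAUTHORIZED',
  FORBIDDEN = 'FORBIDDEN',
  NOT_FOUND = 'NOT_FOUND',
  CONFLICT = 'CONFLICT',
  RATE_LIMITED = 'RATE_LIMITED',
  VALIDATION_ERROR = 'VALIDATION_ERROR',
  INTERNAL_ERROR = 'INTERNAL_ERROR',
}

export interface ApiErrorResponse {
  success: false;
  error: {
    code: string;
    message: string;
    details?: unknown;
  };
  meta?: {
    requestId?: string;
    timestamp?: string;
  };
}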

View File

@@ -1,7 +1,7 @@
// src/pages/MyDealsPage.test.tsx
import React from 'react';
import { render, screen, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import MyDealsPage from './MyDealsPage';
import * as apiClient from '../services/apiClient';
import type { WatchedItemDeal } from '../types';

View File

@@ -1,7 +1,7 @@
// src/pages/UserProfilePage.test.tsx
import React from 'react';
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import UserProfilePage from './UserProfilePage';
import * as apiClient from '../services/apiClient';
import { UserProfile, Achievement, UserAchievement } from '../types';

View File

@@ -1,6 +1,6 @@
// src/pages/UserProfilePage.tsx
import React, { useState, useEffect, useRef } from 'react';
import * as apiClient from '../services/apiClient';
import type { UserProfile } from '../types';
import { logger } from '../services/logger.client';
import { notifySuccess, notifyError } from '../services/notificationService';
import { AchievementsList } from '../components/AchievementsList';

View File

@@ -15,47 +15,43 @@ export const VoiceLabPage: React.FC = () => {
const [audioPlayer, setAudioPlayer] = useState<HTMLAudioElement | null>(null);
// Debug log for rendering
console.log(
'[VoiceLabPage RENDER] audioPlayer state is:',
audioPlayer ? 'Present (Object)' : 'Null',
);
logger.debug({ hasAudioPlayer: !!audioPlayer }, '[VoiceLabPage] Render');
const handleGenerateSpeech = async () => {
console.log('[VoiceLabPage] handleGenerateSpeech triggered');
logger.debug('[VoiceLabPage] handleGenerateSpeech triggered');
if (!textToSpeak.trim()) {
notifyError('Please enter some text to generate speech.');
return;
}
setIsGeneratingSpeech(true);
try {
console.log('[VoiceLabPage] Calling generateSpeechFromText...');
logger.debug('[VoiceLabPage] Calling generateSpeechFromText');
const response = await generateSpeechFromText(textToSpeak);
const base64Audio = await response.json(); // Extract the base64 audio string from the response
console.log('[VoiceLabPage] Response JSON received. Length:', base64Audio?.length);
logger.debug({ audioLength: base64Audio?.length }, '[VoiceLabPage] Response JSON received');
if (base64Audio) {
const audioSrc = `data:audio/mpeg;base64,${base64Audio}`;
console.log('[VoiceLabPage] creating new Audio()');
logger.debug('[VoiceLabPage] Creating new Audio()');
const audio = new Audio(audioSrc);
console.log('[VoiceLabPage] Audio created:', audio);
logger.debug('[VoiceLabPage] Audio created');
console.log('[VoiceLabPage] calling setAudioPlayer...');
logger.debug('[VoiceLabPage] Calling setAudioPlayer');
setAudioPlayer(audio);
console.log('[VoiceLabPage] calling audio.play()...');
logger.debug('[VoiceLabPage] Calling audio.play()');
await audio.play();
console.log('[VoiceLabPage] audio.play() resolved');
logger.debug('[VoiceLabPage] audio.play() resolved');
} else {
console.warn('[VoiceLabPage] base64Audio was falsy');
logger.warn('[VoiceLabPage] base64Audio was falsy');
notifyError('The AI did not return any audio data.');
}
} catch (error) {
console.error('[VoiceLabPage] Error caught:', error);
logger.error({ err: error }, '[VoiceLabPage] Failed to generate speech');
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred.';
logger.error({ err: error }, 'Failed to generate speech:');
notifyError(`Speech generation failed: ${errorMessage}`);
} finally {
console.log('[VoiceLabPage] finally block - setting isGeneratingSpeech false');
logger.debug('[VoiceLabPage] finally block - setting isGeneratingSpeech false');
setIsGeneratingSpeech(false);
}
};

View File

@@ -4,6 +4,7 @@ import { ActivityLogItem } from '../../types';
import { UserProfile } from '../../types';
import { formatDistanceToNow } from 'date-fns';
import { useActivityLogQuery } from '../../hooks/queries/useActivityLogQuery';
import { logger } from '../../services/logger.client';
export type ActivityLogClickHandler = (log: ActivityLogItem) => void;
@@ -98,8 +99,9 @@ export const ActivityLog: React.FC<ActivityLogProps> = ({ userProfile, onLogClic
{log.user_avatar_url ? (
(() => {
const altText = log.user_full_name || 'User Avatar';
console.log(
`[ActivityLog] Rendering avatar for log ${log.activity_log_id}. Alt: "${altText}"`,
logger.debug(
{ activityLogId: log.activity_log_id, altText },
'[ActivityLog] Rendering avatar',
);
return (
<img className="h-8 w-8 rounded-full" src={log.user_avatar_url} alt={altText} />

View File

@@ -1,7 +1,6 @@
// src/pages/admin/CorrectionsPage.tsx
import React from 'react';
import { Link } from 'react-router-dom';
import type { SuggestedCorrection, MasterGroceryItem, Category } from '../../types';
import { LoadingSpinner } from '../../components/LoadingSpinner';
import { ArrowPathIcon } from '../../components/icons/ArrowPathIcon';
import { CorrectionRow } from './components/CorrectionRow';
@@ -18,15 +17,9 @@ export const CorrectionsPage: React.FC = () => {
refetch: refetchCorrections,
} = useSuggestedCorrectionsQuery();
const {
data: masterItems = [],
isLoading: isLoadingMasterItems,
} = useMasterItemsQuery();
const { data: masterItems = [], isLoading: isLoadingMasterItems } = useMasterItemsQuery();
const {
data: categories = [],
isLoading: isLoadingCategories,
} = useCategoriesQuery();
const { data: categories = [], isLoading: isLoadingCategories } = useCategoriesQuery();
const isLoading = isLoadingCorrections || isLoadingMasterItems || isLoadingCategories;
const error = correctionsError?.message || null;

View File

@@ -5,14 +5,15 @@ import { fetchAllBrands, uploadBrandLogo } from '../../../services/apiClient';
import { Brand } from '../../../types';
import { ErrorDisplay } from '../../../components/ErrorDisplay';
import { useApiOnMount } from '../../../hooks/useApiOnMount';
import { logger } from '../../../services/logger.client';
export const AdminBrandManager: React.FC = () => {
// Wrap the fetcher function in useCallback to prevent it from being recreated on every render.
// The hook expects a function that returns a Promise<Response>, and it will handle
// the JSON parsing and error checking internally.
const fetchBrandsWrapper = useCallback(() => {
console.log(
'AdminBrandManager: The memoized fetchBrandsWrapper is being passed to useApiOnMount.',
logger.debug(
'[AdminBrandManager] The memoized fetchBrandsWrapper is being passed to useApiOnMount',
);
// This wrapper simply calls the API client function. The hook will manage the promise.
return fetchAllBrands();
@@ -30,19 +31,22 @@ export const AdminBrandManager: React.FC = () => {
// At render time, decide which data to display. If updatedBrands exists, it takes precedence.
// Otherwise, fall back to the initial data from the hook. Default to an empty array.
const brandsToRender = updatedBrands || initialBrands || [];
console.log('AdminBrandManager RENDER:', {
loading,
error: error?.message,
hasInitialBrands: !!initialBrands,
hasUpdatedBrands: !!updatedBrands,
brandsToRenderCount: brandsToRender.length,
});
logger.debug(
{
loading,
error: error?.message,
hasInitialBrands: !!initialBrands,
hasUpdatedBrands: !!updatedBrands,
brandsToRenderCount: brandsToRender.length,
},
'[AdminBrandManager] Render',
);
// The file parameter is now optional to handle cases where the user cancels the file picker.
const handleLogoUpload = async (brandId: number, file: File | undefined) => {
if (!file) {
// This check is now the single source of truth for a missing file.
console.log('AdminBrandManager: handleLogoUpload called with no file. Showing error toast.');
logger.debug('[AdminBrandManager] handleLogoUpload called with no file. Showing error toast');
toast.error('Please select a file to upload.');
return;
}
@@ -61,11 +65,14 @@ export const AdminBrandManager: React.FC = () => {
try {
const response = await uploadBrandLogo(brandId, file);
console.log('AdminBrandManager: Logo upload response received.', {
ok: response.ok,
status: response.status,
statusText: response.statusText,
});
logger.debug(
{
ok: response.ok,
status: response.status,
statusText: response.statusText,
},
'[AdminBrandManager] Logo upload response received',
);
// Check for a successful response before attempting to parse JSON.
if (!response.ok) {
@@ -78,8 +85,9 @@ export const AdminBrandManager: React.FC = () => {
// Optimistically update the UI by setting the updatedBrands state.
// This update is based on the currently rendered list of brands.
console.log(
`AdminBrandManager: Optimistically updating brand ${brandId} with new logo: ${logoUrl}`,
logger.debug(
{ brandId, logoUrl },
'[AdminBrandManager] Optimistically updating brand with new logo',
);
setUpdatedBrands(
brandsToRender.map((brand) =>
@@ -93,12 +101,12 @@ export const AdminBrandManager: React.FC = () => {
};
if (loading) {
console.log('AdminBrandManager: Rendering the loading state.');
logger.debug('[AdminBrandManager] Rendering the loading state');
return <div className="text-center p-4">Loading brands...</div>;
}
if (error) {
console.error(`AdminBrandManager: Rendering the error state. Error: ${error.message}`);
logger.error({ err: error }, '[AdminBrandManager] Rendering the error state');
return <ErrorDisplay message={`Failed to load brands: ${error.message}`} />;
}
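Note: the console.log calls in ActivityLog and AdminBrandManager are swapped for a shared logger.client module that takes a structured context object first and the message second. The module itself is not part of this diff; a minimal sketch of what it might look like, assuming it wraps pino's browser build (only the import path and the call signature are confirmed by the changes above):

// src/services/logger.client.ts -- illustrative sketch only; the real module may differ.
import pino from 'pino';

// Browser-safe logger exposing the pino-style (mergeObject, message) signature
// used above, e.g. logger.debug({ brandId, logoUrl }, '[AdminBrandManager] ...').
export const logger = pino({
  browser: { asObject: true }, // emit structured log objects instead of formatted strings
  // NODE_ENV is assumed to be inlined by the bundler; adjust for the actual build setup.
  level: process.env.NODE_ENV === 'production' ? 'info' : 'debug',
});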

View File

@@ -2,7 +2,7 @@
import React from 'react';
import ReactDOM from 'react-dom';
import { screen, fireEvent, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { CorrectionRow } from './CorrectionRow';
import * as apiClient from '../../../services/apiClient';
import {

View File

@@ -1,7 +1,7 @@
// src/pages/admin/components/ProfileManager.test.tsx
import React from 'react';
import { render, screen, fireEvent, waitFor, cleanup, act } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach, afterEach, type Mock, test } from 'vitest';
import { describe, it, expect, vi, beforeEach, afterEach, type Mock } from 'vitest';
import { ProfileManager } from './ProfileManager';
import * as apiClient from '../../../services/apiClient';
import { notifySuccess, notifyError } from '../../../services/notificationService';
@@ -272,7 +272,9 @@ describe('ProfileManager', () => {
await waitFor(() => {
expect(notifyError).toHaveBeenCalledWith('Cannot save profile, no user is logged in.');
expect(loggerSpy).toHaveBeenCalledWith('[handleProfileSave] Aborted: No user is logged in.');
expect(loggerSpy).toHaveBeenCalledWith(
'[handleProfileSave] Aborted: No user is logged in.',
);
});
expect(mockedApiClient.updateUserProfile).not.toHaveBeenCalled();
});
@@ -974,11 +976,11 @@ describe('ProfileManager', () => {
});
it('should handle updating the user profile and address with empty strings', async () => {
mockedApiClient.updateUserProfile.mockImplementation(async (data) =>
new Response(JSON.stringify({ ...authenticatedProfile, ...data })),
mockedApiClient.updateUserProfile.mockImplementation(
async (data) => new Response(JSON.stringify({ ...authenticatedProfile, ...data })),
);
mockedApiClient.updateUserAddress.mockImplementation(async (data) =>
new Response(JSON.stringify({ ...mockAddress, ...data })),
mockedApiClient.updateUserAddress.mockImplementation(
async (data) => new Response(JSON.stringify({ ...mockAddress, ...data })),
);
render(<ProfileManager {...defaultAuthenticatedProps} />);
@@ -1004,7 +1006,7 @@ describe('ProfileManager', () => {
expect.objectContaining({ signal: expect.anything() }),
);
expect(mockOnProfileUpdate).toHaveBeenCalledWith(
expect.objectContaining({ full_name: '' })
expect.objectContaining({ full_name: '' }),
);
expect(notifySuccess).toHaveBeenCalledWith('Profile updated successfully!');
});

View File

@@ -1,7 +1,7 @@
// src/providers/ApiProvider.test.tsx
import React, { useContext } from 'react';
import { render, screen } from '@testing-library/react';
import { describe, it, expect, vi } from 'vitest';
import { describe, it, expect } from 'vitest';
import { ApiProvider } from './ApiProvider';
import { ApiContext } from '../contexts/ApiContext';
import * as apiClient from '../services/apiClient';
@@ -26,7 +26,7 @@ describe('ApiProvider & ApiContext', () => {
render(
<ApiProvider>
<div data-testid="child">Child Content</div>
</ApiProvider>
</ApiProvider>,
);
expect(screen.getByTestId('child')).toBeInTheDocument();
expect(screen.getByText('Child Content')).toBeInTheDocument();
@@ -36,7 +36,7 @@ describe('ApiProvider & ApiContext', () => {
render(
<ApiProvider>
<TestConsumer />
</ApiProvider>
</ApiProvider>,
);
expect(screen.getByTestId('value-check')).toHaveTextContent('Matches apiClient');
});
@@ -46,4 +46,4 @@ describe('ApiProvider & ApiContext', () => {
render(<TestConsumer />);
expect(screen.getByTestId('value-check')).toHaveTextContent('Matches apiClient');
});
});
});

View File

@@ -170,7 +170,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.adminRepo.getSuggestedCorrections).mockResolvedValue(mockCorrections);
const response = await supertest(app).get('/api/admin/corrections');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockCorrections);
expect(response.body.data).toEqual(mockCorrections);
});
it('should return 500 if the database call fails', async () => {
@@ -179,7 +179,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
);
const response = await supertest(app).get('/api/admin/corrections');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('POST /corrections/:id/approve should approve a correction', async () => {
@@ -187,7 +187,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.adminRepo.approveCorrection).mockResolvedValue(undefined);
const response = await supertest(app).post(`/api/admin/corrections/${correctionId}/approve`);
expect(response.status).toBe(200);
expect(response.body).toEqual({ message: 'Correction approved successfully.' });
expect(response.body.data).toEqual({ message: 'Correction approved successfully.' });
expect(vi.mocked(mockedDb.adminRepo.approveCorrection)).toHaveBeenCalledWith(
correctionId,
expect.anything(),
@@ -206,7 +206,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.adminRepo.rejectCorrection).mockResolvedValue(undefined);
const response = await supertest(app).post(`/api/admin/corrections/${correctionId}/reject`);
expect(response.status).toBe(200);
expect(response.body).toEqual({ message: 'Correction rejected successfully.' });
expect(response.body.data).toEqual({ message: 'Correction rejected successfully.' });
});
it('POST /corrections/:id/reject should return 500 on DB error', async () => {
@@ -230,7 +230,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
.put(`/api/admin/corrections/${correctionId}`)
.send(requestBody);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUpdatedCorrection);
expect(response.body.data).toEqual(mockUpdatedCorrection);
});
it('PUT /corrections/:id should return 400 for invalid data', async () => {
@@ -248,7 +248,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
.put('/api/admin/corrections/999')
.send({ suggested_value: 'new value' });
expect(response.status).toBe(404);
expect(response.body.message).toBe('Correction with ID 999 not found');
expect(response.body.error.message).toBe('Correction with ID 999 not found');
});
it('PUT /corrections/:id should return 500 on a generic DB error', async () => {
@@ -259,7 +259,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
.put('/api/admin/corrections/101')
.send({ suggested_value: 'new value' });
expect(response.status).toBe(500);
expect(response.body.message).toBe('Generic DB Error');
expect(response.body.error.message).toBe('Generic DB Error');
});
});
@@ -272,7 +272,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.adminRepo.getFlyersForReview).mockResolvedValue(mockFlyers);
const response = await supertest(app).get('/api/admin/review/flyers');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyers);
expect(response.body.data).toEqual(mockFlyers);
expect(vi.mocked(mockedDb.adminRepo.getFlyersForReview)).toHaveBeenCalledWith(
expect.anything(),
);
@@ -282,7 +282,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.adminRepo.getFlyersForReview).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/review/flyers');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -292,7 +292,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.adminRepo.getApplicationStats).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/stats');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -302,14 +302,14 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.flyerRepo.getAllBrands).mockResolvedValue(mockBrands);
const response = await supertest(app).get('/api/admin/brands');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockBrands);
expect(response.body.data).toEqual(mockBrands);
});
it('GET /brands should return 500 on DB error', async () => {
vi.mocked(mockedDb.flyerRepo.getAllBrands).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/brands');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('POST /brands/:id/logo should upload a logo and update the brand', async () => {
@@ -319,7 +319,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
.post(`/api/admin/brands/${brandId}/logo`)
.attach('logoImage', Buffer.from('dummy-logo-content'), 'test-logo.png');
expect(response.status).toBe(200);
expect(response.body.message).toBe('Brand logo updated successfully.');
expect(response.body.data.message).toBe('Brand logo updated successfully.');
expect(vi.mocked(mockedDb.adminRepo.updateBrandLogo)).toHaveBeenCalledWith(
brandId,
expect.stringContaining('/flyer-images/'),
@@ -339,7 +339,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
it('POST /brands/:id/logo should return 400 if no file is uploaded', async () => {
const response = await supertest(app).post('/api/admin/brands/55/logo');
expect(response.status).toBe(400);
expect(response.body.message).toMatch(
expect(response.body.error.message).toMatch(
/Logo image file is required|The request data is invalid|Logo image file is missing./,
);
});
@@ -367,7 +367,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
.attach('logoImage', Buffer.from('this is not an image'), 'document.txt');
expect(response.status).toBe(400);
// This message comes from the handleMulterError middleware for the imageFileFilter
expect(response.body.message).toBe('Only image files are allowed!');
expect(response.body.error.message).toBe('Only image files are allowed!');
});
it('POST /brands/:id/logo should return 400 for an invalid brand ID', async () => {
@@ -414,7 +414,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
.put(`/api/admin/recipes/${recipeId}/status`)
.send(requestBody);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUpdatedRecipe);
expect(response.body.data).toEqual(mockUpdatedRecipe);
});
it('PUT /recipes/:id/status should return 400 for an invalid status value', async () => {
@@ -448,7 +448,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
.put(`/api/admin/comments/${commentId}/status`)
.send(requestBody);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUpdatedComment);
expect(response.body.data).toEqual(mockUpdatedComment);
});
it('PUT /comments/:id/status should return 400 for an invalid status value', async () => {
@@ -485,7 +485,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
vi.mocked(mockedDb.adminRepo.getUnmatchedFlyerItems).mockResolvedValue(mockUnmatchedItems);
const response = await supertest(app).get('/api/admin/unmatched-items');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUnmatchedItems);
expect(response.body.data).toEqual(mockUnmatchedItems);
});
it('GET /unmatched-items should return 500 on DB error', async () => {
@@ -515,23 +515,21 @@ describe('Admin Content Management Routes (/api/admin)', () => {
);
const response = await supertest(app).delete(`/api/admin/flyers/${flyerId}`);
expect(response.status).toBe(404);
expect(response.body.message).toBe('Flyer with ID 999 not found.');
expect(response.body.error.message).toBe('Flyer with ID 999 not found.');
});
it('DELETE /flyers/:flyerId should return 500 on a generic DB error', async () => {
const flyerId = 42;
vi.mocked(mockedDb.flyerRepo.deleteFlyer).mockRejectedValue(
new Error('Generic DB Error'),
);
vi.mocked(mockedDb.flyerRepo.deleteFlyer).mockRejectedValue(new Error('Generic DB Error'));
const response = await supertest(app).delete(`/api/admin/flyers/${flyerId}`);
expect(response.status).toBe(500);
expect(response.body.message).toBe('Generic DB Error');
expect(response.body.error.message).toBe('Generic DB Error');
});
it('DELETE /flyers/:flyerId should return 400 for an invalid flyerId', async () => {
const response = await supertest(app).delete('/api/admin/flyers/abc');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(/Expected number, received nan/i);
expect(response.body.error.details[0].message).toMatch(/Expected number, received nan/i);
});
});
});
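Note: the assertion changes in this file (and in the route test files below) all track one response envelope: success payloads move under body.data, error messages under body.error.message, and validation issues under body.error.details. The envelope type is not shown in the diff; a hedged sketch of the shape these assertions appear to assume:

// Illustrative only -- inferred from the test assertions, not copied from the repo.
export interface ApiSuccess<T> {
  data: T;
}

export interface ApiErrorBody {
  error: {
    message: string;
    // e.g. Zod issues for 400 responses: body.error.details[0].message
    details?: Array<{ message: string; path?: (string | number)[] }>;
  };
}

export type ApiResponseBody<T> = ApiSuccess<T> | ApiErrorBody;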

View File

@@ -108,7 +108,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
// Use the instance method mock
const response = await supertest(app).post('/api/admin/trigger/daily-deal-check');
expect(response.status).toBe(202);
expect(response.body.message).toContain('Daily deal check job has been triggered');
expect(response.body.data.message).toContain('Daily deal check job has been triggered');
expect(backgroundJobService.runDailyDealCheck).toHaveBeenCalledTimes(1);
});
@@ -118,7 +118,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
});
const response = await supertest(app).post('/api/admin/trigger/daily-deal-check');
expect(response.status).toBe(500);
expect(response.body.message).toContain('Job runner failed');
expect(response.body.error.message).toContain('Job runner failed');
});
});
@@ -128,7 +128,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
vi.mocked(analyticsQueue.add).mockResolvedValue(mockJob);
const response = await supertest(app).post('/api/admin/trigger/failing-job');
expect(response.status).toBe(202);
expect(response.body.message).toContain('Failing test job has been enqueued');
expect(response.body.data.message).toContain('Failing test job has been enqueued');
expect(analyticsQueue.add).toHaveBeenCalledWith('generate-daily-report', {
reportDate: 'FAIL',
});
@@ -138,23 +138,29 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
vi.mocked(analyticsQueue.add).mockRejectedValue(new Error('Queue is down'));
const response = await supertest(app).post('/api/admin/trigger/failing-job');
expect(response.status).toBe(500);
expect(response.body.message).toBe('Queue is down');
expect(response.body.error.message).toBe('Queue is down');
});
});
describe('POST /trigger/analytics-report', () => {
it('should trigger the analytics report job and return 202 Accepted', async () => {
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockResolvedValue('manual-report-job-123');
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockResolvedValue(
'manual-report-job-123',
);
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
expect(response.status).toBe(202);
expect(response.body.message).toContain('Analytics report generation job has been enqueued');
expect(response.body.data.message).toContain(
'Analytics report generation job has been enqueued',
);
expect(backgroundJobService.triggerAnalyticsReport).toHaveBeenCalledTimes(1);
});
it('should return 500 if enqueuing the analytics job fails', async () => {
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockRejectedValue(new Error('Queue error'));
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockRejectedValue(
new Error('Queue error'),
);
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
expect(response.status).toBe(500);
});
@@ -162,17 +168,21 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
describe('POST /trigger/weekly-analytics', () => {
it('should trigger the weekly analytics job and return 202 Accepted', async () => {
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockResolvedValue('manual-weekly-report-job-123');
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockResolvedValue(
'manual-weekly-report-job-123',
);
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
expect(response.status).toBe(202);
expect(response.body.message).toContain('Successfully enqueued weekly analytics job');
expect(response.body.data.message).toContain('Successfully enqueued weekly analytics job');
expect(backgroundJobService.triggerWeeklyAnalyticsReport).toHaveBeenCalledTimes(1);
});
it('should return 500 if enqueuing the weekly analytics job fails', async () => {
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockRejectedValue(new Error('Queue error'));
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockRejectedValue(
new Error('Queue error'),
);
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
expect(response.status).toBe(500);
});
@@ -185,7 +195,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
vi.mocked(cleanupQueue.add).mockResolvedValue(mockJob);
const response = await supertest(app).post(`/api/admin/flyers/${flyerId}/cleanup`);
expect(response.status).toBe(202);
expect(response.body.message).toBe(
expect(response.body.data.message).toBe(
`File cleanup job for flyer ID ${flyerId} has been enqueued.`,
);
expect(cleanupQueue.add).toHaveBeenCalledWith('cleanup-flyer-files', { flyerId });
@@ -196,13 +206,13 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
vi.mocked(cleanupQueue.add).mockRejectedValue(new Error('Queue is down'));
const response = await supertest(app).post(`/api/admin/flyers/${flyerId}/cleanup`);
expect(response.status).toBe(500);
expect(response.body.message).toBe('Queue is down');
expect(response.body.error.message).toBe('Queue is down');
});
it('should return 400 for an invalid flyerId', async () => {
const response = await supertest(app).post('/api/admin/flyers/abc/cleanup');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(/Expected number, received nan/i);
expect(response.body.error.details[0].message).toMatch(/Expected number, received nan/i);
});
});
@@ -224,7 +234,9 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body.message).toBe(`Job ${jobId} has been successfully marked for retry.`);
expect(response.body.data.message).toBe(
`Job ${jobId} has been successfully marked for retry.`,
);
expect(mockJob.retry).toHaveBeenCalledTimes(1);
});
@@ -244,7 +256,9 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
expect(response.status).toBe(404);
expect(response.body.message).toBe(`Job with ID '${jobId}' not found in queue '${queueName}'.`);
expect(response.body.error.message).toBe(
`Job with ID '${jobId}' not found in queue '${queueName}'.`,
);
});
it('should return 404 if the job ID is not found in the queue', async () => {
@@ -253,7 +267,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
`/api/admin/jobs/${queueName}/not-found-job/retry`,
);
expect(response.status).toBe(404);
expect(response.body.message).toContain('not found in queue');
expect(response.body.error.message).toContain('not found in queue');
});
it('should return 400 if the job is not in a failed state', async () => {
@@ -267,7 +281,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
expect(response.status).toBe(400);
expect(response.body.message).toBe(
expect(response.body.error.message).toBe(
"Job is not in a 'failed' state. Current state: completed.",
); // This is now handled by the errorHandler
expect(mockJob.retry).not.toHaveBeenCalled();
@@ -284,7 +298,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
expect(response.status).toBe(500);
expect(response.body.message).toContain('Cannot retry job');
expect(response.body.error.message).toContain('Cannot retry job');
});
it('should return 400 for an invalid queueName or jobId', async () => {

View File

@@ -1,5 +1,5 @@
// src/routes/admin.monitoring.routes.test.ts
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import type { Request, Response, NextFunction } from 'express';
import { createMockUserProfile, createMockActivityLogItem } from '../tests/utils/mockFactories';
@@ -133,7 +133,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
const response = await supertest(app).get('/api/admin/activity-log');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockLogs);
expect(response.body.data).toEqual(mockLogs);
expect(adminRepo.getActivityLog).toHaveBeenCalledWith(50, 0, expect.anything());
});
@@ -148,15 +148,15 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
it('should return 400 for invalid limit and offset query parameters', async () => {
const response = await supertest(app).get('/api/admin/activity-log?limit=abc&offset=-1');
expect(response.status).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors.length).toBe(2); // Both limit and offset are invalid
expect(response.body.error.details).toBeDefined();
expect(response.body.error.details.length).toBe(2); // Both limit and offset are invalid
});
it('should return 500 if fetching activity log fails', async () => {
vi.mocked(adminRepo.getActivityLog).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/activity-log');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -177,7 +177,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body).toEqual([
expect(response.body.data).toEqual([
{ name: 'flyer-processing', isRunning: true },
{ name: 'email-sending', isRunning: true },
{ name: 'analytics-reporting', isRunning: false },
@@ -190,7 +190,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
vi.mocked(monitoringService.getWorkerStatuses).mockRejectedValue(new Error('Worker Error'));
const response = await supertest(app).get('/api/admin/workers/status');
expect(response.status).toBe(500);
expect(response.body.message).toBe('Worker Error');
expect(response.body.error.message).toBe('Worker Error');
});
});
@@ -226,7 +226,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body).toEqual([
expect(response.body.data).toEqual([
{
name: 'flyer-processing',
counts: { waiting: 5, active: 1, completed: 100, failed: 2, delayed: 0, paused: 0 },
@@ -251,13 +251,11 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
});
it('should return 500 if fetching queue counts fails', async () => {
vi.mocked(monitoringService.getQueueStatuses).mockRejectedValue(
new Error('Redis is down'),
);
vi.mocked(monitoringService.getQueueStatuses).mockRejectedValue(new Error('Redis is down'));
const response = await supertest(app).get('/api/admin/queues/status');
expect(response.status).toBe(500);
expect(response.body.message).toBe('Redis is down');
expect(response.body.error.message).toBe('Redis is down');
});
});
});

View File

@@ -2,18 +2,15 @@
import { Router, NextFunction, Request, Response } from 'express';
import passport from './passport.routes';
import { isAdmin } from './passport.routes'; // Correctly imported
import multer from 'multer';
import { z } from 'zod';
import * as db from '../services/db/index.db';
import type { UserProfile } from '../types';
import { geocodingService } from '../services/geocodingService.server';
import { cacheService } from '../services/cacheService.server';
import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
import {
createUploadMiddleware,
handleMulterError,
} from '../middleware/multer.middleware';
import { NotFoundError, ValidationError } from '../services/db/errors.db';
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
import { ValidationError } from '../services/db/errors.db';
import { validateRequest } from '../middleware/validation.middleware';
// --- Bull Board (Job Queue UI) Imports ---
@@ -21,15 +18,14 @@ import { createBullBoard } from '@bull-board/api';
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter';
import { ExpressAdapter } from '@bull-board/express';
import { backgroundJobService } from '../services/backgroundJobService';
import { flyerQueue, emailQueue, analyticsQueue, cleanupQueue, weeklyAnalyticsQueue } from '../services/queueService.server';
import { getSimpleWeekAndYear } from '../utils/dateUtils';
import {
requiredString,
numericIdParam,
uuidParamSchema,
optionalNumeric,
optionalString,
} from '../utils/zodUtils';
flyerQueue,
emailQueue,
analyticsQueue,
cleanupQueue,
weeklyAnalyticsQueue,
} from '../services/queueService.server';
import { numericIdParam, uuidParamSchema, optionalNumeric } from '../utils/zodUtils';
// Removed: import { logger } from '../services/logger.server';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { monitoringService } from '../services/monitoringService.server';
@@ -37,6 +33,7 @@ import { userService } from '../services/userService';
import { cleanupUploadedFile } from '../utils/fileUtils';
import { brandService } from '../services/brandService';
import { adminTriggerLimiter, adminUploadLimiter } from '../config/rateLimiters';
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
const updateCorrectionSchema = numericIdParam('id').extend({
body: z.object({
@@ -125,7 +122,7 @@ router.use(passport.authenticate('jwt', { session: false }), isAdmin);
router.get('/corrections', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
try {
const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
res.json(corrections);
sendSuccess(res, corrections);
} catch (error) {
req.log.error({ error }, 'Error fetching suggested corrections');
next(error);
@@ -136,8 +133,11 @@ router.get('/review/flyers', validateRequest(emptySchema), async (req, res, next
try {
req.log.debug('Fetching flyers for review via adminRepo');
const flyers = await db.adminRepo.getFlyersForReview(req.log);
req.log.info({ count: Array.isArray(flyers) ? flyers.length : 'unknown' }, 'Successfully fetched flyers for review');
res.json(flyers);
req.log.info(
{ count: Array.isArray(flyers) ? flyers.length : 'unknown' },
'Successfully fetched flyers for review',
);
sendSuccess(res, flyers);
} catch (error) {
req.log.error({ error }, 'Error fetching flyers for review');
next(error);
@@ -147,7 +147,7 @@ router.get('/review/flyers', validateRequest(emptySchema), async (req, res, next
router.get('/brands', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
try {
const brands = await db.flyerRepo.getAllBrands(req.log);
res.json(brands);
sendSuccess(res, brands);
} catch (error) {
req.log.error({ error }, 'Error fetching brands');
next(error);
@@ -157,7 +157,7 @@ router.get('/brands', validateRequest(emptySchema), async (req, res, next: NextF
router.get('/stats', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
try {
const stats = await db.adminRepo.getApplicationStats(req.log);
res.json(stats);
sendSuccess(res, stats);
} catch (error) {
req.log.error({ error }, 'Error fetching application stats');
next(error);
@@ -167,7 +167,7 @@ router.get('/stats', validateRequest(emptySchema), async (req, res, next: NextFu
router.get('/stats/daily', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
try {
const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
res.json(dailyStats);
sendSuccess(res, dailyStats);
} catch (error) {
req.log.error({ error }, 'Error fetching daily stats');
next(error);
@@ -182,7 +182,7 @@ router.post(
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
try {
await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
res.status(200).json({ message: 'Correction approved successfully.' });
sendSuccess(res, { message: 'Correction approved successfully.' });
} catch (error) {
req.log.error({ error }, 'Error approving correction');
next(error);
@@ -198,7 +198,7 @@ router.post(
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
try {
await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
res.status(200).json({ message: 'Correction rejected successfully.' });
sendSuccess(res, { message: 'Correction rejected successfully.' });
} catch (error) {
req.log.error({ error }, 'Error rejecting correction');
next(error);
@@ -218,7 +218,7 @@ router.put(
body.suggested_value,
req.log,
);
res.status(200).json(updatedCorrection);
sendSuccess(res, updatedCorrection);
} catch (error) {
req.log.error({ error }, 'Error updating suggested correction');
next(error);
@@ -234,7 +234,7 @@ router.put(
const { params, body } = req as unknown as z.infer<typeof updateRecipeStatusSchema>;
try {
const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
res.status(200).json(updatedRecipe);
sendSuccess(res, updatedRecipe);
} catch (error) {
req.log.error({ error }, 'Error updating recipe status');
next(error); // Pass all errors to the central error handler
@@ -259,8 +259,11 @@ router.post(
const logoUrl = await brandService.updateBrandLogo(params.id, req.file, req.log);
req.log.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
req.log.info(
{ brandId: params.id, logoUrl },
`Brand logo updated for brand ID: ${params.id}`,
);
sendSuccess(res, { message: 'Brand logo updated successfully.', logoUrl });
} catch (error) {
// If an error occurs after the file has been uploaded (e.g., DB error),
// we must clean up the orphaned file from the disk.
@@ -271,15 +274,19 @@ router.post(
},
);
router.get('/unmatched-items', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
try {
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
res.json(items);
} catch (error) {
req.log.error({ error }, 'Error fetching unmatched items');
next(error);
}
});
router.get(
'/unmatched-items',
validateRequest(emptySchema),
async (req, res, next: NextFunction) => {
try {
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
sendSuccess(res, items);
} catch (error) {
req.log.error({ error }, 'Error fetching unmatched items');
next(error);
}
},
);
/**
* DELETE /api/admin/recipes/:recipeId - Admin endpoint to delete any recipe.
@@ -294,7 +301,7 @@ router.delete(
try {
// The isAdmin flag bypasses the ownership check in the repository method.
await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
res.status(204).send();
sendNoContent(res);
} catch (error: unknown) {
req.log.error({ error }, 'Error deleting recipe');
next(error);
@@ -313,7 +320,7 @@ router.delete(
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
try {
await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
res.status(204).send();
sendNoContent(res);
} catch (error: unknown) {
req.log.error({ error }, 'Error deleting flyer');
next(error);
@@ -333,7 +340,7 @@ router.put(
body.status,
req.log,
); // This is still a standalone function in admin.db.ts
res.status(200).json(updatedComment);
sendSuccess(res, updatedComment);
} catch (error: unknown) {
req.log.error({ error }, 'Error updating comment status');
next(error);
@@ -344,7 +351,7 @@ router.put(
router.get('/users', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
try {
const users = await db.adminRepo.getAllUsers(req.log);
res.json(users);
sendSuccess(res, users);
} catch (error) {
req.log.error({ error }, 'Error fetching users');
next(error);
@@ -361,7 +368,7 @@ router.get(
try {
const logs = await db.adminRepo.getActivityLog(limit!, offset!, req.log);
res.json(logs);
sendSuccess(res, logs);
} catch (error) {
req.log.error({ error }, 'Error fetching activity log');
next(error);
@@ -377,7 +384,7 @@ router.get(
const { params } = req as unknown as z.infer<ReturnType<typeof uuidParamSchema>>;
try {
const user = await db.userRepo.findUserProfileById(params.id, req.log);
res.json(user);
sendSuccess(res, user);
} catch (error) {
req.log.error({ error }, 'Error fetching user profile');
next(error);
@@ -393,7 +400,7 @@ router.put(
const { params, body } = req as unknown as z.infer<typeof updateUserRoleSchema>;
try {
const updatedUser = await db.adminRepo.updateUserRole(params.id, body.role, req.log);
res.json(updatedUser);
sendSuccess(res, updatedUser);
} catch (error) {
req.log.error({ error }, `Error updating user ${params.id}:`);
next(error);
@@ -410,7 +417,7 @@ router.delete(
const { params } = req as unknown as z.infer<ReturnType<typeof uuidParamSchema>>;
try {
await userService.deleteUserAsAdmin(userProfile.user.user_id, params.id, req.log);
res.status(204).send();
sendNoContent(res);
} catch (error) {
req.log.error({ error }, 'Error deleting user');
next(error);
@@ -436,10 +443,14 @@ router.post(
// We call the function but don't wait for it to finish (no `await`).
// This is a "fire-and-forget" operation from the client's perspective.
backgroundJobService.runDailyDealCheck();
res.status(202).json({
message:
'Daily deal check job has been triggered successfully. It will run in the background.',
});
sendSuccess(
res,
{
message:
'Daily deal check job has been triggered successfully. It will run in the background.',
},
202,
);
} catch (error) {
req.log.error({ error }, '[Admin] Failed to trigger daily deal check job.');
next(error);
@@ -463,9 +474,13 @@ router.post(
try {
const jobId = await backgroundJobService.triggerAnalyticsReport();
res.status(202).json({
message: `Analytics report generation job has been enqueued successfully. Job ID: ${jobId}`,
});
sendSuccess(
res,
{
message: `Analytics report generation job has been enqueued successfully. Job ID: ${jobId}`,
},
202,
);
} catch (error) {
req.log.error({ error }, '[Admin] Failed to enqueue analytics report job.');
next(error);
@@ -492,9 +507,11 @@ router.post(
// Enqueue the cleanup job. The worker will handle the file deletion.
try {
await cleanupQueue.add('cleanup-flyer-files', { flyerId: params.flyerId });
res
.status(202)
.json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
sendSuccess(
res,
{ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` },
202,
);
} catch (error) {
req.log.error({ error }, 'Error enqueuing cleanup job');
next(error);
@@ -511,22 +528,24 @@ router.post(
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
req.log.info(
`[Admin] Manual trigger for a failing job received from user: ${userProfile.user.user_id}`,
);
const userProfile = req.user as UserProfile;
req.log.info(
`[Admin] Manual trigger for a failing job received from user: ${userProfile.user.user_id}`,
);
try {
// Add a job with a special 'forceFail' flag that the worker will recognize.
const job = await analyticsQueue.add('generate-daily-report', { reportDate: 'FAIL' });
res
.status(202)
.json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
} catch (error) {
req.log.error({ error }, 'Error enqueuing failing job');
next(error);
}
}
try {
// Add a job with a special 'forceFail' flag that the worker will recognize.
const job = await analyticsQueue.add('generate-daily-report', { reportDate: 'FAIL' });
sendSuccess(
res,
{ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` },
202,
);
} catch (error) {
req.log.error({ error }, 'Error enqueuing failing job');
next(error);
}
},
);
/**
@@ -545,7 +564,7 @@ router.post(
try {
const keysDeleted = await geocodingService.clearGeocodeCache(req.log);
res.status(200).json({
sendSuccess(res, {
message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
});
} catch (error) {
@@ -559,29 +578,37 @@ router.post(
* GET /api/admin/workers/status - Get the current running status of all BullMQ workers.
* This is useful for a system health dashboard to see if any workers have crashed.
*/
router.get('/workers/status', validateRequest(emptySchema), async (req: Request, res: Response, next: NextFunction) => {
try {
const workerStatuses = await monitoringService.getWorkerStatuses();
res.json(workerStatuses);
} catch (error) {
req.log.error({ error }, 'Error fetching worker statuses');
next(error);
}
});
router.get(
'/workers/status',
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
try {
const workerStatuses = await monitoringService.getWorkerStatuses();
sendSuccess(res, workerStatuses);
} catch (error) {
req.log.error({ error }, 'Error fetching worker statuses');
next(error);
}
},
);
/**
* GET /api/admin/queues/status - Get job counts for all BullMQ queues.
* This is useful for monitoring the health and backlog of background jobs.
*/
router.get('/queues/status', validateRequest(emptySchema), async (req: Request, res: Response, next: NextFunction) => {
try {
const queueStatuses = await monitoringService.getQueueStatuses();
res.json(queueStatuses);
} catch (error) {
req.log.error({ error }, 'Error fetching queue statuses');
next(error);
}
});
router.get(
'/queues/status',
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
try {
const queueStatuses = await monitoringService.getQueueStatuses();
sendSuccess(res, queueStatuses);
} catch (error) {
req.log.error({ error }, 'Error fetching queue statuses');
next(error);
}
},
);
/**
* POST /api/admin/jobs/:queueName/:jobId/retry - Retries a specific failed job.
@@ -597,12 +624,8 @@ router.post(
} = req as unknown as z.infer<typeof jobRetrySchema>;
try {
await monitoringService.retryFailedJob(
queueName,
jobId,
userProfile.user.user_id,
);
res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
await monitoringService.retryFailedJob(queueName, jobId, userProfile.user.user_id);
sendSuccess(res, { message: `Job ${jobId} has been successfully marked for retry.` });
} catch (error) {
req.log.error({ error }, 'Error retrying job');
next(error);
@@ -625,9 +648,7 @@ router.post(
try {
const jobId = await backgroundJobService.triggerWeeklyAnalyticsReport();
res
.status(202)
.json({ message: 'Successfully enqueued weekly analytics job.', jobId });
sendSuccess(res, { message: 'Successfully enqueued weekly analytics job.', jobId }, 202);
} catch (error) {
req.log.error({ error }, 'Error enqueuing weekly analytics job');
next(error);
@@ -635,8 +656,43 @@ router.post(
},
);
/**
* POST /api/admin/system/clear-cache - Clears the application data cache.
* Clears cached flyers, brands, and stats data from Redis.
* Requires admin privileges.
*/
router.post(
'/system/clear-cache',
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
req.log.info(`[Admin] Manual cache clear received from user: ${userProfile.user.user_id}`);
try {
const [flyersDeleted, brandsDeleted, statsDeleted] = await Promise.all([
cacheService.invalidateFlyers(req.log),
cacheService.invalidateBrands(req.log),
cacheService.invalidateStats(req.log),
]);
const totalDeleted = flyersDeleted + brandsDeleted + statsDeleted;
sendSuccess(res, {
message: `Successfully cleared the application cache. ${totalDeleted} keys were removed.`,
details: {
flyers: flyersDeleted,
brands: brandsDeleted,
stats: statsDeleted,
},
});
} catch (error) {
req.log.error({ error }, '[Admin] Failed to clear application cache.');
next(error);
}
},
);
/* Catches errors from multer (e.g., file size, file filter) */
router.use(handleMulterError);
export default router;
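Note: every success path in admin.routes.ts now goes through sendSuccess/sendNoContent from ../utils/apiResponse, which are not included in this diff. A minimal sketch consistent with the call sites above (payload as the second argument, optional status code as the third), assuming an Express Response:

// utils/apiResponse.ts -- sketch based on the call sites above; the real helpers may differ.
import type { Response } from 'express';

export function sendSuccess<T>(res: Response, data: T, status = 200): Response {
  // Wrap the payload in the { data } envelope the route tests assert against.
  return res.status(status).json({ data });
}

export function sendNoContent(res: Response): Response {
  // 204 responses carry no body.
  return res.status(204).send();
}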

View File

@@ -90,14 +90,14 @@ describe('Admin Stats Routes (/api/admin/stats)', () => {
vi.mocked(adminRepo.getApplicationStats).mockResolvedValue(mockStats);
const response = await supertest(app).get('/api/admin/stats');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockStats);
expect(response.body.data).toEqual(mockStats);
});
it('should return 500 if the database call fails', async () => {
vi.mocked(adminRepo.getApplicationStats).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/stats');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -110,14 +110,14 @@ describe('Admin Stats Routes (/api/admin/stats)', () => {
vi.mocked(adminRepo.getDailyStatsForLast30Days).mockResolvedValue(mockDailyStats);
const response = await supertest(app).get('/api/admin/stats/daily');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockDailyStats);
expect(response.body.data).toEqual(mockDailyStats);
});
it('should return 500 if the database call fails', async () => {
vi.mocked(adminRepo.getDailyStatsForLast30Days).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/admin/stats/daily');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
});

View File

@@ -88,14 +88,14 @@ describe('Admin System Routes (/api/admin/system)', () => {
vi.mocked(geocodingService.clearGeocodeCache).mockResolvedValue(10);
const response = await supertest(app).post('/api/admin/system/clear-geocode-cache');
expect(response.status).toBe(200);
expect(response.body.message).toContain('10 keys were removed');
expect(response.body.data.message).toContain('10 keys were removed');
});
it('should return 500 if clearing the cache fails', async () => {
vi.mocked(geocodingService.clearGeocodeCache).mockRejectedValue(new Error('Redis is down'));
const response = await supertest(app).post('/api/admin/system/clear-geocode-cache');
expect(response.status).toBe(500);
expect(response.body.message).toContain('Redis is down');
expect(response.body.error.message).toContain('Redis is down');
});
});
});

View File

@@ -104,7 +104,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
vi.mocked(adminRepo.getAllUsers).mockResolvedValue(mockUsers);
const response = await supertest(app).get('/api/admin/users');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUsers);
expect(response.body.data).toEqual(mockUsers);
expect(adminRepo.getAllUsers).toHaveBeenCalledTimes(1);
});
@@ -122,7 +122,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
vi.mocked(userRepo.findUserProfileById).mockResolvedValue(mockUser);
const response = await supertest(app).get(`/api/admin/users/${userId}`);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUser);
expect(response.body.data).toEqual(mockUser);
expect(userRepo.findUserProfileById).toHaveBeenCalledWith(userId, expect.any(Object));
});
@@ -133,7 +133,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
);
const response = await supertest(app).get(`/api/admin/users/${missingId}`);
expect(response.status).toBe(404);
expect(response.body.message).toBe('User not found.');
expect(response.body.error.message).toBe('User not found.');
});
it('should return 500 on a generic database error', async () => {
@@ -160,7 +160,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
.put(`/api/admin/users/${userId}`)
.send({ role: 'admin' });
expect(response.status).toBe(200);
expect(response.body).toEqual(updatedUser);
expect(response.body.data).toEqual(updatedUser);
expect(adminRepo.updateUserRole).toHaveBeenCalledWith(userId, 'admin', expect.any(Object));
});
@@ -173,7 +173,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
.put(`/api/admin/users/${missingId}`)
.send({ role: 'user' });
expect(response.status).toBe(404);
expect(response.body.message).toBe(`User with ID ${missingId} not found.`);
expect(response.body.error.message).toBe(`User with ID ${missingId} not found.`);
});
it('should return 500 on a generic database error', async () => {
@@ -183,7 +183,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
.put(`/api/admin/users/${userId}`)
.send({ role: 'admin' });
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('should return 400 for an invalid role', async () => {
@@ -201,7 +201,11 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
vi.mocked(userService.deleteUserAsAdmin).mockResolvedValue(undefined);
const response = await supertest(app).delete(`/api/admin/users/${targetId}`);
expect(response.status).toBe(204);
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(adminId, targetId, expect.any(Object));
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(
adminId,
targetId,
expect.any(Object),
);
});
it('should prevent an admin from deleting their own account', async () => {
@@ -209,9 +213,13 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
vi.mocked(userService.deleteUserAsAdmin).mockRejectedValue(validationError);
const response = await supertest(app).delete(`/api/admin/users/${adminId}`);
expect(response.status).toBe(400);
expect(response.body.message).toMatch(/Admins cannot delete their own account/);
expect(response.body.error.message).toMatch(/Admins cannot delete their own account/);
expect(userRepo.deleteUserById).not.toHaveBeenCalled();
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(adminId, adminId, expect.any(Object));
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(
adminId,
adminId,
expect.any(Object),
);
});
it('should return 500 on a generic database error', async () => {

View File

@@ -151,7 +151,9 @@ describe('AI Routes (/api/ai)', () => {
const validChecksum = 'a'.repeat(64);
it('should enqueue a job and return 202 on success', async () => {
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-123' } as unknown as Job);
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({
id: 'job-123',
} as unknown as Job);
const response = await supertest(app)
.post('/api/ai/upload-and-process')
@@ -159,8 +161,8 @@ describe('AI Routes (/api/ai)', () => {
.attach('flyerFile', imagePath);
expect(response.status).toBe(202);
expect(response.body.message).toBe('Flyer accepted for processing.');
expect(response.body.jobId).toBe('job-123');
expect(response.body.data.message).toBe('Flyer accepted for processing.');
expect(response.body.data.jobId).toBe('job-123');
expect(aiService.aiService.enqueueFlyerProcessing).toHaveBeenCalled();
});
@@ -170,7 +172,7 @@ describe('AI Routes (/api/ai)', () => {
.field('checksum', validChecksum);
expect(response.status).toBe(400);
expect(response.body.message).toBe('A flyer file (PDF or image) is required.');
expect(response.body.error.message).toBe('A flyer file (PDF or image) is required.');
});
it('should return 400 if checksum is missing', async () => {
@@ -180,11 +182,14 @@ describe('AI Routes (/api/ai)', () => {
expect(response.status).toBe(400);
// Use regex to be resilient to validation message changes
expect(response.body.errors[0].message).toMatch(/checksum is required|Required/i);
expect(response.body.error.details[0].message).toMatch(/checksum is required|Required/i);
});
it('should return 409 if flyer checksum already exists', async () => {
const duplicateError = new aiService.DuplicateFlyerError('This flyer has already been processed.', 99);
const duplicateError = new aiService.DuplicateFlyerError(
'This flyer has already been processed.',
99,
);
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValue(duplicateError);
const response = await supertest(app)
@@ -193,11 +198,13 @@ describe('AI Routes (/api/ai)', () => {
.attach('flyerFile', imagePath);
expect(response.status).toBe(409);
expect(response.body.message).toBe('This flyer has already been processed.');
expect(response.body.error.message).toBe('This flyer has already been processed.');
});
it('should return 500 if enqueuing the job fails', async () => {
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValueOnce(new Error('Redis connection failed'));
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValueOnce(
new Error('Redis connection failed'),
);
const response = await supertest(app)
.post('/api/ai/upload-and-process')
@@ -205,7 +212,7 @@ describe('AI Routes (/api/ai)', () => {
.attach('flyerFile', imagePath);
expect(response.status).toBe(500);
expect(response.body.message).toBe('Redis connection failed');
expect(response.body.error.message).toBe('Redis connection failed');
});
it('should pass user ID to the job when authenticated', async () => {
@@ -219,8 +226,10 @@ describe('AI Routes (/api/ai)', () => {
basePath: '/api/ai',
authenticatedUser: mockUser,
});
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-456' } as unknown as Job);
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({
id: 'job-456',
} as unknown as Job);
// Act
await supertest(authenticatedApp)
@@ -255,8 +264,10 @@ describe('AI Routes (/api/ai)', () => {
basePath: '/api/ai',
authenticatedUser: mockUserWithAddress,
});
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-789' } as unknown as Job);
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({
id: 'job-789',
} as unknown as Job);
// Act
await supertest(authenticatedApp)
@@ -296,7 +307,7 @@ describe('AI Routes (/api/ai)', () => {
const response = await supertest(app).get('/api/ai/jobs/non-existent-job/status');
expect(response.status).toBe(404);
expect(response.body.message).toBe('Job not found.');
expect(response.body.error.message).toBe('Job not found.');
});
it('should return job status if job is found', async () => {
@@ -311,7 +322,7 @@ describe('AI Routes (/api/ai)', () => {
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
expect(response.status).toBe(200);
expect(response.body.state).toBe('completed');
expect(response.body.data.state).toBe('completed');
});
// Removed flaky test 'should return 400 for an invalid job ID format'
@@ -343,7 +354,7 @@ describe('AI Routes (/api/ai)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyer);
expect(response.body.data).toEqual(mockFlyer);
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledWith(
expect.any(Object), // req.file
expect.any(Object), // req.body
@@ -358,7 +369,7 @@ describe('AI Routes (/api/ai)', () => {
.field('some_legacy_field', 'value');
expect(response.status).toBe(400);
expect(response.body.message).toBe('No flyer file uploaded.');
expect(response.body.error.message).toBe('No flyer file uploaded.');
});
it('should return 409 and cleanup file if a duplicate flyer is detected', async () => {
@@ -366,23 +377,29 @@ describe('AI Routes (/api/ai)', () => {
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(duplicateError);
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
const response = await supertest(authenticatedApp)
.post('/api/ai/upload-legacy')
.attach('flyerFile', imagePath);
expect(response.status).toBe(409);
expect(response.body.message).toBe('Duplicate legacy flyer.');
expect(response.body.flyerId).toBe(101);
expect(response.body.error.message).toBe('Duplicate legacy flyer.');
expect(response.body.error.details.flyerId).toBe(101);
expect(unlinkSpy).toHaveBeenCalledTimes(1);
unlinkSpy.mockRestore();
});
it('should return 500 and cleanup file on a generic service error', async () => {
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new Error('Internal service failure'));
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(
new Error('Internal service failure'),
);
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
const response = await supertest(authenticatedApp)
.post('/api/ai/upload-legacy')
.attach('flyerFile', imagePath);
expect(response.status).toBe(500);
expect(response.body.message).toBe('Internal service failure');
expect(response.body.error.message).toBe('Internal service failure');
expect(unlinkSpy).toHaveBeenCalledTimes(1);
unlinkSpy.mockRestore();
});
@@ -412,7 +429,7 @@ describe('AI Routes (/api/ai)', () => {
// Assert
expect(response.status).toBe(201);
expect(response.body.message).toBe('Flyer processed and saved successfully.');
expect(response.body.data.message).toBe('Flyer processed and saved successfully.');
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
});
@@ -425,7 +442,10 @@ describe('AI Routes (/api/ai)', () => {
it('should return 409 Conflict and delete the uploaded file if flyer checksum already exists', async () => {
// Arrange
const duplicateError = new aiService.DuplicateFlyerError('This flyer has already been processed.', 99);
const duplicateError = new aiService.DuplicateFlyerError(
'This flyer has already been processed.',
99,
);
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(duplicateError);
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
@@ -437,12 +457,14 @@ describe('AI Routes (/api/ai)', () => {
// Assert
expect(response.status).toBe(409);
expect(response.body.message).toBe('This flyer has already been processed.');
expect(response.body.error.message).toBe('This flyer has already been processed.');
expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled(); // Should not be called if service throws
// Assert that the file was deleted
expect(unlinkSpy).toHaveBeenCalledTimes(1);
// The filename is predictable in the test environment because of the multer config in ai.routes.ts
expect(unlinkSpy).toHaveBeenCalledWith(expect.stringContaining('flyerImage-test-flyer-image.jpg'));
expect(unlinkSpy).toHaveBeenCalledWith(
expect.stringContaining('flyerImage-test-flyer-image.jpg'),
);
});
it('should accept payload when extractedData.items is missing and save with empty items', async () => {
@@ -453,7 +475,9 @@ describe('AI Routes (/api/ai)', () => {
extractedData: { store_name: 'Partial Store' }, // no items key
};
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(createMockFlyer({ flyer_id: 2 }));
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(
createMockFlyer({ flyer_id: 2 }),
);
const response = await supertest(app)
.post('/api/ai/flyers/process')
@@ -471,7 +495,9 @@ describe('AI Routes (/api/ai)', () => {
extractedData: { items: [] }, // store_name missing
};
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(createMockFlyer({ flyer_id: 3 }));
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(
createMockFlyer({ flyer_id: 3 }),
);
const response = await supertest(app)
.post('/api/ai/flyers/process')
@@ -519,7 +545,7 @@ describe('AI Routes (/api/ai)', () => {
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
});
it('should handle payload where extractedData is null', async () => {
it('should handle payload where extractedData is null', async () => {
const payloadWithNullExtractedData = {
checksum: 'null-extracted-data-checksum',
originalFileName: 'flyer-null.jpg',
@@ -590,10 +616,12 @@ describe('AI Routes (/api/ai)', () => {
it('should handle malformed JSON in data field and return 400', async () => {
const malformedDataString = '{"checksum":'; // Invalid JSON
// Since the service parses the data, we mock it to throw a ValidationError when parsing fails
// or when it detects the malformed input.
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new ValidationError([], 'Checksum is required.'));
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(
new ValidationError([], 'Checksum is required.'),
);
const response = await supertest(app)
.post('/api/ai/flyers/process')
@@ -603,8 +631,8 @@ describe('AI Routes (/api/ai)', () => {
// The outer catch block should be hit, leading to empty parsed data.
// The handler then fails the checksum validation.
expect(response.status).toBe(400);
expect(response.body.message).toBe('Checksum is required.');
// Note: The logging expectation was removed because if the service throws a ValidationError,
expect(response.body.error.message).toBe('Checksum is required.');
// Note: The logging expectation was removed because if the service throws a ValidationError,
// the route handler passes it to the global error handler, which might log differently or not as a "critical error during parsing" in the route itself.
});
@@ -615,9 +643,11 @@ describe('AI Routes (/api/ai)', () => {
};
// Spy on fs.promises.unlink to verify file cleanup
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
// Mock the service to throw a ValidationError because the checksum is missing
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new ValidationError([], 'Checksum is required.'));
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(
new ValidationError([], 'Checksum is required.'),
);
const response = await supertest(app)
.post('/api/ai/flyers/process')
@@ -625,7 +655,7 @@ describe('AI Routes (/api/ai)', () => {
.attach('flyerImage', imagePath);
expect(response.status).toBe(400);
expect(response.body.message).toBe('Checksum is required.');
expect(response.body.error.message).toBe('Checksum is required.');
// Ensure the uploaded file is cleaned up
expect(unlinkSpy).toHaveBeenCalledTimes(1);
@@ -643,7 +673,7 @@ describe('AI Routes (/api/ai)', () => {
it('should return 200 with a stubbed response on success', async () => {
const response = await supertest(app).post('/api/ai/check-flyer').attach('image', imagePath);
expect(response.status).toBe(200);
expect(response.body.is_flyer).toBe(true);
expect(response.body.data.is_flyer).toBe(true);
});
it('should return 500 on a generic error', async () => {
@@ -674,7 +704,7 @@ describe('AI Routes (/api/ai)', () => {
.attach('image', imagePath)
.field('extractionType', 'store_name'); // Missing cropArea
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(
expect(response.body.error.details[0].message).toMatch(
/cropArea must be a valid JSON string|Required/i,
);
});
@@ -700,7 +730,7 @@ describe('AI Routes (/api/ai)', () => {
.post('/api/ai/extract-address')
.attach('image', imagePath);
expect(response.status).toBe(200);
expect(response.body.address).toBe('not identified');
expect(response.body.data.address).toBe('not identified');
});
it('should return 500 on a generic error', async () => {
@@ -728,7 +758,7 @@ describe('AI Routes (/api/ai)', () => {
.post('/api/ai/extract-logo')
.attach('images', imagePath);
expect(response.status).toBe(200);
expect(response.body.store_logo_base_64).toBeNull();
expect(response.body.data.store_logo_base_64).toBeNull();
});
it('should return 500 on a generic error', async () => {
@@ -750,7 +780,11 @@ describe('AI Routes (/api/ai)', () => {
const mockUser = createMockUserProfile({
user: { user_id: 'user-123', email: 'user-123@test.com' },
});
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUser });
const authenticatedApp = createTestApp({
router: aiRouter,
basePath: '/api/ai',
authenticatedUser: mockUser,
});
beforeEach(() => {
// Inject an authenticated user for this test block
@@ -771,7 +805,7 @@ describe('AI Routes (/api/ai)', () => {
.attach('image', imagePath);
// Use the authenticatedApp instance for requests in this block
expect(response.status).toBe(200);
expect(response.body).toEqual(mockResult);
expect(response.body.data).toEqual(mockResult);
expect(aiService.aiService.extractTextFromImageArea).toHaveBeenCalled();
});
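
These suites build a small Express app per describe-block via createTestApp({ router, basePath, authenticatedUser }). A loose sketch of what such a helper could look like, offered only to make the call sites above easier to read; the project's actual helper (multer handling, request-scoped logger wiring per ADR-004, passport setup) may differ substantially:

// Hypothetical sketch matching the createTestApp call sites above; not the repo's implementation.
import express, { type Router } from 'express';

interface TestAppOptions {
  router: Router;
  basePath: string;
  authenticatedUser?: unknown; // e.g. a UserProfile fixture from createMockUserProfile
}

export function createTestApp({ router, basePath, authenticatedUser }: TestAppOptions) {
  const app = express();
  app.use(express.json());
  app.use((req, _res, next) => {
    // Stand-in for the request-scoped pino logger the routes expect on req.log (ADR-004).
    (req as unknown as { log: Console }).log = console;
    // Inject the fixture user so auth-protected routes see an authenticated request.
    if (authenticatedUser) (req as unknown as { user?: unknown }).user = authenticatedUser;
    next();
  });
  app.use(basePath, router);
  return app;
}
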
@@ -788,27 +822,20 @@ describe('AI Routes (/api/ai)', () => {
expect(response.status).toBe(500);
// The error message might be wrapped or formatted differently
expect(response.body.message).toMatch(/AI API is down/i);
expect(response.body.error.message).toMatch(/AI API is down/i);
});
});
describe('when user is authenticated', () => {
const mockUserProfile = createMockUserProfile({
user: { user_id: 'user-123', email: 'user-123@test.com' },
});
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUserProfile });
// Note: authenticatedApp is available from the describe block above if needed
beforeEach(() => {
// The authenticatedApp instance is already set up with mockUserProfile
});
it('POST /quick-insights should return the stubbed response', async () => {
const response = await supertest(app)
.post('/api/ai/quick-insights')
.send({ items: [{ name: 'test' }] });
expect(response.status).toBe(200);
expect(response.body.text).toContain('server-generated quick insight');
expect(response.body.data.text).toContain('server-generated quick insight');
});
it('POST /quick-insights should accept items with "item" property instead of "name"', async () => {
@@ -835,20 +862,20 @@ describe('AI Routes (/api/ai)', () => {
.post('/api/ai/deep-dive')
.send({ items: [{ name: 'test' }] });
expect(response.status).toBe(200);
expect(response.body.text).toContain('server-generated deep dive');
expect(response.body.data.text).toContain('server-generated deep dive');
});
it('POST /generate-image should return 501 Not Implemented', async () => {
const response = await supertest(app).post('/api/ai/generate-image').send({ prompt: 'test' });
expect(response.status).toBe(501);
expect(response.body.message).toBe('Image generation is not yet implemented.');
expect(response.body.error.message).toBe('Image generation is not yet implemented.');
});
it('POST /generate-speech should return 501 Not Implemented', async () => {
const response = await supertest(app).post('/api/ai/generate-speech').send({ text: 'test' });
expect(response.status).toBe(501);
expect(response.body.message).toBe('Speech generation is not yet implemented.');
expect(response.body.error.message).toBe('Speech generation is not yet implemented.');
});
it('POST /search-web should return the stubbed response', async () => {
@@ -857,7 +884,7 @@ describe('AI Routes (/api/ai)', () => {
.send({ query: 'test query' });
expect(response.status).toBe(200);
expect(response.body.text).toContain('The web says this is good');
expect(response.body.data.text).toContain('The web says this is good');
});
it('POST /compare-prices should return the stubbed response', async () => {
@@ -866,7 +893,7 @@ describe('AI Routes (/api/ai)', () => {
.send({ items: [{ name: 'Milk' }] });
expect(response.status).toBe(200);
expect(response.body.text).toContain('server-generated price comparison');
expect(response.body.data.text).toContain('server-generated price comparison');
});
it('POST /plan-trip should return result on success', async () => {
@@ -882,7 +909,7 @@ describe('AI Routes (/api/ai)', () => {
});
expect(response.status).toBe(200);
expect(response.body).toEqual(mockResult);
expect(response.body.data).toEqual(mockResult);
});
it('POST /plan-trip should return 500 if the AI service fails', async () => {
@@ -899,7 +926,7 @@ describe('AI Routes (/api/ai)', () => {
});
expect(response.status).toBe(500);
expect(response.body.message).toBe('Maps API key invalid');
expect(response.body.error.message).toBe('Maps API key invalid');
});
it('POST /deep-dive should return 500 on a generic error', async () => {
@@ -910,7 +937,7 @@ describe('AI Routes (/api/ai)', () => {
.post('/api/ai/deep-dive')
.send({ items: [{ name: 'test' }] });
expect(response.status).toBe(500);
expect(response.body.message).toBe('Deep dive logging failed');
expect(response.body.error.message).toBe('Deep dive logging failed');
});
it('POST /search-web should return 500 on a generic error', async () => {
@@ -921,7 +948,7 @@ describe('AI Routes (/api/ai)', () => {
.post('/api/ai/search-web')
.send({ query: 'test query' });
expect(response.status).toBe(500);
expect(response.body.message).toBe('Search web logging failed');
expect(response.body.error.message).toBe('Search web logging failed');
});
it('POST /compare-prices should return 500 on a generic error', async () => {
@@ -932,7 +959,7 @@ describe('AI Routes (/api/ai)', () => {
.post('/api/ai/compare-prices')
.send({ items: [{ name: 'Milk' }] });
expect(response.status).toBe(500);
expect(response.body.message).toBe('Compare prices logging failed');
expect(response.body.error.message).toBe('Compare prices logging failed');
});
it('POST /quick-insights should return 400 if items are missing', async () => {


@@ -9,10 +9,7 @@ import { optionalAuth } from './passport.routes';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { aiService, DuplicateFlyerError } from '../services/aiService.server';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import {
createUploadMiddleware,
handleMulterError,
} from '../middleware/multer.middleware';
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
import { logger } from '../services/logger.server'; // Needed for module-level logging (e.g., Zod schema transforms)
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { UserProfile } from '../types'; // This was a duplicate, fixed.
@@ -26,6 +23,7 @@ import { cleanupUploadedFile, cleanupUploadedFiles } from '../utils/fileUtils';
import { monitoringService } from '../services/monitoringService.server';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { aiUploadLimiter, aiGenerationLimiter } from '../config/rateLimiters';
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
const router = Router();
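
Throughout this change set the route handlers stop calling res.json() directly and go through the shared apiResponse helpers imported just above. A rough sketch of the response envelope those helpers appear to produce, inferred from the assertions in the test hunks (response.body.data on success, response.body.error.message / response.body.error.details on failure) rather than from the helper's source, which is not part of this diff:

// Hypothetical sketch of the utils/apiResponse helpers, inferred from the envelopes the
// tests assert on. The repository's actual implementation may differ.
import type { Response } from 'express';

export enum ErrorCode {
  BAD_REQUEST = 'BAD_REQUEST',
  UNAUTHORIZED = 'UNAUTHORIZED',
  FORBIDDEN = 'FORBIDDEN',
  CONFLICT = 'CONFLICT',
  NOT_IMPLEMENTED = 'NOT_IMPLEMENTED',
}

export function sendSuccess<T>(res: Response, data: T, status = 200): Response {
  // Every successful payload is wrapped in a { data } envelope.
  return res.status(status).json({ data });
}

export function sendError(
  res: Response,
  code: ErrorCode,
  message: string,
  status = 500,
  details?: unknown,
): Response {
  // Failures are wrapped in an { error } envelope; details carries extras such as
  // Zod issues or a conflicting flyerId.
  return res.status(status).json({ error: { code, message, details } });
}

export function sendNoContent(res: Response): Response {
  return res.status(204).send();
}
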
@@ -35,7 +33,8 @@ const uploadAndProcessSchema = z.object({
body: z.object({
// Stricter validation for SHA-256 checksum. It must be a 64-character hexadecimal string.
checksum: requiredString('File checksum is required.').pipe(
z.string()
z
.string()
.length(64, 'Checksum must be 64 characters long.')
.regex(/^[a-f0-9]+$/, 'Checksum must be a valid hexadecimal string.'),
),
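
The tightened schema above only accepts a 64-character lowercase hexadecimal string, i.e. a SHA-256 digest of the uploaded file. For reference, one way a client could compute a value that passes this validation (a sketch using Node's built-in crypto; the actual client code is not part of this diff):

// Example only: producing a checksum that satisfies the 64-character lowercase hex rule above.
import { createHash } from 'node:crypto';
import { readFile } from 'node:fs/promises';

export async function fileChecksum(path: string): Promise<string> {
  const contents = await readFile(path);
  // digest('hex') yields the 64-character lowercase hexadecimal SHA-256 digest.
  return createHash('sha256').update(contents).digest('hex');
}
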
@@ -96,8 +95,14 @@ const flyerItemForAnalysisSchema = z
// Sanitize item and name by trimming whitespace.
// The transform ensures that null/undefined values are preserved
// while trimming any actual string values.
item: z.string().nullish().transform(val => (val ? val.trim() : val)),
name: z.string().nullish().transform(val => (val ? val.trim() : val)),
item: z
.string()
.nullish()
.transform((val) => (val ? val.trim() : val)),
name: z
.string()
.nullish()
.transform((val) => (val ? val.trim() : val)),
})
// Using .passthrough() allows extra properties on the item object.
// If the intent is to strictly enforce only 'item' and 'name' (and other known properties),
@@ -190,7 +195,12 @@ router.post(
const { body } = uploadAndProcessSchema.parse({ body: req.body });
if (!req.file) {
return res.status(400).json({ message: 'A flyer file (PDF or image) is required.' });
return sendError(
res,
ErrorCode.BAD_REQUEST,
'A flyer file (PDF or image) is required.',
400,
);
}
req.log.debug(
@@ -204,7 +214,7 @@ router.post(
if (process.env.NODE_ENV === 'test' && !req.headers['authorization']) {
userProfile = undefined;
}
const job = await aiService.enqueueFlyerProcessing(
req.file,
body.checksum,
@@ -215,15 +225,19 @@ router.post(
);
// Respond immediately to the client with 202 Accepted
res.status(202).json({
message: 'Flyer accepted for processing.',
jobId: job.id,
});
sendSuccess(
res,
{
message: 'Flyer accepted for processing.',
jobId: job.id,
},
202,
);
} catch (error) {
await cleanupUploadedFile(req.file);
if (error instanceof DuplicateFlyerError) {
req.log.warn(`Duplicate flyer upload attempt blocked for checksum: ${req.body?.checksum}`);
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
return sendError(res, ErrorCode.CONFLICT, error.message, 409, { flyerId: error.flyerId });
}
next(error);
}
@@ -234,6 +248,9 @@ router.post(
* POST /api/ai/upload-legacy - Process a flyer upload from a legacy client.
* This is an authenticated route that processes the flyer synchronously.
* This is used for integration testing the legacy upload flow.
*
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
* This synchronous endpoint is retained only for integration testing purposes.
*/
router.post(
'/upload-legacy',
@@ -243,16 +260,21 @@ router.post(
async (req: Request, res: Response, next: NextFunction) => {
try {
if (!req.file) {
return res.status(400).json({ message: 'No flyer file uploaded.' });
return sendError(res, ErrorCode.BAD_REQUEST, 'No flyer file uploaded.', 400);
}
const userProfile = req.user as UserProfile;
const newFlyer = await aiService.processLegacyFlyerUpload(req.file, req.body, userProfile, req.log);
res.status(200).json(newFlyer);
const newFlyer = await aiService.processLegacyFlyerUpload(
req.file,
req.body,
userProfile,
req.log,
);
sendSuccess(res, newFlyer);
} catch (error) {
await cleanupUploadedFile(req.file);
if (error instanceof DuplicateFlyerError) {
req.log.warn(`Duplicate legacy flyer upload attempt blocked.`);
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
return sendError(res, ErrorCode.CONFLICT, error.message, 409, { flyerId: error.flyerId });
}
next(error);
}
@@ -274,7 +296,7 @@ router.get(
try {
const jobStatus = await monitoringService.getFlyerJobStatus(jobId); // This was a duplicate, fixed.
req.log.debug(`[API /ai/jobs] Status check for job ${jobId}: ${jobStatus.state}`);
res.json(jobStatus);
sendSuccess(res, jobStatus);
} catch (error) {
next(error);
}
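
Taken together, the 202 handler and this job-status route suggest the asynchronous upload flow that the deprecation notes point to. A rough client-side sketch under stated assumptions: only the { data: { jobId } } envelope and jobStatus.state come from the handlers shown, while the multipart field names, the status path, and the terminal state values are guesses.

// Hedged sketch of the two-step client flow implied by the handlers above.
async function uploadFlyerAndWait(file: Blob, checksum: string): Promise<unknown> {
  const form = new FormData();
  form.append('flyerFile', file); // assumed field name
  form.append('checksum', checksum); // must be the 64-char hex SHA-256 digest

  const accepted = await fetch('/api/ai/upload-and-process', { method: 'POST', body: form });
  const { data } = await accepted.json(); // { message, jobId } per the 202 handler above

  for (;;) {
    const statusRes = await fetch(`/api/ai/jobs/${data.jobId}`); // path inferred from the log prefix
    const { data: job } = await statusRes.json(); // jobStatus, e.g. { state: ... }
    if (job.state === 'completed' || job.state === 'failed') return job; // assumed terminal states
    await new Promise((resolve) => setTimeout(resolve, 2000)); // arbitrary 2s poll interval
  }
}
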
@@ -282,9 +304,12 @@ router.get(
);
/**
* This endpoint saves the processed flyer data to the database. It is the final step
* in the flyer upload workflow after the AI has extracted the data.
* POST /api/ai/flyers/process - Saves the processed flyer data to the database.
* This is the final step in the flyer upload workflow after the AI has extracted the data.
* It uses `optionalAuth` to handle submissions from both anonymous and authenticated users.
*
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
* This synchronous endpoint processes flyers inline and should be migrated to the queue-based approach.
*/
router.post(
'/flyers/process',
@@ -294,7 +319,7 @@ router.post(
async (req, res, next: NextFunction) => {
try {
if (!req.file) {
return res.status(400).json({ message: 'Flyer image file is required.' });
return sendError(res, ErrorCode.BAD_REQUEST, 'Flyer image file is required.', 400);
}
const userProfile = req.user as UserProfile | undefined;
@@ -306,12 +331,16 @@ router.post(
req.log,
);
res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
sendSuccess(
res,
{ message: 'Flyer processed and saved successfully.', flyer: newFlyer },
201,
);
} catch (error) {
await cleanupUploadedFile(req.file);
if (error instanceof DuplicateFlyerError) {
req.log.warn(`Duplicate flyer upload attempt blocked.`);
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
return sendError(res, ErrorCode.CONFLICT, error.message, 409, { flyerId: error.flyerId });
}
next(error);
}
@@ -330,10 +359,10 @@ router.post(
async (req, res, next: NextFunction) => {
try {
if (!req.file) {
return res.status(400).json({ message: 'Image file is required.' });
return sendError(res, ErrorCode.BAD_REQUEST, 'Image file is required.', 400);
}
req.log.info(`Server-side flyer check for file: ${req.file.originalname}`);
res.status(200).json({ is_flyer: true }); // Stubbed response
sendSuccess(res, { is_flyer: true }); // Stubbed response
} catch (error) {
next(error);
} finally {
@@ -350,10 +379,10 @@ router.post(
async (req, res, next: NextFunction) => {
try {
if (!req.file) {
return res.status(400).json({ message: 'Image file is required.' });
return sendError(res, ErrorCode.BAD_REQUEST, 'Image file is required.', 400);
}
req.log.info(`Server-side address extraction for file: ${req.file.originalname}`);
res.status(200).json({ address: 'not identified' }); // Updated stubbed response
sendSuccess(res, { address: 'not identified' }); // Updated stubbed response
} catch (error) {
next(error);
} finally {
@@ -370,10 +399,10 @@ router.post(
async (req, res, next: NextFunction) => {
try {
if (!req.files || !Array.isArray(req.files) || req.files.length === 0) {
return res.status(400).json({ message: 'Image files are required.' });
return sendError(res, ErrorCode.BAD_REQUEST, 'Image files are required.', 400);
}
req.log.info(`Server-side logo extraction for ${req.files.length} image(s).`);
res.status(200).json({ store_logo_base_64: null }); // Stubbed response
sendSuccess(res, { store_logo_base_64: null }); // Stubbed response
} catch (error) {
next(error);
} finally {
@@ -390,9 +419,7 @@ router.post(
async (req, res, next: NextFunction) => {
try {
req.log.info(`Server-side quick insights requested.`);
res
.status(200)
.json({ text: 'This is a server-generated quick insight: buy the cheap stuff!' }); // Stubbed response
sendSuccess(res, { text: 'This is a server-generated quick insight: buy the cheap stuff!' }); // Stubbed response
} catch (error) {
next(error);
}
@@ -407,9 +434,9 @@ router.post(
async (req, res, next: NextFunction) => {
try {
req.log.info(`Server-side deep dive requested.`);
res
.status(200)
.json({ text: 'This is a server-generated deep dive analysis. It is very detailed.' }); // Stubbed response
sendSuccess(res, {
text: 'This is a server-generated deep dive analysis. It is very detailed.',
}); // Stubbed response
} catch (error) {
next(error);
}
@@ -424,7 +451,7 @@ router.post(
async (req, res, next: NextFunction) => {
try {
req.log.info(`Server-side web search requested.`);
res.status(200).json({ text: 'The web says this is good.', sources: [] }); // Stubbed response
sendSuccess(res, { text: 'The web says this is good.', sources: [] }); // Stubbed response
} catch (error) {
next(error);
}
@@ -440,7 +467,7 @@ router.post(
try {
const { items } = req.body;
req.log.info(`Server-side price comparison requested for ${items.length} items.`);
res.status(200).json({
sendSuccess(res, {
text: 'This is a server-generated price comparison. Milk is cheaper at SuperMart.',
sources: [],
}); // Stubbed response
@@ -460,7 +487,7 @@ router.post(
const { items, store, userLocation } = req.body;
req.log.debug({ itemCount: items.length, storeName: store.name }, 'Trip planning requested.');
const result = await aiService.planTripWithMaps(items, store, userLocation);
res.status(200).json(result);
sendSuccess(res, result);
} catch (error) {
req.log.error({ error: errMsg(error) }, 'Error in /api/ai/plan-trip endpoint:');
next(error);
@@ -479,7 +506,7 @@ router.post(
// This endpoint is a placeholder for a future feature.
// Returning 501 Not Implemented is the correct HTTP response for this case.
req.log.info('Request received for unimplemented endpoint: /api/ai/generate-image');
res.status(501).json({ message: 'Image generation is not yet implemented.' });
sendError(res, ErrorCode.NOT_IMPLEMENTED, 'Image generation is not yet implemented.', 501);
},
);
@@ -492,7 +519,7 @@ router.post(
// This endpoint is a placeholder for a future feature.
// Returning 501 Not Implemented is the correct HTTP response for this case.
req.log.info('Request received for unimplemented endpoint: /api/ai/generate-speech');
res.status(501).json({ message: 'Speech generation is not yet implemented.' });
sendError(res, ErrorCode.NOT_IMPLEMENTED, 'Speech generation is not yet implemented.', 501);
},
);
@@ -509,7 +536,7 @@ router.post(
async (req, res, next: NextFunction) => {
try {
if (!req.file) {
return res.status(400).json({ message: 'Image file is required.' });
return sendError(res, ErrorCode.BAD_REQUEST, 'Image file is required.', 400);
}
// validateRequest transforms the cropArea JSON string into an object in req.body.
// So we use it directly instead of JSON.parse().
@@ -530,7 +557,7 @@ router.post(
req.log,
);
res.status(200).json(result);
sendSuccess(res, result);
} catch (error) {
next(error);
} finally {


@@ -137,9 +137,9 @@ describe('Auth Routes (/api/auth)', () => {
});
// Assert
expect(response.status).toBe(201);
expect(response.body.message).toBe('User registered successfully!');
expect(response.body.userprofile.user.email).toBe(newUserEmail);
expect(response.body.token).toBeTypeOf('string'); // This was a duplicate, fixed.
expect(response.body.data.message).toBe('User registered successfully!');
expect(response.body.data.userprofile.user.email).toBe(newUserEmail);
expect(response.body.data.token).toBeTypeOf('string'); // This was a duplicate, fixed.
expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
newUserEmail,
strongPassword,
@@ -171,7 +171,7 @@ describe('Auth Routes (/api/auth)', () => {
// Assert
expect(response.status).toBe(201);
expect(response.body.message).toBe('User registered successfully!');
expect(response.body.data.message).toBe('User registered successfully!');
expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
email,
strongPassword,
@@ -242,7 +242,7 @@ describe('Auth Routes (/api/auth)', () => {
interface ZodError {
message: string;
}
const errorMessages = response.body.errors?.map((e: ZodError) => e.message).join(' ');
const errorMessages = response.body.error.details?.map((e: ZodError) => e.message).join(' ');
expect(errorMessages).toMatch(/Password is too weak/i);
});
@@ -260,7 +260,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: newUserEmail, password: strongPassword });
expect(response.status).toBe(409); // 409 Conflict
expect(response.body.message).toBe('User with that email already exists.');
expect(response.body.error.message).toBe('User with that email already exists.');
});
it('should return 500 if a generic database error occurs during registration', async () => {
@@ -272,7 +272,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'fail@test.com', password: strongPassword });
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB connection lost'); // The errorHandler will forward the message
expect(response.body.error.message).toBe('DB connection lost'); // The errorHandler will forward the message
});
it('should return 400 for an invalid email format', async () => {
@@ -281,7 +281,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'not-an-email', password: strongPassword });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('A valid email is required.');
expect(response.body.error.details[0].message).toBe('A valid email is required.');
});
it('should return 400 for a password that is too short', async () => {
@@ -290,7 +290,9 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: newUserEmail, password: 'short' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('Password must be at least 8 characters long.');
expect(response.body.error.details[0].message).toBe(
'Password must be at least 8 characters long.',
);
});
});
@@ -309,7 +311,7 @@ describe('Auth Routes (/api/auth)', () => {
// Assert
expect(response.status).toBe(200);
// The API now returns a nested UserProfile object
expect(response.body.userprofile).toEqual(
expect(response.body.data.userprofile).toEqual(
expect.objectContaining({
user: expect.objectContaining({
user_id: 'user-123',
@@ -317,7 +319,7 @@ describe('Auth Routes (/api/auth)', () => {
}),
}),
);
expect(response.body.token).toBeTypeOf('string');
expect(response.body.data.token).toBeTypeOf('string');
expect(response.headers['set-cookie']).toBeDefined();
});
@@ -327,7 +329,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'test@test.com', password: 'wrong_password' });
expect(response.status).toBe(401);
expect(response.body.message).toBe('Incorrect email or password.');
expect(response.body.error.message).toBe('Incorrect email or password.');
});
it('should reject login for a locked account', async () => {
@@ -336,7 +338,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'locked@test.com', password: 'password123' });
expect(response.status).toBe(401);
expect(response.body.message).toBe(
expect(response.body.error.message).toBe(
'Account is temporarily locked. Please try again in 15 minutes.',
);
});
@@ -371,7 +373,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'dberror@test.com', password: 'any_password' });
expect(response.status).toBe(500);
expect(response.body.message).toBe('Database connection failed');
expect(response.body.error.message).toBe('Database connection failed');
});
it('should log a warning when passport authentication fails without a user', async () => {
@@ -414,7 +416,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'not-an-email', password: 'password123' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('A valid email is required.');
expect(response.body.error.details[0].message).toBe('A valid email is required.');
});
it('should return 400 if password is missing', async () => {
@@ -423,7 +425,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'test@test.com' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('Password is required.');
expect(response.body.error.details[0].message).toBe('Password is required.');
});
});
@@ -439,8 +441,8 @@ describe('Auth Routes (/api/auth)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body.message).toContain('a password reset link has been sent'); // This was a duplicate, fixed.
expect(response.body.token).toBeTypeOf('string');
expect(response.body.data.message).toContain('a password reset link has been sent'); // This was a duplicate, fixed.
expect(response.body.data.token).toBeTypeOf('string');
});
it('should return a generic success message even if the user does not exist', async () => {
@@ -451,7 +453,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'nouser@test.com' });
expect(response.status).toBe(200);
expect(response.body.message).toContain('a password reset link has been sent');
expect(response.body.data.message).toContain('a password reset link has been sent');
});
it('should return 500 if the database call fails', async () => {
@@ -469,7 +471,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ email: 'invalid-email' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('A valid email is required.');
expect(response.body.error.details[0].message).toBe('A valid email is required.');
});
});
@@ -482,7 +484,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ token: 'valid-token', newPassword: 'a-Very-Strong-Password-789!' });
expect(response.status).toBe(200);
expect(response.body.message).toBe('Password has been reset successfully.');
expect(response.body.data.message).toBe('Password has been reset successfully.');
});
it('should reject with an invalid or expired token', async () => {
@@ -493,7 +495,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ token: 'invalid-token', newPassword: 'a-Very-Strong-Password-123!' }); // Use strong password to pass validation
expect(response.status).toBe(400);
expect(response.body.message).toBe('Invalid or expired password reset token.');
expect(response.body.error.message).toBe('Invalid or expired password reset token.');
});
it('should return 400 for a weak new password', async () => {
@@ -511,7 +513,7 @@ describe('Auth Routes (/api/auth)', () => {
.send({ newPassword: 'a-Very-Strong-Password-789!' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(/Token is required|Required/i);
expect(response.body.error.details[0].message).toMatch(/Token is required|Required/i);
});
});
@@ -524,13 +526,13 @@ describe('Auth Routes (/api/auth)', () => {
.set('Cookie', 'refreshToken=valid-refresh-token');
expect(response.status).toBe(200);
expect(response.body.token).toBeTypeOf('string');
expect(response.body.data.token).toBeTypeOf('string');
});
it('should return 401 if no refresh token cookie is provided', async () => {
const response = await supertest(app).post('/api/auth/refresh-token');
expect(response.status).toBe(401);
expect(response.body.message).toBe('Refresh token not found.');
expect(response.body.error.message).toBe('Refresh token not found.');
});
it('should return 403 if refresh token is invalid', async () => {
@@ -552,7 +554,7 @@ describe('Auth Routes (/api/auth)', () => {
.post('/api/auth/refresh-token')
.set('Cookie', 'refreshToken=any-token');
expect(response.status).toBe(500);
expect(response.body.message).toMatch(/DB Error/);
expect(response.body.error.message).toMatch(/DB Error/);
});
});
@@ -568,7 +570,7 @@ describe('Auth Routes (/api/auth)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body.message).toBe('Logged out successfully.');
expect(response.body.data.message).toBe('Logged out successfully.');
// Check that the 'set-cookie' header is trying to expire the cookie
const setCookieHeader = response.headers['set-cookie'];
@@ -616,7 +618,7 @@ describe('Auth Routes (/api/auth)', () => {
});
});
describe('Rate Limiting on /forgot-password', () => {
describe('Rate Limiting on /forgot-password', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
// Arrange
const email = 'rate-limit-test@example.com';
@@ -658,7 +660,7 @@ describe('Rate Limiting on /forgot-password', () => {
expect(response.status, `Request ${i + 1} should succeed`).toBe(200);
}
});
});
});
describe('Rate Limiting on /reset-password', () => {
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {


@@ -23,6 +23,7 @@ import {
refreshTokenLimiter,
logoutLimiter,
} from '../config/rateLimiters';
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { authService } from '../services/authService';
@@ -103,13 +104,19 @@ router.post(
secure: process.env.NODE_ENV === 'production',
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days
});
return res
.status(201)
.json({ message: 'User registered successfully!', userprofile: newUserProfile, token: accessToken });
return sendSuccess(
res,
{
message: 'User registered successfully!',
userprofile: newUserProfile,
token: accessToken,
},
201,
);
} catch (error: unknown) {
if (error instanceof UniqueConstraintError) {
// If the email is a duplicate, return a 409 Conflict status.
return res.status(409).json({ message: error.message });
return sendError(res, ErrorCode.CONFLICT, error.message, 409);
}
req.log.error({ error }, `User registration route failed for email: ${email}.`);
// Pass the error to the centralized handler
@@ -143,13 +150,16 @@ router.post(
return next(err);
}
if (!user) {
return res.status(401).json({ message: info.message || 'Login failed' });
return sendError(res, ErrorCode.UNAUTHORIZED, info.message || 'Login failed', 401);
}
try {
const { rememberMe } = req.body;
const userProfile = user as UserProfile;
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(userProfile, req.log);
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(
userProfile,
req.log,
);
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
const cookieOptions = {
@@ -160,7 +170,7 @@ router.post(
res.cookie('refreshToken', refreshToken, cookieOptions);
// Return the full user profile object on login to avoid a second fetch on the client.
return res.json({ userprofile: userProfile, token: accessToken });
return sendSuccess(res, { userprofile: userProfile, token: accessToken });
} catch (tokenErr) {
const email = (user as UserProfile)?.user?.email || req.body.email;
req.log.error({ error: tokenErr }, `Failed to process login for user: ${email}`);
@@ -191,7 +201,7 @@ router.post(
message: 'If an account with that email exists, a password reset link has been sent.',
};
if (process.env.NODE_ENV === 'test' && token) responsePayload.token = token;
res.status(200).json(responsePayload);
sendSuccess(res, responsePayload);
} catch (error) {
req.log.error({ error }, `An error occurred during /forgot-password for email: ${email}`);
next(error);
@@ -214,10 +224,15 @@ router.post(
const resetSuccessful = await authService.updatePassword(token, newPassword, req.log);
if (!resetSuccessful) {
return res.status(400).json({ message: 'Invalid or expired password reset token.' });
return sendError(
res,
ErrorCode.BAD_REQUEST,
'Invalid or expired password reset token.',
400,
);
}
res.status(200).json({ message: 'Password has been reset successfully.' });
sendSuccess(res, { message: 'Password has been reset successfully.' });
} catch (error) {
req.log.error({ error }, `An error occurred during password reset.`);
next(error);
@@ -226,23 +241,27 @@ router.post(
);
// New Route to refresh the access token
router.post('/refresh-token', refreshTokenLimiter, async (req: Request, res: Response, next: NextFunction) => {
const { refreshToken } = req.cookies;
if (!refreshToken) {
return res.status(401).json({ message: 'Refresh token not found.' });
}
try {
const result = await authService.refreshAccessToken(refreshToken, req.log);
if (!result) {
return res.status(403).json({ message: 'Invalid or expired refresh token.' });
router.post(
'/refresh-token',
refreshTokenLimiter,
async (req: Request, res: Response, next: NextFunction) => {
const { refreshToken } = req.cookies;
if (!refreshToken) {
return sendError(res, ErrorCode.UNAUTHORIZED, 'Refresh token not found.', 401);
}
res.json({ token: result.accessToken });
} catch (error) {
req.log.error({ error }, 'An error occurred during /refresh-token.');
next(error);
}
});
try {
const result = await authService.refreshAccessToken(refreshToken, req.log);
if (!result) {
return sendError(res, ErrorCode.FORBIDDEN, 'Invalid or expired refresh token.', 403);
}
sendSuccess(res, { token: result.accessToken });
} catch (error) {
req.log.error({ error }, 'An error occurred during /refresh-token.');
next(error);
}
},
);
/**
* POST /api/auth/logout - Logs the user out by invalidating their refresh token.
@@ -264,7 +283,7 @@ router.post('/logout', logoutLimiter, async (req: Request, res: Response) => {
maxAge: 0, // Use maxAge for modern compatibility; Express sets 'Expires' as a fallback.
secure: process.env.NODE_ENV === 'production',
});
res.status(200).json({ message: 'Logged out successfully.' });
sendSuccess(res, { message: 'Logged out successfully.' });
});
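
The login, refresh-token, and logout handlers above keep the refresh token in an httpOnly cookie and hand the short-lived access token back inside the { data } envelope. A minimal client-side sketch of renewing the access token under those assumptions (paths and status codes come from the handlers; the rest is illustrative):

// Hedged example only: how a browser client might renew its access token.
// The refreshToken cookie is httpOnly, so the request just needs credentials: 'include';
// the handler above answers 401 when the cookie is missing and 403 when it is invalid.
async function refreshAccessToken(): Promise<string | null> {
  const res = await fetch('/api/auth/refresh-token', {
    method: 'POST',
    credentials: 'include', // send the httpOnly refreshToken cookie
  });
  if (res.status === 401 || res.status === 403) return null; // no valid refresh token
  const { data } = await res.json(); // { token } per the handler above
  return data.token as string;
}
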
// --- OAuth Routes ---


@@ -69,7 +69,11 @@ describe('Budget Routes (/api/budgets)', () => {
vi.mocked(db.budgetRepo.getSpendingByCategory).mockResolvedValue([]);
});
const app = createTestApp({ router: budgetRouter, basePath: '/api/budgets', authenticatedUser: mockUserProfile });
const app = createTestApp({
router: budgetRouter,
basePath: '/api/budgets',
authenticatedUser: mockUserProfile,
});
describe('GET /', () => {
it('should return a list of budgets for the user', async () => {
@@ -80,7 +84,7 @@ describe('Budget Routes (/api/budgets)', () => {
const response = await supertest(app).get('/api/budgets');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockBudgets);
expect(response.body.data).toEqual(mockBudgets);
expect(db.budgetRepo.getBudgetsForUser).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
expectLogger,
@@ -91,7 +95,7 @@ describe('Budget Routes (/api/budgets)', () => {
vi.mocked(db.budgetRepo.getBudgetsForUser).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/budgets');
expect(response.status).toBe(500); // The custom handler will now be used
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -114,7 +118,7 @@ describe('Budget Routes (/api/budgets)', () => {
const response = await supertest(app).post('/api/budgets').send(newBudgetData);
expect(response.status).toBe(201);
expect(response.body).toEqual(mockCreatedBudget);
expect(response.body.data).toEqual(mockCreatedBudget);
});
it('should return 400 if the user does not exist', async () => {
@@ -129,7 +133,7 @@ describe('Budget Routes (/api/budgets)', () => {
);
const response = await supertest(app).post('/api/budgets').send(newBudgetData);
expect(response.status).toBe(400);
expect(response.body.message).toBe('User not found');
expect(response.body.error.message).toBe('User not found');
});
it('should return 500 if a generic database error occurs', async () => {
@@ -142,7 +146,7 @@ describe('Budget Routes (/api/budgets)', () => {
vi.mocked(db.budgetRepo.createBudget).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).post('/api/budgets').send(newBudgetData);
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('should return 400 for invalid budget data', async () => {
@@ -156,7 +160,7 @@ describe('Budget Routes (/api/budgets)', () => {
const response = await supertest(app).post('/api/budgets').send(invalidData);
expect(response.status).toBe(400);
expect(response.body.errors).toHaveLength(4);
expect(response.body.error.details).toHaveLength(4);
});
it('should return 400 if required fields are missing', async () => {
@@ -165,7 +169,7 @@ describe('Budget Routes (/api/budgets)', () => {
.post('/api/budgets')
.send({ amount_cents: 10000, period: 'monthly', start_date: '2024-01-01' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('Budget name is required.');
expect(response.body.error.details[0].message).toBe('Budget name is required.');
});
});
@@ -183,7 +187,7 @@ describe('Budget Routes (/api/budgets)', () => {
const response = await supertest(app).put('/api/budgets/1').send(budgetUpdates);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUpdatedBudget);
expect(response.body.data).toEqual(mockUpdatedBudget);
});
it('should return 404 if the budget is not found', async () => {
@@ -192,7 +196,7 @@ describe('Budget Routes (/api/budgets)', () => {
);
const response = await supertest(app).put('/api/budgets/999').send({ amount_cents: 1 });
expect(response.status).toBe(404);
expect(response.body.message).toBe('Budget not found');
expect(response.body.error.message).toBe('Budget not found');
});
it('should return 500 if a generic database error occurs', async () => {
@@ -200,13 +204,13 @@ describe('Budget Routes (/api/budgets)', () => {
vi.mocked(db.budgetRepo.updateBudget).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).put('/api/budgets/1').send(budgetUpdates);
expect(response.status).toBe(500); // The custom handler will now be used
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('should return 400 if no update fields are provided', async () => {
const response = await supertest(app).put('/api/budgets/1').send({});
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe(
expect(response.body.error.details[0].message).toBe(
'At least one field to update must be provided.',
);
});
@@ -214,7 +218,7 @@ describe('Budget Routes (/api/budgets)', () => {
it('should return 400 for an invalid budget ID', async () => {
const response = await supertest(app).put('/api/budgets/abc').send({ amount_cents: 5000 });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(/Invalid ID|number/i);
expect(response.body.error.details[0].message).toMatch(/Invalid ID|number/i);
});
});
@@ -239,20 +243,20 @@ describe('Budget Routes (/api/budgets)', () => {
);
const response = await supertest(app).delete('/api/budgets/999');
expect(response.status).toBe(404);
expect(response.body.message).toBe('Budget not found');
expect(response.body.error.message).toBe('Budget not found');
});
it('should return 500 if a generic database error occurs', async () => {
vi.mocked(db.budgetRepo.deleteBudget).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).delete('/api/budgets/1');
expect(response.status).toBe(500); // The custom handler will now be used
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('should return 400 for an invalid budget ID', async () => {
const response = await supertest(app).delete('/api/budgets/abc');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(/Invalid ID|number/i);
expect(response.body.error.details[0].message).toMatch(/Invalid ID|number/i);
});
});
@@ -269,7 +273,7 @@ describe('Budget Routes (/api/budgets)', () => {
);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockSpendingData);
expect(response.body.data).toEqual(mockSpendingData);
});
it('should return 500 if the database call fails', async () => {
@@ -281,7 +285,7 @@ describe('Budget Routes (/api/budgets)', () => {
);
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('should return 400 for invalid date formats', async () => {
@@ -289,14 +293,14 @@ describe('Budget Routes (/api/budgets)', () => {
'/api/budgets/spending-analysis?startDate=2024/01/01&endDate=invalid',
);
expect(response.status).toBe(400);
expect(response.body.errors).toHaveLength(2);
expect(response.body.error.details).toHaveLength(2);
});
it('should return 400 if required query parameters are missing', async () => {
const response = await supertest(app).get('/api/budgets/spending-analysis');
expect(response.status).toBe(400);
// Expect errors for both startDate and endDate
expect(response.body.errors).toHaveLength(2);
expect(response.body.error.details).toHaveLength(2);
});
});
});


@@ -7,11 +7,15 @@ import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { requiredString, numericIdParam } from '../utils/zodUtils';
import { budgetUpdateLimiter } from '../config/rateLimiters';
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
const router = express.Router();
// --- Zod Schemas for Budget Routes (as per ADR-003) ---
const budgetIdParamSchema = numericIdParam('id', "Invalid ID for parameter 'id'. Must be a number.");
const budgetIdParamSchema = numericIdParam(
'id',
"Invalid ID for parameter 'id'. Must be a number.",
);
const createBudgetSchema = z.object({
body: z.object({
@@ -48,7 +52,7 @@ router.get('/', async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
try {
const budgets = await budgetRepo.getBudgetsForUser(userProfile.user.user_id, req.log);
res.json(budgets);
sendSuccess(res, budgets);
} catch (error) {
req.log.error({ error, userId: userProfile.user.user_id }, 'Error fetching budgets');
next(error);
@@ -67,7 +71,7 @@ router.post(
const { body } = req as unknown as CreateBudgetRequest;
try {
const newBudget = await budgetRepo.createBudget(userProfile.user.user_id, body, req.log);
res.status(201).json(newBudget);
sendSuccess(res, newBudget, 201);
} catch (error: unknown) {
req.log.error({ error, userId: userProfile.user.user_id, body }, 'Error creating budget');
next(error);
@@ -92,7 +96,7 @@ router.put(
body,
req.log,
);
res.json(updatedBudget);
sendSuccess(res, updatedBudget);
} catch (error: unknown) {
req.log.error(
{ error, userId: userProfile.user.user_id, budgetId: params.id },
@@ -115,7 +119,7 @@ router.delete(
const { params } = req as unknown as DeleteBudgetRequest;
try {
await budgetRepo.deleteBudget(params.id, userProfile.user.user_id, req.log);
res.status(204).send(); // No Content
sendNoContent(res);
} catch (error: unknown) {
req.log.error(
{ error, userId: userProfile.user.user_id, budgetId: params.id },
@@ -147,7 +151,7 @@ router.get(
endDate,
req.log,
);
res.json(spendingData);
sendSuccess(res, spendingData);
} catch (error) {
req.log.error(
{ error, userId: userProfile.user.user_id, startDate, endDate },


@@ -27,16 +27,14 @@ vi.mock('../services/logger.server', async () => ({
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
default: {
authenticate: vi.fn(
(_strategy, _options) => (req: Request, res: Response, next: NextFunction) => {
// If req.user is not set by the test setup, simulate unauthenticated access.
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
// If req.user is set, proceed as an authenticated user.
next();
},
),
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
// If req.user is not set by the test setup, simulate unauthenticated access.
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
// If req.user is set, proceed as an authenticated user.
next();
}),
},
}));
@@ -77,7 +75,7 @@ describe('Deals Routes (/api/users/deals)', () => {
);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockDeals);
expect(response.body.data).toEqual(mockDeals);
expect(dealsRepo.findBestPricesForWatchedItems).toHaveBeenCalledWith(
mockUser.user.user_id,
expectLogger,
@@ -96,7 +94,7 @@ describe('Deals Routes (/api/users/deals)', () => {
'/api/users/deals/best-watched-prices',
);
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching best watched item deals.',


@@ -6,6 +6,7 @@ import { dealsRepo } from '../services/db/deals.db';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { userReadLimiter } from '../config/rateLimiters';
import { sendSuccess } from '../utils/apiResponse';
const router = express.Router();
@@ -40,7 +41,7 @@ router.get(
req.log,
);
req.log.info({ dealCount: deals.length }, 'Successfully fetched best watched item deals.');
res.status(200).json(deals);
sendSuccess(res, deals);
} catch (error) {
req.log.error({ error }, 'Error fetching best watched item deals.');
next(error); // Pass errors to the global error handler


@@ -49,7 +49,7 @@ describe('Flyer Routes (/api/flyers)', () => {
const response = await supertest(app).get('/api/flyers');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyers);
expect(response.body.data).toEqual(mockFlyers);
// Also assert that the default limit and offset were used.
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 20, 0);
});
@@ -77,7 +77,7 @@ describe('Flyer Routes (/api/flyers)', () => {
vi.mocked(db.flyerRepo.getFlyers).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/flyers');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching flyers in /api/flyers:',
@@ -87,8 +87,8 @@ describe('Flyer Routes (/api/flyers)', () => {
it('should return 400 for invalid query parameters', async () => {
const response = await supertest(app).get('/api/flyers?limit=abc&offset=-5');
expect(response.status).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors.length).toBe(2);
expect(response.body.error.details).toBeDefined();
expect(response.body.error.details.length).toBe(2);
});
});
@@ -100,7 +100,7 @@ describe('Flyer Routes (/api/flyers)', () => {
const response = await supertest(app).get('/api/flyers/123');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyer);
expect(response.body.data).toEqual(mockFlyer);
expect(db.flyerRepo.getFlyerById).toHaveBeenCalledWith(123);
});
@@ -114,14 +114,14 @@ describe('Flyer Routes (/api/flyers)', () => {
const response = await supertest(app).get('/api/flyers/999');
expect(response.status).toBe(404);
expect(response.body.message).toContain('not found');
expect(response.body.error.message).toContain('not found');
});
it('should return 400 for an invalid flyer ID', async () => {
const response = await supertest(app).get('/api/flyers/abc');
expect(response.status).toBe(400);
// Zod coercion results in NaN for "abc", which triggers a type error before our custom message
expect(response.body.errors[0].message).toMatch(
expect(response.body.error.details[0].message).toMatch(
/Invalid flyer ID provided|expected number, received NaN/,
);
});
@@ -131,7 +131,7 @@ describe('Flyer Routes (/api/flyers)', () => {
vi.mocked(db.flyerRepo.getFlyerById).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/flyers/123');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError, flyerId: 123 },
'Error fetching flyer by ID:',
@@ -147,13 +147,13 @@ describe('Flyer Routes (/api/flyers)', () => {
const response = await supertest(app).get('/api/flyers/123/items');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyerItems);
expect(response.body.data).toEqual(mockFlyerItems);
});
it('should return 400 for an invalid flyer ID', async () => {
const response = await supertest(app).get('/api/flyers/abc/items');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(
expect(response.body.error.details[0].message).toMatch(
/Invalid flyer ID provided|expected number, received NaN/,
);
});
@@ -163,7 +163,7 @@ describe('Flyer Routes (/api/flyers)', () => {
vi.mocked(db.flyerRepo.getFlyerItems).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/flyers/123/items');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError, flyerId: 123 },
'Error fetching flyer items in /api/flyers/:id/items:',
@@ -181,7 +181,7 @@ describe('Flyer Routes (/api/flyers)', () => {
.send({ flyerIds: [1, 2] });
expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyerItems);
expect(response.body.data).toEqual(mockFlyerItems);
});
it('should return 400 if flyerIds is not an array', async () => {
@@ -189,7 +189,7 @@ describe('Flyer Routes (/api/flyers)', () => {
.post('/api/flyers/items/batch-fetch')
.send({ flyerIds: 'not-an-array' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toMatch(/expected array/);
expect(response.body.error.details[0].message).toMatch(/expected array/);
});
it('should return 400 if flyerIds is an empty array, as per schema validation', async () => {
@@ -198,7 +198,7 @@ describe('Flyer Routes (/api/flyers)', () => {
.send({ flyerIds: [] });
expect(response.status).toBe(400);
// Check for the specific Zod error message.
expect(response.body.errors[0].message).toBe('flyerIds must be a non-empty array.');
expect(response.body.error.details[0].message).toBe('flyerIds must be a non-empty array.');
});
it('should return 500 if the database call fails', async () => {
@@ -207,7 +207,7 @@ describe('Flyer Routes (/api/flyers)', () => {
.post('/api/flyers/items/batch-fetch')
.send({ flyerIds: [1] });
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -220,7 +220,7 @@ describe('Flyer Routes (/api/flyers)', () => {
.send({ flyerIds: [1, 2, 3] });
expect(response.status).toBe(200);
expect(response.body).toEqual({ count: 42 });
expect(response.body.data).toEqual({ count: 42 });
});
it('should return 400 if flyerIds is not an array', async () => {
@@ -237,7 +237,7 @@ describe('Flyer Routes (/api/flyers)', () => {
.post('/api/flyers/items/batch-count')
.send({ flyerIds: [] });
expect(response.status).toBe(200);
expect(response.body).toEqual({ count: 0 });
expect(response.body.data).toEqual({ count: 0 });
});
it('should return 500 if the database call fails', async () => {
@@ -246,7 +246,7 @@ describe('Flyer Routes (/api/flyers)', () => {
.post('/api/flyers/items/batch-count')
.send({ flyerIds: [1] });
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -317,7 +317,7 @@ describe('Flyer Routes (/api/flyers)', () => {
const response = await supertest(app)
.get('/api/flyers')
.set('X-Test-Rate-Limit-Enable', 'true');
expect(response.status).toBe(200);
expect(response.headers).toHaveProperty('ratelimit-limit');
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
@@ -350,7 +350,7 @@ describe('Flyer Routes (/api/flyers)', () => {
it('should apply trackingLimiter to POST /items/:itemId/track', async () => {
// Mock fire-and-forget promise
vi.mocked(db.flyerRepo.trackFlyerItemInteraction).mockResolvedValue(undefined);
const response = await supertest(app)
.post('/api/flyers/items/1/track')
.set('X-Test-Rate-Limit-Enable', 'true')


@@ -4,11 +4,8 @@ import * as db from '../services/db/index.db';
import { z } from 'zod';
import { validateRequest } from '../middleware/validation.middleware';
import { optionalNumeric } from '../utils/zodUtils';
import {
publicReadLimiter,
batchLimiter,
trackingLimiter,
} from '../config/rateLimiters';
import { publicReadLimiter, batchLimiter, trackingLimiter } from '../config/rateLimiters';
import { sendSuccess } from '../utils/apiResponse';
const router = Router();
@@ -53,34 +50,44 @@ const trackItemSchema = z.object({
/**
* GET /api/flyers - Get a paginated list of all flyers.
*/
router.get('/', publicReadLimiter, validateRequest(getFlyersSchema), async (req, res, next): Promise<void> => {
try {
// The `validateRequest` middleware ensures `req.query` is valid.
// We parse it here to apply Zod's coercions (string to number) and defaults.
const { limit, offset } = getFlyersSchema.shape.query.parse(req.query);
router.get(
'/',
publicReadLimiter,
validateRequest(getFlyersSchema),
async (req, res, next): Promise<void> => {
try {
// The `validateRequest` middleware ensures `req.query` is valid.
// We parse it here to apply Zod's coercions (string to number) and defaults.
const { limit, offset } = getFlyersSchema.shape.query.parse(req.query);
const flyers = await db.flyerRepo.getFlyers(req.log, limit, offset);
res.json(flyers);
} catch (error) {
req.log.error({ error }, 'Error fetching flyers in /api/flyers:');
next(error);
}
});
const flyers = await db.flyerRepo.getFlyers(req.log, limit, offset);
sendSuccess(res, flyers);
} catch (error) {
req.log.error({ error }, 'Error fetching flyers in /api/flyers:');
next(error);
}
},
);
/**
* GET /api/flyers/:id - Get a single flyer by its ID.
*/
router.get('/:id', publicReadLimiter, validateRequest(flyerIdParamSchema), async (req, res, next): Promise<void> => {
try {
// Explicitly parse to get the coerced number type for `id`.
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
const flyer = await db.flyerRepo.getFlyerById(id);
res.json(flyer);
} catch (error) {
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer by ID:');
next(error);
}
});
router.get(
'/:id',
publicReadLimiter,
validateRequest(flyerIdParamSchema),
async (req, res, next): Promise<void> => {
try {
// Explicitly parse to get the coerced number type for `id`.
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
const flyer = await db.flyerRepo.getFlyerById(id);
sendSuccess(res, flyer);
} catch (error) {
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer by ID:');
next(error);
}
},
);
/**
* GET /api/flyers/:id/items - Get all items for a specific flyer.
@@ -90,14 +97,16 @@ router.get(
publicReadLimiter,
validateRequest(flyerIdParamSchema),
async (req, res, next): Promise<void> => {
type GetFlyerByIdRequest = z.infer<typeof flyerIdParamSchema>;
try {
// Explicitly parse to get the coerced number type for `id`.
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
const items = await db.flyerRepo.getFlyerItems(id, req.log);
res.json(items);
sendSuccess(res, items);
} catch (error) {
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer items in /api/flyers/:id/items:');
req.log.error(
{ error, flyerId: req.params.id },
'Error fetching flyer items in /api/flyers/:id/items:',
);
next(error);
}
},
@@ -117,7 +126,7 @@ router.post(
// No re-parsing needed here as `validateRequest` has already ensured the body shape,
// and `express.json()` has parsed it. There's no type coercion to apply.
const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
res.json(items);
sendSuccess(res, items);
} catch (error) {
req.log.error({ error }, 'Error fetching batch flyer items');
next(error);
@@ -139,7 +148,7 @@ router.post(
// The schema ensures flyerIds is an array of numbers.
// The `?? []` was redundant as `validateRequest` would have already caught a missing `flyerIds`.
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds, req.log);
res.json({ count });
sendSuccess(res, { count });
} catch (error) {
req.log.error({ error }, 'Error counting batch flyer items');
next(error);
@@ -150,22 +159,27 @@ router.post(
/**
* POST /api/flyers/items/:itemId/track - Tracks a user interaction with a flyer item.
*/
router.post('/items/:itemId/track', trackingLimiter, validateRequest(trackItemSchema), (req, res, next): void => {
try {
// Explicitly parse to get coerced types.
const { params, body } = trackItemSchema.parse({ params: req.params, body: req.body });
router.post(
'/items/:itemId/track',
trackingLimiter,
validateRequest(trackItemSchema),
(req, res, next): void => {
try {
// Explicitly parse to get coerced types.
const { params, body } = trackItemSchema.parse({ params: req.params, body: req.body });
// Fire-and-forget: we don't await the tracking call to avoid delaying the response.
// We add a .catch to log any potential errors without crashing the server process.
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log).catch((error) => {
req.log.error({ error, itemId: params.itemId }, 'Flyer item interaction tracking failed');
});
// Fire-and-forget: we don't await the tracking call to avoid delaying the response.
// We add a .catch to log any potential errors without crashing the server process.
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log).catch((error) => {
req.log.error({ error, itemId: params.itemId }, 'Flyer item interaction tracking failed');
});
res.status(202).send();
} catch (error) {
// This will catch Zod parsing errors if they occur.
next(error);
}
});
sendSuccess(res, { message: 'Tracking accepted' }, 202);
} catch (error) {
// This will catch Zod parsing errors if they occur.
next(error);
}
},
);
export default router;
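
Every flyer handler above now returns through sendSuccess from src/utils/apiResponse instead of calling res.json directly, and the updated tests read response.body.data, response.body.error.message, response.body.error.details and response.body.meta.requestId. The helper itself is not included in this diff, so the sketch below is only an inferred minimal shape of that envelope, matching the fields the tests assert on; the requestId source, the ErrorCode string values and anything beyond those asserted fields are assumptions, not the committed implementation.

// Assumed shape of src/utils/apiResponse.ts, inferred from the test assertions in this
// change set. The committed implementation is not shown in this diff.
import type { Response } from 'express';

export enum ErrorCode {
  INTERNAL_ERROR = 'INTERNAL_ERROR',
  SERVICE_UNAVAILABLE = 'SERVICE_UNAVAILABLE',
}

// Success envelope: tests assert on body.success, body.data and body.meta.requestId.
export function sendSuccess<T>(res: Response, data: T, status = 200): Response {
  return res.status(status).json({
    success: true,
    data,
    meta: { requestId: res.locals.requestId ?? 'unknown' }, // requestId source is a guess
  });
}

// Error envelope: tests assert on body.error.message and body.error.details.
export function sendError(
  res: Response,
  code: ErrorCode,
  message: string,
  status = 500,
  details?: unknown,
): Response {
  return res.status(status).json({
    success: false,
    error: { code, message, details },
    meta: { requestId: res.locals.requestId ?? 'unknown' },
  });
}

Under this envelope the tracking endpoint's previous empty res.status(202).send() becomes sendSuccess(res, { message: 'Tracking accepted' }, 202), which keeps the fire-and-forget behaviour (the repository call is still not awaited) while giving clients a 202 body in the same shape as every other route.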

View File

@@ -8,7 +8,7 @@ import {
createMockUserAchievement,
createMockLeaderboardUser,
} from '../tests/utils/mockFactories';
import { mockLogger } from '../tests/utils/mockLogger';
import '../tests/utils/mockLogger';
import { ForeignKeyConstraintError } from '../services/db/errors.db';
import { createTestApp } from '../tests/utils/createTestApp';
@@ -98,7 +98,7 @@ describe('Gamification Routes (/api/achievements)', () => {
const response = await supertest(unauthenticatedApp).get('/api/achievements');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockAchievements);
expect(response.body.data).toEqual(mockAchievements);
expect(db.gamificationRepo.getAllAchievements).toHaveBeenCalledWith(expectLogger);
});
@@ -108,7 +108,7 @@ describe('Gamification Routes (/api/achievements)', () => {
const response = await supertest(unauthenticatedApp).get('/api/achievements');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Connection Failed');
expect(response.body.error.message).toBe('DB Connection Failed');
});
it('should return 400 if awarding an achievement to a non-existent user', async () => {
@@ -125,7 +125,7 @@ describe('Gamification Routes (/api/achievements)', () => {
.post('/api/achievements/award')
.send({ userId: 'non-existent', achievementName: 'Test Award' });
expect(response.status).toBe(400);
expect(response.body.message).toBe('User not found');
expect(response.body.error.message).toBe('User not found');
});
});
@@ -150,7 +150,7 @@ describe('Gamification Routes (/api/achievements)', () => {
const response = await supertest(authenticatedApp).get('/api/achievements/me');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockUserAchievements);
expect(response.body.data).toEqual(mockUserAchievements);
expect(db.gamificationRepo.getUserAchievements).toHaveBeenCalledWith(
'user-123',
expectLogger,
@@ -167,7 +167,7 @@ describe('Gamification Routes (/api/achievements)', () => {
vi.mocked(db.gamificationRepo.getUserAchievements).mockRejectedValue(dbError);
const response = await supertest(authenticatedApp).get('/api/achievements/me');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
});
@@ -207,7 +207,7 @@ describe('Gamification Routes (/api/achievements)', () => {
const response = await supertest(adminApp).post('/api/achievements/award').send(awardPayload);
expect(response.status).toBe(200);
expect(response.body.message).toContain('Successfully awarded');
expect(response.body.data.message).toContain('Successfully awarded');
expect(db.gamificationRepo.awardAchievement).toHaveBeenCalledTimes(1);
expect(db.gamificationRepo.awardAchievement).toHaveBeenCalledWith(
awardPayload.userId,
@@ -226,7 +226,7 @@ describe('Gamification Routes (/api/achievements)', () => {
const response = await supertest(adminApp).post('/api/achievements/award').send(awardPayload);
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('should return 400 for an invalid userId or achievementName', async () => {
@@ -240,7 +240,7 @@ describe('Gamification Routes (/api/achievements)', () => {
.post('/api/achievements/award')
.send({ userId: '', achievementName: '' });
expect(response.status).toBe(400);
expect(response.body.errors).toHaveLength(2);
expect(response.body.error.details).toHaveLength(2);
});
it('should return 400 if userId or achievementName are missing', async () => {
@@ -254,13 +254,13 @@ describe('Gamification Routes (/api/achievements)', () => {
.post('/api/achievements/award')
.send({ achievementName: 'Test Award' });
expect(response1.status).toBe(400);
expect(response1.body.errors[0].message).toBe('userId is required.');
expect(response1.body.error.details[0].message).toBe('userId is required.');
const response2 = await supertest(adminApp)
.post('/api/achievements/award')
.send({ userId: 'user-789' });
expect(response2.status).toBe(400);
expect(response2.body.errors[0].message).toBe('achievementName is required.');
expect(response2.body.error.details[0].message).toBe('achievementName is required.');
});
it('should return 400 if awarding an achievement to a non-existent user', async () => {
@@ -277,7 +277,7 @@ describe('Gamification Routes (/api/achievements)', () => {
.post('/api/achievements/award')
.send({ userId: 'non-existent', achievementName: 'Test Award' });
expect(response.status).toBe(400);
expect(response.body.message).toBe('User not found');
expect(response.body.error.message).toBe('User not found');
});
});
@@ -298,7 +298,7 @@ describe('Gamification Routes (/api/achievements)', () => {
);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockLeaderboard);
expect(response.body.data).toEqual(mockLeaderboard);
expect(db.gamificationRepo.getLeaderboard).toHaveBeenCalledWith(5, expect.anything());
});
@@ -316,7 +316,7 @@ describe('Gamification Routes (/api/achievements)', () => {
const response = await supertest(unauthenticatedApp).get('/api/achievements/leaderboard');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockLeaderboard);
expect(response.body.data).toEqual(mockLeaderboard);
expect(db.gamificationRepo.getLeaderboard).toHaveBeenCalledWith(10, expect.anything());
});
@@ -324,7 +324,7 @@ describe('Gamification Routes (/api/achievements)', () => {
vi.mocked(db.gamificationRepo.getLeaderboard).mockRejectedValue(new Error('DB Error'));
const response = await supertest(unauthenticatedApp).get('/api/achievements/leaderboard');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
});
it('should return 400 for an invalid limit parameter', async () => {
@@ -332,8 +332,8 @@ describe('Gamification Routes (/api/achievements)', () => {
'/api/achievements/leaderboard?limit=100',
);
expect(response.status).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors[0].message).toMatch(/less than or equal to 50|Too big/i);
expect(response.body.error.details).toBeDefined();
expect(response.body.error.details[0].message).toMatch(/less than or equal to 50|Too big/i);
});
});

View File

@@ -13,11 +13,8 @@ import { validateRequest } from '../middleware/validation.middleware';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { requiredString, optionalNumeric } from '../utils/zodUtils';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import {
publicReadLimiter,
userReadLimiter,
adminTriggerLimiter,
} from '../config/rateLimiters';
import { publicReadLimiter, userReadLimiter, adminTriggerLimiter } from '../config/rateLimiters';
import { sendSuccess } from '../utils/apiResponse';
const router = express.Router();
const adminGamificationRouter = express.Router(); // Create a new router for admin-only routes.
@@ -48,7 +45,7 @@ const awardAchievementSchema = z.object({
router.get('/', publicReadLimiter, async (req, res, next: NextFunction) => {
try {
const achievements = await gamificationService.getAllAchievements(req.log);
res.json(achievements);
sendSuccess(res, achievements);
} catch (error) {
req.log.error({ error }, 'Error fetching all achievements in /api/achievements:');
next(error);
@@ -69,7 +66,7 @@ router.get(
// We parse it here to apply Zod's coercions (string to number) and defaults.
const { limit } = leaderboardQuerySchema.parse(req.query);
const leaderboard = await gamificationService.getLeaderboard(limit!, req.log);
res.json(leaderboard);
sendSuccess(res, leaderboard);
} catch (error) {
req.log.error({ error }, 'Error fetching leaderboard:');
next(error);
@@ -94,7 +91,7 @@ router.get(
userProfile.user.user_id,
req.log,
);
res.json(userAchievements);
sendSuccess(res, userAchievements);
} catch (error) {
req.log.error(
{ error, userId: userProfile.user.user_id },
@@ -124,11 +121,9 @@ adminGamificationRouter.post(
const { body } = req as unknown as AwardAchievementRequest;
try {
await gamificationService.awardAchievement(body.userId, body.achievementName, req.log);
res
.status(200)
.json({
message: `Successfully awarded '${body.achievementName}' to user ${body.userId}.`,
});
sendSuccess(res, {
message: `Successfully awarded '${body.achievementName}' to user ${body.userId}.`,
});
} catch (error) {
next(error);
}

View File

@@ -64,7 +64,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.text).toBe('pong');
expect(response.body.data.message).toBe('pong');
});
});
@@ -78,10 +78,8 @@ describe('Health Routes (/api/health)', () => {
// Assert: Check for the correct status and response body.
expect(response.status).toBe(200);
expect(response.body).toEqual({
success: true,
message: 'Redis connection is healthy.',
});
expect(response.body.success).toBe(true);
expect(response.body.data.message).toBe('Redis connection is healthy.');
});
it('should return 500 if Redis ping fails', async () => {
@@ -94,7 +92,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(500);
expect(response.body.message).toBe('Connection timed out');
expect(response.body.error.message).toBe('Connection timed out');
});
it('should return 500 if Redis ping returns an unexpected response', async () => {
@@ -106,7 +104,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(500);
expect(response.body.message).toContain('Unexpected Redis ping response: OK');
expect(response.body.error.message).toContain('Unexpected Redis ping response: OK');
});
});
@@ -122,9 +120,9 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body.currentTime).toBe('2024-03-15T10:30:00.000Z');
expect(response.body.year).toBe(2024);
expect(response.body.week).toBe(11);
expect(response.body.data.currentTime).toBe('2024-03-15T10:30:00.000Z');
expect(response.body.data.year).toBe(2024);
expect(response.body.data.week).toBe(11);
});
});
@@ -139,7 +137,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body.success).toBe(true);
expect(response.body.message).toBe('All required database tables exist.');
expect(response.body.data.message).toBe('All required database tables exist.');
});
it('should return 500 if tables are missing', async () => {
@@ -149,7 +147,9 @@ describe('Health Routes (/api/health)', () => {
const response = await supertest(app).get('/api/health/db-schema');
expect(response.status).toBe(500);
expect(response.body.message).toContain('Missing tables: missing_table_1, missing_table_2');
expect(response.body.error.message).toContain(
'Missing tables: missing_table_1, missing_table_2',
);
// The error is passed to next(), so the global error handler would log it, not the route handler itself.
});
@@ -161,10 +161,12 @@ describe('Health Routes (/api/health)', () => {
const response = await supertest(app).get('/api/health/db-schema');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB connection failed'); // This is the message from the original error
expect(response.body.stack).toBeDefined();
expect(response.body.errorId).toEqual(expect.any(String));
console.log('[DEBUG] health.routes.test.ts: Verifying logger.error for DB schema check failure');
expect(response.body.error.message).toBe('DB connection failed'); // This is the message from the original error
expect(response.body.error.details.stack).toBeDefined();
expect(response.body.meta.requestId).toEqual(expect.any(String));
console.log(
'[DEBUG] health.routes.test.ts: Verifying logger.error for DB schema check failure',
);
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(Error),
@@ -181,8 +183,8 @@ describe('Health Routes (/api/health)', () => {
const response = await supertest(app).get('/api/health/db-schema');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB connection failed'); // This is the message from the original error
expect(response.body.errorId).toEqual(expect.any(String));
expect(response.body.error.message).toBe('DB connection failed'); // This is the message from the original error
expect(response.body.meta.requestId).toEqual(expect.any(String));
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.objectContaining({ message: 'DB connection failed' }),
@@ -203,7 +205,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body.success).toBe(true);
expect(response.body.message).toContain('is accessible and writable');
expect(response.body.data.message).toContain('is accessible and writable');
});
it('should return 500 if storage is not accessible or writable', async () => {
@@ -216,7 +218,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(500);
expect(response.body.message).toContain('Storage check failed.');
expect(response.body.error.message).toContain('Storage check failed.');
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(Error),
@@ -235,7 +237,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(500);
expect(response.body.message).toContain('Storage check failed.');
expect(response.body.error.message).toContain('Storage check failed.');
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(Error),
@@ -260,7 +262,7 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(200);
expect(response.body.success).toBe(true);
expect(response.body.message).toContain('Pool Status: 10 total, 8 idle, 1 waiting.');
expect(response.body.data.message).toContain('Pool Status: 10 total, 8 idle, 1 waiting.');
});
it('should return 500 for an unhealthy pool status', async () => {
@@ -277,8 +279,8 @@ describe('Health Routes (/api/health)', () => {
// Assert
expect(response.status).toBe(500);
expect(response.body.success).toBe(false);
expect(response.body.message).toContain('Pool may be under stress.');
expect(response.body.message).toContain('Pool Status: 20 total, 5 idle, 15 waiting.');
expect(response.body.error.message).toContain('Pool may be under stress.');
expect(response.body.error.message).toContain('Pool Status: 20 total, 5 idle, 15 waiting.');
expect(logger.warn).toHaveBeenCalledWith(
'Database pool health check shows high waiting count: 15',
);
@@ -295,8 +297,8 @@ describe('Health Routes (/api/health)', () => {
const response = await supertest(app).get('/api/health/db-pool');
expect(response.status).toBe(500);
expect(response.body.message).toBe('Pool is not initialized'); // This is the message from the original error
expect(response.body.errorId).toEqual(expect.any(String));
expect(response.body.error.message).toBe('Pool is not initialized'); // This is the message from the original error
expect(response.body.meta.requestId).toEqual(expect.any(String));
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(Error),
@@ -315,9 +317,9 @@ describe('Health Routes (/api/health)', () => {
const response = await supertest(app).get('/api/health/db-pool');
expect(response.status).toBe(500);
expect(response.body.message).toBe('Pool is not initialized'); // This is the message from the original error
expect(response.body.stack).toBeDefined();
expect(response.body.errorId).toEqual(expect.any(String));
expect(response.body.error.message).toBe('Pool is not initialized'); // This is the message from the original error
expect(response.body.error.details.stack).toBeDefined();
expect(response.body.meta.requestId).toEqual(expect.any(String));
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.objectContaining({ message: 'Pool is not initialized' }),
@@ -334,10 +336,12 @@ describe('Health Routes (/api/health)', () => {
const response = await supertest(app).get('/api/health/redis');
expect(response.status).toBe(500);
expect(response.body.message).toBe('Connection timed out');
expect(response.body.stack).toBeDefined();
expect(response.body.errorId).toEqual(expect.any(String));
console.log('[DEBUG] health.routes.test.ts: Checking if logger.error was called with the correct pattern');
expect(response.body.error.message).toBe('Connection timed out');
expect(response.body.error.details.stack).toBeDefined();
expect(response.body.meta.requestId).toEqual(expect.any(String));
console.log(
'[DEBUG] health.routes.test.ts: Checking if logger.error was called with the correct pattern',
);
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(Error),
@@ -352,9 +356,9 @@ describe('Health Routes (/api/health)', () => {
const response = await supertest(app).get('/api/health/redis');
expect(response.status).toBe(500);
expect(response.body.message).toContain('Unexpected Redis ping response: OK');
expect(response.body.stack).toBeDefined();
expect(response.body.errorId).toEqual(expect.any(String));
expect(response.body.error.message).toContain('Unexpected Redis ping response: OK');
expect(response.body.error.details.stack).toBeDefined();
expect(response.body.meta.requestId).toEqual(expect.any(String));
expect(mockLogger.error).toHaveBeenCalledWith(
expect.objectContaining({
err: expect.any(Error),

View File

@@ -1,21 +1,126 @@
// src/routes/health.routes.ts
// All route handlers now use req.log (request-scoped logger) as per ADR-004
/**
* @file Health check endpoints implementing ADR-020: Health Checks and Liveness/Readiness Probes.
*
* Provides endpoints for:
* - Liveness probe (/live) - Is the server process running?
* - Readiness probe (/ready) - Is the server ready to accept traffic?
* - Individual service health checks (db, redis, storage)
*/
import { Router, Request, Response, NextFunction } from 'express';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { z } from 'zod';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { checkTablesExist, getPoolStatus } from '../services/db/connection.db';
// Removed: import { logger } from '../services/logger.server';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { checkTablesExist, getPoolStatus, getPool } from '../services/db/connection.db';
import { connection as redisConnection } from '../services/queueService.server';
import fs from 'node:fs/promises';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { getSimpleWeekAndYear } from '../utils/dateUtils';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { validateRequest } from '../middleware/validation.middleware';
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
const router = Router();
// --- Types for Health Check Response ---
interface ServiceHealth {
status: 'healthy' | 'degraded' | 'unhealthy';
latency?: number;
message?: string;
details?: Record<string, unknown>;
}
interface ReadinessResponse {
status: 'healthy' | 'degraded' | 'unhealthy';
timestamp: string;
uptime: number;
services: {
database: ServiceHealth;
redis: ServiceHealth;
storage: ServiceHealth;
};
}
// --- Helper Functions ---
/**
* Checks database connectivity with timing.
*/
async function checkDatabase(): Promise<ServiceHealth> {
const start = Date.now();
try {
const pool = getPool();
await pool.query('SELECT 1');
const latency = Date.now() - start;
const poolStatus = getPoolStatus();
// Consider degraded if waiting connections > 3
const status = poolStatus.waitingCount > 3 ? 'degraded' : 'healthy';
return {
status,
latency,
details: {
totalConnections: poolStatus.totalCount,
idleConnections: poolStatus.idleCount,
waitingConnections: poolStatus.waitingCount,
},
};
} catch (error) {
return {
status: 'unhealthy',
latency: Date.now() - start,
message: error instanceof Error ? error.message : 'Database connection failed',
};
}
}
/**
* Checks Redis connectivity with timing.
*/
async function checkRedis(): Promise<ServiceHealth> {
const start = Date.now();
try {
const reply = await redisConnection.ping();
const latency = Date.now() - start;
if (reply === 'PONG') {
return { status: 'healthy', latency };
}
return {
status: 'unhealthy',
latency,
message: `Unexpected ping response: ${reply}`,
};
} catch (error) {
return {
status: 'unhealthy',
latency: Date.now() - start,
message: error instanceof Error ? error.message : 'Redis connection failed',
};
}
}
/**
* Checks storage accessibility.
*/
async function checkStorage(): Promise<ServiceHealth> {
const storagePath =
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
const start = Date.now();
try {
await fs.access(storagePath, fs.constants.W_OK);
return {
status: 'healthy',
latency: Date.now() - start,
details: { path: storagePath },
};
} catch {
return {
status: 'unhealthy',
latency: Date.now() - start,
message: `Storage not accessible: ${storagePath}`,
};
}
}
// --- Zod Schemas for Health Routes (as per ADR-003) ---
// These routes do not expect any input, so we define empty schemas
// to maintain a consistent validation pattern across the application.
@@ -25,7 +130,105 @@ const emptySchema = z.object({});
* GET /api/health/ping - A simple endpoint to check if the server is responsive.
*/
router.get('/ping', validateRequest(emptySchema), (_req: Request, res: Response) => {
res.status(200).send('pong');
return sendSuccess(res, { message: 'pong' });
});
// =============================================================================
// KUBERNETES PROBES (ADR-020)
// =============================================================================
/**
* GET /api/health/live - Liveness probe for container orchestration.
*
* Returns 200 OK if the server process is running.
* If this fails, the orchestrator should restart the container.
*
* This endpoint is intentionally simple and has no external dependencies.
* It only checks that the Node.js process can handle HTTP requests.
*/
router.get('/live', validateRequest(emptySchema), (_req: Request, res: Response) => {
return sendSuccess(res, {
status: 'ok',
timestamp: new Date().toISOString(),
});
});
/**
* GET /api/health/ready - Readiness probe for container orchestration.
*
* Returns 200 OK if the server is ready to accept traffic.
* Checks all critical dependencies (database, Redis).
* If this fails, the orchestrator should remove the container from the load balancer.
*
* Response includes detailed status of each service for debugging.
*/
router.get('/ready', validateRequest(emptySchema), async (req: Request, res: Response) => {
// Check all services in parallel for speed
const [database, redis, storage] = await Promise.all([
checkDatabase(),
checkRedis(),
checkStorage(),
]);
// Determine overall status
// - 'healthy' if all critical services (db, redis) are healthy
// - 'degraded' if any service is degraded but none unhealthy
// - 'unhealthy' if any critical service is unhealthy
const criticalServices = [database, redis];
const allServices = [database, redis, storage];
let overallStatus: 'healthy' | 'degraded' | 'unhealthy' = 'healthy';
if (criticalServices.some((s) => s.status === 'unhealthy')) {
overallStatus = 'unhealthy';
} else if (allServices.some((s) => s.status === 'degraded')) {
overallStatus = 'degraded';
}
const response: ReadinessResponse = {
status: overallStatus,
timestamp: new Date().toISOString(),
uptime: process.uptime(),
services: {
database,
redis,
storage,
},
};
// Return appropriate HTTP status code
// 200 = healthy or degraded (can still handle traffic)
// 503 = unhealthy (should not receive traffic)
if (overallStatus === 'unhealthy') {
return sendError(res, ErrorCode.SERVICE_UNAVAILABLE, 'Service unhealthy', 503, response);
}
return sendSuccess(res, response);
});
/**
* GET /api/health/startup - Startup probe for container orchestration.
*
* Similar to readiness but used during container startup.
* The orchestrator will not send liveness/readiness probes until this succeeds.
* This allows for longer initialization times without triggering restarts.
*/
router.get('/startup', validateRequest(emptySchema), async (req: Request, res: Response) => {
// For startup, we only check database connectivity
// Redis and storage can be checked later in readiness
const database = await checkDatabase();
if (database.status === 'unhealthy') {
return sendError(res, ErrorCode.SERVICE_UNAVAILABLE, 'Waiting for database connection', 503, {
status: 'starting',
database,
});
}
return sendSuccess(res, {
status: 'started',
timestamp: new Date().toISOString(),
database,
});
});
/**
@@ -43,13 +246,14 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
new Error(`Database schema check failed. Missing tables: ${missingTables.join(', ')}.`),
);
}
return res.status(200).json({ success: true, message: 'All required database tables exist.' });
return sendSuccess(res, { message: 'All required database tables exist.' });
} catch (error: unknown) {
if (error instanceof Error) {
return next(error);
}
const message =
(error as any)?.message || 'An unknown error occurred during DB schema check.';
(error as { message?: string })?.message ||
'An unknown error occurred during DB schema check.';
return next(new Error(message));
}
});
@@ -59,16 +263,14 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
* This is important for features like file uploads.
*/
router.get('/storage', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
const storagePath = process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
const storagePath =
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
try {
await fs.access(storagePath, fs.constants.W_OK); // Use fs.promises
return res
.status(200)
.json({
success: true,
message: `Storage directory '${storagePath}' is accessible and writable.`,
});
} catch (error: unknown) {
return sendSuccess(res, {
message: `Storage directory '${storagePath}' is accessible and writable.`,
});
} catch {
next(
new Error(
`Storage check failed. Ensure the directory '${storagePath}' exists and is writable by the application.`,
@@ -91,19 +293,24 @@ router.get(
const message = `Pool Status: ${status.totalCount} total, ${status.idleCount} idle, ${status.waitingCount} waiting.`;
if (isHealthy) {
return res.status(200).json({ success: true, message });
return sendSuccess(res, { message, ...status });
} else {
req.log.warn(`Database pool health check shows high waiting count: ${status.waitingCount}`);
return res
.status(500)
.json({ success: false, message: `Pool may be under stress. ${message}` });
return sendError(
res,
ErrorCode.INTERNAL_ERROR,
`Pool may be under stress. ${message}`,
500,
status,
);
}
} catch (error: unknown) {
if (error instanceof Error) {
return next(error);
}
const message =
(error as any)?.message || 'An unknown error occurred during DB pool check.';
(error as { message?: string })?.message ||
'An unknown error occurred during DB pool check.';
return next(new Error(message));
}
},
@@ -116,7 +323,7 @@ router.get(
router.get('/time', validateRequest(emptySchema), (req: Request, res: Response) => {
const now = new Date();
const { year, week } = getSimpleWeekAndYear(now);
res.json({
return sendSuccess(res, {
currentTime: now.toISOString(),
year,
week,
@@ -133,7 +340,7 @@ router.get(
try {
const reply = await redisConnection.ping();
if (reply === 'PONG') {
return res.status(200).json({ success: true, message: 'Redis connection is healthy.' });
return sendSuccess(res, { message: 'Redis connection is healthy.' });
}
throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
} catch (error: unknown) {
@@ -141,7 +348,8 @@ router.get(
return next(error);
}
const message =
(error as any)?.message || 'An unknown error occurred during Redis health check.';
(error as { message?: string })?.message ||
'An unknown error occurred during Redis health check.';
return next(new Error(message));
}
},
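
Taken together, the probe handlers above give /api/health/live a dependency-free 200, /api/health/ready a 200 for healthy or degraded and a 503 once any critical service is unhealthy, and /api/health/startup a database-only gate. A minimal sketch of a client that waits on the readiness probe, as a CI step or compose healthcheck might, follows; the script path, base URL, port, timing values and the reliance on Node 18+ global fetch are assumptions and not part of this change.

// scripts/wait-for-ready.ts (hypothetical helper; not part of this commit)
// Polls the readiness probe until it returns 200, then exits 0.
const BASE_URL = process.env.HEALTH_BASE_URL ?? 'http://localhost:3000'; // assumed port
const DEADLINE_MS = 120_000;
const INTERVAL_MS = 2_000;

async function waitForReady(): Promise<void> {
  const start = Date.now();
  while (Date.now() - start < DEADLINE_MS) {
    try {
      const res = await fetch(`${BASE_URL}/api/health/ready`);
      if (res.ok) {
        // 200 covers both 'healthy' and 'degraded' per the handler above.
        return;
      }
      const body = await res.json().catch(() => undefined);
      console.warn(`Not ready (HTTP ${res.status})`, body?.error?.message ?? '');
    } catch (err) {
      console.warn('Readiness probe unreachable:', err instanceof Error ? err.message : err);
    }
    await new Promise((r) => setTimeout(r, INTERVAL_MS));
  }
  throw new Error('Service did not become ready before the deadline');
}

waitForReady().then(
  () => process.exit(0),
  (err) => {
    console.error(err);
    process.exit(1);
  },
);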

View File

@@ -40,18 +40,22 @@ describe('Personalization Routes (/api/personalization)', () => {
const mockItems = [createMockMasterGroceryItem({ master_grocery_item_id: 1, name: 'Milk' })];
vi.mocked(db.personalizationRepo.getAllMasterItems).mockResolvedValue(mockItems);
const response = await supertest(app).get('/api/personalization/master-items').set('x-test-rate-limit-enable', 'true');
const response = await supertest(app)
.get('/api/personalization/master-items')
.set('x-test-rate-limit-enable', 'true');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockItems);
expect(response.body.data).toEqual(mockItems);
});
it('should return 500 if the database call fails', async () => {
const dbError = new Error('DB Error');
vi.mocked(db.personalizationRepo.getAllMasterItems).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/personalization/master-items').set('x-test-rate-limit-enable', 'true');
const response = await supertest(app)
.get('/api/personalization/master-items')
.set('x-test-rate-limit-enable', 'true');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching master items in /api/personalization/master-items:',
@@ -67,7 +71,7 @@ describe('Personalization Routes (/api/personalization)', () => {
const response = await supertest(app).get('/api/personalization/dietary-restrictions');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockRestrictions);
expect(response.body.data).toEqual(mockRestrictions);
});
it('should return 500 if the database call fails', async () => {
@@ -75,7 +79,7 @@ describe('Personalization Routes (/api/personalization)', () => {
vi.mocked(db.personalizationRepo.getDietaryRestrictions).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/personalization/dietary-restrictions');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching dietary restrictions in /api/personalization/dietary-restrictions:',
@@ -91,7 +95,7 @@ describe('Personalization Routes (/api/personalization)', () => {
const response = await supertest(app).get('/api/personalization/appliances');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockAppliances);
expect(response.body.data).toEqual(mockAppliances);
});
it('should return 500 if the database call fails', async () => {
@@ -99,7 +103,7 @@ describe('Personalization Routes (/api/personalization)', () => {
vi.mocked(db.personalizationRepo.getAppliances).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/personalization/appliances');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching appliances in /api/personalization/appliances:',

View File

@@ -4,6 +4,7 @@ import { z } from 'zod';
import * as db from '../services/db/index.db';
import { validateRequest } from '../middleware/validation.middleware';
import { publicReadLimiter } from '../config/rateLimiters';
import { sendSuccess } from '../utils/apiResponse';
const router = Router();
@@ -28,7 +29,7 @@ router.get(
res.set('Cache-Control', 'public, max-age=3600');
const masterItems = await db.personalizationRepo.getAllMasterItems(req.log);
res.json(masterItems);
sendSuccess(res, masterItems);
} catch (error) {
req.log.error({ error }, 'Error fetching master items in /api/personalization/master-items:');
next(error);
@@ -46,7 +47,7 @@ router.get(
async (req: Request, res: Response, next: NextFunction) => {
try {
const restrictions = await db.personalizationRepo.getDietaryRestrictions(req.log);
res.json(restrictions);
sendSuccess(res, restrictions);
} catch (error) {
req.log.error(
{ error },
@@ -67,7 +68,7 @@ router.get(
async (req: Request, res: Response, next: NextFunction) => {
try {
const appliances = await db.personalizationRepo.getAppliances(req.log);
res.json(appliances);
sendSuccess(res, appliances);
} catch (error) {
req.log.error({ error }, 'Error fetching appliances in /api/personalization/appliances:');
next(error);

View File

@@ -22,16 +22,14 @@ vi.mock('../services/logger.server', async () => ({
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
default: {
authenticate: vi.fn(
(_strategy, _options) => (req: Request, res: Response, next: NextFunction) => {
// If req.user is not set by the test setup, simulate unauthenticated access.
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
// If req.user is set, proceed as an authenticated user.
next();
},
),
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
// If req.user is not set by the test setup, simulate unauthenticated access.
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
// If req.user is set, proceed as an authenticated user.
next();
}),
},
}));
@@ -41,7 +39,11 @@ import { priceRepo } from '../services/db/price.db';
describe('Price Routes (/api/price-history)', () => {
const mockUser = createMockUserProfile({ user: { user_id: 'price-user-123' } });
const app = createTestApp({ router: priceRouter, basePath: '/api/price-history', authenticatedUser: mockUser });
const app = createTestApp({
router: priceRouter,
basePath: '/api/price-history',
authenticatedUser: mockUser,
});
beforeEach(() => {
vi.clearAllMocks();
});
@@ -59,7 +61,7 @@ describe('Price Routes (/api/price-history)', () => {
.send({ masterItemIds: [1, 2] });
expect(response.status).toBe(200);
expect(response.body).toEqual(mockHistory);
expect(response.body.data).toEqual(mockHistory);
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith([1, 2], expect.any(Object), 1000, 0);
});
@@ -69,12 +71,7 @@ describe('Price Routes (/api/price-history)', () => {
.post('/api/price-history')
.send({ masterItemIds: [1, 2, 3], limit: 50, offset: 10 });
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith(
[1, 2, 3],
expect.any(Object),
50,
10,
);
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith([1, 2, 3], expect.any(Object), 50, 10);
});
it('should log the request info', async () => {
@@ -98,14 +95,14 @@ describe('Price Routes (/api/price-history)', () => {
.send({ masterItemIds: [1, 2, 3] });
expect(response.status).toBe(500);
expect(response.body.message).toBe('Database connection failed');
expect(response.body.error.message).toBe('Database connection failed');
});
it('should return 400 if masterItemIds is an empty array', async () => {
const response = await supertest(app).post('/api/price-history').send({ masterItemIds: [] });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe(
expect(response.body.error.details[0].message).toBe(
'masterItemIds must be a non-empty array of positive integers.',
);
});
@@ -117,7 +114,9 @@ describe('Price Routes (/api/price-history)', () => {
expect(response.status).toBe(400);
// The actual message is "Invalid input: expected array, received string"
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received string');
expect(response.body.error.details[0].message).toBe(
'Invalid input: expected array, received string',
);
});
it('should return 400 if masterItemIds contains non-positive integers', async () => {
@@ -126,7 +125,7 @@ describe('Price Routes (/api/price-history)', () => {
.send({ masterItemIds: [1, -2, 3] });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('Number must be greater than 0');
expect(response.body.error.details[0].message).toBe('Number must be greater than 0');
});
it('should return 400 if masterItemIds is missing', async () => {
@@ -134,7 +133,9 @@ describe('Price Routes (/api/price-history)', () => {
expect(response.status).toBe(400);
// The actual message is "Invalid input: expected array, received undefined"
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received undefined');
expect(response.body.error.details[0].message).toBe(
'Invalid input: expected array, received undefined',
);
});
it('should return 400 for invalid limit and offset', async () => {
@@ -143,10 +144,12 @@ describe('Price Routes (/api/price-history)', () => {
.send({ masterItemIds: [1], limit: -1, offset: 'abc' });
expect(response.status).toBe(400);
expect(response.body.errors).toHaveLength(2);
expect(response.body.error.details).toHaveLength(2);
// The actual message is "Too small: expected number to be >0"
expect(response.body.errors[0].message).toBe('Too small: expected number to be >0');
expect(response.body.errors[1].message).toBe('Invalid input: expected number, received NaN');
expect(response.body.error.details[0].message).toBe('Too small: expected number to be >0');
expect(response.body.error.details[1].message).toBe(
'Invalid input: expected number, received NaN',
);
});
});

View File

@@ -6,16 +6,15 @@ import { validateRequest } from '../middleware/validation.middleware';
import { priceRepo } from '../services/db/price.db';
import { optionalNumeric } from '../utils/zodUtils';
import { priceHistoryLimiter } from '../config/rateLimiters';
import { sendSuccess } from '../utils/apiResponse';
const router = Router();
const priceHistorySchema = z.object({
body: z.object({
masterItemIds: z
.array(z.number().int().positive('Number must be greater than 0'))
.nonempty({
message: 'masterItemIds must be a non-empty array of positive integers.',
}),
masterItemIds: z.array(z.number().int().positive('Number must be greater than 0')).nonempty({
message: 'masterItemIds must be a non-empty array of positive integers.',
}),
limit: optionalNumeric({ default: 1000, integer: true, positive: true }),
offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
}),
@@ -44,7 +43,7 @@ router.post(
);
try {
const priceHistory = await priceRepo.getPriceHistory(masterItemIds, req.log, limit, offset);
res.status(200).json(priceHistory);
sendSuccess(res, priceHistory);
} catch (error) {
next(error);
}
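
The tightened priceHistorySchema still relies on optionalNumeric for limit (default 1000) and offset (default 0), and the tests assert Zod coercion errors such as 'Too small: expected number to be >0' and 'Invalid input: expected number, received NaN'. The helper in src/utils/zodUtils.ts is not part of this diff; the following is one plausible shape consistent with that observed behaviour, shown for illustration only.

// One possible shape for optionalNumeric in src/utils/zodUtils.ts -- inferred from the
// defaults and the coercion errors asserted in the tests above, not the committed code.
import { z } from 'zod';

interface OptionalNumericOpts {
  default: number;
  integer?: boolean;
  positive?: boolean;
  nonnegative?: boolean;
}

export function optionalNumeric(opts: OptionalNumericOpts) {
  let schema = z.coerce.number(); // coercion explains "received NaN" for non-numeric input
  if (opts.integer) schema = schema.int();
  if (opts.positive) schema = schema.positive(); // "Too small: expected number to be >0"
  if (opts.nonnegative) schema = schema.nonnegative();
  return schema.optional().default(opts.default);
}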

View File

@@ -1,7 +1,9 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import type { Request, Response, NextFunction } from 'express';
import { createTestApp } from '../tests/utils/createTestApp';
import { createMockUserProfile } from '../tests/utils/mockFactories';
import type { UserReaction } from '../types';
// 1. Mock the Service Layer directly.
vi.mock('../services/db/index.db', () => ({
@@ -20,15 +22,13 @@ vi.mock('../services/logger.server', async () => ({
// Mock Passport middleware
vi.mock('./passport.routes', () => ({
default: {
authenticate: vi.fn(
() => (req: any, res: any, next: any) => {
// If we are testing the unauthenticated state (no user injected), simulate 401.
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
next();
},
),
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
// If we are testing the unauthenticated state (no user injected), simulate 401.
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
next();
}),
},
}));
@@ -51,20 +51,24 @@ describe('Reaction Routes (/api/reactions)', () => {
const app = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
it('should return a list of reactions', async () => {
const mockReactions = [{ id: 1, reaction_type: 'like', entity_id: '123' }];
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions as any);
const mockReactions = [
{ reaction_id: 1, reaction_type: 'like', entity_id: '123' },
] as unknown as UserReaction[];
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions);
const response = await supertest(app).get('/api/reactions');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockReactions);
expect(response.body.data).toEqual(mockReactions);
expect(reactionRepo.getReactions).toHaveBeenCalledWith({}, expectLogger);
});
it('should filter by query parameters', async () => {
const mockReactions = [{ id: 1, reaction_type: 'like' }];
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions as any);
const mockReactions = [
{ reaction_id: 1, reaction_type: 'like' },
] as unknown as UserReaction[];
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions);
const validUuid = '123e4567-e89b-12d3-a456-426614174000';
const query = { userId: validUuid, entityType: 'recipe', entityId: '1' };
@@ -73,7 +77,7 @@ describe('Reaction Routes (/api/reactions)', () => {
expect(response.status).toBe(200);
expect(reactionRepo.getReactions).toHaveBeenCalledWith(
expect.objectContaining(query),
expectLogger
expectLogger,
);
});
@@ -84,10 +88,7 @@ describe('Reaction Routes (/api/reactions)', () => {
const response = await supertest(app).get('/api/reactions');
expect(response.status).toBe(500);
expect(mockLogger.error).toHaveBeenCalledWith(
{ error },
'Error fetching user reactions'
);
expect(mockLogger.error).toHaveBeenCalledWith({ error }, 'Error fetching user reactions');
});
});
@@ -95,26 +96,25 @@ describe('Reaction Routes (/api/reactions)', () => {
const app = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
it('should return reaction summary for an entity', async () => {
const mockSummary = { like: 10, love: 5 };
vi.mocked(reactionRepo.getReactionSummary).mockResolvedValue(mockSummary as any);
const mockSummary = [
{ reaction_type: 'like', count: 10 },
{ reaction_type: 'love', count: 5 },
];
vi.mocked(reactionRepo.getReactionSummary).mockResolvedValue(mockSummary);
const response = await supertest(app)
.get('/api/reactions/summary')
.query({ entityType: 'recipe', entityId: '123' });
expect(response.status).toBe(200);
expect(response.body).toEqual(mockSummary);
expect(reactionRepo.getReactionSummary).toHaveBeenCalledWith(
'recipe',
'123',
expectLogger
);
expect(response.body.data).toEqual(mockSummary);
expect(reactionRepo.getReactionSummary).toHaveBeenCalledWith('recipe', '123', expectLogger);
});
it('should return 400 if required parameters are missing', async () => {
const response = await supertest(app).get('/api/reactions/summary');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('required');
expect(response.body.error.details[0].message).toContain('required');
});
it('should return 500 on database error', async () => {
@@ -126,10 +126,7 @@ describe('Reaction Routes (/api/reactions)', () => {
.query({ entityType: 'recipe', entityId: '123' });
expect(response.status).toBe(500);
expect(mockLogger.error).toHaveBeenCalledWith(
{ error },
'Error fetching reaction summary'
);
expect(mockLogger.error).toHaveBeenCalledWith({ error }, 'Error fetching reaction summary');
});
});
@@ -148,18 +145,20 @@ describe('Reaction Routes (/api/reactions)', () => {
};
it('should return 201 when a reaction is added', async () => {
const mockResult = { ...validBody, id: 1, user_id: 'user-123' };
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(mockResult as any);
const mockResult = {
...validBody,
reaction_id: 1,
user_id: 'user-123',
} as unknown as UserReaction;
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(mockResult);
const response = await supertest(app)
.post('/api/reactions/toggle')
.send(validBody);
const response = await supertest(app).post('/api/reactions/toggle').send(validBody);
expect(response.status).toBe(201);
expect(response.body).toEqual({ message: 'Reaction added.', reaction: mockResult });
expect(response.body.data).toEqual({ message: 'Reaction added.', reaction: mockResult });
expect(reactionRepo.toggleReaction).toHaveBeenCalledWith(
{ user_id: 'user-123', ...validBody },
expectLogger
expectLogger,
);
});
@@ -167,12 +166,10 @@ describe('Reaction Routes (/api/reactions)', () => {
// Returning null/false from toggleReaction implies the reaction was removed
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(null);
const response = await supertest(app)
.post('/api/reactions/toggle')
.send(validBody);
const response = await supertest(app).post('/api/reactions/toggle').send(validBody);
expect(response.status).toBe(200);
expect(response.body).toEqual({ message: 'Reaction removed.' });
expect(response.body.data).toEqual({ message: 'Reaction removed.' });
});
it('should return 400 if body is invalid', async () => {
@@ -181,14 +178,12 @@ describe('Reaction Routes (/api/reactions)', () => {
.send({ entity_type: 'recipe' }); // Missing other required fields
expect(response.status).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.error.details).toBeDefined();
});
it('should return 401 if not authenticated', async () => {
const unauthApp = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
const response = await supertest(unauthApp)
.post('/api/reactions/toggle')
.send(validBody);
const response = await supertest(unauthApp).post('/api/reactions/toggle').send(validBody);
expect(response.status).toBe(401);
});
@@ -197,14 +192,12 @@ describe('Reaction Routes (/api/reactions)', () => {
const error = new Error('DB Error');
vi.mocked(reactionRepo.toggleReaction).mockRejectedValue(error);
const response = await supertest(app)
.post('/api/reactions/toggle')
.send(validBody);
const response = await supertest(app).post('/api/reactions/toggle').send(validBody);
expect(response.status).toBe(500);
expect(mockLogger.error).toHaveBeenCalledWith(
{ error, body: validBody },
'Error toggling user reaction'
'Error toggling user reaction',
);
});
});
@@ -240,4 +233,4 @@ describe('Reaction Routes (/api/reactions)', () => {
expect(parseInt(response.headers['ratelimit-limit'])).toBe(150);
});
});
});
});

View File

@@ -6,6 +6,7 @@ import passport from './passport.routes';
import { requiredString } from '../utils/zodUtils';
import { UserProfile } from '../types';
import { publicReadLimiter, reactionToggleLimiter } from '../config/rateLimiters';
import { sendSuccess } from '../utils/apiResponse';
const router = Router();
@@ -49,7 +50,7 @@ router.get(
try {
const { query } = getReactionsSchema.parse({ query: req.query });
const reactions = await reactionRepo.getReactions(query, req.log);
res.json(reactions);
sendSuccess(res, reactions);
} catch (error) {
req.log.error({ error }, 'Error fetching user reactions');
next(error);
@@ -69,8 +70,12 @@ router.get(
async (req: Request, res: Response, next: NextFunction) => {
try {
const { query } = getReactionSummarySchema.parse({ query: req.query });
const summary = await reactionRepo.getReactionSummary(query.entityType, query.entityId, req.log);
res.json(summary);
const summary = await reactionRepo.getReactionSummary(
query.entityType,
query.entityId,
req.log,
);
sendSuccess(res, summary);
} catch (error) {
req.log.error({ error }, 'Error fetching reaction summary');
next(error);
@@ -99,9 +104,9 @@ router.post(
};
const result = await reactionRepo.toggleReaction(reactionData, req.log);
if (result) {
res.status(201).json({ message: 'Reaction added.', reaction: result });
sendSuccess(res, { message: 'Reaction added.', reaction: result }, 201);
} else {
res.status(200).json({ message: 'Reaction removed.' });
sendSuccess(res, { message: 'Reaction removed.' });
}
} catch (error) {
req.log.error({ error, body }, 'Error toggling user reaction');
@@ -110,4 +115,4 @@ router.post(
},
);
export default router;
export default router;
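
The toggle handler above now wraps both outcomes in the envelope: 201 with data.reaction when a reaction is added, and 200 with only data.message when an existing one is removed (the repository returning null signals removal). A hypothetical client-side helper that distinguishes the two is sketched below; the snake_case field names follow the test payloads, but the exact body shape, the credentials handling and the helper itself are assumptions.

// Hypothetical client helper for POST /api/reactions/toggle (not part of this commit).
// 201 + data.reaction -> reaction added; 200 + data.message -> existing reaction removed.
async function toggleReaction(entityType: string, entityId: string, reactionType: string) {
  const res = await fetch('/api/reactions/toggle', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    credentials: 'include', // the route requires an authenticated session (401 otherwise)
    body: JSON.stringify({
      entity_type: entityType,
      entity_id: entityId,
      reaction_type: reactionType,
    }),
  });
  const body = await res.json().catch(() => undefined);
  if (res.status === 201) return { added: true, reaction: body?.data?.reaction };
  if (res.status === 200) return { added: false };
  throw new Error(body?.error?.message ?? `Toggle failed (HTTP ${res.status})`);
}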

View File

@@ -1,7 +1,12 @@
// src/routes/recipe.routes.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import { createMockRecipe, createMockRecipeComment, createMockUserProfile } from '../tests/utils/mockFactories';
import type { Request, Response, NextFunction } from 'express';
import {
createMockRecipe,
createMockRecipeComment,
createMockUserProfile,
} from '../tests/utils/mockFactories';
import { NotFoundError } from '../services/db/errors.db';
import { createTestApp } from '../tests/utils/createTestApp';
@@ -26,14 +31,12 @@ vi.mock('../services/aiService.server', () => ({
// Mock Passport
vi.mock('./passport.routes', () => ({
default: {
authenticate: vi.fn(
() => (req: any, res: any, next: any) => {
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
next();
},
),
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
if (!req.user) {
return res.status(401).json({ message: 'Unauthorized' });
}
next();
}),
},
}));
@@ -70,7 +73,7 @@ describe('Recipe Routes (/api/recipes)', () => {
const response = await supertest(app).get('/api/recipes/by-sale-percentage?minPercentage=75');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockRecipes);
expect(response.body.data).toEqual(mockRecipes);
expect(db.recipeRepo.getRecipesBySalePercentage).toHaveBeenCalledWith(75, expectLogger);
});
@@ -85,7 +88,7 @@ describe('Recipe Routes (/api/recipes)', () => {
vi.mocked(db.recipeRepo.getRecipesBySalePercentage).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/recipes/by-sale-percentage');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching recipes in /api/recipes/by-sale-percentage:',
@@ -97,7 +100,7 @@ describe('Recipe Routes (/api/recipes)', () => {
'/api/recipes/by-sale-percentage?minPercentage=101',
);
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('Too big');
expect(response.body.error.details[0].message).toContain('Too big');
});
});
@@ -120,7 +123,7 @@ describe('Recipe Routes (/api/recipes)', () => {
vi.mocked(db.recipeRepo.getRecipesByMinSaleIngredients).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/recipes/by-sale-ingredients');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching recipes in /api/recipes/by-sale-ingredients:',
@@ -132,7 +135,7 @@ describe('Recipe Routes (/api/recipes)', () => {
'/api/recipes/by-sale-ingredients?minIngredients=abc',
);
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('received NaN');
expect(response.body.error.details[0].message).toContain('received NaN');
});
});
@@ -146,7 +149,7 @@ describe('Recipe Routes (/api/recipes)', () => {
);
expect(response.status).toBe(200);
expect(response.body).toEqual(mockRecipes);
expect(response.body.data).toEqual(mockRecipes);
});
it('should return 500 if the database call fails', async () => {
@@ -156,7 +159,7 @@ describe('Recipe Routes (/api/recipes)', () => {
'/api/recipes/by-ingredient-and-tag?ingredient=chicken&tag=quick',
);
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching recipes in /api/recipes/by-ingredient-and-tag:',
@@ -168,7 +171,7 @@ describe('Recipe Routes (/api/recipes)', () => {
'/api/recipes/by-ingredient-and-tag?ingredient=chicken',
);
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('Query parameter "tag" is required.');
expect(response.body.error.details[0].message).toBe('Query parameter "tag" is required.');
});
});
@@ -180,14 +183,14 @@ describe('Recipe Routes (/api/recipes)', () => {
const response = await supertest(app).get('/api/recipes/1/comments');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockComments);
expect(response.body.data).toEqual(mockComments);
expect(db.recipeRepo.getRecipeComments).toHaveBeenCalledWith(1, expectLogger);
});
it('should return an empty array if recipe has no comments', async () => {
vi.mocked(db.recipeRepo.getRecipeComments).mockResolvedValue([]);
const response = await supertest(app).get('/api/recipes/2/comments');
expect(response.body).toEqual([]);
expect(response.body.data).toEqual([]);
});
it('should return 500 if the database call fails', async () => {
@@ -195,7 +198,7 @@ describe('Recipe Routes (/api/recipes)', () => {
vi.mocked(db.recipeRepo.getRecipeComments).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/recipes/1/comments');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
`Error fetching comments for recipe ID 1:`,
@@ -205,7 +208,7 @@ describe('Recipe Routes (/api/recipes)', () => {
it('should return 400 for an invalid recipeId', async () => {
const response = await supertest(app).get('/api/recipes/abc/comments');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('received NaN');
expect(response.body.error.details[0].message).toContain('received NaN');
});
});
@@ -217,7 +220,7 @@ describe('Recipe Routes (/api/recipes)', () => {
const response = await supertest(app).get('/api/recipes/456');
expect(response.status).toBe(200);
expect(response.body).toEqual(mockRecipe);
expect(response.body.data).toEqual(mockRecipe);
expect(db.recipeRepo.getRecipeById).toHaveBeenCalledWith(456, expectLogger);
});
@@ -226,7 +229,7 @@ describe('Recipe Routes (/api/recipes)', () => {
vi.mocked(db.recipeRepo.getRecipeById).mockRejectedValue(notFoundError);
const response = await supertest(app).get('/api/recipes/999');
expect(response.status).toBe(404);
expect(response.body.message).toContain('not found');
expect(response.body.error.message).toContain('not found');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: notFoundError },
`Error fetching recipe ID 999:`,
@@ -238,7 +241,7 @@ describe('Recipe Routes (/api/recipes)', () => {
vi.mocked(db.recipeRepo.getRecipeById).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/recipes/456');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
`Error fetching recipe ID 456:`,
@@ -248,7 +251,7 @@ describe('Recipe Routes (/api/recipes)', () => {
it('should return 400 for an invalid recipeId', async () => {
const response = await supertest(app).get('/api/recipes/abc');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('received NaN');
expect(response.body.error.details[0].message).toContain('received NaN');
});
});
@@ -265,12 +268,10 @@ describe('Recipe Routes (/api/recipes)', () => {
const mockSuggestion = 'Chicken and Rice Casserole...';
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue(mockSuggestion);
const response = await supertest(authApp)
.post('/api/recipes/suggest')
.send({ ingredients });
const response = await supertest(authApp).post('/api/recipes/suggest').send({ ingredients });
expect(response.status).toBe(200);
expect(response.body).toEqual({ suggestion: mockSuggestion });
expect(response.body.data).toEqual({ suggestion: mockSuggestion });
expect(aiService.generateRecipeSuggestion).toHaveBeenCalledWith(ingredients, expectLogger);
});
@@ -282,7 +283,7 @@ describe('Recipe Routes (/api/recipes)', () => {
.send({ ingredients: ['water'] });
expect(response.status).toBe(503);
expect(response.body.message).toContain('unavailable');
expect(response.body.error.message).toContain('unavailable');
});
it('should return 400 if ingredients list is empty', async () => {
@@ -291,7 +292,9 @@ describe('Recipe Routes (/api/recipes)', () => {
.send({ ingredients: [] });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('At least one ingredient is required');
expect(response.body.error.details[0].message).toContain(
'At least one ingredient is required',
);
});
it('should return 401 if not authenticated', async () => {
@@ -314,7 +317,7 @@ describe('Recipe Routes (/api/recipes)', () => {
expect(response.status).toBe(500);
expect(mockLogger.error).toHaveBeenCalledWith(
{ error },
'Error generating recipe suggestion'
'Error generating recipe suggestion',
);
});
});

View File

@@ -7,6 +7,7 @@ import passport from './passport.routes';
import { validateRequest } from '../middleware/validation.middleware';
import { requiredString, numericIdParam, optionalNumeric } from '../utils/zodUtils';
import { publicReadLimiter, suggestionLimiter } from '../config/rateLimiters';
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
const router = Router();
@@ -49,7 +50,7 @@ router.get(
// Explicitly parse req.query to apply coercion (string -> number) and default values
const { query } = bySalePercentageSchema.parse({ query: req.query });
const recipes = await db.recipeRepo.getRecipesBySalePercentage(query.minPercentage!, req.log);
res.json(recipes);
sendSuccess(res, recipes);
} catch (error) {
req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-sale-percentage:');
next(error);
@@ -72,7 +73,7 @@ router.get(
query.minIngredients!,
req.log,
);
res.json(recipes);
sendSuccess(res, recipes);
} catch (error) {
req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-sale-ingredients:');
next(error);
@@ -95,7 +96,7 @@ router.get(
query.tag,
req.log,
);
res.json(recipes);
sendSuccess(res, recipes);
} catch (error) {
req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-ingredient-and-tag:');
next(error);
@@ -106,32 +107,42 @@ router.get(
/**
* GET /api/recipes/:recipeId/comments - Get all comments for a specific recipe.
*/
router.get('/:recipeId/comments', publicReadLimiter, validateRequest(recipeIdParamsSchema), async (req, res, next) => {
try {
// Explicitly parse req.params to coerce recipeId to a number
const { params } = recipeIdParamsSchema.parse({ params: req.params });
const comments = await db.recipeRepo.getRecipeComments(params.recipeId, req.log);
res.json(comments);
} catch (error) {
req.log.error({ error }, `Error fetching comments for recipe ID ${req.params.recipeId}:`);
next(error);
}
});
router.get(
'/:recipeId/comments',
publicReadLimiter,
validateRequest(recipeIdParamsSchema),
async (req, res, next) => {
try {
// Explicitly parse req.params to coerce recipeId to a number
const { params } = recipeIdParamsSchema.parse({ params: req.params });
const comments = await db.recipeRepo.getRecipeComments(params.recipeId, req.log);
sendSuccess(res, comments);
} catch (error) {
req.log.error({ error }, `Error fetching comments for recipe ID ${req.params.recipeId}:`);
next(error);
}
},
);
/**
* GET /api/recipes/:recipeId - Get a single recipe by its ID, including ingredients and tags.
*/
router.get('/:recipeId', publicReadLimiter, validateRequest(recipeIdParamsSchema), async (req, res, next) => {
try {
// Explicitly parse req.params to coerce recipeId to a number
const { params } = recipeIdParamsSchema.parse({ params: req.params });
const recipe = await db.recipeRepo.getRecipeById(params.recipeId, req.log);
res.json(recipe);
} catch (error) {
req.log.error({ error }, `Error fetching recipe ID ${req.params.recipeId}:`);
next(error);
}
});
router.get(
'/:recipeId',
publicReadLimiter,
validateRequest(recipeIdParamsSchema),
async (req, res, next) => {
try {
// Explicitly parse req.params to coerce recipeId to a number
const { params } = recipeIdParamsSchema.parse({ params: req.params });
const recipe = await db.recipeRepo.getRecipeById(params.recipeId, req.log);
sendSuccess(res, recipe);
} catch (error) {
req.log.error({ error }, `Error fetching recipe ID ${req.params.recipeId}:`);
next(error);
}
},
);
/**
* POST /api/recipes/suggest - Generates a simple recipe suggestion from a list of ingredients.
@@ -148,12 +159,15 @@ router.post(
const suggestion = await aiService.generateRecipeSuggestion(body.ingredients, req.log);
if (!suggestion) {
return res
.status(503)
.json({ message: 'AI service is currently unavailable or failed to generate a suggestion.' });
return sendError(
res,
ErrorCode.SERVICE_UNAVAILABLE,
'AI service is currently unavailable or failed to generate a suggestion.',
503,
);
}
res.json({ suggestion });
sendSuccess(res, { suggestion });
} catch (error) {
req.log.error({ error }, 'Error generating recipe suggestion');
next(error);
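For context: this compare view does not show the new ../utils/apiResponse module that the routes above now import. A minimal sketch consistent with the call sites and the updated test assertions (response.body.data, response.body.error.message, response.body.error.details) might look like the following; the success flag and the ErrorCode members other than SERVICE_UNAVAILABLE are assumptions, not taken from the repository.

import { Response } from 'express';

export enum ErrorCode {
  VALIDATION_ERROR = 'VALIDATION_ERROR', // assumed member
  NOT_FOUND = 'NOT_FOUND', // assumed member
  SERVICE_UNAVAILABLE = 'SERVICE_UNAVAILABLE', // used by the /suggest route above
  INTERNAL_ERROR = 'INTERNAL_ERROR', // assumed member
}

// Success envelope: the tests read the payload back as response.body.data
export function sendSuccess<T>(res: Response, data: T, status = 200): Response {
  return res.status(status).json({ success: true, data });
}

// Error envelope: the tests read response.body.error.message and response.body.error.details
export function sendError(
  res: Response,
  code: ErrorCode,
  message: string,
  status = 500,
  details?: unknown[],
): Response {
  return res.status(status).json({ success: false, error: { code, message, details } });
}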

View File

@@ -52,7 +52,7 @@ describe('Stats Routes (/api/stats)', () => {
vi.mocked(db.adminRepo.getMostFrequentSaleItems).mockRejectedValue(dbError);
const response = await supertest(app).get('/api/stats/most-frequent-sales');
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(response.body.error.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
'Error fetching most frequent sale items in /api/stats/most-frequent-sales:',
@@ -62,8 +62,8 @@ describe('Stats Routes (/api/stats)', () => {
it('should return 400 for invalid query parameters', async () => {
const response = await supertest(app).get('/api/stats/most-frequent-sales?days=0&limit=abc');
expect(response.status).toBe(400);
expect(response.body.errors).toBeDefined();
expect(response.body.errors.length).toBe(2);
expect(response.body.error.details).toBeDefined();
expect(response.body.error.details.length).toBe(2);
});
});

View File

@@ -5,6 +5,7 @@ import * as db from '../services/db/index.db';
import { validateRequest } from '../middleware/validation.middleware';
import { optionalNumeric } from '../utils/zodUtils';
import { publicReadLimiter } from '../config/rateLimiters';
import { sendSuccess } from '../utils/apiResponse';
const router = Router();
@@ -34,7 +35,7 @@ router.get(
// We parse it here to apply Zod's coercions (string to number) and defaults.
const { days, limit } = statsQuerySchema.parse(req.query);
const items = await db.adminRepo.getMostFrequentSaleItems(days!, limit!, req.log);
res.json(items);
sendSuccess(res, items);
} catch (error) {
req.log.error(
{ error },

Some files were not shown because too many files have changed in this diff.