Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9ffcc9d65d | ||
| 1285702210 | |||
|
|
d38b751b40 | ||
| e122d55ced | |||
|
|
af9992f773 | ||
| 3912139273 | |||
| b5f7f5e4d1 | |||
|
|
5173059621 | ||
| ebceb0e2e3 | |||
| e75054b1ab |
@@ -63,7 +63,20 @@
|
||||
"Bash(npm install:*)",
|
||||
"Bash(git grep:*)",
|
||||
"Bash(findstr:*)",
|
||||
"Bash(git add:*)"
|
||||
"Bash(git add:*)",
|
||||
"mcp__filesystem__write_file",
|
||||
"mcp__podman__container_list",
|
||||
"Bash(podman cp:*)",
|
||||
"mcp__podman__container_inspect",
|
||||
"mcp__podman__network_list",
|
||||
"Bash(podman network connect:*)",
|
||||
"Bash(npm run build:*)",
|
||||
"Bash(set NODE_ENV=test)",
|
||||
"Bash(podman-compose:*)",
|
||||
"Bash(timeout 60 podman machine start:*)",
|
||||
"Bash(podman build:*)",
|
||||
"Bash(podman network rm:*)",
|
||||
"Bash(npm run lint)"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,18 +1,96 @@
|
||||
{
|
||||
// ============================================================================
|
||||
// VS CODE DEV CONTAINER CONFIGURATION
|
||||
// ============================================================================
|
||||
// This file configures VS Code's Dev Containers extension to provide a
|
||||
// consistent, fully-configured development environment.
|
||||
//
|
||||
// Features:
|
||||
// - Automatic PostgreSQL + Redis startup with healthchecks
|
||||
// - Automatic npm install
|
||||
// - Automatic database schema initialization and seeding
|
||||
// - Pre-configured VS Code extensions (ESLint, Prettier)
|
||||
// - Podman support for Windows users
|
||||
//
|
||||
// Usage:
|
||||
// 1. Install the "Dev Containers" extension in VS Code
|
||||
// 2. Open this project folder
|
||||
// 3. Click "Reopen in Container" when prompted (or use Command Palette)
|
||||
// 4. Wait for container build and initialization
|
||||
// 5. Development server starts automatically
|
||||
// ============================================================================
|
||||
|
||||
"name": "Flyer Crawler Dev (Ubuntu 22.04)",
|
||||
|
||||
// Use Docker Compose for multi-container setup
|
||||
"dockerComposeFile": ["../compose.dev.yml"],
|
||||
"service": "app",
|
||||
"workspaceFolder": "/app",
|
||||
|
||||
// VS Code customizations
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
|
||||
"extensions": [
|
||||
// Code quality
|
||||
"dbaeumer.vscode-eslint",
|
||||
"esbenp.prettier-vscode",
|
||||
// TypeScript
|
||||
"ms-vscode.vscode-typescript-next",
|
||||
// Database
|
||||
"mtxr.sqltools",
|
||||
"mtxr.sqltools-driver-pg",
|
||||
// Utilities
|
||||
"eamodio.gitlens",
|
||||
"streetsidesoftware.code-spell-checker"
|
||||
],
|
||||
"settings": {
|
||||
"editor.formatOnSave": true,
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"typescript.preferences.importModuleSpecifier": "relative"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Run as root (required for npm global installs)
|
||||
"remoteUser": "root",
|
||||
// Automatically install dependencies when the container is created.
|
||||
// This runs inside the container, populating the isolated node_modules volume.
|
||||
"postCreateCommand": "npm install",
|
||||
|
||||
// ============================================================================
|
||||
// Lifecycle Commands
|
||||
// ============================================================================
|
||||
|
||||
// initializeCommand: Runs on the HOST before the container is created.
|
||||
// Starts Podman machine on Windows (no-op if already running or using Docker).
|
||||
"initializeCommand": "powershell -Command \"podman machine start; exit 0\"",
|
||||
|
||||
// postCreateCommand: Runs ONCE when the container is first created.
|
||||
// This is where we do full initialization: npm install + database setup.
|
||||
"postCreateCommand": "chmod +x scripts/docker-init.sh && ./scripts/docker-init.sh",
|
||||
|
||||
// postAttachCommand: Runs EVERY TIME VS Code attaches to the container.
|
||||
// Starts the development server automatically.
|
||||
"postAttachCommand": "npm run dev:container",
|
||||
// Try to start podman machine, but exit with success (0) even if it's already running
|
||||
"initializeCommand": "powershell -Command \"podman machine start; exit 0\""
|
||||
|
||||
// ============================================================================
|
||||
// Port Forwarding
|
||||
// ============================================================================
|
||||
// Automatically forward these ports from the container to the host
|
||||
"forwardPorts": [3000, 3001],
|
||||
|
||||
// Labels for forwarded ports in VS Code's Ports panel
|
||||
"portsAttributes": {
|
||||
"3000": {
|
||||
"label": "Frontend (Vite)",
|
||||
"onAutoForward": "notify"
|
||||
},
|
||||
"3001": {
|
||||
"label": "Backend API",
|
||||
"onAutoForward": "notify"
|
||||
}
|
||||
},
|
||||
|
||||
// ============================================================================
|
||||
// Features
|
||||
// ============================================================================
|
||||
// Additional dev container features (optional)
|
||||
"features": {}
|
||||
}
|
||||
|
||||
77
.env.example
Normal file
77
.env.example
Normal file
@@ -0,0 +1,77 @@
|
||||
# .env.example
|
||||
# ============================================================================
|
||||
# ENVIRONMENT VARIABLES TEMPLATE
|
||||
# ============================================================================
|
||||
# Copy this file to .env and fill in your values.
|
||||
# For local development with Docker/Podman, these defaults should work out of the box.
|
||||
#
|
||||
# IMPORTANT: Never commit .env files with real credentials to version control!
|
||||
# ============================================================================
|
||||
|
||||
# ===================
|
||||
# Database Configuration
|
||||
# ===================
|
||||
# PostgreSQL connection settings
|
||||
# For container development, use the service name "postgres"
|
||||
DB_HOST=postgres
|
||||
DB_PORT=5432
|
||||
DB_USER=postgres
|
||||
DB_PASSWORD=postgres
|
||||
DB_NAME=flyer_crawler_dev
|
||||
|
||||
# ===================
|
||||
# Redis Configuration
|
||||
# ===================
|
||||
# Redis URL for caching and job queues
|
||||
# For container development, use the service name "redis"
|
||||
REDIS_URL=redis://redis:6379
|
||||
# Optional: Redis password (leave empty if not required)
|
||||
REDIS_PASSWORD=
|
||||
|
||||
# ===================
|
||||
# Application Settings
|
||||
# ===================
|
||||
NODE_ENV=development
|
||||
# Frontend URL for CORS and email links
|
||||
FRONTEND_URL=http://localhost:3000
|
||||
|
||||
# ===================
|
||||
# Authentication
|
||||
# ===================
|
||||
# REQUIRED: Secret key for signing JWT tokens (generate a random 64+ character string)
|
||||
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production
|
||||
|
||||
# ===================
|
||||
# AI/ML Services
|
||||
# ===================
|
||||
# REQUIRED: Google Gemini API key for flyer OCR processing
|
||||
GEMINI_API_KEY=your-gemini-api-key
|
||||
|
||||
# ===================
|
||||
# External APIs
|
||||
# ===================
|
||||
# Optional: Google Maps API key for geocoding store addresses
|
||||
GOOGLE_MAPS_API_KEY=
|
||||
|
||||
# ===================
|
||||
# Email Configuration (Optional)
|
||||
# ===================
|
||||
# SMTP settings for sending emails (deal notifications, password reset)
|
||||
SMTP_HOST=
|
||||
SMTP_PORT=587
|
||||
SMTP_SECURE=false
|
||||
SMTP_USER=
|
||||
SMTP_PASS=
|
||||
SMTP_FROM_EMAIL=noreply@example.com
|
||||
|
||||
# ===================
|
||||
# Worker Configuration (Optional)
|
||||
# ===================
|
||||
# Concurrency settings for background job workers
|
||||
WORKER_CONCURRENCY=1
|
||||
EMAIL_WORKER_CONCURRENCY=10
|
||||
ANALYTICS_WORKER_CONCURRENCY=1
|
||||
CLEANUP_WORKER_CONCURRENCY=10
|
||||
|
||||
# Worker lock duration in milliseconds (default: 2 minutes)
|
||||
WORKER_LOCK_DURATION=120000
|
||||
6
.env.test
Normal file
6
.env.test
Normal file
@@ -0,0 +1,6 @@
|
||||
DB_HOST=10.89.0.4
|
||||
DB_USER=flyer
|
||||
DB_PASSWORD=flyer
|
||||
DB_NAME=flyer_crawler_test
|
||||
REDIS_URL=redis://redis:6379
|
||||
NODE_ENV=test
|
||||
@@ -137,6 +137,13 @@ jobs:
|
||||
VITE_API_BASE_URL: 'http://localhost:3001/api'
|
||||
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
|
||||
|
||||
# --- Storage path for flyer images ---
|
||||
# CRITICAL: Use an absolute path in the test runner's working directory for file storage.
|
||||
# This ensures tests can read processed files to verify their contents (e.g., EXIF stripping).
|
||||
# Without this, multer and flyerProcessingService default to /var/www/.../flyer-images.
|
||||
# NOTE: We use ${{ github.workspace }} which resolves to the checkout directory.
|
||||
STORAGE_PATH: '${{ github.workspace }}/flyer-images'
|
||||
|
||||
# --- JWT Secret for Passport authentication in tests ---
|
||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||
|
||||
|
||||
@@ -1,31 +1,60 @@
|
||||
# Use Ubuntu 22.04 (LTS) as the base image to match production
|
||||
# Dockerfile.dev
|
||||
# ============================================================================
|
||||
# DEVELOPMENT DOCKERFILE
|
||||
# ============================================================================
|
||||
# This Dockerfile creates a development environment that matches production
|
||||
# as closely as possible while providing the tools needed for development.
|
||||
#
|
||||
# Base: Ubuntu 22.04 (LTS) - matches production server
|
||||
# Node: v20.x (LTS) - matches production
|
||||
# Includes: PostgreSQL client, Redis CLI, build tools
|
||||
# ============================================================================
|
||||
|
||||
FROM ubuntu:22.04
|
||||
|
||||
# Set environment variables to non-interactive to avoid prompts during installation
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Update package lists and install essential tools
|
||||
# - curl: for downloading Node.js setup script
|
||||
# ============================================================================
|
||||
# Install System Dependencies
|
||||
# ============================================================================
|
||||
# - curl: for downloading Node.js setup script and health checks
|
||||
# - git: for version control operations
|
||||
# - build-essential: for compiling native Node.js modules (node-gyp)
|
||||
# - python3: required by some Node.js build tools
|
||||
# - postgresql-client: for psql CLI (database initialization)
|
||||
# - redis-tools: for redis-cli (health checks)
|
||||
RUN apt-get update && apt-get install -y \
|
||||
curl \
|
||||
git \
|
||||
build-essential \
|
||||
python3 \
|
||||
postgresql-client \
|
||||
redis-tools \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install Node.js 20.x (LTS) from NodeSource
|
||||
# ============================================================================
|
||||
# Install Node.js 20.x (LTS)
|
||||
# ============================================================================
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
||||
&& apt-get install -y nodejs
|
||||
|
||||
# Set the working directory inside the container
|
||||
# ============================================================================
|
||||
# Set Working Directory
|
||||
# ============================================================================
|
||||
WORKDIR /app
|
||||
|
||||
# Set default environment variables for development
|
||||
# ============================================================================
|
||||
# Environment Configuration
|
||||
# ============================================================================
|
||||
# Default environment variables for development
|
||||
ENV NODE_ENV=development
|
||||
# Increase Node.js memory limit for large builds
|
||||
ENV NODE_OPTIONS='--max-old-space-size=8192'
|
||||
|
||||
# Default command keeps the container running so you can attach to it
|
||||
CMD ["bash"]
|
||||
# ============================================================================
|
||||
# Default Command
|
||||
# ============================================================================
|
||||
# Keep container running so VS Code can attach.
|
||||
# Actual commands (npm run dev, etc.) are run via devcontainer.json.
|
||||
CMD ["bash"]
|
||||
|
||||
@@ -1,8 +1,36 @@
|
||||
# compose.dev.yml
|
||||
# ============================================================================
|
||||
# DEVELOPMENT DOCKER COMPOSE CONFIGURATION
|
||||
# ============================================================================
|
||||
# This file defines the local development environment using Docker/Podman.
|
||||
#
|
||||
# Services:
|
||||
# - app: Node.js application (API + Frontend)
|
||||
# - postgres: PostgreSQL 15 with PostGIS extension
|
||||
# - redis: Redis for caching and job queues
|
||||
#
|
||||
# Usage:
|
||||
# Start all services: podman-compose -f compose.dev.yml up -d
|
||||
# Stop all services: podman-compose -f compose.dev.yml down
|
||||
# View logs: podman-compose -f compose.dev.yml logs -f
|
||||
# Reset everything: podman-compose -f compose.dev.yml down -v
|
||||
#
|
||||
# VS Code Dev Containers:
|
||||
# This file is referenced by .devcontainer/devcontainer.json for seamless
|
||||
# VS Code integration. Open the project in VS Code and use "Reopen in Container".
|
||||
# ============================================================================
|
||||
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
# ===================
|
||||
# Application Service
|
||||
# ===================
|
||||
app:
|
||||
container_name: flyer-crawler-dev
|
||||
# Use pre-built image if available, otherwise build from Dockerfile.dev
|
||||
# To build: podman build -f Dockerfile.dev -t flyer-crawler-dev:latest .
|
||||
image: localhost/flyer-crawler-dev:latest
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.dev
|
||||
@@ -16,21 +44,44 @@ services:
|
||||
- '3000:3000' # Frontend (Vite default)
|
||||
- '3001:3001' # Backend API
|
||||
environment:
|
||||
# Core settings
|
||||
- NODE_ENV=development
|
||||
# Database - use service name for Docker networking
|
||||
- DB_HOST=postgres
|
||||
- DB_PORT=5432
|
||||
- DB_USER=postgres
|
||||
- DB_PASSWORD=postgres
|
||||
- DB_NAME=flyer_crawler_dev
|
||||
# Redis - use service name for Docker networking
|
||||
- REDIS_URL=redis://redis:6379
|
||||
# Add other secrets here or use a .env file
|
||||
- REDIS_HOST=redis
|
||||
- REDIS_PORT=6379
|
||||
# Frontend URL for CORS
|
||||
- FRONTEND_URL=http://localhost:3000
|
||||
# Default JWT secret for development (override in production!)
|
||||
- JWT_SECRET=dev-jwt-secret-change-in-production
|
||||
# Worker settings
|
||||
- WORKER_LOCK_DURATION=120000
|
||||
depends_on:
|
||||
- postgres
|
||||
- redis
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
# Keep container running so VS Code can attach
|
||||
command: tail -f /dev/null
|
||||
# Healthcheck for the app (once it's running)
|
||||
healthcheck:
|
||||
test: ['CMD-SHELL', 'curl -f http://localhost:3001/api/health || exit 0']
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
|
||||
# ===================
|
||||
# PostgreSQL Database
|
||||
# ===================
|
||||
postgres:
|
||||
image: docker.io/library/postgis/postgis:15-3.4
|
||||
image: docker.io/postgis/postgis:15-3.4
|
||||
container_name: flyer-crawler-postgres
|
||||
ports:
|
||||
- '5432:5432'
|
||||
@@ -38,15 +89,54 @@ services:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_DB: flyer_crawler_dev
|
||||
# Optimize for development
|
||||
POSTGRES_INITDB_ARGS: '--encoding=UTF8 --locale=C'
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
# Mount the extensions init script to run on first database creation
|
||||
# The 00- prefix ensures it runs before any other init scripts
|
||||
- ./sql/00-init-extensions.sql:/docker-entrypoint-initdb.d/00-init-extensions.sql:ro
|
||||
# Healthcheck ensures postgres is ready before app starts
|
||||
healthcheck:
|
||||
test: ['CMD-SHELL', 'pg_isready -U postgres -d flyer_crawler_dev']
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 10s
|
||||
|
||||
# ===================
|
||||
# Redis Cache/Queue
|
||||
# ===================
|
||||
redis:
|
||||
image: docker.io/library/redis:alpine
|
||||
container_name: flyer-crawler-redis
|
||||
ports:
|
||||
- '6379:6379'
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
# Healthcheck ensures redis is ready before app starts
|
||||
healthcheck:
|
||||
test: ['CMD', 'redis-cli', 'ping']
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 10
|
||||
start_period: 5s
|
||||
# Enable persistence for development data
|
||||
command: redis-server --appendonly yes
|
||||
|
||||
# ===================
|
||||
# Named Volumes
|
||||
# ===================
|
||||
volumes:
|
||||
postgres_data:
|
||||
name: flyer-crawler-postgres-data
|
||||
redis_data:
|
||||
name: flyer-crawler-redis-data
|
||||
node_modules_data:
|
||||
name: flyer-crawler-node-modules
|
||||
|
||||
# ===================
|
||||
# Network Configuration
|
||||
# ===================
|
||||
# All services are on the default bridge network.
|
||||
# Use service names (postgres, redis) as hostnames.
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-07
|
||||
|
||||
## Context
|
||||
|
||||
Our application has experienced a recurring pattern of bugs and brittle tests related to error handling, specifically for "resource not found" scenarios. The root causes identified are:
|
||||
@@ -41,3 +43,86 @@ We will adopt a strict, consistent error-handling contract for the service and r
|
||||
|
||||
**Initial Refactoring**: Requires a one-time effort to audit and refactor all existing repository methods to conform to this new standard.
|
||||
**Convention Adherence**: Developers must be aware of and adhere to this convention. This ADR serves as the primary documentation for this pattern.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Custom Error Types
|
||||
|
||||
All custom errors are defined in `src/services/db/errors.db.ts`:
|
||||
|
||||
| Error Class | HTTP Status | PostgreSQL Code | Use Case |
|
||||
| -------------------------------- | ----------- | --------------- | ------------------------------- |
|
||||
| `NotFoundError` | 404 | - | Resource not found |
|
||||
| `UniqueConstraintError` | 409 | 23505 | Duplicate key violation |
|
||||
| `ForeignKeyConstraintError` | 400 | 23503 | Referenced record doesn't exist |
|
||||
| `NotNullConstraintError` | 400 | 23502 | Required field is null |
|
||||
| `CheckConstraintError` | 400 | 23514 | Check constraint violated |
|
||||
| `InvalidTextRepresentationError` | 400 | 22P02 | Invalid data type format |
|
||||
| `NumericValueOutOfRangeError` | 400 | 22003 | Numeric overflow |
|
||||
| `ValidationError` | 400 | - | Request validation failed |
|
||||
| `ForbiddenError` | 403 | - | Access denied |
|
||||
|
||||
### Error Handler Middleware
|
||||
|
||||
The centralized error handler in `src/middleware/errorHandler.ts`:
|
||||
|
||||
1. Catches all errors from route handlers
|
||||
2. Maps custom error types to HTTP status codes
|
||||
3. Logs errors with appropriate severity (warn for 4xx, error for 5xx)
|
||||
4. Returns consistent JSON error responses
|
||||
5. Includes error ID for server errors (for support correlation)
|
||||
|
||||
### Usage Pattern
|
||||
|
||||
```typescript
|
||||
// In repository (throws NotFoundError)
|
||||
async function getUserById(id: number): Promise<User> {
|
||||
const result = await pool.query('SELECT * FROM users WHERE id = $1', [id]);
|
||||
if (result.rows.length === 0) {
|
||||
throw new NotFoundError(`User with ID ${id} not found.`);
|
||||
}
|
||||
return result.rows[0];
|
||||
}
|
||||
|
||||
// In route handler (simple try/catch)
|
||||
router.get('/:id', async (req, res, next) => {
|
||||
try {
|
||||
const user = await getUserById(req.params.id);
|
||||
res.json(user);
|
||||
} catch (error) {
|
||||
next(error); // errorHandler maps NotFoundError to 404
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
### Centralized Error Handler Helper
|
||||
|
||||
The `handleDbError` function in `src/services/db/errors.db.ts` provides centralized PostgreSQL error handling:
|
||||
|
||||
```typescript
|
||||
import { handleDbError } from './errors.db';
|
||||
|
||||
try {
|
||||
await pool.query('INSERT INTO users (email) VALUES ($1)', [email]);
|
||||
} catch (error) {
|
||||
handleDbError(
|
||||
error,
|
||||
logger,
|
||||
'Failed to create user',
|
||||
{ email },
|
||||
{
|
||||
uniqueMessage: 'A user with this email already exists.',
|
||||
defaultMessage: 'Failed to create user.',
|
||||
},
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/db/errors.db.ts` - Custom error classes and `handleDbError` utility
|
||||
- `src/middleware/errorHandler.ts` - Centralized Express error handling middleware
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-034](./0034-repository-pattern-standards.md) - Repository Pattern Standards (extends this ADR)
|
||||
|
||||
@@ -60,3 +60,109 @@ async function registerUserAndCreateDefaultList(userData) {
|
||||
|
||||
**Learning Curve**: Developers will need to learn and adopt the `withTransaction` pattern for all transactional database work.
|
||||
**Refactoring Effort**: Existing methods that manually manage transactions (`createUser`, `createBudget`, etc.) will need to be refactored to use the new pattern.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### The `withTransaction` Helper
|
||||
|
||||
Located in `src/services/db/connection.db.ts`:
|
||||
|
||||
```typescript
|
||||
export async function withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T> {
|
||||
const client = await getPool().connect();
|
||||
try {
|
||||
await client.query('BEGIN');
|
||||
const result = await callback(client);
|
||||
await client.query('COMMIT');
|
||||
return result;
|
||||
} catch (error) {
|
||||
await client.query('ROLLBACK');
|
||||
logger.error({ err: error }, 'Transaction failed, rolling back.');
|
||||
throw error;
|
||||
} finally {
|
||||
client.release();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Repository Pattern for Transaction Support
|
||||
|
||||
Repository methods accept an optional `PoolClient` parameter:
|
||||
|
||||
```typescript
|
||||
// Function-based approach
|
||||
export async function createUser(userData: CreateUserInput, client?: PoolClient): Promise<User> {
|
||||
const queryable = client || getPool();
|
||||
const result = await queryable.query<User>(
|
||||
'INSERT INTO users (email, password_hash) VALUES ($1, $2) RETURNING *',
|
||||
[userData.email, userData.passwordHash],
|
||||
);
|
||||
return result.rows[0];
|
||||
}
|
||||
```
|
||||
|
||||
### Transactional Service Example
|
||||
|
||||
```typescript
|
||||
// src/services/authService.ts
|
||||
import { withTransaction } from './db/connection.db';
|
||||
import { createUser, createProfile } from './db';
|
||||
|
||||
export async function registerUserWithProfile(
|
||||
email: string,
|
||||
password: string,
|
||||
profileData: ProfileInput,
|
||||
): Promise<UserWithProfile> {
|
||||
return withTransaction(async (client) => {
|
||||
// All operations use the same transactional client
|
||||
const user = await createUser({ email, password }, client);
|
||||
const profile = await createProfile(
|
||||
{
|
||||
userId: user.user_id,
|
||||
...profileData,
|
||||
},
|
||||
client,
|
||||
);
|
||||
|
||||
return { user, profile };
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Services Using `withTransaction`
|
||||
|
||||
| Service | Function | Operations |
|
||||
| ------------------------- | ----------------------- | ----------------------------------- |
|
||||
| `authService` | `registerAndLoginUser` | Create user + profile + preferences |
|
||||
| `userService` | `updateUserWithProfile` | Update user + profile atomically |
|
||||
| `flyerPersistenceService` | `saveFlyer` | Create flyer + items + metadata |
|
||||
| `shoppingService` | `createListWithItems` | Create list + initial items |
|
||||
| `gamificationService` | `awardAchievement` | Create achievement + update points |
|
||||
|
||||
### Connection Pool Configuration
|
||||
|
||||
```typescript
|
||||
const poolConfig: PoolConfig = {
|
||||
max: 20, // Max clients in pool
|
||||
idleTimeoutMillis: 30000, // Close idle clients after 30s
|
||||
connectionTimeoutMillis: 2000, // Fail connect after 2s
|
||||
};
|
||||
```
|
||||
|
||||
### Pool Status Monitoring
|
||||
|
||||
```typescript
|
||||
import { getPoolStatus } from './db/connection.db';
|
||||
|
||||
const status = getPoolStatus();
|
||||
// { totalCount: 20, idleCount: 15, waitingCount: 0 }
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/db/connection.db.ts` - `getPool()`, `withTransaction()`, `getPoolStatus()`
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-001](./0001-standardized-error-handling.md) - Error handling within transactions
|
||||
- [ADR-034](./0034-repository-pattern-standards.md) - Repository patterns for transaction participation
|
||||
|
||||
@@ -79,3 +79,140 @@ router.get('/:id', validateRequest(getFlyerSchema), async (req, res, next) => {
|
||||
**New Dependency**: Introduces `zod` as a new project dependency.
|
||||
**Learning Curve**: Developers need to learn the `zod` schema definition syntax.
|
||||
**Refactoring Effort**: Requires a one-time effort to create schemas and refactor all existing routes to use the `validateRequest` middleware.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### The `validateRequest` Middleware
|
||||
|
||||
Located in `src/middleware/validation.middleware.ts`:
|
||||
|
||||
```typescript
|
||||
export const validateRequest =
|
||||
(schema: ZodObject<z.ZodRawShape>) => async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const { params, query, body } = await schema.parseAsync({
|
||||
params: req.params,
|
||||
query: req.query,
|
||||
body: req.body,
|
||||
});
|
||||
|
||||
// Merge parsed data back into request
|
||||
Object.keys(req.params).forEach((key) => delete req.params[key]);
|
||||
Object.assign(req.params, params);
|
||||
Object.keys(req.query).forEach((key) => delete req.query[key]);
|
||||
Object.assign(req.query, query);
|
||||
req.body = body;
|
||||
|
||||
return next();
|
||||
} catch (error) {
|
||||
if (error instanceof ZodError) {
|
||||
const validationIssues = error.issues.map((issue) => ({
|
||||
...issue,
|
||||
path: issue.path.map((p) => String(p)),
|
||||
}));
|
||||
return next(new ValidationError(validationIssues));
|
||||
}
|
||||
return next(error);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Common Zod Patterns
|
||||
|
||||
```typescript
|
||||
import { z } from 'zod';
|
||||
import { requiredString } from '../utils/zodUtils';
|
||||
|
||||
// String that coerces to positive integer (for ID params)
|
||||
const idParam = z.string().pipe(z.coerce.number().int().positive());
|
||||
|
||||
// Pagination query params with defaults
|
||||
const paginationQuery = z.object({
|
||||
limit: z.coerce.number().int().positive().max(100).default(20),
|
||||
offset: z.coerce.number().int().nonnegative().default(0),
|
||||
});
|
||||
|
||||
// Email with sanitization
|
||||
const emailSchema = z.string().trim().toLowerCase().email('A valid email is required.');
|
||||
|
||||
// Password with strength validation
|
||||
const passwordSchema = z
|
||||
.string()
|
||||
.trim()
|
||||
.min(8, 'Password must be at least 8 characters long.')
|
||||
.superRefine((password, ctx) => {
|
||||
const strength = validatePasswordStrength(password);
|
||||
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
|
||||
});
|
||||
|
||||
// Optional string that converts empty string to undefined
|
||||
const optionalString = z.preprocess(
|
||||
(val) => (val === '' ? undefined : val),
|
||||
z.string().trim().optional(),
|
||||
);
|
||||
```
|
||||
|
||||
### Routes Using `validateRequest`
|
||||
|
||||
All API routes use the validation middleware:
|
||||
|
||||
| Router | Schemas Defined | Validated Endpoints |
|
||||
| ------------------------ | --------------- | -------------------------------------------------------------------------------- |
|
||||
| `auth.routes.ts` | 5 | `/register`, `/login`, `/forgot-password`, `/reset-password`, `/change-password` |
|
||||
| `user.routes.ts` | 4 | `/profile`, `/address`, `/preferences`, `/notifications` |
|
||||
| `flyer.routes.ts` | 6 | `GET /:id`, `GET /`, `GET /:id/items`, `DELETE /:id` |
|
||||
| `budget.routes.ts` | 5 | `/`, `/:id`, `/batch`, `/categories` |
|
||||
| `recipe.routes.ts` | 4 | `GET /`, `GET /:id`, `POST /`, `PATCH /:id` |
|
||||
| `admin.routes.ts` | 8 | Various admin endpoints |
|
||||
| `ai.routes.ts` | 3 | `/upload-and-process`, `/analyze`, `/jobs/:jobId/status` |
|
||||
| `gamification.routes.ts` | 3 | `/achievements`, `/leaderboard`, `/points` |
|
||||
|
||||
### Validation Error Response Format
|
||||
|
||||
When validation fails, the `errorHandler` returns:
|
||||
|
||||
```json
|
||||
{
|
||||
"message": "The request data is invalid.",
|
||||
"errors": [
|
||||
{
|
||||
"path": ["body", "email"],
|
||||
"message": "A valid email is required."
|
||||
},
|
||||
{
|
||||
"path": ["body", "password"],
|
||||
"message": "Password must be at least 8 characters long."
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
HTTP Status: `400 Bad Request`
|
||||
|
||||
### Zod Utility Functions
|
||||
|
||||
Located in `src/utils/zodUtils.ts`:
|
||||
|
||||
```typescript
|
||||
// String that rejects empty strings
|
||||
export const requiredString = (message?: string) =>
|
||||
z.string().min(1, message || 'This field is required.');
|
||||
|
||||
// Number from string with validation
|
||||
export const numericString = z.string().pipe(z.coerce.number());
|
||||
|
||||
// Boolean from string ('true'/'false')
|
||||
export const booleanString = z.enum(['true', 'false']).transform((v) => v === 'true');
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/middleware/validation.middleware.ts` - The `validateRequest` middleware
|
||||
- `src/services/db/errors.db.ts` - `ValidationError` class definition
|
||||
- `src/middleware/errorHandler.ts` - Error formatting for validation errors
|
||||
- `src/utils/zodUtils.ts` - Reusable Zod schema utilities
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-001](./0001-standardized-error-handling.md) - Error handling for validation errors
|
||||
- [ADR-032](./0032-rate-limiting-strategy.md) - Rate limiting applied alongside validation
|
||||
|
||||
@@ -86,3 +86,219 @@ router.get('/:id', async (req, res, next) => {
|
||||
|
||||
**Refactoring Effort**: Requires adding the `requestLogger` middleware and refactoring all routes and services to use `req.log` instead of the global `logger`.
|
||||
**Slight Performance Overhead**: Creating a child logger for every request adds a minor performance cost, though this is negligible for most modern logging libraries.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Logger Configuration
|
||||
|
||||
Located in `src/services/logger.server.ts`:
|
||||
|
||||
```typescript
|
||||
import pino from 'pino';
|
||||
|
||||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
const isTest = process.env.NODE_ENV === 'test';
|
||||
|
||||
export const logger = pino({
|
||||
level: isProduction ? 'info' : 'debug',
|
||||
transport:
|
||||
isProduction || isTest
|
||||
? undefined
|
||||
: {
|
||||
target: 'pino-pretty',
|
||||
options: {
|
||||
colorize: true,
|
||||
translateTime: 'SYS:standard',
|
||||
ignore: 'pid,hostname',
|
||||
},
|
||||
},
|
||||
redact: {
|
||||
paths: [
|
||||
'req.headers.authorization',
|
||||
'req.headers.cookie',
|
||||
'*.body.password',
|
||||
'*.body.newPassword',
|
||||
'*.body.currentPassword',
|
||||
'*.body.confirmPassword',
|
||||
'*.body.refreshToken',
|
||||
'*.body.token',
|
||||
],
|
||||
censor: '[REDACTED]',
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Request Logger Middleware
|
||||
|
||||
Located in `server.ts`:
|
||||
|
||||
```typescript
|
||||
const requestLogger = (req: Request, res: Response, next: NextFunction) => {
|
||||
const requestId = randomUUID();
|
||||
const user = req.user as UserProfile | undefined;
|
||||
const start = process.hrtime();
|
||||
|
||||
// Create request-scoped logger
|
||||
req.log = logger.child({
|
||||
request_id: requestId,
|
||||
user_id: user?.user.user_id,
|
||||
ip_address: req.ip,
|
||||
});
|
||||
|
||||
req.log.debug({ method: req.method, originalUrl: req.originalUrl }, 'INCOMING');
|
||||
|
||||
res.on('finish', () => {
|
||||
const duration = getDurationInMilliseconds(start);
|
||||
const { statusCode, statusMessage } = res;
|
||||
const logDetails = {
|
||||
user_id: (req.user as UserProfile | undefined)?.user.user_id,
|
||||
method: req.method,
|
||||
originalUrl: req.originalUrl,
|
||||
statusCode,
|
||||
statusMessage,
|
||||
duration: duration.toFixed(2),
|
||||
};
|
||||
|
||||
// Include request details for failed requests (for debugging)
|
||||
if (statusCode >= 400) {
|
||||
logDetails.req = { headers: req.headers, body: req.body };
|
||||
}
|
||||
|
||||
if (statusCode >= 500) req.log.error(logDetails, 'Request completed with server error');
|
||||
else if (statusCode >= 400) req.log.warn(logDetails, 'Request completed with client error');
|
||||
else req.log.info(logDetails, 'Request completed successfully');
|
||||
});
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
app.use(requestLogger);
|
||||
```
|
||||
|
||||
### TypeScript Support
|
||||
|
||||
The `req.log` property is typed via declaration merging in `src/types/express.d.ts`:
|
||||
|
||||
```typescript
|
||||
import { Logger } from 'pino';
|
||||
|
||||
declare global {
|
||||
namespace Express {
|
||||
export interface Request {
|
||||
log: Logger;
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Automatic Sensitive Data Redaction
|
||||
|
||||
The Pino logger automatically redacts sensitive fields:
|
||||
|
||||
```json
|
||||
// Before redaction
|
||||
{
|
||||
"body": {
|
||||
"email": "user@example.com",
|
||||
"password": "secret123",
|
||||
"newPassword": "newsecret456"
|
||||
}
|
||||
}
|
||||
|
||||
// After redaction (in logs)
|
||||
{
|
||||
"body": {
|
||||
"email": "user@example.com",
|
||||
"password": "[REDACTED]",
|
||||
"newPassword": "[REDACTED]"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Log Levels by Scenario
|
||||
|
||||
| Level | HTTP Status | Scenario |
|
||||
| ----- | ----------- | -------------------------------------------------- |
|
||||
| DEBUG | Any | Request incoming, internal state, development info |
|
||||
| INFO | 2xx | Successful requests, business events |
|
||||
| WARN | 4xx | Client errors, validation failures, not found |
|
||||
| ERROR | 5xx | Server errors, unhandled exceptions |
|
||||
|
||||
### Service Layer Logging
|
||||
|
||||
Services accept the request-scoped logger as an optional parameter:
|
||||
|
||||
```typescript
|
||||
export async function registerUser(email: string, password: string, reqLog?: Logger) {
|
||||
const log = reqLog || logger; // Fall back to global logger
|
||||
|
||||
log.info({ email }, 'Registering new user');
|
||||
// ... implementation
|
||||
|
||||
log.debug({ userId: user.user_id }, 'User created successfully');
|
||||
return user;
|
||||
}
|
||||
|
||||
// In route handler
|
||||
router.post('/register', async (req, res, next) => {
|
||||
await authService.registerUser(req.body.email, req.body.password, req.log);
|
||||
});
|
||||
```
|
||||
|
||||
### Log Output Format
|
||||
|
||||
**Development** (pino-pretty):
|
||||
|
||||
```text
|
||||
[2026-01-09 12:34:56.789] INFO (request_id=abc123): Request completed successfully
|
||||
method: "GET"
|
||||
originalUrl: "/api/flyers"
|
||||
statusCode: 200
|
||||
duration: "45.23"
|
||||
```
|
||||
|
||||
**Production** (JSON):
|
||||
|
||||
```json
|
||||
{
|
||||
"level": 30,
|
||||
"time": 1704812096789,
|
||||
"request_id": "abc123",
|
||||
"user_id": "user_456",
|
||||
"ip_address": "192.168.1.1",
|
||||
"method": "GET",
|
||||
"originalUrl": "/api/flyers",
|
||||
"statusCode": 200,
|
||||
"duration": "45.23",
|
||||
"msg": "Request completed successfully"
|
||||
}
|
||||
```
|
||||
|
||||
### Routes Using `req.log`
|
||||
|
||||
All route files have been migrated to use the request-scoped logger:
|
||||
|
||||
- `src/routes/auth.routes.ts`
|
||||
- `src/routes/user.routes.ts`
|
||||
- `src/routes/flyer.routes.ts`
|
||||
- `src/routes/ai.routes.ts`
|
||||
- `src/routes/admin.routes.ts`
|
||||
- `src/routes/budget.routes.ts`
|
||||
- `src/routes/recipe.routes.ts`
|
||||
- `src/routes/gamification.routes.ts`
|
||||
- `src/routes/personalization.routes.ts`
|
||||
- `src/routes/stats.routes.ts`
|
||||
- `src/routes/health.routes.ts`
|
||||
- `src/routes/system.routes.ts`
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/logger.server.ts` - Pino logger configuration
|
||||
- `src/services/logger.client.ts` - Client-side logger (for frontend)
|
||||
- `src/types/express.d.ts` - TypeScript declaration for `req.log`
|
||||
- `server.ts` - Request logger middleware
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-001](./0001-standardized-error-handling.md) - Error handler uses `req.log` for error logging
|
||||
- [ADR-026](./0026-standardized-client-side-structured-logging.md) - Client-side logging strategy
|
||||
|
||||
**Date**: 2025-12-12

**Status**: Implemented

**Implemented**: 2026-01-09

## Context

The project is currently run using `pm2`, and the `README.md` contains manual setup instructions. While functional, this lacks the portability, scalability, and consistency of modern deployment practices. Local development environments also suffered from inconsistency issues.

## Decision

We will standardize the deployment process using a hybrid approach:

1. **PM2 for Production**: Use PM2 cluster mode for process management, load balancing, and zero-downtime reloads.
2. **Docker/Podman for Development**: Provide a complete containerized development environment with automatic initialization.
3. **VS Code Dev Containers**: Enable one-click development environment setup.
4. **Gitea Actions for CI/CD**: Automated deployment pipelines handle builds and deployments.

## Consequences

- **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers to a single "Reopen in Container" action. Improves portability and scalability of the application.
- **Negative**: Requires Docker/Podman installation. Container builds take time on first setup.

## Implementation Details
|
||||
|
||||
### Quick Start (Development)
|
||||
|
||||
```bash
|
||||
# Prerequisites:
|
||||
# - Docker Desktop or Podman installed
|
||||
# - VS Code with "Dev Containers" extension
|
||||
|
||||
# Option 1: VS Code Dev Containers (Recommended)
|
||||
# 1. Open project in VS Code
|
||||
# 2. Click "Reopen in Container" when prompted
|
||||
# 3. Wait for initialization to complete
|
||||
# 4. Development server starts automatically
|
||||
|
||||
# Option 2: Manual Docker Compose
|
||||
podman-compose -f compose.dev.yml up -d
|
||||
podman exec -it flyer-crawler-dev bash
|
||||
./scripts/docker-init.sh
|
||||
npm run dev:container
|
||||
```
|
||||
|
||||
### Container Services Architecture
|
||||
|
||||
```text
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ Development Environment │
|
||||
├─────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
|
||||
│ │ app │ │ postgres │ │ redis │ │
|
||||
│ │ (Node.js) │───▶│ (PostGIS) │ │ (Cache) │ │
|
||||
│ │ │───▶│ │ │ │ │
|
||||
│ └─────────────┘ └─────────────┘ └─────────────┘ │
|
||||
│ :3000/:3001 :5432 :6379 │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### compose.dev.yml Services
|
||||
|
||||
| Service | Image | Purpose | Healthcheck |
|
||||
| ---------- | ----------------------- | ---------------------- | ---------------- |
|
||||
| `app` | Custom (Dockerfile.dev) | Node.js application | HTTP /api/health |
|
||||
| `postgres` | postgis/postgis:15-3.4 | Database with PostGIS | pg_isready |
|
||||
| `redis` | redis:alpine | Caching and job queues | redis-cli ping |
|
||||
|
||||
### Automatic Initialization
|
||||
|
||||
The container initialization script (`scripts/docker-init.sh`) performs:
|
||||
|
||||
1. **npm install** - Installs dependencies into isolated volume
|
||||
2. **Wait for PostgreSQL** - Polls until database is ready
|
||||
3. **Wait for Redis** - Polls until Redis is responding
|
||||
4. **Schema Check** - Detects if database needs initialization
|
||||
5. **Database Setup** - Runs `npm run db:reset:dev` if needed (schema + seed data)
|
||||
|
||||
### Development Dockerfile
|
||||
|
||||
Located in `Dockerfile.dev`:
|
||||
|
||||
```dockerfile
|
||||
FROM ubuntu:22.04
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Install Node.js 20.x LTS + database clients
|
||||
RUN apt-get update && apt-get install -y \
|
||||
curl git build-essential python3 \
|
||||
postgresql-client redis-tools \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
||||
&& apt-get install -y nodejs
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
ENV NODE_ENV=development
|
||||
ENV NODE_OPTIONS='--max-old-space-size=8192'
|
||||
|
||||
CMD ["bash"]
|
||||
```
|
||||
|
||||
### Environment Configuration
|
||||
|
||||
Copy `.env.example` to `.env` for local overrides (optional for containers):
|
||||
|
||||
```bash
|
||||
# Container defaults (set in compose.dev.yml)
|
||||
DB_HOST=postgres # Use Docker service name, not IP
|
||||
DB_PORT=5432
|
||||
DB_USER=postgres
|
||||
DB_PASSWORD=postgres
|
||||
DB_NAME=flyer_crawler_dev
|
||||
REDIS_URL=redis://redis:6379
|
||||
```
|
||||
|
||||
### VS Code Dev Container Configuration
|
||||
|
||||
Located in `.devcontainer/devcontainer.json`:
|
||||
|
||||
| Lifecycle Hook | Timing | Action |
|
||||
| ------------------- | ----------------- | ------------------------------ |
|
||||
| `initializeCommand` | Before container | Start Podman machine (Windows) |
|
||||
| `postCreateCommand` | Container created | Run `docker-init.sh` |
|
||||
| `postAttachCommand` | VS Code attached | Start dev server |
|
||||
|
||||
### Default Test Accounts
|
||||
|
||||
After initialization, these accounts are available:
|
||||
|
||||
| Role | Email | Password |
|
||||
| ----- | ------------------- | --------- |
|
||||
| Admin | `admin@example.com` | adminpass |
|
||||
| User | `user@example.com` | userpass |
|
||||
|
||||
---
|
||||
|
||||
## Production Deployment (PM2)
|
||||
|
||||
### PM2 Ecosystem Configuration
|
||||
|
||||
Located in `ecosystem.config.cjs`:
|
||||
|
||||
```javascript
|
||||
module.exports = {
|
||||
apps: [
|
||||
{
|
||||
// API Server - Cluster mode for load balancing
|
||||
name: 'flyer-crawler-api',
|
||||
script: './node_modules/.bin/tsx',
|
||||
args: 'server.ts',
|
||||
max_memory_restart: '500M',
|
||||
instances: 'max', // Use all CPU cores
|
||||
exec_mode: 'cluster', // Enable cluster mode
|
||||
kill_timeout: 5000, // Graceful shutdown timeout
|
||||
|
||||
// Restart configuration
|
||||
max_restarts: 40,
|
||||
exp_backoff_restart_delay: 100,
|
||||
min_uptime: '10s',
|
||||
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
cwd: '/var/www/flyer-crawler.projectium.com',
|
||||
},
|
||||
env_test: {
|
||||
NODE_ENV: 'test',
|
||||
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
||||
},
|
||||
},
|
||||
{
|
||||
// Background Worker - Single instance
|
||||
name: 'flyer-crawler-worker',
|
||||
script: './node_modules/.bin/tsx',
|
||||
args: 'src/services/worker.ts',
|
||||
max_memory_restart: '1G',
|
||||
kill_timeout: 10000, // Workers need more time for jobs
|
||||
// ... similar config
|
||||
},
|
||||
],
|
||||
};
|
||||
```
|
||||
|
||||
### Deployment Directory Structure
|
||||
|
||||
```text
|
||||
/var/www/
|
||||
├── flyer-crawler.projectium.com/ # Production
|
||||
│ ├── server.ts
|
||||
│ ├── ecosystem.config.cjs
|
||||
│ ├── package.json
|
||||
│ ├── flyer-images/
|
||||
│ │ ├── icons/
|
||||
│ │ └── archive/
|
||||
│ └── ...
|
||||
└── flyer-crawler-test.projectium.com/ # Test environment
|
||||
└── ... (same structure)
|
||||
```
|
||||
|
||||
### Environment-Specific Configuration
|
||||
|
||||
| Environment | Port | Redis DB | PM2 Process Suffix |
|
||||
| ----------- | ---- | -------- | ------------------ |
|
||||
| Production | 3000 | 0 | (none) |
|
||||
| Test | 3001 | 1 | `-test` |
|
||||
| Development | 3000 | 0 | `-dev` |
|
||||
|
||||
### PM2 Commands Reference
|
||||
|
||||
```bash
|
||||
# Start/reload with environment
|
||||
pm2 startOrReload ecosystem.config.cjs --env production --update-env
|
||||
|
||||
# Save process list for startup
|
||||
pm2 save
|
||||
|
||||
# View logs
|
||||
pm2 logs flyer-crawler-api --lines 50
|
||||
|
||||
# Monitor processes
|
||||
pm2 monit
|
||||
|
||||
# List all processes
|
||||
pm2 list
|
||||
|
||||
# Describe process details
|
||||
pm2 describe flyer-crawler-api
|
||||
```
|
||||
|
||||
### Resource Limits
|
||||
|
||||
| Process | Memory Limit | Restart Delay | Kill Timeout |
|
||||
| ---------------- | ------------ | ------------------------ | ------------ |
|
||||
| API Server | 500MB | Exponential (100ms base) | 5s |
|
||||
| Worker | 1GB | Exponential (100ms base) | 10s |
|
||||
| Analytics Worker | 1GB | Exponential (100ms base) | 10s |
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Container Issues
|
||||
|
||||
```bash
|
||||
# Reset everything and start fresh
|
||||
podman-compose -f compose.dev.yml down -v
|
||||
podman-compose -f compose.dev.yml up -d --build
|
||||
|
||||
# View container logs
|
||||
podman-compose -f compose.dev.yml logs -f app
|
||||
|
||||
# Connect to database manually
|
||||
podman exec -it flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev
|
||||
|
||||
# Rebuild just the app container
|
||||
podman-compose -f compose.dev.yml build app
|
||||
```
|
||||
|
||||
### Common Issues
|
||||
|
||||
| Issue | Solution |
|
||||
| ------------------------ | --------------------------------------------------------------- |
|
||||
| "Database not ready" | Wait for postgres healthcheck, or run `docker-init.sh` manually |
|
||||
| "node_modules not found" | Run `npm install` inside container |
|
||||
| "Permission denied" | Ensure scripts have execute permission: `chmod +x scripts/*.sh` |
|
||||
| "Network unreachable" | Use service names (postgres, redis) not IPs |
|
||||
|
||||
## Key Files
|
||||
|
||||
- `compose.dev.yml` - Docker Compose configuration
|
||||
- `Dockerfile.dev` - Development container definition
|
||||
- `.devcontainer/devcontainer.json` - VS Code Dev Container config
|
||||
- `scripts/docker-init.sh` - Container initialization script
|
||||
- `.env.example` - Environment variable template
|
||||
- `ecosystem.config.cjs` - PM2 production configuration
|
||||
- `.gitea/workflows/deploy-to-prod.yml` - Production deployment pipeline
|
||||
- `.gitea/workflows/deploy-to-test.yml` - Test deployment pipeline
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-017](./0017-ci-cd-and-branching-strategy.md) - CI/CD Strategy
|
||||
- [ADR-038](./0038-graceful-shutdown-pattern.md) - Graceful Shutdown Pattern
|
||||
|
||||
**Date**: 2025-12-12

**Status**: Accepted

**Implemented**: 2026-01-09

## Context
|
||||
|
||||
The project has Gitea workflows but lacks a documented standard for how code moves from development to production.

## Decision

We will formalize the end-to-end CI/CD process using:

1. **Trunk-Based Development**: All work is merged to `main` branch.
2. **Automated Test Deployment**: Every push to `main` triggers deployment to test environment.
3. **Manual Production Deployment**: Production deployments require explicit confirmation.
4. **Semantic Versioning**: Automated version bumping on deployments.

## Consequences
|
||||
|
||||
- **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
|
||||
- **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Branching Strategy
|
||||
|
||||
**Trunk-Based Development**:
|
||||
|
||||
```text
|
||||
main ─────●─────●─────●─────●─────●─────▶
|
||||
│ │ │ │ │
|
||||
│ │ │ │ └── Deploy to Prod (manual)
|
||||
│ │ │ └── v0.9.70 (patch bump)
|
||||
│ │ └── Deploy to Test (auto)
|
||||
│ └── v0.9.69 (patch bump)
|
||||
└── Feature complete
|
||||
```
|
||||
|
||||
- All development happens on `main` branch
|
||||
- Feature branches are short-lived (< 1 day)
|
||||
- Every merge to `main` triggers test deployment
|
||||
- Production deploys are manual with confirmation
|
||||
|
||||
### Pipeline Stages
|
||||
|
||||
**Deploy to Test** (Automatic on push to `main`):
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
deploy-to-test:
|
||||
steps:
|
||||
- Checkout code
|
||||
- Setup Node.js 20
|
||||
- Install dependencies (npm ci)
|
||||
- Bump patch version (npm version patch)
|
||||
- TypeScript type-check
|
||||
- Prettier check
|
||||
- ESLint check
|
||||
- Run unit tests with coverage
|
||||
- Run integration tests with coverage
|
||||
- Run E2E tests with coverage
|
||||
- Merge coverage reports
|
||||
- Check database schema hash
|
||||
- Build React application
|
||||
- Deploy to test server (rsync)
|
||||
- Install production dependencies
|
||||
- Reload PM2 processes
|
||||
- Update schema hash in database
|
||||
```
|
||||
|
||||
**Deploy to Production** (Manual trigger):
|
||||
|
||||
```yaml
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
confirmation:
|
||||
description: 'Type "deploy-to-prod" to confirm'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
deploy-production:
|
||||
steps:
|
||||
- Verify confirmation phrase
|
||||
- Checkout main branch
|
||||
- Install dependencies
|
||||
- Bump minor version (npm version minor)
|
||||
- Check production schema hash
|
||||
- Build React application
|
||||
- Deploy to production server
|
||||
- Reload PM2 processes
|
||||
- Update schema hash
|
||||
```
|
||||
|
||||
### Version Bumping Strategy
|
||||
|
||||
| Trigger | Version Change | Example |
|
||||
| -------------------------- | -------------- | --------------- |
|
||||
| Push to main (test deploy) | Patch bump | 0.9.69 → 0.9.70 |
|
||||
| Production deploy | Minor bump | 0.9.70 → 0.10.0 |
|
||||
| Major release | Manual | 0.10.0 → 1.0.0 |
|
||||
|
||||
**Commit Message Format**:
|
||||
|
||||
```text
|
||||
ci: Bump version to 0.9.70 [skip ci]
|
||||
```
|
||||
|
||||
The `[skip ci]` tag prevents version bump commits from triggering another workflow.
|
||||
|
||||
### Database Schema Management
|
||||
|
||||
Schema changes are tracked via SHA-256 hash:
|
||||
|
||||
```sql
|
||||
CREATE TABLE public.schema_info (
|
||||
environment VARCHAR(50) PRIMARY KEY,
|
||||
schema_hash VARCHAR(64) NOT NULL,
|
||||
deployed_at TIMESTAMP DEFAULT NOW()
|
||||
);
|
||||
```
|
||||
|
||||
**Deployment Checks**:
|
||||
|
||||
1. Calculate hash of `sql/master_schema_rollup.sql`
|
||||
2. Compare with hash in target database
|
||||
3. If mismatch: **FAIL** deployment (manual migration required)
|
||||
4. If match: Continue deployment
|
||||
5. After deploy: Update hash in database
|
||||
|
||||
### Quality Gates
|
||||
|
||||
| Check | Required | Blocking |
|
||||
| --------------------- | -------- | ---------------------- |
|
||||
| TypeScript type-check | ✅ | No (continue-on-error) |
|
||||
| Prettier formatting | ✅ | No |
|
||||
| ESLint | ✅ | No |
|
||||
| Unit tests | ✅ | No |
|
||||
| Integration tests | ✅ | No |
|
||||
| E2E tests | ✅ | No |
|
||||
| Schema hash check | ✅ | **Yes** |
|
||||
| Build | ✅ | **Yes** |
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Secrets are injected from Gitea repository settings:
|
||||
|
||||
| Secret | Test | Production |
|
||||
| -------------------------------------------------------------- | ------------------ | ------------- |
|
||||
| `DB_DATABASE_TEST` / `DB_DATABASE_PROD` | flyer-crawler-test | flyer-crawler |
|
||||
| `REDIS_PASSWORD_TEST` / `REDIS_PASSWORD_PROD` | \*\*\* | \*\*\* |
|
||||
| `VITE_GOOGLE_GENAI_API_KEY_TEST` / `VITE_GOOGLE_GENAI_API_KEY` | \*\*\* | \*\*\* |
|
||||
|
||||
### Coverage Reporting
|
||||
|
||||
Coverage reports are generated and published:
|
||||
|
||||
```text
|
||||
https://flyer-crawler-test.projectium.com/coverage/
|
||||
```
|
||||
|
||||
Coverage merging combines:
|
||||
|
||||
- Unit test coverage (Vitest)
|
||||
- Integration test coverage (Vitest)
|
||||
- E2E test coverage (Vitest)
|
||||
- Server V8 coverage (c8)
|
||||
|
||||
### Gitea Workflows
|
||||
|
||||
| Workflow | Trigger | Purpose |
|
||||
| ----------------------------- | ------------ | ------------------------- |
|
||||
| `deploy-to-test.yml` | Push to main | Automated test deployment |
|
||||
| `deploy-to-prod.yml` | Manual | Production deployment |
|
||||
| `manual-db-backup.yml` | Manual | Create database backup |
|
||||
| `manual-db-restore.yml` | Manual | Restore from backup |
|
||||
| `manual-db-reset-test.yml` | Manual | Reset test database |
|
||||
| `manual-db-reset-prod.yml` | Manual | Reset production database |
|
||||
| `manual-deploy-major.yml` | Manual | Major version release |
|
||||
| `manual-redis-flush-prod.yml` | Manual | Flush Redis cache |
|
||||
|
||||
## Key Files
|
||||
|
||||
- `.gitea/workflows/deploy-to-test.yml` - Test deployment pipeline
|
||||
- `.gitea/workflows/deploy-to-prod.yml` - Production deployment pipeline
|
||||
- `.gitea/workflows/manual-db-backup.yml` - Database backup workflow
|
||||
- `ecosystem.config.cjs` - PM2 configuration
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-014](./0014-containerization-and-deployment-strategy.md) - Containerization Strategy
|
||||
- [ADR-010](./0010-testing-strategy-and-standards.md) - Testing Strategy
|
||||
- [ADR-019](./0019-data-backup-and-recovery-strategy.md) - Backup Strategy
|
||||
|
||||
**Date**: 2025-12-12

**Status**: Accepted

**Implemented**: 2026-01-09

## Context
|
||||
|
||||
@@ -16,3 +18,210 @@ We will implement a formal data backup and recovery strategy. This will involve
|
||||
|
||||
- **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
|
||||
- **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Backup Workflow
|
||||
|
||||
Located in `.gitea/workflows/manual-db-backup.yml`:
|
||||
|
||||
```yaml
|
||||
name: Manual - Backup Production Database
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
confirmation:
|
||||
description: 'Type "backup-production-db" to confirm'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
backup-database:
|
||||
runs-on: projectium.com
|
||||
|
||||
env:
|
||||
DB_HOST: ${{ secrets.DB_HOST }}
|
||||
DB_PORT: ${{ secrets.DB_PORT }}
|
||||
DB_USER: ${{ secrets.DB_USER }}
|
||||
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||
DB_NAME: ${{ secrets.DB_NAME_PROD }}
|
||||
|
||||
steps:
|
||||
- name: Validate Secrets
|
||||
run: |
|
||||
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ]; then
|
||||
echo "ERROR: Database secrets not configured."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Create Database Backup
|
||||
run: |
|
||||
TIMESTAMP=$(date +'%Y%m%d-%H%M%S')
|
||||
BACKUP_FILENAME="flyer-crawler-prod-backup-${TIMESTAMP}.sql.gz"
|
||||
|
||||
# Create compressed backup
|
||||
PGPASSWORD="$DB_PASSWORD" pg_dump \
|
||||
-h "$DB_HOST" -p "$DB_PORT" \
|
||||
-U "$DB_USER" -d "$DB_NAME" \
|
||||
--clean --if-exists | gzip > "$BACKUP_FILENAME"
|
||||
|
||||
echo "backup_filename=$BACKUP_FILENAME" >> $GITEA_ENV
|
||||
|
||||
- name: Upload Backup as Artifact
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: database-backup
|
||||
path: ${{ env.backup_filename }}
|
||||
```
|
||||
|
||||
### Restore Workflow
|
||||
|
||||
Located in `.gitea/workflows/manual-db-restore.yml`:
|
||||
|
||||
```yaml
|
||||
name: Manual - Restore Database from Backup
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
confirmation:
|
||||
description: 'Type "restore-from-backup" to confirm'
|
||||
required: true
|
||||
backup_file:
|
||||
description: 'Path to backup file on server'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
restore-database:
|
||||
steps:
|
||||
- name: Verify Confirmation
|
||||
run: |
|
||||
if [ "${{ inputs.confirmation }}" != "restore-from-backup" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Restore Database
|
||||
run: |
|
||||
# Decompress and restore
|
||||
gunzip -c "${{ inputs.backup_file }}" | \
|
||||
PGPASSWORD="$DB_PASSWORD" psql \
|
||||
-h "$DB_HOST" -p "$DB_PORT" \
|
||||
-U "$DB_USER" -d "$DB_NAME"
|
||||
```
|
||||
|
||||
### Backup Command Reference
|
||||
|
||||
**Manual Backup**:
|
||||
|
||||
```bash
|
||||
# Create compressed backup
|
||||
PGPASSWORD="password" pg_dump \
|
||||
-h localhost -p 5432 \
|
||||
-U dbuser -d flyer-crawler \
|
||||
--clean --if-exists | gzip > backup-$(date +%Y%m%d).sql.gz
|
||||
|
||||
# List backup contents (without restoring)
|
||||
gunzip -c backup-20260109.sql.gz | head -100
|
||||
```
|
||||
|
||||
**Manual Restore**:
|
||||
|
||||
```bash
|
||||
# Restore from compressed backup
|
||||
gunzip -c backup-20260109.sql.gz | \
|
||||
PGPASSWORD="password" psql \
|
||||
-h localhost -p 5432 \
|
||||
-U dbuser -d flyer-crawler
|
||||
```
|
||||
|
||||
### pg_dump Options
|
||||
|
||||
| Option | Purpose |
|
||||
| ----------------- | ------------------------------ |
|
||||
| `--clean` | Drop objects before recreating |
|
||||
| `--if-exists` | Use IF EXISTS when dropping |
|
||||
| `--no-owner` | Skip ownership commands |
|
||||
| `--no-privileges` | Skip access privilege commands |
|
||||
| `-F c` | Custom format (for pg_restore) |
|
||||
| `-F p` | Plain text SQL (default) |
|
||||
|
||||
### Recovery Objectives
|
||||
|
||||
| Metric | Target | Current |
|
||||
| ---------------------------------- | -------- | -------------- |
|
||||
| **RPO** (Recovery Point Objective) | 24 hours | Manual trigger |
|
||||
| **RTO** (Recovery Time Objective) | 1 hour | ~15 minutes |
|
||||
|
||||
### Backup Retention Policy
|
||||
|
||||
| Type | Retention | Storage |
|
||||
| --------------- | --------- | ---------------- |
|
||||
| Daily backups | 7 days | Gitea artifacts |
|
||||
| Weekly backups | 4 weeks | Gitea artifacts |
|
||||
| Monthly backups | 12 months | Off-site storage |
|
||||
|
||||
### Backup Verification
|
||||
|
||||
Periodically test backup integrity:
|
||||
|
||||
```bash
|
||||
# Verify backup can be read
|
||||
gunzip -t backup-20260109.sql.gz
|
||||
|
||||
# Test restore to a temporary database
|
||||
createdb flyer-crawler-restore-test
|
||||
gunzip -c backup-20260109.sql.gz | psql -d flyer-crawler-restore-test
|
||||
# Verify data integrity...
|
||||
dropdb flyer-crawler-restore-test
|
||||
```
|
||||
|
||||
### Disaster Recovery Checklist
|
||||
|
||||
1. **Identify the Issue**
|
||||
- Data corruption?
|
||||
- Accidental deletion?
|
||||
- Full database loss?
|
||||
|
||||
2. **Select Backup**
|
||||
- Find most recent valid backup
|
||||
- Download from Gitea artifacts or off-site storage
|
||||
|
||||
3. **Stop Application**
|
||||
|
||||
```bash
|
||||
pm2 stop all
|
||||
```
|
||||
|
||||
4. **Restore Database**
|
||||
|
||||
```bash
|
||||
gunzip -c backup.sql.gz | psql -d flyer-crawler
|
||||
```
|
||||
|
||||
5. **Verify Data**
|
||||
- Check table row counts
|
||||
- Verify recent data exists
|
||||
- Test critical queries
|
||||
|
||||
6. **Restart Application**
|
||||
|
||||
```bash
|
||||
pm2 start all
|
||||
```
|
||||
|
||||
7. **Post-Mortem**
|
||||
- Document incident
|
||||
- Update procedures if needed
|
||||
|
||||
## Key Files
|
||||
|
||||
- `.gitea/workflows/manual-db-backup.yml` - Backup workflow
|
||||
- `.gitea/workflows/manual-db-restore.yml` - Restore workflow
|
||||
- `.gitea/workflows/manual-db-reset-test.yml` - Reset test database
|
||||
- `.gitea/workflows/manual-db-reset-prod.yml` - Reset production database
|
||||
- `sql/master_schema_rollup.sql` - Current schema definition
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-013](./0013-database-schema-migration-strategy.md) - Schema Migration Strategy
|
||||
- [ADR-017](./0017-ci-cd-and-branching-strategy.md) - CI/CD Strategy
|
||||
|
||||
**Date**: 2026-01-09

**Status**: Implemented

## Context
|
||||
|
||||
@@ -99,16 +99,44 @@ interface ApiErrorResponse {
|
||||
|
||||
### What's Implemented
|
||||
|
||||
- ❌ Not yet implemented
|
||||
- ✅ Created `src/utils/apiResponse.ts` with helper functions (`sendSuccess`, `sendPaginated`, `sendError`, `sendNoContent`, `sendMessage`, `calculatePagination`)
|
||||
- ✅ Created `src/types/api.ts` with response type definitions (`ApiSuccessResponse`, `ApiErrorResponse`, `PaginationMeta`, `ErrorCode`)
|
||||
- ✅ Updated `src/middleware/errorHandler.ts` to use standard error format
|
||||
- ✅ Migrated all route files to use standardized responses:
|
||||
- `health.routes.ts`
|
||||
- `flyer.routes.ts`
|
||||
- `deals.routes.ts`
|
||||
- `budget.routes.ts`
|
||||
- `personalization.routes.ts`
|
||||
- `price.routes.ts`
|
||||
- `reactions.routes.ts`
|
||||
- `stats.routes.ts`
|
||||
- `system.routes.ts`
|
||||
- `gamification.routes.ts`
|
||||
- `recipe.routes.ts`
|
||||
- `auth.routes.ts`
|
||||
- `user.routes.ts`
|
||||
- `admin.routes.ts`
|
||||
- `ai.routes.ts`
|
||||
|
||||
### What Needs To Be Done
|
||||
### Error Codes
|
||||
|
||||
1. Create `src/utils/apiResponse.ts` with helper functions
|
||||
2. Create `src/types/api.ts` with response type definitions
|
||||
3. Update `errorHandler.ts` to use standard error format
|
||||
4. Create migration guide for existing endpoints
|
||||
5. Update 2-3 routes as examples
|
||||
6. Document pattern in this ADR
|
||||
The following error codes are defined in `src/types/api.ts`:
|
||||
|
||||
| Code | HTTP Status | Description |
|
||||
| ------------------------ | ----------- | ----------------------------------- |
|
||||
| `VALIDATION_ERROR` | 400 | Request validation failed |
|
||||
| `BAD_REQUEST` | 400 | Malformed request |
|
||||
| `UNAUTHORIZED` | 401 | Authentication required |
|
||||
| `FORBIDDEN` | 403 | Insufficient permissions |
|
||||
| `NOT_FOUND` | 404 | Resource not found |
|
||||
| `CONFLICT` | 409 | Resource conflict (e.g., duplicate) |
|
||||
| `RATE_LIMITED` | 429 | Too many requests |
|
||||
| `PAYLOAD_TOO_LARGE` | 413 | Request body too large |
|
||||
| `INTERNAL_ERROR` | 500 | Server error |
|
||||
| `NOT_IMPLEMENTED` | 501 | Feature not yet implemented |
|
||||
| `SERVICE_UNAVAILABLE` | 503 | Service temporarily unavailable |
|
||||
| `EXTERNAL_SERVICE_ERROR` | 502 | External service failure |
|
||||
|
||||
## Example Usage
|
||||
|
||||
|
||||
147
docs/adr/0032-rate-limiting-strategy.md
Normal file
147
docs/adr/0032-rate-limiting-strategy.md
Normal file
@@ -0,0 +1,147 @@
|
||||
# ADR-032: Rate Limiting Strategy
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
Public-facing APIs are vulnerable to abuse through excessive requests, whether from malicious actors attempting denial-of-service attacks, automated scrapers, or accidental loops in client code. Without proper rate limiting, the application could:
|
||||
|
||||
1. **Experience degraded performance**: Excessive requests can overwhelm database connections and server resources
|
||||
2. **Incur unexpected costs**: AI service calls (Gemini API) and external APIs (Google Maps) are billed per request
|
||||
3. **Allow credential stuffing**: Login endpoints without limits enable brute-force attacks
|
||||
4. **Suffer from data scraping**: Public endpoints could be scraped at high volume
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a tiered rate limiting strategy using `express-rate-limit` middleware, with different limits based on endpoint sensitivity and resource cost.
|
||||
|
||||
### Tier System
|
||||
|
||||
| Tier | Window | Max Requests | Use Case |
|
||||
| --------------------------- | ------ | ------------ | -------------------------------- |
|
||||
| **Authentication (Strict)** | 15 min | 5 | Login, registration |
|
||||
| **Sensitive Operations** | 1 hour | 5 | Password changes, email updates |
|
||||
| **AI/Costly Operations** | 15 min | 10-20 | Gemini API calls, geocoding |
|
||||
| **File Uploads** | 15 min | 10-20 | Flyer uploads, avatar uploads |
|
||||
| **Batch Operations** | 15 min | 50 | Bulk updates |
|
||||
| **User Read** | 15 min | 100 | Standard authenticated endpoints |
|
||||
| **Public Read** | 15 min | 100 | Public data endpoints |
|
||||
| **Tracking/High-Volume** | 15 min | 150-200 | Analytics, reactions |
|
||||
|
||||
### Rate Limiter Configuration
|
||||
|
||||
All rate limiters share a standard configuration:
|
||||
|
||||
```typescript
|
||||
const standardConfig = {
|
||||
standardHeaders: true, // Return rate limit info in headers
|
||||
legacyHeaders: false, // Disable deprecated X-RateLimit headers
|
||||
skip: shouldSkipRateLimit, // Allow bypassing in test environment
|
||||
};
|
||||
```
|
||||
|
||||
### Test Environment Bypass
|
||||
|
||||
Rate limiting is bypassed during integration and E2E tests to avoid test flakiness:
|
||||
|
||||
```typescript
|
||||
export const shouldSkipRateLimit = (req: Request): boolean => {
|
||||
return process.env.NODE_ENV === 'test';
|
||||
};
|
||||
```
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Available Rate Limiters
|
||||
|
||||
| Limiter | Window | Max | Endpoint Examples |
|
||||
| ---------------------------- | ------ | --- | --------------------------------- |
|
||||
| `loginLimiter` | 15 min | 5 | POST /api/auth/login |
|
||||
| `registerLimiter` | 1 hour | 5 | POST /api/auth/register |
|
||||
| `forgotPasswordLimiter` | 15 min | 5 | POST /api/auth/forgot-password |
|
||||
| `resetPasswordLimiter` | 15 min | 10 | POST /api/auth/reset-password |
|
||||
| `refreshTokenLimiter` | 15 min | 20 | POST /api/auth/refresh |
|
||||
| `logoutLimiter` | 15 min | 10 | POST /api/auth/logout |
|
||||
| `publicReadLimiter` | 15 min | 100 | GET /api/flyers, GET /api/recipes |
|
||||
| `userReadLimiter` | 15 min | 100 | GET /api/users/profile |
|
||||
| `userUpdateLimiter` | 15 min | 100 | PUT /api/users/profile |
|
||||
| `userSensitiveUpdateLimiter` | 1 hour | 5 | PUT /api/auth/change-password |
|
||||
| `adminTriggerLimiter` | 15 min | 30 | POST /api/admin/jobs/\* |
|
||||
| `aiGenerationLimiter` | 15 min | 20 | POST /api/ai/analyze |
|
||||
| `aiUploadLimiter` | 15 min | 10 | POST /api/ai/upload-and-process |
|
||||
| `geocodeLimiter` | 1 hour | 100 | GET /api/users/geocode |
|
||||
| `priceHistoryLimiter` | 15 min | 50 | GET /api/price-history/\* |
|
||||
| `reactionToggleLimiter` | 15 min | 150 | POST /api/reactions/toggle |
|
||||
| `trackingLimiter` | 15 min | 200 | POST /api/personalization/track |
|
||||
| `batchLimiter` | 15 min | 50 | PATCH /api/budgets/batch |
|
||||
|
||||
### Usage Pattern
|
||||
|
||||
```typescript
|
||||
import { loginLimiter, userReadLimiter } from '../config/rateLimiters';
|
||||
|
||||
// Apply to individual routes
|
||||
router.post('/login', loginLimiter, validateRequest(loginSchema), async (req, res, next) => {
|
||||
// handler
|
||||
});
|
||||
|
||||
// Or apply to entire router for consistent limits
|
||||
router.use(userReadLimiter);
|
||||
router.get('/me', async (req, res, next) => {
|
||||
/* handler */
|
||||
});
|
||||
```
|
||||
|
||||
### Response Headers
|
||||
|
||||
When rate limiting is active, responses include standard headers:
|
||||
|
||||
```
|
||||
RateLimit-Limit: 100
|
||||
RateLimit-Remaining: 95
|
||||
RateLimit-Reset: 900
|
||||
```
|
||||
|
||||
### Rate Limit Exceeded Response
|
||||
|
||||
When a client exceeds their limit:
|
||||
|
||||
```json
|
||||
{
|
||||
"message": "Too many login attempts from this IP, please try again after 15 minutes."
|
||||
}
|
||||
```
|
||||
|
||||
HTTP Status: `429 Too Many Requests`
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/config/rateLimiters.ts` - Rate limiter definitions
|
||||
- `src/utils/rateLimit.ts` - Helper functions (test bypass)
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Security**: Protects against brute-force and credential stuffing attacks
|
||||
- **Cost Control**: Prevents runaway costs from AI/external API abuse
|
||||
- **Fair Usage**: Ensures all users get reasonable service access
|
||||
- **DDoS Mitigation**: Provides basic protection against request flooding
|
||||
|
||||
### Negative
|
||||
|
||||
- **Legitimate User Impact**: Aggressive users may hit limits during normal use
|
||||
- **IP-Based Limitations**: Shared IPs (offices, VPNs) may cause false positives
|
||||
- **No Distributed State**: Rate limits are per-instance, not cluster-wide (would need Redis store for that)
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
1. **Redis Store**: Implement distributed rate limiting with Redis for multi-instance deployments
|
||||
2. **User-Based Limits**: Track limits per authenticated user rather than just IP
|
||||
3. **Dynamic Limits**: Adjust limits based on user tier (free vs premium)
|
||||
4. **Monitoring Dashboard**: Track rate limit hits in admin dashboard
|
||||
5. **Allowlisting**: Allow specific IPs (monitoring services) to bypass limits
|
||||
196
docs/adr/0033-file-upload-and-storage-strategy.md
Normal file
196
docs/adr/0033-file-upload-and-storage-strategy.md
Normal file
@@ -0,0 +1,196 @@
|
||||
# ADR-033: File Upload and Storage Strategy
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The application handles file uploads for flyer images and user avatars. Without a consistent strategy, file uploads can introduce security vulnerabilities (path traversal, malicious file types), performance issues (unbounded file sizes), and maintenance challenges (inconsistent storage locations).
|
||||
|
||||
Key concerns:
|
||||
|
||||
1. **Security**: Preventing malicious file uploads, path traversal attacks, and unsafe filenames
|
||||
2. **Storage Organization**: Consistent directory structure for uploaded files
|
||||
3. **Size Limits**: Preventing resource exhaustion from oversized uploads
|
||||
4. **File Type Validation**: Ensuring only expected file types are accepted
|
||||
5. **Cleanup**: Managing temporary and orphaned files
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a centralized file upload strategy using `multer` middleware with custom storage configurations, file type validation, and size limits.
|
||||
|
||||
### Storage Types
|
||||
|
||||
| Type | Directory | Purpose | Size Limit |
|
||||
| -------- | ------------------------------ | ------------------------------ | ---------- |
|
||||
| `flyer` | `$STORAGE_PATH` (configurable) | Flyer images for AI processing | 100MB |
|
||||
| `avatar` | `public/uploads/avatars/` | User profile pictures | 5MB |
|
||||
|
||||
### Filename Strategy
|
||||
|
||||
All uploaded files are renamed to prevent:
|
||||
|
||||
- Path traversal attacks
|
||||
- Filename collisions
|
||||
- Problematic characters in filenames
|
||||
|
||||
**Pattern**: `{fieldname}-{timestamp}-{random}-{sanitized-original}`
|
||||
|
||||
Example: `flyer-1704825600000-829461742-grocery-flyer.jpg`
|
||||
|
||||
### File Type Validation
|
||||
|
||||
Only image files (`image/*` MIME type) are accepted. Non-image uploads are rejected with a structured `ValidationError`.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Multer Configuration Factory
|
||||
|
||||
```typescript
|
||||
import { createUploadMiddleware } from '../middleware/multer.middleware';
|
||||
|
||||
// For flyer uploads (100MB limit)
|
||||
const flyerUpload = createUploadMiddleware({
|
||||
storageType: 'flyer',
|
||||
fileSize: 100 * 1024 * 1024, // 100MB
|
||||
fileFilter: 'image',
|
||||
});
|
||||
|
||||
// For avatar uploads (5MB limit)
|
||||
const avatarUpload = createUploadMiddleware({
|
||||
storageType: 'avatar',
|
||||
fileSize: 5 * 1024 * 1024, // 5MB
|
||||
fileFilter: 'image',
|
||||
});
|
||||
```
|
||||
|
||||
### Storage Configuration
|
||||
|
||||
```typescript
|
||||
// Configurable via environment variable
|
||||
export const flyerStoragePath =
|
||||
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
||||
|
||||
// Relative to project root
|
||||
export const avatarStoragePath = path.join(process.cwd(), 'public', 'uploads', 'avatars');
|
||||
```
|
||||
|
||||
### Filename Sanitization
|
||||
|
||||
The `sanitizeFilename` utility removes dangerous characters:
|
||||
|
||||
```typescript
|
||||
// Removes: path separators, null bytes, special characters
|
||||
// Keeps: alphanumeric, dots, hyphens, underscores
|
||||
const sanitized = sanitizeFilename(file.originalname);
|
||||
```
|
||||
|
||||
### Required File Validation Middleware
|
||||
|
||||
Ensures a file was uploaded before processing:
|
||||
|
||||
```typescript
|
||||
import { requireFileUpload } from '../middleware/fileUpload.middleware';
|
||||
|
||||
router.post(
|
||||
'/upload',
|
||||
flyerUpload.single('flyerImage'),
|
||||
requireFileUpload('flyerImage'), // 400 error if missing
|
||||
handleMulterError,
|
||||
async (req, res) => {
|
||||
// req.file is guaranteed to exist
|
||||
},
|
||||
);
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
```typescript
|
||||
import { handleMulterError } from '../middleware/multer.middleware';
|
||||
|
||||
// Catches multer-specific errors (file too large, etc.)
|
||||
router.use(handleMulterError);
|
||||
```
|
||||
|
||||
### Directory Initialization
|
||||
|
||||
Storage directories are created automatically at application startup:
|
||||
|
||||
```typescript
|
||||
(async () => {
|
||||
await fs.mkdir(flyerStoragePath, { recursive: true });
|
||||
await fs.mkdir(avatarStoragePath, { recursive: true });
|
||||
})();
|
||||
```
|
||||
|
||||
### Test Environment Handling
|
||||
|
||||
In test environments, files use predictable names for easy cleanup:
|
||||
|
||||
```typescript
|
||||
if (process.env.NODE_ENV === 'test') {
|
||||
return cb(null, `test-avatar${path.extname(file.originalname) || '.png'}`);
|
||||
}
|
||||
```
|
||||
|
||||
## Usage Example
|
||||
|
||||
```typescript
|
||||
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
|
||||
import { requireFileUpload } from '../middleware/fileUpload.middleware';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { aiUploadLimiter } from '../config/rateLimiters';
|
||||
|
||||
const flyerUpload = createUploadMiddleware({
|
||||
storageType: 'flyer',
|
||||
fileSize: 100 * 1024 * 1024,
|
||||
fileFilter: 'image',
|
||||
});
|
||||
|
||||
router.post(
|
||||
'/upload-and-process',
|
||||
aiUploadLimiter,
|
||||
validateRequest(uploadSchema),
|
||||
flyerUpload.single('flyerImage'),
|
||||
requireFileUpload('flyerImage'),
|
||||
handleMulterError,
|
||||
async (req, res, next) => {
|
||||
const filePath = req.file!.path;
|
||||
// Process the uploaded file...
|
||||
},
|
||||
);
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/middleware/multer.middleware.ts` - Multer configuration and storage handlers
|
||||
- `src/middleware/fileUpload.middleware.ts` - File requirement validation
|
||||
- `src/utils/stringUtils.ts` - Filename sanitization utilities
|
||||
- `src/utils/fileUtils.ts` - File system utilities (deletion, etc.)
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Security**: Prevents path traversal and malicious uploads through sanitization and validation
|
||||
- **Consistency**: All uploads follow the same patterns and storage organization
|
||||
- **Predictability**: Test environments use predictable filenames for cleanup
|
||||
- **Extensibility**: Factory pattern allows easy addition of new upload types
|
||||
|
||||
### Negative
|
||||
|
||||
- **Disk Storage**: Files stored on disk require backup and cleanup strategies
|
||||
- **Single Server**: Current implementation doesn't support cloud storage (S3, etc.)
|
||||
- **No Virus Scanning**: Files aren't scanned for malware before processing
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
1. **Cloud Storage**: Support for S3/GCS as storage backend
|
||||
2. **Virus Scanning**: Integrate ClamAV or cloud-based scanning
|
||||
3. **Image Optimization**: Automatic resizing/compression before storage
|
||||
4. **CDN Integration**: Serve uploaded files through CDN
|
||||
5. **Cleanup Job**: Scheduled job to remove orphaned/temporary files
|
||||
6. **Presigned URLs**: Direct upload to cloud storage to reduce server load
|
||||
345
docs/adr/0034-repository-pattern-standards.md
Normal file
345
docs/adr/0034-repository-pattern-standards.md
Normal file
@@ -0,0 +1,345 @@
|
||||
# ADR-034: Repository Pattern Standards
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The application uses a repository pattern to abstract database access from business logic. However, without clear standards, repository implementations can diverge in:
|
||||
|
||||
1. **Method naming**: Inconsistent verbs (get vs find vs fetch)
|
||||
2. **Return types**: Some methods return `undefined`, others throw errors
|
||||
3. **Error handling**: Varied approaches to database error handling
|
||||
4. **Transaction participation**: Unclear how methods participate in transactions
|
||||
5. **Logging patterns**: Inconsistent logging context and messages
|
||||
|
||||
This ADR establishes standards for all repository implementations, complementing ADR-001 (Error Handling) and ADR-002 (Transaction Management).
|
||||
|
||||
## Decision
|
||||
|
||||
All repository implementations MUST follow these standards:
|
||||
|
||||
### Method Naming Conventions
|
||||
|
||||
| Prefix | Returns | Behavior on Not Found |
|
||||
| --------- | ---------------------- | ------------------------------------ |
|
||||
| `get*` | Single entity | Throws `NotFoundError` |
|
||||
| `find*` | Entity or `null` | Returns `null` |
|
||||
| `list*` | Array (possibly empty) | Returns `[]` |
|
||||
| `create*` | Created entity | Throws on constraint violation |
|
||||
| `update*` | Updated entity | Throws `NotFoundError` if not exists |
|
||||
| `delete*` | `void` or `boolean` | Throws `NotFoundError` if not exists |
|
||||
| `exists*` | `boolean` | Returns true/false |
|
||||
| `count*` | `number` | Returns count |
|
||||
|
||||
### Error Handling Pattern
|
||||
|
||||
All repository methods MUST use the centralized `handleDbError` function:
|
||||
|
||||
```typescript
|
||||
import { handleDbError, NotFoundError } from './errors.db';
|
||||
|
||||
async getById(id: number): Promise<Entity> {
|
||||
try {
|
||||
const result = await this.pool.query('SELECT * FROM entities WHERE id = $1', [id]);
|
||||
if (result.rows.length === 0) {
|
||||
throw new NotFoundError(`Entity with ID ${id} not found.`);
|
||||
}
|
||||
return result.rows[0];
|
||||
} catch (error) {
|
||||
handleDbError(error, this.logger, 'Database error in getById', { id }, {
|
||||
entityName: 'Entity',
|
||||
defaultMessage: 'Failed to fetch entity.',
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Transaction Participation
|
||||
|
||||
Repository methods that need to participate in transactions MUST accept an optional `PoolClient`:
|
||||
|
||||
```typescript
|
||||
class UserRepository {
|
||||
private pool: Pool;
|
||||
private client?: PoolClient;
|
||||
|
||||
constructor(poolOrClient?: Pool | PoolClient) {
|
||||
if (poolOrClient && 'query' in poolOrClient && !('connect' in poolOrClient)) {
|
||||
// It's a PoolClient (for transactions)
|
||||
this.client = poolOrClient as PoolClient;
|
||||
} else {
|
||||
this.pool = (poolOrClient as Pool) || getPool();
|
||||
}
|
||||
}
|
||||
|
||||
private get queryable() {
|
||||
return this.client || this.pool;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Or using the function-based pattern:
|
||||
|
||||
```typescript
|
||||
async function createUser(userData: CreateUserInput, client?: PoolClient): Promise<User> {
|
||||
const queryable = client || getPool();
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Repository File Structure
|
||||
|
||||
```
|
||||
src/services/db/
|
||||
├── connection.db.ts # Pool management, withTransaction
|
||||
├── errors.db.ts # Custom error types, handleDbError
|
||||
├── index.db.ts # Barrel exports
|
||||
├── user.db.ts # User repository
|
||||
├── user.db.test.ts # User repository tests
|
||||
├── flyer.db.ts # Flyer repository
|
||||
├── flyer.db.test.ts # Flyer repository tests
|
||||
└── ... # Other domain repositories
|
||||
```
|
||||
|
||||
### Standard Repository Template
|
||||
|
||||
```typescript
|
||||
// src/services/db/example.db.ts
|
||||
import { Pool, PoolClient } from 'pg';
|
||||
import { getPool } from './connection.db';
|
||||
import { handleDbError, NotFoundError } from './errors.db';
|
||||
import { logger } from '../logger.server';
|
||||
import type { Example, CreateExampleInput, UpdateExampleInput } from '../../types';
|
||||
|
||||
const log = logger.child({ module: 'example.db' });
|
||||
|
||||
/**
|
||||
* Gets an example by ID.
|
||||
* @throws {NotFoundError} If the example doesn't exist.
|
||||
*/
|
||||
export async function getExampleById(id: number, client?: PoolClient): Promise<Example> {
|
||||
const queryable = client || getPool();
|
||||
try {
|
||||
const result = await queryable.query<Example>('SELECT * FROM examples WHERE id = $1', [id]);
|
||||
if (result.rows.length === 0) {
|
||||
throw new NotFoundError(`Example with ID ${id} not found.`);
|
||||
}
|
||||
return result.rows[0];
|
||||
} catch (error) {
|
||||
handleDbError(
|
||||
error,
|
||||
log,
|
||||
'Database error in getExampleById',
|
||||
{ id },
|
||||
{
|
||||
entityName: 'Example',
|
||||
defaultMessage: 'Failed to fetch example.',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds an example by slug, returns null if not found.
|
||||
*/
|
||||
export async function findExampleBySlug(
|
||||
slug: string,
|
||||
client?: PoolClient,
|
||||
): Promise<Example | null> {
|
||||
const queryable = client || getPool();
|
||||
try {
|
||||
const result = await queryable.query<Example>('SELECT * FROM examples WHERE slug = $1', [slug]);
|
||||
return result.rows[0] || null;
|
||||
} catch (error) {
|
||||
handleDbError(
|
||||
error,
|
||||
log,
|
||||
'Database error in findExampleBySlug',
|
||||
{ slug },
|
||||
{
|
||||
entityName: 'Example',
|
||||
defaultMessage: 'Failed to find example.',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists all examples with optional pagination.
|
||||
*/
|
||||
export async function listExamples(
|
||||
options: { limit?: number; offset?: number } = {},
|
||||
client?: PoolClient,
|
||||
): Promise<Example[]> {
|
||||
const queryable = client || getPool();
|
||||
const { limit = 100, offset = 0 } = options;
|
||||
try {
|
||||
const result = await queryable.query<Example>(
|
||||
'SELECT * FROM examples ORDER BY created_at DESC LIMIT $1 OFFSET $2',
|
||||
[limit, offset],
|
||||
);
|
||||
return result.rows;
|
||||
} catch (error) {
|
||||
handleDbError(
|
||||
error,
|
||||
log,
|
||||
'Database error in listExamples',
|
||||
{ limit, offset },
|
||||
{
|
||||
entityName: 'Example',
|
||||
defaultMessage: 'Failed to list examples.',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new example.
|
||||
* @throws {UniqueConstraintError} If slug already exists.
|
||||
*/
|
||||
export async function createExample(
|
||||
input: CreateExampleInput,
|
||||
client?: PoolClient,
|
||||
): Promise<Example> {
|
||||
const queryable = client || getPool();
|
||||
try {
|
||||
const result = await queryable.query<Example>(
|
||||
`INSERT INTO examples (name, slug, description)
|
||||
VALUES ($1, $2, $3)
|
||||
RETURNING *`,
|
||||
[input.name, input.slug, input.description],
|
||||
);
|
||||
return result.rows[0];
|
||||
} catch (error) {
|
||||
handleDbError(
|
||||
error,
|
||||
log,
|
||||
'Database error in createExample',
|
||||
{ input },
|
||||
{
|
||||
entityName: 'Example',
|
||||
uniqueMessage: 'An example with this slug already exists.',
|
||||
defaultMessage: 'Failed to create example.',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates an existing example.
|
||||
* @throws {NotFoundError} If the example doesn't exist.
|
||||
*/
|
||||
export async function updateExample(
|
||||
id: number,
|
||||
input: UpdateExampleInput,
|
||||
client?: PoolClient,
|
||||
): Promise<Example> {
|
||||
const queryable = client || getPool();
|
||||
try {
|
||||
const result = await queryable.query<Example>(
|
||||
`UPDATE examples
|
||||
SET name = COALESCE($2, name), description = COALESCE($3, description)
|
||||
WHERE id = $1
|
||||
RETURNING *`,
|
||||
[id, input.name, input.description],
|
||||
);
|
||||
if (result.rows.length === 0) {
|
||||
throw new NotFoundError(`Example with ID ${id} not found.`);
|
||||
}
|
||||
return result.rows[0];
|
||||
} catch (error) {
|
||||
handleDbError(
|
||||
error,
|
||||
log,
|
||||
'Database error in updateExample',
|
||||
{ id, input },
|
||||
{
|
||||
entityName: 'Example',
|
||||
defaultMessage: 'Failed to update example.',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes an example.
|
||||
* @throws {NotFoundError} If the example doesn't exist.
|
||||
*/
|
||||
export async function deleteExample(id: number, client?: PoolClient): Promise<void> {
|
||||
const queryable = client || getPool();
|
||||
try {
|
||||
const result = await queryable.query('DELETE FROM examples WHERE id = $1', [id]);
|
||||
if (result.rowCount === 0) {
|
||||
throw new NotFoundError(`Example with ID ${id} not found.`);
|
||||
}
|
||||
} catch (error) {
|
||||
handleDbError(
|
||||
error,
|
||||
log,
|
||||
'Database error in deleteExample',
|
||||
{ id },
|
||||
{
|
||||
entityName: 'Example',
|
||||
defaultMessage: 'Failed to delete example.',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Using with Transactions
|
||||
|
||||
```typescript
|
||||
import { withTransaction } from './connection.db';
|
||||
import { createExample, updateExample } from './example.db';
|
||||
import { createRelated } from './related.db';
|
||||
|
||||
async function createExampleWithRelated(data: ComplexInput): Promise<Example> {
|
||||
return withTransaction(async (client) => {
|
||||
const example = await createExample(data.example, client);
|
||||
await createRelated({ exampleId: example.id, ...data.related }, client);
|
||||
return example;
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/db/connection.db.ts` - `getPool()`, `withTransaction()`
|
||||
- `src/services/db/errors.db.ts` - `handleDbError()`, custom error classes
|
||||
- `src/services/db/index.db.ts` - Barrel exports for all repositories
|
||||
- `src/services/db/*.db.ts` - Individual domain repositories
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Consistency**: All repositories follow the same patterns
|
||||
- **Predictability**: Method names clearly indicate behavior
|
||||
- **Testability**: Consistent interfaces make mocking straightforward
|
||||
- **Error Handling**: Centralized error handling prevents inconsistent responses
|
||||
- **Transaction Safety**: Clear pattern for transaction participation
|
||||
|
||||
### Negative
|
||||
|
||||
- **Learning Curve**: Developers must learn and follow conventions
|
||||
- **Boilerplate**: Each method requires similar error handling structure
|
||||
- **Refactoring**: Existing repositories may need updates to conform
|
||||
|
||||
## Compliance Checklist
|
||||
|
||||
For new repository methods:
|
||||
|
||||
- [ ] Method name follows prefix convention (get/find/list/create/update/delete)
|
||||
- [ ] Throws `NotFoundError` for `get*` methods when entity not found
|
||||
- [ ] Returns `null` for `find*` methods when entity not found
|
||||
- [ ] Uses `handleDbError` for database error handling
|
||||
- [ ] Accepts optional `PoolClient` parameter for transaction support
|
||||
- [ ] Includes JSDoc with `@throws` documentation
|
||||
- [ ] Has corresponding unit tests
|
||||
328
docs/adr/0035-service-layer-architecture.md
Normal file
328
docs/adr/0035-service-layer-architecture.md
Normal file
@@ -0,0 +1,328 @@
|
||||
# ADR-035: Service Layer Architecture
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The application has evolved to include multiple service types:
|
||||
|
||||
1. **Repository services** (`*.db.ts`): Direct database access
|
||||
2. **Business services** (`*Service.ts`): Business logic orchestration
|
||||
3. **External services** (`*Service.server.ts`): Integration with external APIs
|
||||
4. **Infrastructure services** (`logger`, `redis`, `queues`): Cross-cutting concerns
|
||||
|
||||
Without clear boundaries, business logic can leak into routes, repositories can contain business rules, and services can become tightly coupled.
|
||||
|
||||
## Decision
|
||||
|
||||
We will establish a clear layered architecture with defined responsibilities for each layer:
|
||||
|
||||
### Layer Responsibilities
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Routes Layer │
|
||||
│ - Request/response handling │
|
||||
│ - Input validation (via middleware) │
|
||||
│ - Authentication/authorization │
|
||||
│ - Rate limiting │
|
||||
│ - Response formatting │
|
||||
└─────────────────────────────────────────────────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Services Layer │
|
||||
│ - Business logic orchestration │
|
||||
│ - Transaction coordination │
|
||||
│ - External API integration │
|
||||
│ - Cross-repository operations │
|
||||
│ - Event publishing │
|
||||
└─────────────────────────────────────────────────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Repository Layer │
|
||||
│ - Direct database access │
|
||||
│ - Query construction │
|
||||
│ - Entity mapping │
|
||||
│ - Error translation │
|
||||
└─────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Service Types and Naming
|
||||
|
||||
| Type | Pattern | Suffix | Example |
|
||||
| ------------------- | ------------------------------- | ------------- | --------------------- |
|
||||
| Business Service | Orchestrates business logic | `*Service.ts` | `authService.ts` |
|
||||
| Server-Only Service | External APIs, server-side only | `*.server.ts` | `aiService.server.ts` |
|
||||
| Database Repository | Direct DB access | `*.db.ts` | `user.db.ts` |
|
||||
| Infrastructure | Cross-cutting concerns | Descriptive | `logger.server.ts` |
|
||||
|
||||
### Service Dependencies
|
||||
|
||||
```
|
||||
Routes → Business Services → Repositories
|
||||
↓
|
||||
External Services
|
||||
↓
|
||||
Infrastructure (logger, redis, queues)
|
||||
```
|
||||
|
||||
**Rules**:
|
||||
|
||||
- Routes MUST NOT directly access repositories (except simple CRUD)
|
||||
- Repositories MUST NOT call other repositories (use services)
|
||||
- Services MAY call other services
|
||||
- Infrastructure services MAY be called from any layer
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Business Service Pattern
|
||||
|
||||
```typescript
|
||||
// src/services/authService.ts
|
||||
import { withTransaction } from './db/connection.db';
|
||||
import * as userRepo from './db/user.db';
|
||||
import * as profileRepo from './db/personalization.db';
|
||||
import { emailService } from './emailService.server';
|
||||
import { logger } from './logger.server';
|
||||
|
||||
const log = logger.child({ service: 'auth' });
|
||||
|
||||
interface LoginResult {
|
||||
user: UserProfile;
|
||||
accessToken: string;
|
||||
refreshToken: string;
|
||||
}
|
||||
|
||||
export const authService = {
|
||||
/**
|
||||
* Registers a new user and sends welcome email.
|
||||
* Orchestrates multiple repositories in a transaction.
|
||||
*/
|
||||
async registerAndLoginUser(
|
||||
email: string,
|
||||
password: string,
|
||||
fullName?: string,
|
||||
avatarUrl?: string,
|
||||
reqLog?: Logger,
|
||||
): Promise<LoginResult> {
|
||||
const log = reqLog || logger;
|
||||
|
||||
return withTransaction(async (client) => {
|
||||
// 1. Create user (repository)
|
||||
const user = await userRepo.createUser({ email, password }, client);
|
||||
|
||||
// 2. Create profile (repository)
|
||||
await profileRepo.createProfile(
|
||||
{
|
||||
userId: user.user_id,
|
||||
fullName,
|
||||
avatarUrl,
|
||||
},
|
||||
client,
|
||||
);
|
||||
|
||||
// 3. Generate tokens (business logic)
|
||||
const { accessToken, refreshToken } = this.generateTokens(user);
|
||||
|
||||
// 4. Send welcome email (external service, non-blocking)
|
||||
emailService.sendWelcomeEmail(email, fullName).catch((err) => {
|
||||
log.warn({ err, email }, 'Failed to send welcome email');
|
||||
});
|
||||
|
||||
log.info({ userId: user.user_id }, 'User registered successfully');
|
||||
|
||||
return {
|
||||
user: await this.buildUserProfile(user.user_id, client),
|
||||
accessToken,
|
||||
refreshToken,
|
||||
};
|
||||
});
|
||||
},
|
||||
|
||||
// ... other methods
|
||||
};
|
||||
```
|
||||
|
||||
### Server-Only Service Pattern
|
||||
|
||||
```typescript
|
||||
// src/services/aiService.server.ts
|
||||
// This file MUST only be imported by server-side code
|
||||
|
||||
import { GoogleGenAI } from '@google/genai';
import { config } from '../config/env';
import { logger } from './logger.server';

const log = logger.child({ service: 'ai' });

class AiService {
  private client: GoogleGenAI;

  constructor() {
    this.client = new GoogleGenAI({ apiKey: config.ai.geminiApiKey });
|
||||
}
|
||||
|
||||
async analyzeImage(imagePath: string): Promise<AnalysisResult> {
|
||||
log.info({ imagePath }, 'Starting image analysis');
|
||||
// ... implementation
|
||||
}
|
||||
}
|
||||
|
||||
export const aiService = new AiService();
|
||||
```
|
||||
|
||||
### Route Handler Pattern
|
||||
|
||||
```typescript
|
||||
// src/routes/auth.routes.ts
|
||||
import { Router } from 'express';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { registerLimiter } from '../config/rateLimiters';
|
||||
import { authService } from '../services/authService';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Route is thin - delegates to service
|
||||
router.post(
|
||||
'/register',
|
||||
registerLimiter,
|
||||
validateRequest(registerSchema),
|
||||
async (req, res, next) => {
|
||||
try {
|
||||
const { email, password, full_name } = req.body;
|
||||
|
||||
// Delegate to service
|
||||
const result = await authService.registerAndLoginUser(
|
||||
email,
|
||||
password,
|
||||
full_name,
|
||||
undefined,
|
||||
req.log, // Pass request-scoped logger
|
||||
);
|
||||
|
||||
// Format response
|
||||
res.status(201).json({
|
||||
message: 'Registration successful',
|
||||
user: result.user,
|
||||
accessToken: result.accessToken,
|
||||
});
|
||||
} catch (error) {
|
||||
next(error); // Let error handler deal with it
|
||||
}
|
||||
},
|
||||
);
|
||||
```
|
||||
|
||||
### Service File Organization
|
||||
|
||||
```
|
||||
src/services/
|
||||
├── db/ # Repository layer
|
||||
│ ├── connection.db.ts # Pool, transactions
|
||||
│ ├── errors.db.ts # DB error types
|
||||
│ ├── user.db.ts # User repository
|
||||
│ ├── flyer.db.ts # Flyer repository
|
||||
│ └── index.db.ts # Barrel exports
|
||||
├── authService.ts # Authentication business logic
|
||||
├── userService.ts # User management business logic
|
||||
├── gamificationService.ts # Gamification business logic
|
||||
├── aiService.server.ts # AI API integration (server-only)
|
||||
├── emailService.server.ts # Email sending (server-only)
|
||||
├── geocodingService.server.ts # Geocoding API (server-only)
|
||||
├── cacheService.server.ts # Redis caching (server-only)
|
||||
├── queueService.server.ts # BullMQ queues (server-only)
|
||||
├── logger.server.ts # Pino logger (server-only)
|
||||
└── logger.client.ts # Client-side logger
|
||||
```
|
||||
|
||||
### Dependency Injection for Testing
|
||||
|
||||
Services should support dependency injection for easier testing:
|
||||
|
||||
```typescript
|
||||
// Production: use singleton
|
||||
export const authService = createAuthService();
|
||||
|
||||
// Testing: inject mocks
|
||||
export function createAuthService(deps?: Partial<AuthServiceDeps>) {
|
||||
const userRepo = deps?.userRepo || defaultUserRepo;
|
||||
const emailService = deps?.emailService || defaultEmailService;
|
||||
|
||||
return {
|
||||
async registerAndLoginUser(...) { /* ... */ },
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
### Infrastructure Services
|
||||
|
||||
- `src/services/logger.server.ts` - Server-side structured logging
|
||||
- `src/services/logger.client.ts` - Client-side logging
|
||||
- `src/services/redis.server.ts` - Redis connection management
|
||||
- `src/services/queueService.server.ts` - BullMQ queue management
|
||||
- `src/services/cacheService.server.ts` - Caching abstraction
|
||||
|
||||
### Business Services
|
||||
|
||||
- `src/services/authService.ts` - Authentication flows
|
||||
- `src/services/userService.ts` - User management
|
||||
- `src/services/gamificationService.ts` - Achievements, leaderboards
|
||||
- `src/services/flyerProcessingService.server.ts` - Flyer pipeline
|
||||
|
||||
### External Integration Services
|
||||
|
||||
- `src/services/aiService.server.ts` - Gemini AI integration
|
||||
- `src/services/emailService.server.ts` - Email sending
|
||||
- `src/services/geocodingService.server.ts` - Address geocoding
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Separation of Concerns**: Clear boundaries between layers
|
||||
- **Testability**: Services can be tested in isolation with mocked dependencies
|
||||
- **Reusability**: Business logic in services can be used by multiple routes
|
||||
- **Maintainability**: Changes to one layer don't ripple through others
|
||||
- **Transaction Safety**: Services coordinate transactions across repositories
|
||||
|
||||
### Negative
|
||||
|
||||
- **Indirection**: More layers mean more code to navigate
|
||||
- **Potential Over-Engineering**: Simple CRUD operations don't need full service layer
|
||||
- **Coordination Overhead**: Team must agree on layer boundaries
|
||||
|
||||
## Guidelines
|
||||
|
||||
### When to Create a Service
|
||||
|
||||
Create a business service when:
|
||||
|
||||
- Logic spans multiple repositories
|
||||
- External APIs need to be called
|
||||
- Complex business rules exist
|
||||
- The same logic is needed by multiple routes
|
||||
- Transaction coordination is required
|
||||
|
||||
### When Direct Repository Access is OK
|
||||
|
||||
Routes can directly use repositories for:
|
||||
|
||||
- Simple single-entity CRUD operations
|
||||
- Read-only queries with no business logic
|
||||
- Operations that don't need transaction coordination
|
||||
|
||||
### Service Method Guidelines
|
||||
|
||||
- Accept a request-scoped logger as an optional parameter
|
||||
- Return domain objects, not HTTP-specific responses
|
||||
- Throw domain errors, let routes handle HTTP status codes
|
||||
- Use `withTransaction` for multi-repository operations
|
||||
- Log business events (user registered, order placed, etc.)
|
||||
--- New file: docs/adr/0036-event-bus-and-pub-sub-pattern.md (212 lines) ---
|
||||
# ADR-036: Event Bus and Pub/Sub Pattern
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
Modern web applications often need to handle cross-component communication without creating tight coupling between modules. In our application, several scenarios require broadcasting events across the system:
|
||||
|
||||
1. **Session Expiry**: When a user's session expires, multiple components need to respond (auth state, UI notifications, API client).
|
||||
2. **Real-time Updates**: When data changes on the server, multiple UI components may need to update.
|
||||
3. **Cross-Component Communication**: Independent components need to communicate without direct references to each other.
|
||||
|
||||
Traditional approaches like prop drilling or global state management can lead to tightly coupled code that is difficult to maintain and test.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a lightweight, in-memory event bus pattern using a publish/subscribe (pub/sub) architecture. This provides:
|
||||
|
||||
1. **Decoupled Communication**: Publishers and subscribers don't need to know about each other.
|
||||
2. **Event-Driven Architecture**: Components react to events rather than polling for changes.
|
||||
3. **Testability**: Events can be easily mocked and verified in tests.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Singleton Pattern**: A single event bus instance is shared across the application.
|
||||
- **Type-Safe Events**: Event names are string constants to prevent typos.
|
||||
- **Memory Management**: Subscribers must unsubscribe when components unmount to prevent memory leaks.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### EventBus Class
|
||||
|
||||
Located in `src/services/eventBus.ts`:
|
||||
|
||||
```typescript
|
||||
type EventCallback = (data?: any) => void;
|
||||
|
||||
export class EventBus {
|
||||
private listeners: { [key: string]: EventCallback[] } = {};
|
||||
|
||||
on(event: string, callback: EventCallback): void {
|
||||
if (!this.listeners[event]) {
|
||||
this.listeners[event] = [];
|
||||
}
|
||||
this.listeners[event].push(callback);
|
||||
}
|
||||
|
||||
off(event: string, callback: EventCallback): void {
|
||||
if (!this.listeners[event]) return;
|
||||
this.listeners[event] = this.listeners[event].filter((l) => l !== callback);
|
||||
}
|
||||
|
||||
dispatch(event: string, data?: any): void {
|
||||
if (!this.listeners[event]) return;
|
||||
this.listeners[event].forEach((callback) => callback(data));
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
export const eventBus = new EventBus();
|
||||
```
|
||||
|
||||
### Event Constants
|
||||
|
||||
Define event names as constants to prevent typos:
|
||||
|
||||
```typescript
|
||||
// src/constants/events.ts
|
||||
export const EVENTS = {
|
||||
SESSION_EXPIRED: 'session:expired',
|
||||
SESSION_REFRESHED: 'session:refreshed',
|
||||
USER_LOGGED_OUT: 'user:loggedOut',
|
||||
DATA_UPDATED: 'data:updated',
|
||||
NOTIFICATION_RECEIVED: 'notification:received',
|
||||
} as const;
|
||||
```
|
||||
|
||||
### React Hook for Event Subscription
|
||||
|
||||
```typescript
|
||||
// src/hooks/useEventBus.ts
|
||||
import { useEffect } from 'react';
|
||||
import { eventBus } from '../services/eventBus';
|
||||
|
||||
export function useEventBus(event: string, callback: (data?: any) => void) {
|
||||
useEffect(() => {
|
||||
eventBus.on(event, callback);
|
||||
|
||||
// Cleanup on unmount
|
||||
return () => {
|
||||
eventBus.off(event, callback);
|
||||
};
|
||||
}, [event, callback]);
|
||||
}
|
||||
```
|
||||
|
||||
### Usage Examples
|
||||
|
||||
**Publishing Events**:
|
||||
|
||||
```typescript
|
||||
import { eventBus } from '../services/eventBus';
|
||||
import { EVENTS } from '../constants/events';
|
||||
|
||||
// In API client when session expires
|
||||
function handleSessionExpiry() {
|
||||
eventBus.dispatch(EVENTS.SESSION_EXPIRED, { reason: 'token_expired' });
|
||||
}
|
||||
```
|
||||
|
||||
**Subscribing in Components**:
|
||||
|
||||
```typescript
|
||||
import { useCallback } from 'react';
|
||||
import { useEventBus } from '../hooks/useEventBus';
|
||||
import { EVENTS } from '../constants/events';
|
||||
|
||||
function AuthenticatedComponent() {
|
||||
const handleSessionExpired = useCallback((data) => {
|
||||
console.log('Session expired:', data.reason);
|
||||
// Redirect to login, show notification, etc.
|
||||
}, []);
|
||||
|
||||
useEventBus(EVENTS.SESSION_EXPIRED, handleSessionExpired);
|
||||
|
||||
return <div>Protected Content</div>;
|
||||
}
|
||||
```
|
||||
|
||||
**Subscribing in Non-React Code**:
|
||||
|
||||
```typescript
|
||||
import { eventBus } from '../services/eventBus';
|
||||
import { EVENTS } from '../constants/events';
|
||||
|
||||
// In API client
|
||||
const handleLogout = () => {
|
||||
clearAuthToken();
|
||||
};
|
||||
|
||||
eventBus.on(EVENTS.USER_LOGGED_OUT, handleLogout);
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
The EventBus is fully tested in `src/services/eventBus.test.ts`:
|
||||
|
||||
```typescript
|
||||
import { EventBus } from './eventBus';
|
||||
|
||||
describe('EventBus', () => {
|
||||
let bus: EventBus;
|
||||
|
||||
beforeEach(() => {
|
||||
bus = new EventBus();
|
||||
});
|
||||
|
||||
it('should call registered listeners when event is dispatched', () => {
|
||||
const callback = vi.fn();
|
||||
bus.on('test', callback);
|
||||
bus.dispatch('test', { value: 42 });
|
||||
expect(callback).toHaveBeenCalledWith({ value: 42 });
|
||||
});
|
||||
|
||||
it('should unsubscribe listeners correctly', () => {
|
||||
const callback = vi.fn();
|
||||
bus.on('test', callback);
|
||||
bus.off('test', callback);
|
||||
bus.dispatch('test');
|
||||
expect(callback).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle multiple listeners for the same event', () => {
|
||||
const callback1 = vi.fn();
|
||||
const callback2 = vi.fn();
|
||||
bus.on('test', callback1);
|
||||
bus.on('test', callback2);
|
||||
bus.dispatch('test');
|
||||
expect(callback1).toHaveBeenCalled();
|
||||
expect(callback2).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Loose Coupling**: Components don't need direct references to communicate.
|
||||
- **Flexibility**: New subscribers can be added without modifying publishers.
|
||||
- **Testability**: Easy to mock events and verify interactions.
|
||||
- **Simplicity**: Minimal code footprint compared to full state management solutions.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Debugging Complexity**: Event-driven flows can be harder to trace than direct function calls.
|
||||
- **Memory Leaks**: Forgetting to unsubscribe can cause memory leaks (mitigated by the React hook).
|
||||
- **No Type Safety for Payloads**: Event data is typed as `any` (could be improved with generics).
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/eventBus.ts` - EventBus implementation
|
||||
- `src/services/eventBus.test.ts` - EventBus tests
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md) - State Management Strategy
|
||||
- [ADR-022](./0022-real-time-notification-system.md) - Real-time Notification System
|
||||
--- New file: docs/adr/0037-scheduled-jobs-and-cron-pattern.md (265 lines) ---
|
||||
# ADR-037: Scheduled Jobs and Cron Pattern
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
Many business operations need to run on a recurring schedule without user intervention:
|
||||
|
||||
1. **Daily Deal Checks**: Scan watched items for price drops and notify users.
|
||||
2. **Analytics Generation**: Compile daily and weekly statistics reports.
|
||||
3. **Token Cleanup**: Remove expired password reset tokens from the database.
|
||||
4. **Data Maintenance**: Archive old data, clean up temporary files.
|
||||
|
||||
These scheduled operations require:
|
||||
|
||||
- Reliable execution at specific times
|
||||
- Protection against overlapping runs
|
||||
- Graceful error handling that doesn't crash the server
|
||||
- Integration with the existing job queue system (BullMQ)
|
||||
|
||||
## Decision
|
||||
|
||||
We will use `node-cron` for scheduling jobs and integrate with BullMQ for job execution. This provides:
|
||||
|
||||
1. **Cron Expressions**: Standard, well-understood scheduling syntax.
|
||||
2. **Job Queue Integration**: Scheduled jobs enqueue work to BullMQ for reliable processing.
|
||||
3. **Idempotency**: Jobs use predictable IDs to prevent duplicate runs.
|
||||
4. **Overlap Protection**: In-memory locks prevent concurrent execution of the same job.
|
||||
|
||||
### Architecture
|
||||
|
||||
```text
|
||||
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
|
||||
│ node-cron │────▶│ BullMQ Queue │────▶│ Worker │
|
||||
│ (Scheduler) │ │ (Job Store) │ │ (Processor) │
|
||||
└─────────────────┘ └─────────────────┘ └─────────────────┘
|
||||
│
|
||||
▼
|
||||
┌─────────────────┐
|
||||
│ Redis │
|
||||
│ (Persistence) │
|
||||
└─────────────────┘
|
||||
```
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### BackgroundJobService
|
||||
|
||||
Located in `src/services/backgroundJobService.ts`:
|
||||
|
||||
```typescript
|
||||
import cron from 'node-cron';
|
||||
import type { Logger } from 'pino';
|
||||
import type { Queue } from 'bullmq';
|
||||
|
||||
export class BackgroundJobService {
|
||||
constructor(
|
||||
private personalizationRepo: PersonalizationRepository,
|
||||
private notificationRepo: NotificationRepository,
|
||||
private emailQueue: Queue<EmailJobData>,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async runDailyDealCheck(): Promise<void> {
|
||||
this.logger.info('[BackgroundJob] Starting daily deal check...');
|
||||
|
||||
// 1. Fetch all deals for all users in one efficient query
|
||||
const allDeals = await this.personalizationRepo.getBestSalePricesForAllUsers(this.logger);
|
||||
|
||||
// 2. Group deals by user
|
||||
const dealsByUser = this.groupDealsByUser(allDeals);
|
||||
|
||||
// 3. Process each user's deals in parallel
|
||||
const results = await Promise.allSettled(
|
||||
Array.from(dealsByUser.values()).map((userGroup) => this._processDealsForUser(userGroup)),
|
||||
);
|
||||
|
||||
// 4. Bulk insert notifications
|
||||
await this.bulkCreateNotifications(results);
|
||||
|
||||
this.logger.info('[BackgroundJob] Daily deal check completed.');
|
||||
}
|
||||
|
||||
async triggerAnalyticsReport(): Promise<string> {
|
||||
const reportDate = getCurrentDateISOString();
|
||||
const jobId = `manual-report-${reportDate}-${Date.now()}`;
|
||||
const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
|
||||
return job.id;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Cron Job Initialization
|
||||
|
||||
```typescript
|
||||
// In-memory lock to prevent job overlap
|
||||
let isDailyDealCheckRunning = false;
|
||||
|
||||
export function startBackgroundJobs(
|
||||
backgroundJobService: BackgroundJobService,
|
||||
analyticsQueue: Queue,
|
||||
weeklyAnalyticsQueue: Queue,
|
||||
tokenCleanupQueue: Queue,
|
||||
logger: Logger,
|
||||
): void {
|
||||
// Daily deal check at 2:00 AM
|
||||
cron.schedule('0 2 * * *', () => {
|
||||
(async () => {
|
||||
if (isDailyDealCheckRunning) {
|
||||
logger.warn('[BackgroundJob] Daily deal check already running. Skipping.');
|
||||
return;
|
||||
}
|
||||
isDailyDealCheckRunning = true;
|
||||
try {
|
||||
await backgroundJobService.runDailyDealCheck();
|
||||
} catch (error) {
|
||||
logger.error({ err: error }, '[BackgroundJob] Daily deal check failed.');
|
||||
} finally {
|
||||
isDailyDealCheckRunning = false;
|
||||
}
|
||||
})().catch((error) => {
|
||||
logger.error({ err: error }, '[BackgroundJob] Unhandled rejection in cron wrapper.');
|
||||
isDailyDealCheckRunning = false;
|
||||
});
|
||||
});
|
||||
|
||||
// Daily analytics at 3:00 AM
|
||||
cron.schedule('0 3 * * *', () => {
|
||||
(async () => {
|
||||
const reportDate = getCurrentDateISOString();
|
||||
await analyticsQueue.add(
|
||||
'generate-daily-report',
|
||||
{ reportDate },
|
||||
{ jobId: `daily-report-${reportDate}` }, // Prevents duplicates
|
||||
);
|
||||
})().catch((error) => {
|
||||
logger.error({ err: error }, '[BackgroundJob] Analytics job enqueue failed.');
|
||||
});
|
||||
});
|
||||
|
||||
// Weekly analytics at 4:00 AM on Sundays
|
||||
cron.schedule('0 4 * * 0', () => {
|
||||
(async () => {
|
||||
const { year, week } = getSimpleWeekAndYear();
|
||||
await weeklyAnalyticsQueue.add(
|
||||
'generate-weekly-report',
|
||||
{ reportYear: year, reportWeek: week },
|
||||
{ jobId: `weekly-report-${year}-${week}` },
|
||||
);
|
||||
})().catch((error) => {
|
||||
logger.error({ err: error }, '[BackgroundJob] Weekly analytics enqueue failed.');
|
||||
});
|
||||
});
|
||||
|
||||
// Token cleanup at 5:00 AM
|
||||
cron.schedule('0 5 * * *', () => {
|
||||
(async () => {
|
||||
const timestamp = new Date().toISOString();
|
||||
await tokenCleanupQueue.add(
|
||||
'cleanup-tokens',
|
||||
{ timestamp },
|
||||
{ jobId: `token-cleanup-${timestamp.split('T')[0]}` },
|
||||
);
|
||||
})().catch((error) => {
|
||||
logger.error({ err: error }, '[BackgroundJob] Token cleanup enqueue failed.');
|
||||
});
|
||||
});
|
||||
|
||||
logger.info('[BackgroundJob] All cron jobs scheduled successfully.');
|
||||
}
|
||||
```
|
||||
|
||||
### Job Schedule Reference
|
||||
|
||||
| Job | Schedule | Queue | Purpose |
|
||||
| ---------------- | ---------------------------- | ---------------------- | --------------------------------- |
|
||||
| Daily Deal Check | `0 2 * * *` (2:00 AM) | Direct execution | Find price drops on watched items |
|
||||
| Daily Analytics | `0 3 * * *` (3:00 AM) | `analyticsQueue` | Generate daily statistics |
|
||||
| Weekly Analytics | `0 4 * * 0` (4:00 AM Sunday) | `weeklyAnalyticsQueue` | Generate weekly reports |
|
||||
| Token Cleanup | `0 5 * * *` (5:00 AM) | `tokenCleanupQueue` | Remove expired tokens |
|
||||
|
||||
### Cron Expression Reference
|
||||
|
||||
```text
|
||||
┌───────────── minute (0 - 59)
|
||||
│ ┌───────────── hour (0 - 23)
|
||||
│ │ ┌───────────── day of month (1 - 31)
|
||||
│ │ │ ┌───────────── month (1 - 12)
|
||||
│ │ │ │ ┌───────────── day of week (0 - 7, Sun = 0 or 7)
|
||||
│ │ │ │ │
|
||||
* * * * *
|
||||
|
||||
Examples:
|
||||
0 2 * * * = 2:00 AM every day
|
||||
0 4 * * 0 = 4:00 AM every Sunday
|
||||
*/15 * * * * = Every 15 minutes
|
||||
0 0 1 * * = Midnight on the 1st of each month
|
||||
```
|
||||
|
||||
### Error Handling Pattern
|
||||
|
||||
The async IIFE wrapper with `.catch()` ensures that:
|
||||
|
||||
1. Errors in the job don't crash the cron scheduler
|
||||
2. Unhandled promise rejections are logged
|
||||
3. The lock is always released in the `finally` block
|
||||
|
||||
```typescript
|
||||
cron.schedule('0 2 * * *', () => {
|
||||
(async () => {
|
||||
// Job logic here
|
||||
})().catch((error) => {
|
||||
// Handle unhandled rejections from the async wrapper
|
||||
logger.error({ err: error }, 'Unhandled rejection');
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Manual Trigger API
|
||||
|
||||
Admin endpoints allow manual triggering of scheduled jobs:
|
||||
|
||||
```typescript
|
||||
// src/routes/admin.routes.ts
|
||||
router.post('/jobs/daily-deals', isAdmin, async (req, res, next) => {
|
||||
await backgroundJobService.runDailyDealCheck();
|
||||
res.json({ message: 'Daily deal check triggered' });
|
||||
});
|
||||
|
||||
router.post('/jobs/analytics', isAdmin, async (req, res, next) => {
|
||||
const jobId = await backgroundJobService.triggerAnalyticsReport();
|
||||
res.json({ message: 'Analytics report queued', jobId });
|
||||
});
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Reliability**: Jobs run at predictable times without manual intervention.
|
||||
- **Idempotency**: Duplicate job prevention via job IDs.
|
||||
- **Observability**: All job activity is logged with structured logging.
|
||||
- **Flexibility**: Jobs can be triggered manually for testing or urgent runs.
|
||||
- **Separation**: Scheduling is decoupled from job execution (cron vs BullMQ).
|
||||
|
||||
### Negative
|
||||
|
||||
- **Single Server**: Cron runs on a single server instance. For multi-server deployments, consider distributed scheduling.
|
||||
- **Time Zone Dependency**: Cron times are server-local; consider UTC for distributed systems.
|
||||
- **In-Memory Locks**: Overlap protection is per-process, not cluster-wide.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/backgroundJobService.ts` - BackgroundJobService class and `startBackgroundJobs`
|
||||
- `src/services/queueService.server.ts` - BullMQ queue definitions
|
||||
- `src/services/workers.server.ts` - BullMQ worker processors
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-006](./0006-background-job-processing-and-task-queues.md) - Background Job Processing
|
||||
- [ADR-004](./0004-standardized-application-wide-structured-logging.md) - Structured Logging
|
||||
--- New file: docs/adr/0038-graceful-shutdown-pattern.md (290 lines) ---
|
||||
# ADR-038: Graceful Shutdown Pattern
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
When deploying or restarting the application, abrupt termination can cause:
|
||||
|
||||
1. **Lost Jobs**: BullMQ jobs in progress may be marked as failed or stalled.
|
||||
2. **Connection Leaks**: Database and Redis connections may not be properly closed.
|
||||
3. **Incomplete Requests**: HTTP requests in flight may receive no response.
|
||||
4. **Data Corruption**: Transactions may be left in an inconsistent state.
|
||||
|
||||
Kubernetes and PM2 send termination signals (SIGTERM, SIGINT) to processes before forcefully killing them. The application must handle these signals to shut down gracefully.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a coordinated graceful shutdown pattern that:
|
||||
|
||||
1. **Stops Accepting New Work**: Closes HTTP server, pauses job queues.
|
||||
2. **Completes In-Flight Work**: Waits for active requests and jobs to finish.
|
||||
3. **Releases Resources**: Closes database pools, Redis connections, and queues.
|
||||
4. **Logs Shutdown Progress**: Provides visibility into the shutdown process.
|
||||
|
||||
### Signal Handling
|
||||
|
||||
| Signal | Source | Behavior |
|
||||
| ------- | ------------------ | --------------------------------------- |
|
||||
| SIGTERM | Kubernetes, PM2 | Graceful shutdown with resource cleanup |
|
||||
| SIGINT | Ctrl+C in terminal | Same as SIGTERM |
|
||||
| SIGKILL | Force kill | Cannot be caught; immediate termination |
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Queue and Worker Shutdown
|
||||
|
||||
Located in `src/services/queueService.server.ts`:
|
||||
|
||||
```typescript
|
||||
import { logger } from './logger.server';
|
||||
|
||||
export const gracefulShutdown = async (signal: string): Promise<void> => {
|
||||
logger.info(`[Shutdown] Received ${signal}. Closing all queues and workers...`);
|
||||
|
||||
const resources = [
|
||||
{ name: 'flyerQueue', close: () => flyerQueue.close() },
|
||||
{ name: 'emailQueue', close: () => emailQueue.close() },
|
||||
{ name: 'analyticsQueue', close: () => analyticsQueue.close() },
|
||||
{ name: 'weeklyAnalyticsQueue', close: () => weeklyAnalyticsQueue.close() },
|
||||
{ name: 'cleanupQueue', close: () => cleanupQueue.close() },
|
||||
{ name: 'tokenCleanupQueue', close: () => tokenCleanupQueue.close() },
|
||||
{ name: 'redisConnection', close: () => connection.quit() },
|
||||
];
|
||||
|
||||
const results = await Promise.allSettled(
|
||||
resources.map(async (resource) => {
|
||||
try {
|
||||
await resource.close();
|
||||
logger.info(`[Shutdown] ${resource.name} closed successfully.`);
|
||||
} catch (error) {
|
||||
logger.error({ err: error }, `[Shutdown] Error closing ${resource.name}`);
|
||||
throw error;
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
const failures = results.filter((r) => r.status === 'rejected');
|
||||
if (failures.length > 0) {
|
||||
logger.error(`[Shutdown] ${failures.length} resources failed to close.`);
|
||||
}
|
||||
|
||||
logger.info('[Shutdown] All resources closed. Process can now exit.');
|
||||
};
|
||||
|
||||
// Register signal handlers
|
||||
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
|
||||
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
|
||||
```
|
||||
|
||||
### HTTP Server Shutdown
|
||||
|
||||
Located in `server.ts`:
|
||||
|
||||
```typescript
|
||||
import { gracefulShutdown as shutdownQueues } from './src/services/queueService.server';
|
||||
import { closePool } from './src/services/db/connection.db';
|
||||
|
||||
const server = app.listen(PORT, () => {
|
||||
logger.info(`Server listening on port ${PORT}`);
|
||||
});
|
||||
|
||||
const gracefulShutdown = async (signal: string): Promise<void> => {
|
||||
logger.info(`[Shutdown] Received ${signal}. Starting graceful shutdown...`);
|
||||
|
||||
// 1. Stop accepting new connections
|
||||
server.close((err) => {
|
||||
if (err) {
|
||||
logger.error({ err }, '[Shutdown] Error closing HTTP server');
|
||||
} else {
|
||||
logger.info('[Shutdown] HTTP server closed.');
|
||||
}
|
||||
});
|
||||
|
||||
// 2. Allow a fixed 5-second grace period for in-flight requests to complete
|
||||
await new Promise((resolve) => setTimeout(resolve, 5000));
|
||||
|
||||
// 3. Close queues and workers
|
||||
await shutdownQueues(signal);
|
||||
|
||||
// 4. Close database pool
|
||||
await closePool();
|
||||
logger.info('[Shutdown] Database pool closed.');
|
||||
|
||||
// 5. Exit process
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
|
||||
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
|
||||
```
|
||||
|
||||
### Database Pool Shutdown
|
||||
|
||||
Located in `src/services/db/connection.db.ts`:
|
||||
|
||||
```typescript
|
||||
let pool: Pool | null = null;
|
||||
|
||||
export function getPool(): Pool {
|
||||
if (!pool) {
|
||||
pool = new Pool({
|
||||
max: 20,
|
||||
idleTimeoutMillis: 30000,
|
||||
connectionTimeoutMillis: 2000,
|
||||
});
|
||||
}
|
||||
return pool;
|
||||
}
|
||||
|
||||
export async function closePool(): Promise<void> {
|
||||
if (pool) {
|
||||
await pool.end();
|
||||
pool = null;
|
||||
logger.info('[Database] Connection pool closed.');
|
||||
}
|
||||
}
|
||||
|
||||
export function getPoolStatus(): { totalCount: number; idleCount: number; waitingCount: number } {
|
||||
const p = getPool();
|
||||
return {
|
||||
totalCount: p.totalCount,
|
||||
idleCount: p.idleCount,
|
||||
waitingCount: p.waitingCount,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### PM2 Ecosystem Configuration
|
||||
|
||||
Located in `ecosystem.config.cjs`:
|
||||
|
||||
```javascript
|
||||
module.exports = {
|
||||
apps: [
|
||||
{
|
||||
name: 'flyer-crawler-api',
|
||||
script: 'server.ts',
|
||||
interpreter: 'tsx',
|
||||
|
||||
// Graceful shutdown settings
|
||||
kill_timeout: 10000, // 10 seconds to cleanup before SIGKILL
|
||||
wait_ready: true, // Wait for 'ready' signal before considering app started
|
||||
listen_timeout: 10000, // Timeout for ready signal
|
||||
|
||||
// Cluster mode for zero-downtime reloads
|
||||
instances: 1,
|
||||
exec_mode: 'fork',
|
||||
|
||||
// Environment variables
|
||||
env_production: {
|
||||
NODE_ENV: 'production',
|
||||
PORT: 3000,
|
||||
},
|
||||
env_test: {
|
||||
NODE_ENV: 'test',
|
||||
PORT: 3001,
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
```
|
||||
|
||||
### Worker Graceful Shutdown
|
||||
|
||||
BullMQ workers can be configured to wait for active jobs:
|
||||
|
||||
```typescript
|
||||
import { Worker } from 'bullmq';
|
||||
|
||||
const worker = new Worker('flyerQueue', processor, {
|
||||
connection,
|
||||
// Graceful shutdown: wait for active jobs before closing
|
||||
// NOTE: in BullMQ these are top-level WorkerOptions (nesting them under
// a `settings` object is Bull v3 syntax and would be ignored here):
lockDuration: 30000, // Time before job is considered stalled
stalledInterval: 5000, // Check for stalled jobs every 5s
|
||||
});
|
||||
|
||||
// Workers auto-close when connection closes
|
||||
worker.on('closing', () => {
|
||||
logger.info('[Worker] flyerQueue worker is closing...');
|
||||
});
|
||||
|
||||
worker.on('closed', () => {
|
||||
logger.info('[Worker] flyerQueue worker closed.');
|
||||
});
|
||||
```
|
||||
|
||||
### Shutdown Sequence Diagram
|
||||
|
||||
```text
|
||||
SIGTERM Received
|
||||
│
|
||||
▼
|
||||
┌──────────────────────┐
|
||||
│ Stop HTTP Server │ ← No new connections accepted
|
||||
│ (server.close()) │
|
||||
└──────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────────────┐
|
||||
│ Wait for In-Flight │ ← 5-second grace period
|
||||
│ Requests │
|
||||
└──────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────────────┐
|
||||
│ Close BullMQ Queues │ ← Stop processing new jobs
|
||||
│ and Workers │
|
||||
└──────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────────────┐
|
||||
│ Close Redis │ ← Disconnect from Redis
|
||||
│ Connection │
|
||||
└──────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────────────┐
|
||||
│ Close Database Pool │ ← Release all DB connections
|
||||
│ (pool.end()) │
|
||||
└──────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────────────┐
|
||||
│ process.exit(0) │ ← Clean exit
|
||||
└──────────────────────┘
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Zero Lost Work**: In-flight requests and jobs complete before shutdown.
|
||||
- **Clean Resource Cleanup**: All connections are properly closed.
|
||||
- **Zero-Downtime Deploys**: PM2 can reload without dropping requests.
|
||||
- **Observability**: Shutdown progress is logged for debugging.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Shutdown Delay**: Takes 5-15 seconds to shut down fully.
|
||||
- **Complexity**: Multiple shutdown handlers must be coordinated.
|
||||
- **Edge Cases**: Very long-running jobs may be killed if they exceed the grace period.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `server.ts` - HTTP server shutdown and signal handling
|
||||
- `src/services/queueService.server.ts` - Queue shutdown (`gracefulShutdown`)
|
||||
- `src/services/db/connection.db.ts` - Database pool shutdown (`closePool`)
|
||||
- `ecosystem.config.cjs` - PM2 configuration with `kill_timeout`
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-006](./0006-background-job-processing-and-task-queues.md) - Background Job Processing
|
||||
- [ADR-020](./0020-health-checks-and-liveness-readiness-probes.md) - Health Checks
|
||||
- [ADR-014](./0014-containerization-and-deployment-strategy.md) - Containerization
|
||||
278
docs/adr/0039-dependency-injection-pattern.md
Normal file
278
docs/adr/0039-dependency-injection-pattern.md
Normal file
@@ -0,0 +1,278 @@
|
||||
# ADR-039: Dependency Injection Pattern
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
As the application grows, tightly coupled components become difficult to test and maintain. Common issues include:
|
||||
|
||||
1. **Hard-to-Test Code**: Components that instantiate their own dependencies cannot be easily unit tested with mocks.
|
||||
2. **Rigid Architecture**: Changing one implementation requires modifying all consumers.
|
||||
3. **Hidden Dependencies**: It's unclear what a component needs to function.
|
||||
4. **Circular Dependencies**: Tight coupling can lead to circular import issues.
|
||||
|
||||
Dependency Injection (DI) addresses these issues by inverting the control of dependency creation.
|
||||
|
||||
## Decision
|
||||
|
||||
We will adopt a constructor-based dependency injection pattern for all services and repositories. This approach:
|
||||
|
||||
1. **Explicit Dependencies**: All dependencies are declared in the constructor.
|
||||
2. **Default Values**: Production dependencies have sensible defaults.
|
||||
3. **Testability**: Test code can inject mocks without modifying source code.
|
||||
4. **Loose Coupling**: Components depend on interfaces, not implementations.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Constructor Injection**: Dependencies are passed through constructors, not looked up globally.
|
||||
- **Default Production Dependencies**: Use default parameter values for production instances.
|
||||
- **Interface Segregation**: Depend on the minimal interface needed (e.g., `Pick<Pool, 'query'>`).
|
||||
- **Composition Root**: Wire dependencies at the application entry point.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Repository Pattern with DI
|
||||
|
||||
Located in `src/services/db/flyer.db.ts`:
|
||||
|
||||
```typescript
|
||||
import { Pool, PoolClient } from 'pg';
|
||||
import { getPool } from './connection.db';
|
||||
|
||||
export class FlyerRepository {
|
||||
// Accept any object with a 'query' method - Pool or PoolClient
|
||||
private db: Pick<Pool | PoolClient, 'query'>;
|
||||
|
||||
constructor(db: Pick<Pool | PoolClient, 'query'> = getPool()) {
|
||||
this.db = db;
|
||||
}
|
||||
|
||||
async getFlyerById(flyerId: number, logger: Logger): Promise<Flyer> {
|
||||
const result = await this.db.query<Flyer>('SELECT * FROM flyers WHERE flyer_id = $1', [
|
||||
flyerId,
|
||||
]);
|
||||
if (result.rows.length === 0) {
|
||||
throw new NotFoundError(`Flyer with ID ${flyerId} not found.`);
|
||||
}
|
||||
return result.rows[0];
|
||||
}
|
||||
|
||||
async insertFlyer(flyer: FlyerDbInsert, logger: Logger): Promise<Flyer> {
|
||||
// Implementation
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Usage in Production**:
|
||||
|
||||
```typescript
|
||||
// Uses default pool
|
||||
const flyerRepo = new FlyerRepository();
|
||||
```
|
||||
|
||||
**Usage in Tests**:
|
||||
|
||||
```typescript
|
||||
const mockDb = {
|
||||
query: vi.fn().mockResolvedValue({ rows: [mockFlyer] }),
|
||||
};
|
||||
const flyerRepo = new FlyerRepository(mockDb);
|
||||
```
|
||||
|
||||
**Usage in Transactions**:
|
||||
|
||||
```typescript
|
||||
import { withTransaction } from './connection.db';
|
||||
|
||||
await withTransaction(async (client) => {
|
||||
// Pass transactional client to repository
|
||||
const flyerRepo = new FlyerRepository(client);
|
||||
const flyer = await flyerRepo.insertFlyer(flyerData, logger);
|
||||
// ... more operations in the same transaction
|
||||
});
|
||||
```
|
||||
|
||||
### Service Layer with DI
|
||||
|
||||
Located in `src/services/backgroundJobService.ts`:
|
||||
|
||||
```typescript
|
||||
export class BackgroundJobService {
|
||||
constructor(
|
||||
private personalizationRepo: PersonalizationRepository,
|
||||
private notificationRepo: NotificationRepository,
|
||||
private emailQueue: Queue<EmailJobData>,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async runDailyDealCheck(): Promise<void> {
|
||||
this.logger.info('[BackgroundJob] Starting daily deal check...');
|
||||
|
||||
const deals = await this.personalizationRepo.getBestSalePricesForAllUsers(this.logger);
|
||||
// ... process deals
|
||||
}
|
||||
}
|
||||
|
||||
// Composition root - wire production dependencies
|
||||
import { personalizationRepo, notificationRepo } from './db/index.db';
|
||||
import { logger } from './logger.server';
|
||||
import { emailQueue } from './queueService.server';
|
||||
|
||||
export const backgroundJobService = new BackgroundJobService(
|
||||
personalizationRepo,
|
||||
notificationRepo,
|
||||
emailQueue,
|
||||
logger,
|
||||
);
|
||||
```
|
||||
|
||||
**Testing with Mocks**:
|
||||
|
||||
```typescript
|
||||
describe('BackgroundJobService', () => {
|
||||
it('should process deals for all users', async () => {
|
||||
const mockPersonalizationRepo = {
|
||||
getBestSalePricesForAllUsers: vi.fn().mockResolvedValue([mockDeal]),
|
||||
};
|
||||
const mockNotificationRepo = {
|
||||
createBulkNotifications: vi.fn().mockResolvedValue([]),
|
||||
};
|
||||
const mockEmailQueue = {
|
||||
add: vi.fn().mockResolvedValue({ id: 'job-1' }),
|
||||
};
|
||||
const mockLogger = {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
};
|
||||
|
||||
const service = new BackgroundJobService(
|
||||
mockPersonalizationRepo as any,
|
||||
mockNotificationRepo as any,
|
||||
mockEmailQueue as any,
|
||||
mockLogger as any,
|
||||
);
|
||||
|
||||
await service.runDailyDealCheck();
|
||||
|
||||
expect(mockPersonalizationRepo.getBestSalePricesForAllUsers).toHaveBeenCalled();
|
||||
expect(mockEmailQueue.add).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Processing Service with DI
|
||||
|
||||
Located in `src/services/flyer/flyerProcessingService.ts`:
|
||||
|
||||
```typescript
|
||||
export class FlyerProcessingService {
|
||||
constructor(
|
||||
private fileHandler: FlyerFileHandler,
|
||||
private aiProcessor: FlyerAiProcessor,
|
||||
private fsAdapter: FileSystemAdapter,
|
||||
private cleanupQueue: Queue<CleanupJobData>,
|
||||
private dataTransformer: FlyerDataTransformer,
|
||||
private persistenceService: FlyerPersistenceService,
|
||||
) {}
|
||||
|
||||
async processFlyer(filePath: string, logger: Logger): Promise<ProcessedFlyer> {
|
||||
// Use injected dependencies
|
||||
const fileInfo = await this.fileHandler.extractMetadata(filePath);
|
||||
const aiResult = await this.aiProcessor.analyze(filePath, logger);
|
||||
const transformed = this.dataTransformer.transform(aiResult);
|
||||
const saved = await this.persistenceService.save(transformed, logger);
|
||||
|
||||
// Queue cleanup
|
||||
await this.cleanupQueue.add('cleanup', { filePath });
|
||||
|
||||
return saved;
|
||||
}
|
||||
}
|
||||
|
||||
// Composition root
|
||||
const flyerProcessingService = new FlyerProcessingService(
|
||||
new FlyerFileHandler(fsAdapter, execAsync),
|
||||
new FlyerAiProcessor(aiService, db.personalizationRepo),
|
||||
fsAdapter,
|
||||
cleanupQueue,
|
||||
new FlyerDataTransformer(),
|
||||
new FlyerPersistenceService(),
|
||||
);
|
||||
```
|
||||
|
||||
### Interface Segregation
|
||||
|
||||
Use the minimum interface required:
|
||||
|
||||
```typescript
|
||||
// Bad - depends on full Pool
|
||||
constructor(pool: Pool) {}
|
||||
|
||||
// Good - depends only on what's needed
|
||||
constructor(db: Pick<Pool | PoolClient, 'query'>) {}
|
||||
```
|
||||
|
||||
This allows injecting either a `Pool`, `PoolClient` (for transactions), or a mock object with just a `query` method.
|
||||
|
||||
### Composition Root Pattern
|
||||
|
||||
Wire all dependencies at application startup:
|
||||
|
||||
```typescript
|
||||
// src/services/db/index.db.ts - Composition root for repositories
|
||||
import { getPool } from './connection.db';
|
||||
|
||||
export const userRepo = new UserRepository(getPool());
|
||||
export const flyerRepo = new FlyerRepository(getPool());
|
||||
export const adminRepo = new AdminRepository(getPool());
|
||||
export const personalizationRepo = new PersonalizationRepository(getPool());
|
||||
export const notificationRepo = new NotificationRepository(getPool());
|
||||
|
||||
export const db = {
|
||||
userRepo,
|
||||
flyerRepo,
|
||||
adminRepo,
|
||||
personalizationRepo,
|
||||
notificationRepo,
|
||||
};
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Testability**: Unit tests can inject mocks without modifying production code.
|
||||
- **Flexibility**: Swap implementations (e.g., different database adapters) easily.
|
||||
- **Explicit Dependencies**: Clear contract of what a component needs.
|
||||
- **Transaction Support**: Repositories can participate in transactions by accepting a client.
|
||||
|
||||
### Negative
|
||||
|
||||
- **More Boilerplate**: Constructors become longer with many dependencies.
|
||||
- **Composition Complexity**: Must wire dependencies somewhere (composition root).
|
||||
- **No Runtime Type Checking**: TypeScript types are erased at runtime.
|
||||
|
||||
### Mitigation
|
||||
|
||||
For complex services with many dependencies, consider:
|
||||
|
||||
1. **Factory Functions**: Encapsulate construction logic.
|
||||
2. **Dependency Groups**: Pass related dependencies as a single object.
|
||||
3. **DI Containers**: For very large applications, consider a DI library like `tsyringe` or `inversify`.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/db/*.db.ts` - Repository classes with constructor DI
|
||||
- `src/services/db/index.db.ts` - Composition root for repositories
|
||||
- `src/services/backgroundJobService.ts` - Service class with constructor DI
|
||||
- `src/services/flyer/flyerProcessingService.ts` - Complex service with multiple dependencies
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-002](./0002-standardized-transaction-management.md) - Transaction Management
|
||||
- [ADR-034](./0034-repository-pattern-standards.md) - Repository Pattern Standards
|
||||
- [ADR-035](./0035-service-layer-architecture.md) - Service Layer Architecture
|
||||
145
docs/adr/adr-implementation-tracker.md
Normal file
145
docs/adr/adr-implementation-tracker.md
Normal file
@@ -0,0 +1,145 @@
|
||||
# ADR Implementation Tracker
|
||||
|
||||
This document tracks the implementation status and estimated effort for all Architectural Decision Records (ADRs).
|
||||
|
||||
## Effort Estimation Guide
|
||||
|
||||
| Rating | Description | Typical Duration |
|
||||
| ------ | ------------------------------------------- | ----------------- |
|
||||
| S | Small - Simple, isolated changes | 1-2 hours |
|
||||
| M | Medium - Multiple files, some testing | Half day to 1 day |
|
||||
| L | Large - Significant refactoring, many files | 1-3 days |
|
||||
| XL | Extra Large - Major architectural change | 1+ weeks |
|
||||
|
||||
## Implementation Status Overview
|
||||
|
||||
| Status | Count |
|
||||
| ---------------------------- | ----- |
|
||||
| Accepted (Fully Implemented) | 24 |
| Partially Implemented | 2 |
| Proposed (Not Started) | 13 |
|
||||
|
||||
---
|
||||
|
||||
## Detailed Implementation Status
|
||||
|
||||
### Category 1: Foundational / Core Infrastructure
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ---------------------------------------------------------------- | ----------------------- | -------- | ------ | ------------------------------ |
|
||||
| [ADR-002](./0002-standardized-transaction-management.md) | Transaction Management | Accepted | - | Fully implemented |
|
||||
| [ADR-007](./0007-configuration-and-secrets-management.md) | Configuration & Secrets | Accepted | - | Fully implemented |
|
||||
| [ADR-020](./0020-health-checks-and-liveness-readiness-probes.md) | Health Checks | Accepted | - | Fully implemented |
|
||||
| [ADR-030](./0030-graceful-degradation-and-circuit-breaker.md) | Circuit Breaker | Proposed | L | New resilience patterns needed |
|
||||
|
||||
### Category 2: Data Management
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| --------------------------------------------------------------- | ------------------------ | -------- | ------ | ------------------------------ |
|
||||
| [ADR-009](./0009-caching-strategy-for-read-heavy-operations.md) | Caching Strategy | Accepted | - | Fully implemented |
|
||||
| [ADR-013](./0013-database-schema-migration-strategy.md) | Schema Migrations v1 | Proposed | M | Superseded by ADR-023 |
|
||||
| [ADR-019](./0019-data-backup-and-recovery-strategy.md) | Backup & Recovery | Accepted | - | Fully implemented |
|
||||
| [ADR-023](./0023-database-schema-migration-strategy.md) | Schema Migrations v2 | Proposed | L | Requires tooling setup |
|
||||
| [ADR-031](./0031-data-retention-and-privacy-compliance.md) | Data Retention & Privacy | Proposed | XL | Legal/compliance review needed |
|
||||
|
||||
### Category 3: API & Integration
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ------------------------------------------------------------------- | ------------------------ | ----------- | ------ | ------------------------------------- |
|
||||
| [ADR-003](./0003-standardized-input-validation-using-middleware.md) | Input Validation | Accepted | - | Fully implemented |
|
||||
| [ADR-008](./0008-api-versioning-strategy.md) | API Versioning | Proposed | L | Major URL/routing changes |
|
||||
| [ADR-018](./0018-api-documentation-strategy.md) | API Documentation | Proposed | M | OpenAPI/Swagger setup |
|
||||
| [ADR-022](./0022-real-time-notification-system.md) | Real-time Notifications | Proposed | XL | WebSocket infrastructure |
|
||||
| [ADR-028](./0028-api-response-standardization.md) | Response Standardization | Implemented | L | Completed (routes, middleware, tests) |
|
||||
|
||||
### Category 4: Security & Compliance
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ----------------------------------------------------------------------- | --------------------- | -------- | ------ | -------------------------------- |
|
||||
| [ADR-001](./0001-standardized-error-handling.md) | Error Handling | Accepted | - | Fully implemented |
|
||||
| [ADR-011](./0011-advanced-authorization-and-access-control-strategy.md) | Authorization & RBAC | Proposed | XL | Policy engine, permission system |
|
||||
| [ADR-016](./0016-api-security-hardening.md) | Security Hardening | Accepted | - | Fully implemented |
|
||||
| [ADR-029](./0029-secret-rotation-and-key-management.md) | Secret Rotation | Proposed | L | Infrastructure changes needed |
|
||||
| [ADR-032](./0032-rate-limiting-strategy.md) | Rate Limiting | Accepted | - | Fully implemented |
|
||||
| [ADR-033](./0033-file-upload-and-storage-strategy.md) | File Upload & Storage | Accepted | - | Fully implemented |
|
||||
|
||||
### Category 5: Observability & Monitoring
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| -------------------------------------------------------------------------- | -------------------- | -------- | ------ | ----------------------- |
|
||||
| [ADR-004](./0004-standardized-application-wide-structured-logging.md) | Structured Logging | Accepted | - | Fully implemented |
|
||||
| [ADR-015](./0015-application-performance-monitoring-and-error-tracking.md) | APM & Error Tracking | Proposed | M | Third-party integration |
|
||||
|
||||
### Category 6: Deployment & Operations
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| -------------------------------------------------------------- | ----------------- | -------- | ------ | -------------------------- |
|
||||
| [ADR-006](./0006-background-job-processing-and-task-queues.md) | Background Jobs | Accepted | - | Fully implemented |
|
||||
| [ADR-014](./0014-containerization-and-deployment-strategy.md) | Containerization | Partial | M | Docker done, K8s pending |
|
||||
| [ADR-017](./0017-ci-cd-and-branching-strategy.md) | CI/CD & Branching | Accepted | - | Fully implemented |
|
||||
| [ADR-024](./0024-feature-flagging-strategy.md) | Feature Flags | Proposed | M | New service/library needed |
|
||||
| [ADR-037](./0037-scheduled-jobs-and-cron-pattern.md) | Scheduled Jobs | Accepted | - | Fully implemented |
|
||||
| [ADR-038](./0038-graceful-shutdown-pattern.md) | Graceful Shutdown | Accepted | - | Fully implemented |
|
||||
|
||||
### Category 7: Frontend / User Interface
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ------------------------------------------------------------------------ | ------------------- | -------- | ------ | ------------------------------------------- |
|
||||
| [ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md) | State Management | Accepted | - | Fully implemented |
|
||||
| [ADR-012](./0012-frontend-component-library-and-design-system.md) | Component Library | Partial | L | Core components done, design tokens pending |
|
||||
| [ADR-025](./0025-internationalization-and-localization-strategy.md) | i18n & l10n | Proposed | XL | All UI strings need extraction |
|
||||
| [ADR-026](./0026-standardized-client-side-structured-logging.md) | Client-Side Logging | Proposed | M | Browser logging infrastructure |
|
||||
|
||||
### Category 8: Development Workflow & Quality
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ----------------------------------------------------------------------------- | -------------------- | -------- | ------ | ----------------- |
|
||||
| [ADR-010](./0010-testing-strategy-and-standards.md) | Testing Strategy | Accepted | - | Fully implemented |
|
||||
| [ADR-021](./0021-code-formatting-and-linting-unification.md) | Formatting & Linting | Accepted | - | Fully implemented |
|
||||
| [ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md) | Naming Conventions | Accepted | - | Fully implemented |
|
||||
|
||||
### Category 9: Architecture Patterns
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| -------------------------------------------------- | -------------------- | -------- | ------ | ----------------- |
|
||||
| [ADR-034](./0034-repository-pattern-standards.md) | Repository Pattern | Accepted | - | Fully implemented |
|
||||
| [ADR-035](./0035-service-layer-architecture.md) | Service Layer | Accepted | - | Fully implemented |
|
||||
| [ADR-036](./0036-event-bus-and-pub-sub-pattern.md) | Event Bus | Accepted | - | Fully implemented |
|
||||
| [ADR-039](./0039-dependency-injection-pattern.md) | Dependency Injection | Accepted | - | Fully implemented |
|
||||
|
||||
---
|
||||
|
||||
## Work Still To Be Completed (Priority Order)
|
||||
|
||||
These ADRs are proposed but not yet implemented, ordered by suggested implementation priority:
|
||||
|
||||
| Priority | ADR | Title | Effort | Rationale |
|
||||
| -------- | ------- | ------------------------ | ------ | ----------------------------------------------------- |
|
||||
| 1 | ADR-018 | API Documentation | M | Improves developer experience, enables SDK generation |
|
||||
| 2 | ADR-015 | APM & Error Tracking | M | Production visibility, debugging |
|
||||
| 3 | ADR-024 | Feature Flags | M | Safer deployments, A/B testing |
|
||||
| 4 | ADR-026 | Client-Side Logging | M | Frontend debugging parity |
|
||||
| 5 | ADR-023 | Schema Migrations v2 | L | Database evolution support |
|
||||
| 6 | ADR-029 | Secret Rotation | L | Security improvement |
|
||||
| 7 | ADR-008 | API Versioning | L | Future API evolution |
|
||||
| 8 | ADR-030 | Circuit Breaker | L | Resilience improvement |
|
||||
| 9 | ADR-022 | Real-time Notifications | XL | Major feature enhancement |
|
||||
| 10 | ADR-011 | Authorization & RBAC | XL | Advanced permission system |
|
||||
| 11 | ADR-025 | i18n & l10n | XL | Multi-language support |
|
||||
| 12 | ADR-031 | Data Retention & Privacy | XL | Compliance requirements |
|
||||
|
||||
---
|
||||
|
||||
## Recent Implementation History
|
||||
|
||||
| Date | ADR | Change |
|
||||
| ---------- | ------- | ------------------------------------------------------------- |
|
||||
| 2026-01-09 | ADR-028 | Fully implemented - all routes, middleware, and tests updated |
|
||||
|
||||
---
|
||||
|
||||
## Notes
|
||||
|
||||
- **Effort estimates** are rough guidelines and may vary based on current codebase state
|
||||
- **Dependencies** between ADRs should be considered when planning implementation order
|
||||
- This document should be updated when ADRs are implemented or status changes
|
||||
@@ -11,9 +11,9 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
|
||||
## 2. Data Management
|
||||
|
||||
**[ADR-009](./0009-caching-strategy-for-read-heavy-operations.md)**: Caching Strategy for Read-Heavy Operations (Partially Implemented)
|
||||
**[ADR-009](./0009-caching-strategy-for-read-heavy-operations.md)**: Caching Strategy for Read-Heavy Operations (Accepted)
|
||||
**[ADR-013](./0013-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
|
||||
**[ADR-019](./0019-data-backup-and-recovery-strategy.md)**: Data Backup and Recovery Strategy (Proposed)
|
||||
**[ADR-019](./0019-data-backup-and-recovery-strategy.md)**: Data Backup and Recovery Strategy (Accepted)
|
||||
**[ADR-023](./0023-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
|
||||
**[ADR-031](./0031-data-retention-and-privacy-compliance.md)**: Data Retention and Privacy Compliance (Proposed)
|
||||
|
||||
@@ -23,7 +23,7 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-008](./0008-api-versioning-strategy.md)**: API Versioning Strategy (Proposed)
|
||||
**[ADR-018](./0018-api-documentation-strategy.md)**: API Documentation Strategy (Proposed)
|
||||
**[ADR-022](./0022-real-time-notification-system.md)**: Real-time Notification System (Proposed)
|
||||
**[ADR-028](./0028-api-response-standardization.md)**: API Response Standardization and Envelope Pattern (Proposed)
|
||||
**[ADR-028](./0028-api-response-standardization.md)**: API Response Standardization and Envelope Pattern (Implemented)
|
||||
|
||||
## 4. Security & Compliance
|
||||
|
||||
@@ -31,6 +31,8 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-011](./0011-advanced-authorization-and-access-control-strategy.md)**: Advanced Authorization and Access Control Strategy (Proposed)
|
||||
**[ADR-016](./0016-api-security-hardening.md)**: API Security Hardening (Accepted)
|
||||
**[ADR-029](./0029-secret-rotation-and-key-management.md)**: Secret Rotation and Key Management Strategy (Proposed)
|
||||
**[ADR-032](./0032-rate-limiting-strategy.md)**: Rate Limiting Strategy (Accepted)
|
||||
**[ADR-033](./0033-file-upload-and-storage-strategy.md)**: File Upload and Storage Strategy (Accepted)
|
||||
|
||||
## 5. Observability & Monitoring
|
||||
|
||||
@@ -39,10 +41,12 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
|
||||
## 6. Deployment & Operations
|
||||
|
||||
**[ADR-006](./0006-background-job-processing-and-task-queues.md)**: Background Job Processing and Task Queues (Partially Implemented)
|
||||
**[ADR-014](./0014-containerization-and-deployment-strategy.md)**: Containerization and Deployment Strategy (Proposed)
|
||||
**[ADR-017](./0017-ci-cd-and-branching-strategy.md)**: CI/CD and Branching Strategy (Proposed)
|
||||
**[ADR-006](./0006-background-job-processing-and-task-queues.md)**: Background Job Processing and Task Queues (Accepted)
|
||||
**[ADR-014](./0014-containerization-and-deployment-strategy.md)**: Containerization and Deployment Strategy (Partially Implemented)
|
||||
**[ADR-017](./0017-ci-cd-and-branching-strategy.md)**: CI/CD and Branching Strategy (Accepted)
|
||||
**[ADR-024](./0024-feature-flagging-strategy.md)**: Feature Flagging Strategy (Proposed)
|
||||
**[ADR-037](./0037-scheduled-jobs-and-cron-pattern.md)**: Scheduled Jobs and Cron Pattern (Accepted)
|
||||
**[ADR-038](./0038-graceful-shutdown-pattern.md)**: Graceful Shutdown Pattern (Accepted)
|
||||
|
||||
## 7. Frontend / User Interface
|
||||
|
||||
@@ -56,3 +60,10 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-010](./0010-testing-strategy-and-standards.md)**: Testing Strategy and Standards (Accepted)
|
||||
**[ADR-021](./0021-code-formatting-and-linting-unification.md)**: Code Formatting and Linting Unification (Accepted)
|
||||
**[ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md)**: Standardized Naming Convention for AI and Database Types (Accepted)
|
||||
|
||||
## 9. Architecture Patterns
|
||||
|
||||
**[ADR-034](./0034-repository-pattern-standards.md)**: Repository Pattern Standards (Accepted)
|
||||
**[ADR-035](./0035-service-layer-architecture.md)**: Service Layer Architecture (Accepted)
|
||||
**[ADR-036](./0036-event-bus-and-pub-sub-pattern.md)**: Event Bus and Pub/Sub Pattern (Accepted)
|
||||
**[ADR-039](./0039-dependency-injection-pattern.md)**: Dependency Injection Pattern (Accepted)
|
||||
|
||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "flyer-crawler",
|
||||
"version": "0.9.71",
|
||||
"version": "0.9.75",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "flyer-crawler",
|
||||
"version": "0.9.71",
|
||||
"version": "0.9.75",
|
||||
"dependencies": {
|
||||
"@bull-board/api": "^6.14.2",
|
||||
"@bull-board/express": "^6.14.2",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "flyer-crawler",
|
||||
"private": true,
|
||||
"version": "0.9.71",
|
||||
"version": "0.9.75",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
||||
@@ -25,7 +25,7 @@
|
||||
"db:reset:dev": "NODE_ENV=development tsx src/db/seed.ts",
|
||||
"db:reset:test": "NODE_ENV=test tsx src/db/seed.ts",
|
||||
"worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts",
|
||||
"prepare": "husky"
|
||||
"prepare": "node -e \"try { require.resolve('husky') } catch (e) { process.exit(0) }\" && husky || true"
|
||||
},
|
||||
"dependencies": {
|
||||
"@bull-board/api": "^6.14.2",
|
||||
|
||||
150
scripts/docker-init.sh
Normal file
150
scripts/docker-init.sh
Normal file
@@ -0,0 +1,150 @@
|
||||
#!/bin/bash
|
||||
# scripts/docker-init.sh
|
||||
# ============================================================================
|
||||
# CONTAINER INITIALIZATION SCRIPT
|
||||
# ============================================================================
|
||||
# Purpose:
|
||||
# This script is run when the dev container is created for the first time.
|
||||
# It handles all first-run setup tasks to ensure a fully working environment.
|
||||
#
|
||||
# Tasks performed:
|
||||
# 1. Install npm dependencies (if not already done)
|
||||
# 2. Wait for PostgreSQL to be ready
|
||||
# 3. Wait for Redis to be ready
|
||||
# 4. Initialize the database schema
|
||||
# 5. Seed the database with development data
|
||||
#
|
||||
# Usage:
|
||||
# This script is called automatically by devcontainer.json's postCreateCommand.
|
||||
# It can also be run manually: ./scripts/docker-init.sh
|
||||
# ============================================================================
|
||||
|
||||
set -e # Exit immediately on error
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
log_info() {
|
||||
echo -e "${BLUE}[INFO]${NC} $1"
|
||||
}
|
||||
|
||||
log_success() {
|
||||
echo -e "${GREEN}[SUCCESS]${NC} $1"
|
||||
}
|
||||
|
||||
log_warning() {
|
||||
echo -e "${YELLOW}[WARNING]${NC} $1"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
echo -e "${RED}[ERROR]${NC} $1"
|
||||
}
|
||||
|
||||
# ============================================================================
|
||||
# 1. Install npm dependencies
|
||||
# ============================================================================
|
||||
log_info "Step 1/5: Installing npm dependencies..."
|
||||
if [ -d "node_modules" ] && [ -f "node_modules/.package-lock.json" ]; then
|
||||
log_info "node_modules exists, running npm install to sync..."
|
||||
fi
|
||||
npm install
|
||||
log_success "npm dependencies installed."
|
||||
|
||||
# ============================================================================
|
||||
# 2. Wait for PostgreSQL to be ready
|
||||
# ============================================================================
|
||||
log_info "Step 2/5: Waiting for PostgreSQL to be ready..."
|
||||
|
||||
POSTGRES_HOST="${DB_HOST:-postgres}"
|
||||
POSTGRES_PORT="${DB_PORT:-5432}"
|
||||
POSTGRES_USER="${DB_USER:-postgres}"
|
||||
POSTGRES_DB="${DB_NAME:-flyer_crawler_dev}"
|
||||
|
||||
MAX_RETRIES=30
|
||||
RETRY_COUNT=0
|
||||
|
||||
until PGPASSWORD="${DB_PASSWORD:-postgres}" psql -h "$POSTGRES_HOST" -p "$POSTGRES_PORT" -U "$POSTGRES_USER" -d "postgres" -c '\q' 2>/dev/null; do
|
||||
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||
if [ $RETRY_COUNT -ge $MAX_RETRIES ]; then
|
||||
log_error "PostgreSQL did not become ready after $MAX_RETRIES attempts. Exiting."
|
||||
exit 1
|
||||
fi
|
||||
log_warning "PostgreSQL is not ready yet (attempt $RETRY_COUNT/$MAX_RETRIES). Waiting 2 seconds..."
|
||||
sleep 2
|
||||
done
|
||||
|
||||
log_success "PostgreSQL is ready."
|
||||
|
||||
# ============================================================================
|
||||
# 3. Wait for Redis to be ready
|
||||
# ============================================================================
|
||||
log_info "Step 3/5: Waiting for Redis to be ready..."
|
||||
|
||||
REDIS_HOST="${REDIS_HOST:-redis}"
|
||||
REDIS_PORT="${REDIS_PORT:-6379}"
|
||||
|
||||
MAX_RETRIES=30
|
||||
RETRY_COUNT=0
|
||||
|
||||
# Extract host from REDIS_URL if set
|
||||
if [ -n "$REDIS_URL" ]; then
|
||||
# Parse redis://host:port format
|
||||
REDIS_HOST=$(echo "$REDIS_URL" | sed -E 's|redis://([^:]+):?.*|\1|')
|
||||
fi
|
||||
|
||||
until redis-cli -h "$REDIS_HOST" -p "$REDIS_PORT" ping 2>/dev/null | grep -q PONG; do
|
||||
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||
if [ $RETRY_COUNT -ge $MAX_RETRIES ]; then
|
||||
log_error "Redis did not become ready after $MAX_RETRIES attempts. Exiting."
|
||||
exit 1
|
||||
fi
|
||||
log_warning "Redis is not ready yet (attempt $RETRY_COUNT/$MAX_RETRIES). Waiting 2 seconds..."
|
||||
sleep 2
|
||||
done
|
||||
|
||||
log_success "Redis is ready."
|
||||
|
||||
# ============================================================================
|
||||
# 4. Check if database needs initialization
|
||||
# ============================================================================
|
||||
log_info "Step 4/5: Checking database state..."
|
||||
|
||||
# Check if the users table exists (indicator of initialized schema)
|
||||
TABLE_EXISTS=$(PGPASSWORD="${DB_PASSWORD:-postgres}" psql -h "$POSTGRES_HOST" -p "$POSTGRES_PORT" -U "$POSTGRES_USER" -d "$POSTGRES_DB" -t -c "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'users');" 2>/dev/null | tr -d '[:space:]' || echo "f")
|
||||
|
||||
if [ "$TABLE_EXISTS" = "t" ]; then
|
||||
log_info "Database schema already exists. Skipping initialization."
|
||||
log_info "To reset the database, run: npm run db:reset:dev"
|
||||
else
|
||||
log_info "Database schema not found. Initializing..."
|
||||
|
||||
# ============================================================================
|
||||
# 5. Initialize and seed the database
|
||||
# ============================================================================
|
||||
log_info "Step 5/5: Running database initialization and seed..."
|
||||
|
||||
# The db:reset:dev script handles both schema creation and seeding
|
||||
npm run db:reset:dev
|
||||
|
||||
log_success "Database initialized and seeded successfully."
|
||||
fi
|
||||
|
||||
# ============================================================================
|
||||
# Done!
|
||||
# ============================================================================
|
||||
echo ""
|
||||
log_success "=========================================="
|
||||
log_success "Container initialization complete!"
|
||||
log_success "=========================================="
|
||||
echo ""
|
||||
log_info "Default test accounts:"
|
||||
echo " Admin: admin@example.com / adminpass"
|
||||
echo " User: user@example.com / userpass"
|
||||
echo ""
|
||||
log_info "To start the development server, run:"
|
||||
echo " npm run dev:container"
|
||||
echo ""
|
||||
24
sql/00-init-extensions.sql
Normal file
24
sql/00-init-extensions.sql
Normal file
@@ -0,0 +1,24 @@
|
||||
-- sql/00-init-extensions.sql
|
||||
-- ============================================================================
|
||||
-- DATABASE EXTENSIONS INITIALIZATION
|
||||
-- ============================================================================
|
||||
-- This script is automatically run by PostgreSQL on database creation
|
||||
-- when placed in /docker-entrypoint-initdb.d/
|
||||
--
|
||||
-- It creates the required extensions before the schema is loaded.
|
||||
-- ============================================================================
|
||||
|
||||
-- Enable UUID generation
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- Enable trigram fuzzy text search
|
||||
CREATE EXTENSION IF NOT EXISTS pg_trgm;
|
||||
|
||||
-- Enable PostGIS for geographic queries (usually pre-installed in postgis image)
|
||||
CREATE EXTENSION IF NOT EXISTS postgis;
|
||||
|
||||
-- Log completion
|
||||
DO $$
|
||||
BEGIN
|
||||
RAISE NOTICE '✅ All required PostgreSQL extensions have been created';
|
||||
END $$;
|
||||
@@ -114,10 +114,13 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/generic-error');
|
||||
expect(response.status).toBe(500);
|
||||
// In test/dev, we now expect a stack trace for 5xx errors.
|
||||
expect(response.body.message).toBe('A generic server error occurred.');
|
||||
expect(response.body.stack).toBeDefined();
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
console.log('[DEBUG] errorHandler.test.ts: Received 500 error response with ID:', response.body.errorId);
|
||||
expect(response.body.error.message).toBe('A generic server error occurred.');
|
||||
expect(response.body.error.details.stack).toBeDefined();
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
console.log(
|
||||
'[DEBUG] errorHandler.test.ts: Received 500 error response with ID:',
|
||||
response.body.meta.requestId,
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
@@ -136,7 +139,10 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/http-error-404');
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body).toEqual({ message: 'Resource not found' });
|
||||
expect(response.body).toEqual({
|
||||
success: false,
|
||||
error: { code: 'NOT_FOUND', message: 'Resource not found' },
|
||||
});
|
||||
expect(mockLogger.error).not.toHaveBeenCalled(); // 4xx errors are not logged as server errors
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{
|
||||
@@ -152,7 +158,10 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/not-found-error');
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body).toEqual({ message: 'Specific resource missing' });
|
||||
expect(response.body).toEqual({
|
||||
success: false,
|
||||
error: { code: 'NOT_FOUND', message: 'Specific resource missing' },
|
||||
});
|
||||
expect(mockLogger.error).not.toHaveBeenCalled();
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{
|
||||
@@ -168,7 +177,10 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/fk-error');
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body).toEqual({ message: 'The referenced item does not exist.' });
|
||||
expect(response.body).toEqual({
|
||||
success: false,
|
||||
error: { code: 'BAD_REQUEST', message: 'The referenced item does not exist.' },
|
||||
});
|
||||
expect(mockLogger.error).not.toHaveBeenCalled();
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{
|
||||
@@ -184,7 +196,10 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/unique-error');
|
||||
|
||||
expect(response.status).toBe(409); // 409 Conflict
|
||||
expect(response.body).toEqual({ message: 'This item already exists.' });
|
||||
expect(response.body).toEqual({
|
||||
success: false,
|
||||
error: { code: 'CONFLICT', message: 'This item already exists.' },
|
||||
});
|
||||
expect(mockLogger.error).not.toHaveBeenCalled();
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{
|
||||
@@ -200,9 +215,9 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/validation-error');
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('Input validation failed');
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors).toEqual([
|
||||
expect(response.body.error.message).toBe('Input validation failed');
|
||||
expect(response.body.error.details).toBeDefined();
|
||||
expect(response.body.error.details).toEqual([
|
||||
{ path: ['body', 'email'], message: 'Invalid email format' },
|
||||
]);
|
||||
expect(mockLogger.error).not.toHaveBeenCalled(); // 4xx errors are not logged as server errors
|
||||
@@ -222,9 +237,9 @@ describe('errorHandler Middleware', () => {
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
// In test/dev, we now expect a stack trace for 5xx errors.
|
||||
expect(response.body.message).toBe('A database connection issue occurred.');
|
||||
expect(response.body.stack).toBeDefined();
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
expect(response.body.error.message).toBe('A database connection issue occurred.');
|
||||
expect(response.body.error.details.stack).toBeDefined();
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(DatabaseError),
|
||||
@@ -243,7 +258,10 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/unauthorized-error-no-status');
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.body).toEqual({ message: 'Invalid Token' });
|
||||
expect(response.body).toEqual({
|
||||
success: false,
|
||||
error: { code: 'UNAUTHORIZED', message: 'Invalid Token' },
|
||||
});
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{
|
||||
err: expect.any(Error),
|
||||
@@ -258,7 +276,10 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/unauthorized-error-with-status');
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.body).toEqual({ message: 'Invalid Token' });
|
||||
expect(response.body).toEqual({
|
||||
success: false,
|
||||
error: { code: 'UNAUTHORIZED', message: 'Invalid Token' },
|
||||
});
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{
|
||||
err: expect.any(Error),
|
||||
@@ -304,17 +325,17 @@ describe('errorHandler Middleware', () => {
|
||||
const response = await supertest(app).get('/generic-error');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toMatch(
|
||||
expect(response.body.error.message).toMatch(
|
||||
/An unexpected server error occurred. Please reference error ID: \w+/,
|
||||
);
|
||||
expect(response.body.stack).toBeUndefined();
|
||||
expect(response.body.error.details?.stack).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return the actual error message for client errors (4xx) in production', async () => {
|
||||
const response = await supertest(app).get('/http-error-404');
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Resource not found');
|
||||
expect(response.body.error.message).toBe('Resource not found');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
// src/middleware/errorHandler.ts
|
||||
// ============================================================================
|
||||
// CENTRALIZED ERROR HANDLING MIDDLEWARE
|
||||
// ============================================================================
|
||||
// This middleware standardizes all error responses per ADR-028.
|
||||
// It should be the LAST `app.use()` call to catch all errors.
|
||||
// ============================================================================
|
||||
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import crypto from 'crypto';
|
||||
import { ZodError } from 'zod';
|
||||
@@ -9,12 +16,43 @@ import {
|
||||
ValidationError,
|
||||
} from '../services/db/errors.db';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { ErrorCode, ApiErrorResponse } from '../types/api';
|
||||
|
||||
/**
|
||||
* Helper to send standardized error responses.
|
||||
*/
|
||||
function sendErrorResponse(
|
||||
res: Response,
|
||||
statusCode: number,
|
||||
code: string,
|
||||
message: string,
|
||||
details?: unknown,
|
||||
meta?: { requestId?: string; timestamp?: string },
|
||||
): Response<ApiErrorResponse> {
|
||||
const response: ApiErrorResponse = {
|
||||
success: false,
|
||||
error: {
|
||||
code,
|
||||
message,
|
||||
},
|
||||
};
|
||||
|
||||
if (details !== undefined) {
|
||||
response.error.details = details;
|
||||
}
|
||||
|
||||
if (meta) {
|
||||
response.meta = meta;
|
||||
}
|
||||
|
||||
return res.status(statusCode).json(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* A centralized error handling middleware for the Express application.
|
||||
* This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
|
||||
*
|
||||
* It standardizes error responses and ensures consistent logging.
|
||||
* It standardizes error responses per ADR-028 and ensures consistent logging per ADR-004.
|
||||
*/
|
||||
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
|
||||
// If headers have already been sent, delegate to the default Express error handler.
|
||||
@@ -29,16 +67,19 @@ export const errorHandler = (err: Error, req: Request, res: Response, next: Next
|
||||
if (err instanceof ZodError) {
|
||||
const statusCode = 400;
|
||||
const message = 'The request data is invalid.';
|
||||
const errors = err.issues.map((e) => ({ path: e.path, message: e.message }));
|
||||
log.warn({ err, validationErrors: errors, statusCode }, `Client Error on ${req.method} ${req.path}: ${message}`);
|
||||
return res.status(statusCode).json({ message, errors });
|
||||
const details = err.issues.map((e) => ({ path: e.path, message: e.message }));
|
||||
log.warn(
|
||||
{ err, validationErrors: details, statusCode },
|
||||
`Client Error on ${req.method} ${req.path}: ${message}`,
|
||||
);
|
||||
return sendErrorResponse(res, statusCode, ErrorCode.VALIDATION_ERROR, message, details);
|
||||
}
|
||||
|
||||
// --- Handle Custom Operational Errors ---
|
||||
if (err instanceof NotFoundError) {
|
||||
const statusCode = 404;
|
||||
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||
return res.status(statusCode).json({ message: err.message });
|
||||
return sendErrorResponse(res, statusCode, ErrorCode.NOT_FOUND, err.message);
|
||||
}
|
||||
|
||||
if (err instanceof ValidationError) {
|
||||
@@ -47,30 +88,66 @@ export const errorHandler = (err: Error, req: Request, res: Response, next: Next
|
||||
{ err, validationErrors: err.validationErrors, statusCode },
|
||||
`Client Error on ${req.method} ${req.path}: ${err.message}`,
|
||||
);
|
||||
return res.status(statusCode).json({ message: err.message, errors: err.validationErrors });
|
||||
return sendErrorResponse(
|
||||
res,
|
||||
statusCode,
|
||||
ErrorCode.VALIDATION_ERROR,
|
||||
err.message,
|
||||
err.validationErrors,
|
||||
);
|
||||
}
|
||||
|
||||
if (err instanceof UniqueConstraintError) {
|
||||
const statusCode = 409;
|
||||
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||
return res.status(statusCode).json({ message: err.message }); // Use 409 Conflict for unique constraints
|
||||
return sendErrorResponse(res, statusCode, ErrorCode.CONFLICT, err.message);
|
||||
}
|
||||
|
||||
if (err instanceof ForeignKeyConstraintError) {
|
||||
const statusCode = 400;
|
||||
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||
return res.status(statusCode).json({ message: err.message });
|
||||
return sendErrorResponse(res, statusCode, ErrorCode.BAD_REQUEST, err.message);
|
||||
}
|
||||
|
||||
// --- Handle Generic Client Errors (e.g., from express-jwt, or manual status setting) ---
|
||||
let status = (err as any).status || (err as any).statusCode;
|
||||
const errWithStatus = err as Error & { status?: number; statusCode?: number };
|
||||
let status = errWithStatus.status || errWithStatus.statusCode;
|
||||
// Default UnauthorizedError to 401 if no status is present, a common case for express-jwt.
|
||||
if (err.name === 'UnauthorizedError' && !status) {
|
||||
status = 401;
|
||||
}
|
||||
if (status && status >= 400 && status < 500) {
|
||||
log.warn({ err, statusCode: status }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||
return res.status(status).json({ message: err.message });
|
||||
log.warn(
|
||||
{ err, statusCode: status },
|
||||
`Client Error on ${req.method} ${req.path}: ${err.message}`,
|
||||
);
|
||||
|
||||
// Map status codes to error codes
|
||||
let errorCode: string;
|
||||
switch (status) {
|
||||
case 400:
|
||||
errorCode = ErrorCode.BAD_REQUEST;
|
||||
break;
|
||||
case 401:
|
||||
errorCode = ErrorCode.UNAUTHORIZED;
|
||||
break;
|
||||
case 403:
|
||||
errorCode = ErrorCode.FORBIDDEN;
|
||||
break;
|
||||
case 404:
|
||||
errorCode = ErrorCode.NOT_FOUND;
|
||||
break;
|
||||
case 409:
|
||||
errorCode = ErrorCode.CONFLICT;
|
||||
break;
|
||||
case 429:
|
||||
errorCode = ErrorCode.RATE_LIMITED;
|
||||
break;
|
||||
default:
|
||||
errorCode = ErrorCode.BAD_REQUEST;
|
||||
}
|
||||
|
||||
return sendErrorResponse(res, status, errorCode, err.message);
|
||||
}
|
||||
|
||||
// --- Handle All Other (500-level) Errors ---
|
||||
@@ -91,11 +168,23 @@ export const errorHandler = (err: Error, req: Request, res: Response, next: Next
|
||||
|
||||
// In production, send a generic message to avoid leaking implementation details.
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
return res.status(500).json({
|
||||
message: `An unexpected server error occurred. Please reference error ID: ${errorId}`,
|
||||
});
|
||||
return sendErrorResponse(
|
||||
res,
|
||||
500,
|
||||
ErrorCode.INTERNAL_ERROR,
|
||||
`An unexpected server error occurred. Please reference error ID: ${errorId}`,
|
||||
undefined,
|
||||
{ requestId: errorId },
|
||||
);
|
||||
}
|
||||
|
||||
// In non-production environments (dev, test, etc.), send more details for easier debugging.
|
||||
return res.status(500).json({ message: err.message, stack: err.stack, errorId });
|
||||
};
|
||||
return sendErrorResponse(
|
||||
res,
|
||||
500,
|
||||
ErrorCode.INTERNAL_ERROR,
|
||||
err.message,
|
||||
{ stack: err.stack },
|
||||
{ requestId: errorId },
|
||||
);
|
||||
};
|
||||
|
||||
@@ -170,7 +170,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.adminRepo.getSuggestedCorrections).mockResolvedValue(mockCorrections);
|
||||
const response = await supertest(app).get('/api/admin/corrections');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockCorrections);
|
||||
expect(response.body.data).toEqual(mockCorrections);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -179,7 +179,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
);
|
||||
const response = await supertest(app).get('/api/admin/corrections');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('POST /corrections/:id/approve should approve a correction', async () => {
|
||||
@@ -187,7 +187,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.adminRepo.approveCorrection).mockResolvedValue(undefined);
|
||||
const response = await supertest(app).post(`/api/admin/corrections/${correctionId}/approve`);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ message: 'Correction approved successfully.' });
|
||||
expect(response.body.data).toEqual({ message: 'Correction approved successfully.' });
|
||||
expect(vi.mocked(mockedDb.adminRepo.approveCorrection)).toHaveBeenCalledWith(
|
||||
correctionId,
|
||||
expect.anything(),
|
||||
@@ -206,7 +206,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.adminRepo.rejectCorrection).mockResolvedValue(undefined);
|
||||
const response = await supertest(app).post(`/api/admin/corrections/${correctionId}/reject`);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ message: 'Correction rejected successfully.' });
|
||||
expect(response.body.data).toEqual({ message: 'Correction rejected successfully.' });
|
||||
});
|
||||
|
||||
it('POST /corrections/:id/reject should return 500 on DB error', async () => {
|
||||
@@ -230,7 +230,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
.put(`/api/admin/corrections/${correctionId}`)
|
||||
.send(requestBody);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUpdatedCorrection);
|
||||
expect(response.body.data).toEqual(mockUpdatedCorrection);
|
||||
});
|
||||
|
||||
it('PUT /corrections/:id should return 400 for invalid data', async () => {
|
||||
@@ -248,7 +248,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
.put('/api/admin/corrections/999')
|
||||
.send({ suggested_value: 'new value' });
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Correction with ID 999 not found');
|
||||
expect(response.body.error.message).toBe('Correction with ID 999 not found');
|
||||
});
|
||||
|
||||
it('PUT /corrections/:id should return 500 on a generic DB error', async () => {
|
||||
@@ -259,7 +259,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
.put('/api/admin/corrections/101')
|
||||
.send({ suggested_value: 'new value' });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Generic DB Error');
|
||||
expect(response.body.error.message).toBe('Generic DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -272,7 +272,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.adminRepo.getFlyersForReview).mockResolvedValue(mockFlyers);
|
||||
const response = await supertest(app).get('/api/admin/review/flyers');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockFlyers);
|
||||
expect(response.body.data).toEqual(mockFlyers);
|
||||
expect(vi.mocked(mockedDb.adminRepo.getFlyersForReview)).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
);
|
||||
@@ -282,7 +282,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.adminRepo.getFlyersForReview).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).get('/api/admin/review/flyers');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -292,7 +292,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.adminRepo.getApplicationStats).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).get('/api/admin/stats');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -302,14 +302,14 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.flyerRepo.getAllBrands).mockResolvedValue(mockBrands);
|
||||
const response = await supertest(app).get('/api/admin/brands');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockBrands);
|
||||
expect(response.body.data).toEqual(mockBrands);
|
||||
});
|
||||
|
||||
it('GET /brands should return 500 on DB error', async () => {
|
||||
vi.mocked(mockedDb.flyerRepo.getAllBrands).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).get('/api/admin/brands');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('POST /brands/:id/logo should upload a logo and update the brand', async () => {
|
||||
@@ -319,7 +319,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
.post(`/api/admin/brands/${brandId}/logo`)
|
||||
.attach('logoImage', Buffer.from('dummy-logo-content'), 'test-logo.png');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe('Brand logo updated successfully.');
|
||||
expect(response.body.data.message).toBe('Brand logo updated successfully.');
|
||||
expect(vi.mocked(mockedDb.adminRepo.updateBrandLogo)).toHaveBeenCalledWith(
|
||||
brandId,
|
||||
expect.stringContaining('/flyer-images/'),
|
||||
@@ -339,7 +339,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
it('POST /brands/:id/logo should return 400 if no file is uploaded', async () => {
|
||||
const response = await supertest(app).post('/api/admin/brands/55/logo');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toMatch(
|
||||
expect(response.body.error.message).toMatch(
|
||||
/Logo image file is required|The request data is invalid|Logo image file is missing./,
|
||||
);
|
||||
});
|
||||
@@ -367,7 +367,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
.attach('logoImage', Buffer.from('this is not an image'), 'document.txt');
|
||||
expect(response.status).toBe(400);
|
||||
// This message comes from the handleMulterError middleware for the imageFileFilter
|
||||
expect(response.body.message).toBe('Only image files are allowed!');
|
||||
expect(response.body.error.message).toBe('Only image files are allowed!');
|
||||
});
|
||||
|
||||
it('POST /brands/:id/logo should return 400 for an invalid brand ID', async () => {
|
||||
@@ -414,7 +414,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
.put(`/api/admin/recipes/${recipeId}/status`)
|
||||
.send(requestBody);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUpdatedRecipe);
|
||||
expect(response.body.data).toEqual(mockUpdatedRecipe);
|
||||
});
|
||||
|
||||
it('PUT /recipes/:id/status should return 400 for an invalid status value', async () => {
|
||||
@@ -448,7 +448,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
.put(`/api/admin/comments/${commentId}/status`)
|
||||
.send(requestBody);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUpdatedComment);
|
||||
expect(response.body.data).toEqual(mockUpdatedComment);
|
||||
});
|
||||
|
||||
it('PUT /comments/:id/status should return 400 for an invalid status value', async () => {
|
||||
@@ -485,7 +485,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.mocked(mockedDb.adminRepo.getUnmatchedFlyerItems).mockResolvedValue(mockUnmatchedItems);
|
||||
const response = await supertest(app).get('/api/admin/unmatched-items');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUnmatchedItems);
|
||||
expect(response.body.data).toEqual(mockUnmatchedItems);
|
||||
});
|
||||
|
||||
it('GET /unmatched-items should return 500 on DB error', async () => {
|
||||
@@ -515,23 +515,21 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
);
|
||||
const response = await supertest(app).delete(`/api/admin/flyers/${flyerId}`);
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Flyer with ID 999 not found.');
|
||||
expect(response.body.error.message).toBe('Flyer with ID 999 not found.');
|
||||
});
|
||||
|
||||
it('DELETE /flyers/:flyerId should return 500 on a generic DB error', async () => {
|
||||
const flyerId = 42;
|
||||
vi.mocked(mockedDb.flyerRepo.deleteFlyer).mockRejectedValue(
|
||||
new Error('Generic DB Error'),
|
||||
);
|
||||
vi.mocked(mockedDb.flyerRepo.deleteFlyer).mockRejectedValue(new Error('Generic DB Error'));
|
||||
const response = await supertest(app).delete(`/api/admin/flyers/${flyerId}`);
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Generic DB Error');
|
||||
expect(response.body.error.message).toBe('Generic DB Error');
|
||||
});
|
||||
|
||||
it('DELETE /flyers/:flyerId should return 400 for an invalid flyerId', async () => {
|
||||
const response = await supertest(app).delete('/api/admin/flyers/abc');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(/Expected number, received nan/i);
|
||||
expect(response.body.error.details[0].message).toMatch(/Expected number, received nan/i);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -108,7 +108,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
// Use the instance method mock
|
||||
const response = await supertest(app).post('/api/admin/trigger/daily-deal-check');
|
||||
expect(response.status).toBe(202);
|
||||
expect(response.body.message).toContain('Daily deal check job has been triggered');
|
||||
expect(response.body.data.message).toContain('Daily deal check job has been triggered');
|
||||
expect(backgroundJobService.runDailyDealCheck).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
@@ -118,7 +118,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
});
|
||||
const response = await supertest(app).post('/api/admin/trigger/daily-deal-check');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Job runner failed');
|
||||
expect(response.body.error.message).toContain('Job runner failed');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -128,7 +128,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
vi.mocked(analyticsQueue.add).mockResolvedValue(mockJob);
|
||||
const response = await supertest(app).post('/api/admin/trigger/failing-job');
|
||||
expect(response.status).toBe(202);
|
||||
expect(response.body.message).toContain('Failing test job has been enqueued');
|
||||
expect(response.body.data.message).toContain('Failing test job has been enqueued');
|
||||
expect(analyticsQueue.add).toHaveBeenCalledWith('generate-daily-report', {
|
||||
reportDate: 'FAIL',
|
||||
});
|
||||
@@ -138,23 +138,29 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
vi.mocked(analyticsQueue.add).mockRejectedValue(new Error('Queue is down'));
|
||||
const response = await supertest(app).post('/api/admin/trigger/failing-job');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Queue is down');
|
||||
expect(response.body.error.message).toBe('Queue is down');
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /trigger/analytics-report', () => {
|
||||
it('should trigger the analytics report job and return 202 Accepted', async () => {
|
||||
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockResolvedValue('manual-report-job-123');
|
||||
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockResolvedValue(
|
||||
'manual-report-job-123',
|
||||
);
|
||||
|
||||
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
|
||||
|
||||
expect(response.status).toBe(202);
|
||||
expect(response.body.message).toContain('Analytics report generation job has been enqueued');
|
||||
expect(response.body.data.message).toContain(
|
||||
'Analytics report generation job has been enqueued',
|
||||
);
|
||||
expect(backgroundJobService.triggerAnalyticsReport).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should return 500 if enqueuing the analytics job fails', async () => {
|
||||
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockRejectedValue(new Error('Queue error'));
|
||||
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockRejectedValue(
|
||||
new Error('Queue error'),
|
||||
);
|
||||
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
|
||||
expect(response.status).toBe(500);
|
||||
});
|
||||
@@ -162,17 +168,21 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
|
||||
describe('POST /trigger/weekly-analytics', () => {
|
||||
it('should trigger the weekly analytics job and return 202 Accepted', async () => {
|
||||
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockResolvedValue('manual-weekly-report-job-123');
|
||||
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockResolvedValue(
|
||||
'manual-weekly-report-job-123',
|
||||
);
|
||||
|
||||
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
|
||||
|
||||
expect(response.status).toBe(202);
|
||||
expect(response.body.message).toContain('Successfully enqueued weekly analytics job');
|
||||
expect(response.body.data.message).toContain('Successfully enqueued weekly analytics job');
|
||||
expect(backgroundJobService.triggerWeeklyAnalyticsReport).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should return 500 if enqueuing the weekly analytics job fails', async () => {
|
||||
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockRejectedValue(new Error('Queue error'));
|
||||
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockRejectedValue(
|
||||
new Error('Queue error'),
|
||||
);
|
||||
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
|
||||
expect(response.status).toBe(500);
|
||||
});
|
||||
@@ -185,7 +195,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
vi.mocked(cleanupQueue.add).mockResolvedValue(mockJob);
|
||||
const response = await supertest(app).post(`/api/admin/flyers/${flyerId}/cleanup`);
|
||||
expect(response.status).toBe(202);
|
||||
expect(response.body.message).toBe(
|
||||
expect(response.body.data.message).toBe(
|
||||
`File cleanup job for flyer ID ${flyerId} has been enqueued.`,
|
||||
);
|
||||
expect(cleanupQueue.add).toHaveBeenCalledWith('cleanup-flyer-files', { flyerId });
|
||||
@@ -196,13 +206,13 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
vi.mocked(cleanupQueue.add).mockRejectedValue(new Error('Queue is down'));
|
||||
const response = await supertest(app).post(`/api/admin/flyers/${flyerId}/cleanup`);
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Queue is down');
|
||||
expect(response.body.error.message).toBe('Queue is down');
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid flyerId', async () => {
|
||||
const response = await supertest(app).post('/api/admin/flyers/abc/cleanup');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(/Expected number, received nan/i);
|
||||
expect(response.body.error.details[0].message).toMatch(/Expected number, received nan/i);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -224,7 +234,9 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe(`Job ${jobId} has been successfully marked for retry.`);
|
||||
expect(response.body.data.message).toBe(
|
||||
`Job ${jobId} has been successfully marked for retry.`,
|
||||
);
|
||||
expect(mockJob.retry).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
@@ -244,7 +256,9 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe(`Job with ID '${jobId}' not found in queue '${queueName}'.`);
|
||||
expect(response.body.error.message).toBe(
|
||||
`Job with ID '${jobId}' not found in queue '${queueName}'.`,
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 404 if the job ID is not found in the queue', async () => {
|
||||
@@ -253,7 +267,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
`/api/admin/jobs/${queueName}/not-found-job/retry`,
|
||||
);
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toContain('not found in queue');
|
||||
expect(response.body.error.message).toContain('not found in queue');
|
||||
});
|
||||
|
||||
it('should return 400 if the job is not in a failed state', async () => {
|
||||
@@ -267,7 +281,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe(
|
||||
expect(response.body.error.message).toBe(
|
||||
"Job is not in a 'failed' state. Current state: completed.",
|
||||
); // This is now handled by the errorHandler
|
||||
expect(mockJob.retry).not.toHaveBeenCalled();
|
||||
@@ -284,7 +298,7 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Cannot retry job');
|
||||
expect(response.body.error.message).toContain('Cannot retry job');
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid queueName or jobId', async () => {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// src/routes/admin.monitoring.routes.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import type { Request, Response, NextFunction } from 'express';
|
||||
import { createMockUserProfile, createMockActivityLogItem } from '../tests/utils/mockFactories';
|
||||
@@ -133,7 +133,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
||||
const response = await supertest(app).get('/api/admin/activity-log');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockLogs);
|
||||
expect(response.body.data).toEqual(mockLogs);
|
||||
expect(adminRepo.getActivityLog).toHaveBeenCalledWith(50, 0, expect.anything());
|
||||
});
|
||||
|
||||
@@ -148,15 +148,15 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
||||
it('should return 400 for invalid limit and offset query parameters', async () => {
|
||||
const response = await supertest(app).get('/api/admin/activity-log?limit=abc&offset=-1');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors.length).toBe(2); // Both limit and offset are invalid
|
||||
expect(response.body.error.details).toBeDefined();
|
||||
expect(response.body.error.details.length).toBe(2); // Both limit and offset are invalid
|
||||
});
|
||||
|
||||
it('should return 500 if fetching activity log fails', async () => {
|
||||
vi.mocked(adminRepo.getActivityLog).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).get('/api/admin/activity-log');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -177,7 +177,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual([
|
||||
expect(response.body.data).toEqual([
|
||||
{ name: 'flyer-processing', isRunning: true },
|
||||
{ name: 'email-sending', isRunning: true },
|
||||
{ name: 'analytics-reporting', isRunning: false },
|
||||
@@ -190,7 +190,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
||||
vi.mocked(monitoringService.getWorkerStatuses).mockRejectedValue(new Error('Worker Error'));
|
||||
const response = await supertest(app).get('/api/admin/workers/status');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Worker Error');
|
||||
expect(response.body.error.message).toBe('Worker Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -226,7 +226,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual([
|
||||
expect(response.body.data).toEqual([
|
||||
{
|
||||
name: 'flyer-processing',
|
||||
counts: { waiting: 5, active: 1, completed: 100, failed: 2, delayed: 0, paused: 0 },
|
||||
@@ -251,13 +251,11 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
||||
});
|
||||
|
||||
it('should return 500 if fetching queue counts fails', async () => {
|
||||
vi.mocked(monitoringService.getQueueStatuses).mockRejectedValue(
|
||||
new Error('Redis is down'),
|
||||
);
|
||||
vi.mocked(monitoringService.getQueueStatuses).mockRejectedValue(new Error('Redis is down'));
|
||||
|
||||
const response = await supertest(app).get('/api/admin/queues/status');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Redis is down');
|
||||
expect(response.body.error.message).toBe('Redis is down');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import { Router, NextFunction, Request, Response } from 'express';
|
||||
import passport from './passport.routes';
|
||||
import { isAdmin } from './passport.routes'; // Correctly imported
|
||||
import multer from 'multer';
|
||||
import { z } from 'zod';
|
||||
|
||||
import * as db from '../services/db/index.db';
|
||||
@@ -10,11 +9,8 @@ import type { UserProfile } from '../types';
|
||||
import { geocodingService } from '../services/geocodingService.server';
|
||||
import { cacheService } from '../services/cacheService.server';
|
||||
import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
|
||||
import {
|
||||
createUploadMiddleware,
|
||||
handleMulterError,
|
||||
} from '../middleware/multer.middleware';
|
||||
import { NotFoundError, ValidationError } from '../services/db/errors.db';
|
||||
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
|
||||
import { ValidationError } from '../services/db/errors.db';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
|
||||
// --- Bull Board (Job Queue UI) Imports ---
|
||||
@@ -22,15 +18,14 @@ import { createBullBoard } from '@bull-board/api';
|
||||
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter';
|
||||
import { ExpressAdapter } from '@bull-board/express';
|
||||
import { backgroundJobService } from '../services/backgroundJobService';
|
||||
import { flyerQueue, emailQueue, analyticsQueue, cleanupQueue, weeklyAnalyticsQueue } from '../services/queueService.server';
|
||||
import { getSimpleWeekAndYear } from '../utils/dateUtils';
|
||||
import {
|
||||
requiredString,
|
||||
numericIdParam,
|
||||
uuidParamSchema,
|
||||
optionalNumeric,
|
||||
optionalString,
|
||||
} from '../utils/zodUtils';
|
||||
flyerQueue,
|
||||
emailQueue,
|
||||
analyticsQueue,
|
||||
cleanupQueue,
|
||||
weeklyAnalyticsQueue,
|
||||
} from '../services/queueService.server';
|
||||
import { numericIdParam, uuidParamSchema, optionalNumeric } from '../utils/zodUtils';
|
||||
// Removed: import { logger } from '../services/logger.server';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { monitoringService } from '../services/monitoringService.server';
|
||||
@@ -38,6 +33,7 @@ import { userService } from '../services/userService';
|
||||
import { cleanupUploadedFile } from '../utils/fileUtils';
|
||||
import { brandService } from '../services/brandService';
|
||||
import { adminTriggerLimiter, adminUploadLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
|
||||
|
||||
const updateCorrectionSchema = numericIdParam('id').extend({
|
||||
body: z.object({
|
||||
@@ -126,7 +122,7 @@ router.use(passport.authenticate('jwt', { session: false }), isAdmin);
|
||||
router.get('/corrections', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
|
||||
res.json(corrections);
|
||||
sendSuccess(res, corrections);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching suggested corrections');
|
||||
next(error);
|
||||
@@ -137,8 +133,11 @@ router.get('/review/flyers', validateRequest(emptySchema), async (req, res, next
|
||||
try {
|
||||
req.log.debug('Fetching flyers for review via adminRepo');
|
||||
const flyers = await db.adminRepo.getFlyersForReview(req.log);
|
||||
req.log.info({ count: Array.isArray(flyers) ? flyers.length : 'unknown' }, 'Successfully fetched flyers for review');
|
||||
res.json(flyers);
|
||||
req.log.info(
|
||||
{ count: Array.isArray(flyers) ? flyers.length : 'unknown' },
|
||||
'Successfully fetched flyers for review',
|
||||
);
|
||||
sendSuccess(res, flyers);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching flyers for review');
|
||||
next(error);
|
||||
@@ -148,7 +147,7 @@ router.get('/review/flyers', validateRequest(emptySchema), async (req, res, next
|
||||
router.get('/brands', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const brands = await db.flyerRepo.getAllBrands(req.log);
|
||||
res.json(brands);
|
||||
sendSuccess(res, brands);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching brands');
|
||||
next(error);
|
||||
@@ -158,7 +157,7 @@ router.get('/brands', validateRequest(emptySchema), async (req, res, next: NextF
|
||||
router.get('/stats', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const stats = await db.adminRepo.getApplicationStats(req.log);
|
||||
res.json(stats);
|
||||
sendSuccess(res, stats);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching application stats');
|
||||
next(error);
|
||||
@@ -168,7 +167,7 @@ router.get('/stats', validateRequest(emptySchema), async (req, res, next: NextFu
|
||||
router.get('/stats/daily', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
|
||||
res.json(dailyStats);
|
||||
sendSuccess(res, dailyStats);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching daily stats');
|
||||
next(error);
|
||||
@@ -183,7 +182,7 @@ router.post(
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
|
||||
res.status(200).json({ message: 'Correction approved successfully.' });
|
||||
sendSuccess(res, { message: 'Correction approved successfully.' });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error approving correction');
|
||||
next(error);
|
||||
@@ -199,7 +198,7 @@ router.post(
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
|
||||
res.status(200).json({ message: 'Correction rejected successfully.' });
|
||||
sendSuccess(res, { message: 'Correction rejected successfully.' });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error rejecting correction');
|
||||
next(error);
|
||||
@@ -219,7 +218,7 @@ router.put(
|
||||
body.suggested_value,
|
||||
req.log,
|
||||
);
|
||||
res.status(200).json(updatedCorrection);
|
||||
sendSuccess(res, updatedCorrection);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error updating suggested correction');
|
||||
next(error);
|
||||
@@ -235,7 +234,7 @@ router.put(
|
||||
const { params, body } = req as unknown as z.infer<typeof updateRecipeStatusSchema>;
|
||||
try {
|
||||
const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
|
||||
res.status(200).json(updatedRecipe);
|
||||
sendSuccess(res, updatedRecipe);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error updating recipe status');
|
||||
next(error); // Pass all errors to the central error handler
|
||||
@@ -260,8 +259,11 @@ router.post(
|
||||
|
||||
const logoUrl = await brandService.updateBrandLogo(params.id, req.file, req.log);
|
||||
|
||||
req.log.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
|
||||
res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
|
||||
req.log.info(
|
||||
{ brandId: params.id, logoUrl },
|
||||
`Brand logo updated for brand ID: ${params.id}`,
|
||||
);
|
||||
sendSuccess(res, { message: 'Brand logo updated successfully.', logoUrl });
|
||||
} catch (error) {
|
||||
// If an error occurs after the file has been uploaded (e.g., DB error),
|
||||
// we must clean up the orphaned file from the disk.
|
||||
@@ -272,15 +274,19 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
router.get('/unmatched-items', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
|
||||
res.json(items);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching unmatched items');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
router.get(
|
||||
'/unmatched-items',
|
||||
validateRequest(emptySchema),
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
|
||||
sendSuccess(res, items);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching unmatched items');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* DELETE /api/admin/recipes/:recipeId - Admin endpoint to delete any recipe.
|
||||
@@ -295,7 +301,7 @@ router.delete(
|
||||
try {
|
||||
// The isAdmin flag bypasses the ownership check in the repository method.
|
||||
await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error: unknown) {
|
||||
req.log.error({ error }, 'Error deleting recipe');
|
||||
next(error);
|
||||
@@ -314,7 +320,7 @@ router.delete(
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error: unknown) {
|
||||
req.log.error({ error }, 'Error deleting flyer');
|
||||
next(error);
|
||||
@@ -334,7 +340,7 @@ router.put(
|
||||
body.status,
|
||||
req.log,
|
||||
); // This is still a standalone function in admin.db.ts
|
||||
res.status(200).json(updatedComment);
|
||||
sendSuccess(res, updatedComment);
|
||||
} catch (error: unknown) {
|
||||
req.log.error({ error }, 'Error updating comment status');
|
||||
next(error);
|
||||
@@ -345,7 +351,7 @@ router.put(
|
||||
router.get('/users', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const users = await db.adminRepo.getAllUsers(req.log);
|
||||
res.json(users);
|
||||
sendSuccess(res, users);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching users');
|
||||
next(error);
|
||||
@@ -362,7 +368,7 @@ router.get(
|
||||
|
||||
try {
|
||||
const logs = await db.adminRepo.getActivityLog(limit!, offset!, req.log);
|
||||
res.json(logs);
|
||||
sendSuccess(res, logs);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching activity log');
|
||||
next(error);
|
||||
@@ -378,7 +384,7 @@ router.get(
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof uuidParamSchema>>;
|
||||
try {
|
||||
const user = await db.userRepo.findUserProfileById(params.id, req.log);
|
||||
res.json(user);
|
||||
sendSuccess(res, user);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching user profile');
|
||||
next(error);
|
||||
@@ -394,7 +400,7 @@ router.put(
|
||||
const { params, body } = req as unknown as z.infer<typeof updateUserRoleSchema>;
|
||||
try {
|
||||
const updatedUser = await db.adminRepo.updateUserRole(params.id, body.role, req.log);
|
||||
res.json(updatedUser);
|
||||
sendSuccess(res, updatedUser);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `Error updating user ${params.id}:`);
|
||||
next(error);
|
||||
@@ -411,7 +417,7 @@ router.delete(
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof uuidParamSchema>>;
|
||||
try {
|
||||
await userService.deleteUserAsAdmin(userProfile.user.user_id, params.id, req.log);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error deleting user');
|
||||
next(error);
|
||||
@@ -437,10 +443,14 @@ router.post(
|
||||
// We call the function but don't wait for it to finish (no `await`).
|
||||
// This is a "fire-and-forget" operation from the client's perspective.
|
||||
backgroundJobService.runDailyDealCheck();
|
||||
res.status(202).json({
|
||||
message:
|
||||
'Daily deal check job has been triggered successfully. It will run in the background.',
|
||||
});
|
||||
sendSuccess(
|
||||
res,
|
||||
{
|
||||
message:
|
||||
'Daily deal check job has been triggered successfully. It will run in the background.',
|
||||
},
|
||||
202,
|
||||
);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, '[Admin] Failed to trigger daily deal check job.');
|
||||
next(error);
|
||||
@@ -464,9 +474,13 @@ router.post(
|
||||
|
||||
try {
|
||||
const jobId = await backgroundJobService.triggerAnalyticsReport();
|
||||
res.status(202).json({
|
||||
message: `Analytics report generation job has been enqueued successfully. Job ID: ${jobId}`,
|
||||
});
|
||||
sendSuccess(
|
||||
res,
|
||||
{
|
||||
message: `Analytics report generation job has been enqueued successfully. Job ID: ${jobId}`,
|
||||
},
|
||||
202,
|
||||
);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, '[Admin] Failed to enqueue analytics report job.');
|
||||
next(error);
|
||||
@@ -493,9 +507,11 @@ router.post(
|
||||
// Enqueue the cleanup job. The worker will handle the file deletion.
|
||||
try {
|
||||
await cleanupQueue.add('cleanup-flyer-files', { flyerId: params.flyerId });
|
||||
res
|
||||
.status(202)
|
||||
.json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
|
||||
sendSuccess(
|
||||
res,
|
||||
{ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` },
|
||||
202,
|
||||
);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error enqueuing cleanup job');
|
||||
next(error);
|
||||
@@ -512,22 +528,24 @@ router.post(
|
||||
adminTriggerLimiter,
|
||||
validateRequest(emptySchema),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
const userProfile = req.user as UserProfile;
|
||||
req.log.info(
|
||||
`[Admin] Manual trigger for a failing job received from user: ${userProfile.user.user_id}`,
|
||||
);
|
||||
const userProfile = req.user as UserProfile;
|
||||
req.log.info(
|
||||
`[Admin] Manual trigger for a failing job received from user: ${userProfile.user.user_id}`,
|
||||
);
|
||||
|
||||
try {
|
||||
// Add a job with a special 'forceFail' flag that the worker will recognize.
|
||||
const job = await analyticsQueue.add('generate-daily-report', { reportDate: 'FAIL' });
|
||||
res
|
||||
.status(202)
|
||||
.json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error enqueuing failing job');
|
||||
next(error);
|
||||
}
|
||||
}
|
||||
try {
|
||||
// Add a job with a special 'forceFail' flag that the worker will recognize.
|
||||
const job = await analyticsQueue.add('generate-daily-report', { reportDate: 'FAIL' });
|
||||
sendSuccess(
|
||||
res,
|
||||
{ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` },
|
||||
202,
|
||||
);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error enqueuing failing job');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
@@ -546,7 +564,7 @@ router.post(
|
||||
|
||||
try {
|
||||
const keysDeleted = await geocodingService.clearGeocodeCache(req.log);
|
||||
res.status(200).json({
|
||||
sendSuccess(res, {
|
||||
message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
|
||||
});
|
||||
} catch (error) {
|
||||
@@ -560,29 +578,37 @@ router.post(
|
||||
* GET /api/admin/workers/status - Get the current running status of all BullMQ workers.
|
||||
* This is useful for a system health dashboard to see if any workers have crashed.
|
||||
*/
|
||||
router.get('/workers/status', validateRequest(emptySchema), async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const workerStatuses = await monitoringService.getWorkerStatuses();
|
||||
res.json(workerStatuses);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching worker statuses');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
router.get(
|
||||
'/workers/status',
|
||||
validateRequest(emptySchema),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const workerStatuses = await monitoringService.getWorkerStatuses();
|
||||
sendSuccess(res, workerStatuses);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching worker statuses');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* GET /api/admin/queues/status - Get job counts for all BullMQ queues.
|
||||
* This is useful for monitoring the health and backlog of background jobs.
|
||||
*/
|
||||
router.get('/queues/status', validateRequest(emptySchema), async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const queueStatuses = await monitoringService.getQueueStatuses();
|
||||
res.json(queueStatuses);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching queue statuses');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
router.get(
|
||||
'/queues/status',
|
||||
validateRequest(emptySchema),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const queueStatuses = await monitoringService.getQueueStatuses();
|
||||
sendSuccess(res, queueStatuses);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching queue statuses');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* POST /api/admin/jobs/:queueName/:jobId/retry - Retries a specific failed job.
|
||||
@@ -598,12 +624,8 @@ router.post(
|
||||
} = req as unknown as z.infer<typeof jobRetrySchema>;
|
||||
|
||||
try {
|
||||
await monitoringService.retryFailedJob(
|
||||
queueName,
|
||||
jobId,
|
||||
userProfile.user.user_id,
|
||||
);
|
||||
res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
|
||||
await monitoringService.retryFailedJob(queueName, jobId, userProfile.user.user_id);
|
||||
sendSuccess(res, { message: `Job ${jobId} has been successfully marked for retry.` });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error retrying job');
|
||||
next(error);
|
||||
@@ -626,9 +648,7 @@ router.post(
|
||||
|
||||
try {
|
||||
const jobId = await backgroundJobService.triggerWeeklyAnalyticsReport();
|
||||
res
|
||||
.status(202)
|
||||
.json({ message: 'Successfully enqueued weekly analytics job.', jobId });
|
||||
sendSuccess(res, { message: 'Successfully enqueued weekly analytics job.', jobId }, 202);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error enqueuing weekly analytics job');
|
||||
next(error);
|
||||
@@ -647,9 +667,7 @@ router.post(
|
||||
validateRequest(emptySchema),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
const userProfile = req.user as UserProfile;
|
||||
req.log.info(
|
||||
`[Admin] Manual cache clear received from user: ${userProfile.user.user_id}`,
|
||||
);
|
||||
req.log.info(`[Admin] Manual cache clear received from user: ${userProfile.user.user_id}`);
|
||||
|
||||
try {
|
||||
const [flyersDeleted, brandsDeleted, statsDeleted] = await Promise.all([
|
||||
@@ -659,7 +677,7 @@ router.post(
|
||||
]);
|
||||
|
||||
const totalDeleted = flyersDeleted + brandsDeleted + statsDeleted;
|
||||
res.status(200).json({
|
||||
sendSuccess(res, {
|
||||
message: `Successfully cleared the application cache. ${totalDeleted} keys were removed.`,
|
||||
details: {
|
||||
flyers: flyersDeleted,
|
||||
@@ -677,5 +695,4 @@ router.post(
|
||||
/* Catches errors from multer (e.g., file size, file filter) */
|
||||
router.use(handleMulterError);
|
||||
|
||||
|
||||
export default router;
|
||||
|
||||
@@ -90,14 +90,14 @@ describe('Admin Stats Routes (/api/admin/stats)', () => {
|
||||
vi.mocked(adminRepo.getApplicationStats).mockResolvedValue(mockStats);
|
||||
const response = await supertest(app).get('/api/admin/stats');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockStats);
|
||||
expect(response.body.data).toEqual(mockStats);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
vi.mocked(adminRepo.getApplicationStats).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).get('/api/admin/stats');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -110,14 +110,14 @@ describe('Admin Stats Routes (/api/admin/stats)', () => {
|
||||
vi.mocked(adminRepo.getDailyStatsForLast30Days).mockResolvedValue(mockDailyStats);
|
||||
const response = await supertest(app).get('/api/admin/stats/daily');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockDailyStats);
|
||||
expect(response.body.data).toEqual(mockDailyStats);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
vi.mocked(adminRepo.getDailyStatsForLast30Days).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).get('/api/admin/stats/daily');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -88,14 +88,14 @@ describe('Admin System Routes (/api/admin/system)', () => {
|
||||
vi.mocked(geocodingService.clearGeocodeCache).mockResolvedValue(10);
|
||||
const response = await supertest(app).post('/api/admin/system/clear-geocode-cache');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toContain('10 keys were removed');
|
||||
expect(response.body.data.message).toContain('10 keys were removed');
|
||||
});
|
||||
|
||||
it('should return 500 if clearing the cache fails', async () => {
|
||||
vi.mocked(geocodingService.clearGeocodeCache).mockRejectedValue(new Error('Redis is down'));
|
||||
const response = await supertest(app).post('/api/admin/system/clear-geocode-cache');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Redis is down');
|
||||
expect(response.body.error.message).toContain('Redis is down');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -104,7 +104,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
vi.mocked(adminRepo.getAllUsers).mockResolvedValue(mockUsers);
|
||||
const response = await supertest(app).get('/api/admin/users');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUsers);
|
||||
expect(response.body.data).toEqual(mockUsers);
|
||||
expect(adminRepo.getAllUsers).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
@@ -122,7 +122,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
vi.mocked(userRepo.findUserProfileById).mockResolvedValue(mockUser);
|
||||
const response = await supertest(app).get(`/api/admin/users/${userId}`);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUser);
|
||||
expect(response.body.data).toEqual(mockUser);
|
||||
expect(userRepo.findUserProfileById).toHaveBeenCalledWith(userId, expect.any(Object));
|
||||
});
|
||||
|
||||
@@ -133,7 +133,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
);
|
||||
const response = await supertest(app).get(`/api/admin/users/${missingId}`);
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('User not found.');
|
||||
expect(response.body.error.message).toBe('User not found.');
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -160,7 +160,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
.put(`/api/admin/users/${userId}`)
|
||||
.send({ role: 'admin' });
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(updatedUser);
|
||||
expect(response.body.data).toEqual(updatedUser);
|
||||
expect(adminRepo.updateUserRole).toHaveBeenCalledWith(userId, 'admin', expect.any(Object));
|
||||
});
|
||||
|
||||
@@ -173,7 +173,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
.put(`/api/admin/users/${missingId}`)
|
||||
.send({ role: 'user' });
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe(`User with ID ${missingId} not found.`);
|
||||
expect(response.body.error.message).toBe(`User with ID ${missingId} not found.`);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -183,7 +183,7 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
.put(`/api/admin/users/${userId}`)
|
||||
.send({ role: 'admin' });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid role', async () => {
|
||||
@@ -201,7 +201,11 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
vi.mocked(userService.deleteUserAsAdmin).mockResolvedValue(undefined);
|
||||
const response = await supertest(app).delete(`/api/admin/users/${targetId}`);
|
||||
expect(response.status).toBe(204);
|
||||
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(adminId, targetId, expect.any(Object));
|
||||
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(
|
||||
adminId,
|
||||
targetId,
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
|
||||
it('should prevent an admin from deleting their own account', async () => {
|
||||
@@ -209,9 +213,13 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
vi.mocked(userService.deleteUserAsAdmin).mockRejectedValue(validationError);
|
||||
const response = await supertest(app).delete(`/api/admin/users/${adminId}`);
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toMatch(/Admins cannot delete their own account/);
|
||||
expect(response.body.error.message).toMatch(/Admins cannot delete their own account/);
|
||||
expect(userRepo.deleteUserById).not.toHaveBeenCalled();
|
||||
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(adminId, adminId, expect.any(Object));
|
||||
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(
|
||||
adminId,
|
||||
adminId,
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
|
||||
@@ -151,7 +151,9 @@ describe('AI Routes (/api/ai)', () => {
|
||||
const validChecksum = 'a'.repeat(64);
|
||||
|
||||
it('should enqueue a job and return 202 on success', async () => {
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-123' } as unknown as Job);
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({
|
||||
id: 'job-123',
|
||||
} as unknown as Job);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/upload-and-process')
|
||||
@@ -159,8 +161,8 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.attach('flyerFile', imagePath);
|
||||
|
||||
expect(response.status).toBe(202);
|
||||
expect(response.body.message).toBe('Flyer accepted for processing.');
|
||||
expect(response.body.jobId).toBe('job-123');
|
||||
expect(response.body.data.message).toBe('Flyer accepted for processing.');
|
||||
expect(response.body.data.jobId).toBe('job-123');
|
||||
expect(aiService.aiService.enqueueFlyerProcessing).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
@@ -170,7 +172,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.field('checksum', validChecksum);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('A flyer file (PDF or image) is required.');
|
||||
expect(response.body.error.message).toBe('A flyer file (PDF or image) is required.');
|
||||
});
|
||||
|
||||
it('should return 400 if checksum is missing', async () => {
|
||||
@@ -180,11 +182,14 @@ describe('AI Routes (/api/ai)', () => {
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
// Use regex to be resilient to validation message changes
|
||||
expect(response.body.errors[0].message).toMatch(/checksum is required|Required/i);
|
||||
expect(response.body.error.details[0].message).toMatch(/checksum is required|Required/i);
|
||||
});
|
||||
|
||||
it('should return 409 if flyer checksum already exists', async () => {
|
||||
const duplicateError = new aiService.DuplicateFlyerError('This flyer has already been processed.', 99);
|
||||
const duplicateError = new aiService.DuplicateFlyerError(
|
||||
'This flyer has already been processed.',
|
||||
99,
|
||||
);
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValue(duplicateError);
|
||||
|
||||
const response = await supertest(app)
|
||||
@@ -193,11 +198,13 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.attach('flyerFile', imagePath);
|
||||
|
||||
expect(response.status).toBe(409);
|
||||
expect(response.body.message).toBe('This flyer has already been processed.');
|
||||
expect(response.body.error.message).toBe('This flyer has already been processed.');
|
||||
});
|
||||
|
||||
it('should return 500 if enqueuing the job fails', async () => {
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValueOnce(new Error('Redis connection failed'));
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValueOnce(
|
||||
new Error('Redis connection failed'),
|
||||
);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/upload-and-process')
|
||||
@@ -205,7 +212,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.attach('flyerFile', imagePath);
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Redis connection failed');
|
||||
expect(response.body.error.message).toBe('Redis connection failed');
|
||||
});
|
||||
|
||||
it('should pass user ID to the job when authenticated', async () => {
|
||||
@@ -219,8 +226,10 @@ describe('AI Routes (/api/ai)', () => {
|
||||
basePath: '/api/ai',
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-456' } as unknown as Job);
|
||||
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({
|
||||
id: 'job-456',
|
||||
} as unknown as Job);
|
||||
|
||||
// Act
|
||||
await supertest(authenticatedApp)
|
||||
@@ -255,8 +264,10 @@ describe('AI Routes (/api/ai)', () => {
|
||||
basePath: '/api/ai',
|
||||
authenticatedUser: mockUserWithAddress,
|
||||
});
|
||||
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-789' } as unknown as Job);
|
||||
|
||||
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({
|
||||
id: 'job-789',
|
||||
} as unknown as Job);
|
||||
|
||||
// Act
|
||||
await supertest(authenticatedApp)
|
||||
@@ -296,7 +307,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
const response = await supertest(app).get('/api/ai/jobs/non-existent-job/status');
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Job not found.');
|
||||
expect(response.body.error.message).toBe('Job not found.');
|
||||
});
|
||||
|
||||
it('should return job status if job is found', async () => {
|
||||
@@ -311,7 +322,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.state).toBe('completed');
|
||||
expect(response.body.data.state).toBe('completed');
|
||||
});
|
||||
|
||||
// Removed flaky test 'should return 400 for an invalid job ID format'
|
||||
@@ -343,7 +354,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockFlyer);
|
||||
expect(response.body.data).toEqual(mockFlyer);
|
||||
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledWith(
|
||||
expect.any(Object), // req.file
|
||||
expect.any(Object), // req.body
|
||||
@@ -358,7 +369,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.field('some_legacy_field', 'value');
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('No flyer file uploaded.');
|
||||
expect(response.body.error.message).toBe('No flyer file uploaded.');
|
||||
});
|
||||
|
||||
it('should return 409 and cleanup file if a duplicate flyer is detected', async () => {
|
||||
@@ -366,23 +377,29 @@ describe('AI Routes (/api/ai)', () => {
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(duplicateError);
|
||||
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||
|
||||
const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
|
||||
const response = await supertest(authenticatedApp)
|
||||
.post('/api/ai/upload-legacy')
|
||||
.attach('flyerFile', imagePath);
|
||||
|
||||
expect(response.status).toBe(409);
|
||||
expect(response.body.message).toBe('Duplicate legacy flyer.');
|
||||
expect(response.body.flyerId).toBe(101);
|
||||
expect(response.body.error.message).toBe('Duplicate legacy flyer.');
|
||||
expect(response.body.error.details.flyerId).toBe(101);
|
||||
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||
unlinkSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should return 500 and cleanup file on a generic service error', async () => {
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new Error('Internal service failure'));
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(
|
||||
new Error('Internal service failure'),
|
||||
);
|
||||
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||
|
||||
const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
|
||||
const response = await supertest(authenticatedApp)
|
||||
.post('/api/ai/upload-legacy')
|
||||
.attach('flyerFile', imagePath);
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Internal service failure');
|
||||
expect(response.body.error.message).toBe('Internal service failure');
|
||||
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||
unlinkSpy.mockRestore();
|
||||
});
|
||||
@@ -412,7 +429,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body.message).toBe('Flyer processed and saved successfully.');
|
||||
expect(response.body.data.message).toBe('Flyer processed and saved successfully.');
|
||||
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
@@ -425,7 +442,10 @@ describe('AI Routes (/api/ai)', () => {
|
||||
|
||||
it('should return 409 Conflict and delete the uploaded file if flyer checksum already exists', async () => {
|
||||
// Arrange
|
||||
const duplicateError = new aiService.DuplicateFlyerError('This flyer has already been processed.', 99);
|
||||
const duplicateError = new aiService.DuplicateFlyerError(
|
||||
'This flyer has already been processed.',
|
||||
99,
|
||||
);
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(duplicateError);
|
||||
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||
|
||||
@@ -437,12 +457,14 @@ describe('AI Routes (/api/ai)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(409);
|
||||
expect(response.body.message).toBe('This flyer has already been processed.');
|
||||
expect(response.body.error.message).toBe('This flyer has already been processed.');
|
||||
expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled(); // Should not be called if service throws
|
||||
// Assert that the file was deleted
|
||||
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||
// The filename is predictable in the test environment because of the multer config in ai.routes.ts
|
||||
expect(unlinkSpy).toHaveBeenCalledWith(expect.stringContaining('flyerImage-test-flyer-image.jpg'));
|
||||
expect(unlinkSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('flyerImage-test-flyer-image.jpg'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should accept payload when extractedData.items is missing and save with empty items', async () => {
|
||||
@@ -453,7 +475,9 @@ describe('AI Routes (/api/ai)', () => {
|
||||
extractedData: { store_name: 'Partial Store' }, // no items key
|
||||
};
|
||||
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(createMockFlyer({ flyer_id: 2 }));
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(
|
||||
createMockFlyer({ flyer_id: 2 }),
|
||||
);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/flyers/process')
|
||||
@@ -471,7 +495,9 @@ describe('AI Routes (/api/ai)', () => {
|
||||
extractedData: { items: [] }, // store_name missing
|
||||
};
|
||||
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(createMockFlyer({ flyer_id: 3 }));
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(
|
||||
createMockFlyer({ flyer_id: 3 }),
|
||||
);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/flyers/process')
|
||||
@@ -519,7 +545,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should handle payload where extractedData is null', async () => {
|
||||
it('should handle payload where extractedData is null', async () => {
|
||||
const payloadWithNullExtractedData = {
|
||||
checksum: 'null-extracted-data-checksum',
|
||||
originalFileName: 'flyer-null.jpg',
|
||||
@@ -590,10 +616,12 @@ describe('AI Routes (/api/ai)', () => {
|
||||
|
||||
it('should handle malformed JSON in data field and return 400', async () => {
|
||||
const malformedDataString = '{"checksum":'; // Invalid JSON
|
||||
|
||||
|
||||
// Since the service parses the data, we mock it to throw a ValidationError when parsing fails
|
||||
// or when it detects the malformed input.
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new ValidationError([], 'Checksum is required.'));
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(
|
||||
new ValidationError([], 'Checksum is required.'),
|
||||
);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/flyers/process')
|
||||
@@ -603,8 +631,8 @@ describe('AI Routes (/api/ai)', () => {
|
||||
// The outer catch block should be hit, leading to empty parsed data.
|
||||
// The handler then fails the checksum validation.
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('Checksum is required.');
|
||||
// Note: The logging expectation was removed because if the service throws a ValidationError,
|
||||
expect(response.body.error.message).toBe('Checksum is required.');
|
||||
// Note: The logging expectation was removed because if the service throws a ValidationError,
|
||||
// the route handler passes it to the global error handler, which might log differently or not as a "critical error during parsing" in the route itself.
|
||||
});
|
||||
|
||||
@@ -615,9 +643,11 @@ describe('AI Routes (/api/ai)', () => {
|
||||
};
|
||||
// Spy on fs.promises.unlink to verify file cleanup
|
||||
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||
|
||||
|
||||
// Mock the service to throw a ValidationError because the checksum is missing
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new ValidationError([], 'Checksum is required.'));
|
||||
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(
|
||||
new ValidationError([], 'Checksum is required.'),
|
||||
);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/flyers/process')
|
||||
@@ -625,7 +655,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.attach('flyerImage', imagePath);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('Checksum is required.');
|
||||
expect(response.body.error.message).toBe('Checksum is required.');
|
||||
// Ensure the uploaded file is cleaned up
|
||||
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||
|
||||
@@ -643,7 +673,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
it('should return 200 with a stubbed response on success', async () => {
|
||||
const response = await supertest(app).post('/api/ai/check-flyer').attach('image', imagePath);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.is_flyer).toBe(true);
|
||||
expect(response.body.data.is_flyer).toBe(true);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic error', async () => {
|
||||
@@ -674,7 +704,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.attach('image', imagePath)
|
||||
.field('extractionType', 'store_name'); // Missing cropArea
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(
|
||||
expect(response.body.error.details[0].message).toMatch(
|
||||
/cropArea must be a valid JSON string|Required/i,
|
||||
);
|
||||
});
|
||||
@@ -700,7 +730,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.post('/api/ai/extract-address')
|
||||
.attach('image', imagePath);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.address).toBe('not identified');
|
||||
expect(response.body.data.address).toBe('not identified');
|
||||
});
|
||||
|
||||
it('should return 500 on a generic error', async () => {
|
||||
@@ -728,7 +758,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.post('/api/ai/extract-logo')
|
||||
.attach('images', imagePath);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.store_logo_base_64).toBeNull();
|
||||
expect(response.body.data.store_logo_base_64).toBeNull();
|
||||
});
|
||||
|
||||
it('should return 500 on a generic error', async () => {
|
||||
@@ -750,7 +780,11 @@ describe('AI Routes (/api/ai)', () => {
|
||||
const mockUser = createMockUserProfile({
|
||||
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
||||
});
|
||||
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUser });
|
||||
const authenticatedApp = createTestApp({
|
||||
router: aiRouter,
|
||||
basePath: '/api/ai',
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// Inject an authenticated user for this test block
|
||||
@@ -771,7 +805,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.attach('image', imagePath);
|
||||
// Use the authenticatedApp instance for requests in this block
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockResult);
|
||||
expect(response.body.data).toEqual(mockResult);
|
||||
expect(aiService.aiService.extractTextFromImageArea).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
@@ -788,27 +822,20 @@ describe('AI Routes (/api/ai)', () => {
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
// The error message might be wrapped or formatted differently
|
||||
expect(response.body.message).toMatch(/AI API is down/i);
|
||||
expect(response.body.error.message).toMatch(/AI API is down/i);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when user is authenticated', () => {
|
||||
const mockUserProfile = createMockUserProfile({
|
||||
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
||||
});
|
||||
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUserProfile });
|
||||
// Note: authenticatedApp is available from the describe block above if needed
|
||||
|
||||
beforeEach(() => {
|
||||
// The authenticatedApp instance is already set up with mockUserProfile
|
||||
});
|
||||
|
||||
it('POST /quick-insights should return the stubbed response', async () => {
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/quick-insights')
|
||||
.send({ items: [{ name: 'test' }] });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.text).toContain('server-generated quick insight');
|
||||
expect(response.body.data.text).toContain('server-generated quick insight');
|
||||
});
|
||||
|
||||
it('POST /quick-insights should accept items with "item" property instead of "name"', async () => {
|
||||
@@ -835,20 +862,20 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.post('/api/ai/deep-dive')
|
||||
.send({ items: [{ name: 'test' }] });
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.text).toContain('server-generated deep dive');
|
||||
expect(response.body.data.text).toContain('server-generated deep dive');
|
||||
});
|
||||
|
||||
it('POST /generate-image should return 501 Not Implemented', async () => {
|
||||
const response = await supertest(app).post('/api/ai/generate-image').send({ prompt: 'test' });
|
||||
|
||||
expect(response.status).toBe(501);
|
||||
expect(response.body.message).toBe('Image generation is not yet implemented.');
|
||||
expect(response.body.error.message).toBe('Image generation is not yet implemented.');
|
||||
});
|
||||
|
||||
it('POST /generate-speech should return 501 Not Implemented', async () => {
|
||||
const response = await supertest(app).post('/api/ai/generate-speech').send({ text: 'test' });
|
||||
expect(response.status).toBe(501);
|
||||
expect(response.body.message).toBe('Speech generation is not yet implemented.');
|
||||
expect(response.body.error.message).toBe('Speech generation is not yet implemented.');
|
||||
});
|
||||
|
||||
it('POST /search-web should return the stubbed response', async () => {
|
||||
@@ -857,7 +884,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.send({ query: 'test query' });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.text).toContain('The web says this is good');
|
||||
expect(response.body.data.text).toContain('The web says this is good');
|
||||
});
|
||||
|
||||
it('POST /compare-prices should return the stubbed response', async () => {
|
||||
@@ -866,7 +893,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.send({ items: [{ name: 'Milk' }] });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.text).toContain('server-generated price comparison');
|
||||
expect(response.body.data.text).toContain('server-generated price comparison');
|
||||
});
|
||||
|
||||
it('POST /plan-trip should return result on success', async () => {
|
||||
@@ -882,7 +909,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
});
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockResult);
|
||||
expect(response.body.data).toEqual(mockResult);
|
||||
});
|
||||
|
||||
it('POST /plan-trip should return 500 if the AI service fails', async () => {
|
||||
@@ -899,7 +926,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
});
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Maps API key invalid');
|
||||
expect(response.body.error.message).toBe('Maps API key invalid');
|
||||
});
|
||||
|
||||
it('POST /deep-dive should return 500 on a generic error', async () => {
|
||||
@@ -910,7 +937,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.post('/api/ai/deep-dive')
|
||||
.send({ items: [{ name: 'test' }] });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Deep dive logging failed');
|
||||
expect(response.body.error.message).toBe('Deep dive logging failed');
|
||||
});
|
||||
|
||||
it('POST /search-web should return 500 on a generic error', async () => {
|
||||
@@ -921,7 +948,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.post('/api/ai/search-web')
|
||||
.send({ query: 'test query' });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Search web logging failed');
|
||||
expect(response.body.error.message).toBe('Search web logging failed');
|
||||
});
|
||||
|
||||
it('POST /compare-prices should return 500 on a generic error', async () => {
|
||||
@@ -932,7 +959,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.post('/api/ai/compare-prices')
|
||||
.send({ items: [{ name: 'Milk' }] });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Compare prices logging failed');
|
||||
expect(response.body.error.message).toBe('Compare prices logging failed');
|
||||
});
|
||||
|
||||
it('POST /quick-insights should return 400 if items are missing', async () => {
|
||||
|
||||
@@ -9,10 +9,7 @@ import { optionalAuth } from './passport.routes';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { aiService, DuplicateFlyerError } from '../services/aiService.server';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import {
|
||||
createUploadMiddleware,
|
||||
handleMulterError,
|
||||
} from '../middleware/multer.middleware';
|
||||
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
|
||||
import { logger } from '../services/logger.server'; // Needed for module-level logging (e.g., Zod schema transforms)
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { UserProfile } from '../types'; // This was a duplicate, fixed.
|
||||
@@ -26,6 +23,7 @@ import { cleanupUploadedFile, cleanupUploadedFiles } from '../utils/fileUtils';
|
||||
import { monitoringService } from '../services/monitoringService.server';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { aiUploadLimiter, aiGenerationLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -35,7 +33,8 @@ const uploadAndProcessSchema = z.object({
|
||||
body: z.object({
|
||||
// Stricter validation for SHA-256 checksum. It must be a 64-character hexadecimal string.
|
||||
checksum: requiredString('File checksum is required.').pipe(
|
||||
z.string()
|
||||
z
|
||||
.string()
|
||||
.length(64, 'Checksum must be 64 characters long.')
|
||||
.regex(/^[a-f0-9]+$/, 'Checksum must be a valid hexadecimal string.'),
|
||||
),
|
||||
@@ -96,8 +95,14 @@ const flyerItemForAnalysisSchema = z
|
||||
// Sanitize item and name by trimming whitespace.
|
||||
// The transform ensures that null/undefined values are preserved
|
||||
// while trimming any actual string values.
|
||||
item: z.string().nullish().transform(val => (val ? val.trim() : val)),
|
||||
name: z.string().nullish().transform(val => (val ? val.trim() : val)),
|
||||
item: z
|
||||
.string()
|
||||
.nullish()
|
||||
.transform((val) => (val ? val.trim() : val)),
|
||||
name: z
|
||||
.string()
|
||||
.nullish()
|
||||
.transform((val) => (val ? val.trim() : val)),
|
||||
})
|
||||
// Using .passthrough() allows extra properties on the item object.
|
||||
// If the intent is to strictly enforce only 'item' and 'name' (and other known properties),
|
||||
@@ -190,7 +195,12 @@ router.post(
|
||||
const { body } = uploadAndProcessSchema.parse({ body: req.body });
|
||||
|
||||
if (!req.file) {
|
||||
return res.status(400).json({ message: 'A flyer file (PDF or image) is required.' });
|
||||
return sendError(
|
||||
res,
|
||||
ErrorCode.BAD_REQUEST,
|
||||
'A flyer file (PDF or image) is required.',
|
||||
400,
|
||||
);
|
||||
}
|
||||
|
||||
req.log.debug(
|
||||
@@ -204,7 +214,7 @@ router.post(
|
||||
if (process.env.NODE_ENV === 'test' && !req.headers['authorization']) {
|
||||
userProfile = undefined;
|
||||
}
|
||||
|
||||
|
||||
const job = await aiService.enqueueFlyerProcessing(
|
||||
req.file,
|
||||
body.checksum,
|
||||
@@ -215,15 +225,19 @@ router.post(
|
||||
);
|
||||
|
||||
// Respond immediately to the client with 202 Accepted
|
||||
res.status(202).json({
|
||||
message: 'Flyer accepted for processing.',
|
||||
jobId: job.id,
|
||||
});
|
||||
sendSuccess(
|
||||
res,
|
||||
{
|
||||
message: 'Flyer accepted for processing.',
|
||||
jobId: job.id,
|
||||
},
|
||||
202,
|
||||
);
|
||||
} catch (error) {
|
||||
await cleanupUploadedFile(req.file);
|
||||
if (error instanceof DuplicateFlyerError) {
|
||||
req.log.warn(`Duplicate flyer upload attempt blocked for checksum: ${req.body?.checksum}`);
|
||||
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
|
||||
return sendError(res, ErrorCode.CONFLICT, error.message, 409, { flyerId: error.flyerId });
|
||||
}
|
||||
next(error);
|
||||
}
|
||||
@@ -246,16 +260,21 @@ router.post(
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
if (!req.file) {
|
||||
return res.status(400).json({ message: 'No flyer file uploaded.' });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, 'No flyer file uploaded.', 400);
|
||||
}
|
||||
const userProfile = req.user as UserProfile;
|
||||
const newFlyer = await aiService.processLegacyFlyerUpload(req.file, req.body, userProfile, req.log);
|
||||
res.status(200).json(newFlyer);
|
||||
const newFlyer = await aiService.processLegacyFlyerUpload(
|
||||
req.file,
|
||||
req.body,
|
||||
userProfile,
|
||||
req.log,
|
||||
);
|
||||
sendSuccess(res, newFlyer);
|
||||
} catch (error) {
|
||||
await cleanupUploadedFile(req.file);
|
||||
if (error instanceof DuplicateFlyerError) {
|
||||
req.log.warn(`Duplicate legacy flyer upload attempt blocked.`);
|
||||
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
|
||||
return sendError(res, ErrorCode.CONFLICT, error.message, 409, { flyerId: error.flyerId });
|
||||
}
|
||||
next(error);
|
||||
}
|
||||
@@ -277,7 +296,7 @@ router.get(
|
||||
try {
|
||||
const jobStatus = await monitoringService.getFlyerJobStatus(jobId); // This was a duplicate, fixed.
|
||||
req.log.debug(`[API /ai/jobs] Status check for job ${jobId}: ${jobStatus.state}`);
|
||||
res.json(jobStatus);
|
||||
sendSuccess(res, jobStatus);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
@@ -300,7 +319,7 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
if (!req.file) {
|
||||
return res.status(400).json({ message: 'Flyer image file is required.' });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, 'Flyer image file is required.', 400);
|
||||
}
|
||||
|
||||
const userProfile = req.user as UserProfile | undefined;
|
||||
@@ -312,12 +331,16 @@ router.post(
|
||||
req.log,
|
||||
);
|
||||
|
||||
res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
|
||||
sendSuccess(
|
||||
res,
|
||||
{ message: 'Flyer processed and saved successfully.', flyer: newFlyer },
|
||||
201,
|
||||
);
|
||||
} catch (error) {
|
||||
await cleanupUploadedFile(req.file);
|
||||
if (error instanceof DuplicateFlyerError) {
|
||||
req.log.warn(`Duplicate flyer upload attempt blocked.`);
|
||||
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
|
||||
return sendError(res, ErrorCode.CONFLICT, error.message, 409, { flyerId: error.flyerId });
|
||||
}
|
||||
next(error);
|
||||
}
|
||||
@@ -336,10 +359,10 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
if (!req.file) {
|
||||
return res.status(400).json({ message: 'Image file is required.' });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, 'Image file is required.', 400);
|
||||
}
|
||||
req.log.info(`Server-side flyer check for file: ${req.file.originalname}`);
|
||||
res.status(200).json({ is_flyer: true }); // Stubbed response
|
||||
sendSuccess(res, { is_flyer: true }); // Stubbed response
|
||||
} catch (error) {
|
||||
next(error);
|
||||
} finally {
|
||||
@@ -356,10 +379,10 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
if (!req.file) {
|
||||
return res.status(400).json({ message: 'Image file is required.' });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, 'Image file is required.', 400);
|
||||
}
|
||||
req.log.info(`Server-side address extraction for file: ${req.file.originalname}`);
|
||||
res.status(200).json({ address: 'not identified' }); // Updated stubbed response
|
||||
sendSuccess(res, { address: 'not identified' }); // Updated stubbed response
|
||||
} catch (error) {
|
||||
next(error);
|
||||
} finally {
|
||||
@@ -376,10 +399,10 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
if (!req.files || !Array.isArray(req.files) || req.files.length === 0) {
|
||||
return res.status(400).json({ message: 'Image files are required.' });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, 'Image files are required.', 400);
|
||||
}
|
||||
req.log.info(`Server-side logo extraction for ${req.files.length} image(s).`);
|
||||
res.status(200).json({ store_logo_base_64: null }); // Stubbed response
|
||||
sendSuccess(res, { store_logo_base_64: null }); // Stubbed response
|
||||
} catch (error) {
|
||||
next(error);
|
||||
} finally {
|
||||
@@ -396,9 +419,7 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
req.log.info(`Server-side quick insights requested.`);
|
||||
res
|
||||
.status(200)
|
||||
.json({ text: 'This is a server-generated quick insight: buy the cheap stuff!' }); // Stubbed response
|
||||
sendSuccess(res, { text: 'This is a server-generated quick insight: buy the cheap stuff!' }); // Stubbed response
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
@@ -413,9 +434,9 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
req.log.info(`Server-side deep dive requested.`);
|
||||
res
|
||||
.status(200)
|
||||
.json({ text: 'This is a server-generated deep dive analysis. It is very detailed.' }); // Stubbed response
|
||||
sendSuccess(res, {
|
||||
text: 'This is a server-generated deep dive analysis. It is very detailed.',
|
||||
}); // Stubbed response
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
@@ -430,7 +451,7 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
req.log.info(`Server-side web search requested.`);
|
||||
res.status(200).json({ text: 'The web says this is good.', sources: [] }); // Stubbed response
|
||||
sendSuccess(res, { text: 'The web says this is good.', sources: [] }); // Stubbed response
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
@@ -446,7 +467,7 @@ router.post(
|
||||
try {
|
||||
const { items } = req.body;
|
||||
req.log.info(`Server-side price comparison requested for ${items.length} items.`);
|
||||
res.status(200).json({
|
||||
sendSuccess(res, {
|
||||
text: 'This is a server-generated price comparison. Milk is cheaper at SuperMart.',
|
||||
sources: [],
|
||||
}); // Stubbed response
|
||||
@@ -466,7 +487,7 @@ router.post(
|
||||
const { items, store, userLocation } = req.body;
|
||||
req.log.debug({ itemCount: items.length, storeName: store.name }, 'Trip planning requested.');
|
||||
const result = await aiService.planTripWithMaps(items, store, userLocation);
|
||||
res.status(200).json(result);
|
||||
sendSuccess(res, result);
|
||||
} catch (error) {
|
||||
req.log.error({ error: errMsg(error) }, 'Error in /api/ai/plan-trip endpoint:');
|
||||
next(error);
|
||||
@@ -485,7 +506,7 @@ router.post(
|
||||
// This endpoint is a placeholder for a future feature.
|
||||
// Returning 501 Not Implemented is the correct HTTP response for this case.
|
||||
req.log.info('Request received for unimplemented endpoint: /api/ai/generate-image');
|
||||
res.status(501).json({ message: 'Image generation is not yet implemented.' });
|
||||
sendError(res, ErrorCode.NOT_IMPLEMENTED, 'Image generation is not yet implemented.', 501);
|
||||
},
|
||||
);
|
||||
|
||||
@@ -498,7 +519,7 @@ router.post(
|
||||
// This endpoint is a placeholder for a future feature.
|
||||
// Returning 501 Not Implemented is the correct HTTP response for this case.
|
||||
req.log.info('Request received for unimplemented endpoint: /api/ai/generate-speech');
|
||||
res.status(501).json({ message: 'Speech generation is not yet implemented.' });
|
||||
sendError(res, ErrorCode.NOT_IMPLEMENTED, 'Speech generation is not yet implemented.', 501);
|
||||
},
|
||||
);
|
||||
|
||||
@@ -515,7 +536,7 @@ router.post(
|
||||
async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
if (!req.file) {
|
||||
return res.status(400).json({ message: 'Image file is required.' });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, 'Image file is required.', 400);
|
||||
}
|
||||
// validateRequest transforms the cropArea JSON string into an object in req.body.
|
||||
// So we use it directly instead of JSON.parse().
|
||||
@@ -536,7 +557,7 @@ router.post(
|
||||
req.log,
|
||||
);
|
||||
|
||||
res.status(200).json(result);
|
||||
sendSuccess(res, result);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
} finally {
|
||||
|
||||
@@ -137,9 +137,9 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
});
|
||||
// Assert
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body.message).toBe('User registered successfully!');
|
||||
expect(response.body.userprofile.user.email).toBe(newUserEmail);
|
||||
expect(response.body.token).toBeTypeOf('string'); // This was a duplicate, fixed.
|
||||
expect(response.body.data.message).toBe('User registered successfully!');
|
||||
expect(response.body.data.userprofile.user.email).toBe(newUserEmail);
|
||||
expect(response.body.data.token).toBeTypeOf('string'); // This was a duplicate, fixed.
|
||||
expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
|
||||
newUserEmail,
|
||||
strongPassword,
|
||||
@@ -171,7 +171,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body.message).toBe('User registered successfully!');
|
||||
expect(response.body.data.message).toBe('User registered successfully!');
|
||||
expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
|
||||
email,
|
||||
strongPassword,
|
||||
@@ -242,7 +242,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
interface ZodError {
|
||||
message: string;
|
||||
}
|
||||
const errorMessages = response.body.errors?.map((e: ZodError) => e.message).join(' ');
|
||||
const errorMessages = response.body.error.details?.map((e: ZodError) => e.message).join(' ');
|
||||
expect(errorMessages).toMatch(/Password is too weak/i);
|
||||
});
|
||||
|
||||
@@ -260,7 +260,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: newUserEmail, password: strongPassword });
|
||||
|
||||
expect(response.status).toBe(409); // 409 Conflict
|
||||
expect(response.body.message).toBe('User with that email already exists.');
|
||||
expect(response.body.error.message).toBe('User with that email already exists.');
|
||||
});
|
||||
|
||||
it('should return 500 if a generic database error occurs during registration', async () => {
|
||||
@@ -272,7 +272,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'fail@test.com', password: strongPassword });
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB connection lost'); // The errorHandler will forward the message
|
||||
expect(response.body.error.message).toBe('DB connection lost'); // The errorHandler will forward the message
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid email format', async () => {
|
||||
@@ -281,7 +281,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'not-an-email', password: strongPassword });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('A valid email is required.');
|
||||
expect(response.body.error.details[0].message).toBe('A valid email is required.');
|
||||
});
|
||||
|
||||
it('should return 400 for a password that is too short', async () => {
|
||||
@@ -290,7 +290,9 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: newUserEmail, password: 'short' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('Password must be at least 8 characters long.');
|
||||
expect(response.body.error.details[0].message).toBe(
|
||||
'Password must be at least 8 characters long.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -309,7 +311,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
// The API now returns a nested UserProfile object
|
||||
expect(response.body.userprofile).toEqual(
|
||||
expect(response.body.data.userprofile).toEqual(
|
||||
expect.objectContaining({
|
||||
user: expect.objectContaining({
|
||||
user_id: 'user-123',
|
||||
@@ -317,7 +319,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(response.body.token).toBeTypeOf('string');
|
||||
expect(response.body.data.token).toBeTypeOf('string');
|
||||
expect(response.headers['set-cookie']).toBeDefined();
|
||||
});
|
||||
|
||||
@@ -327,7 +329,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'test@test.com', password: 'wrong_password' });
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.body.message).toBe('Incorrect email or password.');
|
||||
expect(response.body.error.message).toBe('Incorrect email or password.');
|
||||
});
|
||||
|
||||
it('should reject login for a locked account', async () => {
|
||||
@@ -336,7 +338,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'locked@test.com', password: 'password123' });
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.body.message).toBe(
|
||||
expect(response.body.error.message).toBe(
|
||||
'Account is temporarily locked. Please try again in 15 minutes.',
|
||||
);
|
||||
});
|
||||
@@ -371,7 +373,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'dberror@test.com', password: 'any_password' });
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Database connection failed');
|
||||
expect(response.body.error.message).toBe('Database connection failed');
|
||||
});
|
||||
|
||||
it('should log a warning when passport authentication fails without a user', async () => {
|
||||
@@ -414,7 +416,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'not-an-email', password: 'password123' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('A valid email is required.');
|
||||
expect(response.body.error.details[0].message).toBe('A valid email is required.');
|
||||
});
|
||||
|
||||
it('should return 400 if password is missing', async () => {
|
||||
@@ -423,7 +425,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'test@test.com' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('Password is required.');
|
||||
expect(response.body.error.details[0].message).toBe('Password is required.');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -439,8 +441,8 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toContain('a password reset link has been sent'); // This was a duplicate, fixed.
|
||||
expect(response.body.token).toBeTypeOf('string');
|
||||
expect(response.body.data.message).toContain('a password reset link has been sent'); // This was a duplicate, fixed.
|
||||
expect(response.body.data.token).toBeTypeOf('string');
|
||||
});
|
||||
|
||||
it('should return a generic success message even if the user does not exist', async () => {
|
||||
@@ -451,7 +453,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'nouser@test.com' });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toContain('a password reset link has been sent');
|
||||
expect(response.body.data.message).toContain('a password reset link has been sent');
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -469,7 +471,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ email: 'invalid-email' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('A valid email is required.');
|
||||
expect(response.body.error.details[0].message).toBe('A valid email is required.');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -482,7 +484,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ token: 'valid-token', newPassword: 'a-Very-Strong-Password-789!' });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe('Password has been reset successfully.');
|
||||
expect(response.body.data.message).toBe('Password has been reset successfully.');
|
||||
});
|
||||
|
||||
it('should reject with an invalid or expired token', async () => {
|
||||
@@ -493,7 +495,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ token: 'invalid-token', newPassword: 'a-Very-Strong-Password-123!' }); // Use strong password to pass validation
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('Invalid or expired password reset token.');
|
||||
expect(response.body.error.message).toBe('Invalid or expired password reset token.');
|
||||
});
|
||||
|
||||
it('should return 400 for a weak new password', async () => {
|
||||
@@ -511,7 +513,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.send({ newPassword: 'a-Very-Strong-Password-789!' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(/Token is required|Required/i);
|
||||
expect(response.body.error.details[0].message).toMatch(/Token is required|Required/i);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -524,13 +526,13 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.set('Cookie', 'refreshToken=valid-refresh-token');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.token).toBeTypeOf('string');
|
||||
expect(response.body.data.token).toBeTypeOf('string');
|
||||
});
|
||||
|
||||
it('should return 401 if no refresh token cookie is provided', async () => {
|
||||
const response = await supertest(app).post('/api/auth/refresh-token');
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.body.message).toBe('Refresh token not found.');
|
||||
expect(response.body.error.message).toBe('Refresh token not found.');
|
||||
});
|
||||
|
||||
it('should return 403 if refresh token is invalid', async () => {
|
||||
@@ -552,7 +554,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
.post('/api/auth/refresh-token')
|
||||
.set('Cookie', 'refreshToken=any-token');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toMatch(/DB Error/);
|
||||
expect(response.body.error.message).toMatch(/DB Error/);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -568,7 +570,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe('Logged out successfully.');
|
||||
expect(response.body.data.message).toBe('Logged out successfully.');
|
||||
|
||||
// Check that the 'set-cookie' header is trying to expire the cookie
|
||||
const setCookieHeader = response.headers['set-cookie'];
|
||||
@@ -616,7 +618,7 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiting on /forgot-password', () => {
|
||||
describe('Rate Limiting on /forgot-password', () => {
|
||||
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
|
||||
// Arrange
|
||||
const email = 'rate-limit-test@example.com';
|
||||
@@ -658,7 +660,7 @@ describe('Rate Limiting on /forgot-password', () => {
|
||||
expect(response.status, `Request ${i + 1} should succeed`).toBe(200);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiting on /reset-password', () => {
|
||||
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
|
||||
|
||||
@@ -23,6 +23,7 @@ import {
|
||||
refreshTokenLimiter,
|
||||
logoutLimiter,
|
||||
} from '../config/rateLimiters';
|
||||
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
|
||||
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { authService } from '../services/authService';
|
||||
@@ -103,13 +104,19 @@ router.post(
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days
|
||||
});
|
||||
return res
|
||||
.status(201)
|
||||
.json({ message: 'User registered successfully!', userprofile: newUserProfile, token: accessToken });
|
||||
return sendSuccess(
|
||||
res,
|
||||
{
|
||||
message: 'User registered successfully!',
|
||||
userprofile: newUserProfile,
|
||||
token: accessToken,
|
||||
},
|
||||
201,
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof UniqueConstraintError) {
|
||||
// If the email is a duplicate, return a 409 Conflict status.
|
||||
return res.status(409).json({ message: error.message });
|
||||
return sendError(res, ErrorCode.CONFLICT, error.message, 409);
|
||||
}
|
||||
req.log.error({ error }, `User registration route failed for email: ${email}.`);
|
||||
// Pass the error to the centralized handler
|
||||
@@ -143,13 +150,16 @@ router.post(
|
||||
return next(err);
|
||||
}
|
||||
if (!user) {
|
||||
return res.status(401).json({ message: info.message || 'Login failed' });
|
||||
return sendError(res, ErrorCode.UNAUTHORIZED, info.message || 'Login failed', 401);
|
||||
}
|
||||
|
||||
try {
|
||||
const { rememberMe } = req.body;
|
||||
const userProfile = user as UserProfile;
|
||||
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(userProfile, req.log);
|
||||
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(
|
||||
userProfile,
|
||||
req.log,
|
||||
);
|
||||
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
|
||||
|
||||
const cookieOptions = {
|
||||
@@ -160,7 +170,7 @@ router.post(
|
||||
|
||||
res.cookie('refreshToken', refreshToken, cookieOptions);
|
||||
// Return the full user profile object on login to avoid a second fetch on the client.
|
||||
return res.json({ userprofile: userProfile, token: accessToken });
|
||||
return sendSuccess(res, { userprofile: userProfile, token: accessToken });
|
||||
} catch (tokenErr) {
|
||||
const email = (user as UserProfile)?.user?.email || req.body.email;
|
||||
req.log.error({ error: tokenErr }, `Failed to process login for user: ${email}`);
|
||||
@@ -191,7 +201,7 @@ router.post(
|
||||
message: 'If an account with that email exists, a password reset link has been sent.',
|
||||
};
|
||||
if (process.env.NODE_ENV === 'test' && token) responsePayload.token = token;
|
||||
res.status(200).json(responsePayload);
|
||||
sendSuccess(res, responsePayload);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `An error occurred during /forgot-password for email: ${email}`);
|
||||
next(error);
|
||||
@@ -214,10 +224,15 @@ router.post(
|
||||
const resetSuccessful = await authService.updatePassword(token, newPassword, req.log);
|
||||
|
||||
if (!resetSuccessful) {
|
||||
return res.status(400).json({ message: 'Invalid or expired password reset token.' });
|
||||
return sendError(
|
||||
res,
|
||||
ErrorCode.BAD_REQUEST,
|
||||
'Invalid or expired password reset token.',
|
||||
400,
|
||||
);
|
||||
}
|
||||
|
||||
res.status(200).json({ message: 'Password has been reset successfully.' });
|
||||
sendSuccess(res, { message: 'Password has been reset successfully.' });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `An error occurred during password reset.`);
|
||||
next(error);
|
||||
@@ -226,23 +241,27 @@ router.post(
|
||||
);
|
||||
|
||||
// New Route to refresh the access token
|
||||
router.post('/refresh-token', refreshTokenLimiter, async (req: Request, res: Response, next: NextFunction) => {
|
||||
const { refreshToken } = req.cookies;
|
||||
if (!refreshToken) {
|
||||
return res.status(401).json({ message: 'Refresh token not found.' });
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await authService.refreshAccessToken(refreshToken, req.log);
|
||||
if (!result) {
|
||||
return res.status(403).json({ message: 'Invalid or expired refresh token.' });
|
||||
router.post(
|
||||
'/refresh-token',
|
||||
refreshTokenLimiter,
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
const { refreshToken } = req.cookies;
|
||||
if (!refreshToken) {
|
||||
return sendError(res, ErrorCode.UNAUTHORIZED, 'Refresh token not found.', 401);
|
||||
}
|
||||
res.json({ token: result.accessToken });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'An error occurred during /refresh-token.');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
const result = await authService.refreshAccessToken(refreshToken, req.log);
|
||||
if (!result) {
|
||||
return sendError(res, ErrorCode.FORBIDDEN, 'Invalid or expired refresh token.', 403);
|
||||
}
|
||||
sendSuccess(res, { token: result.accessToken });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'An error occurred during /refresh-token.');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* POST /api/auth/logout - Logs the user out by invalidating their refresh token.
|
||||
@@ -264,7 +283,7 @@ router.post('/logout', logoutLimiter, async (req: Request, res: Response) => {
|
||||
maxAge: 0, // Use maxAge for modern compatibility; Express sets 'Expires' as a fallback.
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
});
|
||||
res.status(200).json({ message: 'Logged out successfully.' });
|
||||
sendSuccess(res, { message: 'Logged out successfully.' });
|
||||
});
|
||||
|
||||
// --- OAuth Routes ---
|
||||
|
||||
@@ -69,7 +69,11 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
vi.mocked(db.budgetRepo.getSpendingByCategory).mockResolvedValue([]);
|
||||
});
|
||||
|
||||
const app = createTestApp({ router: budgetRouter, basePath: '/api/budgets', authenticatedUser: mockUserProfile });
|
||||
const app = createTestApp({
|
||||
router: budgetRouter,
|
||||
basePath: '/api/budgets',
|
||||
authenticatedUser: mockUserProfile,
|
||||
});
|
||||
|
||||
describe('GET /', () => {
|
||||
it('should return a list of budgets for the user', async () => {
|
||||
@@ -80,7 +84,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
const response = await supertest(app).get('/api/budgets');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockBudgets);
|
||||
expect(response.body.data).toEqual(mockBudgets);
|
||||
expect(db.budgetRepo.getBudgetsForUser).toHaveBeenCalledWith(
|
||||
mockUserProfile.user.user_id,
|
||||
expectLogger,
|
||||
@@ -91,7 +95,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
vi.mocked(db.budgetRepo.getBudgetsForUser).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).get('/api/budgets');
|
||||
expect(response.status).toBe(500); // The custom handler will now be used
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -114,7 +118,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
const response = await supertest(app).post('/api/budgets').send(newBudgetData);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body).toEqual(mockCreatedBudget);
|
||||
expect(response.body.data).toEqual(mockCreatedBudget);
|
||||
});
|
||||
|
||||
it('should return 400 if the user does not exist', async () => {
|
||||
@@ -129,7 +133,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
);
|
||||
const response = await supertest(app).post('/api/budgets').send(newBudgetData);
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('User not found');
|
||||
expect(response.body.error.message).toBe('User not found');
|
||||
});
|
||||
|
||||
it('should return 500 if a generic database error occurs', async () => {
|
||||
@@ -142,7 +146,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
vi.mocked(db.budgetRepo.createBudget).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).post('/api/budgets').send(newBudgetData);
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('should return 400 for invalid budget data', async () => {
|
||||
@@ -156,7 +160,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
const response = await supertest(app).post('/api/budgets').send(invalidData);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toHaveLength(4);
|
||||
expect(response.body.error.details).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('should return 400 if required fields are missing', async () => {
|
||||
@@ -165,7 +169,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
.post('/api/budgets')
|
||||
.send({ amount_cents: 10000, period: 'monthly', start_date: '2024-01-01' });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('Budget name is required.');
|
||||
expect(response.body.error.details[0].message).toBe('Budget name is required.');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -183,7 +187,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
const response = await supertest(app).put('/api/budgets/1').send(budgetUpdates);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUpdatedBudget);
|
||||
expect(response.body.data).toEqual(mockUpdatedBudget);
|
||||
});
|
||||
|
||||
it('should return 404 if the budget is not found', async () => {
|
||||
@@ -192,7 +196,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
);
|
||||
const response = await supertest(app).put('/api/budgets/999').send({ amount_cents: 1 });
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Budget not found');
|
||||
expect(response.body.error.message).toBe('Budget not found');
|
||||
});
|
||||
|
||||
it('should return 500 if a generic database error occurs', async () => {
|
||||
@@ -200,13 +204,13 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
vi.mocked(db.budgetRepo.updateBudget).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).put('/api/budgets/1').send(budgetUpdates);
|
||||
expect(response.status).toBe(500); // The custom handler will now be used
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('should return 400 if no update fields are provided', async () => {
|
||||
const response = await supertest(app).put('/api/budgets/1').send({});
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
expect(response.body.error.details[0].message).toBe(
|
||||
'At least one field to update must be provided.',
|
||||
);
|
||||
});
|
||||
@@ -214,7 +218,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
it('should return 400 for an invalid budget ID', async () => {
|
||||
const response = await supertest(app).put('/api/budgets/abc').send({ amount_cents: 5000 });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(/Invalid ID|number/i);
|
||||
expect(response.body.error.details[0].message).toMatch(/Invalid ID|number/i);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -239,20 +243,20 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
);
|
||||
const response = await supertest(app).delete('/api/budgets/999');
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Budget not found');
|
||||
expect(response.body.error.message).toBe('Budget not found');
|
||||
});
|
||||
|
||||
it('should return 500 if a generic database error occurs', async () => {
|
||||
vi.mocked(db.budgetRepo.deleteBudget).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).delete('/api/budgets/1');
|
||||
expect(response.status).toBe(500); // The custom handler will now be used
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid budget ID', async () => {
|
||||
const response = await supertest(app).delete('/api/budgets/abc');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(/Invalid ID|number/i);
|
||||
expect(response.body.error.details[0].message).toMatch(/Invalid ID|number/i);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -269,7 +273,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockSpendingData);
|
||||
expect(response.body.data).toEqual(mockSpendingData);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -281,7 +285,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
);
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('should return 400 for invalid date formats', async () => {
|
||||
@@ -289,14 +293,14 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
'/api/budgets/spending-analysis?startDate=2024/01/01&endDate=invalid',
|
||||
);
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toHaveLength(2);
|
||||
expect(response.body.error.details).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should return 400 if required query parameters are missing', async () => {
|
||||
const response = await supertest(app).get('/api/budgets/spending-analysis');
|
||||
expect(response.status).toBe(400);
|
||||
// Expect errors for both startDate and endDate
|
||||
expect(response.body.errors).toHaveLength(2);
|
||||
expect(response.body.error.details).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,11 +7,15 @@ import type { UserProfile } from '../types';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { requiredString, numericIdParam } from '../utils/zodUtils';
|
||||
import { budgetUpdateLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// --- Zod Schemas for Budget Routes (as per ADR-003) ---
|
||||
const budgetIdParamSchema = numericIdParam('id', "Invalid ID for parameter 'id'. Must be a number.");
|
||||
const budgetIdParamSchema = numericIdParam(
|
||||
'id',
|
||||
"Invalid ID for parameter 'id'. Must be a number.",
|
||||
);
|
||||
|
||||
const createBudgetSchema = z.object({
|
||||
body: z.object({
|
||||
@@ -48,7 +52,7 @@ router.get('/', async (req: Request, res: Response, next: NextFunction) => {
|
||||
const userProfile = req.user as UserProfile;
|
||||
try {
|
||||
const budgets = await budgetRepo.getBudgetsForUser(userProfile.user.user_id, req.log);
|
||||
res.json(budgets);
|
||||
sendSuccess(res, budgets);
|
||||
} catch (error) {
|
||||
req.log.error({ error, userId: userProfile.user.user_id }, 'Error fetching budgets');
|
||||
next(error);
|
||||
@@ -67,7 +71,7 @@ router.post(
|
||||
const { body } = req as unknown as CreateBudgetRequest;
|
||||
try {
|
||||
const newBudget = await budgetRepo.createBudget(userProfile.user.user_id, body, req.log);
|
||||
res.status(201).json(newBudget);
|
||||
sendSuccess(res, newBudget, 201);
|
||||
} catch (error: unknown) {
|
||||
req.log.error({ error, userId: userProfile.user.user_id, body }, 'Error creating budget');
|
||||
next(error);
|
||||
@@ -92,7 +96,7 @@ router.put(
|
||||
body,
|
||||
req.log,
|
||||
);
|
||||
res.json(updatedBudget);
|
||||
sendSuccess(res, updatedBudget);
|
||||
} catch (error: unknown) {
|
||||
req.log.error(
|
||||
{ error, userId: userProfile.user.user_id, budgetId: params.id },
|
||||
@@ -115,7 +119,7 @@ router.delete(
|
||||
const { params } = req as unknown as DeleteBudgetRequest;
|
||||
try {
|
||||
await budgetRepo.deleteBudget(params.id, userProfile.user.user_id, req.log);
|
||||
res.status(204).send(); // No Content
|
||||
sendNoContent(res);
|
||||
} catch (error: unknown) {
|
||||
req.log.error(
|
||||
{ error, userId: userProfile.user.user_id, budgetId: params.id },
|
||||
@@ -147,7 +151,7 @@ router.get(
|
||||
endDate,
|
||||
req.log,
|
||||
);
|
||||
res.json(spendingData);
|
||||
sendSuccess(res, spendingData);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
{ error, userId: userProfile.user.user_id, startDate, endDate },
|
||||
|
||||
@@ -27,16 +27,14 @@ vi.mock('../services/logger.server', async () => ({
|
||||
// Mock the passport middleware
|
||||
vi.mock('./passport.routes', () => ({
|
||||
default: {
|
||||
authenticate: vi.fn(
|
||||
(_strategy, _options) => (req: Request, res: Response, next: NextFunction) => {
|
||||
// If req.user is not set by the test setup, simulate unauthenticated access.
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
// If req.user is set, proceed as an authenticated user.
|
||||
next();
|
||||
},
|
||||
),
|
||||
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
|
||||
// If req.user is not set by the test setup, simulate unauthenticated access.
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
// If req.user is set, proceed as an authenticated user.
|
||||
next();
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -77,7 +75,7 @@ describe('Deals Routes (/api/users/deals)', () => {
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockDeals);
|
||||
expect(response.body.data).toEqual(mockDeals);
|
||||
expect(dealsRepo.findBestPricesForWatchedItems).toHaveBeenCalledWith(
|
||||
mockUser.user.user_id,
|
||||
expectLogger,
|
||||
@@ -96,7 +94,7 @@ describe('Deals Routes (/api/users/deals)', () => {
|
||||
'/api/users/deals/best-watched-prices',
|
||||
);
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching best watched item deals.',
|
||||
|
||||
@@ -6,6 +6,7 @@ import { dealsRepo } from '../services/db/deals.db';
|
||||
import type { UserProfile } from '../types';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { userReadLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -40,7 +41,7 @@ router.get(
|
||||
req.log,
|
||||
);
|
||||
req.log.info({ dealCount: deals.length }, 'Successfully fetched best watched item deals.');
|
||||
res.status(200).json(deals);
|
||||
sendSuccess(res, deals);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching best watched item deals.');
|
||||
next(error); // Pass errors to the global error handler
|
||||
|
||||
@@ -49,7 +49,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
const response = await supertest(app).get('/api/flyers');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockFlyers);
|
||||
expect(response.body.data).toEqual(mockFlyers);
|
||||
// Also assert that the default limit and offset were used.
|
||||
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 20, 0);
|
||||
});
|
||||
@@ -77,7 +77,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
vi.mocked(db.flyerRepo.getFlyers).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/flyers');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching flyers in /api/flyers:',
|
||||
@@ -87,8 +87,8 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
it('should return 400 for invalid query parameters', async () => {
|
||||
const response = await supertest(app).get('/api/flyers?limit=abc&offset=-5');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors.length).toBe(2);
|
||||
expect(response.body.error.details).toBeDefined();
|
||||
expect(response.body.error.details.length).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -100,7 +100,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
const response = await supertest(app).get('/api/flyers/123');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockFlyer);
|
||||
expect(response.body.data).toEqual(mockFlyer);
|
||||
expect(db.flyerRepo.getFlyerById).toHaveBeenCalledWith(123);
|
||||
});
|
||||
|
||||
@@ -114,14 +114,14 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
const response = await supertest(app).get('/api/flyers/999');
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toContain('not found');
|
||||
expect(response.body.error.message).toContain('not found');
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid flyer ID', async () => {
|
||||
const response = await supertest(app).get('/api/flyers/abc');
|
||||
expect(response.status).toBe(400);
|
||||
// Zod coercion results in NaN for "abc", which triggers a type error before our custom message
|
||||
expect(response.body.errors[0].message).toMatch(
|
||||
expect(response.body.error.details[0].message).toMatch(
|
||||
/Invalid flyer ID provided|expected number, received NaN/,
|
||||
);
|
||||
});
|
||||
@@ -131,7 +131,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
vi.mocked(db.flyerRepo.getFlyerById).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/flyers/123');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError, flyerId: 123 },
|
||||
'Error fetching flyer by ID:',
|
||||
@@ -147,13 +147,13 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
const response = await supertest(app).get('/api/flyers/123/items');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockFlyerItems);
|
||||
expect(response.body.data).toEqual(mockFlyerItems);
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid flyer ID', async () => {
|
||||
const response = await supertest(app).get('/api/flyers/abc/items');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(
|
||||
expect(response.body.error.details[0].message).toMatch(
|
||||
/Invalid flyer ID provided|expected number, received NaN/,
|
||||
);
|
||||
});
|
||||
@@ -163,7 +163,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
vi.mocked(db.flyerRepo.getFlyerItems).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/flyers/123/items');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError, flyerId: 123 },
|
||||
'Error fetching flyer items in /api/flyers/:id/items:',
|
||||
@@ -181,7 +181,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
.send({ flyerIds: [1, 2] });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockFlyerItems);
|
||||
expect(response.body.data).toEqual(mockFlyerItems);
|
||||
});
|
||||
|
||||
it('should return 400 if flyerIds is not an array', async () => {
|
||||
@@ -189,7 +189,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
.post('/api/flyers/items/batch-fetch')
|
||||
.send({ flyerIds: 'not-an-array' });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(/expected array/);
|
||||
expect(response.body.error.details[0].message).toMatch(/expected array/);
|
||||
});
|
||||
|
||||
it('should return 400 if flyerIds is an empty array, as per schema validation', async () => {
|
||||
@@ -198,7 +198,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
.send({ flyerIds: [] });
|
||||
expect(response.status).toBe(400);
|
||||
// Check for the specific Zod error message.
|
||||
expect(response.body.errors[0].message).toBe('flyerIds must be a non-empty array.');
|
||||
expect(response.body.error.details[0].message).toBe('flyerIds must be a non-empty array.');
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -207,7 +207,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
.post('/api/flyers/items/batch-fetch')
|
||||
.send({ flyerIds: [1] });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -220,7 +220,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
.send({ flyerIds: [1, 2, 3] });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ count: 42 });
|
||||
expect(response.body.data).toEqual({ count: 42 });
|
||||
});
|
||||
|
||||
it('should return 400 if flyerIds is not an array', async () => {
|
||||
@@ -237,7 +237,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
.post('/api/flyers/items/batch-count')
|
||||
.send({ flyerIds: [] });
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ count: 0 });
|
||||
expect(response.body.data).toEqual({ count: 0 });
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -246,7 +246,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
.post('/api/flyers/items/batch-count')
|
||||
.send({ flyerIds: [1] });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -317,7 +317,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
const response = await supertest(app)
|
||||
.get('/api/flyers')
|
||||
.set('X-Test-Rate-Limit-Enable', 'true');
|
||||
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.headers).toHaveProperty('ratelimit-limit');
|
||||
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
|
||||
@@ -350,7 +350,7 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
it('should apply trackingLimiter to POST /items/:itemId/track', async () => {
|
||||
// Mock fire-and-forget promise
|
||||
vi.mocked(db.flyerRepo.trackFlyerItemInteraction).mockResolvedValue(undefined);
|
||||
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/flyers/items/1/track')
|
||||
.set('X-Test-Rate-Limit-Enable', 'true')
|
||||
|
||||
@@ -4,11 +4,8 @@ import * as db from '../services/db/index.db';
|
||||
import { z } from 'zod';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { optionalNumeric } from '../utils/zodUtils';
|
||||
import {
|
||||
publicReadLimiter,
|
||||
batchLimiter,
|
||||
trackingLimiter,
|
||||
} from '../config/rateLimiters';
|
||||
import { publicReadLimiter, batchLimiter, trackingLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -53,34 +50,44 @@ const trackItemSchema = z.object({
|
||||
/**
|
||||
* GET /api/flyers - Get a paginated list of all flyers.
|
||||
*/
|
||||
router.get('/', publicReadLimiter, validateRequest(getFlyersSchema), async (req, res, next): Promise<void> => {
|
||||
try {
|
||||
// The `validateRequest` middleware ensures `req.query` is valid.
|
||||
// We parse it here to apply Zod's coercions (string to number) and defaults.
|
||||
const { limit, offset } = getFlyersSchema.shape.query.parse(req.query);
|
||||
router.get(
|
||||
'/',
|
||||
publicReadLimiter,
|
||||
validateRequest(getFlyersSchema),
|
||||
async (req, res, next): Promise<void> => {
|
||||
try {
|
||||
// The `validateRequest` middleware ensures `req.query` is valid.
|
||||
// We parse it here to apply Zod's coercions (string to number) and defaults.
|
||||
const { limit, offset } = getFlyersSchema.shape.query.parse(req.query);
|
||||
|
||||
const flyers = await db.flyerRepo.getFlyers(req.log, limit, offset);
|
||||
res.json(flyers);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching flyers in /api/flyers:');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
const flyers = await db.flyerRepo.getFlyers(req.log, limit, offset);
|
||||
sendSuccess(res, flyers);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching flyers in /api/flyers:');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* GET /api/flyers/:id - Get a single flyer by its ID.
|
||||
*/
|
||||
router.get('/:id', publicReadLimiter, validateRequest(flyerIdParamSchema), async (req, res, next): Promise<void> => {
|
||||
try {
|
||||
// Explicitly parse to get the coerced number type for `id`.
|
||||
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
|
||||
const flyer = await db.flyerRepo.getFlyerById(id);
|
||||
res.json(flyer);
|
||||
} catch (error) {
|
||||
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer by ID:');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
router.get(
|
||||
'/:id',
|
||||
publicReadLimiter,
|
||||
validateRequest(flyerIdParamSchema),
|
||||
async (req, res, next): Promise<void> => {
|
||||
try {
|
||||
// Explicitly parse to get the coerced number type for `id`.
|
||||
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
|
||||
const flyer = await db.flyerRepo.getFlyerById(id);
|
||||
sendSuccess(res, flyer);
|
||||
} catch (error) {
|
||||
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer by ID:');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* GET /api/flyers/:id/items - Get all items for a specific flyer.
|
||||
@@ -90,14 +97,16 @@ router.get(
|
||||
publicReadLimiter,
|
||||
validateRequest(flyerIdParamSchema),
|
||||
async (req, res, next): Promise<void> => {
|
||||
type GetFlyerByIdRequest = z.infer<typeof flyerIdParamSchema>;
|
||||
try {
|
||||
// Explicitly parse to get the coerced number type for `id`.
|
||||
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
|
||||
const items = await db.flyerRepo.getFlyerItems(id, req.log);
|
||||
res.json(items);
|
||||
sendSuccess(res, items);
|
||||
} catch (error) {
|
||||
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer items in /api/flyers/:id/items:');
|
||||
req.log.error(
|
||||
{ error, flyerId: req.params.id },
|
||||
'Error fetching flyer items in /api/flyers/:id/items:',
|
||||
);
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -117,7 +126,7 @@ router.post(
|
||||
// No re-parsing needed here as `validateRequest` has already ensured the body shape,
|
||||
// and `express.json()` has parsed it. There's no type coercion to apply.
|
||||
const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
|
||||
res.json(items);
|
||||
sendSuccess(res, items);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching batch flyer items');
|
||||
next(error);
|
||||
@@ -139,7 +148,7 @@ router.post(
|
||||
// The schema ensures flyerIds is an array of numbers.
|
||||
// The `?? []` was redundant as `validateRequest` would have already caught a missing `flyerIds`.
|
||||
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds, req.log);
|
||||
res.json({ count });
|
||||
sendSuccess(res, { count });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error counting batch flyer items');
|
||||
next(error);
|
||||
@@ -150,22 +159,27 @@ router.post(
|
||||
/**
|
||||
* POST /api/flyers/items/:itemId/track - Tracks a user interaction with a flyer item.
|
||||
*/
|
||||
router.post('/items/:itemId/track', trackingLimiter, validateRequest(trackItemSchema), (req, res, next): void => {
|
||||
try {
|
||||
// Explicitly parse to get coerced types.
|
||||
const { params, body } = trackItemSchema.parse({ params: req.params, body: req.body });
|
||||
router.post(
|
||||
'/items/:itemId/track',
|
||||
trackingLimiter,
|
||||
validateRequest(trackItemSchema),
|
||||
(req, res, next): void => {
|
||||
try {
|
||||
// Explicitly parse to get coerced types.
|
||||
const { params, body } = trackItemSchema.parse({ params: req.params, body: req.body });
|
||||
|
||||
// Fire-and-forget: we don't await the tracking call to avoid delaying the response.
|
||||
// We add a .catch to log any potential errors without crashing the server process.
|
||||
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log).catch((error) => {
|
||||
req.log.error({ error, itemId: params.itemId }, 'Flyer item interaction tracking failed');
|
||||
});
|
||||
// Fire-and-forget: we don't await the tracking call to avoid delaying the response.
|
||||
// We add a .catch to log any potential errors without crashing the server process.
|
||||
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log).catch((error) => {
|
||||
req.log.error({ error, itemId: params.itemId }, 'Flyer item interaction tracking failed');
|
||||
});
|
||||
|
||||
res.status(202).send();
|
||||
} catch (error) {
|
||||
// This will catch Zod parsing errors if they occur.
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
sendSuccess(res, { message: 'Tracking accepted' }, 202);
|
||||
} catch (error) {
|
||||
// This will catch Zod parsing errors if they occur.
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
export default router;
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
createMockUserAchievement,
|
||||
createMockLeaderboardUser,
|
||||
} from '../tests/utils/mockFactories';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
import '../tests/utils/mockLogger';
|
||||
import { ForeignKeyConstraintError } from '../services/db/errors.db';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
|
||||
@@ -98,7 +98,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
|
||||
const response = await supertest(unauthenticatedApp).get('/api/achievements');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockAchievements);
|
||||
expect(response.body.data).toEqual(mockAchievements);
|
||||
expect(db.gamificationRepo.getAllAchievements).toHaveBeenCalledWith(expectLogger);
|
||||
});
|
||||
|
||||
@@ -108,7 +108,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
|
||||
const response = await supertest(unauthenticatedApp).get('/api/achievements');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Connection Failed');
|
||||
expect(response.body.error.message).toBe('DB Connection Failed');
|
||||
});
|
||||
|
||||
it('should return 400 if awarding an achievement to a non-existent user', async () => {
|
||||
@@ -125,7 +125,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
.post('/api/achievements/award')
|
||||
.send({ userId: 'non-existent', achievementName: 'Test Award' });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('User not found');
|
||||
expect(response.body.error.message).toBe('User not found');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -150,7 +150,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
const response = await supertest(authenticatedApp).get('/api/achievements/me');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUserAchievements);
|
||||
expect(response.body.data).toEqual(mockUserAchievements);
|
||||
expect(db.gamificationRepo.getUserAchievements).toHaveBeenCalledWith(
|
||||
'user-123',
|
||||
expectLogger,
|
||||
@@ -167,7 +167,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
vi.mocked(db.gamificationRepo.getUserAchievements).mockRejectedValue(dbError);
|
||||
const response = await supertest(authenticatedApp).get('/api/achievements/me');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -207,7 +207,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
const response = await supertest(adminApp).post('/api/achievements/award').send(awardPayload);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toContain('Successfully awarded');
|
||||
expect(response.body.data.message).toContain('Successfully awarded');
|
||||
expect(db.gamificationRepo.awardAchievement).toHaveBeenCalledTimes(1);
|
||||
expect(db.gamificationRepo.awardAchievement).toHaveBeenCalledWith(
|
||||
awardPayload.userId,
|
||||
@@ -226,7 +226,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
|
||||
const response = await supertest(adminApp).post('/api/achievements/award').send(awardPayload);
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid userId or achievementName', async () => {
|
||||
@@ -240,7 +240,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
.post('/api/achievements/award')
|
||||
.send({ userId: '', achievementName: '' });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toHaveLength(2);
|
||||
expect(response.body.error.details).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should return 400 if userId or achievementName are missing', async () => {
|
||||
@@ -254,13 +254,13 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
.post('/api/achievements/award')
|
||||
.send({ achievementName: 'Test Award' });
|
||||
expect(response1.status).toBe(400);
|
||||
expect(response1.body.errors[0].message).toBe('userId is required.');
|
||||
expect(response1.body.error.details[0].message).toBe('userId is required.');
|
||||
|
||||
const response2 = await supertest(adminApp)
|
||||
.post('/api/achievements/award')
|
||||
.send({ userId: 'user-789' });
|
||||
expect(response2.status).toBe(400);
|
||||
expect(response2.body.errors[0].message).toBe('achievementName is required.');
|
||||
expect(response2.body.error.details[0].message).toBe('achievementName is required.');
|
||||
});
|
||||
|
||||
it('should return 400 if awarding an achievement to a non-existent user', async () => {
|
||||
@@ -277,7 +277,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
.post('/api/achievements/award')
|
||||
.send({ userId: 'non-existent', achievementName: 'Test Award' });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('User not found');
|
||||
expect(response.body.error.message).toBe('User not found');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -298,7 +298,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockLeaderboard);
|
||||
expect(response.body.data).toEqual(mockLeaderboard);
|
||||
expect(db.gamificationRepo.getLeaderboard).toHaveBeenCalledWith(5, expect.anything());
|
||||
});
|
||||
|
||||
@@ -316,7 +316,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
const response = await supertest(unauthenticatedApp).get('/api/achievements/leaderboard');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockLeaderboard);
|
||||
expect(response.body.data).toEqual(mockLeaderboard);
|
||||
expect(db.gamificationRepo.getLeaderboard).toHaveBeenCalledWith(10, expect.anything());
|
||||
});
|
||||
|
||||
@@ -324,7 +324,7 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
vi.mocked(db.gamificationRepo.getLeaderboard).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(unauthenticatedApp).get('/api/achievements/leaderboard');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid limit parameter', async () => {
|
||||
@@ -332,8 +332,8 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
'/api/achievements/leaderboard?limit=100',
|
||||
);
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors[0].message).toMatch(/less than or equal to 50|Too big/i);
|
||||
expect(response.body.error.details).toBeDefined();
|
||||
expect(response.body.error.details[0].message).toMatch(/less than or equal to 50|Too big/i);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -13,11 +13,8 @@ import { validateRequest } from '../middleware/validation.middleware';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { requiredString, optionalNumeric } from '../utils/zodUtils';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import {
|
||||
publicReadLimiter,
|
||||
userReadLimiter,
|
||||
adminTriggerLimiter,
|
||||
} from '../config/rateLimiters';
|
||||
import { publicReadLimiter, userReadLimiter, adminTriggerLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
const router = express.Router();
|
||||
const adminGamificationRouter = express.Router(); // Create a new router for admin-only routes.
|
||||
@@ -48,7 +45,7 @@ const awardAchievementSchema = z.object({
|
||||
router.get('/', publicReadLimiter, async (req, res, next: NextFunction) => {
|
||||
try {
|
||||
const achievements = await gamificationService.getAllAchievements(req.log);
|
||||
res.json(achievements);
|
||||
sendSuccess(res, achievements);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching all achievements in /api/achievements:');
|
||||
next(error);
|
||||
@@ -69,7 +66,7 @@ router.get(
|
||||
// We parse it here to apply Zod's coercions (string to number) and defaults.
|
||||
const { limit } = leaderboardQuerySchema.parse(req.query);
|
||||
const leaderboard = await gamificationService.getLeaderboard(limit!, req.log);
|
||||
res.json(leaderboard);
|
||||
sendSuccess(res, leaderboard);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching leaderboard:');
|
||||
next(error);
|
||||
@@ -94,7 +91,7 @@ router.get(
|
||||
userProfile.user.user_id,
|
||||
req.log,
|
||||
);
|
||||
res.json(userAchievements);
|
||||
sendSuccess(res, userAchievements);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
{ error, userId: userProfile.user.user_id },
|
||||
@@ -124,11 +121,9 @@ adminGamificationRouter.post(
|
||||
const { body } = req as unknown as AwardAchievementRequest;
|
||||
try {
|
||||
await gamificationService.awardAchievement(body.userId, body.achievementName, req.log);
|
||||
res
|
||||
.status(200)
|
||||
.json({
|
||||
message: `Successfully awarded '${body.achievementName}' to user ${body.userId}.`,
|
||||
});
|
||||
sendSuccess(res, {
|
||||
message: `Successfully awarded '${body.achievementName}' to user ${body.userId}.`,
|
||||
});
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
|
||||
@@ -64,7 +64,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.text).toBe('pong');
|
||||
expect(response.body.data.message).toBe('pong');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -78,10 +78,8 @@ describe('Health Routes (/api/health)', () => {
|
||||
|
||||
// Assert: Check for the correct status and response body.
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({
|
||||
success: true,
|
||||
message: 'Redis connection is healthy.',
|
||||
});
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.data.message).toBe('Redis connection is healthy.');
|
||||
});
|
||||
|
||||
it('should return 500 if Redis ping fails', async () => {
|
||||
@@ -94,7 +92,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Connection timed out');
|
||||
expect(response.body.error.message).toBe('Connection timed out');
|
||||
});
|
||||
|
||||
it('should return 500 if Redis ping returns an unexpected response', async () => {
|
||||
@@ -106,7 +104,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Unexpected Redis ping response: OK');
|
||||
expect(response.body.error.message).toContain('Unexpected Redis ping response: OK');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -122,9 +120,9 @@ describe('Health Routes (/api/health)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.currentTime).toBe('2024-03-15T10:30:00.000Z');
|
||||
expect(response.body.year).toBe(2024);
|
||||
expect(response.body.week).toBe(11);
|
||||
expect(response.body.data.currentTime).toBe('2024-03-15T10:30:00.000Z');
|
||||
expect(response.body.data.year).toBe(2024);
|
||||
expect(response.body.data.week).toBe(11);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -139,7 +137,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.message).toBe('All required database tables exist.');
|
||||
expect(response.body.data.message).toBe('All required database tables exist.');
|
||||
});
|
||||
|
||||
it('should return 500 if tables are missing', async () => {
|
||||
@@ -149,7 +147,9 @@ describe('Health Routes (/api/health)', () => {
|
||||
const response = await supertest(app).get('/api/health/db-schema');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Missing tables: missing_table_1, missing_table_2');
|
||||
expect(response.body.error.message).toContain(
|
||||
'Missing tables: missing_table_1, missing_table_2',
|
||||
);
|
||||
// The error is passed to next(), so the global error handler would log it, not the route handler itself.
|
||||
});
|
||||
|
||||
@@ -161,10 +161,12 @@ describe('Health Routes (/api/health)', () => {
|
||||
const response = await supertest(app).get('/api/health/db-schema');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB connection failed'); // This is the message from the original error
|
||||
expect(response.body.stack).toBeDefined();
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
console.log('[DEBUG] health.routes.test.ts: Verifying logger.error for DB schema check failure');
|
||||
expect(response.body.error.message).toBe('DB connection failed'); // This is the message from the original error
|
||||
expect(response.body.error.details.stack).toBeDefined();
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
console.log(
|
||||
'[DEBUG] health.routes.test.ts: Verifying logger.error for DB schema check failure',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
@@ -181,8 +183,8 @@ describe('Health Routes (/api/health)', () => {
|
||||
const response = await supertest(app).get('/api/health/db-schema');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB connection failed'); // This is the message from the original error
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
expect(response.body.error.message).toBe('DB connection failed'); // This is the message from the original error
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.objectContaining({ message: 'DB connection failed' }),
|
||||
@@ -203,7 +205,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.message).toContain('is accessible and writable');
|
||||
expect(response.body.data.message).toContain('is accessible and writable');
|
||||
});
|
||||
|
||||
it('should return 500 if storage is not accessible or writable', async () => {
|
||||
@@ -216,7 +218,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Storage check failed.');
|
||||
expect(response.body.error.message).toContain('Storage check failed.');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
@@ -235,7 +237,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Storage check failed.');
|
||||
expect(response.body.error.message).toContain('Storage check failed.');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
@@ -260,7 +262,7 @@ describe('Health Routes (/api/health)', () => {
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.message).toContain('Pool Status: 10 total, 8 idle, 1 waiting.');
|
||||
expect(response.body.data.message).toContain('Pool Status: 10 total, 8 idle, 1 waiting.');
|
||||
});
|
||||
|
||||
it('should return 500 for an unhealthy pool status', async () => {
|
||||
@@ -277,8 +279,8 @@ describe('Health Routes (/api/health)', () => {
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.message).toContain('Pool may be under stress.');
|
||||
expect(response.body.message).toContain('Pool Status: 20 total, 5 idle, 15 waiting.');
|
||||
expect(response.body.error.message).toContain('Pool may be under stress.');
|
||||
expect(response.body.error.message).toContain('Pool Status: 20 total, 5 idle, 15 waiting.');
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
'Database pool health check shows high waiting count: 15',
|
||||
);
|
||||
@@ -295,8 +297,8 @@ describe('Health Routes (/api/health)', () => {
|
||||
const response = await supertest(app).get('/api/health/db-pool');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Pool is not initialized'); // This is the message from the original error
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
expect(response.body.error.message).toBe('Pool is not initialized'); // This is the message from the original error
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
@@ -315,9 +317,9 @@ describe('Health Routes (/api/health)', () => {
|
||||
const response = await supertest(app).get('/api/health/db-pool');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Pool is not initialized'); // This is the message from the original error
|
||||
expect(response.body.stack).toBeDefined();
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
expect(response.body.error.message).toBe('Pool is not initialized'); // This is the message from the original error
|
||||
expect(response.body.error.details.stack).toBeDefined();
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.objectContaining({ message: 'Pool is not initialized' }),
|
||||
@@ -334,10 +336,12 @@ describe('Health Routes (/api/health)', () => {
|
||||
const response = await supertest(app).get('/api/health/redis');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Connection timed out');
|
||||
expect(response.body.stack).toBeDefined();
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
console.log('[DEBUG] health.routes.test.ts: Checking if logger.error was called with the correct pattern');
|
||||
expect(response.body.error.message).toBe('Connection timed out');
|
||||
expect(response.body.error.details.stack).toBeDefined();
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
console.log(
|
||||
'[DEBUG] health.routes.test.ts: Checking if logger.error was called with the correct pattern',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
@@ -352,9 +356,9 @@ describe('Health Routes (/api/health)', () => {
|
||||
const response = await supertest(app).get('/api/health/redis');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toContain('Unexpected Redis ping response: OK');
|
||||
expect(response.body.stack).toBeDefined();
|
||||
expect(response.body.errorId).toEqual(expect.any(String));
|
||||
expect(response.body.error.message).toContain('Unexpected Redis ping response: OK');
|
||||
expect(response.body.error.details.stack).toBeDefined();
|
||||
expect(response.body.meta.requestId).toEqual(expect.any(String));
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
|
||||
@@ -14,6 +14,7 @@ import { connection as redisConnection } from '../services/queueService.server';
|
||||
import fs from 'node:fs/promises';
|
||||
import { getSimpleWeekAndYear } from '../utils/dateUtils';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -129,7 +130,7 @@ const emptySchema = z.object({});
|
||||
* GET /api/health/ping - A simple endpoint to check if the server is responsive.
|
||||
*/
|
||||
router.get('/ping', validateRequest(emptySchema), (_req: Request, res: Response) => {
|
||||
res.status(200).send('pong');
|
||||
return sendSuccess(res, { message: 'pong' });
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
@@ -146,7 +147,7 @@ router.get('/ping', validateRequest(emptySchema), (_req: Request, res: Response)
|
||||
* It only checks that the Node.js process can handle HTTP requests.
|
||||
*/
|
||||
router.get('/live', validateRequest(emptySchema), (_req: Request, res: Response) => {
|
||||
res.status(200).json({
|
||||
return sendSuccess(res, {
|
||||
status: 'ok',
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
@@ -198,9 +199,10 @@ router.get('/ready', validateRequest(emptySchema), async (req: Request, res: Res
|
||||
// Return appropriate HTTP status code
|
||||
// 200 = healthy or degraded (can still handle traffic)
|
||||
// 503 = unhealthy (should not receive traffic)
|
||||
const httpStatus = overallStatus === 'unhealthy' ? 503 : 200;
|
||||
|
||||
return res.status(httpStatus).json(response);
|
||||
if (overallStatus === 'unhealthy') {
|
||||
return sendError(res, ErrorCode.SERVICE_UNAVAILABLE, 'Service unhealthy', 503, response);
|
||||
}
|
||||
return sendSuccess(res, response);
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -216,14 +218,13 @@ router.get('/startup', validateRequest(emptySchema), async (req: Request, res: R
|
||||
const database = await checkDatabase();
|
||||
|
||||
if (database.status === 'unhealthy') {
|
||||
return res.status(503).json({
|
||||
return sendError(res, ErrorCode.SERVICE_UNAVAILABLE, 'Waiting for database connection', 503, {
|
||||
status: 'starting',
|
||||
message: 'Waiting for database connection',
|
||||
database,
|
||||
});
|
||||
}
|
||||
|
||||
return res.status(200).json({
|
||||
return sendSuccess(res, {
|
||||
status: 'started',
|
||||
timestamp: new Date().toISOString(),
|
||||
database,
|
||||
@@ -245,7 +246,7 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
|
||||
new Error(`Database schema check failed. Missing tables: ${missingTables.join(', ')}.`),
|
||||
);
|
||||
}
|
||||
return res.status(200).json({ success: true, message: 'All required database tables exist.' });
|
||||
return sendSuccess(res, { message: 'All required database tables exist.' });
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
return next(error);
|
||||
@@ -266,8 +267,7 @@ router.get('/storage', validateRequest(emptySchema), async (req, res, next: Next
|
||||
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
||||
try {
|
||||
await fs.access(storagePath, fs.constants.W_OK); // Use fs.promises
|
||||
return res.status(200).json({
|
||||
success: true,
|
||||
return sendSuccess(res, {
|
||||
message: `Storage directory '${storagePath}' is accessible and writable.`,
|
||||
});
|
||||
} catch {
|
||||
@@ -293,12 +293,16 @@ router.get(
|
||||
const message = `Pool Status: ${status.totalCount} total, ${status.idleCount} idle, ${status.waitingCount} waiting.`;
|
||||
|
||||
if (isHealthy) {
|
||||
return res.status(200).json({ success: true, message });
|
||||
return sendSuccess(res, { message, ...status });
|
||||
} else {
|
||||
req.log.warn(`Database pool health check shows high waiting count: ${status.waitingCount}`);
|
||||
return res
|
||||
.status(500)
|
||||
.json({ success: false, message: `Pool may be under stress. ${message}` });
|
||||
return sendError(
|
||||
res,
|
||||
ErrorCode.INTERNAL_ERROR,
|
||||
`Pool may be under stress. ${message}`,
|
||||
500,
|
||||
status,
|
||||
);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
@@ -319,7 +323,7 @@ router.get(
|
||||
router.get('/time', validateRequest(emptySchema), (req: Request, res: Response) => {
|
||||
const now = new Date();
|
||||
const { year, week } = getSimpleWeekAndYear(now);
|
||||
res.json({
|
||||
return sendSuccess(res, {
|
||||
currentTime: now.toISOString(),
|
||||
year,
|
||||
week,
|
||||
@@ -336,7 +340,7 @@ router.get(
|
||||
try {
|
||||
const reply = await redisConnection.ping();
|
||||
if (reply === 'PONG') {
|
||||
return res.status(200).json({ success: true, message: 'Redis connection is healthy.' });
|
||||
return sendSuccess(res, { message: 'Redis connection is healthy.' });
|
||||
}
|
||||
throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
|
||||
} catch (error: unknown) {
|
||||
|
||||
@@ -40,18 +40,22 @@ describe('Personalization Routes (/api/personalization)', () => {
|
||||
const mockItems = [createMockMasterGroceryItem({ master_grocery_item_id: 1, name: 'Milk' })];
|
||||
vi.mocked(db.personalizationRepo.getAllMasterItems).mockResolvedValue(mockItems);
|
||||
|
||||
const response = await supertest(app).get('/api/personalization/master-items').set('x-test-rate-limit-enable', 'true');
|
||||
const response = await supertest(app)
|
||||
.get('/api/personalization/master-items')
|
||||
.set('x-test-rate-limit-enable', 'true');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockItems);
|
||||
expect(response.body.data).toEqual(mockItems);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
const dbError = new Error('DB Error');
|
||||
vi.mocked(db.personalizationRepo.getAllMasterItems).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/personalization/master-items').set('x-test-rate-limit-enable', 'true');
|
||||
const response = await supertest(app)
|
||||
.get('/api/personalization/master-items')
|
||||
.set('x-test-rate-limit-enable', 'true');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching master items in /api/personalization/master-items:',
|
||||
@@ -67,7 +71,7 @@ describe('Personalization Routes (/api/personalization)', () => {
|
||||
const response = await supertest(app).get('/api/personalization/dietary-restrictions');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockRestrictions);
|
||||
expect(response.body.data).toEqual(mockRestrictions);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -75,7 +79,7 @@ describe('Personalization Routes (/api/personalization)', () => {
|
||||
vi.mocked(db.personalizationRepo.getDietaryRestrictions).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/personalization/dietary-restrictions');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching dietary restrictions in /api/personalization/dietary-restrictions:',
|
||||
@@ -91,7 +95,7 @@ describe('Personalization Routes (/api/personalization)', () => {
|
||||
const response = await supertest(app).get('/api/personalization/appliances');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockAppliances);
|
||||
expect(response.body.data).toEqual(mockAppliances);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -99,7 +103,7 @@ describe('Personalization Routes (/api/personalization)', () => {
|
||||
vi.mocked(db.personalizationRepo.getAppliances).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/personalization/appliances');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching appliances in /api/personalization/appliances:',
|
||||
|
||||
@@ -4,6 +4,7 @@ import { z } from 'zod';
|
||||
import * as db from '../services/db/index.db';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { publicReadLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -28,7 +29,7 @@ router.get(
|
||||
res.set('Cache-Control', 'public, max-age=3600');
|
||||
|
||||
const masterItems = await db.personalizationRepo.getAllMasterItems(req.log);
|
||||
res.json(masterItems);
|
||||
sendSuccess(res, masterItems);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching master items in /api/personalization/master-items:');
|
||||
next(error);
|
||||
@@ -46,7 +47,7 @@ router.get(
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const restrictions = await db.personalizationRepo.getDietaryRestrictions(req.log);
|
||||
res.json(restrictions);
|
||||
sendSuccess(res, restrictions);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
{ error },
|
||||
@@ -67,7 +68,7 @@ router.get(
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const appliances = await db.personalizationRepo.getAppliances(req.log);
|
||||
res.json(appliances);
|
||||
sendSuccess(res, appliances);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching appliances in /api/personalization/appliances:');
|
||||
next(error);
|
||||
|
||||
@@ -22,16 +22,14 @@ vi.mock('../services/logger.server', async () => ({
|
||||
// Mock the passport middleware
|
||||
vi.mock('./passport.routes', () => ({
|
||||
default: {
|
||||
authenticate: vi.fn(
|
||||
(_strategy, _options) => (req: Request, res: Response, next: NextFunction) => {
|
||||
// If req.user is not set by the test setup, simulate unauthenticated access.
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
// If req.user is set, proceed as an authenticated user.
|
||||
next();
|
||||
},
|
||||
),
|
||||
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
|
||||
// If req.user is not set by the test setup, simulate unauthenticated access.
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
// If req.user is set, proceed as an authenticated user.
|
||||
next();
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -41,7 +39,11 @@ import { priceRepo } from '../services/db/price.db';
|
||||
|
||||
describe('Price Routes (/api/price-history)', () => {
|
||||
const mockUser = createMockUserProfile({ user: { user_id: 'price-user-123' } });
|
||||
const app = createTestApp({ router: priceRouter, basePath: '/api/price-history', authenticatedUser: mockUser });
|
||||
const app = createTestApp({
|
||||
router: priceRouter,
|
||||
basePath: '/api/price-history',
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
@@ -59,7 +61,7 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
.send({ masterItemIds: [1, 2] });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockHistory);
|
||||
expect(response.body.data).toEqual(mockHistory);
|
||||
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith([1, 2], expect.any(Object), 1000, 0);
|
||||
});
|
||||
|
||||
@@ -69,12 +71,7 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: [1, 2, 3], limit: 50, offset: 10 });
|
||||
|
||||
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith(
|
||||
[1, 2, 3],
|
||||
expect.any(Object),
|
||||
50,
|
||||
10,
|
||||
);
|
||||
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith([1, 2, 3], expect.any(Object), 50, 10);
|
||||
});
|
||||
|
||||
it('should log the request info', async () => {
|
||||
@@ -98,14 +95,14 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
.send({ masterItemIds: [1, 2, 3] });
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Database connection failed');
|
||||
expect(response.body.error.message).toBe('Database connection failed');
|
||||
});
|
||||
|
||||
it('should return 400 if masterItemIds is an empty array', async () => {
|
||||
const response = await supertest(app).post('/api/price-history').send({ masterItemIds: [] });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
expect(response.body.error.details[0].message).toBe(
|
||||
'masterItemIds must be a non-empty array of positive integers.',
|
||||
);
|
||||
});
|
||||
@@ -117,7 +114,9 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
// The actual message is "Invalid input: expected array, received string"
|
||||
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received string');
|
||||
expect(response.body.error.details[0].message).toBe(
|
||||
'Invalid input: expected array, received string',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 400 if masterItemIds contains non-positive integers', async () => {
|
||||
@@ -126,7 +125,7 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
.send({ masterItemIds: [1, -2, 3] });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('Number must be greater than 0');
|
||||
expect(response.body.error.details[0].message).toBe('Number must be greater than 0');
|
||||
});
|
||||
|
||||
it('should return 400 if masterItemIds is missing', async () => {
|
||||
@@ -134,7 +133,9 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
// The actual message is "Invalid input: expected array, received undefined"
|
||||
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received undefined');
|
||||
expect(response.body.error.details[0].message).toBe(
|
||||
'Invalid input: expected array, received undefined',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 400 for invalid limit and offset', async () => {
|
||||
@@ -143,10 +144,12 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
.send({ masterItemIds: [1], limit: -1, offset: 'abc' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toHaveLength(2);
|
||||
expect(response.body.error.details).toHaveLength(2);
|
||||
// The actual message is "Too small: expected number to be >0"
|
||||
expect(response.body.errors[0].message).toBe('Too small: expected number to be >0');
|
||||
expect(response.body.errors[1].message).toBe('Invalid input: expected number, received NaN');
|
||||
expect(response.body.error.details[0].message).toBe('Too small: expected number to be >0');
|
||||
expect(response.body.error.details[1].message).toBe(
|
||||
'Invalid input: expected number, received NaN',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -6,16 +6,15 @@ import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { priceRepo } from '../services/db/price.db';
|
||||
import { optionalNumeric } from '../utils/zodUtils';
|
||||
import { priceHistoryLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
const priceHistorySchema = z.object({
|
||||
body: z.object({
|
||||
masterItemIds: z
|
||||
.array(z.number().int().positive('Number must be greater than 0'))
|
||||
.nonempty({
|
||||
message: 'masterItemIds must be a non-empty array of positive integers.',
|
||||
}),
|
||||
masterItemIds: z.array(z.number().int().positive('Number must be greater than 0')).nonempty({
|
||||
message: 'masterItemIds must be a non-empty array of positive integers.',
|
||||
}),
|
||||
limit: optionalNumeric({ default: 1000, integer: true, positive: true }),
|
||||
offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
|
||||
}),
|
||||
@@ -44,7 +43,7 @@ router.post(
|
||||
);
|
||||
try {
|
||||
const priceHistory = await priceRepo.getPriceHistory(masterItemIds, req.log, limit, offset);
|
||||
res.status(200).json(priceHistory);
|
||||
sendSuccess(res, priceHistory);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import type { Request, Response, NextFunction } from 'express';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||
import type { UserReaction } from '../types';
|
||||
|
||||
// 1. Mock the Service Layer directly.
|
||||
vi.mock('../services/db/index.db', () => ({
|
||||
@@ -20,15 +22,13 @@ vi.mock('../services/logger.server', async () => ({
|
||||
// Mock Passport middleware
|
||||
vi.mock('./passport.routes', () => ({
|
||||
default: {
|
||||
authenticate: vi.fn(
|
||||
() => (req: any, res: any, next: any) => {
|
||||
// If we are testing the unauthenticated state (no user injected), simulate 401.
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
next();
|
||||
},
|
||||
),
|
||||
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
|
||||
// If we are testing the unauthenticated state (no user injected), simulate 401.
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
next();
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -51,20 +51,24 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
const app = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
|
||||
|
||||
it('should return a list of reactions', async () => {
|
||||
const mockReactions = [{ id: 1, reaction_type: 'like', entity_id: '123' }];
|
||||
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions as any);
|
||||
const mockReactions = [
|
||||
{ reaction_id: 1, reaction_type: 'like', entity_id: '123' },
|
||||
] as unknown as UserReaction[];
|
||||
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions);
|
||||
|
||||
const response = await supertest(app).get('/api/reactions');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockReactions);
|
||||
expect(response.body.data).toEqual(mockReactions);
|
||||
expect(reactionRepo.getReactions).toHaveBeenCalledWith({}, expectLogger);
|
||||
});
|
||||
|
||||
it('should filter by query parameters', async () => {
|
||||
const mockReactions = [{ id: 1, reaction_type: 'like' }];
|
||||
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions as any);
|
||||
|
||||
const mockReactions = [
|
||||
{ reaction_id: 1, reaction_type: 'like' },
|
||||
] as unknown as UserReaction[];
|
||||
vi.mocked(reactionRepo.getReactions).mockResolvedValue(mockReactions);
|
||||
|
||||
const validUuid = '123e4567-e89b-12d3-a456-426614174000';
|
||||
const query = { userId: validUuid, entityType: 'recipe', entityId: '1' };
|
||||
|
||||
@@ -73,7 +77,7 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
expect(response.status).toBe(200);
|
||||
expect(reactionRepo.getReactions).toHaveBeenCalledWith(
|
||||
expect.objectContaining(query),
|
||||
expectLogger
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -84,10 +88,7 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
const response = await supertest(app).get('/api/reactions');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error },
|
||||
'Error fetching user reactions'
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith({ error }, 'Error fetching user reactions');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -95,26 +96,25 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
const app = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
|
||||
|
||||
it('should return reaction summary for an entity', async () => {
|
||||
const mockSummary = { like: 10, love: 5 };
|
||||
vi.mocked(reactionRepo.getReactionSummary).mockResolvedValue(mockSummary as any);
|
||||
const mockSummary = [
|
||||
{ reaction_type: 'like', count: 10 },
|
||||
{ reaction_type: 'love', count: 5 },
|
||||
];
|
||||
vi.mocked(reactionRepo.getReactionSummary).mockResolvedValue(mockSummary);
|
||||
|
||||
const response = await supertest(app)
|
||||
.get('/api/reactions/summary')
|
||||
.query({ entityType: 'recipe', entityId: '123' });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockSummary);
|
||||
expect(reactionRepo.getReactionSummary).toHaveBeenCalledWith(
|
||||
'recipe',
|
||||
'123',
|
||||
expectLogger
|
||||
);
|
||||
expect(response.body.data).toEqual(mockSummary);
|
||||
expect(reactionRepo.getReactionSummary).toHaveBeenCalledWith('recipe', '123', expectLogger);
|
||||
});
|
||||
|
||||
it('should return 400 if required parameters are missing', async () => {
|
||||
const response = await supertest(app).get('/api/reactions/summary');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('required');
|
||||
expect(response.body.error.details[0].message).toContain('required');
|
||||
});
|
||||
|
||||
it('should return 500 on database error', async () => {
|
||||
@@ -126,10 +126,7 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
.query({ entityType: 'recipe', entityId: '123' });
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error },
|
||||
'Error fetching reaction summary'
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith({ error }, 'Error fetching reaction summary');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -148,18 +145,20 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
};
|
||||
|
||||
it('should return 201 when a reaction is added', async () => {
|
||||
const mockResult = { ...validBody, id: 1, user_id: 'user-123' };
|
||||
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(mockResult as any);
|
||||
const mockResult = {
|
||||
...validBody,
|
||||
reaction_id: 1,
|
||||
user_id: 'user-123',
|
||||
} as unknown as UserReaction;
|
||||
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(mockResult);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
const response = await supertest(app).post('/api/reactions/toggle').send(validBody);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body).toEqual({ message: 'Reaction added.', reaction: mockResult });
|
||||
expect(response.body.data).toEqual({ message: 'Reaction added.', reaction: mockResult });
|
||||
expect(reactionRepo.toggleReaction).toHaveBeenCalledWith(
|
||||
{ user_id: 'user-123', ...validBody },
|
||||
expectLogger
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -167,12 +166,10 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
// Returning null/false from toggleReaction implies the reaction was removed
|
||||
vi.mocked(reactionRepo.toggleReaction).mockResolvedValue(null);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
const response = await supertest(app).post('/api/reactions/toggle').send(validBody);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ message: 'Reaction removed.' });
|
||||
expect(response.body.data).toEqual({ message: 'Reaction removed.' });
|
||||
});
|
||||
|
||||
it('should return 400 if body is invalid', async () => {
|
||||
@@ -181,14 +178,12 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
.send({ entity_type: 'recipe' }); // Missing other required fields
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.error.details).toBeDefined();
|
||||
});
|
||||
|
||||
it('should return 401 if not authenticated', async () => {
|
||||
const unauthApp = createTestApp({ router: reactionsRouter, basePath: '/api/reactions' });
|
||||
const response = await supertest(unauthApp)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
const response = await supertest(unauthApp).post('/api/reactions/toggle').send(validBody);
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
});
|
||||
@@ -197,14 +192,12 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
const error = new Error('DB Error');
|
||||
vi.mocked(reactionRepo.toggleReaction).mockRejectedValue(error);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/reactions/toggle')
|
||||
.send(validBody);
|
||||
const response = await supertest(app).post('/api/reactions/toggle').send(validBody);
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error, body: validBody },
|
||||
'Error toggling user reaction'
|
||||
'Error toggling user reaction',
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -240,4 +233,4 @@ describe('Reaction Routes (/api/reactions)', () => {
|
||||
expect(parseInt(response.headers['ratelimit-limit'])).toBe(150);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -6,6 +6,7 @@ import passport from './passport.routes';
|
||||
import { requiredString } from '../utils/zodUtils';
|
||||
import { UserProfile } from '../types';
|
||||
import { publicReadLimiter, reactionToggleLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -49,7 +50,7 @@ router.get(
|
||||
try {
|
||||
const { query } = getReactionsSchema.parse({ query: req.query });
|
||||
const reactions = await reactionRepo.getReactions(query, req.log);
|
||||
res.json(reactions);
|
||||
sendSuccess(res, reactions);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching user reactions');
|
||||
next(error);
|
||||
@@ -69,8 +70,12 @@ router.get(
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const { query } = getReactionSummarySchema.parse({ query: req.query });
|
||||
const summary = await reactionRepo.getReactionSummary(query.entityType, query.entityId, req.log);
|
||||
res.json(summary);
|
||||
const summary = await reactionRepo.getReactionSummary(
|
||||
query.entityType,
|
||||
query.entityId,
|
||||
req.log,
|
||||
);
|
||||
sendSuccess(res, summary);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching reaction summary');
|
||||
next(error);
|
||||
@@ -99,9 +104,9 @@ router.post(
|
||||
};
|
||||
const result = await reactionRepo.toggleReaction(reactionData, req.log);
|
||||
if (result) {
|
||||
res.status(201).json({ message: 'Reaction added.', reaction: result });
|
||||
sendSuccess(res, { message: 'Reaction added.', reaction: result }, 201);
|
||||
} else {
|
||||
res.status(200).json({ message: 'Reaction removed.' });
|
||||
sendSuccess(res, { message: 'Reaction removed.' });
|
||||
}
|
||||
} catch (error) {
|
||||
req.log.error({ error, body }, 'Error toggling user reaction');
|
||||
@@ -110,4 +115,4 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
export default router;
|
||||
export default router;
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
// src/routes/recipe.routes.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import { createMockRecipe, createMockRecipeComment, createMockUserProfile } from '../tests/utils/mockFactories';
|
||||
import type { Request, Response, NextFunction } from 'express';
|
||||
import {
|
||||
createMockRecipe,
|
||||
createMockRecipeComment,
|
||||
createMockUserProfile,
|
||||
} from '../tests/utils/mockFactories';
|
||||
import { NotFoundError } from '../services/db/errors.db';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
|
||||
@@ -26,14 +31,12 @@ vi.mock('../services/aiService.server', () => ({
|
||||
// Mock Passport
|
||||
vi.mock('./passport.routes', () => ({
|
||||
default: {
|
||||
authenticate: vi.fn(
|
||||
() => (req: any, res: any, next: any) => {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
next();
|
||||
},
|
||||
),
|
||||
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
|
||||
if (!req.user) {
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
}
|
||||
next();
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -70,7 +73,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
const response = await supertest(app).get('/api/recipes/by-sale-percentage?minPercentage=75');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockRecipes);
|
||||
expect(response.body.data).toEqual(mockRecipes);
|
||||
expect(db.recipeRepo.getRecipesBySalePercentage).toHaveBeenCalledWith(75, expectLogger);
|
||||
});
|
||||
|
||||
@@ -85,7 +88,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
vi.mocked(db.recipeRepo.getRecipesBySalePercentage).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/recipes/by-sale-percentage');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching recipes in /api/recipes/by-sale-percentage:',
|
||||
@@ -97,7 +100,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
'/api/recipes/by-sale-percentage?minPercentage=101',
|
||||
);
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('Too big');
|
||||
expect(response.body.error.details[0].message).toContain('Too big');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -120,7 +123,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
vi.mocked(db.recipeRepo.getRecipesByMinSaleIngredients).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/recipes/by-sale-ingredients');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching recipes in /api/recipes/by-sale-ingredients:',
|
||||
@@ -132,7 +135,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
'/api/recipes/by-sale-ingredients?minIngredients=abc',
|
||||
);
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -146,7 +149,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockRecipes);
|
||||
expect(response.body.data).toEqual(mockRecipes);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -156,7 +159,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
'/api/recipes/by-ingredient-and-tag?ingredient=chicken&tag=quick',
|
||||
);
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching recipes in /api/recipes/by-ingredient-and-tag:',
|
||||
@@ -168,7 +171,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
'/api/recipes/by-ingredient-and-tag?ingredient=chicken',
|
||||
);
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('Query parameter "tag" is required.');
|
||||
expect(response.body.error.details[0].message).toBe('Query parameter "tag" is required.');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -180,14 +183,14 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
const response = await supertest(app).get('/api/recipes/1/comments');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockComments);
|
||||
expect(response.body.data).toEqual(mockComments);
|
||||
expect(db.recipeRepo.getRecipeComments).toHaveBeenCalledWith(1, expectLogger);
|
||||
});
|
||||
|
||||
it('should return an empty array if recipe has no comments', async () => {
|
||||
vi.mocked(db.recipeRepo.getRecipeComments).mockResolvedValue([]);
|
||||
const response = await supertest(app).get('/api/recipes/2/comments');
|
||||
expect(response.body).toEqual([]);
|
||||
expect(response.body.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
@@ -195,7 +198,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
vi.mocked(db.recipeRepo.getRecipeComments).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/recipes/1/comments');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
`Error fetching comments for recipe ID 1:`,
|
||||
@@ -205,7 +208,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
it('should return 400 for an invalid recipeId', async () => {
|
||||
const response = await supertest(app).get('/api/recipes/abc/comments');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -217,7 +220,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
const response = await supertest(app).get('/api/recipes/456');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockRecipe);
|
||||
expect(response.body.data).toEqual(mockRecipe);
|
||||
expect(db.recipeRepo.getRecipeById).toHaveBeenCalledWith(456, expectLogger);
|
||||
});
|
||||
|
||||
@@ -226,7 +229,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
vi.mocked(db.recipeRepo.getRecipeById).mockRejectedValue(notFoundError);
|
||||
const response = await supertest(app).get('/api/recipes/999');
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toContain('not found');
|
||||
expect(response.body.error.message).toContain('not found');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: notFoundError },
|
||||
`Error fetching recipe ID 999:`,
|
||||
@@ -238,7 +241,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
vi.mocked(db.recipeRepo.getRecipeById).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/recipes/456');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
`Error fetching recipe ID 456:`,
|
||||
@@ -248,7 +251,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
it('should return 400 for an invalid recipeId', async () => {
|
||||
const response = await supertest(app).get('/api/recipes/abc');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -265,12 +268,10 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
const mockSuggestion = 'Chicken and Rice Casserole...';
|
||||
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue(mockSuggestion);
|
||||
|
||||
const response = await supertest(authApp)
|
||||
.post('/api/recipes/suggest')
|
||||
.send({ ingredients });
|
||||
const response = await supertest(authApp).post('/api/recipes/suggest').send({ ingredients });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({ suggestion: mockSuggestion });
|
||||
expect(response.body.data).toEqual({ suggestion: mockSuggestion });
|
||||
expect(aiService.generateRecipeSuggestion).toHaveBeenCalledWith(ingredients, expectLogger);
|
||||
});
|
||||
|
||||
@@ -282,7 +283,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
.send({ ingredients: ['water'] });
|
||||
|
||||
expect(response.status).toBe(503);
|
||||
expect(response.body.message).toContain('unavailable');
|
||||
expect(response.body.error.message).toContain('unavailable');
|
||||
});
|
||||
|
||||
it('should return 400 if ingredients list is empty', async () => {
|
||||
@@ -291,7 +292,9 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
.send({ ingredients: [] });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('At least one ingredient is required');
|
||||
expect(response.body.error.details[0].message).toContain(
|
||||
'At least one ingredient is required',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 401 if not authenticated', async () => {
|
||||
@@ -314,7 +317,7 @@ describe('Recipe Routes (/api/recipes)', () => {
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error },
|
||||
'Error generating recipe suggestion'
|
||||
'Error generating recipe suggestion',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,6 +7,7 @@ import passport from './passport.routes';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { requiredString, numericIdParam, optionalNumeric } from '../utils/zodUtils';
|
||||
import { publicReadLimiter, suggestionLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -49,7 +50,7 @@ router.get(
|
||||
// Explicitly parse req.query to apply coercion (string -> number) and default values
|
||||
const { query } = bySalePercentageSchema.parse({ query: req.query });
|
||||
const recipes = await db.recipeRepo.getRecipesBySalePercentage(query.minPercentage!, req.log);
|
||||
res.json(recipes);
|
||||
sendSuccess(res, recipes);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-sale-percentage:');
|
||||
next(error);
|
||||
@@ -72,7 +73,7 @@ router.get(
|
||||
query.minIngredients!,
|
||||
req.log,
|
||||
);
|
||||
res.json(recipes);
|
||||
sendSuccess(res, recipes);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-sale-ingredients:');
|
||||
next(error);
|
||||
@@ -95,7 +96,7 @@ router.get(
|
||||
query.tag,
|
||||
req.log,
|
||||
);
|
||||
res.json(recipes);
|
||||
sendSuccess(res, recipes);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-ingredient-and-tag:');
|
||||
next(error);
|
||||
@@ -106,32 +107,42 @@ router.get(
|
||||
/**
|
||||
* GET /api/recipes/:recipeId/comments - Get all comments for a specific recipe.
|
||||
*/
|
||||
router.get('/:recipeId/comments', publicReadLimiter, validateRequest(recipeIdParamsSchema), async (req, res, next) => {
|
||||
try {
|
||||
// Explicitly parse req.params to coerce recipeId to a number
|
||||
const { params } = recipeIdParamsSchema.parse({ params: req.params });
|
||||
const comments = await db.recipeRepo.getRecipeComments(params.recipeId, req.log);
|
||||
res.json(comments);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `Error fetching comments for recipe ID ${req.params.recipeId}:`);
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
router.get(
|
||||
'/:recipeId/comments',
|
||||
publicReadLimiter,
|
||||
validateRequest(recipeIdParamsSchema),
|
||||
async (req, res, next) => {
|
||||
try {
|
||||
// Explicitly parse req.params to coerce recipeId to a number
|
||||
const { params } = recipeIdParamsSchema.parse({ params: req.params });
|
||||
const comments = await db.recipeRepo.getRecipeComments(params.recipeId, req.log);
|
||||
sendSuccess(res, comments);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `Error fetching comments for recipe ID ${req.params.recipeId}:`);
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* GET /api/recipes/:recipeId - Get a single recipe by its ID, including ingredients and tags.
|
||||
*/
|
||||
router.get('/:recipeId', publicReadLimiter, validateRequest(recipeIdParamsSchema), async (req, res, next) => {
|
||||
try {
|
||||
// Explicitly parse req.params to coerce recipeId to a number
|
||||
const { params } = recipeIdParamsSchema.parse({ params: req.params });
|
||||
const recipe = await db.recipeRepo.getRecipeById(params.recipeId, req.log);
|
||||
res.json(recipe);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `Error fetching recipe ID ${req.params.recipeId}:`);
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
router.get(
|
||||
'/:recipeId',
|
||||
publicReadLimiter,
|
||||
validateRequest(recipeIdParamsSchema),
|
||||
async (req, res, next) => {
|
||||
try {
|
||||
// Explicitly parse req.params to coerce recipeId to a number
|
||||
const { params } = recipeIdParamsSchema.parse({ params: req.params });
|
||||
const recipe = await db.recipeRepo.getRecipeById(params.recipeId, req.log);
|
||||
sendSuccess(res, recipe);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `Error fetching recipe ID ${req.params.recipeId}:`);
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* POST /api/recipes/suggest - Generates a simple recipe suggestion from a list of ingredients.
|
||||
@@ -148,12 +159,15 @@ router.post(
|
||||
const suggestion = await aiService.generateRecipeSuggestion(body.ingredients, req.log);
|
||||
|
||||
if (!suggestion) {
|
||||
return res
|
||||
.status(503)
|
||||
.json({ message: 'AI service is currently unavailable or failed to generate a suggestion.' });
|
||||
return sendError(
|
||||
res,
|
||||
ErrorCode.SERVICE_UNAVAILABLE,
|
||||
'AI service is currently unavailable or failed to generate a suggestion.',
|
||||
503,
|
||||
);
|
||||
}
|
||||
|
||||
res.json({ suggestion });
|
||||
sendSuccess(res, { suggestion });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error generating recipe suggestion');
|
||||
next(error);
|
||||
|
||||
@@ -52,7 +52,7 @@ describe('Stats Routes (/api/stats)', () => {
|
||||
vi.mocked(db.adminRepo.getMostFrequentSaleItems).mockRejectedValue(dbError);
|
||||
const response = await supertest(app).get('/api/stats/most-frequent-sales');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Error');
|
||||
expect(response.body.error.message).toBe('DB Error');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error fetching most frequent sale items in /api/stats/most-frequent-sales:',
|
||||
@@ -62,8 +62,8 @@ describe('Stats Routes (/api/stats)', () => {
|
||||
it('should return 400 for invalid query parameters', async () => {
|
||||
const response = await supertest(app).get('/api/stats/most-frequent-sales?days=0&limit=abc');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toBeDefined();
|
||||
expect(response.body.errors.length).toBe(2);
|
||||
expect(response.body.error.details).toBeDefined();
|
||||
expect(response.body.error.details.length).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import * as db from '../services/db/index.db';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { optionalNumeric } from '../utils/zodUtils';
|
||||
import { publicReadLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -34,7 +35,7 @@ router.get(
|
||||
// We parse it here to apply Zod's coercions (string to number) and defaults.
|
||||
const { days, limit } = statsQuerySchema.parse(req.query);
|
||||
const items = await db.adminRepo.getMostFrequentSaleItems(days!, limit!, req.log);
|
||||
res.json(items);
|
||||
sendSuccess(res, items);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
{ error },
|
||||
|
||||
@@ -53,7 +53,7 @@ describe('System Routes (/api/system)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({
|
||||
expect(response.body.data).toEqual({
|
||||
success: true,
|
||||
message: 'Application is online and running under PM2.',
|
||||
});
|
||||
@@ -69,7 +69,7 @@ describe('System Routes (/api/system)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.data.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should return success: false when pm2 process does not exist', async () => {
|
||||
@@ -84,7 +84,7 @@ describe('System Routes (/api/system)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual({
|
||||
expect(response.body.data).toEqual({
|
||||
success: false,
|
||||
message: 'Application process is not running under PM2.',
|
||||
});
|
||||
@@ -92,12 +92,14 @@ describe('System Routes (/api/system)', () => {
|
||||
|
||||
it('should return 500 if pm2 command produces stderr output', async () => {
|
||||
// Arrange: Simulate a successful exit code but with content in stderr.
|
||||
const serviceError = new Error('PM2 command produced an error: A non-fatal warning occurred.');
|
||||
const serviceError = new Error(
|
||||
'PM2 command produced an error: A non-fatal warning occurred.',
|
||||
);
|
||||
vi.mocked(systemService.getPm2Status).mockRejectedValue(serviceError);
|
||||
|
||||
const response = await supertest(app).get('/api/system/pm2-status');
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe(serviceError.message);
|
||||
expect(response.body.error.message).toBe(serviceError.message);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic exec error', async () => {
|
||||
@@ -109,7 +111,7 @@ describe('System Routes (/api/system)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('System error');
|
||||
expect(response.body.error.message).toBe('System error');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -126,7 +128,7 @@ describe('System Routes (/api/system)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockCoordinates);
|
||||
expect(response.body.data).toEqual(mockCoordinates);
|
||||
});
|
||||
|
||||
it('should return 404 if the address cannot be geocoded', async () => {
|
||||
@@ -135,7 +137,7 @@ describe('System Routes (/api/system)', () => {
|
||||
.post('/api/system/geocode')
|
||||
.send({ address: 'Invalid Address' });
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Could not geocode the provided address.');
|
||||
expect(response.body.error.message).toBe('Could not geocode the provided address.');
|
||||
});
|
||||
|
||||
it('should return 500 if the geocoding service throws an error', async () => {
|
||||
@@ -153,7 +155,9 @@ describe('System Routes (/api/system)', () => {
|
||||
.send({ not_address: 'Victoria, BC' });
|
||||
expect(response.status).toBe(400);
|
||||
// Zod validation error message can vary slightly depending on configuration or version
|
||||
expect(response.body.errors[0].message).toMatch(/An address string is required|Required/i);
|
||||
expect(response.body.error.details[0].message).toMatch(
|
||||
/An address string is required|Required/i,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ import { requiredString } from '../utils/zodUtils';
|
||||
import { systemService } from '../services/systemService';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { geocodeLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess, sendError, ErrorCode } from '../utils/apiResponse';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -36,7 +37,7 @@ router.get(
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const status = await systemService.getPm2Status();
|
||||
res.json(status);
|
||||
sendSuccess(res, status);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
@@ -63,10 +64,10 @@ router.post(
|
||||
|
||||
if (!coordinates) {
|
||||
// This check remains, but now it only fails if BOTH services fail.
|
||||
return res.status(404).json({ message: 'Could not geocode the provided address.' });
|
||||
return sendError(res, ErrorCode.NOT_FOUND, 'Could not geocode the provided address.', 404);
|
||||
}
|
||||
|
||||
res.json(coordinates);
|
||||
sendSuccess(res, coordinates);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error geocoding address');
|
||||
next(error);
|
||||
|
||||
@@ -13,13 +13,16 @@ import {
|
||||
createMockNotification,
|
||||
createMockDietaryRestriction,
|
||||
createMockAppliance,
|
||||
createMockUserWithPasswordHash,
|
||||
createMockAddress,
|
||||
} from '../tests/utils/mockFactories';
|
||||
import { Appliance, Notification, DietaryRestriction } from '../types';
|
||||
import { ForeignKeyConstraintError, NotFoundError, ValidationError } from '../services/db/errors.db';
|
||||
import {
|
||||
ForeignKeyConstraintError,
|
||||
NotFoundError,
|
||||
ValidationError,
|
||||
} from '../services/db/errors.db';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
import '../tests/utils/mockLogger';
|
||||
import { cleanupFiles } from '../tests/utils/cleanupFiles';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { userService } from '../services/userService';
|
||||
@@ -148,7 +151,7 @@ describe('User Routes (/api/users)', () => {
|
||||
vi.mocked(db.userRepo.findUserProfileById).mockResolvedValue(mockUserProfile);
|
||||
const response = await supertest(app).get('/api/users/profile');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUserProfile);
|
||||
expect(response.body.data).toEqual(mockUserProfile);
|
||||
expect(db.userRepo.findUserProfileById).toHaveBeenCalledWith(
|
||||
mockUserProfile.user.user_id,
|
||||
expectLogger,
|
||||
@@ -161,7 +164,7 @@ describe('User Routes (/api/users)', () => {
|
||||
);
|
||||
const response = await supertest(app).get('/api/users/profile');
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toContain('Profile not found');
|
||||
expect(response.body.error.message).toContain('Profile not found');
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -184,7 +187,7 @@ describe('User Routes (/api/users)', () => {
|
||||
vi.mocked(db.personalizationRepo.getWatchedItems).mockResolvedValue(mockItems);
|
||||
const response = await supertest(app).get('/api/users/watched-items');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockItems);
|
||||
expect(response.body.data).toEqual(mockItems);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -210,7 +213,7 @@ describe('User Routes (/api/users)', () => {
|
||||
vi.mocked(db.personalizationRepo.addWatchedItem).mockResolvedValue(mockAddedItem);
|
||||
const response = await supertest(app).post('/api/users/watched-items').send(newItem);
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body).toEqual(mockAddedItem);
|
||||
expect(response.body.data).toEqual(mockAddedItem);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -230,8 +233,8 @@ describe('User Routes (/api/users)', () => {
|
||||
.post('/api/users/watched-items')
|
||||
.send({ category: 'Produce' });
|
||||
expect(response.status).toBe(400);
|
||||
// Check the 'errors' array for the specific validation message.
|
||||
expect(response.body.errors[0].message).toBe("Field 'itemName' is required.");
|
||||
// Check the 'error.details' array for the specific validation message.
|
||||
expect(response.body.error.details[0].message).toBe("Field 'itemName' is required.");
|
||||
});
|
||||
|
||||
it('should return 400 if category is missing', async () => {
|
||||
@@ -239,8 +242,8 @@ describe('User Routes (/api/users)', () => {
|
||||
.post('/api/users/watched-items')
|
||||
.send({ itemName: 'Apples' });
|
||||
expect(response.status).toBe(400);
|
||||
// Check the 'errors' array for the specific validation message.
|
||||
expect(response.body.errors[0].message).toBe("Field 'category' is required.");
|
||||
// Check the 'error.details' array for the specific validation message.
|
||||
expect(response.body.error.details[0].message).toBe("Field 'category' is required.");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -286,7 +289,7 @@ describe('User Routes (/api/users)', () => {
|
||||
vi.mocked(db.shoppingRepo.getShoppingLists).mockResolvedValue(mockLists);
|
||||
const response = await supertest(app).get('/api/users/shopping-lists');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockLists);
|
||||
expect(response.body.data).toEqual(mockLists);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -312,14 +315,14 @@ describe('User Routes (/api/users)', () => {
|
||||
.send({ name: 'Party Supplies' });
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body).toEqual(mockNewList);
|
||||
expect(response.body.data).toEqual(mockNewList);
|
||||
});
|
||||
|
||||
it('should return 400 if name is missing', async () => {
|
||||
const response = await supertest(app).post('/api/users/shopping-lists').send({});
|
||||
expect(response.status).toBe(400);
|
||||
// Check the 'errors' array for the specific validation message.
|
||||
expect(response.body.errors[0].message).toBe("Field 'name' is required.");
|
||||
// Check the 'error.details' array for the specific validation message.
|
||||
expect(response.body.error.details[0].message).toBe("Field 'name' is required.");
|
||||
});
|
||||
|
||||
it('should return 400 on foreign key constraint error', async () => {
|
||||
@@ -330,7 +333,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.post('/api/users/shopping-lists')
|
||||
.send({ name: 'Failing List' });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('User not found');
|
||||
expect(response.body.error.message).toBe('User not found');
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error during creation', async () => {
|
||||
@@ -340,15 +343,15 @@ describe('User Routes (/api/users)', () => {
|
||||
.post('/api/users/shopping-lists')
|
||||
.send({ name: 'Failing List' });
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('DB Connection Failed');
|
||||
expect(response.body.error.message).toBe('DB Connection Failed');
|
||||
expect(logger.error).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 for an invalid listId on DELETE', async () => {
|
||||
const response = await supertest(app).delete('/api/users/shopping-lists/abc');
|
||||
expect(response.status).toBe(400);
|
||||
// Check the 'errors' array for the specific validation message.
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
// Check the 'error.details' array for the specific validation message.
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
|
||||
describe('DELETE /shopping-lists/:listId', () => {
|
||||
@@ -378,7 +381,7 @@ describe('User Routes (/api/users)', () => {
|
||||
it('should return 400 for an invalid listId', async () => {
|
||||
const response = await supertest(app).delete('/api/users/shopping-lists/abc');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -387,7 +390,7 @@ describe('User Routes (/api/users)', () => {
|
||||
it('should return 400 if neither masterItemId nor customItemName are provided', async () => {
|
||||
const response = await supertest(app).post('/api/users/shopping-lists/1/items').send({});
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
expect(response.body.error.details[0].message).toBe(
|
||||
'Either masterItemId or customItemName must be provided.',
|
||||
);
|
||||
});
|
||||
@@ -436,7 +439,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.send(itemData);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body).toEqual(mockAddedItem);
|
||||
expect(response.body.data).toEqual(mockAddedItem);
|
||||
expect(db.shoppingRepo.addShoppingListItem).toHaveBeenCalledWith(
|
||||
listId,
|
||||
mockUserProfile.user.user_id,
|
||||
@@ -479,7 +482,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.send(updates);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUpdatedItem);
|
||||
expect(response.body.data).toEqual(mockUpdatedItem);
|
||||
expect(db.shoppingRepo.updateShoppingListItem).toHaveBeenCalledWith(
|
||||
itemId,
|
||||
mockUserProfile.user.user_id,
|
||||
@@ -511,7 +514,7 @@ describe('User Routes (/api/users)', () => {
|
||||
it('should return 400 if no update fields are provided for an item', async () => {
|
||||
const response = await supertest(app).put(`/api/users/shopping-lists/items/101`).send({});
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain(
|
||||
expect(response.body.error.details[0].message).toContain(
|
||||
'At least one field (quantity, is_purchased) must be provided.',
|
||||
);
|
||||
});
|
||||
@@ -554,7 +557,7 @@ describe('User Routes (/api/users)', () => {
|
||||
const response = await supertest(app).put('/api/users/profile').send(profileUpdates);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(updatedProfile);
|
||||
expect(response.body.data).toEqual(updatedProfile);
|
||||
});
|
||||
|
||||
it('should allow updating the profile with an empty string for avatar_url', async () => {
|
||||
@@ -569,7 +572,7 @@ describe('User Routes (/api/users)', () => {
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(updatedProfile);
|
||||
expect(response.body.data).toEqual(updatedProfile);
|
||||
// Verify that the Zod schema preprocessed the empty string to undefined
|
||||
expect(db.userRepo.updateUserProfile).toHaveBeenCalledWith(
|
||||
mockUserProfile.user.user_id,
|
||||
@@ -594,7 +597,7 @@ describe('User Routes (/api/users)', () => {
|
||||
it('should return 400 if the body is empty', async () => {
|
||||
const response = await supertest(app).put('/api/users/profile').send({});
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
expect(response.body.error.details[0].message).toBe(
|
||||
'At least one field to update must be provided.',
|
||||
);
|
||||
});
|
||||
@@ -607,7 +610,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.put('/api/users/profile/password')
|
||||
.send({ newPassword: 'a-Very-Strong-Password-456!' });
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe('Password updated successfully.');
|
||||
expect(response.body.data.message).toBe('Password updated successfully.');
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -629,7 +632,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.send({ newPassword: 'password123' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('Password is too weak.');
|
||||
expect(response.body.error.details[0].message).toContain('Password is too weak.');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -640,33 +643,43 @@ describe('User Routes (/api/users)', () => {
|
||||
.delete('/api/users/account')
|
||||
.send({ password: 'correct-password' });
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe('Account deleted successfully.');
|
||||
expect(userService.deleteUserAccount).toHaveBeenCalledWith('user-123', 'correct-password', expectLogger);
|
||||
expect(response.body.data.message).toBe('Account deleted successfully.');
|
||||
expect(userService.deleteUserAccount).toHaveBeenCalledWith(
|
||||
'user-123',
|
||||
'correct-password',
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 400 for an incorrect password', async () => {
|
||||
vi.mocked(userService.deleteUserAccount).mockRejectedValue(new ValidationError([], 'Incorrect password.'));
|
||||
vi.mocked(userService.deleteUserAccount).mockRejectedValue(
|
||||
new ValidationError([], 'Incorrect password.'),
|
||||
);
|
||||
const response = await supertest(app)
|
||||
.delete('/api/users/account')
|
||||
.send({ password: 'wrong-password' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('Incorrect password.');
|
||||
expect(response.body.error.message).toBe('Incorrect password.');
|
||||
});
|
||||
|
||||
it('should return 404 if the user to delete is not found', async () => {
|
||||
vi.mocked(userService.deleteUserAccount).mockRejectedValue(new NotFoundError('User not found.'));
|
||||
vi.mocked(userService.deleteUserAccount).mockRejectedValue(
|
||||
new NotFoundError('User not found.'),
|
||||
);
|
||||
|
||||
const response = await supertest(app)
|
||||
.delete('/api/users/account')
|
||||
.send({ password: 'any-password' });
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('User not found.');
|
||||
expect(response.body.error.message).toBe('User not found.');
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
vi.mocked(userService.deleteUserAccount).mockRejectedValue(new Error('DB Connection Failed'));
|
||||
vi.mocked(userService.deleteUserAccount).mockRejectedValue(
|
||||
new Error('DB Connection Failed'),
|
||||
);
|
||||
const response = await supertest(app)
|
||||
.delete('/api/users/account')
|
||||
.send({ password: 'correct-password' });
|
||||
@@ -691,7 +704,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.put('/api/users/profile/preferences')
|
||||
.send(preferencesUpdate);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(updatedProfile);
|
||||
expect(response.body.data).toEqual(updatedProfile);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
@@ -728,7 +741,7 @@ describe('User Routes (/api/users)', () => {
|
||||
);
|
||||
const response = await supertest(app).get('/api/users/me/dietary-restrictions');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockRestrictions);
|
||||
expect(response.body.data).toEqual(mockRestrictions);
|
||||
});
|
||||
|
||||
it('GET should return 500 on a generic database error', async () => {
|
||||
@@ -745,8 +758,8 @@ describe('User Routes (/api/users)', () => {
|
||||
it('should return 400 for an invalid masterItemId', async () => {
|
||||
const response = await supertest(app).delete('/api/users/watched-items/abc');
|
||||
expect(response.status).toBe(400);
|
||||
// Check the 'errors' array for the specific validation message.
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
// Check the 'error.details' array for the specific validation message.
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
|
||||
it('PUT should successfully set the restrictions', async () => {
|
||||
@@ -792,7 +805,7 @@ describe('User Routes (/api/users)', () => {
|
||||
vi.mocked(db.personalizationRepo.getUserAppliances).mockResolvedValue(mockAppliances);
|
||||
const response = await supertest(app).get('/api/users/me/appliances');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockAppliances);
|
||||
expect(response.body.data).toEqual(mockAppliances);
|
||||
});
|
||||
|
||||
it('GET should return 500 on a generic database error', async () => {
|
||||
@@ -823,7 +836,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.put('/api/users/me/appliances')
|
||||
.send({ applianceIds: [999] }); // Invalid ID
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('Invalid appliance ID');
|
||||
expect(response.body.error.message).toBe('Invalid appliance ID');
|
||||
});
|
||||
|
||||
it('PUT should return 500 on a generic database error', async () => {
|
||||
@@ -855,7 +868,7 @@ describe('User Routes (/api/users)', () => {
|
||||
const response = await supertest(app).get('/api/users/notifications?limit=10');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockNotifications);
|
||||
expect(response.body.data).toEqual(mockNotifications);
|
||||
expect(db.notificationRepo.getNotificationsForUser).toHaveBeenCalledWith(
|
||||
'user-123',
|
||||
10,
|
||||
@@ -875,7 +888,7 @@ describe('User Routes (/api/users)', () => {
|
||||
const response = await supertest(app).get('/api/users/notifications?includeRead=true');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockNotifications);
|
||||
expect(response.body.data).toEqual(mockNotifications);
|
||||
expect(db.notificationRepo.getNotificationsForUser).toHaveBeenCalledWith(
|
||||
'user-123',
|
||||
20, // default limit
|
||||
@@ -935,7 +948,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.post('/api/users/notifications/abc/mark-read')
|
||||
.send({});
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -950,7 +963,7 @@ describe('User Routes (/api/users)', () => {
|
||||
vi.mocked(userService.getUserAddress).mockResolvedValue(mockAddress);
|
||||
const response = await supertest(appWithUser).get('/api/users/addresses/1');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockAddress);
|
||||
expect(response.body.data).toEqual(mockAddress);
|
||||
});
|
||||
|
||||
it('GET /addresses/:addressId should return 500 on a generic database error', async () => {
|
||||
@@ -972,10 +985,12 @@ describe('User Routes (/api/users)', () => {
|
||||
});
|
||||
|
||||
it('GET /addresses/:addressId should return 403 if address does not belong to user', async () => {
|
||||
vi.mocked(userService.getUserAddress).mockRejectedValue(new ValidationError([], 'Forbidden'));
|
||||
vi.mocked(userService.getUserAddress).mockRejectedValue(
|
||||
new ValidationError([], 'Forbidden'),
|
||||
);
|
||||
const response = await supertest(app).get('/api/users/addresses/2'); // Requesting address 2
|
||||
expect(response.status).toBe(400); // ValidationError maps to 400 by default in the test error handler
|
||||
expect(response.body.message).toBe('Forbidden');
|
||||
expect(response.body.error.message).toBe('Forbidden');
|
||||
});
|
||||
|
||||
it('GET /addresses/:addressId should return 404 if address not found', async () => {
|
||||
@@ -989,16 +1004,14 @@ describe('User Routes (/api/users)', () => {
|
||||
);
|
||||
const response = await supertest(appWithUser).get('/api/users/addresses/1');
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Address not found.');
|
||||
expect(response.body.error.message).toBe('Address not found.');
|
||||
});
|
||||
|
||||
it('PUT /profile/address should call upsertAddress and updateUserProfile if needed', async () => {
|
||||
const addressData = { address_line_1: '123 New St' };
|
||||
vi.mocked(userService.upsertUserAddress).mockResolvedValue(5);
|
||||
|
||||
const response = await supertest(app)
|
||||
.put('/api/users/profile/address')
|
||||
.send(addressData);
|
||||
const response = await supertest(app).put('/api/users/profile/address').send(addressData);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(userService.upsertUserAddress).toHaveBeenCalledWith(
|
||||
@@ -1020,7 +1033,7 @@ describe('User Routes (/api/users)', () => {
|
||||
it('should return 400 if the address body is empty', async () => {
|
||||
const response = await supertest(app).put('/api/users/profile/address').send({});
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain(
|
||||
expect(response.body.error.details[0].message).toContain(
|
||||
'At least one address field must be provided',
|
||||
);
|
||||
});
|
||||
@@ -1042,7 +1055,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.attach('avatar', Buffer.from('dummy-image-content'), dummyImagePath);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.avatar_url).toContain('https://example.com/uploads/avatars/');
|
||||
expect(response.body.data.avatar_url).toContain('https://example.com/uploads/avatars/');
|
||||
expect(userService.updateUserAvatar).toHaveBeenCalledWith(
|
||||
mockUserProfile.user.user_id,
|
||||
expect.any(Object),
|
||||
@@ -1068,7 +1081,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.attach('avatar', Buffer.from('this is not an image'), dummyTextPath);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('Only image files are allowed!');
|
||||
expect(response.body.error.message).toBe('Only image files are allowed!');
|
||||
});
|
||||
|
||||
it('should return 400 if the uploaded file is too large', async () => {
|
||||
@@ -1081,6 +1094,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.attach('avatar', largeFile, dummyImagePath);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
// Multer middleware returns non-envelope format directly
|
||||
expect(response.body.message).toContain('File too large');
|
||||
});
|
||||
|
||||
@@ -1088,7 +1102,7 @@ describe('User Routes (/api/users)', () => {
|
||||
const response = await supertest(app).post('/api/users/profile/avatar'); // No .attach() call
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toBe('No avatar file uploaded.');
|
||||
expect(response.body.error.message).toBe('No avatar file uploaded.');
|
||||
});
|
||||
|
||||
it('should clean up the uploaded file if updating the profile fails', async () => {
|
||||
@@ -1115,7 +1129,7 @@ describe('User Routes (/api/users)', () => {
|
||||
it('should return 400 for a non-numeric address ID', async () => {
|
||||
const response = await supertest(app).get('/api/users/addresses/abc');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1141,16 +1155,18 @@ describe('User Routes (/api/users)', () => {
|
||||
});
|
||||
|
||||
it('DELETE /recipes/:recipeId should return 404 if recipe not found', async () => {
|
||||
vi.mocked(db.recipeRepo.deleteRecipe).mockRejectedValue(new NotFoundError('Recipe not found'));
|
||||
vi.mocked(db.recipeRepo.deleteRecipe).mockRejectedValue(
|
||||
new NotFoundError('Recipe not found'),
|
||||
);
|
||||
const response = await supertest(app).delete('/api/users/recipes/999');
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Recipe not found');
|
||||
expect(response.body.error.message).toBe('Recipe not found');
|
||||
});
|
||||
|
||||
it('DELETE /recipes/:recipeId should return 400 for invalid recipe ID', async () => {
|
||||
const response = await supertest(app).delete('/api/users/recipes/abc');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
|
||||
it("PUT /recipes/:recipeId should update a user's own recipe", async () => {
|
||||
@@ -1161,7 +1177,7 @@ describe('User Routes (/api/users)', () => {
|
||||
const response = await supertest(app).put('/api/users/recipes/1').send(updates);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockUpdatedRecipe);
|
||||
expect(response.body.data).toEqual(mockUpdatedRecipe);
|
||||
expect(db.recipeRepo.updateRecipe).toHaveBeenCalledWith(
|
||||
1,
|
||||
mockUserProfile.user.user_id,
|
||||
@@ -1191,7 +1207,7 @@ describe('User Routes (/api/users)', () => {
|
||||
it('PUT /recipes/:recipeId should return 400 if no update fields are provided', async () => {
|
||||
const response = await supertest(app).put('/api/users/recipes/1').send({});
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe('No fields provided to update.');
|
||||
expect(response.body.error.details[0].message).toBe('No fields provided to update.');
|
||||
});
|
||||
|
||||
it('PUT /recipes/:recipeId should return 400 for invalid recipe ID', async () => {
|
||||
@@ -1199,7 +1215,7 @@ describe('User Routes (/api/users)', () => {
|
||||
.put('/api/users/recipes/abc')
|
||||
.send({ name: 'New Name' });
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toContain('received NaN');
|
||||
expect(response.body.error.details[0].message).toContain('received NaN');
|
||||
});
|
||||
|
||||
it('GET /shopping-lists/:listId should return 404 if list is not found', async () => {
|
||||
@@ -1208,7 +1224,7 @@ describe('User Routes (/api/users)', () => {
|
||||
);
|
||||
const response = await supertest(app).get('/api/users/shopping-lists/999');
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.message).toBe('Shopping list not found');
|
||||
expect(response.body.error.message).toBe('Shopping list not found');
|
||||
});
|
||||
|
||||
it('GET /shopping-lists/:listId should return a single shopping list', async () => {
|
||||
@@ -1219,7 +1235,7 @@ describe('User Routes (/api/users)', () => {
|
||||
vi.mocked(db.shoppingRepo.getShoppingListById).mockResolvedValue(mockList);
|
||||
const response = await supertest(app).get('/api/users/shopping-lists/1');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockList);
|
||||
expect(response.body.data).toEqual(mockList);
|
||||
expect(db.shoppingRepo.getShoppingListById).toHaveBeenCalledWith(
|
||||
1,
|
||||
mockUserProfile.user.user_id,
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
// src/routes/user.routes.ts
|
||||
import express, { Request, Response, NextFunction } from 'express';
|
||||
import passport from './passport.routes';
|
||||
import multer from 'multer'; // Keep for MulterError type check
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { z } from 'zod';
|
||||
// Removed: import { logger } from '../services/logger.server';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { UserProfile } from '../types';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import {
|
||||
createUploadMiddleware,
|
||||
handleMulterError,
|
||||
} from '../middleware/multer.middleware';
|
||||
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { userService } from '../services/userService';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
@@ -36,6 +32,7 @@ import {
|
||||
userSensitiveUpdateLimiter,
|
||||
userUploadLimiter,
|
||||
} from '../config/rateLimiters';
|
||||
import { sendSuccess, sendNoContent, sendError, ErrorCode } from '../utils/apiResponse';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
@@ -128,10 +125,14 @@ router.post(
|
||||
// The try-catch block was already correct here.
|
||||
try {
|
||||
// The `requireFileUpload` middleware is not used here, so we must check for `req.file`.
|
||||
if (!req.file) return res.status(400).json({ message: 'No avatar file uploaded.' });
|
||||
if (!req.file) return sendError(res, ErrorCode.BAD_REQUEST, 'No avatar file uploaded.', 400);
|
||||
const userProfile = req.user as UserProfile;
|
||||
const updatedProfile = await userService.updateUserAvatar(userProfile.user.user_id, req.file, req.log);
|
||||
res.json(updatedProfile);
|
||||
const updatedProfile = await userService.updateUserAvatar(
|
||||
userProfile.user.user_id,
|
||||
req.file,
|
||||
req.log,
|
||||
);
|
||||
sendSuccess(res, updatedProfile);
|
||||
} catch (error) {
|
||||
// If an error occurs after the file has been uploaded (e.g., DB error),
|
||||
// we must clean up the orphaned file from the disk.
|
||||
@@ -146,17 +147,14 @@ router.post(
|
||||
* GET /api/users/notifications - Get notifications for the authenticated user.
|
||||
* Supports pagination with `limit` and `offset` query parameters.
|
||||
*/
|
||||
type GetNotificationsRequest = z.infer<typeof notificationQuerySchema>;
|
||||
router.get(
|
||||
'/notifications',
|
||||
validateRequest(notificationQuerySchema),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
// Cast to UserProfile to access user properties safely.
|
||||
const userProfile = req.user as UserProfile;
|
||||
// Apply ADR-003 pattern for type safety
|
||||
try {
|
||||
const { query } = req as unknown as GetNotificationsRequest;
|
||||
const parsedQuery = notificationQuerySchema.parse({ query: req.query }).query;
|
||||
const parsedQuery = notificationQuerySchema.shape.query.parse(req.query);
|
||||
const notifications = await db.notificationRepo.getNotificationsForUser(
|
||||
userProfile.user.user_id,
|
||||
parsedQuery.limit!,
|
||||
@@ -164,7 +162,7 @@ router.get(
|
||||
parsedQuery.includeRead!,
|
||||
req.log,
|
||||
);
|
||||
res.json(notifications);
|
||||
sendSuccess(res, notifications);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching notifications');
|
||||
next(error);
|
||||
@@ -182,7 +180,7 @@ router.post(
|
||||
try {
|
||||
const userProfile = req.user as UserProfile;
|
||||
await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log);
|
||||
res.status(204).send(); // No Content
|
||||
sendNoContent(res);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error marking all notifications as read');
|
||||
next(error);
|
||||
@@ -208,7 +206,7 @@ router.post(
|
||||
userProfile.user.user_id,
|
||||
req.log,
|
||||
);
|
||||
res.status(204).send(); // Success, no content to return
|
||||
sendNoContent(res);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error marking notification as read');
|
||||
next(error);
|
||||
@@ -230,7 +228,7 @@ router.get('/profile', validateRequest(emptySchema), async (req, res, next: Next
|
||||
userProfile.user.user_id,
|
||||
req.log,
|
||||
);
|
||||
res.json(fullUserProfile);
|
||||
sendSuccess(res, fullUserProfile);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] GET /api/users/profile - ERROR`);
|
||||
next(error);
|
||||
@@ -256,7 +254,7 @@ router.put(
|
||||
body,
|
||||
req.log,
|
||||
);
|
||||
res.json(updatedProfile);
|
||||
sendSuccess(res, updatedProfile);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] PUT /api/users/profile - ERROR`);
|
||||
next(error);
|
||||
@@ -280,7 +278,7 @@ router.put(
|
||||
|
||||
try {
|
||||
await userService.updateUserPassword(userProfile.user.user_id, body.newPassword, req.log);
|
||||
res.status(200).json({ message: 'Password updated successfully.' });
|
||||
sendSuccess(res, { message: 'Password updated successfully.' });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] PUT /api/users/profile/password - ERROR`);
|
||||
next(error);
|
||||
@@ -304,7 +302,7 @@ router.delete(
|
||||
|
||||
try {
|
||||
await userService.deleteUserAccount(userProfile.user.user_id, body.password, req.log);
|
||||
res.status(200).json({ message: 'Account deleted successfully.' });
|
||||
sendSuccess(res, { message: 'Account deleted successfully.' });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] DELETE /api/users/account - ERROR`);
|
||||
next(error);
|
||||
@@ -320,7 +318,7 @@ router.get('/watched-items', validateRequest(emptySchema), async (req, res, next
|
||||
const userProfile = req.user as UserProfile;
|
||||
try {
|
||||
const items = await db.personalizationRepo.getWatchedItems(userProfile.user.user_id, req.log);
|
||||
res.json(items);
|
||||
sendSuccess(res, items);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] GET /api/users/watched-items - ERROR`);
|
||||
next(error);
|
||||
@@ -347,10 +345,10 @@ router.post(
|
||||
body.category,
|
||||
req.log,
|
||||
);
|
||||
res.status(201).json(newItem);
|
||||
sendSuccess(res, newItem, 201);
|
||||
} catch (error) {
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, error.message, 400);
|
||||
}
|
||||
req.log.error({ error, body: req.body }, 'Failed to add watched item');
|
||||
next(error);
|
||||
@@ -378,7 +376,7 @@ router.delete(
|
||||
params.masterItemId,
|
||||
req.log,
|
||||
);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] DELETE /api/users/watched-items/:masterItemId - ERROR`);
|
||||
next(error);
|
||||
@@ -397,7 +395,7 @@ router.get(
|
||||
const userProfile = req.user as UserProfile;
|
||||
try {
|
||||
const lists = await db.shoppingRepo.getShoppingLists(userProfile.user.user_id, req.log);
|
||||
res.json(lists);
|
||||
sendSuccess(res, lists);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] GET /api/users/shopping-lists - ERROR`);
|
||||
next(error);
|
||||
@@ -423,7 +421,7 @@ router.get(
|
||||
userProfile.user.user_id,
|
||||
req.log,
|
||||
);
|
||||
res.json(list);
|
||||
sendSuccess(res, list);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
{ error, listId: params.listId },
|
||||
@@ -453,10 +451,10 @@ router.post(
|
||||
body.name,
|
||||
req.log,
|
||||
);
|
||||
res.status(201).json(newList);
|
||||
sendSuccess(res, newList, 201);
|
||||
} catch (error) {
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, error.message, 400);
|
||||
}
|
||||
req.log.error({ error, body: req.body }, 'Failed to create shopping list');
|
||||
next(error);
|
||||
@@ -478,7 +476,7 @@ router.delete(
|
||||
const { params } = req as unknown as GetShoppingListRequest;
|
||||
try {
|
||||
await db.shoppingRepo.deleteShoppingList(params.listId, userProfile.user.user_id, req.log);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
|
||||
req.log.error(
|
||||
@@ -524,12 +522,15 @@ router.post(
|
||||
body,
|
||||
req.log,
|
||||
);
|
||||
res.status(201).json(newItem);
|
||||
sendSuccess(res, newItem, 201);
|
||||
} catch (error) {
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, error.message, 400);
|
||||
}
|
||||
req.log.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
|
||||
req.log.error(
|
||||
{ error, params: req.params, body: req.body },
|
||||
'Failed to add shopping list item',
|
||||
);
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -565,7 +566,7 @@ router.put(
|
||||
body,
|
||||
req.log,
|
||||
);
|
||||
res.json(updatedItem);
|
||||
sendSuccess(res, updatedItem);
|
||||
} catch (error: unknown) {
|
||||
req.log.error(
|
||||
{ error, params: req.params, body: req.body },
|
||||
@@ -591,8 +592,12 @@ router.delete(
|
||||
// Apply ADR-003 pattern for type safety
|
||||
const { params } = req as unknown as DeleteShoppingListItemRequest;
|
||||
try {
|
||||
await db.shoppingRepo.removeShoppingListItem(params.itemId, userProfile.user.user_id, req.log);
|
||||
res.status(204).send();
|
||||
await db.shoppingRepo.removeShoppingListItem(
|
||||
params.itemId,
|
||||
userProfile.user.user_id,
|
||||
req.log,
|
||||
);
|
||||
sendNoContent(res);
|
||||
} catch (error: unknown) {
|
||||
req.log.error(
|
||||
{ error, params: req.params },
|
||||
@@ -625,7 +630,7 @@ router.put(
|
||||
body,
|
||||
req.log,
|
||||
);
|
||||
res.json(updatedProfile);
|
||||
sendSuccess(res, updatedProfile);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] PUT /api/users/profile/preferences - ERROR`);
|
||||
next(error);
|
||||
@@ -644,7 +649,7 @@ router.get(
|
||||
userProfile.user.user_id,
|
||||
req.log,
|
||||
);
|
||||
res.json(restrictions);
|
||||
sendSuccess(res, restrictions);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] GET /api/users/me/dietary-restrictions - ERROR`);
|
||||
next(error);
|
||||
@@ -671,10 +676,10 @@ router.put(
|
||||
body.restrictionIds,
|
||||
req.log,
|
||||
);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error) {
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, error.message, 400);
|
||||
}
|
||||
req.log.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
|
||||
next(error);
|
||||
@@ -690,7 +695,7 @@ router.get('/me/appliances', validateRequest(emptySchema), async (req, res, next
|
||||
userProfile.user.user_id,
|
||||
req.log,
|
||||
);
|
||||
res.json(appliances);
|
||||
sendSuccess(res, appliances);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, `[ROUTE] GET /api/users/me/appliances - ERROR`);
|
||||
next(error);
|
||||
@@ -716,10 +721,10 @@ router.put(
|
||||
body.applianceIds,
|
||||
req.log,
|
||||
);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error) {
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
return sendError(res, ErrorCode.BAD_REQUEST, error.message, 400);
|
||||
}
|
||||
req.log.error({ error, body: req.body }, 'Failed to set user appliances');
|
||||
next(error);
|
||||
@@ -743,7 +748,7 @@ router.get(
|
||||
try {
|
||||
const addressId = params.addressId;
|
||||
const address = await userService.getUserAddress(userProfile, addressId, req.log);
|
||||
res.json(address);
|
||||
sendSuccess(res, address);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching user address');
|
||||
next(error);
|
||||
@@ -783,7 +788,7 @@ router.put(
|
||||
// encapsulated in a single service method that manages the transaction.
|
||||
// This ensures both the address upsert and the user profile update are atomic.
|
||||
const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed.
|
||||
res.status(200).json({ message: 'Address updated successfully', address_id: addressId });
|
||||
sendSuccess(res, { message: 'Address updated successfully', address_id: addressId });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error updating user address');
|
||||
next(error);
|
||||
@@ -803,12 +808,12 @@ router.post(
|
||||
const { body } = req as unknown as z.infer<typeof createRecipeSchema>;
|
||||
try {
|
||||
const recipe = await db.recipeRepo.createRecipe(userProfile.user.user_id, body, req.log);
|
||||
res.status(201).json(recipe);
|
||||
sendSuccess(res, recipe, 201);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error creating recipe');
|
||||
next(error);
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
@@ -827,7 +832,7 @@ router.delete(
|
||||
const { params } = req as unknown as DeleteRecipeRequest;
|
||||
try {
|
||||
await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, false, req.log);
|
||||
res.status(204).send();
|
||||
sendNoContent(res);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
{ error, params: req.params },
|
||||
@@ -872,7 +877,7 @@ router.put(
|
||||
body,
|
||||
req.log,
|
||||
);
|
||||
res.json(updatedRecipe);
|
||||
sendSuccess(res, updatedRecipe);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
{ error, params: req.params, body: req.body },
|
||||
|
||||
@@ -1,13 +1,34 @@
|
||||
// src/services/flyerPersistenceService.server.ts
|
||||
import type { Logger } from 'pino';
|
||||
import { withTransaction } from './db/connection.db';
|
||||
import type { PoolClient } from 'pg';
|
||||
import { withTransaction as defaultWithTransaction } from './db/connection.db';
|
||||
import { createFlyerAndItems } from './db/flyer.db';
|
||||
import { AdminRepository } from './db/admin.db';
|
||||
import { GamificationRepository } from './db/gamification.db';
|
||||
import { cacheService } from './cacheService.server';
|
||||
import type { FlyerInsert, FlyerItemInsert, Flyer } from '../types';
|
||||
|
||||
export type WithTransactionFn = <T>(callback: (client: PoolClient) => Promise<T>) => Promise<T>;
|
||||
|
||||
export class FlyerPersistenceService {
|
||||
private withTransaction: WithTransactionFn;
|
||||
|
||||
constructor(withTransactionFn: WithTransactionFn = defaultWithTransaction) {
|
||||
this.withTransaction = withTransactionFn;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows replacing the withTransaction function at runtime.
|
||||
* This is primarily used for testing to inject mock implementations.
|
||||
* @internal
|
||||
*/
|
||||
_setWithTransaction(fn: WithTransactionFn): void {
|
||||
console.error(
|
||||
`[DEBUG] FlyerPersistenceService._setWithTransaction called, replacing withTransaction function`,
|
||||
);
|
||||
this.withTransaction = fn;
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves the flyer and its items to the database within a transaction.
|
||||
* Also logs the activity and invalidates related cache entries.
|
||||
@@ -18,7 +39,13 @@ export class FlyerPersistenceService {
|
||||
userId: string | undefined,
|
||||
logger: Logger,
|
||||
): Promise<Flyer> {
|
||||
const flyer = await withTransaction(async (client) => {
|
||||
console.error(
|
||||
`[DEBUG] FlyerPersistenceService.saveFlyer called, about to invoke withTransaction`,
|
||||
);
|
||||
console.error(
|
||||
`[DEBUG] withTransaction function name: ${this.withTransaction.name || 'anonymous'}`,
|
||||
);
|
||||
const flyer = await this.withTransaction(async (client) => {
|
||||
const { flyer, items } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);
|
||||
|
||||
logger.info(
|
||||
@@ -52,4 +79,4 @@ export class FlyerPersistenceService {
|
||||
|
||||
return flyer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
import { UnrecoverableError, type Job, type Queue } from 'bullmq';
|
||||
import path from 'path';
|
||||
import type { Logger } from 'pino';
|
||||
import type { FlyerFileHandler, IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
|
||||
import type { FlyerFileHandler, IFileSystem } from './flyerFileHandler.server';
|
||||
import type { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import * as db from './db/index.db';
|
||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
@@ -11,7 +11,6 @@ import {
|
||||
FlyerProcessingError,
|
||||
PdfConversionError,
|
||||
AiDataValidationError,
|
||||
UnsupportedFileTypeError,
|
||||
} from './processingErrors';
|
||||
import { NotFoundError } from './db/errors.db';
|
||||
import { logger as globalLogger } from './logger.server'; // This was a duplicate, fixed.
|
||||
@@ -44,6 +43,14 @@ export class FlyerProcessingService {
|
||||
private persistenceService: FlyerPersistenceService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Provides access to the persistence service for testing purposes.
|
||||
* @internal
|
||||
*/
|
||||
_getPersistenceService(): FlyerPersistenceService {
|
||||
return this.persistenceService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Orchestrates the processing of a flyer job.
|
||||
* @param job The BullMQ job containing flyer data.
|
||||
@@ -55,9 +62,24 @@ export class FlyerProcessingService {
|
||||
logger.info('Picked up flyer processing job.');
|
||||
|
||||
const stages: ProcessingStage[] = [
|
||||
{ name: 'Preparing Inputs', status: 'pending', critical: true, detail: 'Validating and preparing file...' },
|
||||
{ name: 'Image Optimization', status: 'pending', critical: true, detail: 'Compressing and resizing images...' },
|
||||
{ name: 'Extracting Data with AI', status: 'pending', critical: true, detail: 'Communicating with AI model...' },
|
||||
{
|
||||
name: 'Preparing Inputs',
|
||||
status: 'pending',
|
||||
critical: true,
|
||||
detail: 'Validating and preparing file...',
|
||||
},
|
||||
{
|
||||
name: 'Image Optimization',
|
||||
status: 'pending',
|
||||
critical: true,
|
||||
detail: 'Compressing and resizing images...',
|
||||
},
|
||||
{
|
||||
name: 'Extracting Data with AI',
|
||||
status: 'pending',
|
||||
critical: true,
|
||||
detail: 'Communicating with AI model...',
|
||||
},
|
||||
{ name: 'Transforming AI Data', status: 'pending', critical: true },
|
||||
{ name: 'Saving to Database', status: 'pending', critical: true },
|
||||
];
|
||||
@@ -69,7 +91,9 @@ export class FlyerProcessingService {
|
||||
// Stage 1: Prepare Inputs (e.g., convert PDF to images)
|
||||
stages[0].status = 'in-progress';
|
||||
await job.updateProgress({ stages });
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Calling fileHandler.prepareImageInputs for ${job.data.filePath}`);
|
||||
console.error(
|
||||
`[WORKER DEBUG] ProcessingService: Calling fileHandler.prepareImageInputs for ${job.data.filePath}`,
|
||||
);
|
||||
|
||||
const { imagePaths, createdImagePaths } = await this.fileHandler.prepareImageInputs(
|
||||
job.data.filePath,
|
||||
@@ -77,7 +101,9 @@ export class FlyerProcessingService {
|
||||
logger,
|
||||
);
|
||||
allFilePaths.push(...createdImagePaths);
|
||||
console.error(`[WORKER DEBUG] ProcessingService: fileHandler returned ${imagePaths.length} images.`);
|
||||
console.error(
|
||||
`[WORKER DEBUG] ProcessingService: fileHandler returned ${imagePaths.length} images.`,
|
||||
);
|
||||
stages[0].status = 'completed';
|
||||
stages[0].detail = `${imagePaths.length} page(s) ready for AI.`;
|
||||
await job.updateProgress({ stages });
|
||||
@@ -96,7 +122,9 @@ export class FlyerProcessingService {
|
||||
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Calling aiProcessor.extractAndValidateData`);
|
||||
const aiResult = await this.aiProcessor.extractAndValidateData(imagePaths, job.data, logger);
|
||||
console.error(`[WORKER DEBUG] ProcessingService: aiProcessor returned data for store: ${aiResult.data.store_name}`);
|
||||
console.error(
|
||||
`[WORKER DEBUG] ProcessingService: aiProcessor returned data for store: ${aiResult.data.store_name}`,
|
||||
);
|
||||
stages[2].status = 'completed';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
@@ -109,7 +137,9 @@ export class FlyerProcessingService {
|
||||
const primaryImagePath = imagePaths[0].path;
|
||||
const imageFileName = path.basename(primaryImagePath);
|
||||
const iconsDir = path.join(path.dirname(primaryImagePath), 'icons');
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Generating icon from ${primaryImagePath} to ${iconsDir}`);
|
||||
console.error(
|
||||
`[WORKER DEBUG] ProcessingService: Generating icon from ${primaryImagePath} to ${iconsDir}`,
|
||||
);
|
||||
const iconFileName = await generateFlyerIcon(primaryImagePath, iconsDir, logger);
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Icon generated: ${iconFileName}`);
|
||||
|
||||
@@ -120,8 +150,16 @@ export class FlyerProcessingService {
|
||||
// Ensure we have a valid base URL, preferring the one from the job data.
|
||||
// This is critical for workers where process.env.FRONTEND_URL might be undefined.
|
||||
const baseUrl = job.data.baseUrl || process.env.FRONTEND_URL || 'http://localhost:3000';
|
||||
console.error(`[DEBUG] FlyerProcessingService resolved baseUrl: "${baseUrl}" (job.data.baseUrl: "${job.data.baseUrl}", env.FRONTEND_URL: "${process.env.FRONTEND_URL}")`);
|
||||
console.error('[DEBUG] FlyerProcessingService calling transformer with:', { originalFileName: job.data.originalFileName, imageFileName, iconFileName, checksum: job.data.checksum, baseUrl });
|
||||
console.error(
|
||||
`[DEBUG] FlyerProcessingService resolved baseUrl: "${baseUrl}" (job.data.baseUrl: "${job.data.baseUrl}", env.FRONTEND_URL: "${process.env.FRONTEND_URL}")`,
|
||||
);
|
||||
console.error('[DEBUG] FlyerProcessingService calling transformer with:', {
|
||||
originalFileName: job.data.originalFileName,
|
||||
imageFileName,
|
||||
iconFileName,
|
||||
checksum: job.data.checksum,
|
||||
baseUrl,
|
||||
});
|
||||
|
||||
const { flyerData, itemsForDb } = await this.transformer.transform(
|
||||
aiResult,
|
||||
@@ -133,7 +171,10 @@ export class FlyerProcessingService {
|
||||
logger,
|
||||
baseUrl,
|
||||
);
|
||||
console.error('[DEBUG] FlyerProcessingService transformer output URLs:', { imageUrl: flyerData.image_url, iconUrl: flyerData.icon_url });
|
||||
console.error('[DEBUG] FlyerProcessingService transformer output URLs:', {
|
||||
imageUrl: flyerData.image_url,
|
||||
iconUrl: flyerData.icon_url,
|
||||
});
|
||||
console.error('[DEBUG] Full Flyer Data to be saved:', JSON.stringify(flyerData, null, 2));
|
||||
stages[3].status = 'completed';
|
||||
await job.updateProgress({ stages });
|
||||
@@ -170,10 +211,15 @@ export class FlyerProcessingService {
|
||||
|
||||
return { flyerId };
|
||||
} catch (error) {
|
||||
logger.warn('Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.');
|
||||
logger.warn(
|
||||
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||
);
|
||||
// Add detailed logging of the raw error object
|
||||
if (error instanceof Error) {
|
||||
logger.error({ err: error, stack: error.stack }, 'Raw error object in processJob catch block');
|
||||
logger.error(
|
||||
{ err: error, stack: error.stack },
|
||||
'Raw error object in processJob catch block',
|
||||
);
|
||||
} else {
|
||||
logger.error({ error }, 'Raw non-Error object in processJob catch block');
|
||||
}
|
||||
@@ -190,7 +236,9 @@ export class FlyerProcessingService {
|
||||
* @param job The BullMQ job containing cleanup data.
|
||||
* @returns An object indicating the status of the cleanup operation.
|
||||
*/
|
||||
async processCleanupJob(job: Job<CleanupJobData>): Promise<{ status: string; deletedCount?: number; reason?: string }> {
|
||||
async processCleanupJob(
|
||||
job: Job<CleanupJobData>,
|
||||
): Promise<{ status: string; deletedCount?: number; reason?: string }> {
|
||||
const logger = globalLogger.child({ jobId: job.id, jobName: job.name, ...job.data });
|
||||
logger.info('Picked up file cleanup job.');
|
||||
|
||||
@@ -199,19 +247,25 @@ export class FlyerProcessingService {
|
||||
|
||||
// If no paths are provided (e.g., from a manual trigger), attempt to derive them from the database.
|
||||
if (!pathsToDelete || pathsToDelete.length === 0) {
|
||||
logger.warn(`Cleanup job for flyer ${flyerId} received no paths. Attempting to derive paths from DB.`);
|
||||
logger.warn(
|
||||
`Cleanup job for flyer ${flyerId} received no paths. Attempting to derive paths from DB.`,
|
||||
);
|
||||
try {
|
||||
const flyer = await db.flyerRepo.getFlyerById(flyerId);
|
||||
const derivedPaths: string[] = [];
|
||||
// This path needs to be configurable and match where multer saves files.
|
||||
const storagePath = process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
||||
const storagePath =
|
||||
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
||||
|
||||
if (flyer.image_url) {
|
||||
try {
|
||||
const imageName = path.basename(new URL(flyer.image_url).pathname);
|
||||
derivedPaths.push(path.join(storagePath, imageName));
|
||||
} catch (urlError) {
|
||||
logger.error({ err: urlError, url: flyer.image_url }, 'Failed to parse flyer.image_url to derive file path.');
|
||||
logger.error(
|
||||
{ err: urlError, url: flyer.image_url },
|
||||
'Failed to parse flyer.image_url to derive file path.',
|
||||
);
|
||||
}
|
||||
}
|
||||
if (flyer.icon_url) {
|
||||
@@ -219,13 +273,19 @@ export class FlyerProcessingService {
|
||||
const iconName = path.basename(new URL(flyer.icon_url).pathname);
|
||||
derivedPaths.push(path.join(storagePath, 'icons', iconName));
|
||||
} catch (urlError) {
|
||||
logger.error({ err: urlError, url: flyer.icon_url }, 'Failed to parse flyer.icon_url to derive file path.');
|
||||
logger.error(
|
||||
{ err: urlError, url: flyer.icon_url },
|
||||
'Failed to parse flyer.icon_url to derive file path.',
|
||||
);
|
||||
}
|
||||
}
|
||||
pathsToDelete = derivedPaths;
|
||||
} catch (error) {
|
||||
if (error instanceof NotFoundError) {
|
||||
logger.error({ flyerId }, 'Cannot derive cleanup paths because flyer was not found in DB.');
|
||||
logger.error(
|
||||
{ flyerId },
|
||||
'Cannot derive cleanup paths because flyer was not found in DB.',
|
||||
);
|
||||
// Do not throw. Allow the job to be marked as skipped if no paths are found.
|
||||
} else {
|
||||
throw error; // Re-throw other DB errors to allow for retries.
|
||||
@@ -259,7 +319,9 @@ export class FlyerProcessingService {
|
||||
const failedDeletions = results.filter((r) => r.status === 'rejected');
|
||||
if (failedDeletions.length > 0) {
|
||||
const failedPaths = pathsToDelete.filter((_, i) => results[i].status === 'rejected');
|
||||
throw new Error(`Failed to delete ${failedDeletions.length} file(s): ${failedPaths.join(', ')}`);
|
||||
throw new Error(
|
||||
`Failed to delete ${failedDeletions.length} file(s): ${failedPaths.join(', ')}`,
|
||||
);
|
||||
}
|
||||
|
||||
logger.info(`Successfully deleted all ${pathsToDelete.length} temporary files.`);
|
||||
@@ -290,8 +352,13 @@ export class FlyerProcessingService {
|
||||
['DATABASE_ERROR', 'Saving to Database'],
|
||||
]);
|
||||
const normalizedError = error instanceof Error ? error : new Error(String(error));
|
||||
let errorPayload: { errorCode: string; message: string; [key: string]: any };
|
||||
let stagesToReport: ProcessingStage[] = [...initialStages]; // Create a mutable copy
|
||||
let errorPayload: {
|
||||
errorCode: string;
|
||||
message: string;
|
||||
stages?: ProcessingStage[];
|
||||
[key: string]: unknown;
|
||||
};
|
||||
const stagesToReport: ProcessingStage[] = [...initialStages]; // Create a mutable copy
|
||||
|
||||
if (normalizedError instanceof FlyerProcessingError) {
|
||||
errorPayload = normalizedError.toErrorPayload();
|
||||
@@ -302,11 +369,13 @@ export class FlyerProcessingService {
|
||||
|
||||
// Determine which stage failed
|
||||
const failedStageName = errorCodeToStageMap.get(errorPayload.errorCode);
|
||||
let errorStageIndex = failedStageName ? stagesToReport.findIndex(s => s.name === failedStageName) : -1;
|
||||
let errorStageIndex = failedStageName
|
||||
? stagesToReport.findIndex((s) => s.name === failedStageName)
|
||||
: -1;
|
||||
|
||||
// 2. If not mapped, find the currently running stage
|
||||
if (errorStageIndex === -1) {
|
||||
errorStageIndex = stagesToReport.findIndex(s => s.status === 'in-progress');
|
||||
errorStageIndex = stagesToReport.findIndex((s) => s.status === 'in-progress');
|
||||
}
|
||||
|
||||
// 3. Fallback to the last stage
|
||||
@@ -324,10 +393,12 @@ export class FlyerProcessingService {
|
||||
// Mark subsequent critical stages as skipped
|
||||
for (let i = errorStageIndex + 1; i < stagesToReport.length; i++) {
|
||||
if (stagesToReport[i].critical) {
|
||||
// When a stage is skipped, we don't need its previous 'detail' property.
|
||||
// This creates a clean 'skipped' state object by removing `detail` and keeping the rest.
|
||||
const { detail, ...restOfStage } = stagesToReport[i];
|
||||
stagesToReport[i] = { ...restOfStage, status: 'skipped' };
|
||||
// When a stage is skipped, we create a clean 'skipped' state object without the 'detail' property.
|
||||
stagesToReport[i] = {
|
||||
name: stagesToReport[i].name,
|
||||
status: 'skipped',
|
||||
critical: stagesToReport[i].critical,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -337,7 +408,7 @@ export class FlyerProcessingService {
|
||||
// Logging logic
|
||||
if (normalizedError instanceof FlyerProcessingError) {
|
||||
// Simplify log object creation
|
||||
const logDetails: Record<string, any> = { ...errorPayload, err: normalizedError };
|
||||
const logDetails: Record<string, unknown> = { ...errorPayload, err: normalizedError };
|
||||
|
||||
if (normalizedError instanceof AiDataValidationError) {
|
||||
logDetails.validationErrors = normalizedError.validationErrors;
|
||||
@@ -349,12 +420,22 @@ export class FlyerProcessingService {
|
||||
|
||||
logger.error(logDetails, `A known processing error occurred: ${normalizedError.name}`);
|
||||
} else {
|
||||
logger.error({ err: normalizedError, ...errorPayload }, `An unknown error occurred: ${errorPayload.message}`);
|
||||
logger.error(
|
||||
{ err: normalizedError, ...errorPayload },
|
||||
`An unknown error occurred: ${errorPayload.message}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Check for specific error messages that indicate a non-retriable failure, like quota exhaustion.
|
||||
if (errorPayload.message.toLowerCase().includes('quota') || errorPayload.message.toLowerCase().includes('resource_exhausted')) {
|
||||
const unrecoverablePayload = { errorCode: 'QUOTA_EXCEEDED', message: 'An AI quota has been exceeded. Please try again later.', stages: errorPayload.stages };
|
||||
if (
|
||||
errorPayload.message.toLowerCase().includes('quota') ||
|
||||
errorPayload.message.toLowerCase().includes('resource_exhausted')
|
||||
) {
|
||||
const unrecoverablePayload = {
|
||||
errorCode: 'QUOTA_EXCEEDED',
|
||||
message: 'An AI quota has been exceeded. Please try again later.',
|
||||
stages: errorPayload.stages,
|
||||
};
|
||||
await job.updateProgress(unrecoverablePayload);
|
||||
throw new UnrecoverableError(unrecoverablePayload.message);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// src/services/workers.server.ts
|
||||
import { Worker, Job, UnrecoverableError } from 'bullmq';
|
||||
import { Worker, Job } from 'bullmq';
|
||||
import fsPromises from 'node:fs/promises';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
@@ -15,6 +15,7 @@ import { FlyerProcessingService } from './flyerProcessingService.server';
|
||||
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
import { FlyerPersistenceService } from './flyerPersistenceService.server';
|
||||
import { withTransaction } from './db/connection.db';
|
||||
import {
|
||||
cleanupQueue,
|
||||
flyerQueue,
|
||||
@@ -43,13 +44,13 @@ export const fsAdapter: IFileSystem = {
|
||||
rename: (oldPath: string, newPath: string) => fsPromises.rename(oldPath, newPath),
|
||||
};
|
||||
|
||||
const flyerProcessingService = new FlyerProcessingService(
|
||||
export const flyerProcessingService = new FlyerProcessingService(
|
||||
new FlyerFileHandler(fsAdapter, execAsync),
|
||||
new FlyerAiProcessor(aiService, db.personalizationRepo),
|
||||
fsAdapter,
|
||||
cleanupQueue,
|
||||
new FlyerDataTransformer(),
|
||||
new FlyerPersistenceService(),
|
||||
new FlyerPersistenceService(withTransaction),
|
||||
);
|
||||
|
||||
const normalizeError = (error: unknown): Error => {
|
||||
@@ -63,7 +64,7 @@ const normalizeError = (error: unknown): Error => {
|
||||
* @param processor The core logic for the worker.
|
||||
* @returns An async function that takes a job and executes the processor.
|
||||
*/
|
||||
const createWorkerProcessor = <T>(processor: (job: Job<T>) => Promise<any>) => {
|
||||
const createWorkerProcessor = <T, R>(processor: (job: Job<T>) => Promise<R>) => {
|
||||
return async (job: Job<T>) => {
|
||||
try {
|
||||
return await processor(job);
|
||||
@@ -179,7 +180,10 @@ export const gracefulShutdown = async (signal: string) => {
|
||||
let hasErrors = false;
|
||||
|
||||
// Helper function to close a group of resources and log results
|
||||
const closeResources = async (resources: { name: string; close: () => Promise<any> }[], type: string) => {
|
||||
const closeResources = async (
|
||||
resources: { name: string; close: () => Promise<void> }[],
|
||||
type: string,
|
||||
) => {
|
||||
logger.info(`[Shutdown] Closing all ${type}...`);
|
||||
const results = await Promise.allSettled(resources.map((r) => r.close()));
|
||||
let groupHasErrors = false;
|
||||
@@ -230,7 +234,10 @@ export const gracefulShutdown = async (signal: string) => {
|
||||
logger.info('[Shutdown] Redis connection closed successfully.');
|
||||
} catch (err) {
|
||||
hasErrors = true;
|
||||
logger.error({ err, resource: 'redisConnection' }, `[Shutdown] Error closing Redis connection.`);
|
||||
logger.error(
|
||||
{ err, resource: 'redisConnection' },
|
||||
`[Shutdown] Error closing Redis connection.`,
|
||||
);
|
||||
}
|
||||
|
||||
return hasErrors;
|
||||
|
||||
@@ -15,6 +15,13 @@ import piexif from 'piexifjs';
|
||||
import exifParser from 'exif-parser';
|
||||
import sharp from 'sharp';
|
||||
|
||||
// NOTE: STORAGE_PATH is set via the CI environment (deploy-to-test.yml).
|
||||
// This ensures multer and flyerProcessingService use the test runner's directory
|
||||
// instead of the production path (/var/www/.../flyer-images).
|
||||
// The testStoragePath variable is used for constructing paths in test assertions.
|
||||
const testStoragePath =
|
||||
process.env.STORAGE_PATH || path.resolve(__dirname, '../../../flyer-images');
|
||||
|
||||
// Mock the image processor to ensure safe filenames for DB constraints
|
||||
vi.mock('../../utils/imageProcessor', async () => {
|
||||
const actual = await vi.importActual<typeof import('../../utils/imageProcessor')>(
|
||||
@@ -92,10 +99,9 @@ vi.mock('../../services/storage/storageService', () => {
|
||||
|
||||
// CRITICAL: These mock functions must be declared with vi.hoisted() to ensure they're available
|
||||
// at the module level BEFORE any imports are resolved.
|
||||
const { mockExtractCoreData, mockWithTransaction } = vi.hoisted(() => {
|
||||
const { mockExtractCoreData } = vi.hoisted(() => {
|
||||
return {
|
||||
mockExtractCoreData: vi.fn(),
|
||||
mockWithTransaction: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -122,20 +128,9 @@ vi.mock('../../services/aiService.server', async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
// Mock the connection DB service to intercept withTransaction.
|
||||
// This is crucial because FlyerPersistenceService imports directly from connection.db,
|
||||
// so mocking index.db is insufficient.
|
||||
// CRITICAL: We use the hoisted mockWithTransaction function so tests can manipulate the same
|
||||
// function instance that workers are using.
|
||||
vi.mock('../../services/db/connection.db', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../../services/db/connection.db')>();
|
||||
// Initialize the hoisted mock to use the real implementation by default
|
||||
mockWithTransaction.mockImplementation(actual.withTransaction);
|
||||
return {
|
||||
...actual,
|
||||
withTransaction: mockWithTransaction,
|
||||
};
|
||||
});
|
||||
// NOTE: We no longer mock connection.db at the module level because vi.mock() doesn't work
|
||||
// across module boundaries (the worker imports the real module before our mock is applied).
|
||||
// Instead, we use dependency injection via FlyerPersistenceService._setWithTransaction().
|
||||
|
||||
describe('Flyer Processing Background Job Integration Test', () => {
|
||||
let request: ReturnType<typeof supertest>;
|
||||
@@ -152,6 +147,12 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
// for the database, satisfying the 'url_check' constraint.
|
||||
// IMPORTANT: This must run BEFORE the app is imported so workers inherit the env var.
|
||||
vi.stubEnv('FRONTEND_URL', 'https://example.com');
|
||||
|
||||
// STORAGE_PATH is primarily set via CI environment (deploy-to-test.yml).
|
||||
// This stubEnv call serves as a fallback for local development runs.
|
||||
// It ensures multer and flyerProcessingService use the test directory, not production path.
|
||||
vi.stubEnv('STORAGE_PATH', testStoragePath);
|
||||
console.error('[TEST SETUP] STORAGE_PATH:', testStoragePath);
|
||||
process.env.FRONTEND_URL = 'https://example.com';
|
||||
console.error('[TEST SETUP] FRONTEND_URL stubbed to:', process.env.FRONTEND_URL);
|
||||
|
||||
@@ -190,15 +191,15 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
],
|
||||
});
|
||||
|
||||
// 2. Restore DB Service Mock to real implementation
|
||||
// This ensures that unless a test specifically mocks a failure, the DB logic works as expected.
|
||||
// CRITICAL: Use the hoisted mockWithTransaction directly so we're manipulating the same instance
|
||||
// that the workers are using.
|
||||
const actualDb = await vi.importActual<typeof import('../../services/db/connection.db')>(
|
||||
'../../services/db/connection.db',
|
||||
);
|
||||
mockWithTransaction.mockReset();
|
||||
mockWithTransaction.mockImplementation(actualDb.withTransaction);
|
||||
// 2. Restore withTransaction to real implementation via dependency injection
|
||||
// This ensures that unless a test specifically injects a mock, the DB logic works as expected.
|
||||
if (workersModule) {
|
||||
const { withTransaction } = await import('../../services/db/connection.db');
|
||||
workersModule.flyerProcessingService
|
||||
._getPersistenceService()
|
||||
._setWithTransaction(withTransaction);
|
||||
console.error('[TEST SETUP] withTransaction restored to real implementation via DI');
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -263,7 +264,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
console.error('[TEST DATA] Generated checksum for test:', checksum);
|
||||
|
||||
// Track created files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
const uploadDir = testStoragePath;
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
console.error('[TEST] createdFilesPaths after 1st push: ', createdFilePaths);
|
||||
// The icon name is derived from the original filename.
|
||||
@@ -393,7 +394,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track original and derived files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
const uploadDir = testStoragePath;
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
|
||||
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
|
||||
@@ -484,7 +485,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
const uploadDir = testStoragePath;
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
|
||||
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
|
||||
@@ -559,7 +560,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track created files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
const uploadDir = testStoragePath;
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
|
||||
// Act 1: Upload the file to start the background job.
|
||||
@@ -596,11 +597,16 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
}, 240000);
|
||||
|
||||
it('should handle a database failure during flyer creation', async () => {
|
||||
// Arrange: Mock the database transaction function to throw an error.
|
||||
// This is a more realistic simulation of a DB failure than mocking the inner createFlyerAndItems function.
|
||||
// CRITICAL: Use the hoisted mockWithTransaction directly - this is the same instance the workers use.
|
||||
// Arrange: Inject a failing withTransaction function via dependency injection.
|
||||
// This is the correct approach because vi.mock() doesn't work across module boundaries -
|
||||
// the worker imports the real module before our mock is applied.
|
||||
const dbError = new Error('DB transaction failed');
|
||||
mockWithTransaction.mockRejectedValue(dbError);
|
||||
const failingWithTransaction = vi.fn().mockRejectedValue(dbError);
|
||||
console.error('[DB FAILURE TEST] About to inject failingWithTransaction mock');
|
||||
workersModule.flyerProcessingService
|
||||
._getPersistenceService()
|
||||
._setWithTransaction(failingWithTransaction);
|
||||
console.error('[DB FAILURE TEST] failingWithTransaction mock injected successfully');
|
||||
|
||||
// Arrange: Prepare a unique flyer file for upload.
|
||||
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
|
||||
@@ -613,7 +619,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track created files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
const uploadDir = testStoragePath;
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
|
||||
// Act 1: Upload the file to start the background job.
|
||||
@@ -661,7 +667,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track the path of the file that will be created in the uploads directory.
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
const uploadDir = testStoragePath;
|
||||
const tempFilePath = path.join(uploadDir, uniqueFileName);
|
||||
createdFilePaths.push(tempFilePath);
|
||||
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
// src/tests/setup/integration-global-setup.ts
|
||||
import { execSync } from 'child_process';
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'path';
|
||||
import type { Server } from 'http';
|
||||
import { logger } from '../../services/logger.server';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
@@ -18,28 +20,49 @@ async function cleanAllQueues() {
|
||||
console.error(`[PID:${process.pid}] [QUEUE CLEANUP] Starting BullMQ queue cleanup...`);
|
||||
|
||||
try {
|
||||
const { flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue } = await import('../../services/queues.server');
|
||||
const {
|
||||
flyerQueue,
|
||||
cleanupQueue,
|
||||
emailQueue,
|
||||
analyticsQueue,
|
||||
weeklyAnalyticsQueue,
|
||||
tokenCleanupQueue,
|
||||
} = await import('../../services/queues.server');
|
||||
console.error(`[QUEUE CLEANUP] Successfully imported queue modules`);
|
||||
|
||||
const queues = [flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue];
|
||||
const queues = [
|
||||
flyerQueue,
|
||||
cleanupQueue,
|
||||
emailQueue,
|
||||
analyticsQueue,
|
||||
weeklyAnalyticsQueue,
|
||||
tokenCleanupQueue,
|
||||
];
|
||||
|
||||
for (const queue of queues) {
|
||||
try {
|
||||
// Log queue state before cleanup
|
||||
const jobCounts = await queue.getJobCounts();
|
||||
console.error(`[QUEUE CLEANUP] Queue "${queue.name}" before cleanup: ${JSON.stringify(jobCounts)}`);
|
||||
console.error(
|
||||
`[QUEUE CLEANUP] Queue "${queue.name}" before cleanup: ${JSON.stringify(jobCounts)}`,
|
||||
);
|
||||
|
||||
// obliterate() removes ALL data associated with the queue from Redis
|
||||
await queue.obliterate({ force: true });
|
||||
console.error(` ✅ [QUEUE CLEANUP] Cleaned queue: ${queue.name}`);
|
||||
} catch (error) {
|
||||
// Log but don't fail - the queue might not exist yet
|
||||
console.error(` ⚠️ [QUEUE CLEANUP] Could not clean queue ${queue.name}: ${error instanceof Error ? error.message : 'Unknown error'}`);
|
||||
console.error(
|
||||
` ⚠️ [QUEUE CLEANUP] Could not clean queue ${queue.name}: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
console.error(`✅ [PID:${process.pid}] [QUEUE CLEANUP] All queues cleaned successfully.`);
|
||||
} catch (error) {
|
||||
console.error(`❌ [PID:${process.pid}] [QUEUE CLEANUP] CRITICAL ERROR during queue cleanup:`, error);
|
||||
console.error(
|
||||
`❌ [PID:${process.pid}] [QUEUE CLEANUP] CRITICAL ERROR during queue cleanup:`,
|
||||
error,
|
||||
);
|
||||
// Don't throw - we want the tests to continue even if cleanup fails
|
||||
}
|
||||
}
|
||||
@@ -50,7 +73,24 @@ export async function setup() {
|
||||
// Fix: Set the FRONTEND_URL globally for the test server instance
|
||||
process.env.FRONTEND_URL = 'https://example.com';
|
||||
|
||||
// CRITICAL: Set STORAGE_PATH before importing the server.
|
||||
// The multer middleware runs an IIFE on import that creates directories based on this path.
|
||||
// If not set, it defaults to /var/www/.../flyer-images which won't exist in the test environment.
|
||||
if (!process.env.STORAGE_PATH) {
|
||||
// Use path relative to the project root (where tests run from)
|
||||
process.env.STORAGE_PATH = path.resolve(process.cwd(), 'flyer-images');
|
||||
}
|
||||
|
||||
// Ensure the storage directories exist before the server starts
|
||||
try {
|
||||
await fs.mkdir(path.join(process.env.STORAGE_PATH, 'icons'), { recursive: true });
|
||||
console.error(`[SETUP] Created storage directory: ${process.env.STORAGE_PATH}`);
|
||||
} catch (error) {
|
||||
console.error(`[SETUP] Warning: Could not create storage directory: ${error}`);
|
||||
}
|
||||
|
||||
console.error(`\n--- [PID:${process.pid}] Running Integration Test GLOBAL Setup ---`);
|
||||
console.error(`[SETUP] STORAGE_PATH: ${process.env.STORAGE_PATH}`);
|
||||
console.error(`[SETUP] REDIS_URL: ${process.env.REDIS_URL}`);
|
||||
console.error(`[SETUP] REDIS_PASSWORD is set: ${!!process.env.REDIS_PASSWORD}`);
|
||||
|
||||
|
||||
165
src/types/api.ts
Normal file
165
src/types/api.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
// src/types/api.ts
// ============================================================================
// API RESPONSE TYPE DEFINITIONS
// ============================================================================
// Standardized response types for all API endpoints per ADR-028.
// These types ensure consistent response structure across the entire API.
// ============================================================================

/**
 * Standard pagination metadata included in paginated responses.
 */
export interface PaginationMeta {
  /** Current page number (1-indexed) */
  page: number;
  /** Number of items per page */
  limit: number;
  /** Total number of items across all pages */
  total: number;
  /** Total number of pages */
  totalPages: number;
  /** Whether there is a next page */
  hasNextPage: boolean;
  /** Whether there is a previous page */
  hasPrevPage: boolean;
}

/**
 * Optional metadata that can be included in any response.
 */
export interface ResponseMeta {
  /** Unique request identifier for tracking/debugging */
  requestId?: string;
  /** ISO timestamp of when the response was generated */
  timestamp?: string;
  /** Pagination info (only for paginated responses) */
  pagination?: PaginationMeta;
}

/**
 * Standard success response envelope.
 * All successful API responses should follow this structure.
 *
 * @example
 * // Single item response
 * {
 *   "success": true,
 *   "data": { "id": 1, "name": "Item" }
 * }
 *
 * @example
 * // Paginated list response
 * {
 *   "success": true,
 *   "data": [{ "id": 1 }, { "id": 2 }],
 *   "meta": {
 *     "pagination": { "page": 1, "limit": 20, "total": 100, ... }
 *   }
 * }
 */
export interface ApiSuccessResponse<T> {
  // Literal `true` acts as the discriminant for ApiResponse narrowing.
  success: true;
  data: T;
  meta?: ResponseMeta;
}

/**
 * Standard error response envelope.
 * All error responses should follow this structure.
 *
 * @example
 * // Validation error
 * {
 *   "success": false,
 *   "error": {
 *     "code": "VALIDATION_ERROR",
 *     "message": "The request data is invalid.",
 *     "details": [{ "path": ["email"], "message": "Invalid email format" }]
 *   }
 * }
 *
 * @example
 * // Not found error
 * {
 *   "success": false,
 *   "error": {
 *     "code": "NOT_FOUND",
 *     "message": "User not found"
 *   }
 * }
 */
export interface ApiErrorResponse {
  // Literal `false` acts as the discriminant for ApiResponse narrowing.
  success: false;
  error: {
    /** Machine-readable error code (e.g., 'VALIDATION_ERROR', 'NOT_FOUND') */
    code: string;
    /** Human-readable error message */
    message: string;
    /** Additional error details (validation errors, etc.) */
    details?: unknown;
  };
  // Errors never carry pagination, so only the tracking fields are allowed.
  meta?: Pick<ResponseMeta, 'requestId' | 'timestamp'>;
}

/**
 * Union type for all API responses.
 * Useful for frontend type narrowing based on `success` field.
 */
export type ApiResponse<T> = ApiSuccessResponse<T> | ApiErrorResponse;

// ============================================================================
// ERROR CODES
// ============================================================================
// Standardized error codes for consistent error identification.
// ============================================================================

/**
 * Standard error codes used across the API.
 * These should be used with the `sendError` helper function.
 */
export const ErrorCode = {
  // Client errors (4xx)
  VALIDATION_ERROR: 'VALIDATION_ERROR',
  NOT_FOUND: 'NOT_FOUND',
  UNAUTHORIZED: 'UNAUTHORIZED',
  FORBIDDEN: 'FORBIDDEN',
  CONFLICT: 'CONFLICT',
  BAD_REQUEST: 'BAD_REQUEST',
  RATE_LIMITED: 'RATE_LIMITED',
  PAYLOAD_TOO_LARGE: 'PAYLOAD_TOO_LARGE',

  // Server errors (5xx)
  INTERNAL_ERROR: 'INTERNAL_ERROR',
  SERVICE_UNAVAILABLE: 'SERVICE_UNAVAILABLE',
  EXTERNAL_SERVICE_ERROR: 'EXTERNAL_SERVICE_ERROR',
  NOT_IMPLEMENTED: 'NOT_IMPLEMENTED',
} as const;

// Union of all ErrorCode string values (e.g. 'VALIDATION_ERROR' | 'NOT_FOUND' | ...).
export type ErrorCodeType = (typeof ErrorCode)[keyof typeof ErrorCode];

// ============================================================================
// HELPER TYPES
// ============================================================================

/**
 * Input for creating paginated responses.
 */
export interface PaginationInput {
  page: number;
  limit: number;
  total: number;
}
|
||||
|
||||
/**
|
||||
* Type guard to check if a response is a success response.
|
||||
*/
|
||||
export function isApiSuccess<T>(response: ApiResponse<T>): response is ApiSuccessResponse<T> {
|
||||
return response.success === true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard to check if a response is an error response.
|
||||
*/
|
||||
export function isApiError<T>(response: ApiResponse<T>): response is ApiErrorResponse {
|
||||
return response.success === false;
|
||||
}
|
||||
183
src/utils/apiResponse.ts
Normal file
183
src/utils/apiResponse.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
// src/utils/apiResponse.ts
|
||||
// ============================================================================
|
||||
// API RESPONSE HELPERS
|
||||
// ============================================================================
|
||||
// Utility functions for creating standardized API responses per ADR-028.
|
||||
// Use these helpers in all route handlers for consistent response formats.
|
||||
// ============================================================================
|
||||
|
||||
import { Response } from 'express';
|
||||
import {
|
||||
ApiSuccessResponse,
|
||||
ApiErrorResponse,
|
||||
PaginationInput,
|
||||
PaginationMeta,
|
||||
ResponseMeta,
|
||||
ErrorCodeType,
|
||||
ErrorCode,
|
||||
} from '../types/api';
|
||||
|
||||
/**
|
||||
* Send a successful response with data.
|
||||
*
|
||||
* @param res - Express response object
|
||||
* @param data - The response data
|
||||
* @param statusCode - HTTP status code (default: 200)
|
||||
* @param meta - Optional metadata (requestId, timestamp)
|
||||
*
|
||||
* @example
|
||||
* // Simple success response
|
||||
* sendSuccess(res, { id: 1, name: 'Item' });
|
||||
*
|
||||
* @example
|
||||
* // Success with 201 Created
|
||||
* sendSuccess(res, newUser, 201);
|
||||
*/
|
||||
export function sendSuccess<T>(
|
||||
res: Response,
|
||||
data: T,
|
||||
statusCode: number = 200,
|
||||
meta?: Omit<ResponseMeta, 'pagination'>,
|
||||
): Response<ApiSuccessResponse<T>> {
|
||||
const response: ApiSuccessResponse<T> = {
|
||||
success: true,
|
||||
data,
|
||||
};
|
||||
|
||||
if (meta) {
|
||||
response.meta = meta;
|
||||
}
|
||||
|
||||
return res.status(statusCode).json(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a successful response with no content (204).
|
||||
* Used for DELETE operations or actions that don't return data.
|
||||
*
|
||||
* @param res - Express response object
|
||||
*
|
||||
* @example
|
||||
* // After deleting a resource
|
||||
* sendNoContent(res);
|
||||
*/
|
||||
export function sendNoContent(res: Response): Response {
|
||||
return res.status(204).send();
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate pagination metadata from input parameters.
|
||||
*
|
||||
* @param input - Pagination input (page, limit, total)
|
||||
* @returns Calculated pagination metadata
|
||||
*/
|
||||
export function calculatePagination(input: PaginationInput): PaginationMeta {
|
||||
const { page, limit, total } = input;
|
||||
const totalPages = Math.ceil(total / limit);
|
||||
|
||||
return {
|
||||
page,
|
||||
limit,
|
||||
total,
|
||||
totalPages,
|
||||
hasNextPage: page < totalPages,
|
||||
hasPrevPage: page > 1,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a paginated success response.
|
||||
*
|
||||
* @param res - Express response object
|
||||
* @param data - The array of items for the current page
|
||||
* @param pagination - Pagination input (page, limit, total)
|
||||
* @param meta - Optional additional metadata
|
||||
*
|
||||
* @example
|
||||
* const { flyers, total } = await flyerService.getFlyers({ page, limit });
|
||||
* sendPaginated(res, flyers, { page, limit, total });
|
||||
*/
|
||||
export function sendPaginated<T>(
|
||||
res: Response,
|
||||
data: T[],
|
||||
pagination: PaginationInput,
|
||||
meta?: Omit<ResponseMeta, 'pagination'>,
|
||||
): Response<ApiSuccessResponse<T[]>> {
|
||||
const response: ApiSuccessResponse<T[]> = {
|
||||
success: true,
|
||||
data,
|
||||
meta: {
|
||||
...meta,
|
||||
pagination: calculatePagination(pagination),
|
||||
},
|
||||
};
|
||||
|
||||
return res.status(200).json(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an error response.
|
||||
*
|
||||
* @param res - Express response object
|
||||
* @param code - Machine-readable error code
|
||||
* @param message - Human-readable error message
|
||||
* @param statusCode - HTTP status code (default: 400)
|
||||
* @param details - Optional error details (validation errors, etc.)
|
||||
* @param meta - Optional metadata (requestId for error tracking)
|
||||
*
|
||||
* @example
|
||||
* // Validation error
|
||||
* sendError(res, ErrorCode.VALIDATION_ERROR, 'Invalid email format', 400, errors);
|
||||
*
|
||||
* @example
|
||||
* // Not found error
|
||||
* sendError(res, ErrorCode.NOT_FOUND, 'User not found', 404);
|
||||
*/
|
||||
export function sendError(
|
||||
res: Response,
|
||||
code: ErrorCodeType | string,
|
||||
message: string,
|
||||
statusCode: number = 400,
|
||||
details?: unknown,
|
||||
meta?: Pick<ResponseMeta, 'requestId' | 'timestamp'>,
|
||||
): Response<ApiErrorResponse> {
|
||||
const response: ApiErrorResponse = {
|
||||
success: false,
|
||||
error: {
|
||||
code,
|
||||
message,
|
||||
},
|
||||
};
|
||||
|
||||
if (details !== undefined) {
|
||||
response.error.details = details;
|
||||
}
|
||||
|
||||
if (meta) {
|
||||
response.meta = meta;
|
||||
}
|
||||
|
||||
return res.status(statusCode).json(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a message-only success response.
|
||||
* Useful for operations that complete successfully but don't return data.
|
||||
*
|
||||
* @param res - Express response object
|
||||
* @param message - Success message
|
||||
* @param statusCode - HTTP status code (default: 200)
|
||||
*
|
||||
* @example
|
||||
* sendMessage(res, 'Password updated successfully');
|
||||
*/
|
||||
export function sendMessage(
|
||||
res: Response,
|
||||
message: string,
|
||||
statusCode: number = 200,
|
||||
): Response<ApiSuccessResponse<{ message: string }>> {
|
||||
return sendSuccess(res, { message }, statusCode);
|
||||
}
|
||||
|
||||
// Re-export ErrorCode for convenience, so route handlers can import both
// the response helpers and the error codes from this single module instead
// of also importing '../types/api'.
export { ErrorCode };
|
||||
Reference in New Issue
Block a user