integration test fixes - claude for the win? try 4 - i have a good feeling
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 16m58s

2026-01-09 05:55:55 -08:00
parent 1814469eb4
commit 4a04e478c4
20 changed files with 3006 additions and 570 deletions

View File

@@ -62,7 +62,8 @@
"Bash(wc:*)", "Bash(wc:*)",
"Bash(npm install:*)", "Bash(npm install:*)",
"Bash(git grep:*)", "Bash(git grep:*)",
"Bash(findstr:*)" "Bash(findstr:*)",
"Bash(git add:*)"
] ]
} }
} }

View File

@@ -1,61 +1,66 @@
-{
-  "mcpServers": {
-    "markitdown": {
-      "command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
-      "args": [
-        "markitdown-mcp"
-      ]
-    },
-    "gitea-torbonium": {
-      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
-      "args": ["run", "-t", "stdio"],
-      "env": {
-        "GITEA_HOST": "https://gitea.torbonium.com",
-        "GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
-      }
-    },
-    "gitea-lan": {
-      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
-      "args": ["run", "-t", "stdio"],
-      "env": {
-        "GITEA_HOST": "https://gitea.torbolan.com",
-        "GITEA_ACCESS_TOKEN": "REPLACE_WITH_NEW_TOKEN"
-      }
-    },
-    "gitea-projectium": {
-      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
-      "args": ["run", "-t", "stdio"],
-      "env": {
-        "GITEA_HOST": "https://gitea.projectium.com",
-        "GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
-      }
-    },
-    "podman": {
-      "command": "D:\\nodejs\\npx.cmd",
-      "args": ["-y", "podman-mcp-server@latest"],
-      "env": {
-        "DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
-      }
-    },
-    "filesystem": {
-      "command": "D:\\nodejs\\npx.cmd",
-      "args": [
-        "-y",
-        "@modelcontextprotocol/server-filesystem",
-        "D:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
-      ]
-    },
-    "fetch": {
-      "command": "D:\\nodejs\\npx.cmd",
-      "args": ["-y", "@modelcontextprotocol/server-fetch"]
-    },
-    "sequential-thinking": {
-      "command": "D:\\nodejs\\npx.cmd",
-      "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
-    },
-    "memory": {
-      "command": "D:\\nodejs\\npx.cmd",
-      "args": ["-y", "@modelcontextprotocol/server-memory"]
-    }
-  }
-}
+{
+  "mcpServers": {
+    "gitea-projectium": {
+      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
+      "args": ["run", "-t", "stdio"],
+      "env": {
+        "GITEA_HOST": "https://gitea.projectium.com",
+        "GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
+      }
+    },
+    "gitea-torbonium": {
+      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
+      "args": ["run", "-t", "stdio"],
+      "env": {
+        "GITEA_HOST": "https://gitea.torbonium.com",
+        "GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
+      }
+    },
+    "gitea-lan": {
+      "command": "d:\\gitea-mcp\\gitea-mcp.exe",
+      "args": ["run", "-t", "stdio"],
+      "env": {
+        "GITEA_HOST": "https://gitea.torbolan.com",
+        "GITEA_ACCESS_TOKEN": "YOUR_LAN_TOKEN_HERE"
+      },
+      "disabled": true
+    },
+    "podman": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "podman-mcp-server@latest"],
+      "env": {
+        "DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
+      }
+    },
+    "filesystem": {
+      "command": "d:\\nodejs\\node.exe",
+      "args": [
+        "c:\\Users\\games3\\AppData\\Roaming\\npm\\node_modules\\@modelcontextprotocol\\server-filesystem\\dist\\index.js",
+        "d:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
+      ]
+    },
+    "fetch": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@modelcontextprotocol/server-fetch"]
+    },
+    "io.github.ChromeDevTools/chrome-devtools-mcp": {
+      "type": "stdio",
+      "command": "npx",
+      "args": ["chrome-devtools-mcp@0.12.1"],
+      "gallery": "https://api.mcp.github.com",
+      "version": "0.12.1"
+    },
+    "markitdown": {
+      "command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
+      "args": ["markitdown-mcp"]
+    },
+    "sequential-thinking": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
+    },
+    "memory": {
+      "command": "D:\\nodejs\\npx.cmd",
+      "args": ["-y", "@modelcontextprotocol/server-memory"]
+    }
+  }
+}

.husky/pre-commit Normal file
View File

@@ -0,0 +1 @@
npx lint-staged

.lintstagedrc.json Normal file
View File

@@ -0,0 +1,4 @@
{
"*.{js,jsx,ts,tsx}": ["eslint --fix", "prettier --write"],
"*.{json,md,css,html,yml,yaml}": ["prettier --write"]
}

.prettierignore Normal file
View File

@@ -0,0 +1,41 @@
# Dependencies
node_modules/
# Build output
dist/
build/
.cache/
# Coverage reports
coverage/
.coverage/
# IDE and editor configs
.idea/
.vscode/
*.swp
*.swo
# Logs
*.log
logs/
# Environment files (may contain secrets)
.env*
!.env.example
# Lock files (managed by package managers)
package-lock.json
pnpm-lock.yaml
yarn.lock
# Generated files
*.min.js
*.min.css
# Git directory
.git/
.gitea/
# Test artifacts
__snapshots__/

View File

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
-**Status**: Proposed
+**Status**: Accepted
+**Implemented**: 2026-01-09
## Context
@@ -16,3 +18,216 @@ We will introduce a centralized, schema-validated configuration service.
**Positive**: Improves application reliability and developer experience by catching configuration errors at startup rather than at runtime. Provides a single source of truth for all required configuration.
**Negative**: Adds a small amount of boilerplate for defining the configuration schema. Requires a one-time effort to refactor all `process.env` access points to use the new configuration service.
## Implementation Status
### What's Implemented
- **Centralized Configuration Schema** - Zod-based validation in `src/config/env.ts`
- **Type-Safe Access** - Full TypeScript types for all configuration
- **Fail-Fast Startup** - Clear error messages for missing/invalid config
- **Environment Helpers** - `isProduction`, `isTest`, `isDevelopment` exports
- **Service Configuration Helpers** - `isSmtpConfigured`, `isAiConfigured`, etc.
### Migration Status
- ⏳ Gradual migration of `process.env` access to `config.*` in progress
- Legacy `process.env` access still works during transition
## Implementation Details
### Configuration Schema
The configuration is organized into logical groups:
```typescript
import { config, isProduction, isTest } from './config/env';
// Database
config.database.host; // DB_HOST
config.database.port; // DB_PORT (default: 5432)
config.database.user; // DB_USER
config.database.password; // DB_PASSWORD
config.database.name; // DB_NAME
// Redis
config.redis.url; // REDIS_URL
config.redis.password; // REDIS_PASSWORD (optional)
// Authentication
config.auth.jwtSecret; // JWT_SECRET (min 32 chars)
config.auth.jwtSecretPrevious; // JWT_SECRET_PREVIOUS (for rotation)
// SMTP (all optional - email degrades gracefully)
config.smtp.host; // SMTP_HOST
config.smtp.port; // SMTP_PORT (default: 587)
config.smtp.user; // SMTP_USER
config.smtp.pass; // SMTP_PASS
config.smtp.secure; // SMTP_SECURE (default: false)
config.smtp.fromEmail; // SMTP_FROM_EMAIL
// AI Services
config.ai.geminiApiKey; // GEMINI_API_KEY
config.ai.geminiRpm; // GEMINI_RPM (default: 5)
config.ai.priceQualityThreshold; // AI_PRICE_QUALITY_THRESHOLD (default: 0.5)
// Google Services
config.google.mapsApiKey; // GOOGLE_MAPS_API_KEY (optional)
config.google.clientId; // GOOGLE_CLIENT_ID (optional)
config.google.clientSecret; // GOOGLE_CLIENT_SECRET (optional)
// Worker Configuration
config.worker.concurrency; // WORKER_CONCURRENCY (default: 1)
config.worker.lockDuration; // WORKER_LOCK_DURATION (default: 30000)
config.worker.emailConcurrency; // EMAIL_WORKER_CONCURRENCY (default: 10)
config.worker.analyticsConcurrency; // ANALYTICS_WORKER_CONCURRENCY (default: 1)
config.worker.cleanupConcurrency; // CLEANUP_WORKER_CONCURRENCY (default: 10)
config.worker.weeklyAnalyticsConcurrency; // WEEKLY_ANALYTICS_WORKER_CONCURRENCY (default: 1)
// Server
config.server.nodeEnv; // NODE_ENV (development/production/test)
config.server.port; // PORT (default: 3001)
config.server.frontendUrl; // FRONTEND_URL
config.server.baseUrl; // BASE_URL
config.server.storagePath; // STORAGE_PATH (default: /var/www/.../flyer-images)
```
### Convenience Helpers
```typescript
import { isProduction, isTest, isDevelopment, isSmtpConfigured } from './config/env';
// Environment checks
if (isProduction) {
// Production-only logic
}
// Service availability checks
if (isSmtpConfigured) {
await sendEmail(...);
} else {
logger.warn('Email not configured, skipping notification');
}
```
### Fail-Fast Error Messages
When configuration is invalid, the application exits with a clear error:
```text
╔════════════════════════════════════════════════════════════════╗
║ CONFIGURATION ERROR - APPLICATION STARTUP ║
╚════════════════════════════════════════════════════════════════╝
The following environment variables are missing or invalid:
- database.host: DB_HOST is required
- auth.jwtSecret: JWT_SECRET must be at least 32 characters for security
Please check your .env file or environment configuration.
See ADR-007 for the complete list of required environment variables.
```
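The schema that produces these errors is not reproduced in the ADR. As a rough illustration, a Zod-based `src/config/env.ts` along the following lines would yield the behavior described above; the variable names come from the tables below, but the exact structure of the real file is an assumption:

```typescript
import { z } from 'zod';

// Sketch only: a subset of the documented variables.
const envSchema = z.object({
  DB_HOST: z.string().min(1, 'DB_HOST is required'),
  DB_PORT: z.coerce.number().int().default(5432),
  DB_USER: z.string().min(1, 'DB_USER is required'),
  DB_PASSWORD: z.string().min(1, 'DB_PASSWORD is required'),
  DB_NAME: z.string().min(1, 'DB_NAME is required'),
  REDIS_URL: z.string().url(),
  JWT_SECRET: z.string().min(32, 'JWT_SECRET must be at least 32 characters for security'),
  NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
  PORT: z.coerce.number().int().default(3001),
});

const parsed = envSchema.safeParse(process.env);
if (!parsed.success) {
  // Fail fast: report every problem, then exit before the app can start.
  for (const issue of parsed.error.issues) {
    console.error(`- ${issue.path.join('.')}: ${issue.message}`);
  }
  process.exit(1);
}

// Group the flat env vars into the logical sections shown earlier.
export const config = {
  database: {
    host: parsed.data.DB_HOST,
    port: parsed.data.DB_PORT,
    user: parsed.data.DB_USER,
    password: parsed.data.DB_PASSWORD,
    name: parsed.data.DB_NAME,
  },
  // ...redis, auth, smtp, server groups built the same way
};
export const isProduction = parsed.data.NODE_ENV === 'production';
```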
### Usage Example
```typescript
// Before (direct process.env access)
const pool = new Pool({
host: process.env.DB_HOST,
port: parseInt(process.env.DB_PORT || '5432', 10),
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
database: process.env.DB_NAME,
});
// After (type-safe config access)
import { config } from './config/env';
const pool = new Pool({
host: config.database.host,
port: config.database.port,
user: config.database.user,
password: config.database.password,
database: config.database.name,
});
```
## Required Environment Variables
### Critical (Application will not start without these)
| Variable | Description |
| ------------- | ----------------------------------------------------- |
| `DB_HOST` | PostgreSQL database host |
| `DB_USER` | PostgreSQL database user |
| `DB_PASSWORD` | PostgreSQL database password |
| `DB_NAME` | PostgreSQL database name |
| `REDIS_URL` | Redis connection URL (e.g., `redis://localhost:6379`) |
| `JWT_SECRET` | JWT signing secret (minimum 32 characters) |
### Optional with Defaults
| Variable | Default | Description |
| ---------------------------- | ------------------------- | ------------------------------- |
| `DB_PORT` | 5432 | PostgreSQL port |
| `PORT` | 3001 | Server HTTP port |
| `NODE_ENV` | development | Environment mode |
| `STORAGE_PATH` | /var/www/.../flyer-images | File upload directory |
| `SMTP_PORT` | 587 | SMTP server port |
| `SMTP_SECURE` | false | Use TLS for SMTP |
| `GEMINI_RPM` | 5 | Gemini API requests per minute |
| `AI_PRICE_QUALITY_THRESHOLD` | 0.5 | AI extraction quality threshold |
| `WORKER_CONCURRENCY` | 1 | Flyer processing concurrency |
| `WORKER_LOCK_DURATION` | 30000 | Worker lock duration (ms) |
### Optional (Feature-specific)
| Variable | Description |
| --------------------- | ------------------------------------------- |
| `GEMINI_API_KEY` | Google Gemini API key (enables AI features) |
| `GOOGLE_MAPS_API_KEY` | Google Maps API key (enables geocoding) |
| `SMTP_HOST` | SMTP server (enables email notifications) |
| `SMTP_USER` | SMTP authentication username |
| `SMTP_PASS` | SMTP authentication password |
| `SMTP_FROM_EMAIL` | Sender email address |
| `FRONTEND_URL` | Frontend URL for email links |
| `JWT_SECRET_PREVIOUS` | Previous JWT secret for rotation (ADR-029) |
## Key Files
- `src/config/env.ts` - Configuration schema and validation
- `.env.example` - Template for required environment variables
## Migration Guide
To migrate existing `process.env` usage:
1. Import the config:
```typescript
import { config, isProduction } from '../config/env';
```
2. Replace direct access:
```typescript
// Before
process.env.DB_HOST;
process.env.NODE_ENV === 'production';
parseInt(process.env.PORT || '3001', 10);
// After
config.database.host;
isProduction;
config.server.port;
```
3. Use service helpers for optional features:
```typescript
import { isSmtpConfigured, isAiConfigured } from '../config/env';
if (isSmtpConfigured) {
// Email is available
}
```

View File

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
-**Status**: Proposed
+**Status**: Accepted
+**Implemented**: 2026-01-09
## Context
@@ -20,3 +22,195 @@ We will implement dedicated health check endpoints in the Express application.
- **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
- **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.
## Implementation Status
### What's Implemented
- **Liveness Probe** (`/api/health/live`) - Simple process health check
- **Readiness Probe** (`/api/health/ready`) - Comprehensive dependency health check
- **Startup Probe** (`/api/health/startup`) - Initial startup verification
- **Individual Service Checks** - Database, Redis, Storage endpoints
- **Detailed Health Response** - Service latency, status, and details
## Implementation Details
### Probe Endpoints
| Endpoint | Purpose | Checks | HTTP Status |
| --------------------- | --------------- | ------------------ | ----------------------------- |
| `/api/health/live` | Liveness probe | Process running | 200 = alive |
| `/api/health/ready` | Readiness probe | DB, Redis, Storage | 200 = ready, 503 = not ready |
| `/api/health/startup` | Startup probe | Database only | 200 = started, 503 = starting |
### Liveness Probe
The liveness probe is intentionally simple with no external dependencies:
```typescript
// GET /api/health/live
{
"status": "ok",
"timestamp": "2026-01-09T12:00:00.000Z"
}
```
**Usage**: If this endpoint fails to respond, the container should be restarted.
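Because the liveness check has no dependencies, the handler is essentially a one-liner. A minimal sketch, assuming an Express `Router` mounted at `/api/health` (the real implementation lives in `src/routes/health.routes.ts` and may differ):

```typescript
import { Router, Request, Response } from 'express';

const router = Router();

// Liveness: no external dependencies, so it only fails if the process is wedged.
router.get('/live', (_req: Request, res: Response) => {
  res.status(200).json({ status: 'ok', timestamp: new Date().toISOString() });
});

export default router;
```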
### Readiness Probe
The readiness probe checks all critical dependencies:
```typescript
// GET /api/health/ready
{
"status": "healthy", // healthy | degraded | unhealthy
"timestamp": "2026-01-09T12:00:00.000Z",
"uptime": 3600.5,
"services": {
"database": {
"status": "healthy",
"latency": 5,
"details": {
"totalConnections": 10,
"idleConnections": 8,
"waitingConnections": 0
}
},
"redis": {
"status": "healthy",
"latency": 2
},
"storage": {
"status": "healthy",
"latency": 1,
"details": {
"path": "/var/www/.../flyer-images"
}
}
}
}
```
**Status Logic**:
- `healthy` - All critical services (database, Redis) are healthy
- `degraded` - Some non-critical issues (high connection wait, storage issues)
- `unhealthy` - Critical service unavailable (returns 503)
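This status logic can be expressed as a small aggregation helper. The following sketch is hypothetical (the `aggregate` function and its signature are not from the codebase), but it captures the rules above:

```typescript
type ServiceStatus = 'healthy' | 'degraded' | 'unhealthy';

interface ServiceCheck {
  status: ServiceStatus;
  latency?: number;
}

// Critical services (database, Redis) drive the overall verdict;
// problems anywhere else only degrade it.
function aggregate(services: Record<string, ServiceCheck>, critical: string[]): ServiceStatus {
  if (critical.some((name) => services[name]?.status === 'unhealthy')) return 'unhealthy';
  if (Object.values(services).some((s) => s.status !== 'healthy')) return 'degraded';
  return 'healthy';
}

// The readiness handler would respond 503 when aggregate(...) === 'unhealthy'.
```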
### Startup Probe
The startup probe is used during container initialization:
```typescript
// GET /api/health/startup
// Success (200):
{
"status": "started",
"timestamp": "2026-01-09T12:00:00.000Z",
"database": { "status": "healthy", "latency": 5 }
}
// Still starting (503):
{
"status": "starting",
"message": "Waiting for database connection",
"database": { "status": "unhealthy", "message": "..." }
}
```
### Individual Service Endpoints
For detailed diagnostics:
| Endpoint | Purpose |
| ----------------------- | ------------------------------- |
| `/api/health/ping` | Simple server responsiveness |
| `/api/health/db-schema` | Verify database tables exist |
| `/api/health/db-pool` | Database connection pool status |
| `/api/health/redis` | Redis connectivity |
| `/api/health/storage` | File storage accessibility |
| `/api/health/time` | Server time synchronization |
## Kubernetes Configuration Example
```yaml
apiVersion: v1
kind: Pod
spec:
containers:
- name: flyer-crawler
livenessProbe:
httpGet:
path: /api/health/live
port: 3001
initialDelaySeconds: 10
periodSeconds: 15
failureThreshold: 3
readinessProbe:
httpGet:
path: /api/health/ready
port: 3001
initialDelaySeconds: 5
periodSeconds: 10
failureThreshold: 3
startupProbe:
httpGet:
path: /api/health/startup
port: 3001
initialDelaySeconds: 0
periodSeconds: 5
failureThreshold: 30 # Allow up to 150 seconds for startup
```
## Docker Compose Configuration Example
```yaml
services:
api:
image: flyer-crawler:latest
healthcheck:
test: ['CMD', 'curl', '-f', 'http://localhost:3001/api/health/ready']
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
```
## PM2 Configuration Example
For non-containerized deployments using PM2:
```javascript
// ecosystem.config.js
module.exports = {
apps: [
{
name: 'flyer-crawler',
script: 'dist/server.js',
// PM2 will check this endpoint
// and restart if it fails
health_check: {
url: 'http://localhost:3001/api/health/ready',
interval: 30000,
timeout: 10000,
},
},
],
};
```
## Key Files
- `src/routes/health.routes.ts` - Health check endpoint implementations
- `server.ts` - Health routes mounted at `/api/health`
## Service Health Thresholds
| Service | Healthy | Degraded | Unhealthy |
| -------- | ---------------------- | ----------------------- | ------------------- |
| Database | Responds to `SELECT 1` | > 3 waiting connections | Connection fails |
| Redis | `PING` returns `PONG` | N/A | Connection fails |
| Storage | Write access to path | N/A | Path not accessible |

View File

@@ -2,7 +2,9 @@
**Date**: 2025-12-12
-**Status**: Proposed
+**Status**: Accepted
+**Implemented**: 2026-01-09
## Context
@@ -10,10 +12,171 @@ The project contains both frontend (React) and backend (Node.js) code. While lin
## Decision
-We will mandate the use of **Prettier** for automated code formatting and a unified **ESLint** configuration for code quality rules across both frontend and backend. This will be enforced automatically using a pre-commit hook managed by a tool like **Husky**.
+We will mandate the use of **Prettier** for automated code formatting and a unified **ESLint** configuration for code quality rules across both frontend and backend. This will be enforced automatically using a pre-commit hook managed by **Husky** and **lint-staged**.
## Consequences
**Positive**: Improves developer experience and team velocity by automating code consistency. Reduces time spent on stylistic code review comments. Enhances code readability and maintainability.
**Negative**: Requires an initial setup and configuration of Prettier, ESLint, and Husky. May require a one-time reformatting of the entire codebase.
## Implementation Status
### What's Implemented
- **Prettier Configuration** - `.prettierrc` with consistent settings
- **Prettier Ignore** - `.prettierignore` to exclude generated files
- **ESLint Configuration** - `eslint.config.js` with TypeScript and React support
- **ESLint + Prettier Integration** - `eslint-config-prettier` to avoid conflicts
- **Husky Pre-commit Hooks** - Automatic enforcement on commit
- **lint-staged** - Run linters only on staged files for performance
## Implementation Details
### Prettier Configuration
The project uses a consistent Prettier configuration in `.prettierrc`:
```json
{
"semi": true,
"trailingComma": "all",
"singleQuote": true,
"printWidth": 100,
"tabWidth": 2,
"useTabs": false,
"endOfLine": "auto"
}
```
### ESLint Configuration
ESLint is configured with:
- TypeScript support via `typescript-eslint`
- React hooks rules via `eslint-plugin-react-hooks`
- React Refresh support for HMR
- Prettier compatibility via `eslint-config-prettier`
```javascript
// eslint.config.js (ESLint v9 flat config)
import globals from 'globals';
import tseslint from 'typescript-eslint';
import pluginReact from 'eslint-plugin-react';
import pluginReactHooks from 'eslint-plugin-react-hooks';
import pluginReactRefresh from 'eslint-plugin-react-refresh';
import eslintConfigPrettier from 'eslint-config-prettier';
export default tseslint.config(
// ... configurations
eslintConfigPrettier, // Must be last to override formatting rules
);
```
### Pre-commit Hook
The pre-commit hook runs lint-staged automatically:
```bash
# .husky/pre-commit
npx lint-staged
```
### lint-staged Configuration
lint-staged runs appropriate tools based on file type:
```json
{
"*.{js,jsx,ts,tsx}": ["eslint --fix", "prettier --write"],
"*.{json,md,css,html,yml,yaml}": ["prettier --write"]
}
```
### NPM Scripts
| Script | Description |
| ------------------ | ---------------------------------------------- |
| `npm run format` | Format all files with Prettier |
| `npm run lint` | Run ESLint on all TypeScript/JavaScript files |
| `npm run validate` | Run Prettier check + TypeScript check + ESLint |
## Key Files
| File | Purpose |
| -------------------- | -------------------------------- |
| `.prettierrc` | Prettier configuration |
| `.prettierignore` | Files to exclude from formatting |
| `eslint.config.js` | ESLint flat configuration (v9) |
| `.husky/pre-commit` | Pre-commit hook script |
| `.lintstagedrc.json` | lint-staged configuration |
## Developer Workflow
### Automatic Formatting on Commit
When you commit changes:
1. Husky intercepts the commit
2. lint-staged identifies staged files
3. ESLint fixes auto-fixable issues
4. Prettier formats the code
5. Changes are automatically staged
6. Commit proceeds if no errors
### Manual Formatting
```bash
# Format entire codebase
npm run format
# Check formatting without changes
npx prettier --check .
# Run ESLint
npm run lint
# Run all validation checks
npm run validate
```
### IDE Integration
For the best experience, configure your IDE:
**VS Code** - Install extensions:
- Prettier - Code formatter
- ESLint
Add to `.vscode/settings.json`:
```json
{
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
}
}
```
## Troubleshooting
### "eslint --fix failed"
ESLint may fail on unfixable errors. Review the output and manually fix the issues.
### "prettier --write failed"
Check for syntax errors in the file that prevent parsing.
### Bypassing Hooks (Emergency)
In rare cases, you may need to bypass hooks:
```bash
git commit --no-verify -m "emergency fix"
```
Use sparingly - the CI pipeline will still catch formatting issues.

View File

@@ -0,0 +1,149 @@
# ADR-028: API Response Standardization and Envelope Pattern
**Date**: 2026-01-09
**Status**: Proposed
## Context
The API currently has inconsistent response formats across different endpoints:
1. Some endpoints return raw data arrays (`[{...}, {...}]`)
2. Some return wrapped objects (`{ data: [...] }`)
3. Pagination is handled inconsistently (some use `page`/`limit`, others use `offset`/`count`)
4. Error responses vary in structure between middleware and route handlers
5. No standard for including metadata (pagination info, request timing, etc.)
This inconsistency creates friction for:
- Frontend developers who must handle multiple response formats
- API documentation and client SDK generation
- Implementing consistent error handling across the application
- Future API versioning transitions
## Decision
We will adopt a standardized response envelope pattern for all API responses.
### Success Response Format
```typescript
interface ApiSuccessResponse<T> {
success: true;
data: T;
meta?: {
// Pagination (when applicable)
pagination?: {
page: number;
limit: number;
total: number;
totalPages: number;
hasNextPage: boolean;
hasPrevPage: boolean;
};
// Timing
requestId?: string;
timestamp?: string;
duration?: number;
};
}
```
### Error Response Format
```typescript
interface ApiErrorResponse {
success: false;
error: {
code: string; // Machine-readable error code (e.g., 'VALIDATION_ERROR')
message: string; // Human-readable message
details?: unknown; // Additional context (validation errors, etc.)
};
meta?: {
requestId?: string;
timestamp?: string;
};
}
```
### Implementation Approach
1. **Response Helper Functions**: Create utility functions in `src/utils/apiResponse.ts`:
- `sendSuccess(res, data, meta?)`
- `sendPaginated(res, data, pagination)`
- `sendError(res, code, message, details?, statusCode?)`
2. **Error Handler Integration**: Update `errorHandler.ts` to use the standard error format
3. **Gradual Migration**: Apply to new endpoints immediately, migrate existing endpoints incrementally
4. **TypeScript Types**: Export response types for frontend consumption
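A sketch of the proposed helpers, following the signatures listed above. This is one possible shape for `src/utils/apiResponse.ts`, not the final implementation:

```typescript
import { Response } from 'express';

export function sendSuccess<T>(res: Response, data: T, meta?: Record<string, unknown>) {
  return res.status(200).json({ success: true, data, ...(meta ? { meta } : {}) });
}

export function sendPaginated<T>(
  res: Response,
  data: T[],
  { page, limit, total }: { page: number; limit: number; total: number },
) {
  const totalPages = Math.ceil(total / limit);
  return res.status(200).json({
    success: true,
    data,
    meta: {
      pagination: {
        page,
        limit,
        total,
        totalPages,
        hasNextPage: page < totalPages,
        hasPrevPage: page > 1,
      },
    },
  });
}

export function sendError(
  res: Response,
  code: string,
  message: string,
  details?: unknown,
  statusCode = 400,
) {
  return res.status(statusCode).json({ success: false, error: { code, message, details } });
}
```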
## Consequences
### Positive
- **Consistency**: All responses follow a predictable structure
- **Type Safety**: Frontend can rely on consistent types
- **Debugging**: Request IDs and timestamps aid in issue investigation
- **Pagination**: Standardized pagination metadata reduces frontend complexity
- **API Evolution**: Envelope pattern makes it easier to add fields without breaking changes
### Negative
- **Verbosity**: Responses are slightly larger due to envelope overhead
- **Migration Effort**: Existing endpoints need updating
- **Learning Curve**: Developers must learn and use the helper functions
## Implementation Status
### What's Implemented
- ❌ Not yet implemented
### What Needs To Be Done
1. Create `src/utils/apiResponse.ts` with helper functions
2. Create `src/types/api.ts` with response type definitions
3. Update `errorHandler.ts` to use standard error format
4. Create migration guide for existing endpoints
5. Update 2-3 routes as examples
6. Document pattern in this ADR
## Example Usage
```typescript
// In a route handler
router.get('/flyers', async (req, res, next) => {
try {
const { page = 1, limit = 20 } = req.query;
const { flyers, total } = await flyerService.getFlyers({ page, limit });
return sendPaginated(res, flyers, {
page,
limit,
total,
});
} catch (error) {
next(error);
}
});
// Response:
// {
// "success": true,
// "data": [...],
// "meta": {
// "pagination": {
// "page": 1,
// "limit": 20,
// "total": 150,
// "totalPages": 8,
// "hasNextPage": true,
// "hasPrevPage": false
// },
// "requestId": "abc-123",
// "timestamp": "2026-01-09T12:00:00.000Z"
// }
// }
```

View File

@@ -0,0 +1,147 @@
# ADR-029: Secret Rotation and Key Management Strategy
**Date**: 2026-01-09
**Status**: Proposed
## Context
While ADR-007 covers configuration validation at startup, it does not address the lifecycle management of secrets:
1. **JWT Secrets**: If the JWT_SECRET is rotated, all existing user sessions are immediately invalidated
2. **Database Credentials**: No documented procedure for rotating database passwords without downtime
3. **API Keys**: External service API keys (AI services, geocoding) have no rotation strategy
4. **Emergency Revocation**: No process for immediately invalidating compromised credentials
Current risks:
- Long-lived secrets that never change become high-value targets
- No ability to rotate secrets without application restart
- No audit trail of when secrets were last rotated
- Compromised keys could remain active indefinitely
## Decision
We will implement a comprehensive secret rotation and key management strategy.
### 1. JWT Secret Rotation with Dual-Key Support
Support multiple JWT secrets simultaneously to enable zero-downtime rotation:
```typescript
import jwt from 'jsonwebtoken';

// Environment variables:
//   JWT_SECRET          - the current signing secret
//   JWT_SECRET_PREVIOUS - the old secret (optional, kept for the transition period)
// Token verification tries current first, falls back to previous
const verifyToken = (token: string) => {
try {
    return jwt.verify(token, process.env.JWT_SECRET!);
} catch {
if (process.env.JWT_SECRET_PREVIOUS) {
return jwt.verify(token, process.env.JWT_SECRET_PREVIOUS);
}
throw new AuthenticationError('Invalid token');
}
};
```
### 2. Database Credential Rotation
Document and implement a procedure for PostgreSQL credential rotation:
1. Create new database user with identical permissions
2. Update application configuration to use new credentials
3. Restart application instances (rolling restart)
4. Remove old database user after all instances updated
5. Log rotation event for audit purposes
### 3. API Key Management
For external service API keys (Google AI, geocoding services):
1. **Naming Convention**: `{SERVICE}_API_KEY` and `{SERVICE}_API_KEY_PREVIOUS`
2. **Fallback Logic**: Try primary key, fall back to previous on 401/403
3. **Health Checks**: Validate API keys on startup
4. **Usage Logging**: Track which key is being used for each request
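The fallback logic in item 2 could look roughly like the following. `withKeyFallback` is a hypothetical wrapper, and the way the HTTP status is read off the error is an assumption about the client library in use:

```typescript
async function withKeyFallback<T>(
  callWithKey: (apiKey: string) => Promise<T>,
  primaryKey: string,
  previousKey?: string,
): Promise<T> {
  try {
    return await callWithKey(primaryKey);
  } catch (err: any) {
    const status = err?.response?.status ?? err?.status;
    if (previousKey && (status === 401 || status === 403)) {
      // Log which key is in use so rotation progress is auditable.
      console.warn('Primary API key rejected; retrying with previous key');
      return callWithKey(previousKey);
    }
    throw err;
  }
}
```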
### 4. Emergency Revocation Procedures
Document emergency procedures for:
- **JWT Compromise**: Set new JWT_SECRET, clear all refresh tokens from database
- **Database Compromise**: Rotate credentials immediately, audit access logs
- **API Key Compromise**: Regenerate at provider, update environment, restart
### 5. Secret Audit Trail
Track secret lifecycle events:
- When secrets were last rotated
- Who initiated the rotation
- Which instances are using which secrets
## Implementation Approach
### Phase 1: Dual JWT Secret Support
- Modify token verification to support fallback secret
- Add JWT_SECRET_PREVIOUS to configuration schema
- Update documentation
### Phase 2: Rotation Scripts
- Create `scripts/rotate-jwt-secret.sh`
- Create `scripts/rotate-db-credentials.sh`
- Add rotation instructions to operations runbook
### Phase 3: API Key Fallback
- Wrap external API clients with fallback logic
- Add key validation to health checks
- Implement key usage logging
## Consequences
### Positive
- **Zero-Downtime Rotation**: Secrets can be rotated without invalidating all sessions
- **Reduced Risk**: Regular rotation limits exposure window for compromised credentials
- **Audit Trail**: Clear record of when secrets were changed
- **Emergency Response**: Documented procedures for security incidents
### Negative
- **Complexity**: Dual-key logic adds code complexity
- **Operations Overhead**: Regular rotation requires operational discipline
- **Testing**: Rotation procedures need to be tested periodically
## Implementation Status
### What's Implemented
- ❌ Not yet implemented
### What Needs To Be Done
1. Implement dual JWT secret verification
2. Create rotation scripts
3. Document emergency procedures
4. Add secret validation to health checks
5. Create rotation schedule recommendations
## Key Files (To Be Created)
- `src/utils/secretManager.ts` - Secret rotation utilities
- `scripts/rotate-jwt-secret.sh` - JWT rotation script
- `scripts/rotate-db-credentials.sh` - Database credential rotation
- `docs/operations/secret-rotation.md` - Operations runbook
## Rotation Schedule Recommendations
| Secret Type | Rotation Frequency | Grace Period |
| ------------------ | -------------------------- | ----------------- |
| JWT_SECRET | 90 days | 7 days (dual-key) |
| Database Passwords | 180 days | Rolling restart |
| AI API Keys | On suspicion of compromise | Immediate |
| Refresh Tokens | 7-day max age | N/A (per-token) |

View File

@@ -0,0 +1,150 @@
# ADR-030: Graceful Degradation and Circuit Breaker Pattern
**Date**: 2026-01-09
**Status**: Proposed
## Context
The application depends on several external services:
1. **AI Services** (Google Gemini) - For flyer item extraction
2. **Redis** - For caching, rate limiting, and job queues
3. **PostgreSQL** - Primary data store
4. **Geocoding APIs** - For location services
Currently, when these services fail:
- AI failures may cause the entire upload to fail
- Redis unavailability could crash the application or bypass rate limiting
- No circuit breakers prevent repeated calls to failing services
- No fallback behaviors are defined
This creates fragility where a single service outage can cascade into application-wide failures.
## Decision
We will implement a graceful degradation strategy with circuit breakers for external service dependencies.
### 1. Circuit Breaker Pattern
Implement circuit breakers for external service calls using a library like `opossum`:
```typescript
import CircuitBreaker from 'opossum';
const aiCircuitBreaker = new CircuitBreaker(callAiService, {
timeout: 30000, // 30 second timeout
errorThresholdPercentage: 50, // Open circuit at 50% failures
resetTimeout: 30000, // Try again after 30 seconds
volumeThreshold: 5, // Minimum calls before calculating error %
});
aiCircuitBreaker.on('open', () => {
logger.warn('AI service circuit breaker opened');
});
aiCircuitBreaker.on('halfOpen', () => {
logger.info('AI service circuit breaker half-open, testing...');
});
```
### 2. Fallback Behaviors by Service
| Service | Fallback Behavior |
| ---------------------- | ---------------------------------------- |
| **Redis (Cache)** | Skip cache, query database directly |
| **Redis (Rate Limit)** | Log warning, allow request (fail-open) |
| **Redis (Queues)** | Queue to memory, process synchronously |
| **AI Service** | Return partial results, queue for retry |
| **Geocoding** | Return null location, allow manual entry |
| **PostgreSQL** | No fallback - critical dependency |
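For the cache row, a fail-open wrapper in the spirit of the existing `cacheService.server.ts` might look like this. A sketch using `ioredis`; the TTL and error handling are assumptions:

```typescript
import Redis from 'ioredis';

const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

// On any cache error, fall through to the database so a Redis outage
// degrades performance rather than availability.
async function getCached<T>(key: string, loadFromDb: () => Promise<T>): Promise<T> {
  try {
    const hit = await redis.get(key);
    if (hit !== null) return JSON.parse(hit) as T;
  } catch (err) {
    console.warn('Redis unavailable, skipping cache:', err);
  }
  const value = await loadFromDb();
  try {
    // Best-effort write-back with a 5 minute TTL; failures are ignored.
    await redis.set(key, JSON.stringify(value), 'EX', 300);
  } catch {
    /* cache writes are optional in degraded mode */
  }
  return value;
}
```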
### 3. Health Status Aggregation
Extend health checks (ADR-020) to report service-level health:
```typescript
// GET /api/health/ready response
{
"status": "degraded", // healthy | degraded | unhealthy
"services": {
"database": { "status": "healthy", "latency": 5 },
"redis": { "status": "healthy", "latency": 2 },
"ai": { "status": "degraded", "circuitState": "half-open" },
"geocoding": { "status": "healthy", "latency": 150 }
}
}
```
### 4. Retry Strategies
Define retry policies for transient failures:
```typescript
const retryConfig = {
ai: { maxRetries: 3, backoff: 'exponential', initialDelay: 1000 },
geocoding: { maxRetries: 2, backoff: 'linear', initialDelay: 500 },
database: { maxRetries: 3, backoff: 'exponential', initialDelay: 100 },
};
```
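A generic helper consuming this config could be sketched as follows; `retry` is a hypothetical utility, not an existing one in the codebase:

```typescript
async function retry<T>(
  fn: () => Promise<T>,
  opts: { maxRetries: number; backoff: 'exponential' | 'linear'; initialDelay: number },
): Promise<T> {
  let lastError: unknown;
  // One initial attempt plus up to maxRetries retries.
  for (let attempt = 0; attempt <= opts.maxRetries; attempt++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err;
      if (attempt === opts.maxRetries) break;
      const delay =
        opts.backoff === 'exponential'
          ? opts.initialDelay * 2 ** attempt
          : opts.initialDelay * (attempt + 1);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
  throw lastError;
}
```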
## Implementation Approach
### Phase 1: Redis Fallbacks
- Wrap cache operations with try-catch (already partially done in cacheService)
- Add fail-open for rate limiting when Redis is down
- Log degraded state
### Phase 2: AI Circuit Breaker
- Wrap AI service calls with circuit breaker
- Implement queue-for-retry on circuit open
- Add manual fallback UI for failed extractions
### Phase 3: Health Aggregation
- Update health endpoints with service status
- Add Prometheus-compatible metrics
- Create dashboard for service health
## Consequences
### Positive
- **Resilience**: Application continues functioning during partial outages
- **User Experience**: Degraded but functional is better than complete failure
- **Observability**: Clear visibility into service health
- **Protection**: Circuit breakers prevent cascading failures
### Negative
- **Complexity**: Additional code for fallback logic
- **Testing**: Requires testing failure scenarios
- **Consistency**: Some operations may have different results during degradation
## Implementation Status
### What's Implemented
- ✅ Cache operations fail gracefully (cacheService.server.ts)
- ❌ Circuit breakers for AI services
- ❌ Rate limit fail-open behavior
- ❌ Health aggregation endpoint
- ❌ Retry strategies with backoff
### What Needs To Be Done
1. Install and configure `opossum` circuit breaker library
2. Wrap AI service calls with circuit breaker
3. Add fail-open to rate limiting
4. Extend health endpoints with service status
5. Document degraded mode behaviors
## Key Files
- `src/utils/circuitBreaker.ts` - Circuit breaker configurations (to create)
- `src/services/cacheService.server.ts` - Already has graceful fallbacks
- `src/routes/health.routes.ts` - Health check endpoints (to extend)
- `src/services/aiService.server.ts` - AI service wrapper (to wrap)

View File

@@ -0,0 +1,199 @@
# ADR-031: Data Retention and Privacy Compliance (GDPR/CCPA)
**Date**: 2026-01-09
**Status**: Proposed
## Context
The application stores various types of user data:
1. **User Accounts**: Email, password hash, profile information
2. **Shopping Lists**: Personal shopping preferences and history
3. **Watch Lists**: Tracked items and price alerts
4. **Activity Logs**: User actions for analytics and debugging
5. **Tracking Data**: Page views, interactions, feature usage
Current gaps in privacy compliance:
- **No Data Retention Policies**: Activity logs accumulate indefinitely
- **No User Data Export**: Users cannot export their data (GDPR Article 20)
- **No User Data Deletion**: No self-service account deletion (GDPR Article 17)
- **No Cookie Consent**: Cookie usage not disclosed or consented
- **No Privacy Policy Enforcement**: Privacy commitments not enforced in code
These gaps create legal exposure for users in EU (GDPR) and California (CCPA).
## Decision
We will implement comprehensive data retention and privacy compliance features.
### 1. Data Retention Policies
| Data Type | Retention Period | Deletion Method |
| ------------------------- | ------------------------ | ------------------------ |
| **Activity Logs** | 90 days | Automated cleanup job |
| **Tracking Events** | 30 days | Automated cleanup job |
| **Deleted User Data** | 30 days (soft delete) | Hard delete after period |
| **Expired Sessions** | 7 days after expiry | Token cleanup job |
| **Failed Login Attempts** | 24 hours | Automated cleanup |
| **Flyer Data** | Indefinite (public data) | N/A |
| **User Shopping Lists** | Until account deletion | With account |
| **User Watch Lists** | Until account deletion | With account |
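As an illustration of the automated cleanup jobs, the 90-day activity-log policy could be implemented roughly like this; the table and column names are assumptions:

```typescript
import { Pool } from 'pg';

const pool = new Pool(); // connection settings come from the environment

// Delete activity-log rows older than the retention window and report the count.
async function cleanupActivityLogs(): Promise<number> {
  const result = await pool.query(
    `DELETE FROM activity_log WHERE created_at < NOW() - INTERVAL '90 days'`,
  );
  return result.rowCount ?? 0;
}
```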
### 2. User Data Export (Right to Portability)
Implement `GET /api/users/me/export` endpoint:
```typescript
interface UserDataExport {
exportDate: string;
user: {
email: string;
created_at: string;
profile: ProfileData;
};
shoppingLists: ShoppingList[];
watchedItems: WatchedItem[];
priceAlerts: PriceAlert[];
achievements: Achievement[];
// Exclude: password hash, internal IDs, admin flags
}
```
Export formats: JSON (primary), CSV (optional)
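A sketch of the export route; `privacyService` is the proposed (not yet existing) service boundary, and the authenticated-user shape on the request is an assumption:

```typescript
import { Router, Request, Response, NextFunction } from 'express';

// Hypothetical service boundary (the ADR proposes privacyService.server.ts).
declare const privacyService: { exportUserData(userId: number): Promise<unknown> };

const router = Router();

router.get('/users/me/export', async (req: Request, res: Response, next: NextFunction) => {
  try {
    const userId = (req as Request & { user: { id: number } }).user.id;
    const exportData = await privacyService.exportUserData(userId);
    // Serve as a download so users get a portable copy of their data.
    res.setHeader('Content-Disposition', 'attachment; filename="user-data-export.json"');
    res.json(exportData);
  } catch (err) {
    next(err);
  }
});
```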
### 3. User Data Deletion (Right to Erasure)
Implement `DELETE /api/users/me` endpoint:
1. **Soft Delete**: Mark account as deleted, anonymize PII
2. **Grace Period**: 30 days to restore account
3. **Hard Delete**: Permanently remove all user data after grace period
4. **Audit Log**: Record deletion request (anonymized)
Deletion cascade:
- User account → Anonymize email/name
- Shopping lists → Delete
- Watch lists → Delete
- Achievements → Delete
- Activity logs → Anonymize user_id
- Sessions/tokens → Delete immediately
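The anonymization step of this cascade might be sketched like so; table and column names are assumptions. PII is overwritten in place so the row can sit out the 30-day grace period before the hard-delete job removes it:

```typescript
import { Pool } from 'pg';
import { randomUUID } from 'node:crypto';

async function softDeleteUser(pool: Pool, userId: number): Promise<void> {
  await pool.query(
    `UPDATE users
        SET email = $1,
            display_name = 'Deleted User',
            deleted_at = NOW()
      WHERE id = $2`,
    [`deleted-${randomUUID()}@example.invalid`, userId],
  );
}
```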
### 4. Cookie Consent
Implement cookie consent banner:
```typescript
// Cookie categories
enum CookieCategory {
ESSENTIAL = 'essential', // Always allowed (auth, CSRF)
FUNCTIONAL = 'functional', // Dark mode, preferences
ANALYTICS = 'analytics', // Usage tracking
}
// Store consent in localStorage and server-side
interface CookieConsent {
essential: true; // Cannot be disabled
functional: boolean;
analytics: boolean;
consentDate: string;
consentVersion: string;
}
```
### 5. Privacy Policy Enforcement
Enforce privacy commitments in code:
- Email addresses never logged in plaintext
- Passwords never logged (already in pino redact config)
- IP addresses anonymized after 7 days
- Third-party data sharing requires explicit consent
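The IP anonymization rule could be implemented with a small utility like this sketch; the truncation widths (last IPv4 octet, 48-bit IPv6 prefix) are assumptions modeled on common analytics practice, and a production version would use a proper IP-parsing library:

```typescript
function anonymizeIp(ip: string): string {
  if (ip.includes(':')) {
    // IPv6 (simplified): keep the first three groups, zero the rest.
    return ip.split(':').slice(0, 3).join(':') + '::';
  }
  // IPv4: zero the final octet.
  const octets = ip.split('.');
  octets[3] = '0';
  return octets.join('.');
}
```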
## Implementation Approach
### Phase 1: Data Retention Jobs
- Create retention cleanup job in background job service
- Add activity_log retention (90 days)
- Add tracking_events retention (30 days)
### Phase 2: User Data Export
- Create export endpoint
- Implement data aggregation query
- Add rate limiting (1 export per 24h)
### Phase 3: Account Deletion
- Implement soft delete with anonymization
- Create hard delete cleanup job
- Add account recovery endpoint
### Phase 4: Cookie Consent
- Create consent banner component
- Store consent preferences
- Gate analytics based on consent
## Consequences
### Positive
- **Legal Compliance**: Meets GDPR and CCPA requirements
- **User Trust**: Demonstrates commitment to privacy
- **Data Hygiene**: Automatic cleanup prevents data bloat
- **Reduced Liability**: Less data = less risk
### Negative
- **Implementation Effort**: Significant feature development
- **Operational Complexity**: Deletion jobs need monitoring
- **Feature Limitations**: Some features may be limited without consent
## Implementation Status
### What's Implemented
- ✅ Token cleanup job exists (tokenCleanupQueue)
- ❌ Activity log retention
- ❌ User data export endpoint
- ❌ Account deletion endpoint
- ❌ Cookie consent banner
- ❌ Data anonymization functions
### What Needs To Be Done
1. Add activity_log cleanup to background jobs
2. Create `/api/users/me/export` endpoint
3. Create `/api/users/me` DELETE endpoint with soft delete
4. Implement cookie consent UI component
5. Document data retention in privacy policy
6. Add anonymization utility functions
## Key Files (To Be Created/Modified)
- `src/services/backgroundJobService.ts` - Add retention jobs
- `src/routes/user.routes.ts` - Add export/delete endpoints
- `src/services/privacyService.server.ts` - Data export/deletion logic
- `src/components/CookieConsent.tsx` - Consent banner
- `src/utils/anonymize.ts` - Data anonymization utilities
## Compliance Checklist
### GDPR Requirements
- [ ] Article 15: Right of Access (data export)
- [ ] Article 17: Right to Erasure (account deletion)
- [ ] Article 20: Right to Data Portability (JSON export)
- [ ] Article 7: Conditions for Consent (cookie consent)
- [ ] Article 13: Information to be Provided (privacy policy)
### CCPA Requirements
- [ ] Right to Know (data export)
- [ ] Right to Delete (account deletion)
- [ ] Right to Opt-Out (cookie consent for analytics)
- [ ] Non-Discrimination (no feature penalty for privacy choices)

View File

@@ -4,49 +4,55 @@ This directory contains a log of the architectural decisions made for the Flyer
## 1. Foundational / Core Infrastructure
-**[ADR-002](./0002-standardized-transaction-management.md)**: Standardized Transaction Management and Unit of Work Pattern (Proposed)
+**[ADR-002](./0002-standardized-transaction-management.md)**: Standardized Transaction Management and Unit of Work Pattern (Accepted)
-**[ADR-007](./0007-configuration-and-secrets-management.md)**: Configuration and Secrets Management (Proposed)
+**[ADR-007](./0007-configuration-and-secrets-management.md)**: Configuration and Secrets Management (Accepted)
-**[ADR-020](./0020-health-checks-and-liveness-readiness-probes.md)**: Health Checks and Liveness/Readiness Probes (Proposed)
+**[ADR-020](./0020-health-checks-and-liveness-readiness-probes.md)**: Health Checks and Liveness/Readiness Probes (Accepted)
+**[ADR-030](./0030-graceful-degradation-and-circuit-breaker.md)**: Graceful Degradation and Circuit Breaker Pattern (Proposed)
## 2. Data Management
-**[ADR-009](./0009-caching-strategy-for-read-heavy-operations.md)**: Caching Strategy for Read-Heavy Operations (Proposed)
+**[ADR-009](./0009-caching-strategy-for-read-heavy-operations.md)**: Caching Strategy for Read-Heavy Operations (Partially Implemented)
**[ADR-013](./0013-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
**[ADR-019](./0019-data-backup-and-recovery-strategy.md)**: Data Backup and Recovery Strategy (Proposed)
**[ADR-023](./0023-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
+**[ADR-031](./0031-data-retention-and-privacy-compliance.md)**: Data Retention and Privacy Compliance (Proposed)
## 3. API & Integration
-**[ADR-003](./0003-standardized-input-validation-using-middleware.md)**: Standardized Input Validation using Middleware (Proposed)
+**[ADR-003](./0003-standardized-input-validation-using-middleware.md)**: Standardized Input Validation using Middleware (Accepted)
**[ADR-008](./0008-api-versioning-strategy.md)**: API Versioning Strategy (Proposed)
**[ADR-018](./0018-api-documentation-strategy.md)**: API Documentation Strategy (Proposed)
**[ADR-022](./0022-real-time-notification-system.md)**: Real-time Notification System (Proposed)
+**[ADR-028](./0028-api-response-standardization.md)**: API Response Standardization and Envelope Pattern (Proposed)
## 4. Security & Compliance
**[ADR-001](./0001-standardized-error-handling.md)**: Standardized Error Handling for Service and Repository Layers (Accepted)
**[ADR-011](./0011-advanced-authorization-and-access-control-strategy.md)**: Advanced Authorization and Access Control Strategy (Proposed)
-**[ADR-016](./0016-api-security-hardening.md)**: API Security Hardening (Proposed)
+**[ADR-016](./0016-api-security-hardening.md)**: API Security Hardening (Accepted)
+**[ADR-029](./0029-secret-rotation-and-key-management.md)**: Secret Rotation and Key Management Strategy (Proposed)
## 5. Observability & Monitoring
-**[ADR-004](./0004-standardized-application-wide-structured-logging.md)**: Standardized Application-Wide Structured Logging (Proposed)
+**[ADR-004](./0004-standardized-application-wide-structured-logging.md)**: Standardized Application-Wide Structured Logging (Accepted)
**[ADR-015](./0015-application-performance-monitoring-and-error-tracking.md)**: Application Performance Monitoring (APM) and Error Tracking (Proposed)
## 6. Deployment & Operations
-**[ADR-006](./0006-background-job-processing-and-task-queues.md)**: Background Job Processing and Task Queues (Proposed)
+**[ADR-006](./0006-background-job-processing-and-task-queues.md)**: Background Job Processing and Task Queues (Partially Implemented)
**[ADR-014](./0014-containerization-and-deployment-strategy.md)**: Containerization and Deployment Strategy (Proposed)
**[ADR-017](./0017-ci-cd-and-branching-strategy.md)**: CI/CD and Branching Strategy (Proposed)
**[ADR-024](./0024-feature-flagging-strategy.md)**: Feature Flagging Strategy (Proposed)
## 7. Frontend / User Interface
-**[ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md)**: Frontend State Management and Server Cache Strategy (Proposed)
+**[ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md)**: Frontend State Management and Server Cache Strategy (Accepted)
-**[ADR-012](./0012-frontend-component-library-and-design-system.md)**: Frontend Component Library and Design System (Proposed)
+**[ADR-012](./0012-frontend-component-library-and-design-system.md)**: Frontend Component Library and Design System (Partially Implemented)
**[ADR-025](./0025-internationalization-and-localization-strategy.md)**: Internationalization (i18n) and Localization (l10n) Strategy (Proposed)
+**[ADR-026](./0026-standardized-client-side-structured-logging.md)**: Standardized Client-Side Structured Logging (Proposed)
## 8. Development Workflow & Quality
-**[ADR-010](./0010-testing-strategy-and-standards.md)**: Testing Strategy and Standards (Proposed)
+**[ADR-010](./0010-testing-strategy-and-standards.md)**: Testing Strategy and Standards (Accepted)
-**[ADR-021](./0021-code-formatting-and-linting-unification.md)**: Code Formatting and Linting Unification (Proposed)
+**[ADR-021](./0021-code-formatting-and-linting-unification.md)**: Code Formatting and Linting Unification (Accepted)
+**[ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md)**: Standardized Naming Convention for AI and Database Types (Accepted)

View File

@@ -3,6 +3,7 @@ import tseslint from 'typescript-eslint';
import pluginReact from 'eslint-plugin-react';
import pluginReactHooks from 'eslint-plugin-react-hooks';
import pluginReactRefresh from 'eslint-plugin-react-refresh';
import eslintConfigPrettier from 'eslint-config-prettier';
export default tseslint.config(
  {
@@ -29,4 +30,6 @@ export default tseslint.config(
  },
  // TypeScript files
  ...tseslint.configs.recommended,
  // Prettier compatibility - must be last to override other formatting rules
  eslintConfigPrettier,
);

package-lock.json generated
View File

@@ -93,8 +93,10 @@
"eslint-plugin-react-refresh": "^0.4.24", "eslint-plugin-react-refresh": "^0.4.24",
"glob": "^13.0.0", "glob": "^13.0.0",
"globals": "16.5.0", "globals": "16.5.0",
"husky": "^9.1.7",
"istanbul-reports": "^3.2.0", "istanbul-reports": "^3.2.0",
"jsdom": "^27.2.0", "jsdom": "^27.2.0",
"lint-staged": "^16.2.7",
"msw": "^2.12.3", "msw": "^2.12.3",
"nyc": "^17.1.0", "nyc": "^17.1.0",
"pino-pretty": "^13.1.3", "pino-pretty": "^13.1.3",
@@ -6145,6 +6147,22 @@
"url": "https://github.com/sponsors/epoberezkin" "url": "https://github.com/sponsors/epoberezkin"
} }
}, },
"node_modules/ansi-escapes": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz",
"integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==",
"dev": true,
"license": "MIT",
"dependencies": {
"environment": "^1.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/ansi-regex": { "node_modules/ansi-regex": {
"version": "5.0.1", "version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
@@ -6953,6 +6971,19 @@
"balanced-match": "^1.0.0" "balanced-match": "^1.0.0"
} }
}, },
"node_modules/braces": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"license": "MIT",
"dependencies": {
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/browserslist": { "node_modules/browserslist": {
"version": "4.28.1", "version": "4.28.1",
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz",
@@ -7287,6 +7318,85 @@
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/cli-cursor": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz",
"integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==",
"dev": true,
"license": "MIT",
"dependencies": {
"restore-cursor": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.1.1.tgz",
"integrity": "sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==",
"dev": true,
"license": "MIT",
"dependencies": {
"slice-ansi": "^7.1.0",
"string-width": "^8.0.0"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate/node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/cli-truncate/node_modules/string-width": {
"version": "8.1.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
"integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
"dev": true,
"license": "MIT",
"dependencies": {
"get-east-asian-width": "^1.3.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate/node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/cli-width": { "node_modules/cli-width": {
"version": "4.1.0", "version": "4.1.0",
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz",
@@ -7395,6 +7505,16 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/commander": {
"version": "14.0.2",
"resolved": "https://registry.npmjs.org/commander/-/commander-14.0.2.tgz",
"integrity": "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=20"
}
},
"node_modules/commondir": { "node_modules/commondir": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
@@ -8345,6 +8465,19 @@
"url": "https://github.com/fb55/entities?sponsor=1" "url": "https://github.com/fb55/entities?sponsor=1"
} }
}, },
"node_modules/environment": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz",
"integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/es-abstract": { "node_modules/es-abstract": {
"version": "1.24.1", "version": "1.24.1",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.1.tgz", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.1.tgz",
@@ -9293,6 +9426,19 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/fill-range": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"license": "MIT",
"dependencies": {
"to-regex-range": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/finalhandler": { "node_modules/finalhandler": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz",
@@ -9817,6 +9963,19 @@
"node": "6.* || 8.* || >= 10.*" "node": "6.* || 8.* || >= 10.*"
} }
}, },
"node_modules/get-east-asian-width": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
"integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/get-intrinsic": { "node_modules/get-intrinsic": {
"version": "1.3.0", "version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
@@ -10317,6 +10476,22 @@
"node": ">= 6" "node": ">= 6"
} }
}, },
"node_modules/husky": {
"version": "9.1.7",
"resolved": "https://registry.npmjs.org/husky/-/husky-9.1.7.tgz",
"integrity": "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==",
"dev": true,
"license": "MIT",
"bin": {
"husky": "bin.js"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/typicode"
}
},
"node_modules/iconv-lite": { "node_modules/iconv-lite": {
"version": "0.7.1", "version": "0.7.1",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.1.tgz", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.1.tgz",
@@ -10730,6 +10905,16 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/is-number": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.12.0"
}
},
"node_modules/is-number-object": { "node_modules/is-number-object": {
"version": "1.1.1", "version": "1.1.1",
"resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz",
@@ -11765,6 +11950,134 @@
"url": "https://opencollective.com/parcel" "url": "https://opencollective.com/parcel"
} }
}, },
"node_modules/lint-staged": {
"version": "16.2.7",
"resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-16.2.7.tgz",
"integrity": "sha512-lDIj4RnYmK7/kXMya+qJsmkRFkGolciXjrsZ6PC25GdTfWOAWetR0ZbsNXRAj1EHHImRSalc+whZFg56F5DVow==",
"dev": true,
"license": "MIT",
"dependencies": {
"commander": "^14.0.2",
"listr2": "^9.0.5",
"micromatch": "^4.0.8",
"nano-spawn": "^2.0.0",
"pidtree": "^0.6.0",
"string-argv": "^0.3.2",
"yaml": "^2.8.1"
},
"bin": {
"lint-staged": "bin/lint-staged.js"
},
"engines": {
"node": ">=20.17"
},
"funding": {
"url": "https://opencollective.com/lint-staged"
}
},
"node_modules/listr2": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.5.tgz",
"integrity": "sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==",
"dev": true,
"license": "MIT",
"dependencies": {
"cli-truncate": "^5.0.0",
"colorette": "^2.0.20",
"eventemitter3": "^5.0.1",
"log-update": "^6.1.0",
"rfdc": "^1.4.1",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=20.0.0"
}
},
"node_modules/listr2/node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/listr2/node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/listr2/node_modules/emoji-regex": {
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"dev": true,
"license": "MIT"
},
"node_modules/listr2/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/listr2/node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/listr2/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/locate-path": { "node_modules/locate-path": {
"version": "6.0.0", "version": "6.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
@@ -11862,6 +12175,111 @@
"integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/log-update": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz",
"integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-escapes": "^7.0.0",
"cli-cursor": "^5.0.0",
"slice-ansi": "^7.1.0",
"strip-ansi": "^7.1.0",
"wrap-ansi": "^9.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/ansi-regex": {
"version": "6.2.2",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
"integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
"node_modules/log-update/node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/log-update/node_modules/emoji-regex": {
"version": "10.6.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
"integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"dev": true,
"license": "MIT"
},
"node_modules/log-update/node_modules/string-width": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
"integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"emoji-regex": "^10.3.0",
"get-east-asian-width": "^1.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/log-update/node_modules/strip-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
"integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-regex": "^6.0.1"
},
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
}
},
"node_modules/log-update/node_modules/wrap-ansi": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
"integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"string-width": "^7.0.0",
"strip-ansi": "^7.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/long": { "node_modules/long": {
"version": "5.3.2", "version": "5.3.2",
"resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz",
@@ -12014,6 +12432,33 @@
"node": ">= 0.6" "node": ">= 0.6"
} }
}, },
"node_modules/micromatch": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
"dev": true,
"license": "MIT",
"dependencies": {
"braces": "^3.0.3",
"picomatch": "^2.3.1"
},
"engines": {
"node": ">=8.6"
}
},
"node_modules/micromatch/node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8.6"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/mime": { "node_modules/mime": {
"version": "2.6.0", "version": "2.6.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz", "resolved": "https://registry.npmjs.org/mime/-/mime-2.6.0.tgz",
@@ -12052,6 +12497,19 @@
"url": "https://opencollective.com/express" "url": "https://opencollective.com/express"
} }
}, },
"node_modules/mimic-function": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz",
"integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/min-indent": { "node_modules/min-indent": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz", "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz",
@@ -12330,6 +12788,19 @@
"license": "MIT", "license": "MIT",
"optional": true "optional": true
}, },
"node_modules/nano-spawn": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-2.0.0.tgz",
"integrity": "sha512-tacvGzUY5o2D8CBh2rrwxyNojUsZNU2zjNTzKQrkgGJQTbGAfArVWXSKMBokBeeg6C7OLRGUEyoFlYbfeWQIqw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=20.17"
},
"funding": {
"url": "https://github.com/sindresorhus/nano-spawn?sponsor=1"
}
},
"node_modules/nanoid": { "node_modules/nanoid": {
"version": "3.3.11", "version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
@@ -12963,6 +13434,22 @@
"wrappy": "1" "wrappy": "1"
} }
}, },
"node_modules/onetime": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz",
"integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"mimic-function": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/optionator": { "node_modules/optionator": {
"version": "0.9.4", "version": "0.9.4",
"resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
@@ -13418,6 +13905,19 @@
"url": "https://github.com/sponsors/jonschlinkert" "url": "https://github.com/sponsors/jonschlinkert"
} }
}, },
"node_modules/pidtree": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.6.0.tgz",
"integrity": "sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==",
"dev": true,
"license": "MIT",
"bin": {
"pidtree": "bin/pidtree.js"
},
"engines": {
"node": ">=0.10"
}
},
"node_modules/piexifjs": { "node_modules/piexifjs": {
"version": "1.0.6", "version": "1.0.6",
"resolved": "https://registry.npmjs.org/piexifjs/-/piexifjs-1.0.6.tgz", "resolved": "https://registry.npmjs.org/piexifjs/-/piexifjs-1.0.6.tgz",
@@ -14368,6 +14868,23 @@
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
} }
}, },
"node_modules/restore-cursor": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz",
"integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==",
"dev": true,
"license": "MIT",
"dependencies": {
"onetime": "^7.0.0",
"signal-exit": "^4.1.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/retry": { "node_modules/retry": {
"version": "0.12.0", "version": "0.12.0",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz",
@@ -14385,6 +14902,13 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/rfdc": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
"integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
"dev": true,
"license": "MIT"
},
"node_modules/rimraf": { "node_modules/rimraf": {
"version": "6.1.2", "version": "6.1.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.1.2.tgz", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.1.2.tgz",
@@ -14967,6 +15491,52 @@
"node": ">=18" "node": ">=18"
} }
}, },
"node_modules/slice-ansi": {
"version": "7.1.2",
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.2.tgz",
"integrity": "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^6.2.1",
"is-fullwidth-code-point": "^5.0.0"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
"node_modules/slice-ansi/node_modules/ansi-styles": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
"integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/slice-ansi/node_modules/is-fullwidth-code-point": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz",
"integrity": "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"get-east-asian-width": "^1.3.1"
},
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/sonic-boom": { "node_modules/sonic-boom": {
"version": "4.2.0", "version": "4.2.0",
"resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz",
@@ -15240,6 +15810,16 @@
"safe-buffer": "~5.2.0" "safe-buffer": "~5.2.0"
} }
}, },
"node_modules/string-argv": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/string-argv/-/string-argv-0.3.2.tgz",
"integrity": "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.6.19"
}
},
"node_modules/string-width": { "node_modules/string-width": {
"version": "4.2.3", "version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
@@ -15800,6 +16380,19 @@
"node": ">=14.14" "node": ">=14.14"
} }
}, },
"node_modules/to-regex-range": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"is-number": "^7.0.0"
},
"engines": {
"node": ">=8.0"
}
},
"node_modules/toidentifier": { "node_modules/toidentifier": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",


@@ -24,7 +24,8 @@
"start:test": "NODE_ENV=test NODE_V8_COVERAGE=.coverage/tmp/integration-server tsx server.ts", "start:test": "NODE_ENV=test NODE_V8_COVERAGE=.coverage/tmp/integration-server tsx server.ts",
"db:reset:dev": "NODE_ENV=development tsx src/db/seed.ts", "db:reset:dev": "NODE_ENV=development tsx src/db/seed.ts",
"db:reset:test": "NODE_ENV=test tsx src/db/seed.ts", "db:reset:test": "NODE_ENV=test tsx src/db/seed.ts",
"worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts" "worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts",
"prepare": "husky"
}, },
"dependencies": { "dependencies": {
"@bull-board/api": "^6.14.2", "@bull-board/api": "^6.14.2",
@@ -112,8 +113,10 @@
"eslint-plugin-react-refresh": "^0.4.24", "eslint-plugin-react-refresh": "^0.4.24",
"glob": "^13.0.0", "glob": "^13.0.0",
"globals": "16.5.0", "globals": "16.5.0",
"husky": "^9.1.7",
"istanbul-reports": "^3.2.0", "istanbul-reports": "^3.2.0",
"jsdom": "^27.2.0", "jsdom": "^27.2.0",
"lint-staged": "^16.2.7",
"msw": "^2.12.3", "msw": "^2.12.3",
"nyc": "^17.1.0", "nyc": "^17.1.0",
"pino-pretty": "^13.1.3", "pino-pretty": "^13.1.3",

src/config/env.ts Normal file

@@ -0,0 +1,303 @@
// src/config/env.ts
/**
* @file Centralized, schema-validated configuration service.
* Implements ADR-007: Configuration and Secrets Management.
*
* This module parses and validates all environment variables at application startup.
* If any required configuration is missing or invalid, the application will fail fast
* with a clear error message.
*
* Usage:
* import { config } from './config/env';
* console.log(config.database.host);
*/
import { z } from 'zod';
// --- Schema Definitions ---
/**
* Helper to parse string to integer with default.
* Handles empty strings by treating them as undefined.
*/
const intWithDefault = (defaultValue: number) =>
z
.string()
.optional()
.transform((val) => (val && val.trim() !== '' ? parseInt(val, 10) : defaultValue))
.pipe(z.number().int());
/**
* Helper to parse string to float with default.
*/
const floatWithDefault = (defaultValue: number) =>
z
.string()
.optional()
.transform((val) => (val && val.trim() !== '' ? parseFloat(val) : defaultValue))
.pipe(z.number());
/**
* Helper to parse string 'true'/'false' to boolean.
*/
const booleanString = (defaultValue: boolean) =>
z
.string()
.optional()
.transform((val) => (val === undefined ? defaultValue : val === 'true'));
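// Illustrative behaviour of the three helpers above (assumed examples, not exhaustive):
//   intWithDefault(5432).parse(undefined)  // -> 5432 (missing var falls back to the default)
//   intWithDefault(5432).parse('6543')     // -> 6543
//   floatWithDefault(0.5).parse('')        // -> 0.5  (empty string is treated as unset)
//   booleanString(false).parse('true')     // -> true (any other string parses to false)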
/**
* Database configuration schema.
*/
const databaseSchema = z.object({
host: z.string().min(1, 'DB_HOST is required'),
port: intWithDefault(5432),
user: z.string().min(1, 'DB_USER is required'),
password: z.string().min(1, 'DB_PASSWORD is required'),
name: z.string().min(1, 'DB_NAME is required'),
});
/**
* Redis configuration schema.
*/
const redisSchema = z.object({
url: z.string().url('REDIS_URL must be a valid URL'),
password: z.string().optional(),
});
/**
* Authentication configuration schema.
*/
const authSchema = z.object({
jwtSecret: z.string().min(32, 'JWT_SECRET must be at least 32 characters for security'),
jwtSecretPrevious: z.string().optional(), // For secret rotation (ADR-029)
});
/**
* SMTP/Email configuration schema.
* All fields are optional - email service degrades gracefully if not configured.
*/
const smtpSchema = z.object({
host: z.string().optional(),
port: intWithDefault(587),
user: z.string().optional(),
pass: z.string().optional(),
secure: booleanString(false),
fromEmail: z.string().email().optional(),
});
/**
* AI/Gemini configuration schema.
*/
const aiSchema = z.object({
geminiApiKey: z.string().optional(),
geminiRpm: intWithDefault(5),
priceQualityThreshold: floatWithDefault(0.5),
});
/**
* Google services configuration schema.
*/
const googleSchema = z.object({
mapsApiKey: z.string().optional(),
clientId: z.string().optional(),
clientSecret: z.string().optional(),
});
/**
* Worker concurrency configuration schema.
*/
const workerSchema = z.object({
concurrency: intWithDefault(1),
lockDuration: intWithDefault(30000),
emailConcurrency: intWithDefault(10),
analyticsConcurrency: intWithDefault(1),
cleanupConcurrency: intWithDefault(10),
weeklyAnalyticsConcurrency: intWithDefault(1),
});
/**
* Server configuration schema.
*/
const serverSchema = z.object({
nodeEnv: z.enum(['development', 'production', 'test']).default('development'),
port: intWithDefault(3001),
frontendUrl: z.string().url().optional(),
baseUrl: z.string().optional(),
storagePath: z.string().default('/var/www/flyer-crawler.projectium.com/flyer-images'),
});
/**
* Complete environment configuration schema.
*/
const envSchema = z.object({
database: databaseSchema,
redis: redisSchema,
auth: authSchema,
smtp: smtpSchema,
ai: aiSchema,
google: googleSchema,
worker: workerSchema,
server: serverSchema,
});
export type EnvConfig = z.infer<typeof envSchema>;
// --- Configuration Loading ---
/**
* Maps environment variables to the configuration structure.
* This is the single source of truth for which env vars map to which config keys.
*/
function loadEnvVars(): unknown {
return {
database: {
host: process.env.DB_HOST,
port: process.env.DB_PORT,
user: process.env.DB_USER,
password: process.env.DB_PASSWORD,
name: process.env.DB_NAME,
},
redis: {
url: process.env.REDIS_URL,
password: process.env.REDIS_PASSWORD,
},
auth: {
jwtSecret: process.env.JWT_SECRET,
jwtSecretPrevious: process.env.JWT_SECRET_PREVIOUS,
},
smtp: {
host: process.env.SMTP_HOST,
port: process.env.SMTP_PORT,
user: process.env.SMTP_USER,
pass: process.env.SMTP_PASS,
secure: process.env.SMTP_SECURE,
fromEmail: process.env.SMTP_FROM_EMAIL,
},
ai: {
geminiApiKey: process.env.GEMINI_API_KEY,
geminiRpm: process.env.GEMINI_RPM,
priceQualityThreshold: process.env.AI_PRICE_QUALITY_THRESHOLD,
},
google: {
mapsApiKey: process.env.GOOGLE_MAPS_API_KEY,
clientId: process.env.GOOGLE_CLIENT_ID,
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
},
worker: {
concurrency: process.env.WORKER_CONCURRENCY,
lockDuration: process.env.WORKER_LOCK_DURATION,
emailConcurrency: process.env.EMAIL_WORKER_CONCURRENCY,
analyticsConcurrency: process.env.ANALYTICS_WORKER_CONCURRENCY,
cleanupConcurrency: process.env.CLEANUP_WORKER_CONCURRENCY,
weeklyAnalyticsConcurrency: process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY,
},
server: {
nodeEnv: process.env.NODE_ENV,
port: process.env.PORT,
frontendUrl: process.env.FRONTEND_URL,
baseUrl: process.env.BASE_URL,
storagePath: process.env.STORAGE_PATH,
},
};
}
/**
* Validates and parses environment configuration.
* Throws a descriptive error if validation fails.
*/
function parseConfig(): EnvConfig {
const rawConfig = loadEnvVars();
const result = envSchema.safeParse(rawConfig);
if (!result.success) {
const errors = result.error.issues.map((issue) => {
const path = issue.path.join('.');
return ` - ${path}: ${issue.message}`;
});
const errorMessage = [
'',
'╔════════════════════════════════════════════════════════════════╗',
'║ CONFIGURATION ERROR - APPLICATION STARTUP ║',
'╚════════════════════════════════════════════════════════════════╝',
'',
'The following environment variables are missing or invalid:',
'',
...errors,
'',
'Please check your .env file or environment configuration.',
'See ADR-007 for the complete list of required environment variables.',
'',
].join('\n');
// In test environment, throw instead of exiting to allow test frameworks to catch
if (process.env.NODE_ENV === 'test') {
throw new Error(errorMessage);
}
console.error(errorMessage);
process.exit(1);
}
return result.data;
}
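// Example (illustrative): if DB_HOST is set to an empty string, startup fails with
//   - database.host: DB_HOST is required
// and the process exits with code 1 (or throws when NODE_ENV=test).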
// --- Exported Configuration ---
/**
* The validated application configuration.
* This is a singleton that is parsed once at module load time.
*
* @example
* ```typescript
 * import { config, isProduction } from './config/env';
*
* // Access database config
* const pool = new Pool({
* host: config.database.host,
* port: config.database.port,
* user: config.database.user,
* password: config.database.password,
* database: config.database.name,
* });
*
* // Check environment
* if (config.server.isProduction) {
* // production-only logic
* }
* ```
*/
export const config: EnvConfig = parseConfig();
// --- Convenience Helpers ---
/**
* Returns true if running in production environment.
*/
export const isProduction = config.server.nodeEnv === 'production';
/**
* Returns true if running in test environment.
*/
export const isTest = config.server.nodeEnv === 'test';
/**
* Returns true if running in development environment.
*/
export const isDevelopment = config.server.nodeEnv === 'development';
/**
* Returns true if SMTP is configured (all required fields present).
*/
export const isSmtpConfigured =
!!config.smtp.host && !!config.smtp.user && !!config.smtp.pass && !!config.smtp.fromEmail;
/**
* Returns true if AI services are configured.
*/
export const isAiConfigured = !!config.ai.geminiApiKey;
/**
* Returns true if Google Maps is configured.
*/
export const isGoogleMapsConfigured = !!config.google.mapsApiKey;
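// Example (illustrative) of consuming this module; `nodemailer` is only an assumed
// stand-in here -- the app's real mail wiring may differ:
//   import { config, isSmtpConfigured } from './config/env';
//   if (isSmtpConfigured) {
//     const transport = nodemailer.createTransport({
//       host: config.smtp.host,
//       port: config.smtp.port,
//       secure: config.smtp.secure,
//       auth: { user: config.smtp.user, pass: config.smtp.pass },
//     });
//   }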


@@ -1,21 +1,125 @@
// src/routes/health.routes.ts
-// All route handlers now use req.log (request-scoped logger) as per ADR-004
/**
 * @file Health check endpoints implementing ADR-020: Health Checks and Liveness/Readiness Probes.
 *
 * Provides endpoints for:
 * - Liveness probe (/live) - Is the server process running?
 * - Readiness probe (/ready) - Is the server ready to accept traffic?
 * - Individual service health checks (db, redis, storage)
 */
import { Router, Request, Response, NextFunction } from 'express';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { z } from 'zod';
-// All route handlers now use req.log (request-scoped logger) as per ADR-004
-import { checkTablesExist, getPoolStatus } from '../services/db/connection.db';
-// Removed: import { logger } from '../services/logger.server';
-// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { checkTablesExist, getPoolStatus, getPool } from '../services/db/connection.db';
import { connection as redisConnection } from '../services/queueService.server';
import fs from 'node:fs/promises';
-// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { getSimpleWeekAndYear } from '../utils/dateUtils';
-// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { validateRequest } from '../middleware/validation.middleware';
const router = Router();
// --- Types for Health Check Response ---
interface ServiceHealth {
status: 'healthy' | 'degraded' | 'unhealthy';
latency?: number;
message?: string;
details?: Record<string, unknown>;
}
interface ReadinessResponse {
status: 'healthy' | 'degraded' | 'unhealthy';
timestamp: string;
uptime: number;
services: {
database: ServiceHealth;
redis: ServiceHealth;
storage: ServiceHealth;
};
}
// --- Helper Functions ---
/**
* Checks database connectivity with timing.
*/
async function checkDatabase(): Promise<ServiceHealth> {
const start = Date.now();
try {
const pool = getPool();
await pool.query('SELECT 1');
const latency = Date.now() - start;
const poolStatus = getPoolStatus();
// Consider degraded if waiting connections > 3
const status = poolStatus.waitingCount > 3 ? 'degraded' : 'healthy';
return {
status,
latency,
details: {
totalConnections: poolStatus.totalCount,
idleConnections: poolStatus.idleCount,
waitingConnections: poolStatus.waitingCount,
},
};
} catch (error) {
return {
status: 'unhealthy',
latency: Date.now() - start,
message: error instanceof Error ? error.message : 'Database connection failed',
};
}
}
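// Example (illustrative) healthy result from checkDatabase():
//   { status: 'healthy', latency: 3,
//     details: { totalConnections: 10, idleConnections: 8, waitingConnections: 0 } }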
/**
* Checks Redis connectivity with timing.
*/
async function checkRedis(): Promise<ServiceHealth> {
const start = Date.now();
try {
const reply = await redisConnection.ping();
const latency = Date.now() - start;
if (reply === 'PONG') {
return { status: 'healthy', latency };
}
return {
status: 'unhealthy',
latency,
message: `Unexpected ping response: ${reply}`,
};
} catch (error) {
return {
status: 'unhealthy',
latency: Date.now() - start,
message: error instanceof Error ? error.message : 'Redis connection failed',
};
}
}
/**
* Checks storage accessibility.
*/
async function checkStorage(): Promise<ServiceHealth> {
const storagePath =
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
const start = Date.now();
try {
await fs.access(storagePath, fs.constants.W_OK);
return {
status: 'healthy',
latency: Date.now() - start,
details: { path: storagePath },
};
} catch {
return {
status: 'unhealthy',
latency: Date.now() - start,
message: `Storage not accessible: ${storagePath}`,
};
}
}
// --- Zod Schemas for Health Routes (as per ADR-003) ---
// These routes do not expect any input, so we define empty schemas
// to maintain a consistent validation pattern across the application.
@@ -28,6 +132,104 @@ router.get('/ping', validateRequest(emptySchema), (_req: Request, res: Response)
  res.status(200).send('pong');
});
// =============================================================================
// KUBERNETES PROBES (ADR-020)
// =============================================================================
/**
* GET /api/health/live - Liveness probe for container orchestration.
*
* Returns 200 OK if the server process is running.
* If this fails, the orchestrator should restart the container.
*
* This endpoint is intentionally simple and has no external dependencies.
* It only checks that the Node.js process can handle HTTP requests.
*/
router.get('/live', validateRequest(emptySchema), (_req: Request, res: Response) => {
res.status(200).json({
status: 'ok',
timestamp: new Date().toISOString(),
});
});
/**
* GET /api/health/ready - Readiness probe for container orchestration.
*
* Returns 200 OK if the server is ready to accept traffic.
* Checks all critical dependencies (database, Redis).
* If this fails, the orchestrator should remove the container from the load balancer.
*
* Response includes detailed status of each service for debugging.
*/
router.get('/ready', validateRequest(emptySchema), async (_req: Request, res: Response) => {
// Check all services in parallel for speed
const [database, redis, storage] = await Promise.all([
checkDatabase(),
checkRedis(),
checkStorage(),
]);
// Determine overall status
// - 'healthy' if all critical services (db, redis) are healthy
// - 'degraded' if any service is degraded but none unhealthy
// - 'unhealthy' if any critical service is unhealthy
const criticalServices = [database, redis];
const allServices = [database, redis, storage];
let overallStatus: 'healthy' | 'degraded' | 'unhealthy' = 'healthy';
if (criticalServices.some((s) => s.status === 'unhealthy')) {
overallStatus = 'unhealthy';
} else if (allServices.some((s) => s.status === 'degraded')) {
overallStatus = 'degraded';
}
const response: ReadinessResponse = {
status: overallStatus,
timestamp: new Date().toISOString(),
uptime: process.uptime(),
services: {
database,
redis,
storage,
},
};
// Return appropriate HTTP status code
// 200 = healthy or degraded (can still handle traffic)
// 503 = unhealthy (should not receive traffic)
const httpStatus = overallStatus === 'unhealthy' ? 503 : 200;
return res.status(httpStatus).json(response);
});
/**
* GET /api/health/startup - Startup probe for container orchestration.
*
* Similar to readiness but used during container startup.
* The orchestrator will not send liveness/readiness probes until this succeeds.
* This allows for longer initialization times without triggering restarts.
*/
router.get('/startup', validateRequest(emptySchema), async (_req: Request, res: Response) => {
// For startup, we only check database connectivity
// Redis and storage can be checked later in readiness
const database = await checkDatabase();
if (database.status === 'unhealthy') {
return res.status(503).json({
status: 'starting',
message: 'Waiting for database connection',
database,
});
}
return res.status(200).json({
status: 'started',
timestamp: new Date().toISOString(),
database,
});
});
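// Example probe wiring (illustrative; assumes the API listens on port 3001 and is
// mounted under /api/health -- adjust paths and ports to the real deployment):
//   livenessProbe:  GET /api/health/live     -> restart the container on failure
//   readinessProbe: GET /api/health/ready    -> drop from the load balancer on 503
//   startupProbe:   GET /api/health/startup  -> hold the other probes until it returns 200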
/**
 * GET /api/health/db-schema - Checks if all essential database tables exist.
 * This is a critical check to ensure the database schema is correctly set up.
@@ -49,7 +251,8 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
      return next(error);
    }
    const message =
-      (error as any)?.message || 'An unknown error occurred during DB schema check.';
      (error as { message?: string })?.message ||
      'An unknown error occurred during DB schema check.';
    return next(new Error(message));
  }
});
@@ -59,16 +262,15 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
 * This is important for features like file uploads.
 */
router.get('/storage', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
-  const storagePath = process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
  const storagePath =
    process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
  try {
    await fs.access(storagePath, fs.constants.W_OK); // Use fs.promises
-    return res
-      .status(200)
-      .json({
-        success: true,
-        message: `Storage directory '${storagePath}' is accessible and writable.`,
-      });
-  } catch (error: unknown) {
    return res.status(200).json({
      success: true,
      message: `Storage directory '${storagePath}' is accessible and writable.`,
    });
  } catch {
    next(
      new Error(
        `Storage check failed. Ensure the directory '${storagePath}' exists and is writable by the application.`,
@@ -103,7 +305,8 @@ router.get(
        return next(error);
      }
      const message =
-        (error as any)?.message || 'An unknown error occurred during DB pool check.';
        (error as { message?: string })?.message ||
        'An unknown error occurred during DB pool check.';
      return next(new Error(message));
    }
  },
@@ -141,7 +344,8 @@ router.get(
        return next(error);
      }
      const message =
-        (error as any)?.message || 'An unknown error occurred during Redis health check.';
        (error as { message?: string })?.message ||
        'An unknown error occurred during Redis health check.';
      return next(new Error(message));
    }
  },


@@ -4,10 +4,9 @@ import supertest from 'supertest';
import fs from 'node:fs/promises';
import path from 'path';
import * as db from '../../services/db/index.db';
-import { getPool } from '../../services/db/connection.db';
import { generateFileChecksum } from '../../utils/checksum';
import { logger } from '../../services/logger.server';
-import type { UserProfile, ExtractedFlyerItem } from '../../types';
import type { UserProfile } from '../../types';
import { createAndLoginUser } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
@@ -15,11 +14,12 @@ import { cleanupFiles } from '../utils/cleanupFiles';
import piexif from 'piexifjs';
import exifParser from 'exif-parser';
import sharp from 'sharp';
-import * as imageProcessor from '../../utils/imageProcessor';
// Mock the image processor to ensure safe filenames for DB constraints
vi.mock('../../utils/imageProcessor', async () => {
-  const actual = await vi.importActual<typeof import('../../utils/imageProcessor')>('../../utils/imageProcessor');
  const actual = await vi.importActual<typeof import('../../utils/imageProcessor')>(
    '../../utils/imageProcessor',
  );
  return {
    ...actual,
    generateFlyerIcon: vi.fn().mockResolvedValue('mock-icon-safe.webp'),
@@ -28,34 +28,61 @@ vi.mock('../../utils/imageProcessor', async () => {
// FIX: Mock storageService to return valid URLs (for DB) and write files to disk (for test verification)
vi.mock('../../services/storage/storageService', () => {
-  const fs = require('node:fs/promises');
-  const path = require('path');
-  // Match the directory used in the test helpers
-  const uploadDir = path.join(process.cwd(), 'flyer-images');
-  return {
-    storageService: {
-      upload: vi.fn().mockImplementation(async (fileData, fileName) => {
-        const name = fileName || (fileData && fileData.name) || (typeof fileData === 'string' ? path.basename(fileData) : `upload-${Date.now()}.jpg`);
-        await fs.mkdir(uploadDir, { recursive: true });
-        const destPath = path.join(uploadDir, name);
-        let content = Buffer.from('');
-        if (Buffer.isBuffer(fileData)) {
-          content = fileData as any;
-        } else if (typeof fileData === 'string') {
-          try { content = await fs.readFile(fileData); } catch (e) {}
-        } else if (fileData && fileData.path) {
-          try { content = await fs.readFile(fileData.path); } catch (e) {}
-        }
-        await fs.writeFile(destPath, content);
-        // Return a valid URL to satisfy the 'url_check' DB constraint
-        return `https://example.com/uploads/${name}`;
-      }),
-      delete: vi.fn().mockResolvedValue(undefined),
-    }
-  };
-});
  // eslint-disable-next-line @typescript-eslint/no-require-imports
  const fsModule = require('node:fs/promises');
  // eslint-disable-next-line @typescript-eslint/no-require-imports
  const pathModule = require('path');
  // Match the directory used in the test helpers
  const uploadDir = pathModule.join(process.cwd(), 'flyer-images');
  return {
    storageService: {
      upload: vi
        .fn()
        .mockImplementation(
          async (
            fileData: Buffer | string | { name?: string; path?: string },
            fileName?: string,
          ) => {
            const name =
              fileName ||
              (fileData && typeof fileData === 'object' && 'name' in fileData && fileData.name) ||
              (typeof fileData === 'string'
                ? pathModule.basename(fileData)
                : `upload-${Date.now()}.jpg`);
            await fsModule.mkdir(uploadDir, { recursive: true });
            const destPath = pathModule.join(uploadDir, name);
            let content: Buffer = Buffer.from('');
            if (Buffer.isBuffer(fileData)) {
              content = Buffer.from(fileData);
            } else if (typeof fileData === 'string') {
              try {
                content = await fsModule.readFile(fileData);
              } catch {
                /* ignore */
              }
            } else if (
              fileData &&
              typeof fileData === 'object' &&
              'path' in fileData &&
              fileData.path
            ) {
              try {
                content = await fsModule.readFile(fileData.path);
              } catch {
                /* ignore */
              }
            }
            await fsModule.writeFile(destPath, content);
            // Return a valid URL to satisfy the 'url_check' DB constraint
            return `https://example.com/uploads/${name}`;
          },
        ),
      delete: vi.fn().mockResolvedValue(undefined),
    },
  };
});
@@ -63,11 +90,12 @@ vi.mock('../../services/storage/storageService', () => {
 * @vitest-environment node
 */
-// CRITICAL: This mock function must be declared with vi.hoisted() to ensure it's available
// CRITICAL: These mock functions must be declared with vi.hoisted() to ensure they're available
// at the module level BEFORE any imports are resolved.
-const { mockExtractCoreData } = vi.hoisted(() => {
const { mockExtractCoreData, mockWithTransaction } = vi.hoisted(() => {
  return {
    mockExtractCoreData: vi.fn(),
    mockWithTransaction: vi.fn(),
  };
});
@@ -97,11 +125,15 @@ vi.mock('../../services/aiService.server', async (importOriginal) => {
// Mock the connection DB service to intercept withTransaction.
// This is crucial because FlyerPersistenceService imports directly from connection.db,
// so mocking index.db is insufficient.
// CRITICAL: We use the hoisted mockWithTransaction function so tests can manipulate the same
// function instance that workers are using.
vi.mock('../../services/db/connection.db', async (importOriginal) => {
  const actual = await importOriginal<typeof import('../../services/db/connection.db')>();
  // Initialize the hoisted mock to use the real implementation by default
  mockWithTransaction.mockImplementation(actual.withTransaction);
  return {
    ...actual,
-    withTransaction: vi.fn().mockImplementation(actual.withTransaction),
    withTransaction: mockWithTransaction,
  };
});
@@ -160,11 +192,13 @@ describe('Flyer Processing Background Job Integration Test', () => {
    // 2. Restore DB Service Mock to real implementation
    // This ensures that unless a test specifically mocks a failure, the DB logic works as expected.
-    const { withTransaction } = await import('../../services/db/connection.db');
-    // We need to get the actual implementation again to restore it
-    const actualDb = await vi.importActual<typeof import('../../services/db/connection.db')>('../../services/db/connection.db');
-    vi.mocked(withTransaction).mockReset();
-    vi.mocked(withTransaction).mockImplementation(actualDb.withTransaction);
    // CRITICAL: Use the hoisted mockWithTransaction directly so we're manipulating the same instance
    // that the workers are using.
    const actualDb = await vi.importActual<typeof import('../../services/db/connection.db')>(
      '../../services/db/connection.db',
    );
    mockWithTransaction.mockReset();
    mockWithTransaction.mockImplementation(actualDb.withTransaction);
  });
  afterAll(async () => {
@@ -174,6 +208,21 @@ describe('Flyer Processing Background Job Integration Test', () => {
    vi.unstubAllEnvs(); // Clean up env stubs
    vi.restoreAllMocks(); // Restore the AI spy
// CRITICAL: Close workers FIRST before any cleanup to ensure no pending jobs
// are trying to access files or databases during cleanup.
// This prevents the Node.js async hooks crash that occurs when fs operations
// are rejected during process shutdown.
if (workersModule) {
console.error('[TEST TEARDOWN] Closing in-process workers...');
await workersModule.closeWorkers();
// Give workers a moment to fully release resources
await new Promise((resolve) => setTimeout(resolve, 100));
}
// Close the shared redis connection used by the workers/queues
const { connection } = await import('../../services/redis.server');
await connection.quit();
    // Use the centralized cleanup utility.
    await cleanupDb({
      userIds: createdUserIds,
@@ -184,15 +233,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
    // Use the centralized file cleanup utility.
    await cleanupFiles(createdFilePaths);
-    // NEW: Clean up workers and Redis connection to prevent tests from hanging.
-    if (workersModule) {
-      console.error('[TEST TEARDOWN] Closing in-process workers...');
-      await workersModule.closeWorkers();
-    }
-    // Close the shared redis connection used by the workers/queues
-    const { connection } = await import('../../services/redis.server');
-    await connection.quit();
    // Final delay to let any remaining async operations settle
    // This helps prevent the Node.js async context assertion failure
    await new Promise((resolve) => setTimeout(resolve, 50));
  });
  /**
@@ -200,9 +243,11 @@ describe('Flyer Processing Background Job Integration Test', () => {
   * It uploads a file, polls for completion, and verifies the result in the database.
   */
  const runBackgroundProcessingTest = async (user?: UserProfile, token?: string) => {
-    console.error(`[TEST START] runBackgroundProcessingTest. User: ${user?.user.email ?? 'ANONYMOUS'}`);
    console.error(
      `[TEST START] runBackgroundProcessingTest. User: ${user?.user.email ?? 'ANONYMOUS'}`,
    );
    // Arrange: Load a mock flyer PDF.
-    console.error('[TEST] about to read test-flyer-image.jpg')
    console.error('[TEST] about to read test-flyer-image.jpg');
    const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
    const imageBuffer = await fs.readFile(imagePath);
@@ -210,15 +255,17 @@ describe('Flyer Processing Background Job Integration Test', () => {
    // This prevents a 409 Conflict error when the second test runs.
    const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
    const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
-    const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
    const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
      type: 'image/jpeg',
    });
    const checksum = await generateFileChecksum(mockImageFile);
-    console.error('[TEST] mockImageFile created with uniqueFileName: ', uniqueFileName)
    console.error('[TEST] mockImageFile created with uniqueFileName: ', uniqueFileName);
    console.error('[TEST DATA] Generated checksum for test:', checksum);
    // Track created files for cleanup
    const uploadDir = path.resolve(__dirname, '../../../flyer-images');
    createdFilePaths.push(path.join(uploadDir, uniqueFileName));
-    console.error('[TEST] createdFilesPaths after 1st push: ', createdFilePaths)
    console.error('[TEST] createdFilesPaths after 1st push: ', createdFilePaths);
    // The icon name is derived from the original filename.
    const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
    createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
@@ -298,9 +345,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
    }
  };

-  it('should successfully process a flyer for an AUTHENTICATED user via the background queue', async ({
-    onTestFinished,
-  }) => {
  it('should successfully process a flyer for an AUTHENTICATED user via the background queue', async () => {
    // Arrange: Create a new user specifically for this test.
    const email = `auth-flyer-user-${Date.now()}@example.com`;
    const { user: authUser, token } = await createAndLoginUser({
@@ -319,351 +364,336 @@ describe('Flyer Processing Background Job Integration Test', () => {
await runBackgroundProcessingTest(); await runBackgroundProcessingTest();
}, 240000); // Increase timeout to 240 seconds for this long-running test }, 240000); // Increase timeout to 240 seconds for this long-running test
it( it('should strip EXIF data from uploaded JPEG images during processing', async () => {
'should strip EXIF data from uploaded JPEG images during processing', // Arrange: Create a user for this test
async () => { const { user: authUser, token } = await createAndLoginUser({
// Arrange: Create a user for this test email: `exif-user-${Date.now()}@example.com`,
const { user: authUser, token } = await createAndLoginUser({ fullName: 'EXIF Tester',
email: `exif-user-${Date.now()}@example.com`, request,
fullName: 'EXIF Tester', });
request, createdUserIds.push(authUser.user.user_id);
});
createdUserIds.push(authUser.user.user_id);
// 1. Create an image buffer with EXIF data // 1. Create an image buffer with EXIF data
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg'); const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath); const imageBuffer = await fs.readFile(imagePath);
const jpegDataAsString = imageBuffer.toString('binary'); const jpegDataAsString = imageBuffer.toString('binary');
const exifObj = { const exifObj = {
'0th': { [piexif.ImageIFD.Software]: 'Gemini Code Assist Test' }, '0th': { [piexif.ImageIFD.Software]: 'Gemini Code Assist Test' },
Exif: { [piexif.ExifIFD.DateTimeOriginal]: '2025:12:25 10:00:00' }, Exif: { [piexif.ExifIFD.DateTimeOriginal]: '2025:12:25 10:00:00' },
}; };
const exifBytes = piexif.dump(exifObj); const exifBytes = piexif.dump(exifObj);
const jpegWithExif = piexif.insert(exifBytes, jpegDataAsString); const jpegWithExif = piexif.insert(exifBytes, jpegDataAsString);
const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary'); const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary');
const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`; const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`;
const mockImageFile = new File([new Uint8Array(imageWithExifBuffer)], uniqueFileName, { type: 'image/jpeg' }); const mockImageFile = new File([new Uint8Array(imageWithExifBuffer)], uniqueFileName, {
const checksum = await generateFileChecksum(mockImageFile); type: 'image/jpeg',
});
const checksum = await generateFileChecksum(mockImageFile);
// Track original and derived files for cleanup // Track original and derived files for cleanup
const uploadDir = path.resolve(__dirname, '../../../flyer-images'); const uploadDir = path.resolve(__dirname, '../../../flyer-images');
createdFilePaths.push(path.join(uploadDir, uniqueFileName)); createdFilePaths.push(path.join(uploadDir, uniqueFileName));
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`; const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName)); createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
// 2. Act: Upload the file and wait for processing // 2. Act: Upload the file and wait for processing
const uploadResponse = await request const uploadResponse = await request
.post('/api/ai/upload-and-process') .post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${token}`) .set('Authorization', `Bearer ${token}`)
.field('baseUrl', 'https://example.com') .field('baseUrl', 'https://example.com')
.field('checksum', checksum) .field('checksum', checksum)
.attach('flyerFile', imageWithExifBuffer, uniqueFileName); .attach('flyerFile', imageWithExifBuffer, uniqueFileName);
const { jobId } = uploadResponse.body; const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string'); expect(jobId).toBeTypeOf('string');
// Poll for job completion using the new utility. // Poll for job completion using the new utility.
const jobStatus = await poll( const jobStatus = await poll(
async () => { async () => {
const statusResponse = await request const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`) .get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`); .set('Authorization', `Bearer ${token}`);
return statusResponse.body; return statusResponse.body;
}, },
(status) => status.state === 'completed' || status.state === 'failed', (status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'EXIF stripping job' }, { timeout: 180000, interval: 3000, description: 'EXIF stripping job' },
); );
// 3. Assert // 3. Assert
if (jobStatus?.state === 'failed') { if (jobStatus?.state === 'failed') {
console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason); console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason);
console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace); console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2)); console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
} }
expect(jobStatus?.state).toBe('completed'); expect(jobStatus?.state).toBe('completed');
const flyerId = jobStatus?.returnValue?.flyerId; const flyerId = jobStatus?.returnValue?.flyerId;
expect(flyerId).toBeTypeOf('number'); expect(flyerId).toBeTypeOf('number');
createdFlyerIds.push(flyerId); createdFlyerIds.push(flyerId);
// 4. Verify EXIF data is stripped from the saved file // 4. Verify EXIF data is stripped from the saved file
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger); const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
expect(savedFlyer).toBeDefined(); expect(savedFlyer).toBeDefined();
if (savedFlyer?.store_id) { if (savedFlyer?.store_id) {
createdStoreIds.push(savedFlyer.store_id); createdStoreIds.push(savedFlyer.store_id);
} }
const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url)); const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
createdFilePaths.push(savedImagePath); // Add final path for cleanup createdFilePaths.push(savedImagePath); // Add final path for cleanup
const savedImageBuffer = await fs.readFile(savedImagePath); const savedImageBuffer = await fs.readFile(savedImagePath);
const parser = exifParser.create(savedImageBuffer); const parser = exifParser.create(savedImageBuffer);
const exifResult = parser.parse(); const exifResult = parser.parse();
console.error('[TEST] savedImagePath during EXIF data stripping: ', savedImagePath) console.error('[TEST] savedImagePath during EXIF data stripping: ', savedImagePath);
console.error('[TEST] exifResult.tags: ', exifResult.tags) console.error('[TEST] exifResult.tags: ', exifResult.tags);
// The `tags` object will be empty if no EXIF data is found.
expect(exifResult.tags).toEqual({});
expect(exifResult.tags.Software).toBeUndefined();
}, 240000);
it('should strip metadata from uploaded PNG images during processing', async () => {
// Arrange: Create a user for this test
const { user: authUser, token } = await createAndLoginUser({
email: `png-meta-user-${Date.now()}@example.com`,
fullName: 'PNG Metadata Tester',
request,
});
createdUserIds.push(authUser.user.user_id);
// 1. Create a PNG image buffer with custom metadata using sharp
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageWithMetadataBuffer = await sharp(imagePath)
.png() // Convert to PNG
.withMetadata({
exif: {
IFD0: {
Copyright: 'Gemini Code Assist PNG Test',
},
},
})
.toBuffer();
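// NOTE: sharp's withMetadata({ exif: ... }) writes an EXIF block (here
// IFD0.Copyright) into the encoded output, which gives the upload pipeline real
// metadata to strip; the nested IFD0 shape follows sharp's EXIF option format.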
const uniqueFileName = `test-flyer-with-metadata-${Date.now()}.png`;
const mockImageFile = new File([new Uint8Array(imageWithMetadataBuffer)], uniqueFileName, {
type: 'image/png',
});
const checksum = await generateFileChecksum(mockImageFile);
// Track files for cleanup
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
// 2. Act: Upload the file and wait for processing
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${token}`)
.field('baseUrl', 'https://example.com')
.field('checksum', checksum)
.attach('flyerFile', imageWithMetadataBuffer, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'PNG metadata stripping job' },
);
// 3. Assert job completion
if (jobStatus?.state === 'failed') {
console.error('[DEBUG] PNG metadata test job failed:', jobStatus.failedReason);
console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
}
expect(jobStatus?.state).toBe('completed');
const flyerId = jobStatus?.returnValue?.flyerId;
expect(flyerId).toBeTypeOf('number');
createdFlyerIds.push(flyerId);
// 4. Verify metadata is stripped from the saved file
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
expect(savedFlyer).toBeDefined();
if (savedFlyer?.store_id) {
createdStoreIds.push(savedFlyer.store_id);
}
const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
createdFilePaths.push(savedImagePath); // Add final path for cleanup
console.error('[TEST] savedImagePath during PNG metadata stripping: ', savedImagePath);
const savedImageMetadata = await sharp(savedImagePath).metadata();
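// NOTE: sharp's metadata() exposes an embedded EXIF payload as a raw Buffer on
// its `exif` property when one is present, so "stripped" shows up as undefined.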
// The test should fail here initially because PNGs are not processed.
// The `exif` property should be undefined after the fix.
expect(savedImageMetadata.exif).toBeUndefined();
}, 240000);
it('should handle a failure from the AI service gracefully', async () => {
// Arrange: Mock the AI service to throw an error for this specific test.
const aiError = new Error('AI model failed to extract data.');
// Update the spy implementation to reject
mockExtractCoreData.mockRejectedValue(aiError);
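// NOTE: `mockExtractCoreData` is presumably a vi.fn()/spy over the AI service's
// extractCoreData, set up earlier in this file (not shown here). mockRejectedValue
// makes every subsequent call return a rejected promise, so the worker's failure
// path is exercised without a real model call.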
// Arrange: Prepare a unique flyer file for upload.
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`ai-error-test-${Date.now()}`)]);
const uniqueFileName = `ai-error-test-${Date.now()}.jpg`;
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
type: 'image/jpeg',
});
const checksum = await generateFileChecksum(mockImageFile);
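// NOTE: appending a per-run timestamp to the image bytes changes the checksum, so
// the findFlyerByChecksum lookup asserted below can never match a flyer left over
// from a previous run.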
// Track created files for cleanup
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
// Act 1: Upload the file to start the background job.
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.field('baseUrl', 'https://example.com')
.field('checksum', checksum)
.attach('flyerFile', uniqueContent, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'AI failure test job' },
);
// Assert 1: Check that the job failed.
if (jobStatus?.state === 'failed') {
console.error('[TEST DEBUG] AI Failure Test - Job Failed Reason:', jobStatus.failedReason);
console.error('[TEST DEBUG] AI Failure Test - Job Stack:', jobStatus.stacktrace);
}
expect(jobStatus?.state).toBe('failed');
expect(jobStatus?.failedReason).toContain('AI model failed to extract data.');
// Assert 2: Verify the flyer was NOT saved in the database.
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
expect(savedFlyer).toBeUndefined();
}, 240000);
it('should handle a database failure during flyer creation', async () => {
// Arrange: Mock the database transaction function to throw an error.
// This is a more realistic simulation of a DB failure than mocking the inner createFlyerAndItems function.
// CRITICAL: Use the hoisted mockWithTransaction directly - this is the same instance the workers use.
const dbError = new Error('DB transaction failed');
mockWithTransaction.mockRejectedValue(dbError);
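// NOTE: `mockWithTransaction` is presumably created with vi.hoisted() and wired
// into a vi.mock() factory for connection.db elsewhere in this file (not shown).
// A sketch under that assumption:
//
//   const { mockWithTransaction } = vi.hoisted(() => ({
//     mockWithTransaction: vi.fn(),
//   }));
//   vi.mock('../../services/db/connection.db', async (importOriginal) => ({
//     ...(await importOriginal<Record<string, unknown>>()),
//     withTransaction: mockWithTransaction,
//   }));
//
// Re-importing the module and calling vi.mocked(withTransaction), the old
// approach, can hand back a different instance than the one the workers
// captured, which is why the hoisted handle is used directly.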
// Arrange: Prepare a unique flyer file for upload.
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`db-error-test-${Date.now()}`)]);
const uniqueFileName = `db-error-test-${Date.now()}.jpg`;
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
type: 'image/jpeg',
});
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
// Act 1: Upload the file to start the background job.
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.field('baseUrl', 'https://example.com')
.field('checksum', checksum)
.attach('flyerFile', uniqueContent, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'DB failure test job' },
);
// Assert 1: Check that the job failed.
expect(jobStatus?.state).toBe('failed');
expect(jobStatus?.failedReason).toContain('DB transaction failed');
// Assert 2: Verify the flyer was NOT saved in the database.
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
expect(savedFlyer).toBeUndefined();
}, 240000);
it('should NOT clean up temporary files when a job fails, to allow for manual inspection', async () => {
// Arrange: Mock the AI service to throw an error, causing the job to fail.
const aiError = new Error('Simulated AI failure for cleanup test.');
mockExtractCoreData.mockRejectedValue(aiError);
// Arrange: Prepare a unique flyer file for upload.
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`cleanup-test-${Date.now()}`)]);
const uniqueFileName = `cleanup-test-${Date.now()}.jpg`;
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
type: 'image/jpeg',
});
const checksum = await generateFileChecksum(mockImageFile);
// Track the path of the file that will be created in the uploads directory.
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
const tempFilePath = path.join(uploadDir, uniqueFileName);
createdFilePaths.push(tempFilePath);
// Act 1: Upload the file to start the background job.
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.field('baseUrl', 'https://example.com')
.field('checksum', checksum)
.attach('flyerFile', uniqueContent, uniqueFileName);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'failed', // We expect this one to fail
{ timeout: 180000, interval: 3000, description: 'file cleanup failure test job' },
);
// Assert 1: Check that the job actually failed.
expect(jobStatus?.state).toBe('failed');
expect(jobStatus?.failedReason).toContain('Simulated AI failure for cleanup test.');
// Assert 2: Verify the temporary file was NOT deleted.
// fs.access resolves (with undefined) when the file exists and rejects otherwise,
// so the assertion must await `.resolves`; a bare expect() without a matcher never runs.
await expect(
fs.access(tempFilePath),
'Expected temporary file to exist after job failure, but it was deleted.',
).resolves.toBeUndefined();
}, 240000);
});

View File

@@ -4,20 +4,14 @@ import supertest from 'supertest';
import path from 'path';
import fs from 'node:fs/promises';
import { getPool } from '../../services/db/connection.db';
import { createAndLoginUser } from '../utils/testHelpers';
import { generateFileChecksum } from '../../utils/checksum';
import * as db from '../../services/db/index.db';
import { cleanupDb } from '../utils/cleanup';
import { logger } from '../../services/logger.server';
import * as imageProcessor from '../../utils/imageProcessor';
import { poll } from '../utils/poll';
import type { UserProfile, LeaderboardUser } from '../../types';
import type { Flyer } from '../../types';
import { cleanupFiles } from '../utils/cleanupFiles';
import { aiService } from '../../services/aiService.server';
@@ -101,6 +95,20 @@ describe('Gamification Flow Integration Test', () => {
vi.unstubAllEnvs();
vi.restoreAllMocks(); // Restore the AI spy
// CRITICAL: Close workers FIRST before any cleanup to ensure no pending jobs
// are trying to access files or databases during cleanup.
// This prevents the Node.js async hooks crash that occurs when fs operations
// are rejected during process shutdown.
if (workersModule) {
await workersModule.closeWorkers();
// Give workers a moment to fully release resources
await new Promise((resolve) => setTimeout(resolve, 100));
}
// Close the shared redis connection used by the workers/queues
const { connection } = await import('../../services/redis.server');
await connection.quit();
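// NOTE: assuming `connection` is an ioredis client, quit() sends QUIT and lets
// pending replies finish before the socket closes, whereas disconnect() drops it
// immediately; the graceful variant matters here because workers may still be
// flushing job state.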
await cleanupDb({
userIds: testUser ? [testUser.user.user_id] : [],
flyerIds: createdFlyerIds,
@@ -108,146 +116,161 @@ describe('Gamification Flow Integration Test', () => {
});
await cleanupFiles(createdFilePaths);
// Final delay to let any remaining async operations settle
await new Promise((resolve) => setTimeout(resolve, 50));
});
it('should award the "First Upload" achievement after a user successfully uploads and processes their first flyer', async () => {
// --- Arrange: Prepare a unique flyer file for upload ---
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
const uniqueFileName = `gamification-test-flyer-${Date.now()}.jpg`;
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
type: 'image/jpeg',
});
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
// --- Act 1: Upload the flyer to trigger the background job ---
const testBaseUrl = 'https://example.com';
console.error(
'--------------------------------------------------------------------------------',
);
console.error('[TEST DEBUG] STARTING UPLOAD STEP');
console.error(`[TEST DEBUG] Env FRONTEND_URL: "${process.env.FRONTEND_URL}"`);
console.error(`[TEST DEBUG] Sending baseUrl field: "${testBaseUrl}"`);
console.error(
'--------------------------------------------------------------------------------',
);
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${authToken}`)
.field('checksum', checksum)
.field('baseUrl', testBaseUrl)
.attach('flyerFile', uniqueContent, uniqueFileName);
console.error(
'--------------------------------------------------------------------------------',
);
console.error(`[TEST DEBUG] Upload Response Status: ${uploadResponse.status}`);
console.error(
`[TEST DEBUG] Upload Response Body: ${JSON.stringify(uploadResponse.body, null, 2)}`,
);
console.error(
'--------------------------------------------------------------------------------',
);
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
console.error(`[TEST DEBUG] Job ID received: ${jobId}`);
// --- Act 2: Poll for job completion using the new utility ---
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
console.error(`[TEST DEBUG] Polling status for ${jobId}: ${statusResponse.body?.state}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'gamification flyer processing' },
);
if (!jobStatus) {
console.error('[DEBUG] Gamification test job timed out: No job status received.');
throw new Error('Gamification test job timed out: No job status received.');
}
console.error(
'--------------------------------------------------------------------------------',
);
console.error('[TEST DEBUG] Final Job Status Object:', JSON.stringify(jobStatus, null, 2));
if (jobStatus.state === 'failed') {
console.error(`[TEST DEBUG] Job Failed Reason: ${jobStatus.failedReason}`);
// If there is a progress object with error details, log it
if (jobStatus.progress) {
console.error(
`[TEST DEBUG] Job Progress/Error Details:`,
JSON.stringify(jobStatus.progress, null, 2),
);
}
}
console.error(
'--------------------------------------------------------------------------------',
);
// --- Assert 1: Verify the job completed successfully ---
if (jobStatus?.state === 'failed') {
console.error('[DEBUG] Gamification test job failed:', jobStatus.failedReason);
console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
}
expect(jobStatus?.state).toBe('completed');
const flyerId = jobStatus?.returnValue?.flyerId;
expect(flyerId).toBeTypeOf('number');
createdFlyerIds.push(flyerId); // Track for cleanup
// --- Assert 1.5: Verify the flyer was saved with the correct original filename ---
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
expect(savedFlyer).toBeDefined();
expect(savedFlyer?.file_name).toBe(uniqueFileName);
if (savedFlyer?.store_id) {
createdStoreIds.push(savedFlyer.store_id);
}
// Also add the final processed image path to the cleanup list.
// This is important because JPEGs are re-processed to strip EXIF data, creating a new file.
const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
createdFilePaths.push(savedImagePath);
// --- Act 3: Fetch the user's achievements (triggers endpoint, response not needed) ---
await request.get('/api/achievements/me').set('Authorization', `Bearer ${authToken}`);
// --- Assert 2: Verify the "First-Upload" achievement was awarded ---
// The 'user_registered' achievement is awarded on creation, so we expect at least two.
// Wait for the asynchronous achievement event to process
await vi.waitUntil(
async () => {
const achievements = await db.gamificationRepo.getUserAchievements(
testUser.user.user_id,
logger,
);
return achievements.length >= 2;
},
{ timeout: 5000, interval: 200 },
);
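// NOTE: vi.waitUntil re-runs the callback every `interval` ms until it returns a
// truthy value, and fails the test if `timeout` elapses first. It is a built-in
// counterpart to the hand-rolled poll() helper used for job status above.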
// Final assertion and retrieval
const userAchievements = await db.gamificationRepo.getUserAchievements(
testUser.user.user_id,
logger,
);
expect(userAchievements.length).toBeGreaterThanOrEqual(2);
const firstUploadAchievement = userAchievements.find((ach) => ach.name === 'First-Upload');
expect(firstUploadAchievement).toBeDefined();
expect(firstUploadAchievement?.points_value).toBeGreaterThan(0);
// --- Act 4: Fetch the leaderboard ---
const leaderboardResponse = await request.get('/api/achievements/leaderboard');
const leaderboard: LeaderboardUser[] = leaderboardResponse.body;
// --- Assert 3: Verify the user is on the leaderboard with points ---
const userOnLeaderboard = leaderboard.find((u) => u.user_id === testUser.user.user_id);
expect(userOnLeaderboard).toBeDefined();
// The user should have points from 'user_registered' and 'First-Upload'.
// We check that the points are greater than or equal to the points from the upload achievement.
expect(Number(userOnLeaderboard?.points)).toBeGreaterThanOrEqual(
firstUploadAchievement!.points_value,
);
}, 240000); // Increase timeout to 240s to match other long-running processing tests
describe('Legacy Flyer Upload', () => {
it('should process a legacy upload and save fully qualified URLs to the database', async () => {
@@ -259,7 +282,9 @@ describe('Gamification Flow Integration Test', () => {
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueFileName = `legacy-upload-test-${Date.now()}.jpg`;
const mockImageFile = new File([new Uint8Array(imageBuffer)], uniqueFileName, {
type: 'image/jpeg',
});
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup.