Compare commits

8 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 1814469eb4 |  |
|  | b777430ff7 |  |
|  | 23830c0d4e |  |
|  | ef42fee982 |  |
|  | 65cb54500c |  |
|  | 664ad291be |  |
|  | ff912b9055 |  |
|  | ec32027bd4 |  |
```diff
@@ -56,7 +56,13 @@
     "mcp__memory__delete_entities",
     "mcp__sequential-thinking__sequentialthinking",
     "mcp__filesystem__list_directory",
-    "mcp__filesystem__read_multiple_files"
+    "mcp__filesystem__read_multiple_files",
+    "mcp__filesystem__directory_tree",
+    "mcp__filesystem__read_text_file",
+    "Bash(wc:*)",
+    "Bash(npm install:*)",
+    "Bash(git grep:*)",
+    "Bash(findstr:*)"
   ]
 }
 }
```
```diff
@@ -117,7 +117,8 @@ jobs:
       DB_USER: ${{ secrets.DB_USER }}
       DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
       DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
-      REDIS_URL: 'redis://localhost:6379'
+      # Explicitly use database 0 for production (test uses database 1)
+      REDIS_URL: 'redis://localhost:6379/0'
       REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
       FRONTEND_URL: 'https://flyer-crawler.projectium.com'
       JWT_SECRET: ${{ secrets.JWT_SECRET }}
```
```diff
@@ -96,6 +96,24 @@ jobs:
       # It prevents the accumulation of duplicate processes from previous test runs.
       node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.name && p.name.endsWith('-test')) { console.log('Deleting test process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id, e.message); } } }); console.log('✅ Test process cleanup complete.'); } catch (e) { if (e.stdout.toString().includes('No process found')) { console.log('No PM2 processes running, cleanup not needed.'); } else { console.error('Error cleaning up test processes:', e.message); } }" || true
+
+      - name: Flush Redis Test Database Before Tests
+        # CRITICAL: Clear Redis database 1 (test database) to remove stale BullMQ jobs.
+        # This prevents old jobs with outdated error messages from polluting test results.
+        # NOTE: We use database 1 for tests to isolate from production (database 0).
+        env:
+          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
+        run: |
+          echo "--- Flushing Redis database 1 (test database) to remove stale jobs ---"
+          if [ -z "$REDIS_PASSWORD" ]; then
+            echo "⚠️ REDIS_PASSWORD_TEST not set, attempting flush without password..."
+            redis-cli -n 1 FLUSHDB || echo "Redis flush failed (no password)"
+          else
+            redis-cli -a "$REDIS_PASSWORD" -n 1 FLUSHDB 2>/dev/null && echo "✅ Redis database 1 (test) flushed successfully." || echo "⚠️ Redis flush failed"
+          fi
+          # Verify the flush worked by checking key count on database 1
+          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 1 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
+          echo "Redis database 1 key count after flush: $KEY_COUNT"
+
       - name: Run All Tests and Generate Merged Coverage Report
       # This single step runs both unit and integration tests, then merges their
       # coverage data into a single report. It combines the environment variables
```
```diff
@@ -109,7 +127,9 @@ jobs:
       DB_NAME: 'flyer-crawler-test' # Explicitly set for tests
 
       # --- Redis credentials for the test suite ---
-      REDIS_URL: 'redis://localhost:6379'
+      # CRITICAL: Use Redis database 1 to isolate tests from production (which uses db 0).
+      # This prevents the production worker from picking up test jobs.
+      REDIS_URL: 'redis://localhost:6379/1'
       REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
 
       # --- Integration test specific variables ---
```
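The `/0` and `/1` suffixes above are what actually select the Redis logical database. A minimal sketch of the application side, assuming the connection is built from `REDIS_URL` with ioredis (which reads the database index from the URL path):

```typescript
import IORedis from 'ioredis';

// 'redis://localhost:6379/1' selects logical database 1 (test);
// 'redis://localhost:6379/0' selects database 0 (production).
const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0', {
  password: process.env.REDIS_PASSWORD,
  maxRetriesPerRequest: null, // BullMQ requires this on its blocking connections
});

// Queues and workers built on this connection are confined to the selected
// database, so a production worker on db 0 never sees test jobs on db 1.
```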
```diff
@@ -384,8 +404,8 @@ jobs:
       DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
       DB_NAME: ${{ secrets.DB_DATABASE_TEST }}
 
-      # Redis Credentials
-      REDIS_URL: 'redis://localhost:6379'
+      # Redis Credentials (use database 1 to isolate from production)
+      REDIS_URL: 'redis://localhost:6379/1'
       REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
 
       # Application Secrets
```
```diff
@@ -116,7 +116,8 @@ jobs:
       DB_USER: ${{ secrets.DB_USER }}
       DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
       DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
-      REDIS_URL: 'redis://localhost:6379'
+      # Explicitly use database 0 for production (test uses database 1)
+      REDIS_URL: 'redis://localhost:6379/0'
       REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
       FRONTEND_URL: 'https://flyer-crawler.projectium.com'
       JWT_SECRET: ${{ secrets.JWT_SECRET }}
```
.gitea/workflows/manual-redis-flush-prod.yml (new file, 167 lines)

@@ -0,0 +1,167 @@
```yaml
# .gitea/workflows/manual-redis-flush-prod.yml
#
# DANGER: This workflow is DESTRUCTIVE and intended for manual execution only.
# It will completely FLUSH the PRODUCTION Redis database (db 0).
# This will clear all BullMQ queues, sessions, caches, and any other Redis data.
#
name: Manual - Flush Production Redis

on:
  workflow_dispatch:
    inputs:
      confirmation:
        description: 'DANGER: This will FLUSH production Redis. Type "flush-production-redis" to confirm.'
        required: true
        default: 'do-not-run'
      flush_type:
        description: 'What to flush?'
        required: true
        type: choice
        options:
          - 'queues-only'
          - 'entire-database'
        default: 'queues-only'

jobs:
  flush-redis:
    runs-on: projectium.com # This job runs on your self-hosted Gitea runner.

    env:
      REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'
          cache: 'npm'
          cache-dependency-path: '**/package-lock.json'

      - name: Install Dependencies
        run: npm ci

      - name: Validate Secrets
        run: |
          if [ -z "$REDIS_PASSWORD" ]; then
            echo "ERROR: REDIS_PASSWORD_PROD secret is not set in Gitea repository settings."
            exit 1
          fi
          echo "✅ Redis password secret is present."

      - name: Verify Confirmation Phrase
        run: |
          if [ "${{ gitea.event.inputs.confirmation }}" != "flush-production-redis" ]; then
            echo "ERROR: Confirmation phrase did not match. Aborting Redis flush."
            exit 1
          fi
          echo "✅ Confirmation accepted. Proceeding with Redis flush."

      - name: Show Current Redis State
        run: |
          echo "--- Current Redis Database 0 (Production) State ---"
          redis-cli -a "$REDIS_PASSWORD" -n 0 INFO keyspace 2>/dev/null || echo "Could not get keyspace info"
          echo ""
          echo "--- Key Count ---"
          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
          echo "Production Redis (db 0) key count: $KEY_COUNT"
          echo ""
          echo "--- BullMQ Queue Keys ---"
          redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | head -20 || echo "No BullMQ keys found"

      - name: 🚨 FINAL WARNING & PAUSE 🚨
        run: |
          echo "*********************************************************************"
          echo "WARNING: YOU ARE ABOUT TO FLUSH PRODUCTION REDIS DATA."
          echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
          echo ""
          if [ "${{ gitea.event.inputs.flush_type }}" = "entire-database" ]; then
            echo "This will DELETE ALL Redis data including sessions, caches, and queues!"
          else
            echo "This will DELETE ALL BullMQ queue data (pending jobs, failed jobs, etc.)"
          fi
          echo ""
          echo "This action is IRREVERSIBLE. Press Ctrl+C in the runner terminal NOW to cancel."
          echo "Sleeping for 10 seconds..."
          echo "*********************************************************************"
          sleep 10

      - name: Flush BullMQ Queues Only
        if: ${{ gitea.event.inputs.flush_type == 'queues-only' }}
        env:
          REDIS_URL: 'redis://localhost:6379/0'
        run: |
          echo "--- Obliterating BullMQ queues using Node.js ---"
          node -e "
          const { Queue } = require('bullmq');
          const IORedis = require('ioredis');

          const connection = new IORedis(process.env.REDIS_URL, {
            maxRetriesPerRequest: null,
            password: process.env.REDIS_PASSWORD,
          });

          const queueNames = [
            'flyer-processing',
            'email-sending',
            'analytics-reporting',
            'weekly-analytics-reporting',
            'file-cleanup',
            'token-cleanup'
          ];

          (async () => {
            for (const name of queueNames) {
              try {
                const queue = new Queue(name, { connection });
                const counts = await queue.getJobCounts();
                console.log('Queue \"' + name + '\" before obliterate:', JSON.stringify(counts));
                await queue.obliterate({ force: true });
                console.log('✅ Obliterated queue: ' + name);
                await queue.close();
              } catch (err) {
                console.error('⚠️ Failed to obliterate queue ' + name + ':', err.message);
              }
            }
            await connection.quit();
            console.log('✅ All BullMQ queues obliterated.');
          })();
          "

      - name: Flush Entire Redis Database
        if: ${{ gitea.event.inputs.flush_type == 'entire-database' }}
        run: |
          echo "--- Flushing entire Redis database 0 (production) ---"
          redis-cli -a "$REDIS_PASSWORD" -n 0 FLUSHDB 2>/dev/null && echo "✅ Redis database 0 flushed successfully." || echo "❌ Redis flush failed"

      - name: Verify Flush Results
        run: |
          echo "--- Redis Database 0 (Production) State After Flush ---"
          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" -n 0 DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
          echo "Production Redis (db 0) key count after flush: $KEY_COUNT"
          echo ""
          echo "--- Remaining BullMQ Queue Keys ---"
          BULL_KEYS=$(redis-cli -a "$REDIS_PASSWORD" -n 0 KEYS "bull:*" 2>/dev/null | wc -l || echo "0")
          echo "BullMQ key count: $BULL_KEYS"

          if [ "${{ gitea.event.inputs.flush_type }}" = "queues-only" ] && [ "$BULL_KEYS" -gt 0 ]; then
            echo "⚠️ Warning: Some BullMQ keys may still exist. This can happen if new jobs were added during the flush."
          fi

      - name: Summary
        run: |
          echo ""
          echo "=========================================="
          echo "PRODUCTION REDIS FLUSH COMPLETE"
          echo "=========================================="
          echo "Flush type: ${{ gitea.event.inputs.flush_type }}"
          echo "Timestamp: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
          echo ""
          echo "NOTE: If you flushed queues, any pending jobs (flyer processing,"
          echo "emails, analytics, etc.) have been permanently deleted."
          echo ""
          echo "The production workers will automatically start processing"
          echo "new jobs as they are added to the queues."
          echo "=========================================="
```
```diff
@@ -2,7 +2,7 @@
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Accepted
 
 ## Context
```
@@ -16,3 +16,82 @@ We will implement a dedicated background job processing system using a task queu

**Positive**: Decouples the API from heavy processing, allows for retries on failure, and enables scaling the processing workers independently. Increases application reliability and resilience.

**Negative**: Introduces a new dependency (Redis) into the infrastructure. Requires refactoring of the flyer processing logic to work within a job queue structure.
## Implementation Details

### Queue Infrastructure

The implementation uses **BullMQ v5.65.1** with **ioredis v5.8.2** for Redis connectivity. Six distinct queues handle different job types:

| Queue Name | Purpose | Retry Attempts | Backoff Strategy |
| --- | --- | --- | --- |
| `flyer-processing` | OCR/AI processing of flyers | 3 | Exponential (5s base) |
| `email-sending` | Email delivery | 5 | Exponential (10s base) |
| `analytics-reporting` | Daily report generation | 2 | Exponential (60s base) |
| `weekly-analytics-reporting` | Weekly report generation | 2 | Exponential (1h base) |
| `file-cleanup` | Temporary file cleanup | 3 | Exponential (30s base) |
| `token-cleanup` | Expired token removal | 2 | Exponential (1h base) |
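A minimal sketch of how one of these queues could be declared with the retry policy from the table (the exact option shape in `queues.server.ts` is an assumption):

```typescript
import { Queue } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0', {
  maxRetriesPerRequest: null, // required by BullMQ
});

// flyer-processing: 3 attempts, exponential backoff from a 5s base delay
export const flyerProcessingQueue = new Queue('flyer-processing', {
  connection,
  defaultJobOptions: {
    attempts: 3,
    backoff: { type: 'exponential', delay: 5_000 },
  },
});
```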
### Key Files

- `src/services/queues.server.ts` - Queue definitions and configuration
- `src/services/workers.server.ts` - Worker implementations with configurable concurrency
- `src/services/redis.server.ts` - Redis connection management
- `src/services/queueService.server.ts` - Queue lifecycle and graceful shutdown
- `src/services/flyerProcessingService.server.ts` - 5-stage flyer processing pipeline
- `src/types/job-data.ts` - TypeScript interfaces for all job data types
### API Design

Endpoints for long-running tasks return **202 Accepted** immediately with a job ID:

```text
POST /api/ai/upload-and-process  → 202 { jobId: "..." }
GET  /api/ai/jobs/:jobId/status  → { state: "...", progress: ... }
```
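A hedged sketch of the pattern behind these endpoints (route paths are from the ADR; the handler internals and exact response fields are assumptions):

```typescript
import { Router } from 'express';
import { flyerProcessingQueue } from './queues.server';

const router = Router();

router.post('/api/ai/upload-and-process', async (req, res) => {
  // Enqueue instead of processing inline, then acknowledge immediately.
  const job = await flyerProcessingQueue.add('process-flyer', { upload: req.body });
  res.status(202).json({ jobId: job.id });
});

router.get('/api/ai/jobs/:jobId/status', async (req, res) => {
  const job = await flyerProcessingQueue.getJob(req.params.jobId);
  if (!job) return res.status(404).json({ error: 'Job not found' });
  res.json({ state: await job.getState(), progress: job.progress });
});
```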
### Worker Configuration

Workers are configured via environment variables:

- `WORKER_CONCURRENCY` - Flyer processing parallelism (default: 1)
- `EMAIL_WORKER_CONCURRENCY` - Email worker parallelism (default: 10)
- `ANALYTICS_WORKER_CONCURRENCY` - Analytics worker parallelism (default: 1)
- `CLEANUP_WORKER_CONCURRENCY` - Cleanup worker parallelism (default: 10)
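A sketch of how a worker might consume one of these variables (the processor body is a placeholder; the real wiring lives in `workers.server.ts`):

```typescript
import { Worker } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0', {
  maxRetriesPerRequest: null,
});

// WORKER_CONCURRENCY caps how many flyer jobs this process runs in parallel (default 1).
export const flyerWorker = new Worker(
  'flyer-processing',
  async (job) => {
    // ... run the 5-stage flyer processing pipeline on job.data ...
  },
  { connection, concurrency: Number(process.env.WORKER_CONCURRENCY ?? 1) },
);
```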
### Monitoring

- **Bull Board UI** available at `/api/admin/jobs` for admin users
- Worker status endpoint: `GET /api/admin/workers/status`
- Queue status endpoint: `GET /api/admin/queues/status`
### Graceful Shutdown

Both API and worker processes implement graceful shutdown with a 30-second timeout, ensuring in-flight jobs complete before process termination.
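A minimal sketch of that shutdown sequence, assuming the workers are collected in an array (the 30-second budget is from the ADR; everything else is illustrative):

```typescript
import type { Worker } from 'bullmq';

declare const workers: Worker[]; // e.g. flyer, email, analytics, cleanup workers

const SHUTDOWN_TIMEOUT_MS = 30_000;

async function shutdown(): Promise<void> {
  // Hard-exit if the graceful close exceeds the 30s budget.
  const timer = setTimeout(() => process.exit(1), SHUTDOWN_TIMEOUT_MS);
  // Worker.close() waits for in-flight jobs to finish before resolving.
  await Promise.all(workers.map((w) => w.close()));
  clearTimeout(timer);
  process.exit(0);
}

process.on('SIGTERM', shutdown);
process.on('SIGINT', shutdown);
```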
## Compliance Notes

### Deprecated Synchronous Endpoints

The following endpoints process flyers synchronously and are **deprecated**:

- `POST /api/ai/upload-legacy` - For integration testing only
- `POST /api/ai/flyers/process` - Legacy workflow; new code should migrate to the queue-based approach

New integrations MUST use `POST /api/ai/upload-and-process` for queue-based processing.

### Email Handling

- **Bulk emails** (deal notifications): Enqueued via `emailQueue`
- **Transactional emails** (password reset): Sent synchronously for immediate user feedback

## Future Enhancements

Potential improvements for consideration:

1. **Dead Letter Queue (DLQ)**: Move permanently failed jobs to a dedicated queue for analysis
2. **Job Priority Levels**: Allow priority-based processing for different job types
3. **Real-time Progress**: WebSocket/SSE for live job progress updates to clients
4. **Per-Queue Rate Limiting**: Throttle job processing based on external API limits
5. **Job Dependencies**: Support for jobs that depend on completion of other jobs
6. **Prometheus Metrics**: Export queue metrics for observability dashboards
```diff
@@ -2,7 +2,7 @@
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Accepted
 
 ## Context
```
@@ -20,3 +20,107 @@ We will implement a multi-layered caching strategy using an in-memory data store

**Positive**: Directly addresses application performance and scalability. Reduces database load and improves API response times for common requests.

**Negative**: Introduces Redis as a dependency if not already used. Adds complexity to the data-fetching logic and requires careful management of cache invalidation to prevent stale data.
## Implementation Details

### Cache Service

A centralized cache service (`src/services/cacheService.server.ts`) provides reusable caching functionality:

- **`getOrSet<T>(key, fetcher, options)`**: Cache-aside pattern implementation
- **`get<T>(key)`**: Retrieve cached value
- **`set<T>(key, value, ttl)`**: Store value with TTL
- **`del(key)`**: Delete specific key
- **`invalidatePattern(pattern)`**: Delete keys matching a pattern

All cache operations are fail-safe: cache failures do not break the application.
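A hedged sketch of the `getOrSet` cache-aside flow with the fail-safe behavior described above (the signature is from the ADR; client wiring and JSON serialization are assumptions):

```typescript
import IORedis from 'ioredis';

const redis = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0');

export async function getOrSet<T>(
  key: string,
  fetcher: () => Promise<T>,
  options: { ttlSeconds: number },
): Promise<T> {
  try {
    const cached = await redis.get(key);
    if (cached !== null) return JSON.parse(cached) as T; // cache hit
  } catch {
    // fail-safe: a cache read error falls through to the fetcher
  }
  const value = await fetcher(); // cache miss: query the database
  try {
    await redis.set(key, JSON.stringify(value), 'EX', options.ttlSeconds);
  } catch {
    // fail-safe: a cache write error does not break the request
  }
  return value;
}
```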
### TTL Configuration

Different data types use different TTL values based on volatility:

| Data Type | TTL | Rationale |
| --- | --- | --- |
| Brands/Stores | 1 hour | Rarely changes, safe to cache longer |
| Flyer lists | 5 minutes | Changes when new flyers are added |
| Individual flyers | 10 minutes | Stable once created |
| Flyer items | 10 minutes | Stable once created |
| Statistics | 5 minutes | Can be slightly stale |
| Frequent sales | 15 minutes | Aggregated data, updated periodically |
| Categories | 1 hour | Rarely changes |
### Cache Key Strategy

Cache keys follow a consistent prefix pattern for pattern-based invalidation:

- `cache:brands` - All brands list
- `cache:flyers:{limit}:{offset}` - Paginated flyer lists
- `cache:flyer:{id}` - Individual flyer data
- `cache:flyer-items:{flyerId}` - Items for a specific flyer
- `cache:stats:*` - Statistics data
- `geocode:{address}` - Geocoding results (30-day TTL)
### Cached Endpoints

The following repository methods implement server-side caching:

| Method | Cache Key Pattern | TTL |
| --- | --- | --- |
| `FlyerRepository.getAllBrands()` | `cache:brands` | 1 hour |
| `FlyerRepository.getFlyers()` | `cache:flyers:{limit}:{offset}` | 5 minutes |
| `FlyerRepository.getFlyerItems()` | `cache:flyer-items:{flyerId}` | 10 minutes |
### Cache Invalidation

**Event-based invalidation** is triggered on write operations:

- **Flyer creation** (`FlyerPersistenceService.saveFlyer`): Invalidates all `cache:flyers*` keys
- **Flyer deletion** (`FlyerRepository.deleteFlyer`): Invalidates the specific flyer and flyer-items caches, plus flyer lists

**Manual invalidation** via admin endpoints:

- `POST /api/admin/system/clear-cache` - Clears all application cache (flyers, brands, stats)
- `POST /api/admin/system/clear-geocode-cache` - Clears geocoding cache
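A sketch of pattern-based invalidation, assuming a SCAN-based walk rather than the blocking `KEYS` command (which is unsafe on large production datasets):

```typescript
import IORedis from 'ioredis';

const redis = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379/0');

export async function invalidatePattern(pattern: string): Promise<void> {
  const stream = redis.scanStream({ match: pattern, count: 100 });
  for await (const keys of stream as AsyncIterable<string[]>) {
    if (keys.length > 0) await redis.del(...keys); // drop each matching batch
  }
}

// e.g. after FlyerPersistenceService.saveFlyer():
//   await invalidatePattern('cache:flyers*');
```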
### Client-Side Caching

TanStack React Query provides client-side caching with configurable stale times:

| Query Type | Stale Time |
| --- | --- |
| Categories | 1 hour |
| Master Items | 10 minutes |
| Flyer Items | 5 minutes |
| Flyers | 2 minutes |
| Shopping Lists | 1 minute |
| Activity Log | 30 seconds |
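A sketch of how one of those stale times might be wired up (the query key and fetcher are placeholders; the value mirrors the table):

```typescript
import { useQuery } from '@tanstack/react-query';

declare function fetchCategories(): Promise<string[]>; // placeholder fetcher

export function useCategories() {
  return useQuery({
    queryKey: ['categories'],
    queryFn: fetchCategories,
    staleTime: 60 * 60 * 1000, // 1 hour: categories rarely change
  });
}
```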
### Multi-Layer Cache Architecture

```text
Client Request
      ↓
[TanStack React Query]    ← Client-side cache (staleTime-based)
      ↓
[Express API]
      ↓
[CacheService.getOrSet()] ← Server-side Redis cache (TTL-based)
      ↓
[PostgreSQL Database]
```
## Key Files

- `src/services/cacheService.server.ts` - Centralized cache service
- `src/services/db/flyer.db.ts` - Repository with caching for brands, flyers, flyer items
- `src/services/flyerPersistenceService.server.ts` - Cache invalidation on flyer creation
- `src/routes/admin.routes.ts` - Admin cache management endpoints
- `src/config/queryClient.ts` - Client-side query cache configuration

## Future Enhancements

1. **Recipe caching**: Add caching to expensive recipe queries (by-sale-percentage, etc.)
2. **Cache warming**: Pre-populate cache on startup for frequently accessed static data
3. **Cache metrics**: Add hit/miss rate monitoring for observability
4. **Conditional caching**: Skip cache for authenticated user-specific data
5. **Cache compression**: Compress large cached payloads to reduce Redis memory usage
```diff
@@ -2,7 +2,7 @@
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Accepted
 
 ## Context
```
@@ -14,9 +14,305 @@ We will formalize the testing pyramid for the project, defining the role of each

1. **Unit Tests (Vitest)**: For isolated functions, components, and repository methods with mocked dependencies. High coverage is expected.
2. **Integration Tests (Supertest)**: For API routes, testing the interaction between controllers, services, and mocked database layers. Focus on contract and middleware correctness.

```diff
-3. **End-to-End (E2E) Tests (Playwright/Cypress)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real browser and a test database to ensure the entire system works together.
+3. **End-to-End (E2E) Tests (Vitest + Supertest)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real test server and database to ensure the entire system works together.
```

## Consequences

**Positive**: Ensures a consistent and comprehensive approach to quality assurance. Gives developers confidence when refactoring or adding new features. Clearly defines "done" for a new feature.

**Negative**: May require investment in setting up and maintaining the E2E testing environment. Can slightly increase the time required to develop a feature if all test layers are required.
## Implementation Details

### Testing Framework Stack

| Tool | Version | Purpose |
| --- | --- | --- |
| Vitest | 4.0.15 | Test runner for all test types |
| @testing-library/react | 16.3.0 | React component testing |
| @testing-library/jest-dom | 6.9.1 | DOM assertion matchers |
| supertest | 7.1.4 | HTTP assertion library for API testing |
| msw | 2.12.3 | Mock Service Worker for network mocking |
| testcontainers | 11.8.1 | Database containerization (optional) |
| c8 + nyc | 10.1.3 / 17.1.0 | Coverage reporting |

### Test File Organization

```text
src/
├── components/
│   └── *.test.tsx        # Component unit tests (colocated)
├── hooks/
│   └── *.test.ts         # Hook unit tests (colocated)
├── services/
│   └── *.test.ts         # Service unit tests (colocated)
├── routes/
│   └── *.test.ts         # Route handler unit tests (colocated)
├── utils/
│   └── *.test.ts         # Utility function tests (colocated)
└── tests/
    ├── setup/            # Test configuration and setup files
    ├── utils/            # Test utilities, factories, helpers
    ├── assets/           # Test fixtures (images, files)
    ├── integration/      # Integration test files (*.test.ts)
    └── e2e/              # End-to-end test files (*.e2e.test.ts)
```

**Naming Convention**: `{filename}.test.ts` or `{filename}.test.tsx` for unit/integration, `{filename}.e2e.test.ts` for E2E.

### Configuration Files

| Config | Environment | Purpose |
| --- | --- | --- |
| `vite.config.ts` | jsdom | Unit tests (React components, hooks) |
| `vitest.config.integration.ts` | node | Integration tests (API routes) |
| `vitest.config.e2e.ts` | node | E2E tests (full user flows) |
| `vitest.workspace.ts` | - | Orchestrates all test projects |

### Test Pyramid

```text
        ┌─────────────┐
        │     E2E     │  5 test files
        │    Tests    │  Critical user flows
        ├─────────────┤
        │ Integration │  17 test files
        │    Tests    │  API contracts + middleware
    ┌───┴─────────────┴───┐
    │     Unit Tests      │  185 test files
    │ Components, Hooks,  │  Isolated functions
    │  Services, Utils    │  Mocked dependencies
    └─────────────────────┘
```
### Unit Tests

**Purpose**: Test isolated functions, components, and modules with mocked dependencies.

**Environment**: jsdom (browser-like)

**Key Patterns**:

```typescript
// Component testing with providers
import { renderWithProviders, screen } from '@/tests/utils/renderWithProviders';

describe('MyComponent', () => {
  it('renders correctly', () => {
    renderWithProviders(<MyComponent />);
    expect(screen.getByText('Hello')).toBeInTheDocument();
  });
});
```

```typescript
// Hook testing
import { renderHook, waitFor } from '@testing-library/react';
import { useMyHook } from './useMyHook';

describe('useMyHook', () => {
  it('returns expected value', async () => {
    const { result } = renderHook(() => useMyHook());
    await waitFor(() => expect(result.current.data).toBeDefined());
  });
});
```

**Global Mocks** (automatically applied via `tests-setup-unit.ts`):

- Database connections (`pg.Pool`)
- AI services (`@google/genai`)
- Authentication (`jsonwebtoken`, `bcrypt`)
- Logging (`logger.server`, `logger.client`)
- Notifications (`notificationService`)
### Integration Tests

**Purpose**: Test API routes with real service interactions and database.

**Environment**: node

**Setup**: Real Express server on port 3001, real PostgreSQL database

```typescript
// API route testing pattern
import supertest from 'supertest';
import { createAndLoginUser } from '@/tests/utils/testHelpers';

describe('Auth API', () => {
  let request: ReturnType<typeof supertest>;
  let authToken: string;

  beforeAll(async () => {
    const app = (await import('../../../server')).default;
    request = supertest(app);
    const { token } = await createAndLoginUser(request);
    authToken = token;
  });

  it('GET /api/auth/me returns user profile', async () => {
    const response = await request
      .get('/api/auth/me')
      .set('Authorization', `Bearer ${authToken}`);

    expect(response.status).toBe(200);
    expect(response.body.user.email).toBeDefined();
  });
});
```

**Database Cleanup**:

```typescript
import { cleanupDb } from '@/tests/utils/cleanup';

afterAll(async () => {
  await cleanupDb({ users: [testUserId] });
});
```
### E2E Tests

**Purpose**: Test complete user journeys through the application.

**Timeout**: 120 seconds (for long-running flows)

**Current E2E Tests**:

- `auth.e2e.test.ts` - Registration, login, password reset
- `flyer-upload.e2e.test.ts` - Complete flyer upload pipeline
- `user-journey.e2e.test.ts` - Full user workflow
- `admin-authorization.e2e.test.ts` - Admin-specific flows
- `admin-dashboard.e2e.test.ts` - Admin dashboard functionality
### Mock Factories

The project uses comprehensive mock factories (`src/tests/utils/mockFactories.ts`, 1553 lines) for creating test data:

```typescript
import {
  createMockUser,
  createMockFlyer,
  createMockFlyerItem,
  createMockRecipe,
  resetMockIds,
} from '@/tests/utils/mockFactories';

beforeEach(() => {
  resetMockIds(); // Ensure deterministic IDs
});

it('creates flyer with items', () => {
  const flyer = createMockFlyer({ store_name: 'TestMart' });
  const items = [createMockFlyerItem({ flyer_id: flyer.flyer_id })];
  // ...
});
```

**Factory Coverage**: 90+ factory functions for all domain entities including users, flyers, recipes, shopping lists, budgets, achievements, etc.
### Test Utilities

| Utility | Purpose |
| --- | --- |
| `renderWithProviders()` | Wrap components with AppProviders + Router |
| `createAndLoginUser()` | Create user and return auth token |
| `cleanupDb()` | Database cleanup respecting FK constraints |
| `createTestApp()` | Create Express app for route testing |
| `poll()` | Polling utility for async operations |
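A hedged sketch of the `poll()` helper from the table (its actual signature in `testHelpers.ts` is assumed): retry an async check until it passes or the timeout expires.

```typescript
export async function poll(
  check: () => Promise<boolean>,
  { timeoutMs = 30_000, intervalMs = 500 } = {},
): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    if (await check()) return;
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error(`poll() timed out after ${timeoutMs}ms`);
}

// e.g. wait for a queued flyer job to complete before asserting on the result:
//   await poll(async () => (await getJobStatus(jobId)).state === 'completed');
```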
### Coverage Configuration

**Coverage Provider**: v8 (built-in Vitest)

**Report Directories**:

- `.coverage/unit/` - Unit test coverage
- `.coverage/integration/` - Integration test coverage
- `.coverage/e2e/` - E2E test coverage

**Excluded from Coverage**:

- `src/index.tsx`, `src/main.tsx` (entry points)
- `src/tests/**` (test files themselves)
- `src/**/*.d.ts` (type declarations)
- `src/components/icons/**` (icon components)
- `src/db/seed*.ts` (database seeding scripts)

### npm Scripts

```bash
# Run all tests
npm run test

# Run by level
npm run test:unit          # Unit tests only (jsdom)
npm run test:integration   # Integration tests only (node)

# With coverage
npm run test:coverage      # Unit + Integration with reports

# Clean coverage directories
npm run clean
```

### Test Timeouts

| Test Type | Timeout | Rationale |
| --- | --- | --- |
| Unit | 5 seconds | Fast, isolated tests |
| Integration | 60 seconds | AI service calls, DB operations |
| E2E | 120 seconds | Full user flow with multiple API calls |

## Best Practices

### When to Write Each Test Type

1. **Unit Tests** (required):
   - Pure functions and utilities
   - React components (rendering, user interactions)
   - Custom hooks
   - Service methods with mocked dependencies
   - Repository methods

2. **Integration Tests** (required for API changes):
   - New API endpoints
   - Authentication/authorization flows
   - Middleware behavior
   - Database query correctness

3. **E2E Tests** (for critical paths):
   - User registration and login
   - Core business flows (flyer upload, shopping lists)
   - Admin operations

### Test Isolation Guidelines

1. **Reset mock IDs**: Call `resetMockIds()` in `beforeEach()`
2. **Unique test data**: Use timestamps or UUIDs for emails/usernames
3. **Clean up after tests**: Use `cleanupDb()` in `afterAll()`
4. **Don't share state**: Each test should be independent

### Mocking Guidelines

1. **Unit tests**: Mock external dependencies (DB, APIs, services)
2. **Integration tests**: Mock only external APIs (AI services)
3. **E2E tests**: Minimal mocking, use real services where possible
## Key Files

- `vite.config.ts` - Unit test configuration
- `vitest.config.integration.ts` - Integration test configuration
- `vitest.config.e2e.ts` - E2E test configuration
- `vitest.workspace.ts` - Workspace orchestration
- `src/tests/setup/tests-setup-unit.ts` - Global mocks (488 lines)
- `src/tests/setup/integration-global-setup.ts` - Server + DB setup
- `src/tests/utils/mockFactories.ts` - Mock factories (1553 lines)
- `src/tests/utils/testHelpers.ts` - Test utilities

## Future Enhancements

1. **Browser E2E Tests**: Consider adding Playwright for actual browser testing
2. **Visual Regression**: Screenshot comparison for UI components
3. **Performance Testing**: Add benchmarks for critical paths
4. **Mutation Testing**: Verify test quality with mutation testing tools
5. **Coverage Thresholds**: Define minimum coverage requirements per module
```diff
@@ -2,7 +2,7 @@
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Partially Implemented
 
 ## Context
```
@@ -16,3 +16,255 @@ We will establish a formal Design System and Component Library. This will involv

- **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
- **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
## Implementation Status

### What's Implemented

The codebase has a solid foundation for a design system:

- ✅ **Tailwind CSS v4.1.17** as the styling solution
- ✅ **Dark mode** fully implemented with system preference detection
- ✅ **55 custom icon components** for consistent iconography
- ✅ **Component organization** with shared vs. feature-specific separation
- ✅ **Accessibility patterns** with ARIA attributes and focus management

### What's Not Yet Implemented

- ❌ **Storybook** is not yet installed or configured
- ❌ **Formal design token documentation** (colors, typography, spacing)
- ❌ **Visual regression testing** for component changes

## Implementation Details

### Component Library Structure

```text
src/
├── components/            # 30+ shared UI components
│   ├── icons/             # 55 SVG icon components
│   ├── Header.tsx
│   ├── Footer.tsx
│   ├── LoadingSpinner.tsx
│   ├── ErrorDisplay.tsx
│   ├── ConfirmationModal.tsx
│   ├── DarkModeToggle.tsx
│   ├── StatCard.tsx
│   ├── PasswordInput.tsx
│   └── ...
├── features/              # Feature-specific components
│   ├── charts/            # PriceChart, PriceHistoryChart
│   ├── flyer/             # FlyerDisplay, FlyerList, FlyerUploader
│   ├── shopping/          # ShoppingListComponent, WatchedItemsList
│   └── voice-assistant/   # VoiceAssistant
├── layouts/               # Page layouts
│   └── MainLayout.tsx
├── pages/                 # Page components
│   └── admin/components/  # Admin-specific components
└── providers/             # Context providers
```

### Styling Approach

**Tailwind CSS** with utility-first classes:

```typescript
// Component example with consistent styling patterns
<button
  className="px-4 py-2 bg-brand-primary text-white rounded-lg
             hover:bg-brand-dark transition-colors duration-200
             focus:outline-none focus:ring-2 focus:ring-brand-primary
             focus:ring-offset-2 dark:focus:ring-offset-gray-800"
>
  Click me
</button>
```

**Common Utility Patterns**:

| Pattern | Classes |
| --- | --- |
| Card container | `bg-white dark:bg-gray-800 rounded-lg shadow-md p-6` |
| Primary button | `bg-brand-primary hover:bg-brand-dark text-white rounded-lg px-4 py-2` |
| Secondary button | `bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-200` |
| Input field | `border border-gray-300 dark:border-gray-600 rounded-md px-3 py-2` |
| Focus ring | `focus:outline-none focus:ring-2 focus:ring-brand-primary` |
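A sketch of how these patterns might be consolidated into a single shared component (a hypothetical `Button.tsx`; the class strings come from the table above):

```typescript
import React from 'react';

type ButtonProps = React.ButtonHTMLAttributes<HTMLButtonElement> & {
  variant?: 'primary' | 'secondary';
};

const base =
  'rounded-lg px-4 py-2 transition-colors duration-200 ' +
  'focus:outline-none focus:ring-2 focus:ring-brand-primary ' +
  'focus:ring-offset-2 dark:focus:ring-offset-gray-800';

const variants = {
  primary: 'bg-brand-primary hover:bg-brand-dark text-white',
  secondary: 'bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-200',
};

export const Button: React.FC<ButtonProps> = ({ variant = 'primary', className = '', ...props }) => (
  <button {...props} className={`${base} ${variants[variant]} ${className}`} />
);
```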
### Color System

**Brand Colors** (Tailwind theme extensions):

- `brand-primary` - Primary brand color (blue/teal)
- `brand-light` - Lighter variant
- `brand-dark` - Darker variant for hover states
- `brand-secondary` - Secondary accent color

**Semantic Colors**:

- Gray scale: `gray-50` through `gray-950`
- Error: `red-500`, `red-600`
- Success: `green-500`, `green-600`
- Warning: `yellow-500`, `orange-500`
- Info: `blue-500`, `blue-600`
### Dark Mode Implementation

Dark mode is fully implemented using Tailwind's `dark:` variant:

```typescript
// Initialization in useAppInitialization hook
const initializeDarkMode = () => {
  // Priority: user profile > localStorage > system preference
  const stored = localStorage.getItem('darkMode');
  const systemPreference = window.matchMedia('(prefers-color-scheme: dark)').matches;
  const isDarkMode = stored ? stored === 'true' : systemPreference;

  document.documentElement.classList.toggle('dark', isDarkMode);
  return isDarkMode;
};
```

**Usage in components**:

```typescript
<div className="bg-white dark:bg-gray-800 text-gray-900 dark:text-white">
  Content adapts to theme
</div>
```
### Icon System

**55 custom SVG icon components** in `src/components/icons/`:

```typescript
// Icon component pattern
interface IconProps extends React.SVGProps<SVGSVGElement> {
  title?: string;
}

export const CheckCircleIcon: React.FC<IconProps> = ({ title, ...props }) => (
  <svg {...props} fill="currentColor" viewBox="0 0 24 24">
    {title && <title>{title}</title>}
    <path d="..." />
  </svg>
);
```

**Usage**:

```typescript
<CheckCircleIcon className="w-5 h-5 text-green-500" title="Success" />
```

**External icons**: Lucide React (`lucide-react` v0.555.0) used for additional icons.
### Accessibility Patterns

**ARIA Attributes**:

```typescript
// Modal pattern
<div role="dialog" aria-modal="true" aria-labelledby="modal-title">
  <h2 id="modal-title">Modal Title</h2>
</div>

// Button with label
<button aria-label="Close modal">
  <XMarkIcon aria-hidden="true" />
</button>

// Loading state
<div role="status" aria-live="polite">
  <LoadingSpinner />
</div>
```

**Focus Management**:

- Consistent focus rings: `focus:ring-2 focus:ring-brand-primary focus:ring-offset-2`
- Dark mode offset: `dark:focus:ring-offset-gray-800`
- No outline: `focus:outline-none` (using ring instead)
### State Management

**Context Providers** (see ADR-005):

| Provider | Purpose |
| --- | --- |
| `AuthProvider` | Authentication state |
| `ModalProvider` | Modal open/close state |
| `FlyersProvider` | Flyer data |
| `MasterItemsProvider` | Grocery items |
| `UserDataProvider` | User-specific data |

**Provider Hierarchy** in `AppProviders.tsx`:

```typescript
<QueryClientProvider>
  <ModalProvider>
    <AuthProvider>
      <FlyersProvider>
        <MasterItemsProvider>
          <UserDataProvider>
            {children}
          </UserDataProvider>
        </MasterItemsProvider>
      </FlyersProvider>
    </AuthProvider>
  </ModalProvider>
</QueryClientProvider>
```
## Key Files

- `tailwind.config.js` - Tailwind CSS configuration
- `src/index.css` - Tailwind CSS entry point
- `src/components/` - Shared UI components
- `src/components/icons/` - Icon component library (55 icons)
- `src/providers/AppProviders.tsx` - Context provider composition
- `src/hooks/useAppInitialization.ts` - Dark mode initialization

## Component Guidelines

### When to Create Shared Components

Create a shared component in `src/components/` when:

1. Used in 3+ places across the application
2. Represents a reusable UI pattern (buttons, cards, modals)
3. Has consistent styling/behavior requirements

### Naming Conventions

- **Components**: PascalCase (`LoadingSpinner.tsx`)
- **Icons**: PascalCase with `Icon` suffix (`CheckCircleIcon.tsx`)
- **Hooks**: camelCase with `use` prefix (`useModal.ts`)
- **Contexts**: PascalCase with `Context` suffix (`AuthContext.tsx`)

### Styling Guidelines

1. Use Tailwind utility classes exclusively
2. Include dark mode variants for all colors: `bg-white dark:bg-gray-800`
3. Add focus states for interactive elements
4. Use semantic color names from the design system

## Future Enhancements (Storybook Setup)

To complete ADR-012 implementation:

1. **Install Storybook**:

   ```bash
   npx storybook@latest init
   ```

2. **Create stories for core components**:
   - Button variants
   - Form inputs (PasswordInput, etc.)
   - Modal components
   - Loading states
   - Icon showcase

3. **Add visual regression testing** with Chromatic or Percy

4. **Document design tokens** formally in Storybook

5. **Create component composition guidelines**
```diff
@@ -2,7 +2,7 @@
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Accepted
 
 ## Context
```
@@ -20,3 +20,197 @@ We will implement a multi-layered security approach for the API:

- **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
- **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.
## Implementation Status

### What's Implemented

- ✅ **Helmet** - Security headers middleware with CSP, HSTS, and more
- ✅ **Rate Limiting** - Comprehensive implementation with 17+ specific limiters
- ✅ **Input Validation** - Zod-based request validation on all routes
- ✅ **File Upload Security** - MIME type validation, size limits, filename sanitization
- ✅ **Error Handling** - Production-safe error responses (no sensitive data leakage)
- ✅ **Request Timeout** - 5-minute timeout protection
- ✅ **Secure Cookies** - httpOnly and secure flags for authentication cookies

### Not Required

- ℹ️ **CORS** - Not needed (API and frontend are same-origin)

## Implementation Details

### Helmet Security Headers

Using **helmet v8.x**, configured in `server.ts` as the first middleware after app initialization.

**Security Headers Applied**:

| Header | Configuration | Purpose |
| --- | --- | --- |
| Content-Security-Policy | Custom directives | Prevents XSS, code injection |
| Strict-Transport-Security | 1 year, includeSubDomains, preload | Forces HTTPS connections |
| X-Content-Type-Options | nosniff | Prevents MIME type sniffing |
| X-Frame-Options | DENY | Prevents clickjacking |
| X-XSS-Protection | 0 (disabled) | Deprecated; CSP preferred |
| Referrer-Policy | strict-origin-when-cross-origin | Controls referrer information |
| Cross-Origin-Resource-Policy | cross-origin | Allows external resource loading |

**Content Security Policy Directives**:

```typescript
contentSecurityPolicy: {
  directives: {
    defaultSrc: ["'self'"],
    scriptSrc: ["'self'", "'unsafe-inline'"],       // React inline scripts
    styleSrc: ["'self'", "'unsafe-inline'"],        // Tailwind inline styles
    imgSrc: ["'self'", 'data:', 'blob:', 'https:'], // External images
    fontSrc: ["'self'", 'https:', 'data:'],
    connectSrc: ["'self'", 'https:', 'wss:'],       // API + WebSocket
    frameSrc: ["'none'"],                           // No iframes
    objectSrc: ["'none'"],                          // No plugins
    upgradeInsecureRequests: [],                    // Production only
  },
}
```

**HSTS Configuration**:

- Max-age: 1 year (31536000 seconds)
- Includes subdomains
- Preload-ready for browser HSTS lists
### Rate Limiting

Using **express-rate-limit v8.2.1** with a centralized configuration in `src/config/rateLimiters.ts`.

**Standard Configuration**:

```typescript
const standardConfig = {
  standardHeaders: true,     // Sends RateLimit-* headers
  legacyHeaders: false,
  skip: shouldSkipRateLimit, // Disabled in the test environment
};
```

**Rate Limiters by Category**:

| Category | Limiter | Window | Max Requests |
| --- | --- | --- | --- |
| **Authentication** | loginLimiter | 15 min | 5 |
| | registerLimiter | 1 hour | 5 |
| | forgotPasswordLimiter | 15 min | 5 |
| | resetPasswordLimiter | 15 min | 10 |
| | refreshTokenLimiter | 15 min | 20 |
| | logoutLimiter | 15 min | 10 |
| **Public/User Read** | publicReadLimiter | 15 min | 100 |
| | userReadLimiter | 15 min | 100 |
| | userUpdateLimiter | 15 min | 100 |
| **Sensitive Operations** | userSensitiveUpdateLimiter | 1 hour | 5 |
| | adminTriggerLimiter | 15 min | 30 |
| **AI/Costly** | aiGenerationLimiter | 15 min | 20 |
| | geocodeLimiter | 1 hour | 100 |
| | priceHistoryLimiter | 15 min | 50 |
| **Uploads** | adminUploadLimiter | 15 min | 20 |
| | aiUploadLimiter | 15 min | 10 |
| | batchLimiter | 15 min | 50 |
| **Tracking** | trackingLimiter | 15 min | 200 |
| | reactionToggleLimiter | 15 min | 150 |

**Test Environment Handling**:

Rate limiting is automatically disabled in the test environment via the `shouldSkipRateLimit` utility (`src/utils/rateLimit.ts`). Tests can opt in to rate limiting by setting the `x-test-rate-limit-enable: true` header.
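A hedged sketch of one limiter built on the standard config (the limits match the table; the exact export shape of `rateLimiters.ts` is assumed):

```typescript
import rateLimit from 'express-rate-limit';
import { shouldSkipRateLimit } from '../utils/rateLimit';

const standardConfig = {
  standardHeaders: true,
  legacyHeaders: false,
  skip: shouldSkipRateLimit,
};

// loginLimiter: at most 5 attempts per 15-minute window per client
export const loginLimiter = rateLimit({
  ...standardConfig,
  windowMs: 15 * 60 * 1000,
  limit: 5,
});
```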
### Input Validation

**Zod Schema Validation** (`src/middleware/validation.middleware.ts`):

- Type-safe parsing and coercion for params, query, and body
- Applied to all API routes via `validateRequest()` middleware
- Returns structured validation errors with field-level details

**Filename Sanitization** (`src/utils/stringUtils.ts`):

```typescript
// Removes dangerous characters from uploaded filenames
sanitizeFilename(filename: string): string
```
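A hedged sketch of a Zod-backed `validateRequest()` middleware (the real implementation in `validation.middleware.ts` may differ in error shape and coercion details):

```typescript
import { z } from 'zod';
import type { Request, Response, NextFunction } from 'express';

type RequestSchemas = { body?: z.ZodTypeAny; query?: z.ZodTypeAny; params?: z.ZodTypeAny };

export const validateRequest =
  (schemas: RequestSchemas) => (req: Request, res: Response, next: NextFunction) => {
    for (const part of ['params', 'query', 'body'] as const) {
      const schema = schemas[part];
      if (!schema) continue;
      const result = schema.safeParse(req[part]);
      if (!result.success) {
        // structured, field-level validation errors
        return res.status(400).json({ errors: result.error.flatten().fieldErrors });
      }
      Object.assign(req[part], result.data); // apply parsed coercions in place
    }
    next();
  };
```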
### File Upload Security

**Multer Configuration** (`src/middleware/multer.middleware.ts`), sketched below:

- MIME type validation via `imageFileFilter` (only `image/*` allowed)
- File size limits (2MB for logos, configurable per upload type)
- Unique filenames using timestamps + random suffixes
- User-scoped storage paths

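A sketch of such a configuration is below. The 2MB limit and the `imageFileFilter` name come from the list above; the storage details are illustrative assumptions:

```typescript
import multer from 'multer';
import path from 'node:path';
import crypto from 'node:crypto';

// Accept only image/* MIME types.
const imageFileFilter: multer.Options['fileFilter'] = (_req, file, cb) => {
  cb(null, file.mimetype.startsWith('image/'));
};

const storage = multer.diskStorage({
  filename: (_req, file, cb) => {
    // Unique filename: timestamp + random suffix + original extension.
    const suffix = `${Date.now()}-${crypto.randomBytes(4).toString('hex')}`;
    cb(null, `${suffix}${path.extname(file.originalname)}`);
  },
});

export const logoUpload = multer({
  storage,
  fileFilter: imageFileFilter,
  limits: { fileSize: 2 * 1024 * 1024 }, // 2MB limit for logos
});
```
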
### Error Handling

**Production-Safe Responses** (`src/middleware/errorHandler.ts`), sketched below:

- Production mode: Returns a generic error message with a tracking ID
- Development mode: Returns detailed error information
- Sensitive error details are logged but never exposed to clients

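A minimal sketch of this behavior follows; the actual `errorHandler.ts` likely differs in logging transport and ID format:

```typescript
import type { Request, Response, NextFunction } from 'express';
import { randomUUID } from 'node:crypto';

// Express identifies error handlers by their four-argument signature.
export function errorHandler(err: Error, req: Request, res: Response, _next: NextFunction) {
  const errorId = randomUUID();
  // Full details go to the logs; production clients only ever see the tracking ID.
  console.error({ errorId, message: err.message, stack: err.stack, url: req.originalUrl });

  if (process.env.NODE_ENV === 'production') {
    res.status(500).json({ message: 'An unexpected error occurred.', errorId });
  } else {
    res.status(500).json({ message: err.message, stack: err.stack, errorId });
  }
}
```
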
### Request Security

**Timeout Protection** (`server.ts`), sketched below:

- 5-minute request timeout via `connect-timeout` middleware
- Prevents resource exhaustion from long-running requests

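The wiring amounts to something like the following sketch, assuming `connect-timeout`'s standard `req.timedout` flag; the exact placement in `server.ts` may differ:

```typescript
import express from 'express';
import timeout from 'connect-timeout';

const app = express();
app.use(timeout('5m')); // abort requests that exceed 5 minutes

// Halt the middleware chain for requests that have already timed out.
app.use((req, res, next) => {
  if (!req.timedout) next();
});
```
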
**Secure Cookies**:

```typescript
// Cookie configuration for auth tokens
{
  httpOnly: true,
  secure: process.env.NODE_ENV === 'production',
  sameSite: 'strict',
  maxAge: 7 * 24 * 60 * 60 * 1000 // 7 days for refresh token
}
```

### Request Logging

Per-request structured logging (ADR-004), sketched below:

- Request ID tracking
- User ID and IP address logging
- Failed request details (4xx+) logged with headers and body
- Unhandled errors assigned unique error IDs

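The bullets above map naturally onto a `pino-http`-style setup. The following is an assumption-laden sketch: the project exposes `req.log`, which is consistent with `pino-http`, but ADR-004's exact configuration is not shown here, and the user property path is illustrative:

```typescript
import express from 'express';
import pinoHttp from 'pino-http';
import { randomUUID } from 'node:crypto';

const app = express();
app.use(
  pinoHttp({
    genReqId: () => randomUUID(), // request ID tracking
    customProps: (req) => ({
      // userId/ip enrichment; the property path to the user is an assumption.
      userId: (req as any).user?.user?.user_id,
      ip: req.socket.remoteAddress,
    }),
  }),
);
```
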
## Key Files

- `server.ts` - Helmet middleware configuration (security headers)
- `src/config/rateLimiters.ts` - Rate limiter definitions (17+ limiters)
- `src/utils/rateLimit.ts` - Rate limit skip logic for testing
- `src/middleware/validation.middleware.ts` - Zod-based request validation
- `src/middleware/errorHandler.ts` - Production-safe error handling
- `src/middleware/multer.middleware.ts` - Secure file upload configuration
- `src/utils/stringUtils.ts` - Filename sanitization

## Future Enhancements

1. **Configure CORS** (if needed for cross-origin access):

   ```bash
   npm install cors @types/cors
   ```

   Add to `server.ts`:

   ```typescript
   import cors from 'cors';
   app.use(cors({
     origin: process.env.ALLOWED_ORIGINS?.split(',') || 'http://localhost:3000',
     credentials: true,
   }));
   ```

2. **Redis-backed rate limiting**: For distributed deployments, use the `rate-limit-redis` store (sketched below).

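   For example, a distributed limiter might look like this sketch. It assumes an `ioredis` client; the `sendCommand` wiring follows `rate-limit-redis`'s documented pattern for ioredis:

   ```typescript
   import rateLimit from 'express-rate-limit';
   import { RedisStore } from 'rate-limit-redis';
   import Redis from 'ioredis';

   const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

   export const distributedLimiter = rateLimit({
     windowMs: 15 * 60 * 1000, // 15 min
     limit: 100, // max requests per window per client
     standardHeaders: true,
     legacyHeaders: false,
     store: new RedisStore({
       // ioredis exposes call() for raw commands, which the store requires.
       sendCommand: (...args: string[]) => redis.call(...args),
     }),
   });
   ```
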
3. **CSP Nonce**: Generate per-request nonces for a stricter `script-src` policy (sketched below).

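   Helmet allows CSP directive values to be functions of the request/response, so per-request nonces can be sketched like this. This is illustrative only; integrating the nonce into the SPA's script tags is a separate step:

   ```typescript
   import express from 'express';
   import helmet from 'helmet';
   import { randomBytes } from 'node:crypto';

   const app = express();

   // Generate a fresh nonce per request and expose it to templates.
   app.use((_req, res, next) => {
     res.locals.cspNonce = randomBytes(16).toString('base64');
     next();
   });

   app.use(
     helmet({
       contentSecurityPolicy: {
         directives: {
           scriptSrc: [
             "'self'",
             // Helmet evaluates directive functions per response.
             (_req, res) => `'nonce-${(res as express.Response).locals.cspNonce}'`,
           ],
         },
       },
     }),
   );
   ```
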
4. **Report-Only CSP**: Add a `Content-Security-Policy-Report-Only` header for testing policy changes.

package-lock.json (generated, 14 changes)

@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.66",
+  "version": "0.9.70",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.66",
+      "version": "0.9.70",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
@@ -22,6 +22,7 @@
         "express": "^5.1.0",
         "express-list-endpoints": "^7.1.1",
         "express-rate-limit": "^8.2.1",
+        "helmet": "^8.1.0",
         "ioredis": "^5.8.2",
         "jsonwebtoken": "^9.0.2",
         "lucide-react": "^0.555.0",
@@ -10193,6 +10194,15 @@
       "dev": true,
       "license": "MIT"
     },
+    "node_modules/helmet": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/helmet/-/helmet-8.1.0.tgz",
+      "integrity": "sha512-jOiHyAZsmnr8LqoPGmCjYAaiuWwjAPLgY8ZX2XrmHawt99/u1y6RgrZMTeoPfpUbV96HOalYgz1qzkRbw54Pmg==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=18.0.0"
+      }
+    },
     "node_modules/help-me": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz",

package.json

@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.66",
+  "version": "0.9.70",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -41,6 +41,7 @@
     "express": "^5.1.0",
     "express-list-endpoints": "^7.1.1",
     "express-rate-limit": "^8.2.1",
+    "helmet": "^8.1.0",
     "ioredis": "^5.8.2",
     "jsonwebtoken": "^9.0.2",
     "lucide-react": "^0.555.0",

server.ts (33 changes)

@@ -1,6 +1,7 @@
 // server.ts
 import express, { Request, Response, NextFunction } from 'express';
 import { randomUUID } from 'crypto';
+import helmet from 'helmet';
 import timeout from 'connect-timeout';
 import cookieParser from 'cookie-parser';
 import listEndpoints from 'express-list-endpoints';
@@ -62,6 +63,38 @@ logger.info('-----------------------------------------------\n');

 const app = express();

+// --- Security Headers Middleware (ADR-016) ---
+// Helmet sets various HTTP headers to help protect the app from common web vulnerabilities.
+// Must be applied early in the middleware chain, before any routes.
+app.use(
+  helmet({
+    // Content Security Policy - configured for API + SPA frontend
+    contentSecurityPolicy: {
+      directives: {
+        defaultSrc: ["'self'"],
+        scriptSrc: ["'self'", "'unsafe-inline'"], // Allow inline scripts for React
+        styleSrc: ["'self'", "'unsafe-inline'"], // Allow inline styles for Tailwind
+        imgSrc: ["'self'", 'data:', 'blob:', 'https:'], // Allow images from various sources
+        fontSrc: ["'self'", 'https:', 'data:'],
+        connectSrc: ["'self'", 'https:', 'wss:'], // Allow API and WebSocket connections
+        frameSrc: ["'none'"], // Disallow iframes
+        objectSrc: ["'none'"], // Disallow plugins
+        upgradeInsecureRequests: process.env.NODE_ENV === 'production' ? [] : null,
+      },
+    },
+    // Cross-Origin settings for API
+    crossOriginEmbedderPolicy: false, // Disabled to allow loading external images
+    crossOriginResourcePolicy: { policy: 'cross-origin' }, // Allow cross-origin resource loading
+    // Additional security headers
+    hsts: {
+      maxAge: 31536000, // 1 year in seconds
+      includeSubDomains: true,
+      preload: true,
+    },
+    referrerPolicy: { policy: 'strict-origin-when-cross-origin' },
+  }),
+);
+
 // --- Core Middleware ---
 // Increase the limit for JSON and URL-encoded bodies. This is crucial for handling large file uploads
 // that are part of multipart/form-data requests, as the overall request size is checked.

@@ -8,6 +8,7 @@ import { z } from 'zod';
 import * as db from '../services/db/index.db';
 import type { UserProfile } from '../types';
 import { geocodingService } from '../services/geocodingService.server';
+import { cacheService } from '../services/cacheService.server';
 import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
 import {
   createUploadMiddleware,
@@ -635,6 +636,44 @@ router.post(
   },
 );

+/**
+ * POST /api/admin/system/clear-cache - Clears the application data cache.
+ * Clears cached flyers, brands, and stats data from Redis.
+ * Requires admin privileges.
+ */
+router.post(
+  '/system/clear-cache',
+  adminTriggerLimiter,
+  validateRequest(emptySchema),
+  async (req: Request, res: Response, next: NextFunction) => {
+    const userProfile = req.user as UserProfile;
+    req.log.info(
+      `[Admin] Manual cache clear received from user: ${userProfile.user.user_id}`,
+    );
+
+    try {
+      const [flyersDeleted, brandsDeleted, statsDeleted] = await Promise.all([
+        cacheService.invalidateFlyers(req.log),
+        cacheService.invalidateBrands(req.log),
+        cacheService.invalidateStats(req.log),
+      ]);
+
+      const totalDeleted = flyersDeleted + brandsDeleted + statsDeleted;
+      res.status(200).json({
+        message: `Successfully cleared the application cache. ${totalDeleted} keys were removed.`,
+        details: {
+          flyers: flyersDeleted,
+          brands: brandsDeleted,
+          stats: statsDeleted,
+        },
+      });
+    } catch (error) {
+      req.log.error({ error }, '[Admin] Failed to clear application cache.');
+      next(error);
+    }
+  },
+);
+
 /* Catches errors from multer (e.g., file size, file filter) */
 router.use(handleMulterError);

@@ -234,6 +234,9 @@ router.post(
  * POST /api/ai/upload-legacy - Process a flyer upload from a legacy client.
  * This is an authenticated route that processes the flyer synchronously.
  * This is used for integration testing the legacy upload flow.
+ *
+ * @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
+ * This synchronous endpoint is retained only for integration testing purposes.
  */
 router.post(
   '/upload-legacy',
@@ -282,9 +285,12 @@ router.get(
 );

 /**
- * This endpoint saves the processed flyer data to the database. It is the final step
- * in the flyer upload workflow after the AI has extracted the data.
+ * POST /api/ai/flyers/process - Saves the processed flyer data to the database.
+ * This is the final step in the flyer upload workflow after the AI has extracted the data.
  * It uses `optionalAuth` to handle submissions from both anonymous and authenticated users.
+ *
+ * @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
+ * This synchronous endpoint processes flyers inline and should be migrated to the queue-based approach.
  */
 router.post(
   '/flyers/process',

@@ -146,7 +146,6 @@ describe('flyerDbInsertSchema', () => {
   });

   it('should fail if store_id is missing', () => {
-    // @ts-expect-error Testing runtime validation
     const { store_id, ...invalid } = validDbFlyer;
     const result = flyerDbInsertSchema.safeParse(invalid);
     expect(result.success).toBe(false);

src/services/cacheService.server.ts (new file, 226 lines)

// src/services/cacheService.server.ts
/**
 * @file Centralized caching service implementing the Cache-Aside pattern.
 * This service provides a reusable wrapper around Redis for caching read-heavy operations.
 * See ADR-009 for the caching strategy documentation.
 */
import type { Logger } from 'pino';
import { connection as redis } from './redis.server';
import { logger as globalLogger } from './logger.server';

/**
 * TTL values in seconds for different cache types.
 * These can be tuned based on data volatility and freshness requirements.
 */
export const CACHE_TTL = {
  /** Brand/store list - rarely changes, safe to cache for 1 hour */
  BRANDS: 60 * 60,
  /** Flyer list - changes when new flyers are added, cache for 5 minutes */
  FLYERS: 5 * 60,
  /** Individual flyer data - cache for 10 minutes */
  FLYER: 10 * 60,
  /** Flyer items - cache for 10 minutes */
  FLYER_ITEMS: 10 * 60,
  /** Statistics - can be slightly stale, cache for 5 minutes */
  STATS: 5 * 60,
  /** Most frequent sales - aggregated data, cache for 15 minutes */
  FREQUENT_SALES: 15 * 60,
  /** Categories - rarely changes, cache for 1 hour */
  CATEGORIES: 60 * 60,
} as const;

/**
 * Cache key prefixes for different data types.
 * Using consistent prefixes allows for pattern-based invalidation.
 */
export const CACHE_PREFIX = {
  BRANDS: 'cache:brands',
  FLYERS: 'cache:flyers',
  FLYER: 'cache:flyer',
  FLYER_ITEMS: 'cache:flyer-items',
  STATS: 'cache:stats',
  FREQUENT_SALES: 'cache:frequent-sales',
  CATEGORIES: 'cache:categories',
} as const;

export interface CacheOptions {
  /** Time-to-live in seconds */
  ttl: number;
  /** Optional logger for this operation */
  logger?: Logger;
}

/**
 * Centralized cache service implementing the Cache-Aside pattern.
 * All cache operations are fail-safe - cache failures do not break the application.
 */
class CacheService {
  /**
   * Retrieves a value from cache.
   * @param key The cache key
   * @param logger Optional logger for this operation
   * @returns The cached value or null if not found/error
   */
  async get<T>(key: string, logger: Logger = globalLogger): Promise<T | null> {
    try {
      const cached = await redis.get(key);
      if (cached) {
        logger.debug({ cacheKey: key }, 'Cache hit');
        return JSON.parse(cached) as T;
      }
      logger.debug({ cacheKey: key }, 'Cache miss');
      return null;
    } catch (error) {
      logger.warn({ err: error, cacheKey: key }, 'Redis GET failed, proceeding without cache');
      return null;
    }
  }

  /**
   * Stores a value in cache with TTL.
   * @param key The cache key
   * @param value The value to cache (will be JSON stringified)
   * @param ttl Time-to-live in seconds
   * @param logger Optional logger for this operation
   */
  async set<T>(key: string, value: T, ttl: number, logger: Logger = globalLogger): Promise<void> {
    try {
      await redis.set(key, JSON.stringify(value), 'EX', ttl);
      logger.debug({ cacheKey: key, ttl }, 'Value cached');
    } catch (error) {
      logger.warn({ err: error, cacheKey: key }, 'Redis SET failed, value not cached');
    }
  }

  /**
   * Deletes a specific key from cache.
   * @param key The cache key to delete
   * @param logger Optional logger for this operation
   */
  async del(key: string, logger: Logger = globalLogger): Promise<void> {
    try {
      await redis.del(key);
      logger.debug({ cacheKey: key }, 'Cache key deleted');
    } catch (error) {
      logger.warn({ err: error, cacheKey: key }, 'Redis DEL failed');
    }
  }

  /**
   * Invalidates all cache keys matching a pattern.
   * Uses SCAN for safe iteration over large key sets.
   * @param pattern The pattern to match (e.g., 'cache:flyers*')
   * @param logger Optional logger for this operation
   * @returns The number of keys deleted
   */
  async invalidatePattern(pattern: string, logger: Logger = globalLogger): Promise<number> {
    let cursor = '0';
    let totalDeleted = 0;

    try {
      do {
        const [nextCursor, keys] = await redis.scan(cursor, 'MATCH', pattern, 'COUNT', 100);
        cursor = nextCursor;
        if (keys.length > 0) {
          const deletedCount = await redis.del(...keys);
          totalDeleted += deletedCount;
        }
      } while (cursor !== '0');

      logger.info({ pattern, totalDeleted }, 'Cache invalidation completed');
      return totalDeleted;
    } catch (error) {
      logger.error({ err: error, pattern }, 'Cache invalidation failed');
      throw error;
    }
  }

  /**
   * Implements the Cache-Aside pattern: try cache first, fall back to fetcher, cache result.
   * This is the primary method for adding caching to existing repository methods.
   *
   * @param key The cache key
   * @param fetcher Function that retrieves data from the source (e.g., database)
   * @param options Cache options including TTL
   * @returns The data (from cache or fetcher)
   *
   * @example
   * ```typescript
   * const brands = await cacheService.getOrSet(
   *   CACHE_PREFIX.BRANDS,
   *   () => this.db.query('SELECT * FROM stores'),
   *   { ttl: CACHE_TTL.BRANDS, logger }
   * );
   * ```
   */
  async getOrSet<T>(
    key: string,
    fetcher: () => Promise<T>,
    options: CacheOptions,
  ): Promise<T> {
    const logger = options.logger ?? globalLogger;

    // Try to get from cache first
    const cached = await this.get<T>(key, logger);
    if (cached !== null) {
      return cached;
    }

    // Cache miss - fetch from source
    const data = await fetcher();

    // Cache the result (fire-and-forget, don't await)
    this.set(key, data, options.ttl, logger).catch(() => {
      // Error already logged in set()
    });

    return data;
  }

  // --- Convenience methods for specific cache types ---

  /**
   * Invalidates all brand-related cache entries.
   */
  async invalidateBrands(logger: Logger = globalLogger): Promise<number> {
    return this.invalidatePattern(`${CACHE_PREFIX.BRANDS}*`, logger);
  }

  /**
   * Invalidates all flyer-related cache entries.
   */
  async invalidateFlyers(logger: Logger = globalLogger): Promise<number> {
    const patterns = [
      `${CACHE_PREFIX.FLYERS}*`,
      `${CACHE_PREFIX.FLYER}*`,
      `${CACHE_PREFIX.FLYER_ITEMS}*`,
    ];

    let total = 0;
    for (const pattern of patterns) {
      total += await this.invalidatePattern(pattern, logger);
    }
    return total;
  }

  /**
   * Invalidates cache for a specific flyer and its items.
   */
  async invalidateFlyer(flyerId: number, logger: Logger = globalLogger): Promise<void> {
    await Promise.all([
      this.del(`${CACHE_PREFIX.FLYER}:${flyerId}`, logger),
      this.del(`${CACHE_PREFIX.FLYER_ITEMS}:${flyerId}`, logger),
      // Also invalidate the flyers list since it may contain this flyer
      this.invalidatePattern(`${CACHE_PREFIX.FLYERS}*`, logger),
    ]);
  }

  /**
   * Invalidates all statistics cache entries.
   */
  async invalidateStats(logger: Logger = globalLogger): Promise<number> {
    return this.invalidatePattern(`${CACHE_PREFIX.STATS}*`, logger);
  }
}

export const cacheService = new CacheService();
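To show how a consumer pairs cached reads with invalidation on writes, here is a hypothetical sketch using only the APIs defined in the file above; the fetch/update callbacks stand in for real repository calls:

```typescript
import { cacheService, CACHE_TTL, CACHE_PREFIX } from './cacheService.server';
import { logger } from './logger.server';

// Read path: cache-aside with the CATEGORIES TTL (1 hour).
async function listCategories(fetchFromDb: () => Promise<string[]>): Promise<string[]> {
  return cacheService.getOrSet(CACHE_PREFIX.CATEGORIES, fetchFromDb, {
    ttl: CACHE_TTL.CATEGORIES,
    logger,
  });
}

// Write path: drop stale entries once the source of truth has changed.
async function renameCategory(update: () => Promise<void>): Promise<void> {
  await update();
  await cacheService.invalidatePattern(`${CACHE_PREFIX.CATEGORIES}*`, logger);
}
```
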
@@ -18,6 +18,7 @@ describe('Address DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockDb.query.mockReset();
     addressRepo = new AddressRepository(mockDb);
   });

@@ -40,6 +40,7 @@ describe('Admin DB Service', () => {
   beforeEach(() => {
     // Reset the global mock's call history before each test.
     vi.clearAllMocks();
+    mockDb.query.mockReset();

     // Reset the withTransaction mock before each test
     vi.mocked(withTransaction).mockImplementation(async (callback) => {

@@ -47,6 +47,7 @@ describe('Budget DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockDb.query.mockReset();
     // Instantiate the repository with the minimal mock db for each test
     budgetRepo = new BudgetRepository(mockDb);
   });

@@ -28,6 +28,7 @@ import { logger as mockLogger } from '../logger.server';
 describe('Conversion DB Service', () => {
   beforeEach(() => {
     vi.clearAllMocks();
+    mockPoolInstance.query.mockReset();
     // Make getPool return our mock instance for each test
     vi.mocked(getPool).mockReturnValue(mockPoolInstance as any);
   });

@@ -34,6 +34,16 @@ vi.mock('../logger.server', () => ({
 }));
 import { logger as mockLogger } from '../logger.server';

+// Mock cacheService to bypass caching logic during tests
+vi.mock('../cacheService.server', () => ({
+  cacheService: {
+    getOrSet: vi.fn(async (_key, callback) => callback()),
+    invalidateFlyer: vi.fn(),
+  },
+  CACHE_TTL: { BRANDS: 3600, FLYERS: 300, FLYER_ITEMS: 600 },
+  CACHE_PREFIX: { BRANDS: 'brands', FLYERS: 'flyers', FLYER_ITEMS: 'flyer_items' },
+}));
+
 // Mock the withTransaction helper
 vi.mock('./connection.db', async (importOriginal) => {
   const actual = await importOriginal<typeof import('./connection.db')>();
@@ -46,6 +56,7 @@ describe('Flyer DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockPoolInstance.query.mockReset();
     //In a transaction, `pool.connect()` returns a client. That client has a `release` method.
     // For these tests, we simulate this by having `connect` resolve to the pool instance itself,
     // and we ensure the `release` method is mocked on that instance.
@@ -586,18 +597,6 @@ describe('Flyer DB Service', () => {
   });

   describe('getFlyers', () => {
-    const expectedQuery = `
-      SELECT
-        f.*,
-        json_build_object(
-          'store_id', s.store_id,
-          'name', s.name,
-          'logo_url', s.logo_url
-        ) as store
-      FROM public.flyers f
-      JOIN public.stores s ON f.store_id = s.store_id
-      ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
-
     it('should use default limit and offset when none are provided', async () => {
       console.log('[TEST DEBUG] Running test: getFlyers > should use default limit and offset');
       const mockFlyers: Flyer[] = [createMockFlyer({ flyer_id: 1 })];
@@ -611,7 +610,7 @@ describe('Flyer DB Service', () => {
       );

       expect(mockPoolInstance.query).toHaveBeenCalledWith(
-        expectedQuery,
+        expect.stringContaining('FROM public.flyers f'),
         [20, 0], // Default values
       );
     });
@@ -629,7 +628,7 @@ describe('Flyer DB Service', () => {
       );

       expect(mockPoolInstance.query).toHaveBeenCalledWith(
-        expectedQuery,
+        expect.stringContaining('FROM public.flyers f'),
         [10, 5], // Provided values
       );
     });

@@ -3,6 +3,7 @@ import type { Pool, PoolClient } from 'pg';
 import { getPool, withTransaction } from './connection.db';
 import type { Logger } from 'pino';
 import { UniqueConstraintError, NotFoundError, handleDbError } from './errors.db';
+import { cacheService, CACHE_TTL, CACHE_PREFIX } from '../cacheService.server';
 import type {
   Flyer,
   FlyerItem,
@@ -229,22 +230,31 @@ export class FlyerRepository {

   /**
    * Retrieves all distinct brands from the stores table.
+   * Uses cache-aside pattern with 1-hour TTL (brands rarely change).
    * @returns A promise that resolves to an array of Brand objects.
    */
   async getAllBrands(logger: Logger): Promise<Brand[]> {
-    try {
-      const query = `
-        SELECT s.store_id as brand_id, s.name, s.logo_url, s.created_at, s.updated_at
-        FROM public.stores s
-        ORDER BY s.name;
-      `;
-      const res = await this.db.query<Brand>(query);
-      return res.rows;
-    } catch (error) {
-      handleDbError(error, logger, 'Database error in getAllBrands', {}, {
-        defaultMessage: 'Failed to retrieve brands from database.',
-      });
-    }
+    const cacheKey = CACHE_PREFIX.BRANDS;
+
+    return cacheService.getOrSet<Brand[]>(
+      cacheKey,
+      async () => {
+        try {
+          const query = `
+            SELECT s.store_id as brand_id, s.name, s.logo_url, s.created_at, s.updated_at
+            FROM public.stores s
+            ORDER BY s.name;
+          `;
+          const res = await this.db.query<Brand>(query);
+          return res.rows;
+        } catch (error) {
+          handleDbError(error, logger, 'Database error in getAllBrands', {}, {
+            defaultMessage: 'Failed to retrieve brands from database.',
+          });
+        }
+      },
+      { ttl: CACHE_TTL.BRANDS, logger },
+    );
   }

   /**
@@ -262,49 +272,67 @@ export class FlyerRepository {

   /**
    * Retrieves all flyers from the database, ordered by creation date.
+   * Uses cache-aside pattern with 5-minute TTL.
    * @param limit The maximum number of flyers to return.
    * @param offset The number of flyers to skip.
    * @returns A promise that resolves to an array of Flyer objects.
    */
   async getFlyers(logger: Logger, limit: number = 20, offset: number = 0): Promise<Flyer[]> {
-    try {
-      const query = `
-        SELECT
-          f.*,
-          json_build_object(
-            'store_id', s.store_id,
-            'name', s.name,
-            'logo_url', s.logo_url
-          ) as store
-        FROM public.flyers f
-        JOIN public.stores s ON f.store_id = s.store_id
-        ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
-      const res = await this.db.query<Flyer>(query, [limit, offset]);
-      return res.rows;
-    } catch (error) {
-      handleDbError(error, logger, 'Database error in getFlyers', { limit, offset }, {
-        defaultMessage: 'Failed to retrieve flyers from database.',
-      });
-    }
+    const cacheKey = `${CACHE_PREFIX.FLYERS}:${limit}:${offset}`;
+
+    return cacheService.getOrSet<Flyer[]>(
+      cacheKey,
+      async () => {
+        try {
+          const query = `
+            SELECT
+              f.*,
+              json_build_object(
+                'store_id', s.store_id,
+                'name', s.name,
+                'logo_url', s.logo_url
+              ) as store
+            FROM public.flyers f
+            JOIN public.stores s ON f.store_id = s.store_id
+            ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
+          const res = await this.db.query<Flyer>(query, [limit, offset]);
+          return res.rows;
+        } catch (error) {
+          handleDbError(error, logger, 'Database error in getFlyers', { limit, offset }, {
+            defaultMessage: 'Failed to retrieve flyers from database.',
+          });
+        }
+      },
+      { ttl: CACHE_TTL.FLYERS, logger },
+    );
   }

   /**
    * Retrieves all items for a specific flyer.
+   * Uses cache-aside pattern with 10-minute TTL.
    * @param flyerId The ID of the flyer.
    * @returns A promise that resolves to an array of FlyerItem objects.
    */
   async getFlyerItems(flyerId: number, logger: Logger): Promise<FlyerItem[]> {
-    try {
-      const res = await this.db.query<FlyerItem>(
-        'SELECT * FROM public.flyer_items WHERE flyer_id = $1 ORDER BY flyer_item_id ASC',
-        [flyerId],
-      );
-      return res.rows;
-    } catch (error) {
-      handleDbError(error, logger, 'Database error in getFlyerItems', { flyerId }, {
-        defaultMessage: 'Failed to retrieve flyer items from database.',
-      });
-    }
+    const cacheKey = `${CACHE_PREFIX.FLYER_ITEMS}:${flyerId}`;
+
+    return cacheService.getOrSet<FlyerItem[]>(
+      cacheKey,
+      async () => {
+        try {
+          const res = await this.db.query<FlyerItem>(
+            'SELECT * FROM public.flyer_items WHERE flyer_id = $1 ORDER BY flyer_item_id ASC',
+            [flyerId],
+          );
+          return res.rows;
+        } catch (error) {
+          handleDbError(error, logger, 'Database error in getFlyerItems', { flyerId }, {
+            defaultMessage: 'Failed to retrieve flyer items from database.',
+          });
+        }
+      },
+      { ttl: CACHE_TTL.FLYER_ITEMS, logger },
+    );
   }

   /**
@@ -399,6 +427,7 @@ export class FlyerRepository {
   /**
    * Deletes a flyer and all its associated items in a transaction.
    * This should typically be an admin-only action.
+   * Invalidates related cache entries after successful deletion.
    * @param flyerId The ID of the flyer to delete.
    */
   async deleteFlyer(flyerId: number, logger: Logger): Promise<void> {
@@ -413,6 +442,9 @@ export class FlyerRepository {
       }
       logger.info(`Successfully deleted flyer with ID: ${flyerId}`);
     });
+
+    // Invalidate cache after successful deletion
+    await cacheService.invalidateFlyer(flyerId, logger);
   } catch (error) {
     handleDbError(error, logger, 'Database transaction error in deleteFlyer', { flyerId }, {
       defaultMessage: 'Failed to delete flyer.',

@@ -29,6 +29,7 @@ describe('Gamification DB Service', () => {
   beforeEach(() => {
     // Reset the global mock's call history before each test.
     vi.clearAllMocks();
+    mockDb.query.mockReset();

     // Instantiate the repository with the mock pool for each test
     gamificationRepo = new GamificationRepository(mockDb);

@@ -30,6 +30,7 @@ describe('Notification DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockPoolInstance.query.mockReset();
     // Instantiate the repository with the mock pool for each test

     notificationRepo = new NotificationRepository(mockPoolInstance as unknown as Pool);

@@ -35,6 +35,7 @@ describe('Personalization DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockQuery.mockReset();
     // Reset the withTransaction mock before each test
     vi.mocked(withTransaction).mockImplementation(async (callback) => {
       const mockClient = { query: vi.fn() };

@@ -27,6 +27,7 @@ import { logger as mockLogger } from '../logger.server';
 describe('Price DB Service', () => {
   beforeEach(() => {
     vi.clearAllMocks();
+    mockPoolInstance.query.mockReset();
     // Make getPool return our mock instance for each test
     vi.mocked(getPool).mockReturnValue(mockPoolInstance as any);
   });

@@ -34,6 +34,7 @@ describe('Reaction DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockDb.query.mockReset();
     reactionRepo = new ReactionRepository(mockDb);
   });

@@ -28,6 +28,7 @@ describe('Recipe DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockQuery.mockReset();
     // Instantiate the repository with the mock pool for each test
     recipeRepo = new RecipeRepository(mockPoolInstance as unknown as Pool);
   });

@@ -36,6 +36,7 @@ describe('Shopping DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockPoolInstance.query.mockReset();
     // Instantiate the repository with the mock pool for each test
     shoppingRepo = new ShoppingRepository(mockPoolInstance as unknown as Pool);
   });

@@ -62,6 +62,7 @@ describe('User DB Service', () => {

   beforeEach(() => {
     vi.clearAllMocks();
+    mockPoolInstance.query.mockReset();
     userRepo = new UserRepository(mockPoolInstance as unknown as PoolClient);
     // Provide a default mock implementation for withTransaction for all tests.
     vi.mocked(withTransaction).mockImplementation(

@@ -4,12 +4,13 @@ import { withTransaction } from './db/connection.db';
 import { createFlyerAndItems } from './db/flyer.db';
 import { AdminRepository } from './db/admin.db';
 import { GamificationRepository } from './db/gamification.db';
+import { cacheService } from './cacheService.server';
 import type { FlyerInsert, FlyerItemInsert, Flyer } from '../types';

 export class FlyerPersistenceService {
   /**
    * Saves the flyer and its items to the database within a transaction.
-   * Also logs the activity.
+   * Also logs the activity and invalidates related cache entries.
    */
   async saveFlyer(
     flyerData: FlyerInsert,
@@ -17,7 +18,7 @@ export class FlyerPersistenceService {
     userId: string | undefined,
     logger: Logger,
   ): Promise<Flyer> {
-    return withTransaction(async (client) => {
+    const flyer = await withTransaction(async (client) => {
       const { flyer, items } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);

       logger.info(
@@ -43,5 +44,12 @@ export class FlyerPersistenceService {
       }
       return flyer;
     });
+
+    // Invalidate flyer list cache after successful creation (fire-and-forget)
+    cacheService.invalidateFlyers(logger).catch(() => {
+      // Error already logged in invalidateFlyers
+    });
+
+    return flyer;
   }
 }

@@ -2,6 +2,7 @@
 import { describe, it, expect, afterAll } from 'vitest';
 import crypto from 'crypto';
 import * as apiClient from '../../services/apiClient';
+import { getPool } from '../../services/db/connection.db';
 import path from 'path';
 import fs from 'fs';
 import { cleanupDb } from '../utils/cleanup';
@@ -19,12 +20,14 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
   let authToken: string;
   let userId: string | null = null;
   let flyerId: number | null = null;
+  let storeId: number | null = null;

   afterAll(async () => {
     // Use the centralized cleanup utility for robustness.
     await cleanupDb({
       userIds: [userId],
       flyerIds: [flyerId],
+      storeIds: [storeId],
     });
   });

@@ -98,5 +101,13 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
     expect(jobStatus.state).toBe('completed');
     flyerId = jobStatus.returnValue?.flyerId;
     expect(flyerId).toBeTypeOf('number');
+
+    // Fetch the store_id associated with the created flyer for robust cleanup
+    if (flyerId) {
+      const flyerRes = await getPool().query('SELECT store_id FROM public.flyers WHERE flyer_id = $1', [flyerId]);
+      if (flyerRes.rows.length > 0) {
+        storeId = flyerRes.rows[0].store_id;
+      }
+    }
   }, 240000); // Extended timeout for AI processing
 });

@@ -18,6 +18,8 @@ describe('Admin API Routes Integration Tests', () => {
   let regularUserToken: string;
   const createdUserIds: string[] = [];
   const createdStoreIds: number[] = [];
+  const createdCorrectionIds: number[] = [];
+  const createdFlyerIds: number[] = [];

   beforeAll(async () => {
     vi.stubEnv('FRONTEND_URL', 'https://example.com');
@@ -47,6 +49,8 @@ describe('Admin API Routes Integration Tests', () => {
     await cleanupDb({
       userIds: createdUserIds,
       storeIds: createdStoreIds,
+      suggestedCorrectionIds: createdCorrectionIds,
+      flyerIds: createdFlyerIds,
     });
   });

@@ -174,6 +178,7 @@ describe('Admin API Routes Integration Tests', () => {
       [testStoreId, `checksum-${Date.now()}-${Math.random()}`.padEnd(64, '0')],
     );
     const flyerId = flyerRes.rows[0].flyer_id;
+    createdFlyerIds.push(flyerId);

     const flyerItemRes = await getPool().query(
       `INSERT INTO public.flyer_items (flyer_id, item, price_display, price_in_cents, quantity)
@@ -188,6 +193,7 @@ describe('Admin API Routes Integration Tests', () => {
       [testFlyerItemId, adminUser.user.user_id],
     );
     testCorrectionId = correctionRes.rows[0].suggested_correction_id;
+    createdCorrectionIds.push(testCorrectionId);
   });

   it('should allow an admin to approve a correction', async () => {

@@ -110,6 +110,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
   const createdUserIds: string[] = [];
   const createdFlyerIds: number[] = [];
   const createdFilePaths: string[] = [];
+  const createdStoreIds: number[] = [];
   let workersModule: typeof import('../../services/workers.server');

   const originalFrontendUrl = process.env.FRONTEND_URL;
@@ -177,6 +178,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
     await cleanupDb({
       userIds: createdUserIds,
       flyerIds: createdFlyerIds,
+      storeIds: createdStoreIds,
     });

     // Use the centralized file cleanup utility.
@@ -274,6 +276,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
     const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
     expect(savedFlyer).toBeDefined();
     expect(savedFlyer?.flyer_id).toBe(flyerId);
+    if (savedFlyer?.store_id) {
+      createdStoreIds.push(savedFlyer.store_id);
+    }
     expect(savedFlyer?.file_name).toBe(uniqueFileName);
     // Also add the final processed image path to the cleanup list.
     // This is important because JPEGs are re-processed to strip EXIF data, creating a new file.
@@ -385,6 +390,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
     // 4. Verify EXIF data is stripped from the saved file
     const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
     expect(savedFlyer).toBeDefined();
+    if (savedFlyer?.store_id) {
+      createdStoreIds.push(savedFlyer.store_id);
+    }

     const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
     createdFilePaths.push(savedImagePath); // Add final path for cleanup
@@ -476,6 +484,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
     // 4. Verify metadata is stripped from the saved file
     const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
     expect(savedFlyer).toBeDefined();
+    if (savedFlyer?.store_id) {
+      createdStoreIds.push(savedFlyer.store_id);
+    }

     const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
     createdFilePaths.push(savedImagePath); // Add final path for cleanup

@@ -1,5 +1,5 @@
 // src/tests/integration/gamification.integration.test.ts
-import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
+import { describe, it, expect, beforeAll, afterAll, vi, beforeEach } from 'vitest';
 import supertest from 'supertest';
 import path from 'path';
 import fs from 'node:fs/promises';
@@ -70,8 +70,13 @@ describe('Gamification Flow Integration Test', () => {
       fullName: 'Gamification Tester',
       request,
     }));
+  });

-  // Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
+  beforeEach(() => {
+    vi.clearAllMocks();
+
+    // Reset AI Service Mock to default success state
+    mockExtractCoreData.mockReset();
     mockExtractCoreData.mockResolvedValue({
       store_name: 'Gamification Test Store',
       valid_from: null,
@@ -87,6 +92,9 @@ describe('Gamification Flow Integration Test', () => {
         },
       ],
     });
+
+    // Reset Image Processor Mock
+    vi.mocked(imageProcessor.generateFlyerIcon).mockResolvedValue('mock-icon.webp');
   });

   afterAll(async () => {
@@ -196,6 +204,9 @@ describe('Gamification Flow Integration Test', () => {
     const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
     expect(savedFlyer).toBeDefined();
     expect(savedFlyer?.file_name).toBe(uniqueFileName);
+    if (savedFlyer?.store_id) {
+      createdStoreIds.push(savedFlyer.store_id);
+    }
     // Also add the final processed image path to the cleanup list.
     // This is important because JPEGs are re-processed to strip EXIF data, creating a new file.
     const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));

@@ -88,27 +88,29 @@ describe('Price History API Integration Test (/api/price-history)', () => {

   afterAll(async () => {
     vi.unstubAllEnvs();
-    await cleanupDb({ userIds: createdUserIds });
     const pool = getPool();

     // The CASCADE on the tables should handle flyer_items.
     // The delete on flyers cascades to flyer_items, which fires a trigger `recalculate_price_history_on_flyer_item_delete`.
     // This trigger has a bug causing the test to fail. As a workaround for the test suite,
     // we temporarily disable user-defined triggers on the flyer_items table during cleanup.
     const flyerIds = [flyerId1, flyerId2, flyerId3].filter(Boolean);

     try {
       await pool.query('ALTER TABLE public.flyer_items DISABLE TRIGGER USER;');
       if (flyerIds.length > 0) {
         await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::int[])', [flyerIds]);
       }
-      if (storeId) await pool.query('DELETE FROM public.stores WHERE store_id = $1', [storeId]);
-      if (masterItemId)
-        await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = $1', [
-          masterItemId,
-        ]);
     } finally {
       // Ensure triggers are always re-enabled, even if an error occurs during deletion.
       await pool.query('ALTER TABLE public.flyer_items ENABLE TRIGGER USER;');
     }
+
+    await cleanupDb({
+      userIds: createdUserIds,
+      masterItemIds: [masterItemId],
+      storeIds: [storeId],
+    });
   });

   it('should return the correct price history for a given master item ID', async () => {

@@ -26,6 +26,7 @@ describe('Public API Routes Integration Tests', () => {
|
|||||||
let testRecipe: Recipe;
|
let testRecipe: Recipe;
|
||||||
let testFlyer: Flyer;
|
let testFlyer: Flyer;
|
||||||
let testStoreId: number;
|
let testStoreId: number;
|
||||||
|
const createdRecipeCommentIds: number[] = [];
|
||||||
|
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
vi.stubEnv('FRONTEND_URL', 'https://example.com');
|
vi.stubEnv('FRONTEND_URL', 'https://example.com');
|
||||||
@@ -85,6 +86,7 @@ describe('Public API Routes Integration Tests', () => {
       recipeIds: testRecipe ? [testRecipe.recipe_id] : [],
       flyerIds: testFlyer ? [testFlyer.flyer_id] : [],
       storeIds: testStoreId ? [testStoreId] : [],
+      recipeCommentIds: createdRecipeCommentIds,
     });
   });
 
@@ -186,10 +188,11 @@ describe('Public API Routes Integration Tests', () => {
 
   it('GET /api/recipes/:recipeId/comments should return comments for a recipe', async () => {
     // Add a comment to our test recipe first
-    await getPool().query(
-      `INSERT INTO public.recipe_comments (recipe_id, user_id, content) VALUES ($1, $2, 'Test comment')`,
+    const commentRes = await getPool().query(
+      `INSERT INTO public.recipe_comments (recipe_id, user_id, content) VALUES ($1, $2, 'Test comment') RETURNING recipe_comment_id`,
       [testRecipe.recipe_id, testUser.user.user_id],
     );
+    createdRecipeCommentIds.push(commentRes.rows[0].recipe_comment_id);
     const response = await request.get(`/api/recipes/${testRecipe.recipe_id}/comments`);
     const comments: RecipeComment[] = response.body;
     expect(response.status).toBe(200);
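The switch from a fire-and-forget INSERT to one with RETURNING recipe_comment_id is what lets the suite record the comment for deletion in afterAll. The general pattern with node-postgres, sketched with illustrative values; `insertAndTrack` is a hypothetical name:

    import { Pool } from 'pg';

    const pool = new Pool(); // connection details come from PG* env vars

    async function insertAndTrack(
      recipeId: number,
      userId: string,
      trackedIds: number[],
    ): Promise<number> {
      // RETURNING hands back the generated key in the same round trip,
      // avoiding a second SELECT to find out what was just created.
      const res = await pool.query<{ recipe_comment_id: number }>(
        `INSERT INTO public.recipe_comments (recipe_id, user_id, content)
         VALUES ($1, $2, 'Test comment')
         RETURNING recipe_comment_id`,
        [recipeId, userId],
      );
      const id = res.rows[0].recipe_comment_id;
      trackedIds.push(id); // afterAll later hands trackedIds to cleanupDb
      return id;
    }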
@@ -1,5 +1,5 @@
 // src/tests/integration/recipe.integration.test.ts
-import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
+import { describe, it, expect, beforeAll, afterAll, vi, afterEach } from 'vitest';
 import supertest from 'supertest';
 import { createAndLoginUser } from '../utils/testHelpers';
 import { cleanupDb } from '../utils/cleanup';
@@ -49,6 +49,12 @@ describe('Recipe API Routes Integration Tests', () => {
     createdRecipeIds.push(testRecipe.recipe_id);
   });
 
+  afterEach(() => {
+    vi.clearAllMocks();
+    // Reset the mock to its default state for the next test
+    vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue('Default Mock Suggestion');
+  });
+
   afterAll(async () => {
     vi.unstubAllEnvs();
     // Clean up all created resources
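The new afterEach hook fixes a classic mock-leak: one test overrides the AI service's resolved value and, without a reset, the override bleeds into the next test. A sketch of the full pattern, assuming `aiService` is module-mocked the way Vitest suites usually do it (the service path and factory shape are illustrative):

    import { vi, afterEach } from 'vitest';
    import * as aiService from '../../services/ai.server'; // illustrative path

    vi.mock('../../services/ai.server', () => ({
      generateRecipeSuggestion: vi.fn(),
    }));

    afterEach(() => {
      // clearAllMocks wipes call history and any per-test
      // mockResolvedValueOnce overrides; re-seeding the default keeps
      // later tests deterministic.
      vi.clearAllMocks();
      vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue('Default Mock Suggestion');
    });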
@@ -19,6 +19,7 @@ describe('User API Routes Integration Tests', () => {
   let testUser: UserProfile;
   let authToken: string;
   const createdUserIds: string[] = [];
+  const createdMasterItemIds: number[] = [];
 
   // Before any tests run, create a new user and log them in.
   // The token will be used for all subsequent API calls in this test suite.
@@ -38,7 +39,10 @@ describe('User API Routes Integration Tests', () => {
   // This now cleans up ALL users created by this test suite to prevent pollution.
   afterAll(async () => {
     vi.unstubAllEnvs();
-    await cleanupDb({ userIds: createdUserIds });
+    await cleanupDb({
+      userIds: createdUserIds,
+      masterItemIds: createdMasterItemIds
+    });
 
     // Safeguard to clean up any avatar files created during tests.
     const uploadDir = path.resolve(__dirname, '../../../uploads/avatars');
@@ -244,6 +248,7 @@ describe('User API Routes Integration Tests', () => {
       .send({ itemName: 'Integration Test Item', category: 'Other/Miscellaneous' });
     const newItem = addResponse.body;
 
+    if (newItem?.master_grocery_item_id) createdMasterItemIds.push(newItem.master_grocery_item_id);
     // Assert 1: Check that the item was created correctly.
     expect(addResponse.status).toBe(201);
     expect(newItem.name).toBe('Integration Test Item');
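The guard is deliberately placed before the assertions: if `expect(addResponse.status).toBe(201)` throws, the ID has already been recorded, so afterAll can still delete the row instead of stranding it in the test database. A condensed sketch of that ordering; the names mirror the test above and the assertion is simplified:

    const createdMasterItemIds: number[] = [];

    function recordThenAssert(addResponse: {
      status: number;
      body: { master_grocery_item_id?: number };
    }): void {
      // Record first - a failing assertion below must not leak the row.
      if (addResponse.body?.master_grocery_item_id) {
        createdMasterItemIds.push(addResponse.body.master_grocery_item_id);
      }
      if (addResponse.status !== 201) {
        throw new Error(`expected 201, got ${addResponse.status}`);
      }
    }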
@@ -14,22 +14,34 @@ let globalPool: ReturnType<typeof getPool> | null = null;
  * This is critical because old jobs with outdated error messages can pollute test results.
  */
 async function cleanAllQueues() {
-  console.log(`[PID:${process.pid}] Cleaning all BullMQ queues...`);
-  const { flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue } = await import('../../services/queues.server');
+  // Use console.error for visibility in CI logs (stderr is often more reliable)
+  console.error(`[PID:${process.pid}] [QUEUE CLEANUP] Starting BullMQ queue cleanup...`);
 
-  const queues = [flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue];
-
-  for (const queue of queues) {
-    try {
-      // obliterate() removes ALL data associated with the queue from Redis
-      await queue.obliterate({ force: true });
-      console.log(`  ✅ Cleaned queue: ${queue.name}`);
-    } catch (error) {
-      // Log but don't fail - the queue might not exist yet
-      console.log(`  ⚠️ Could not clean queue ${queue.name}: ${error instanceof Error ? error.message : 'Unknown error'}`);
+  try {
+    const { flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue } = await import('../../services/queues.server');
+    console.error(`[QUEUE CLEANUP] Successfully imported queue modules`);
+
+    const queues = [flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue];
+
+    for (const queue of queues) {
+      try {
+        // Log queue state before cleanup
+        const jobCounts = await queue.getJobCounts();
+        console.error(`[QUEUE CLEANUP] Queue "${queue.name}" before cleanup: ${JSON.stringify(jobCounts)}`);
+
+        // obliterate() removes ALL data associated with the queue from Redis
+        await queue.obliterate({ force: true });
+        console.error(`  ✅ [QUEUE CLEANUP] Cleaned queue: ${queue.name}`);
+      } catch (error) {
+        // Log but don't fail - the queue might not exist yet
+        console.error(`  ⚠️ [QUEUE CLEANUP] Could not clean queue ${queue.name}: ${error instanceof Error ? error.message : 'Unknown error'}`);
+      }
     }
+    console.error(`✅ [PID:${process.pid}] [QUEUE CLEANUP] All queues cleaned successfully.`);
+  } catch (error) {
+    console.error(`❌ [PID:${process.pid}] [QUEUE CLEANUP] CRITICAL ERROR during queue cleanup:`, error);
+    // Don't throw - we want the tests to continue even if cleanup fails
   }
-  console.log(`✅ [PID:${process.pid}] All queues cleaned.`);
 }
 
 export async function setup() {
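Both queue calls in this hunk are public BullMQ APIs: getJobCounts() reports per-state totals (waiting, active, completed, failed, delayed), and obliterate({ force: true }) deletes every Redis key the queue owns, with `force` skipping the check that the queue is paused and has no active jobs. A standalone sketch against an arbitrary queue; the connection settings are illustrative and follow this setup's use of Redis database 1 for tests:

    import { Queue } from 'bullmq';

    async function wipeQueue(name: string): Promise<void> {
      const queue = new Queue(name, {
        connection: { host: 'localhost', port: 6379, db: 1 }, // test DB
      });
      try {
        // Snapshot the state first, so the CI log shows what was removed.
        console.error(`before: ${JSON.stringify(await queue.getJobCounts())}`);
        // Removes jobs, events, and queue metadata in one call.
        await queue.obliterate({ force: true });
      } finally {
        await queue.close(); // release the Redis connection
      }
    }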
@@ -38,11 +50,15 @@ export async function setup() {
   // Fix: Set the FRONTEND_URL globally for the test server instance
   process.env.FRONTEND_URL = 'https://example.com';
 
-  console.log(`\n--- [PID:${process.pid}] Running Integration Test GLOBAL Setup ---`);
+  console.error(`\n--- [PID:${process.pid}] Running Integration Test GLOBAL Setup ---`);
+  console.error(`[SETUP] REDIS_URL: ${process.env.REDIS_URL}`);
+  console.error(`[SETUP] REDIS_PASSWORD is set: ${!!process.env.REDIS_PASSWORD}`);
 
   // CRITICAL: Clean all queues BEFORE running any tests to remove stale jobs
   // from previous test runs that may have outdated error messages.
+  console.error(`[SETUP] About to call cleanAllQueues()...`);
   await cleanAllQueues();
+  console.error(`[SETUP] cleanAllQueues() completed.`);
 
   // The integration setup is now the single source of truth for preparing the test DB.
   // It runs the same seed script that `npm run db:reset:test` used.
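The console.log to console.error swap here is about stream routing, not severity: some test runners buffer or swallow stdout emitted from setup hooks, while stderr is usually passed straight through to the CI log. A tiny sketch of the same idea as a helper; note that it logs whether the secret is set, never the secret itself:

    function setupLog(message: string): void {
      process.stderr.write(`[SETUP] ${message}\n`); // reliably visible in CI output
    }

    setupLog(`REDIS_URL: ${process.env.REDIS_URL}`);
    setupLog(`REDIS_PASSWORD is set: ${Boolean(process.env.REDIS_PASSWORD)}`);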
@@ -8,6 +8,10 @@ interface CleanupOptions {
   storeIds?: (number | null | undefined)[];
   recipeIds?: (number | null | undefined)[];
   budgetIds?: (number | null | undefined)[];
+  masterItemIds?: (number | null | undefined)[];
+  shoppingListIds?: (number | null | undefined)[];
+  suggestedCorrectionIds?: (number | null | undefined)[];
+  recipeCommentIds?: (number | null | undefined)[];
 }
 
 /**
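With the four new optional fields, a suite can hand every tracked ID to a single cleanupDb call instead of issuing ad-hoc DELETEs in each afterAll. A usage sketch; the arrays stand in for whatever the tests pushed, and nullable entries are acceptable because each branch in cleanupDb checks with .filter(Boolean) first:

    import { afterAll } from 'vitest';
    import { cleanupDb } from '../utils/cleanup';

    const createdUserIds: string[] = [];
    const createdMasterItemIds: number[] = [];
    const createdRecipeCommentIds: (number | null | undefined)[] = [];

    afterAll(async () => {
      await cleanupDb({
        userIds: createdUserIds,
        masterItemIds: createdMasterItemIds,
        recipeCommentIds: createdRecipeCommentIds, // null/undefined entries are tolerated
      });
    });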
@@ -25,11 +29,21 @@ export const cleanupDb = async (options: CleanupOptions) => {
   // Order of deletion matters to avoid foreign key violations.
   // Children entities first, then parents.
 
+  if (options.suggestedCorrectionIds?.filter(Boolean).length) {
+    await client.query('DELETE FROM public.suggested_corrections WHERE suggested_correction_id = ANY($1::int[])', [options.suggestedCorrectionIds]);
+    logger.debug(`Cleaned up ${options.suggestedCorrectionIds.length} suggested correction(s).`);
+  }
+
   if (options.budgetIds?.filter(Boolean).length) {
     await client.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [options.budgetIds]);
     logger.debug(`Cleaned up ${options.budgetIds.length} budget(s).`);
   }
 
+  if (options.recipeCommentIds?.filter(Boolean).length) {
+    await client.query('DELETE FROM public.recipe_comments WHERE recipe_comment_id = ANY($1::int[])', [options.recipeCommentIds]);
+    logger.debug(`Cleaned up ${options.recipeCommentIds.length} recipe comment(s).`);
+  }
+
   if (options.recipeIds?.filter(Boolean).length) {
     await client.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [options.recipeIds]);
     logger.debug(`Cleaned up ${options.recipeIds.length} recipe(s).`);
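The "children first, then parents" comment is the whole design of this function: every block deletes a table that references, directly or transitively, the tables deleted after it, so no DELETE ever trips a foreign key. Sketched as an ordered list; the annotations are inferences from the queries in this diff, not documented schema:

    // Reverse dependency order, as implied by the deletes above.
    const deletionOrder = [
      'suggested_corrections', // child rows, deleted first
      'budgets',
      'recipe_comments',       // references recipes and users
      'recipes',
      'flyers',                // cascades to flyer_items
      'stores',
      'master_grocery_items',
      'shopping_lists',
      'users',                 // parent of most of the above, deleted last
    ] as const;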
@@ -45,6 +59,16 @@ export const cleanupDb = async (options: CleanupOptions) => {
     logger.debug(`Cleaned up ${options.storeIds.length} store(s).`);
   }
 
+  if (options.masterItemIds?.filter(Boolean).length) {
+    await client.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = ANY($1::int[])', [options.masterItemIds]);
+    logger.debug(`Cleaned up ${options.masterItemIds.length} master grocery item(s).`);
+  }
+
+  if (options.shoppingListIds?.filter(Boolean).length) {
+    await client.query('DELETE FROM public.shopping_lists WHERE shopping_list_id = ANY($1::int[])', [options.shoppingListIds]);
+    logger.debug(`Cleaned up ${options.shoppingListIds.length} shopping list(s).`);
+  }
+
   if (options.userIds?.filter(Boolean).length) {
     await client.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [options.userIds]);
     logger.debug(`Cleaned up ${options.userIds.length} user(s).`);