Compare commits

12 Commits

| Author | SHA1       | Date |
|--------|------------|------|
|        | e0bf96824c |      |
|        | e86e09703e |      |
|        | 275741c79e |      |
|        | 3a40249ddb |      |
|        | 4c70905950 |      |
|        | 0b4884ff2a |      |
|        | e4acab77c8 |      |
|        | 4e20b1b430 |      |
|        | 15747ac942 |      |
|        | e5fa89ef17 |      |
|        | 2c65da31e9 |      |
|        | eeec6af905 |      |
@@ -335,7 +335,8 @@ jobs:
 fi
 
 GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
-COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
+# Sanitize commit message to prevent shell injection or build breaks (removes quotes, backticks, backslashes, $)
+COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s | tr -d '"`\\$')
 PACKAGE_VERSION=$(node -p "require('./package.json').version")
 VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
 VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
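The `tr -d '"`\\$'` filter drops exactly four shell-sensitive characters. A hypothetical TypeScript equivalent, shown only to illustrate what the sanitization does; the workflow itself relies on `tr`:

// Illustrative only: mirrors `tr -d '"`\\$'` from the workflow step above.
const sanitizeCommitMessage = (msg: string): string => msg.replace(/["`\\$]/g, '');

sanitizeCommitMessage('fix: read `$HOME` and "quotes" \\ safely');
// -> 'fix: read HOME and quotes  safely'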
@@ -16,6 +16,27 @@ if (missingSecrets.length > 0) {
   console.log('[ecosystem.config.cjs] ✅ Critical environment variables are present.');
 }
 
+// --- Shared Environment Variables ---
+// Define common variables to reduce duplication and ensure consistency across apps.
+const sharedEnv = {
+  DB_HOST: process.env.DB_HOST,
+  DB_USER: process.env.DB_USER,
+  DB_PASSWORD: process.env.DB_PASSWORD,
+  DB_NAME: process.env.DB_NAME,
+  REDIS_URL: process.env.REDIS_URL,
+  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+  FRONTEND_URL: process.env.FRONTEND_URL,
+  JWT_SECRET: process.env.JWT_SECRET,
+  GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+  GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+  SMTP_HOST: process.env.SMTP_HOST,
+  SMTP_PORT: process.env.SMTP_PORT,
+  SMTP_SECURE: process.env.SMTP_SECURE,
+  SMTP_USER: process.env.SMTP_USER,
+  SMTP_PASS: process.env.SMTP_PASS,
+  SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+};
+
 module.exports = {
   apps: [
     {
@@ -25,6 +46,11 @@ module.exports = {
       script: './node_modules/.bin/tsx',
       args: 'server.ts',
       max_memory_restart: '500M',
+      // Production Optimization: Run in cluster mode to utilize all CPU cores
+      instances: 'max',
+      exec_mode: 'cluster',
+      kill_timeout: 5000, // Allow 5s for graceful shutdown of API requests
+      log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
 
       // Restart Logic
       max_restarts: 40,
@@ -36,46 +62,16 @@ module.exports = {
         NODE_ENV: 'production',
         name: 'flyer-crawler-api',
         cwd: '/var/www/flyer-crawler.projectium.com',
-        DB_HOST: process.env.DB_HOST,
-        DB_USER: process.env.DB_USER,
-        DB_PASSWORD: process.env.DB_PASSWORD,
-        DB_NAME: process.env.DB_NAME,
-        REDIS_URL: process.env.REDIS_URL,
-        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-        FRONTEND_URL: process.env.FRONTEND_URL,
-        JWT_SECRET: process.env.JWT_SECRET,
-        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-        SMTP_HOST: process.env.SMTP_HOST,
-        SMTP_PORT: process.env.SMTP_PORT,
-        SMTP_SECURE: process.env.SMTP_SECURE,
-        SMTP_USER: process.env.SMTP_USER,
-        SMTP_PASS: process.env.SMTP_PASS,
-        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
         WORKER_LOCK_DURATION: '120000',
+        ...sharedEnv,
       },
       // Test Environment Settings
       env_test: {
         NODE_ENV: 'test',
         name: 'flyer-crawler-api-test',
         cwd: '/var/www/flyer-crawler-test.projectium.com',
-        DB_HOST: process.env.DB_HOST,
-        DB_USER: process.env.DB_USER,
-        DB_PASSWORD: process.env.DB_PASSWORD,
-        DB_NAME: process.env.DB_NAME,
-        REDIS_URL: process.env.REDIS_URL,
-        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-        FRONTEND_URL: process.env.FRONTEND_URL,
-        JWT_SECRET: process.env.JWT_SECRET,
-        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-        SMTP_HOST: process.env.SMTP_HOST,
-        SMTP_PORT: process.env.SMTP_PORT,
-        SMTP_SECURE: process.env.SMTP_SECURE,
-        SMTP_USER: process.env.SMTP_USER,
-        SMTP_PASS: process.env.SMTP_PASS,
-        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
         WORKER_LOCK_DURATION: '120000',
+        ...sharedEnv,
       },
       // Development Environment Settings
       env_development: {
@@ -83,23 +79,8 @@ module.exports = {
        name: 'flyer-crawler-api-dev',
        watch: true,
        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
-       DB_HOST: process.env.DB_HOST,
-       DB_USER: process.env.DB_USER,
-       DB_PASSWORD: process.env.DB_PASSWORD,
-       DB_NAME: process.env.DB_NAME,
-       REDIS_URL: process.env.REDIS_URL,
-       REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-       FRONTEND_URL: process.env.FRONTEND_URL,
-       JWT_SECRET: process.env.JWT_SECRET,
-       GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-       GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-       SMTP_HOST: process.env.SMTP_HOST,
-       SMTP_PORT: process.env.SMTP_PORT,
-       SMTP_SECURE: process.env.SMTP_SECURE,
-       SMTP_USER: process.env.SMTP_USER,
-       SMTP_PASS: process.env.SMTP_PASS,
-       SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
        WORKER_LOCK_DURATION: '120000',
+       ...sharedEnv,
      },
    },
    {
@@ -108,6 +89,8 @@ module.exports = {
       script: './node_modules/.bin/tsx',
       args: 'src/services/worker.ts',
       max_memory_restart: '1G',
+      kill_timeout: 10000, // Workers may need more time to complete a job
+      log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
 
       // Restart Logic
       max_restarts: 40,
@@ -119,44 +102,14 @@ module.exports = {
        NODE_ENV: 'production',
        name: 'flyer-crawler-worker',
        cwd: '/var/www/flyer-crawler.projectium.com',
-       DB_HOST: process.env.DB_HOST,
-       DB_USER: process.env.DB_USER,
-       DB_PASSWORD: process.env.DB_PASSWORD,
-       DB_NAME: process.env.DB_NAME,
-       REDIS_URL: process.env.REDIS_URL,
-       REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-       FRONTEND_URL: process.env.FRONTEND_URL,
-       JWT_SECRET: process.env.JWT_SECRET,
-       GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-       GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-       SMTP_HOST: process.env.SMTP_HOST,
-       SMTP_PORT: process.env.SMTP_PORT,
-       SMTP_SECURE: process.env.SMTP_SECURE,
-       SMTP_USER: process.env.SMTP_USER,
-       SMTP_PASS: process.env.SMTP_PASS,
-       SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+       ...sharedEnv,
      },
      // Test Environment Settings
      env_test: {
        NODE_ENV: 'test',
        name: 'flyer-crawler-worker-test',
        cwd: '/var/www/flyer-crawler-test.projectium.com',
-       DB_HOST: process.env.DB_HOST,
-       DB_USER: process.env.DB_USER,
-       DB_PASSWORD: process.env.DB_PASSWORD,
-       DB_NAME: process.env.DB_NAME,
-       REDIS_URL: process.env.REDIS_URL,
-       REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-       FRONTEND_URL: process.env.FRONTEND_URL,
-       JWT_SECRET: process.env.JWT_SECRET,
-       GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-       GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-       SMTP_HOST: process.env.SMTP_HOST,
-       SMTP_PORT: process.env.SMTP_PORT,
-       SMTP_SECURE: process.env.SMTP_SECURE,
-       SMTP_USER: process.env.SMTP_USER,
-       SMTP_PASS: process.env.SMTP_PASS,
-       SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+       ...sharedEnv,
      },
      // Development Environment Settings
      env_development: {
@@ -164,22 +117,7 @@ module.exports = {
        name: 'flyer-crawler-worker-dev',
        watch: true,
        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
-       DB_HOST: process.env.DB_HOST,
-       DB_USER: process.env.DB_USER,
-       DB_PASSWORD: process.env.DB_PASSWORD,
-       DB_NAME: process.env.DB_NAME,
-       REDIS_URL: process.env.REDIS_URL,
-       REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-       FRONTEND_URL: process.env.FRONTEND_URL,
-       JWT_SECRET: process.env.JWT_SECRET,
-       GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-       GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-       SMTP_HOST: process.env.SMTP_HOST,
-       SMTP_PORT: process.env.SMTP_PORT,
-       SMTP_SECURE: process.env.SMTP_SECURE,
-       SMTP_USER: process.env.SMTP_USER,
-       SMTP_PASS: process.env.SMTP_PASS,
-       SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+       ...sharedEnv,
      },
    },
    {
@@ -188,6 +126,8 @@ module.exports = {
       script: './node_modules/.bin/tsx',
       args: 'src/services/worker.ts',
       max_memory_restart: '1G',
+      kill_timeout: 10000,
+      log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
 
       // Restart Logic
       max_restarts: 40,
@@ -199,44 +139,14 @@ module.exports = {
        NODE_ENV: 'production',
        name: 'flyer-crawler-analytics-worker',
        cwd: '/var/www/flyer-crawler.projectium.com',
-       DB_HOST: process.env.DB_HOST,
-       DB_USER: process.env.DB_USER,
-       DB_PASSWORD: process.env.DB_PASSWORD,
-       DB_NAME: process.env.DB_NAME,
-       REDIS_URL: process.env.REDIS_URL,
-       REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-       FRONTEND_URL: process.env.FRONTEND_URL,
-       JWT_SECRET: process.env.JWT_SECRET,
-       GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-       GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-       SMTP_HOST: process.env.SMTP_HOST,
-       SMTP_PORT: process.env.SMTP_PORT,
-       SMTP_SECURE: process.env.SMTP_SECURE,
-       SMTP_USER: process.env.SMTP_USER,
-       SMTP_PASS: process.env.SMTP_PASS,
-       SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+       ...sharedEnv,
      },
      // Test Environment Settings
      env_test: {
        NODE_ENV: 'test',
        name: 'flyer-crawler-analytics-worker-test',
        cwd: '/var/www/flyer-crawler-test.projectium.com',
-       DB_HOST: process.env.DB_HOST,
-       DB_USER: process.env.DB_USER,
-       DB_PASSWORD: process.env.DB_PASSWORD,
-       DB_NAME: process.env.DB_NAME,
-       REDIS_URL: process.env.REDIS_URL,
-       REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-       FRONTEND_URL: process.env.FRONTEND_URL,
-       JWT_SECRET: process.env.JWT_SECRET,
-       GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-       GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-       SMTP_HOST: process.env.SMTP_HOST,
-       SMTP_PORT: process.env.SMTP_PORT,
-       SMTP_SECURE: process.env.SMTP_SECURE,
-       SMTP_USER: process.env.SMTP_USER,
-       SMTP_PASS: process.env.SMTP_PASS,
-       SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+       ...sharedEnv,
      },
      // Development Environment Settings
      env_development: {
@@ -244,22 +154,7 @@ module.exports = {
        name: 'flyer-crawler-analytics-worker-dev',
        watch: true,
        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
-       DB_HOST: process.env.DB_HOST,
-       DB_USER: process.env.DB_USER,
-       DB_PASSWORD: process.env.DB_PASSWORD,
-       DB_NAME: process.env.DB_NAME,
-       REDIS_URL: process.env.REDIS_URL,
-       REDIS_PASSWORD: process.env.REDIS_PASSWORD,
-       FRONTEND_URL: process.env.FRONTEND_URL,
-       JWT_SECRET: process.env.JWT_SECRET,
-       GEMINI_API_KEY: process.env.GEMINI_API_KEY,
-       GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
-       SMTP_HOST: process.env.SMTP_HOST,
-       SMTP_PORT: process.env.SMTP_PORT,
-       SMTP_SECURE: process.env.SMTP_SECURE,
-       SMTP_USER: process.env.SMTP_USER,
-       SMTP_PASS: process.env.SMTP_PASS,
-       SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+       ...sharedEnv,
      },
    },
  ],
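A note on the ordering above: JavaScript object spread lets later entries win, and `...sharedEnv` is spread last in every env block, so a key defined both locally and in sharedEnv resolves to the shared value. A minimal sketch of that precedence:

// Object-spread precedence, as used by the env blocks above: later entries win.
const shared = { DB_HOST: 'db.internal' };
const env = { NODE_ENV: 'production', DB_HOST: 'localhost', ...shared };
console.log(env.DB_HOST); // 'db.internal' — the shared value overrides the local one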
package-lock.json (4 changes, generated file)
@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.22",
+  "version": "0.9.28",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.22",
+      "version": "0.9.28",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.22",
+  "version": "0.9.28",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -1,8 +1,8 @@
 // src/features/flyer/FlyerDisplay.tsx
 import React from 'react';
+import { ScanIcon } from '../../components/icons/ScanIcon';
+import { formatDateRange } from '../../utils/dateUtils';
 import type { Store } from '../../types';
-import { formatDateRange } from './dateUtils';
-import { ScanIcon } from '../../components/icons/ScanIcon';
 
 export interface FlyerDisplayProps {
   imageUrl: string | null;
@@ -3,7 +3,7 @@ import React from 'react';
 import { render, screen, fireEvent, waitFor } from '@testing-library/react';
 import { describe, it, expect, vi, beforeEach, afterEach, type Mocked } from 'vitest';
 import { FlyerList } from './FlyerList';
-import { formatShortDate } from './dateUtils';
+import { formatShortDate } from '../../utils/dateUtils';
 import type { Flyer, UserProfile } from '../../types';
 import { createMockUserProfile } from '../../tests/utils/mockFactories';
 import { createMockFlyer } from '../../tests/utils/mockFactories';
@@ -7,7 +7,7 @@ import { parseISO, format, isValid } from 'date-fns';
 import { MapPinIcon, Trash2Icon } from 'lucide-react';
 import { logger } from '../../services/logger.client';
 import * as apiClient from '../../services/apiClient';
-import { calculateDaysBetween, formatDateRange } from './dateUtils';
+import { calculateDaysBetween, formatDateRange, getCurrentDateISOString } from '../../utils/dateUtils';
 
 interface FlyerListProps {
   flyers: Flyer[];
@@ -54,7 +54,7 @@ export const FlyerList: React.FC<FlyerListProps> = ({
       verbose: true,
     });
 
-    const daysLeft = calculateDaysBetween(format(new Date(), 'yyyy-MM-dd'), flyer.valid_to);
+    const daysLeft = calculateDaysBetween(getCurrentDateISOString(), flyer.valid_to);
     let daysLeftText = '';
     let daysLeftColor = '';
 
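The diff does not include `getCurrentDateISOString` itself; presumably it centralizes the `format(new Date(), 'yyyy-MM-dd')` call it replaces here. A sketch under that assumption — the real implementation may differ:

// Hypothetical sketch of getCurrentDateISOString; not shown in this diff.
import { format } from 'date-fns';

export const getCurrentDateISOString = (): string => format(new Date(), 'yyyy-MM-dd');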
@@ -1,130 +0,0 @@
-// src/features/flyer/dateUtils.test.ts
-import { describe, it, expect } from 'vitest';
-import { formatShortDate, calculateDaysBetween, formatDateRange } from './dateUtils';
-
-describe('formatShortDate', () => {
-  it('should format a valid YYYY-MM-DD date string correctly', () => {
-    expect(formatShortDate('2024-07-26')).toBe('Jul 26');
-  });
-
-  it('should handle single-digit days correctly', () => {
-    expect(formatShortDate('2025-01-05')).toBe('Jan 5');
-  });
-
-  it('should handle dates at the end of the year', () => {
-    expect(formatShortDate('2023-12-31')).toBe('Dec 31');
-  });
-
-  it('should return null for a null input', () => {
-    expect(formatShortDate(null)).toBeNull();
-  });
-
-  it('should return null for an undefined input', () => {
-    expect(formatShortDate(undefined)).toBeNull();
-  });
-
-  it('should return null for an empty string input', () => {
-    expect(formatShortDate('')).toBeNull();
-  });
-
-  it('should return null for an invalid date string', () => {
-    expect(formatShortDate('not-a-real-date')).toBeNull();
-  });
-
-  it('should return null for a malformed date string', () => {
-    expect(formatShortDate('2024-13-01')).toBeNull(); // Invalid month
-  });
-
-  it('should correctly format a full ISO string with time and timezone', () => {
-    expect(formatShortDate('2024-12-25T10:00:00Z')).toBe('Dec 25');
-  });
-});
-
-describe('calculateDaysBetween', () => {
-  it('should calculate the difference in days between two valid date strings', () => {
-    expect(calculateDaysBetween('2023-01-01', '2023-01-05')).toBe(4);
-  });
-
-  it('should return a negative number if the end date is before the start date', () => {
-    expect(calculateDaysBetween('2023-01-05', '2023-01-01')).toBe(-4);
-  });
-
-  it('should handle Date objects', () => {
-    const start = new Date('2023-01-01');
-    const end = new Date('2023-01-10');
-    expect(calculateDaysBetween(start, end)).toBe(9);
-  });
-
-  it('should return null if either date is null or undefined', () => {
-    expect(calculateDaysBetween(null, '2023-01-01')).toBeNull();
-    expect(calculateDaysBetween('2023-01-01', undefined)).toBeNull();
-  });
-
-  it('should return null if either date is invalid', () => {
-    expect(calculateDaysBetween('invalid', '2023-01-01')).toBeNull();
-    expect(calculateDaysBetween('2023-01-01', 'invalid')).toBeNull();
-  });
-});
-
-describe('formatDateRange', () => {
-  it('should format a range with two different valid dates', () => {
-    expect(formatDateRange('2023-01-01', '2023-01-05')).toBe('Jan 1 - Jan 5');
-  });
-
-  it('should format a range with the same start and end date as a single date', () => {
-    expect(formatDateRange('2023-01-01', '2023-01-01')).toBe('Jan 1');
-  });
-
-  it('should return only the start date if end date is missing', () => {
-    expect(formatDateRange('2023-01-01', null)).toBe('Jan 1');
-    expect(formatDateRange('2023-01-01', undefined)).toBe('Jan 1');
-  });
-
-  it('should return only the end date if start date is missing', () => {
-    expect(formatDateRange(null, '2023-01-05')).toBe('Jan 5');
-    expect(formatDateRange(undefined, '2023-01-05')).toBe('Jan 5');
-  });
-
-  it('should return null if both dates are missing or invalid', () => {
-    expect(formatDateRange(null, null)).toBeNull();
-    expect(formatDateRange(undefined, undefined)).toBeNull();
-    expect(formatDateRange('invalid', 'invalid')).toBeNull();
-  });
-
-  it('should handle one valid and one invalid date by showing only the valid one', () => {
-    expect(formatDateRange('2023-01-01', 'invalid')).toBe('Jan 1');
-    expect(formatDateRange('invalid', '2023-01-05')).toBe('Jan 5');
-  });
-
-  describe('verbose mode', () => {
-    it('should format a range with two different valid dates verbosely', () => {
-      expect(formatDateRange('2023-01-01', '2023-01-05', { verbose: true })).toBe(
-        'Deals valid from January 1, 2023 to January 5, 2023',
-      );
-    });
-
-    it('should format a range with the same start and end date verbosely', () => {
-      expect(formatDateRange('2023-01-01', '2023-01-01', { verbose: true })).toBe(
-        'Valid on January 1, 2023',
-      );
-    });
-
-    it('should format only the start date verbosely', () => {
-      expect(formatDateRange('2023-01-01', null, { verbose: true })).toBe(
-        'Deals start January 1, 2023',
-      );
-    });
-
-    it('should format only the end date verbosely', () => {
-      expect(formatDateRange(null, '2023-01-05', { verbose: true })).toBe(
-        'Deals end January 5, 2023',
-      );
-    });
-
-    it('should handle one valid and one invalid date verbosely', () => {
-      expect(formatDateRange('2023-01-01', 'invalid', { verbose: true })).toBe(
-        'Deals start January 1, 2023',
-      );
-    });
-  });
-});
@@ -1,65 +0,0 @@
-// src/features/flyer/dateUtils.ts
-import { parseISO, format, isValid, differenceInDays } from 'date-fns';
-
-export const formatShortDate = (dateString: string | null | undefined): string | null => {
-  if (!dateString) return null;
-  // Using `parseISO` from date-fns is more reliable than `new Date()` for YYYY-MM-DD strings.
-  // It correctly interprets the string as a local date, avoiding timezone-related "off-by-one" errors.
-  const date = parseISO(dateString);
-  if (isValid(date)) {
-    return format(date, 'MMM d');
-  }
-  return null;
-};
-
-export const calculateDaysBetween = (
-  startDate: string | Date | null | undefined,
-  endDate: string | Date | null | undefined,
-): number | null => {
-  if (!startDate || !endDate) return null;
-
-  const start = typeof startDate === 'string' ? parseISO(startDate) : startDate;
-  const end = typeof endDate === 'string' ? parseISO(endDate) : endDate;
-
-  if (!isValid(start) || !isValid(end)) return null;
-
-  return differenceInDays(end, start);
-};
-
-interface DateRangeOptions {
-  verbose?: boolean;
-}
-
-export const formatDateRange = (
-  startDate: string | null | undefined,
-  endDate: string | null | undefined,
-  options?: DateRangeOptions,
-): string | null => {
-  if (!options?.verbose) {
-    const start = formatShortDate(startDate);
-    const end = formatShortDate(endDate);
-
-    if (start && end) {
-      return start === end ? start : `${start} - ${end}`;
-    }
-    return start || end || null;
-  }
-
-  // Verbose format logic
-  const dateFormat = 'MMMM d, yyyy';
-  const formatFn = (dateStr: string | null | undefined) => {
-    if (!dateStr) return null;
-    const date = parseISO(dateStr);
-    return isValid(date) ? format(date, dateFormat) : null;
-  };
-
-  const start = formatFn(startDate);
-  const end = formatFn(endDate);
-
-  if (start && end) {
-    return start === end ? `Valid on ${start}` : `Deals valid from ${start} to ${end}`;
-  }
-  if (start) return `Deals start ${start}`;
-  if (end) return `Deals end ${end}`;
-  return null;
-};
src/hooks/useUserProfileData.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
+// src/hooks/useUserProfileData.ts
+import { useState, useEffect } from 'react';
+import * as apiClient from '../services/apiClient';
+import { UserProfile, Achievement, UserAchievement } from '../types';
+import { logger } from '../services/logger.client';
+
+export const useUserProfileData = () => {
+  const [profile, setProfile] = useState<UserProfile | null>(null);
+  const [achievements, setAchievements] = useState<(UserAchievement & Achievement)[]>([]);
+  const [isLoading, setIsLoading] = useState(true);
+  const [error, setError] = useState<string | null>(null);
+
+  useEffect(() => {
+    const fetchData = async () => {
+      setIsLoading(true);
+      try {
+        const [profileRes, achievementsRes] = await Promise.all([
+          apiClient.getAuthenticatedUserProfile(),
+          apiClient.getUserAchievements(),
+        ]);
+
+        if (!profileRes.ok) throw new Error('Failed to fetch user profile.');
+        if (!achievementsRes.ok) throw new Error('Failed to fetch user achievements.');
+
+        const profileData: UserProfile | null = await profileRes.json();
+        const achievementsData: (UserAchievement & Achievement)[] | null =
+          await achievementsRes.json();
+
+        logger.info(
+          { profileData, achievementsCount: achievementsData?.length },
+          'useUserProfileData: Fetched data',
+        );
+
+        if (profileData) {
+          setProfile(profileData);
+        }
+        setAchievements(achievementsData || []);
+      } catch (err) {
+        const errorMessage = err instanceof Error ? err.message : 'An unknown error occurred.';
+        setError(errorMessage);
+        logger.error({ err }, 'Error in useUserProfileData:');
+      } finally {
+        setIsLoading(false);
+      }
+    };
+
+    fetchData();
+  }, []);
+
+  return { profile, setProfile, achievements, isLoading, error };
+};
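For context, a consumer destructures the hook's return value; the UserProfilePage change further below does exactly this. A minimal sketch of such a consumer (the component name is illustrative, not part of this change):

// Minimal consumer sketch; component name is hypothetical.
import React from 'react';
import { useUserProfileData } from '../hooks/useUserProfileData';

const ProfileSummary: React.FC = () => {
  const { profile, achievements, isLoading, error } = useUserProfileData();

  if (isLoading) return <p>Loading…</p>;
  if (error) return <p>{error}</p>;

  // profile may still be null if the API returned an empty body.
  return (
    <div>
      <h2>{profile?.full_name ?? 'Unknown user'}</h2>
      <p>{achievements.length} achievements</p>
    </div>
  );
};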
@@ -109,6 +109,33 @@ describe('ResetPasswordPage', () => {
       );
     });
 
+    it('should show an error message if API returns a non-JSON error response', async () => {
+      // Simulate a server error returning HTML instead of JSON
+      mockedApiClient.resetPassword.mockResolvedValue(
+        new Response('<h1>Server Error</h1>', {
+          status: 500,
+          headers: { 'Content-Type': 'text/html' },
+        }),
+      );
+      renderWithRouter('test-token');
+
+      fireEvent.change(screen.getByPlaceholderText('New Password'), {
+        target: { value: 'newSecurePassword123' },
+      });
+      fireEvent.change(screen.getByPlaceholderText('Confirm New Password'), {
+        target: { value: 'newSecurePassword123' },
+      });
+      fireEvent.click(screen.getByRole('button', { name: /reset password/i }));
+
+      await waitFor(() => {
+        // The error from response.json() is implementation-dependent.
+        // We check for a substring that is likely to be present.
+        expect(screen.getByText(/not valid JSON/i)).toBeInTheDocument();
+      });
+
+      expect(logger.error).toHaveBeenCalledWith({ err: expect.any(SyntaxError) }, 'Failed to reset password.');
+    });
+
     it('should show a loading spinner while submitting', async () => {
       let resolvePromise: (value: Response) => void;
       const mockPromise = new Promise<Response>((resolve) => {
@@ -123,6 +123,24 @@ describe('UserProfilePage', () => {
     });
   });
 
+  it('should handle null achievements data gracefully on fetch', async () => {
+    mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue(
+      new Response(JSON.stringify(mockProfile)),
+    );
+    // Mock a successful response but with a null body for achievements
+    mockedApiClient.getUserAchievements.mockResolvedValue(new Response(JSON.stringify(null)));
+    render(<UserProfilePage />);
+
+    await waitFor(() => {
+      expect(screen.getByRole('heading', { name: 'Test User' })).toBeInTheDocument();
+      // The mock achievements list should show 0 achievements because the component
+      // should handle the null response and pass an empty array to the list.
+      expect(screen.getByTestId('achievements-list-mock')).toHaveTextContent(
+        'Achievements Count: 0',
+      );
+    });
+  });
+
   it('should render the profile and achievements on successful fetch', async () => {
     mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue(
       new Response(JSON.stringify(mockProfile)),
@@ -294,6 +312,24 @@ describe('UserProfilePage', () => {
     });
   });
 
+  it('should handle non-ok response with null body when saving name', async () => {
+    // This tests the case where the server returns an error status but an empty/null body.
+    mockedApiClient.updateUserProfile.mockResolvedValue(new Response(null, { status: 500 }));
+    render(<UserProfilePage />);
+    await screen.findByText('Test User');
+
+    fireEvent.click(screen.getByRole('button', { name: /edit/i }));
+    fireEvent.change(screen.getByRole('textbox'), { target: { value: 'New Name' } });
+    fireEvent.click(screen.getByRole('button', { name: /save/i }));
+
+    await waitFor(() => {
+      // The component should fall back to the default error message.
+      expect(mockedNotificationService.notifyError).toHaveBeenCalledWith(
+        'Failed to update name.',
+      );
+    });
+  });
+
   it('should handle unknown errors when saving name', async () => {
     mockedApiClient.updateUserProfile.mockRejectedValue('Unknown update error');
     render(<UserProfilePage />);
@@ -420,6 +456,22 @@ describe('UserProfilePage', () => {
     });
   });
 
+  it('should handle non-ok response with null body when uploading avatar', async () => {
+    mockedApiClient.uploadAvatar.mockResolvedValue(new Response(null, { status: 500 }));
+    render(<UserProfilePage />);
+    await screen.findByAltText('User Avatar');
+
+    const fileInput = screen.getByTestId('avatar-file-input');
+    const file = new File(['(⌐□_□)'], 'chucknorris.png', { type: 'image/png' });
+    fireEvent.change(fileInput, { target: { files: [file] } });
+
+    await waitFor(() => {
+      expect(mockedNotificationService.notifyError).toHaveBeenCalledWith(
+        'Failed to upload avatar.',
+      );
+    });
+  });
+
   it('should handle unknown errors when uploading avatar', async () => {
     mockedApiClient.uploadAvatar.mockRejectedValue('Unknown upload error');
     render(<UserProfilePage />);
@@ -1,15 +1,13 @@
 import React, { useState, useEffect, useRef } from 'react';
 import * as apiClient from '../services/apiClient';
-import { UserProfile, Achievement, UserAchievement } from '../types';
+import type { UserProfile } from '../types';
 import { logger } from '../services/logger.client';
 import { notifySuccess, notifyError } from '../services/notificationService';
 import { AchievementsList } from '../components/AchievementsList';
+import { useUserProfileData } from '../hooks/useUserProfileData';
 
 const UserProfilePage: React.FC = () => {
-  const [profile, setProfile] = useState<UserProfile | null>(null);
-  const [achievements, setAchievements] = useState<(UserAchievement & Achievement)[]>([]);
-  const [isLoading, setIsLoading] = useState(true);
-  const [error, setError] = useState<string | null>(null);
+  const { profile, setProfile, achievements, isLoading, error } = useUserProfileData();
   const [isEditingName, setIsEditingName] = useState(false);
   const [editingName, setEditingName] = useState('');
   const [isUploading, setIsUploading] = useState(false);
@@ -17,43 +15,10 @@ const UserProfilePage: React.FC = () => {
   const fileInputRef = useRef<HTMLInputElement>(null);
 
   useEffect(() => {
-    const fetchData = async () => {
-      setIsLoading(true);
-      try {
-        // Fetch profile and achievements data in parallel
-        const [profileRes, achievementsRes] = await Promise.all([
-          apiClient.getAuthenticatedUserProfile(),
-          apiClient.getUserAchievements(),
-        ]);
-
-        if (!profileRes.ok) throw new Error('Failed to fetch user profile.');
-        if (!achievementsRes.ok) throw new Error('Failed to fetch user achievements.');
-
-        const profileData: UserProfile = await profileRes.json();
-        const achievementsData: (UserAchievement & Achievement)[] = await achievementsRes.json();
-
-        logger.info(
-          { profileData, achievementsCount: achievementsData?.length },
-          'UserProfilePage: Fetched data',
-        );
-
-        setProfile(profileData);
-
-        if (profileData) {
-          setEditingName(profileData.full_name || '');
-        }
-        setAchievements(achievementsData);
-      } catch (err) {
-        const errorMessage = err instanceof Error ? err.message : 'An unknown error occurred.';
-        setError(errorMessage);
-        logger.error({ err }, 'Error fetching user profile data:');
-      } finally {
-        setIsLoading(false);
-      }
-    };
-
-    fetchData();
-  }, []); // Empty dependency array means this runs once on component mount
+    if (profile) {
+      setEditingName(profile.full_name || '');
+    }
+  }, [profile]);
 
   const handleSaveName = async () => {
     if (!profile) return;
@@ -61,8 +26,8 @@ const UserProfilePage: React.FC = () => {
     try {
       const response = await apiClient.updateUserProfile({ full_name: editingName });
       if (!response.ok) {
-        const errorData = await response.json();
-        throw new Error(errorData.message || 'Failed to update name.');
+        const errorData = await response.json().catch(() => null); // Gracefully handle non-JSON responses
+        throw new Error(errorData?.message || 'Failed to update name.');
       }
       const updatedProfile = await response.json();
       setProfile((prevProfile) => (prevProfile ? { ...prevProfile, ...updatedProfile } : null));
@@ -88,8 +53,8 @@ const UserProfilePage: React.FC = () => {
     try {
       const response = await apiClient.uploadAvatar(file);
       if (!response.ok) {
-        const errorData = await response.json();
-        throw new Error(errorData.message || 'Failed to upload avatar.');
+        const errorData = await response.json().catch(() => null); // Gracefully handle non-JSON responses
+        throw new Error(errorData?.message || 'Failed to upload avatar.');
      }
      const updatedProfile = await response.json();
      setProfile((prevProfile) => (prevProfile ? { ...prevProfile, ...updatedProfile } : null));
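The `.catch(() => null)` pattern appears in both handlers above and could also be factored into a helper. A sketch — the helper name is hypothetical, not part of this change:

// Hypothetical helper extracting the repeated pattern; not part of this diff.
const safeJson = async <T>(response: Response): Promise<T | null> =>
  response.json().catch(() => null);

// Usage mirroring handleSaveName:
//   const errorData = await safeJson<{ message?: string }>(response);
//   throw new Error(errorData?.message || 'Failed to update name.');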
@@ -264,6 +264,7 @@ describe('ProfileManager', () => {
     });
 
     it('should show an error if trying to save profile when not logged in', async () => {
+      const loggerSpy = vi.spyOn(logger.logger, 'warn');
       // This is an edge case, but good to test the safeguard
       render(<ProfileManager {...defaultAuthenticatedProps} userProfile={null} />);
       fireEvent.change(screen.getByLabelText(/full name/i), { target: { value: 'Updated Name' } });
@@ -271,6 +272,7 @@ describe('ProfileManager', () => {
 
       await waitFor(() => {
         expect(notifyError).toHaveBeenCalledWith('Cannot save profile, no user is logged in.');
+        expect(loggerSpy).toHaveBeenCalledWith('[handleProfileSave] Aborted: No user is logged in.');
       });
       expect(mockedApiClient.updateUserProfile).not.toHaveBeenCalled();
     });
@@ -496,6 +498,23 @@ describe('ProfileManager', () => {
       });
     });
 
+    it('should show an error when trying to link a GitHub account', async () => {
+      render(<ProfileManager {...defaultAuthenticatedProps} />);
+      fireEvent.click(screen.getByRole('button', { name: /security/i }));
+
+      await waitFor(() => {
+        expect(screen.getByRole('button', { name: /link github account/i })).toBeInTheDocument();
+      });
+
+      fireEvent.click(screen.getByRole('button', { name: /link github account/i }));
+
+      await waitFor(() => {
+        expect(notifyError).toHaveBeenCalledWith(
+          'Account linking with github is not yet implemented.',
+        );
+      });
+    });
+
     it('should switch between all tabs correctly', async () => {
       render(<ProfileManager {...defaultAuthenticatedProps} />);
 
@@ -804,6 +823,63 @@ describe('ProfileManager', () => {
      });
    });
 
+    it('should allow changing unit system when preferences are initially null', async () => {
+      const profileWithoutPrefs = { ...authenticatedProfile, preferences: null as any };
+      const { rerender } = render(
+        <ProfileManager {...defaultAuthenticatedProps} userProfile={profileWithoutPrefs} />,
+      );
+
+      fireEvent.click(screen.getByRole('button', { name: /preferences/i }));
+
+      const imperialRadio = await screen.findByLabelText(/imperial/i);
+      const metricRadio = screen.getByLabelText(/metric/i);
+
+      // With null preferences, neither should be checked.
+      expect(imperialRadio).not.toBeChecked();
+      expect(metricRadio).not.toBeChecked();
+
+      // Mock the API response for the update
+      const updatedProfileWithPrefs = {
+        ...profileWithoutPrefs,
+        preferences: { darkMode: false, unitSystem: 'metric' as const },
+      };
+      mockedApiClient.updateUserPreferences.mockResolvedValue({
+        ok: true,
+        json: () => Promise.resolve(updatedProfileWithPrefs),
+      } as Response);
+
+      fireEvent.click(metricRadio);
+
+      await waitFor(() => {
+        expect(mockedApiClient.updateUserPreferences).toHaveBeenCalledWith(
+          { unitSystem: 'metric' },
+          expect.anything(),
+        );
+        expect(mockOnProfileUpdate).toHaveBeenCalledWith(updatedProfileWithPrefs);
+      });
+
+      // Rerender with the new profile to check the UI update
+      rerender(
+        <ProfileManager {...defaultAuthenticatedProps} userProfile={updatedProfileWithPrefs} />,
+      );
+
+      fireEvent.click(screen.getByRole('button', { name: /preferences/i }));
+      expect(await screen.findByLabelText(/metric/i)).toBeChecked();
+      expect(screen.getByLabelText(/imperial/i)).not.toBeChecked();
+    });
+
+    it('should not call onProfileUpdate if updating unit system fails', async () => {
+      mockedApiClient.updateUserPreferences.mockRejectedValue(new Error('API failed'));
+      render(<ProfileManager {...defaultAuthenticatedProps} />);
+      fireEvent.click(screen.getByRole('button', { name: /preferences/i }));
+      const metricRadio = await screen.findByLabelText(/metric/i);
+      fireEvent.click(metricRadio);
+      await waitFor(() => {
+        expect(notifyError).toHaveBeenCalledWith('API failed');
+      });
+      expect(mockOnProfileUpdate).not.toHaveBeenCalled();
+    });
+
     it('should only call updateProfile when only profile data has changed', async () => {
       render(<ProfileManager {...defaultAuthenticatedProps} />);
       await waitFor(() =>
@@ -1004,5 +1080,19 @@ describe('ProfileManager', () => {
        expect(notifyError).toHaveBeenCalledWith('Permission denied');
      });
    });
+
+    it('should not trigger OAuth link if user profile is missing', async () => {
+      // This is an edge case to test the guard clause in handleOAuthLink
+      render(<ProfileManager {...defaultAuthenticatedProps} userProfile={null} />);
+      fireEvent.click(screen.getByRole('button', { name: /security/i }));
+
+      const linkButton = await screen.findByRole('button', { name: /link google account/i });
+      fireEvent.click(linkButton);
+
+      // The function should just return, so nothing should happen.
+      await waitFor(() => {
+        expect(notifyError).not.toHaveBeenCalled();
+      });
+    });
   });
 });
@@ -250,6 +250,17 @@ describe('Admin Content Management Routes (/api/admin)', () => {
       expect(response.status).toBe(404);
       expect(response.body.message).toBe('Correction with ID 999 not found');
     });
+
+    it('PUT /corrections/:id should return 500 on a generic DB error', async () => {
+      vi.mocked(mockedDb.adminRepo.updateSuggestedCorrection).mockRejectedValue(
+        new Error('Generic DB Error'),
+      );
+      const response = await supertest(app)
+        .put('/api/admin/corrections/101')
+        .send({ suggested_value: 'new value' });
+      expect(response.status).toBe(500);
+      expect(response.body.message).toBe('Generic DB Error');
+    });
   });
 
   describe('Flyer Review Routes', () => {
@@ -294,6 +305,13 @@ describe('Admin Content Management Routes (/api/admin)', () => {
       expect(response.body).toEqual(mockBrands);
     });
 
+    it('GET /brands should return 500 on DB error', async () => {
+      vi.mocked(mockedDb.flyerRepo.getAllBrands).mockRejectedValue(new Error('DB Error'));
+      const response = await supertest(app).get('/api/admin/brands');
+      expect(response.status).toBe(500);
+      expect(response.body.message).toBe('DB Error');
+    });
+
     it('POST /brands/:id/logo should upload a logo and update the brand', async () => {
       const brandId = 55;
       vi.mocked(mockedDb.adminRepo.updateBrandLogo).mockResolvedValue(undefined);
@@ -500,6 +518,16 @@ describe('Admin Content Management Routes (/api/admin)', () => {
       expect(response.body.message).toBe('Flyer with ID 999 not found.');
     });
 
+    it('DELETE /flyers/:flyerId should return 500 on a generic DB error', async () => {
+      const flyerId = 42;
+      vi.mocked(mockedDb.flyerRepo.deleteFlyer).mockRejectedValue(
+        new Error('Generic DB Error'),
+      );
+      const response = await supertest(app).delete(`/api/admin/flyers/${flyerId}`);
+      expect(response.status).toBe(500);
+      expect(response.body.message).toBe('Generic DB Error');
+    });
+
     it('DELETE /flyers/:flyerId should return 400 for an invalid flyerId', async () => {
       const response = await supertest(app).delete('/api/admin/flyers/abc');
       expect(response.status).toBe(400);
@@ -54,6 +54,14 @@ vi.mock('../services/workers.server', () => ({
   weeklyAnalyticsWorker: { name: 'weekly-analytics-reporting', isRunning: vi.fn() },
 }));
 
+// Mock the monitoring service directly to test route error handling
+vi.mock('../services/monitoringService.server', () => ({
+  monitoringService: {
+    getWorkerStatuses: vi.fn(),
+    getQueueStatuses: vi.fn(),
+  },
+}));
+
 // Mock other dependencies that are part of the adminRouter setup but not directly tested here
 vi.mock('../services/db/flyer.db');
 vi.mock('../services/db/recipe.db');
@@ -78,11 +86,8 @@ vi.mock('@bull-board/express', () => ({
 import adminRouter from './admin.routes';
 
 // Import the mocked modules to control them
-import * as queueService from '../services/queueService.server';
-import * as workerService from '../services/workers.server';
+import { monitoringService } from '../services/monitoringService.server';
 import { adminRepo } from '../services/db/index.db';
-const mockedQueueService = queueService as Mocked<typeof queueService>;
-const mockedWorkerService = workerService as Mocked<typeof workerService>;
 
 // Mock the logger
 vi.mock('../services/logger.server', () => ({
@@ -146,16 +151,26 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
       expect(response.body.errors).toBeDefined();
       expect(response.body.errors.length).toBe(2); // Both limit and offset are invalid
     });
+
+    it('should return 500 if fetching activity log fails', async () => {
+      vi.mocked(adminRepo.getActivityLog).mockRejectedValue(new Error('DB Error'));
+      const response = await supertest(app).get('/api/admin/activity-log');
+      expect(response.status).toBe(500);
+      expect(response.body.message).toBe('DB Error');
+    });
   });
 
   describe('GET /workers/status', () => {
     it('should return the status of all registered workers', async () => {
       // Arrange: Set the mock status for each worker
-      vi.mocked(mockedWorkerService.flyerWorker.isRunning).mockReturnValue(true);
-      vi.mocked(mockedWorkerService.emailWorker.isRunning).mockReturnValue(true);
-      vi.mocked(mockedWorkerService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
-      vi.mocked(mockedWorkerService.cleanupWorker.isRunning).mockReturnValue(true);
-      vi.mocked(mockedWorkerService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);
+      const mockStatuses = [
+        { name: 'flyer-processing', isRunning: true },
+        { name: 'email-sending', isRunning: true },
+        { name: 'analytics-reporting', isRunning: false },
+        { name: 'file-cleanup', isRunning: true },
+        { name: 'weekly-analytics-reporting', isRunning: true },
+      ];
+      vi.mocked(monitoringService.getWorkerStatuses).mockResolvedValue(mockStatuses);
 
       // Act
       const response = await supertest(app).get('/api/admin/workers/status');
@@ -170,51 +185,41 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
         { name: 'weekly-analytics-reporting', isRunning: true },
       ]);
     });
+
+    it('should return 500 if fetching worker statuses fails', async () => {
+      vi.mocked(monitoringService.getWorkerStatuses).mockRejectedValue(new Error('Worker Error'));
+      const response = await supertest(app).get('/api/admin/workers/status');
+      expect(response.status).toBe(500);
+      expect(response.body.message).toBe('Worker Error');
+    });
   });
 
   describe('GET /queues/status', () => {
     it('should return job counts for all registered queues', async () => {
       // Arrange: Set the mock job counts for each queue
-      vi.mocked(mockedQueueService.flyerQueue.getJobCounts).mockResolvedValue({
-        waiting: 5,
-        active: 1,
-        completed: 100,
-        failed: 2,
-        delayed: 0,
-        paused: 0,
-      });
-      vi.mocked(mockedQueueService.emailQueue.getJobCounts).mockResolvedValue({
-        waiting: 0,
-        active: 0,
-        completed: 50,
-        failed: 0,
-        delayed: 0,
-        paused: 0,
-      });
-      vi.mocked(mockedQueueService.analyticsQueue.getJobCounts).mockResolvedValue({
-        waiting: 0,
-        active: 1,
-        completed: 10,
-        failed: 1,
-        delayed: 0,
-        paused: 0,
-      });
-      vi.mocked(mockedQueueService.cleanupQueue.getJobCounts).mockResolvedValue({
-        waiting: 2,
-        active: 0,
-        completed: 25,
-        failed: 0,
-        delayed: 0,
-        paused: 0,
-      });
-      vi.mocked(mockedQueueService.weeklyAnalyticsQueue.getJobCounts).mockResolvedValue({
-        waiting: 1,
-        active: 0,
-        completed: 5,
-        failed: 0,
-        delayed: 0,
-        paused: 0,
-      });
+      const mockStatuses = [
+        {
+          name: 'flyer-processing',
+          counts: { waiting: 5, active: 1, completed: 100, failed: 2, delayed: 0, paused: 0 },
+        },
+        {
+          name: 'email-sending',
+          counts: { waiting: 0, active: 0, completed: 50, failed: 0, delayed: 0, paused: 0 },
+        },
+        {
+          name: 'analytics-reporting',
+          counts: { waiting: 0, active: 1, completed: 10, failed: 1, delayed: 0, paused: 0 },
+        },
+        {
+          name: 'file-cleanup',
+          counts: { waiting: 2, active: 0, completed: 25, failed: 0, delayed: 0, paused: 0 },
+        },
+        {
+          name: 'weekly-analytics-reporting',
+          counts: { waiting: 1, active: 0, completed: 5, failed: 0, delayed: 0, paused: 0 },
+        },
+      ];
+      vi.mocked(monitoringService.getQueueStatuses).mockResolvedValue(mockStatuses);
 
       // Act
       const response = await supertest(app).get('/api/admin/queues/status');
@@ -246,7 +251,7 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
     });
 
     it('should return 500 if fetching queue counts fails', async () => {
-      vi.mocked(mockedQueueService.flyerQueue.getJobCounts).mockRejectedValue(
+      vi.mocked(monitoringService.getQueueStatuses).mockRejectedValue(
        new Error('Redis is down'),
      );
 
@@ -318,6 +318,76 @@ describe('AI Routes (/api/ai)', () => {
     // because URL parameters cannot easily simulate empty strings for min(1) validation checks via supertest routing.
   });
 
+  describe('POST /upload-legacy', () => {
+    const imagePath = path.resolve(__dirname, '../tests/assets/test-flyer-image.jpg');
+    const mockUser = createMockUserProfile({
+      user: { user_id: 'legacy-user-1', email: 'legacy-user@test.com' },
+    });
+    // This route requires authentication, so we create an app instance with a user.
+    const authenticatedApp = createTestApp({
+      router: aiRouter,
+      basePath: '/api/ai',
+      authenticatedUser: mockUser,
+    });
+
+    it('should process a legacy flyer and return 200 on success', async () => {
+      // Arrange
+      const mockFlyer = createMockFlyer({ flyer_id: 10 });
+      vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(mockFlyer);
+
+      // Act
+      const response = await supertest(authenticatedApp)
+        .post('/api/ai/upload-legacy')
+        .field('some_legacy_field', 'value') // simulate some body data
+        .attach('flyerFile', imagePath);
+
+      // Assert
+      expect(response.status).toBe(200);
+      expect(response.body).toEqual(mockFlyer);
+      expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledWith(
+        expect.any(Object), // req.file
+        expect.any(Object), // req.body
+        mockUser,
+        expect.any(Object), // req.log
+      );
+    });
+
+    it('should return 400 if no flyer file is uploaded', async () => {
+      const response = await supertest(authenticatedApp)
+        .post('/api/ai/upload-legacy')
+        .field('some_legacy_field', 'value');
+
+      expect(response.status).toBe(400);
+      expect(response.body.message).toBe('No flyer file uploaded.');
+    });
+
+    it('should return 409 and cleanup file if a duplicate flyer is detected', async () => {
+      const duplicateError = new aiService.DuplicateFlyerError('Duplicate legacy flyer.', 101);
+      vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(duplicateError);
+      const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
+
+      const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
+
+      expect(response.status).toBe(409);
+      expect(response.body.message).toBe('Duplicate legacy flyer.');
+      expect(response.body.flyerId).toBe(101);
+      expect(unlinkSpy).toHaveBeenCalledTimes(1);
+      unlinkSpy.mockRestore();
+    });
+
+    it('should return 500 and cleanup file on a generic service error', async () => {
+      vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new Error('Internal service failure'));
+      const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
+
+      const response = await supertest(authenticatedApp).post('/api/ai/upload-legacy').attach('flyerFile', imagePath);
+
+      expect(response.status).toBe(500);
+      expect(response.body.message).toBe('Internal service failure');
+      expect(unlinkSpy).toHaveBeenCalledTimes(1);
+      unlinkSpy.mockRestore();
+    });
+  });
+
   describe('POST /flyers/process (Legacy)', () => {
     const imagePath = path.resolve(__dirname, '../tests/assets/test-flyer-image.jpg');
     const mockDataPayload = {
@@ -197,6 +197,33 @@ describe('Auth Routes (/api/auth)', () => {
       );
     });
 
+    it('should allow registration with an empty string for full_name', async () => {
+      // Arrange
+      const email = 'empty-name@test.com';
+      mockedAuthService.registerAndLoginUser.mockResolvedValue({
+        newUserProfile: createMockUserProfile({ user: { email } }),
+        accessToken: 'token',
+        refreshToken: 'token',
+      });
+
+      // Act
+      const response = await supertest(app).post('/api/auth/register').send({
+        email,
+        password: strongPassword,
+        full_name: '', // Send an empty string
+      });
+
+      // Assert
+      expect(response.status).toBe(201);
+      expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
+        email,
+        strongPassword,
+        undefined, // The preprocess step in the Zod schema should convert '' to undefined
+        undefined,
+        mockLogger,
+      );
+    });
+
     it('should set a refresh token cookie on successful registration', async () => {
       const mockNewUser = createMockUserProfile({
         user: { user_id: 'new-user-id', email: 'cookie@test.com' },
@@ -396,6 +423,24 @@ describe('Auth Routes (/api/auth)', () => {
       const setCookieHeader = response.headers['set-cookie'];
       expect(setCookieHeader[0]).toContain('Max-Age=2592000'); // 30 days in seconds
     });
 
+    it('should return 400 for an invalid email format', async () => {
+      const response = await supertest(app)
+        .post('/api/auth/login')
+        .send({ email: 'not-an-email', password: 'password123' });
+
+      expect(response.status).toBe(400);
+      expect(response.body.errors[0].message).toBe('A valid email is required.');
+    });
+
+    it('should return 400 if password is missing', async () => {
+      const response = await supertest(app)
+        .post('/api/auth/login')
+        .send({ email: 'test@test.com' });
+
+      expect(response.status).toBe(400);
+      expect(response.body.errors[0].message).toBe('Password is required.');
+    });
   });
 
   describe('POST /forgot-password', () => {
@@ -550,12 +595,15 @@ describe('Auth Routes (/api/auth)', () => {
       expect(setCookieHeader[0]).toContain('Max-Age=0');
     });
 
-    it('should still return 200 OK even if deleting the refresh token from DB fails', async () => {
+    it('should still return 200 OK and log an error if deleting the refresh token from DB fails', async () => {
       // Arrange
       const dbError = new Error('DB connection lost');
       mockedAuthService.logout.mockRejectedValue(dbError);
+      const { logger } = await import('../services/logger.server');
+
+      // Spy on logger.error to ensure it's called
+      const errorSpy = vi.spyOn(logger, 'error');
 
       // Act
       const response = await supertest(app)
         .post('/api/auth/logout')
@@ -563,7 +611,12 @@ describe('Auth Routes (/api/auth)', () => {
 
       // Assert
       expect(response.status).toBe(200);
-      expect(logger.error).toHaveBeenCalledWith(
+
+      // Because authService.logout is fire-and-forget (not awaited), we need to
+      // give the event loop a moment to process the rejected promise and trigger the .catch() block.
+      await new Promise((resolve) => setImmediate(resolve));
+
+      expect(errorSpy).toHaveBeenCalledWith(
        expect.objectContaining({ error: dbError }),
        'Logout token invalidation failed in background.',
      );
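Why the `setImmediate` await above works: a rejection from a non-awaited promise only reaches its `.catch()` after the event loop turns. A standalone TypeScript illustration, for use inside an async test:

// Illustrative only: flushing a fire-and-forget rejection before asserting.
const seen: Error[] = [];
Promise.reject(new Error('boom')).catch((err) => seen.push(err)); // not awaited

await new Promise((resolve) => setImmediate(resolve)); // let the .catch() run first
console.log(seen[0]?.message); // 'boom'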
@@ -578,4 +631,82 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
expect(response.headers['set-cookie'][0]).toContain('refreshToken=;');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiting on /forgot-password', () => {
|
||||
it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
|
||||
// Arrange
|
||||
const email = 'rate-limit-test@example.com';
|
||||
const maxRequests = 5; // from the rate limiter config
|
||||
mockedAuthService.resetPassword.mockResolvedValue('mock-token');
|
||||
|
||||
// Act: Make `maxRequests` successful calls with the special header
|
||||
for (let i = 0; i < maxRequests; i++) {
|
||||
const response = await supertest(app)
|
||||
.post('/api/auth/forgot-password')
|
||||
.set('X-Test-Rate-Limit-Enable', 'true') // Opt-in to the rate limiter for this test
|
||||
.send({ email });
|
||||
expect(response.status, `Request ${i + 1} should succeed`).toBe(200);
|
||||
}
|
||||
|
||||
// Act: Make one more call, which should be blocked
|
||||
const blockedResponse = await supertest(app)
|
||||
.post('/api/auth/forgot-password')
|
||||
.set('X-Test-Rate-Limit-Enable', 'true')
|
||||
.send({ email });
|
||||
|
||||
// Assert
|
||||
expect(blockedResponse.status).toBe(429);
|
||||
expect(blockedResponse.text).toContain('Too many password reset requests');
|
||||
});
|
||||
|
||||
it('should NOT block requests when the opt-in header is not sent (default test behavior)', async () => {
|
||||
// Arrange
|
||||
const email = 'no-rate-limit-test@example.com';
|
||||
const overLimitRequests = 7; // More than the max of 5
|
||||
mockedAuthService.resetPassword.mockResolvedValue('mock-token');
|
||||
|
||||
// Act: Make more calls than the limit. They should all succeed because the limiter is skipped.
|
||||
for (let i = 0; i < overLimitRequests; i++) {
|
||||
const response = await supertest(app)
|
||||
.post('/api/auth/forgot-password')
|
||||
// NO 'X-Test-Rate-Limit-Enable' header is sent
|
||||
.send({ email });
|
||||
expect(response.status, `Request ${i + 1} should succeed`).toBe(200);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiting on /reset-password', () => {
  it('should block requests after exceeding the limit when the opt-in header is sent', async () => {
    // Arrange
    const maxRequests = 10; // from the rate limiter config in auth.routes.ts
    const newPassword = 'a-Very-Strong-Password-123!';
    const token = 'some-token-for-rate-limit-test';

    // Mock the service to return a consistent value for the first `maxRequests` calls.
    // The endpoint returns 400 for invalid tokens, which is fine for this test.
    // We just need to ensure it's not a 429.
    mockedAuthService.updatePassword.mockResolvedValue(null);

    // Act: Make `maxRequests` calls. They should not be rate-limited.
    for (let i = 0; i < maxRequests; i++) {
      const response = await supertest(app)
        .post('/api/auth/reset-password')
        .set('X-Test-Rate-Limit-Enable', 'true') // Opt-in to the rate limiter
        .send({ token, newPassword });
      // The expected status is 400 because the token is invalid, but not 429.
      expect(response.status, `Request ${i + 1} should not be rate-limited`).toBe(400);
    }

    // Act: Make one more call, which should be blocked by the rate limiter.
    const blockedResponse = await supertest(app)
      .post('/api/auth/reset-password')
      .set('X-Test-Rate-Limit-Enable', 'true')
      .send({ token, newPassword });

    // Assert
    expect(blockedResponse.status).toBe(429);
    expect(blockedResponse.text).toContain('Too many password reset attempts');
  });
});
});
@@ -23,9 +23,14 @@ const forgotPasswordLimiter = rateLimit({
message: 'Too many password reset requests from this IP, please try again after 15 minutes.',
standardHeaders: true,
legacyHeaders: false,
// Do not skip in test environment so we can write integration tests for it.
// The limiter uses an in-memory store by default, so counts are reset when the test server restarts.
// skip: () => isTestEnv,
// Skip in test env unless a specific header is present.
// This allows E2E tests to run unblocked, while specific integration
// tests for the limiter can opt-in by sending the header.
skip: (req) => {
  if (!isTestEnv) return false; // Never skip in non-test environments.
  // In test env, skip UNLESS the opt-in header is present.
  return req.headers['x-test-rate-limit-enable'] !== 'true';
},
});

const resetPasswordLimiter = rateLimit({

@@ -37,20 +42,24 @@ const resetPasswordLimiter = rateLimit({
skip: () => isTestEnv, // Skip this middleware if in test environment
});
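The opt-in skip logic above is written inline per limiter; a shared predicate could avoid the duplication. A minimal sketch, assuming the same `isTestEnv` flag and opt-in header (the `testAwareSkip` helper and the option values shown are illustrative, not part of this patch):

import rateLimit from 'express-rate-limit';
import type { Request } from 'express';

const isTestEnv = process.env.NODE_ENV === 'test';

// Hypothetical shared skip predicate for express-rate-limit.
const testAwareSkip = (req: Request): boolean => {
  if (!isTestEnv) return false; // Never skip outside tests.
  return req.headers['x-test-rate-limit-enable'] !== 'true'; // In tests, require explicit opt-in.
};

const resetPasswordLimiter = rateLimit({
  windowMs: 15 * 60 * 1000, // Assumed window; the real values live outside this hunk.
  max: 10,
  message: 'Too many password reset attempts from this IP, please try again after 15 minutes.',
  standardHeaders: true,
  legacyHeaders: false,
  skip: testAwareSkip,
});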
// --- Reusable Schemas ---

const passwordSchema = z
.string()
.trim() // Prevent leading/trailing whitespace in passwords.
.min(8, 'Password must be at least 8 characters long.')
.superRefine((password, ctx) => {
const strength = validatePasswordStrength(password);
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
});

const registerSchema = z.object({
body: z.object({
// Sanitize email by trimming and converting to lowercase.
email: z.string().trim().toLowerCase().email('A valid email is required.'),
password: z
.string()
.trim() // Prevent leading/trailing whitespace in passwords.
.min(8, 'Password must be at least 8 characters long.')
.superRefine((password, ctx) => {
const strength = validatePasswordStrength(password);
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
}),
password: passwordSchema,
// Sanitize optional string inputs.
full_name: z.string().trim().optional(),
full_name: z.preprocess((val) => (val === '' ? undefined : val), z.string().trim().optional()),
// Allow empty string or valid URL. If empty string is received, convert to undefined.
avatar_url: z.preprocess(
(val) => (val === '' ? undefined : val),

@@ -59,6 +68,14 @@ const registerSchema = z.object({
}),
});

const loginSchema = z.object({
body: z.object({
email: z.string().trim().toLowerCase().email('A valid email is required.'),
password: requiredString('Password is required.'),
rememberMe: z.boolean().optional(),
}),
});

const forgotPasswordSchema = z.object({
body: z.object({
// Sanitize email by trimming and converting to lowercase.

@@ -69,14 +86,7 @@ const forgotPasswordSchema = z.object({
const resetPasswordSchema = z.object({
body: z.object({
token: requiredString('Token is required.'),
newPassword: z
.string()
.trim() // Prevent leading/trailing whitespace in passwords.
.min(8, 'Password must be at least 8 characters long.')
.superRefine((password, ctx) => {
const strength = validatePasswordStrength(password);
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
}),
newPassword: passwordSchema,
}),
});
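The `preprocess` wrappers above exist so that empty strings coming from HTML form fields behave like absent values, letting `.optional()` apply. A quick sketch of the behavior:

import { z } from 'zod';

// Empty string from a form becomes undefined, so the optional URL validation passes.
const avatarUrl = z.preprocess(
  (val) => (val === '' ? undefined : val),
  z.string().url('Avatar must be a valid URL.').optional(),
);

avatarUrl.parse('');                          // => undefined
avatarUrl.parse('https://example.com/a.png'); // => 'https://example.com/a.png'
// avatarUrl.parse('not-a-url') would throw a ZodError.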
@@ -122,52 +132,56 @@ router.post(
);

// Login Route
router.post('/login', (req: Request, res: Response, next: NextFunction) => {
passport.authenticate(
'local',
{ session: false },
async (err: Error, user: Express.User | false, info: { message: string }) => {
// --- LOGIN ROUTE DEBUG LOGGING ---
req.log.debug(`[API /login] Received login request for email: ${req.body.email}`);
if (err) req.log.error({ err }, '[API /login] Passport reported an error.');
if (!user) req.log.warn({ info }, '[API /login] Passport reported NO USER found.');
if (user) req.log.debug({ user }, '[API /login] Passport user object:'); // Log the user object passport returns
if (user) req.log.info({ user }, '[API /login] Passport reported USER FOUND.');
router.post(
'/login',
validateRequest(loginSchema),
(req: Request, res: Response, next: NextFunction) => {
passport.authenticate(
'local',
{ session: false },
async (err: Error, user: Express.User | false, info: { message: string }) => {
// --- LOGIN ROUTE DEBUG LOGGING ---
req.log.debug(`[API /login] Received login request for email: ${req.body.email}`);
if (err) req.log.error({ err }, '[API /login] Passport reported an error.');
if (!user) req.log.warn({ info }, '[API /login] Passport reported NO USER found.');
if (user) req.log.debug({ user }, '[API /login] Passport user object:'); // Log the user object passport returns
if (user) req.log.info({ user }, '[API /login] Passport reported USER FOUND.');

if (err) {
req.log.error(
{ error: err },
`Login authentication error in /login route for email: ${req.body.email}`,
);
return next(err);
}
if (!user) {
return res.status(401).json({ message: info.message || 'Login failed' });
}
if (err) {
req.log.error(
{ error: err },
`Login authentication error in /login route for email: ${req.body.email}`,
);
return next(err);
}
if (!user) {
return res.status(401).json({ message: info.message || 'Login failed' });
}

try {
const { rememberMe } = req.body;
const userProfile = user as UserProfile;
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(userProfile, req.log);
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
try {
const { rememberMe } = req.body;
const userProfile = user as UserProfile;
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(userProfile, req.log);
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);

const cookieOptions = {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
maxAge: rememberMe ? 30 * 24 * 60 * 60 * 1000 : undefined, // 30 days
};
const cookieOptions = {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
maxAge: rememberMe ? 30 * 24 * 60 * 60 * 1000 : undefined, // 30 days
};

res.cookie('refreshToken', refreshToken, cookieOptions);
// Return the full user profile object on login to avoid a second fetch on the client.
return res.json({ userprofile: userProfile, token: accessToken });
} catch (tokenErr) {
const email = (user as UserProfile)?.user?.email || req.body.email;
req.log.error({ error: tokenErr }, `Failed to process login for user: ${email}`);
return next(tokenErr);
}
},
)(req, res, next);
});
res.cookie('refreshToken', refreshToken, cookieOptions);
// Return the full user profile object on login to avoid a second fetch on the client.
return res.json({ userprofile: userProfile, token: accessToken });
} catch (tokenErr) {
const email = (user as UserProfile)?.user?.email || req.body.email;
req.log.error({ error: tokenErr }, `Failed to process login for user: ${email}`);
return next(tokenErr);
}
},
)(req, res, next);
},
);

// Route to request a password reset
router.post(
@@ -13,7 +13,7 @@ vi.mock('../services/db/index.db', () => ({
getFlyerItems: vi.fn(),
getFlyerItemsForFlyers: vi.fn(),
countFlyerItemsForFlyers: vi.fn(),
trackFlyerItemInteraction: vi.fn(),
trackFlyerItemInteraction: vi.fn().mockResolvedValue(undefined),
},
}));

@@ -50,6 +50,8 @@ describe('Flyer Routes (/api/flyers)', () => {

expect(response.status).toBe(200);
expect(response.body).toEqual(mockFlyers);
// Also assert that the default limit and offset were used.
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 20, 0);
});

it('should pass limit and offset query parameters to the db function', async () => {

@@ -58,6 +60,18 @@ describe('Flyer Routes (/api/flyers)', () => {
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 15, 30);
});

it('should use default for offset when only limit is provided', async () => {
vi.mocked(db.flyerRepo.getFlyers).mockResolvedValue([]);
await supertest(app).get('/api/flyers?limit=5');
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 5, 0);
});

it('should use default for limit when only offset is provided', async () => {
vi.mocked(db.flyerRepo.getFlyers).mockResolvedValue([]);
await supertest(app).get('/api/flyers?offset=10');
expect(db.flyerRepo.getFlyers).toHaveBeenCalledWith(expectLogger, 20, 10);
});

it('should return 500 if the database call fails', async () => {
const dbError = new Error('DB Error');
vi.mocked(db.flyerRepo.getFlyers).mockRejectedValue(dbError);

@@ -151,7 +165,7 @@ describe('Flyer Routes (/api/flyers)', () => {
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError },
{ error: dbError, flyerId: 123 },
'Error fetching flyer items in /api/flyers/:id/items:',
);
});

@@ -276,5 +290,24 @@ describe('Flyer Routes (/api/flyers)', () => {
.send({ type: 'invalid' });
expect(response.status).toBe(400);
});

it('should return 202 and log an error if the tracking function fails', async () => {
const trackingError = new Error('Tracking DB is down');
vi.mocked(db.flyerRepo.trackFlyerItemInteraction).mockRejectedValue(trackingError);

const response = await supertest(app)
.post('/api/flyers/items/99/track')
.send({ type: 'click' });

expect(response.status).toBe(202);

// Allow the event loop to process the unhandled promise rejection from the fire-and-forget call
await new Promise((resolve) => setImmediate(resolve));

expect(mockLogger.error).toHaveBeenCalledWith(
{ error: trackingError, itemId: 99 },
'Flyer item interaction tracking failed',
);
});
});
});
@@ -48,12 +48,12 @@ const trackItemSchema = z.object({
/**
 * GET /api/flyers - Get a paginated list of all flyers.
 */
type GetFlyersRequest = z.infer<typeof getFlyersSchema>;
router.get('/', validateRequest(getFlyersSchema), async (req, res, next): Promise<void> => {
const { query } = req as unknown as GetFlyersRequest;
try {
const limit = query.limit ? Number(query.limit) : 20;
const offset = query.offset ? Number(query.offset) : 0;
// The `validateRequest` middleware ensures `req.query` is valid.
// We parse it here to apply Zod's coercions (string to number) and defaults.
const { limit, offset } = getFlyersSchema.shape.query.parse(req.query);

const flyers = await db.flyerRepo.getFlyers(req.log, limit, offset);
res.json(flyers);
} catch (error) {

@@ -65,14 +65,14 @@ router.get('/', validateRequest(getFlyersSchema), async (req, res, next): Promis
/**
 * GET /api/flyers/:id - Get a single flyer by its ID.
 */
type GetFlyerByIdRequest = z.infer<typeof flyerIdParamSchema>;
router.get('/:id', validateRequest(flyerIdParamSchema), async (req, res, next): Promise<void> => {
const { params } = req as unknown as GetFlyerByIdRequest;
try {
const flyer = await db.flyerRepo.getFlyerById(params.id);
// Explicitly parse to get the coerced number type for `id`.
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
const flyer = await db.flyerRepo.getFlyerById(id);
res.json(flyer);
} catch (error) {
req.log.error({ error, flyerId: params.id }, 'Error fetching flyer by ID:');
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer by ID:');
next(error);
}
});

@@ -84,12 +84,14 @@ router.get(
'/:id/items',
validateRequest(flyerIdParamSchema),
async (req, res, next): Promise<void> => {
const { params } = req as unknown as GetFlyerByIdRequest;
type GetFlyerByIdRequest = z.infer<typeof flyerIdParamSchema>;
try {
const items = await db.flyerRepo.getFlyerItems(params.id, req.log);
// Explicitly parse to get the coerced number type for `id`.
const { id } = flyerIdParamSchema.shape.params.parse(req.params);
const items = await db.flyerRepo.getFlyerItems(id, req.log);
res.json(items);
} catch (error) {
req.log.error({ error }, 'Error fetching flyer items in /api/flyers/:id/items:');
req.log.error({ error, flyerId: req.params.id }, 'Error fetching flyer items in /api/flyers/:id/items:');
next(error);
}
},

@@ -105,6 +107,8 @@ router.post(
async (req, res, next): Promise<void> => {
const { body } = req as unknown as BatchFetchRequest;
try {
// No re-parsing needed here as `validateRequest` has already ensured the body shape,
// and `express.json()` has parsed it. There's no type coercion to apply.
const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
res.json(items);
} catch (error) {

@@ -124,8 +128,9 @@ router.post(
async (req, res, next): Promise<void> => {
const { body } = req as unknown as BatchCountRequest;
try {
// The DB function handles an empty array, so we can simplify.
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log);
// The schema ensures flyerIds is an array of numbers.
// The `?? []` was redundant as `validateRequest` would have already caught a missing `flyerIds`.
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds, req.log);
res.json({ count });
} catch (error) {
req.log.error({ error }, 'Error counting batch flyer items');

@@ -137,11 +142,22 @@ router.post(
/**
 * POST /api/flyers/items/:itemId/track - Tracks a user interaction with a flyer item.
 */
type TrackItemRequest = z.infer<typeof trackItemSchema>;
router.post('/items/:itemId/track', validateRequest(trackItemSchema), (req, res): void => {
const { params, body } = req as unknown as TrackItemRequest;
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log);
res.status(202).send();
router.post('/items/:itemId/track', validateRequest(trackItemSchema), (req, res, next): void => {
try {
// Explicitly parse to get coerced types.
const { params, body } = trackItemSchema.parse({ params: req.params, body: req.body });

// Fire-and-forget: we don't await the tracking call to avoid delaying the response.
// We add a .catch to log any potential errors without crashing the server process.
db.flyerRepo.trackFlyerItemInteraction(params.itemId, body.type, req.log).catch((error) => {
req.log.error({ error, itemId: params.itemId }, 'Flyer item interaction tracking failed');
});

res.status(202).send();
} catch (error) {
// This will catch Zod parsing errors if they occur.
next(error);
}
});

export default router;
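The repeated "explicitly parse to get the coerced number" comments only make sense if `flyerIdParamSchema` coerces the `:id` path string to a number. The schema itself is outside this diff; a minimal sketch of what it presumably looks like, using Zod's `z.coerce`:

import { z } from 'zod';

// Assumed shape: Express provides req.params.id as a string; z.coerce.number()
// turns '123' into 123, while 'abc' coerces to NaN and is rejected (hence the
// "received NaN" validation errors asserted in the route tests).
const flyerIdParamSchema = z.object({
  params: z.object({
    id: z.coerce.number().int().positive(),
  }),
});

const { id } = flyerIdParamSchema.shape.params.parse({ id: '123' }); // id: number = 123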
@@ -102,6 +102,7 @@ vi.mock('passport', () => {
// Now, import the passport configuration which will use our mocks
import passport, { isAdmin, optionalAuth, mockAuth } from './passport.routes';
import { logger } from '../services/logger.server';
import { ForbiddenError } from '../services/db/errors.db';

describe('Passport Configuration', () => {
beforeEach(() => {

@@ -414,6 +415,29 @@ describe('Passport Configuration', () => {
// Assert
expect(done).toHaveBeenCalledWith(dbError, false);
});

it('should call done(err, false) if jwt_payload is null', async () => {
// Arrange
const jwtPayload = null;
const done = vi.fn();

// Act
// We know the mock setup populates the callback.
if (verifyCallbackWrapper.callback) {
// The strategy would not even call the callback if the token is invalid/missing.
// However, to test the robustness of our callback, we can invoke it directly with null.
await verifyCallbackWrapper.callback(jwtPayload as any, done);
}

// Assert
// The code will throw a TypeError because it tries to access 'user_id' of null.
// The catch block in the strategy will catch this and call done(err, false).
expect(done).toHaveBeenCalledWith(expect.any(TypeError), false);
expect(logger.error).toHaveBeenCalledWith(
{ error: expect.any(TypeError) },
'Error during JWT authentication strategy:',
);
});
});

describe('isAdmin Middleware', () => {

@@ -445,7 +469,7 @@ describe('Passport Configuration', () => {
expect(mockRes.status).not.toHaveBeenCalled();
});

it('should return 403 Forbidden if user does not have "admin" role', () => {
it('should call next with a ForbiddenError if user does not have "admin" role', () => {
// Arrange
const mockReq: Partial<Request> = {
user: createMockUserProfile({

@@ -458,14 +482,11 @@ describe('Passport Configuration', () => {
isAdmin(mockReq as Request, mockRes as Response, mockNext);

// Assert
expect(mockNext).not.toHaveBeenCalled(); // This was a duplicate, fixed.
expect(mockRes.status).toHaveBeenCalledWith(403);
expect(mockRes.json).toHaveBeenCalledWith({
message: 'Forbidden: Administrator access required.',
});
expect(mockNext).toHaveBeenCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
});

it('should return 403 Forbidden if req.user is missing', () => {
it('should call next with a ForbiddenError if req.user is missing', () => {
// Arrange
const mockReq = {} as Request; // No req.user

@@ -473,11 +494,86 @@ describe('Passport Configuration', () => {
isAdmin(mockReq, mockRes as Response, mockNext);

// Assert
expect(mockNext).not.toHaveBeenCalled(); // This was a duplicate, fixed.
expect(mockRes.status).toHaveBeenCalledWith(403);
expect(mockNext).toHaveBeenCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
});

it('should return 403 Forbidden if req.user is not a valid UserProfile object', () => {
it('should log a warning when a non-admin user tries to access an admin route', () => {
// Arrange
const mockReq: Partial<Request> = {
user: createMockUserProfile({
role: 'user',
user: { user_id: 'user-id-123', email: 'user@test.com' },
}),
};

// Act
isAdmin(mockReq as Request, mockRes as Response, mockNext);

// Assert
expect(logger.warn).toHaveBeenCalledWith('Admin access denied for user: user-id-123');
});

it('should log a warning with "unknown" user when req.user is missing', () => {
// Arrange
const mockReq = {} as Request; // No req.user

// Act
isAdmin(mockReq, mockRes as Response, mockNext);

// Assert
expect(logger.warn).toHaveBeenCalledWith('Admin access denied for user: unknown');
});

it('should call next with a ForbiddenError for various invalid user object shapes', () => {
const mockNext = vi.fn();
const mockRes: Partial<Response> = {
status: vi.fn().mockReturnThis(),
json: vi.fn(),
};

// Case 1: user is not an object (e.g., a string)
const req1 = { user: 'not-an-object' } as unknown as Request;
isAdmin(req1, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();

// Case 2: user is null
const req2 = { user: null } as unknown as Request;
isAdmin(req2, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();

// Case 3: user object is missing 'user' property
const req3 = { user: { role: 'admin' } } as unknown as Request;
isAdmin(req3, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();

// Case 4: user.user is not an object
const req4 = { user: { role: 'admin', user: 'not-an-object' } } as unknown as Request;
isAdmin(req4, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();

// Case 5: user.user is missing 'user_id'
const req5 = {
user: { role: 'admin', user: { email: 'test@test.com' } },
} as unknown as Request;
isAdmin(req5, mockRes as Response, mockNext);
expect(mockNext).toHaveBeenLastCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
vi.clearAllMocks();

// Reset the main mockNext for other tests in the suite
mockNext.mockClear();
});

it('should call next with a ForbiddenError if req.user is not a valid UserProfile object', () => {
// Arrange
const mockReq: Partial<Request> = {
// An object that is not a valid UserProfile (e.g., missing 'role')

@@ -490,11 +586,8 @@ describe('Passport Configuration', () => {
isAdmin(mockReq as Request, mockRes as Response, mockNext);

// Assert
expect(mockNext).not.toHaveBeenCalled(); // This was a duplicate, fixed.
expect(mockRes.status).toHaveBeenCalledWith(403);
expect(mockRes.json).toHaveBeenCalledWith({
message: 'Forbidden: Administrator access required.',
});
expect(mockNext).toHaveBeenCalledWith(expect.any(ForbiddenError));
expect(mockRes.status).not.toHaveBeenCalled();
});
});

@@ -611,13 +704,18 @@ describe('Passport Configuration', () => {
optionalAuth(mockReq, mockRes as Response, mockNext);

// Assert
// The new implementation logs a warning and proceeds.
expect(logger.warn).toHaveBeenCalledWith(
{ error: authError },
'Optional auth encountered an error, proceeding anonymously.',
);
expect(mockReq.user).toBeUndefined();
expect(mockNext).toHaveBeenCalledTimes(1);
});
});

describe('mockAuth Middleware', () => {
const mockNext: NextFunction = vi.fn();
const mockNext: NextFunction = vi.fn(); // This was a duplicate, fixed.
const mockRes: Partial<Response> = {
status: vi.fn().mockReturnThis(),
json: vi.fn(),
@@ -11,6 +11,7 @@ import * as db from '../services/db/index.db';
import { logger } from '../services/logger.server';
import { UserProfile } from '../types';
import { createMockUserProfile } from '../tests/utils/mockFactories';
import { ForbiddenError } from '../services/db/errors.db';

const JWT_SECRET = process.env.JWT_SECRET!;

@@ -307,7 +308,7 @@ export const isAdmin = (req: Request, res: Response, next: NextFunction) => {
// Check if userProfile is a valid UserProfile before accessing its properties for logging.
const userIdForLog = isUserProfile(userProfile) ? userProfile.user.user_id : 'unknown';
logger.warn(`Admin access denied for user: ${userIdForLog}`);
res.status(403).json({ message: 'Forbidden: Administrator access required.' });
next(new ForbiddenError('Forbidden: Administrator access required.'));
}
};

@@ -323,12 +324,17 @@ export const optionalAuth = (req: Request, res: Response, next: NextFunction) =>
'jwt',
{ session: false },
(err: Error | null, user: Express.User | false, info: { message: string } | Error) => {
// If there's an authentication error (e.g., malformed token), log it but don't block the request.
if (err) {
// An actual error occurred during authentication (e.g., malformed token).
// For optional auth, we log this but still proceed without a user.
logger.warn({ error: err }, 'Optional auth encountered an error, proceeding anonymously.');
return next();
}
if (info) {
// The patch requested this specific error handling.
logger.info({ info: info.message || info.toString() }, 'Optional auth info:');
} // The patch requested this specific error handling.
if (user) (req as Express.Request).user = user; // Attach user if authentication succeeds
}
if (user) (req as Express.Request).user = user; // Attach user if authentication succeeds.

next(); // Always proceed to the next middleware
},
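Replacing the inline `res.status(403)` with `next(new ForbiddenError(...))` assumes a central error-handling middleware translates the error class back into an HTTP status, which is what keeps the updated tests' `expect(mockRes.status).not.toHaveBeenCalled()` assertions valid. The handler itself is not shown in this diff; a minimal sketch under that assumption:

import type { Request, Response, NextFunction } from 'express';
import { ForbiddenError } from '../services/db/errors.db';

// Hypothetical central error middleware; the project's real handler is not part of this patch.
export const errorHandler = (err: Error, _req: Request, res: Response, _next: NextFunction) => {
  if (err instanceof ForbiddenError) {
    res.status(403).json({ message: err.message });
    return;
  }
  res.status(500).json({ message: 'Internal Server Error' });
};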
@@ -1140,6 +1140,19 @@ describe('User Routes (/api/users)', () => {
expect(logger.error).toHaveBeenCalled();
});

it('DELETE /recipes/:recipeId should return 404 if recipe not found', async () => {
vi.mocked(db.recipeRepo.deleteRecipe).mockRejectedValue(new NotFoundError('Recipe not found'));
const response = await supertest(app).delete('/api/users/recipes/999');
expect(response.status).toBe(404);
expect(response.body.message).toBe('Recipe not found');
});

it('DELETE /recipes/:recipeId should return 400 for invalid recipe ID', async () => {
const response = await supertest(app).delete('/api/users/recipes/abc');
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('received NaN');
});

it("PUT /recipes/:recipeId should update a user's own recipe", async () => {
const updates = { description: 'A new delicious description.' };
const mockUpdatedRecipe = createMockRecipe({ recipe_id: 1, ...updates });

@@ -1181,6 +1194,14 @@ describe('User Routes (/api/users)', () => {
expect(response.body.errors[0].message).toBe('No fields provided to update.');
});

it('PUT /recipes/:recipeId should return 400 for invalid recipe ID', async () => {
const response = await supertest(app)
.put('/api/users/recipes/abc')
.send({ name: 'New Name' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('received NaN');
});

it('GET /shopping-lists/:listId should return 404 if list is not found', async () => {
vi.mocked(db.shoppingRepo.getShoppingListById).mockRejectedValue(
new NotFoundError('Shopping list not found'),
@@ -81,6 +81,7 @@ vi.mock('./db/flyer.db', () => ({

vi.mock('../utils/imageProcessor', () => ({
generateFlyerIcon: vi.fn(),
processAndSaveImage: vi.fn(),
}));

vi.mock('./db/admin.db', () => ({

@@ -93,8 +94,8 @@ vi.mock('./db/admin.db', () => ({
import * as dbModule from './db/index.db';
import { flyerQueue } from './queueService.server';
import { createFlyerAndItems } from './db/flyer.db';
import { withTransaction } from './db/index.db';
import { generateFlyerIcon } from '../utils/imageProcessor';
import { withTransaction } from './db/index.db'; // This was a duplicate, fixed.
import { generateFlyerIcon, processAndSaveImage } from '../utils/imageProcessor';

// Define a mock interface that closely resembles the actual Flyer type for testing purposes.
// This helps ensure type safety in mocks without relying on 'any'.

@@ -808,9 +809,11 @@ describe('AI Service (Server)', () => {
expect(
(localAiServiceInstance as any)._parseJsonFromAiResponse(responseText, localLogger),
).toBeNull(); // This was a duplicate, fixed.
// The code now fails earlier because it can't find the closing brace.
// We need to update the assertion to match the actual error log.
expect(localLogger.error).toHaveBeenCalledWith(
expect.objectContaining({ jsonSlice: '{ "key": "value"' }),
'[_parseJsonFromAiResponse] Failed to parse JSON slice.',
{ responseText }, // The log includes the full response text.
"[_parseJsonFromAiResponse] Could not find ending '}' or ']' in response.",
);
});
});

@@ -1052,6 +1055,7 @@ describe('AI Service (Server)', () => {
beforeEach(() => {
// Default success mocks. Use createMockFlyer for a more complete mock.
vi.mocked(dbModule.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
vi.mocked(processAndSaveImage).mockResolvedValue('processed.jpg');
vi.mocked(generateFlyerIcon).mockResolvedValue('icon.jpg');
vi.mocked(createFlyerAndItems).mockResolvedValue({
flyer: {
@@ -23,8 +23,8 @@ import * as db from './db/index.db';
import { flyerQueue } from './queueService.server';
import type { Job } from 'bullmq';
import { createFlyerAndItems } from './db/flyer.db';
import { getBaseUrl } from '../utils/serverUtils';
import { generateFlyerIcon } from '../utils/imageProcessor';
import { getBaseUrl } from '../utils/serverUtils'; // This was a duplicate, fixed.
import { generateFlyerIcon, processAndSaveImage } from '../utils/imageProcessor';
import { AdminRepository } from './db/admin.db';
import path from 'path';
import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError

@@ -215,7 +215,11 @@ export class AIService {
this.logger.warn(
'[AIService] Mock generateContent called. This should only happen in tests when no API key is available.',
);
return { text: '[]' } as unknown as GenerateContentResponse;
// Return a minimal valid JSON object structure to prevent downstream parsing errors.
const mockResponse = { store_name: 'Mock Store', items: [] };
return {
text: JSON.stringify(mockResponse),
} as unknown as GenerateContentResponse;
},
};
}
@@ -369,62 +373,43 @@ export class AIService {
 * @returns The parsed JSON object, or null if parsing fails.
 */
private _parseJsonFromAiResponse<T>(responseText: string | undefined, logger: Logger): T | null {
// --- START HYPER-DIAGNOSTIC LOGGING ---
console.log('\n--- DIAGNOSING _parseJsonFromAiResponse ---');
console.log(
`1. Initial responseText (Type: ${typeof responseText}):`,
JSON.stringify(responseText),
// --- START EXTENSIVE DEBUG LOGGING ---
logger.debug(
{
responseText_type: typeof responseText,
responseText_length: responseText?.length,
responseText_preview: responseText?.substring(0, 200),
},
'[_parseJsonFromAiResponse] Starting JSON parsing.',
);
// --- END HYPER-DIAGNOSTIC LOGGING ---

if (!responseText) {
logger.warn(
'[_parseJsonFromAiResponse] Response text is empty or undefined. Returning null.',
);
console.log('2. responseText is falsy. ABORTING.');
console.log('--- END DIAGNOSIS ---\n');
logger.warn('[_parseJsonFromAiResponse] Response text is empty or undefined. Aborting parsing.');
return null;
}

// Find the start of the JSON, which can be inside a markdown block
const markdownRegex = /```(json)?\s*([\s\S]*?)\s*```/;
const markdownMatch = responseText.match(markdownRegex);
console.log('2. Regex Result (markdownMatch):', markdownMatch);

let jsonString;
if (markdownMatch && markdownMatch[2] !== undefined) {
// Check for capture group
console.log('3. Regex matched. Processing Captured Group.');
console.log(
` - Captured content (Type: ${typeof markdownMatch[2]}, Length: ${markdownMatch[2].length}):`,
JSON.stringify(markdownMatch[2]),
);
logger.debug(
{ rawCapture: markdownMatch[2] },
{ capturedLength: markdownMatch[2].length },
'[_parseJsonFromAiResponse] Found JSON content within markdown code block.',
);

jsonString = markdownMatch[2].trim();
console.log(
`4. After trimming, jsonString is (Type: ${typeof jsonString}, Length: ${jsonString.length}):`,
JSON.stringify(jsonString),
);
logger.debug(
{ trimmedJsonString: jsonString },
'[_parseJsonFromAiResponse] Trimmed extracted JSON string.',
);
} else {
console.log(
'3. Regex did NOT match or capture group 2 is undefined. Will attempt to parse entire responseText.',
);
logger.debug('[_parseJsonFromAiResponse] No markdown code block found. Using raw response text.');
jsonString = responseText;
}

// Find the first '{' or '[' and the last '}' or ']' to isolate the JSON object.
const firstBrace = jsonString.indexOf('{');
const firstBracket = jsonString.indexOf('[');
console.log(
`5. Index search on jsonString: firstBrace=${firstBrace}, firstBracket=${firstBracket}`,
logger.debug(
{ firstBrace, firstBracket },
'[_parseJsonFromAiResponse] Searching for start of JSON.',
);

// Determine the starting point of the JSON content

@@ -432,37 +417,44 @@ export class AIService {
firstBrace === -1 || (firstBracket !== -1 && firstBracket < firstBrace)
? firstBracket
: firstBrace;
console.log('6. Calculated startIndex:', startIndex);

if (startIndex === -1) {
logger.error(
{ responseText },
"[_parseJsonFromAiResponse] Could not find starting '{' or '[' in response.",
);
console.log('7. startIndex is -1. ABORTING.');
console.log('--- END DIAGNOSIS ---\n');
return null;
}

const jsonSlice = jsonString.substring(startIndex);
console.log(
`8. Sliced string to be parsed (jsonSlice) (Length: ${jsonSlice.length}):`,
JSON.stringify(jsonSlice),
// Find the last brace or bracket to gracefully handle trailing text.
// This is a robust way to handle cases where the AI might add trailing text after the JSON.
const lastBrace = jsonString.lastIndexOf('}');
const lastBracket = jsonString.lastIndexOf(']');
const endIndex = Math.max(lastBrace, lastBracket);

if (endIndex === -1) {
logger.error(
{ responseText },
"[_parseJsonFromAiResponse] Could not find ending '}' or ']' in response.",
);
return null;
}

const jsonSlice = jsonString.substring(startIndex, endIndex + 1);
logger.debug(
{ sliceLength: jsonSlice.length },
'[_parseJsonFromAiResponse] Extracted JSON slice for parsing.',
);

try {
console.log('9. Attempting JSON.parse on jsonSlice...');
const parsed = JSON.parse(jsonSlice) as T;
console.log('10. SUCCESS: JSON.parse succeeded.');
console.log('--- END DIAGNOSIS (SUCCESS) ---\n');
logger.info('[_parseJsonFromAiResponse] Successfully parsed JSON from AI response.');
return parsed;
} catch (e) {
logger.error(
{ jsonSlice, error: e, errorMessage: (e as Error).message, stack: (e as Error).stack },
'[_parseJsonFromAiResponse] Failed to parse JSON slice.',
);
console.error('10. FAILURE: JSON.parse FAILED. Error:', e);
console.log('--- END DIAGNOSIS (FAILURE) ---\n');
return null;
}
}
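With the end-index logic above, trailing chatter after the JSON no longer breaks parsing, while a response that never closes its braces now fails before `JSON.parse`. Two illustrative inputs (not test fixtures from this patch):

// Trailing text after a fenced block: sliced from the first '{' to the last '}', parses fine.
const ok = '```json\n{ "store_name": "A", "items": [] }\n```\nLet me know if you need more!';

// Truncated response: no closing '}' or ']' exists, so endIndex === -1 and the method
// logs "[_parseJsonFromAiResponse] Could not find ending '}' or ']' in response." and returns null.
const truncated = 'Here you go: { "key": "value"';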
@@ -795,6 +787,18 @@ async enqueueFlyerProcessing(
}

const baseUrl = getBaseUrl(logger);
// --- START DEBUGGING ---
// Add a fail-fast check to ensure the baseUrl is a valid URL before enqueuing.
// This will make the test fail at the upload step if the URL is the problem,
// which is easier to debug than a worker failure.
if (!baseUrl || !baseUrl.startsWith('http')) {
const errorMessage = `[aiService] FATAL: The generated baseUrl is not a valid absolute URL. Value: "${baseUrl}". This will cause the flyer processing worker to fail. Check the FRONTEND_URL environment variable.`;
logger.error(errorMessage);
// Throw a standard error that the calling route can handle.
throw new Error(errorMessage);
}
logger.info({ baseUrl }, '[aiService] Enqueuing job with valid baseUrl.');
// --- END DEBUGGING ---

// 3. Add job to the queue
const job = await flyerQueue.add('process-flyer', {
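One caveat: `startsWith('http')` also accepts strings like "httpfoo://x". If stricter validation were ever needed, the WHATWG URL parser could back the same fail-fast check; an alternative sketch (not what this patch does):

// Alternative: validate with the URL constructor instead of a prefix check.
const isAbsoluteHttpUrl = (value: string | undefined): boolean => {
  if (!value) return false;
  try {
    const parsed = new URL(value);
    return parsed.protocol === 'http:' || parsed.protocol === 'https:';
  } catch {
    return false; // Not parseable as an absolute URL.
  }
};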
@@ -818,6 +822,7 @@ async enqueueFlyerProcessing(
body: any,
logger: Logger,
): { parsed: FlyerProcessPayload; extractedData: Partial<ExtractedCoreData> | null | undefined } {
logger.debug({ body, type: typeof body }, '[AIService] Starting _parseLegacyPayload');
let parsed: FlyerProcessPayload = {};

try {

@@ -826,6 +831,7 @@ async enqueueFlyerProcessing(
logger.warn({ error: errMsg(e) }, '[AIService] Failed to parse top-level request body string.');
return { parsed: {}, extractedData: {} };
}
logger.debug({ parsed }, '[AIService] Parsed top-level body');

// If the real payload is nested inside a 'data' property (which could be a string),
// we parse it out but keep the original `parsed` object for top-level properties like checksum.

@@ -841,13 +847,16 @@ async enqueueFlyerProcessing(
potentialPayload = parsed.data;
}
}
logger.debug({ potentialPayload }, '[AIService] Potential payload after checking "data" property');

// The extracted data is either in an `extractedData` key or is the payload itself.
const extractedData = potentialPayload.extractedData ?? potentialPayload;
logger.debug({ extractedData: !!extractedData }, '[AIService] Extracted data object');

// Merge for checksum lookup: properties in the outer `parsed` object (like a top-level checksum)
// take precedence over any same-named properties inside `potentialPayload`.
const finalParsed = { ...potentialPayload, ...parsed };
logger.debug({ finalParsed }, '[AIService] Final parsed object for checksum lookup');

return { parsed: finalParsed, extractedData };
}
@@ -858,10 +867,12 @@ async enqueueFlyerProcessing(
userProfile: UserProfile | undefined,
logger: Logger,
): Promise<Flyer> {
logger.debug({ body, file }, '[AIService] Starting processLegacyFlyerUpload');
const { parsed, extractedData: initialExtractedData } = this._parseLegacyPayload(body, logger);
let extractedData = initialExtractedData;

const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
logger.debug({ checksum, parsed }, '[AIService] Extracted checksum from legacy payload');
if (!checksum) {
throw new ValidationError([], 'Checksum is required.');
}

@@ -896,12 +907,24 @@ async enqueueFlyerProcessing(
logger.warn('extractedData.store_name missing; using fallback store name.');
}

const iconsDir = path.join(path.dirname(file.path), 'icons');
const iconFileName = await generateFlyerIcon(file.path, iconsDir, logger);
// Process the uploaded image to strip metadata and optimize it.
const flyerImageDir = path.dirname(file.path);
const processedImageFileName = await processAndSaveImage(
file.path,
flyerImageDir,
originalFileName,
logger,
);
const processedImagePath = path.join(flyerImageDir, processedImageFileName);

// Generate the icon from the newly processed (and cleaned) image.
const iconsDir = path.join(flyerImageDir, 'icons');
const iconFileName = await generateFlyerIcon(processedImagePath, iconsDir, logger);

const baseUrl = getBaseUrl(logger);
const iconUrl = `${baseUrl}/flyer-images/icons/${iconFileName}`;
const imageUrl = `${baseUrl}/flyer-images/${file.filename}`;
const imageUrl = `${baseUrl}/flyer-images/${processedImageFileName}`;
logger.debug({ imageUrl, iconUrl }, 'Constructed URLs for legacy upload');

const flyerData: FlyerInsert = {
file_name: originalFileName,
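`processAndSaveImage` is consumed here but implemented elsewhere; judging from the call site it takes the input path, output directory, original file name, and a logger, and resolves to the processed file's name. Purely as an assumption about that contract, a sharp-based version might look like:

import path from 'path';
import sharp from 'sharp';
import type { Logger } from 'pino';

// Hypothetical sketch of utils/imageProcessor.processAndSaveImage; the real one is not in this diff.
export async function processAndSaveImage(
  inputPath: string,
  outputDir: string,
  originalFileName: string,
  logger: Logger,
): Promise<string> {
  const outputFileName = `${path.parse(originalFileName).name}-processed.jpg`;
  const outputPath = path.join(outputDir, outputFileName);
  // Re-encoding to JPEG drops EXIF/GPS metadata by default; rotate() applies EXIF orientation first.
  await sharp(inputPath).rotate().jpeg({ quality: 82 }).toFile(outputPath);
  logger.debug({ outputPath }, 'Processed image saved without metadata.');
  return outputFileName;
}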
@@ -32,13 +32,13 @@ const joinUrl = (base: string, path: string): string => {
 * A promise that holds the in-progress token refresh operation.
 * This prevents multiple parallel refresh requests.
 */
let refreshTokenPromise: Promise<string> | null = null;
let performTokenRefreshPromise: Promise<string> | null = null;

/**
 * Attempts to refresh the access token using the HttpOnly refresh token cookie.
 * @returns A promise that resolves to the new access token.
 */
const refreshToken = async (): Promise<string> => {
const _performTokenRefresh = async (): Promise<string> => {
logger.info('Attempting to refresh access token...');
try {
// Use the joinUrl helper for consistency, though usually this is a relative fetch in browser

@@ -75,11 +75,15 @@ const refreshToken = async (): Promise<string> => {
};

/**
 * A custom fetch wrapper that handles automatic token refreshing.
 * All authenticated API calls should use this function.
 * @param url The URL to fetch.
 * @param options The fetch options.
 * @returns A promise that resolves to the fetch Response.
 * A custom fetch wrapper that handles automatic token refreshing for authenticated API calls.
 * If a request fails with a 401 Unauthorized status, it attempts to refresh the access token
 * using the refresh token cookie. If successful, it retries the original request with the new token.
 * All authenticated API calls should use this function or one of its helpers (e.g., `authedGet`).
 *
 * @param url The endpoint path (e.g., '/users/profile') or a full URL.
 * @param options Standard `fetch` options (method, body, etc.).
 * @param apiOptions Custom options for the API client, such as `tokenOverride` for testing or an `AbortSignal`.
 * @returns A promise that resolves to the final `Response` object from the fetch call.
 */
export const apiFetch = async (
url: string,

@@ -122,12 +126,12 @@ export const apiFetch = async (
try {
logger.info(`apiFetch: Received 401 for ${fullUrl}. Attempting token refresh.`);
// If no refresh is in progress, start one.
if (!refreshTokenPromise) {
refreshTokenPromise = refreshToken();
if (!performTokenRefreshPromise) {
performTokenRefreshPromise = _performTokenRefresh();
}

// Wait for the existing or new refresh operation to complete.
const newToken = await refreshTokenPromise;
const newToken = await performTokenRefreshPromise;

logger.info(`apiFetch: Token refreshed. Retrying original request to ${fullUrl}.`);
// Retry the original request with the new token.

@@ -138,7 +142,7 @@ export const apiFetch = async (
return Promise.reject(refreshError);
} finally {
// Clear the promise so the next 401 will trigger a new refresh.
refreshTokenPromise = null;
performTokenRefreshPromise = null;
}
}
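The rename clarifies the single-flight pattern at work: every concurrent 401 awaits the same in-flight refresh, and the slot is cleared in `finally` so the next failure starts fresh. The same pattern in isolation:

// Generic single-flight wrapper, mirroring the refresh logic above (illustrative only).
let inFlight: Promise<string> | null = null;

async function singleFlight(doRefresh: () => Promise<string>): Promise<string> {
  if (!inFlight) {
    inFlight = doRefresh().finally(() => {
      inFlight = null; // Allow the next failure to trigger a new refresh.
    });
  }
  return inFlight; // Concurrent callers share the same promise.
}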
@@ -768,6 +772,25 @@ export const triggerFailingJob = (tokenOverride?: string): Promise<Response> =>
export const getJobStatus = (jobId: string, tokenOverride?: string): Promise<Response> =>
authedGet(`/ai/jobs/${jobId}/status`, { tokenOverride });

/**
 * Refreshes an access token using a refresh token cookie.
 * This is intended for use in Node.js test environments where cookies must be set manually.
 * @param cookie The full 'Cookie' header string (e.g., "refreshToken=...").
 * @returns A promise that resolves to the fetch Response.
 */
export async function refreshToken(cookie: string) {
const url = joinUrl(API_BASE_URL, '/auth/refresh-token');
const options: RequestInit = {
method: 'POST',
headers: {
'Content-Type': 'application/json',
// The browser would handle this automatically, but in Node.js tests we must set it manually.
Cookie: cookie,
},
};
return fetch(url, options);
}

/**
 * Triggers the clearing of the geocoding cache on the server.
 * Requires admin privileges.
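In practice the cookie string would come from a prior login response; hypothetical usage in an integration test (the login shape is assumed from the auth tests above):

// Hypothetical test usage: lift the refreshToken cookie from a login and replay it.
const login = await supertest(app)
  .post('/api/auth/login')
  .send({ email: 'user@example.com', password: 'a-Very-Strong-Password-123!' });

const refreshCookie = login.headers['set-cookie'][0].split(';')[0]; // "refreshToken=..."
const res = await refreshToken(refreshCookie);
// expect(res.status).toBe(200);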
@@ -134,7 +134,6 @@ describe('AuthService', () => {
'hashed-password',
{ full_name: 'Test User', avatar_url: undefined },
reqLog,
{},
);
expect(transactionalAdminRepoMocks.logActivity).toHaveBeenCalledWith(
expect.objectContaining({

@@ -40,7 +40,6 @@ class AuthService {
hashedPassword,
{ full_name: fullName, avatar_url: avatarUrl },
reqLog,
client, // Pass the transactional client
);

logger.info(`Successfully created new user in DB: ${newUser.user.email} (ID: ${newUser.user.user_id})`);
@@ -24,6 +24,19 @@ vi.mock('../services/logger.server', () => ({
// Mock the date utility to control the output for the weekly analytics job
vi.mock('../utils/dateUtils', () => ({
getSimpleWeekAndYear: vi.fn(() => ({ year: 2024, week: 42 })),
getCurrentDateISOString: vi.fn(() => '2024-10-18'),
}));

vi.mock('../services/queueService.server', () => ({
analyticsQueue: {
add: vi.fn(),
},
weeklyAnalyticsQueue: {
add: vi.fn(),
},
emailQueue: {
add: vi.fn(),
},
}));

import { BackgroundJobService, startBackgroundJobs } from './backgroundJobService';

@@ -32,6 +45,7 @@ import type { PersonalizationRepository } from './db/personalization.db';
import type { NotificationRepository } from './db/notification.db';
import { createMockWatchedItemDeal } from '../tests/utils/mockFactories';
import { logger as globalMockLogger } from '../services/logger.server'; // Import the mocked logger
import { analyticsQueue, weeklyAnalyticsQueue } from '../services/queueService.server';

describe('Background Job Service', () => {
// Create mock dependencies that will be injected into the service

@@ -118,6 +132,37 @@ describe('Background Job Service', () => {
mockServiceLogger,
);

describe('Manual Triggers', () => {
it('triggerAnalyticsReport should add a daily report job to the queue', async () => {
// The mock should return the jobId passed to it to simulate bullmq's behavior
vi.mocked(analyticsQueue.add).mockImplementation(async (name, data, opts) => ({ id: opts?.jobId }) as any);
const jobId = await service.triggerAnalyticsReport();

expect(jobId).toContain('manual-report-');
expect(analyticsQueue.add).toHaveBeenCalledWith(
'generate-daily-report',
{ reportDate: '2024-10-18' },
{ jobId: expect.stringContaining('manual-report-') },
);
});

it('triggerWeeklyAnalyticsReport should add a weekly report job to the queue', async () => {
// The mock should return the jobId passed to it
vi.mocked(weeklyAnalyticsQueue.add).mockImplementation(async (name, data, opts) => ({ id: opts?.jobId }) as any);
const jobId = await service.triggerWeeklyAnalyticsReport();

expect(jobId).toContain('manual-weekly-report-');
expect(weeklyAnalyticsQueue.add).toHaveBeenCalledWith(
'generate-weekly-report',
{
reportYear: 2024, // From mocked dateUtils
reportWeek: 42, // From mocked dateUtils
},
{ jobId: expect.stringContaining('manual-weekly-report-') },
);
});
});

it('should do nothing if no deals are found for any user', async () => {
mockPersonalizationRepo.getBestSalePricesForAllUsers.mockResolvedValue([]);
await service.runDailyDealCheck();
@@ -153,24 +198,27 @@ describe('Background Job Service', () => {
// Check that in-app notifications were created for both users
expect(mockNotificationRepo.createBulkNotifications).toHaveBeenCalledTimes(1);
const notificationPayload = mockNotificationRepo.createBulkNotifications.mock.calls[0][0];
expect(notificationPayload).toHaveLength(2);
// Use expect.arrayContaining to be order-agnostic.
expect(notificationPayload).toEqual(
expect.arrayContaining([
{
user_id: 'user-1',
content: 'You have 1 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
{
user_id: 'user-2',
content: 'You have 2 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
]),

// Sort by user_id to ensure a consistent order for a direct `toEqual` comparison.
// This provides a clearer diff on failure than `expect.arrayContaining`.
const sortedPayload = [...notificationPayload].sort((a, b) =>
a.user_id.localeCompare(b.user_id),
);

expect(sortedPayload).toEqual([
{
user_id: 'user-1',
content: 'You have 1 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
{
user_id: 'user-2',
content: 'You have 2 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
]);
});

it('should handle and log errors for individual users without stopping the process', async () => {

@@ -252,7 +300,7 @@ describe('Background Job Service', () => {
vi.mocked(mockWeeklyAnalyticsQueue.add).mockClear();
});

it('should schedule three cron jobs with the correct schedules', () => {
it('should schedule four cron jobs with the correct schedules', () => {
startBackgroundJobs(
mockBackgroundJobService,
mockAnalyticsQueue,
@@ -2,13 +2,19 @@
import cron from 'node-cron';
import type { Logger } from 'pino';
import type { Queue } from 'bullmq';
import { Notification, WatchedItemDeal } from '../types';
import { getSimpleWeekAndYear } from '../utils/dateUtils';
import { formatCurrency } from '../utils/formatUtils';
import { getSimpleWeekAndYear, getCurrentDateISOString } from '../utils/dateUtils';
import type { Notification, WatchedItemDeal } from '../types';
// Import types for repositories from their source files
import type { PersonalizationRepository } from './db/personalization.db';
import type { NotificationRepository } from './db/notification.db';
import { analyticsQueue, weeklyAnalyticsQueue } from './queueService.server';

type UserDealGroup = {
userProfile: { user_id: string; email: string; full_name: string | null };
deals: WatchedItemDeal[];
};

interface EmailJobData {
to: string;
subject: string;

@@ -25,7 +31,7 @@ export class BackgroundJobService {
) {}

public async triggerAnalyticsReport(): Promise<string> {
const reportDate = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
const reportDate = getCurrentDateISOString(); // YYYY-MM-DD
const jobId = `manual-report-${reportDate}-${Date.now()}`;
const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
return job.id!;

@@ -57,14 +63,16 @@ export class BackgroundJobService {
const dealsListHtml = deals
.map(
(deal) =>
`<li><strong>${deal.item_name}</strong> is on sale for <strong>$${(deal.best_price_in_cents / 100).toFixed(2)}</strong> at ${deal.store_name}!</li>`,
`<li><strong>${deal.item_name}</strong> is on sale for <strong>${formatCurrency(
deal.best_price_in_cents,
)}</strong> at ${deal.store_name}!</li>`,
)
.join('');
const html = `<p>Hi ${recipientName},</p><p>We found some great deals on items you're watching:</p><ul>${dealsListHtml}</ul>`;
const text = `Hi ${recipientName},\n\nWe found some great deals on items you're watching. Visit the deals page on the site to learn more.`;

// Use a predictable Job ID to prevent duplicate email notifications for the same user on the same day.
const today = new Date().toISOString().split('T')[0];
const today = getCurrentDateISOString();
const jobId = `deal-email-${userProfile.user_id}-${today}`;

return {
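Neither `getCurrentDateISOString` nor `formatCurrency` is shown in this diff; plausible one-liners consistent with the inline code they replace (assumptions, not the actual utils):

// utils/dateUtils (assumed): the same YYYY-MM-DD string the inline code produced.
export const getCurrentDateISOString = (): string => new Date().toISOString().split('T')[0];

// utils/formatUtils (assumed): cents to "$12.34", matching the replaced template literal.
export const formatCurrency = (cents: number): string => `$${(cents / 100).toFixed(2)}`;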
@@ -82,15 +90,41 @@ export class BackgroundJobService {
  private _prepareInAppNotification(
    userId: string,
    dealCount: number,
  ): Omit<Notification, 'notification_id' | 'is_read' | 'created_at'> {
  ): Omit<Notification, 'notification_id' | 'is_read' | 'created_at' | 'updated_at'> {
    return {
      user_id: userId,
      content: `You have ${dealCount} new deal(s) on your watched items!`,
      link_url: '/dashboard/deals', // A link to the future "My Deals" page
      updated_at: new Date().toISOString(),
    };
  }

  private async _processDealsForUser({
    userProfile,
    deals,
  }: UserDealGroup): Promise<Omit<Notification, 'notification_id' | 'is_read' | 'created_at' | 'updated_at'> | null> {
    try {
      this.logger.info(
        `[BackgroundJob] Found ${deals.length} deals for user ${userProfile.user_id}.`,
      );

      // Prepare in-app and email notifications.
      const notification = this._prepareInAppNotification(userProfile.user_id, deals.length);
      const { jobData, jobId } = this._prepareDealEmail(userProfile, deals);

      // Enqueue an email notification job.
      await this.emailQueue.add('send-deal-notification', jobData, { jobId });

      // Return the notification to be collected for bulk insertion.
      return notification;
    } catch (userError) {
      this.logger.error(
        { err: userError },
        `[BackgroundJob] Failed to process deals for user ${userProfile.user_id}`,
      );
      return null; // Return null on error for this user.
    }
  }

  /**
   * Checks for new deals on watched items for all users and sends notifications.
   * This function is designed to be run periodically (e.g., daily).
@@ -110,70 +144,47 @@ export class BackgroundJobService {
    this.logger.info(`[BackgroundJob] Found ${allDeals.length} total deals across all users.`);

    // 2. Group deals by user in memory.
    const dealsByUser = allDeals.reduce<
      Record<
        string,
        {
          userProfile: { user_id: string; email: string; full_name: string | null };
          deals: WatchedItemDeal[];
        }
      >
    >((acc, deal) => {
      if (!acc[deal.user_id]) {
        acc[deal.user_id] = {
    const dealsByUser = new Map<string, UserDealGroup>();
    for (const deal of allDeals) {
      let userGroup = dealsByUser.get(deal.user_id);
      if (!userGroup) {
        userGroup = {
          userProfile: { user_id: deal.user_id, email: deal.email, full_name: deal.full_name },
          deals: [],
        };
        dealsByUser.set(deal.user_id, userGroup);
      }
      acc[deal.user_id].deals.push(deal);
      return acc;
    }, {});

    const allNotifications: Omit<Notification, 'notification_id' | 'is_read' | 'created_at'>[] =
      [];
      userGroup.deals.push(deal);
    }

    // 3. Process each user's deals in parallel.
    const userProcessingPromises = Object.values(dealsByUser).map(
      async ({ userProfile, deals }) => {
        try {
          this.logger.info(
            `[BackgroundJob] Found ${deals.length} deals for user ${userProfile.user_id}.`,
          );

          // 4. Prepare in-app and email notifications.
          const notification = this._prepareInAppNotification(userProfile.user_id, deals.length);
          const { jobData, jobId } = this._prepareDealEmail(userProfile, deals);

          // 5. Enqueue an email notification job.
          await this.emailQueue.add('send-deal-notification', jobData, { jobId });

          // Return the notification to be collected for bulk insertion.
          return notification;
        } catch (userError) {
          this.logger.error(
            { err: userError },
            `[BackgroundJob] Failed to process deals for user ${userProfile.user_id}`,
          );
          return null; // Return null on error for this user.
        }
      },
    const userProcessingPromises = Array.from(dealsByUser.values()).map((userGroup) =>
      this._processDealsForUser(userGroup),
    );

    // Wait for all user processing to complete.
    const results = await Promise.allSettled(userProcessingPromises);

    // 6. Collect all successfully created notifications.
    results.forEach((result) => {
      if (result.status === 'fulfilled' && result.value) {
        allNotifications.push(result.value);
      }
    });
    const successfulNotifications = results
      .filter(
        (
          result,
        ): result is PromiseFulfilledResult<
          Omit<Notification, 'notification_id' | 'is_read' | 'created_at' | 'updated_at'>
        > => result.status === 'fulfilled' && !!result.value,
      )
      .map((result) => result.value);

    // 7. Bulk insert all in-app notifications in a single query.
    if (allNotifications.length > 0) {
      await this.notificationRepo.createBulkNotifications(allNotifications, this.logger);
    if (successfulNotifications.length > 0) {
      const notificationsForDb = successfulNotifications.map((n) => ({
        ...n,
        updated_at: new Date().toISOString(),
      }));
      await this.notificationRepo.createBulkNotifications(notificationsForDb, this.logger);
      this.logger.info(
        `[BackgroundJob] Successfully created ${allNotifications.length} in-app notifications.`,
        `[BackgroundJob] Successfully created ${successfulNotifications.length} in-app notifications.`,
      );
    }
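The filter predicate above is the standard TypeScript trick for narrowing `PromiseSettledResult[]`: without the `result is PromiseFulfilledResult<...>` type guard, `.map((r) => r.value)` would not type-check. The same pattern in a reusable form, as a sketch (the helper name is ours, not the repo's):

// Hypothetical reusable helper for the same narrowing pattern.
function isFulfilledWithValue<T>(
  result: PromiseSettledResult<T | null>,
): result is PromiseFulfilledResult<T> {
  return result.status === 'fulfilled' && result.value !== null;
}

// Usage: const values = results.filter(isFulfilledWithValue).map((r) => r.value);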
@@ -244,7 +255,7 @@ export function startBackgroundJobs(
  (async () => {
    logger.info('[BackgroundJob] Enqueuing daily analytics report generation job.');
    try {
      const reportDate = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
      const reportDate = getCurrentDateISOString(); // YYYY-MM-DD
      // We use a unique job ID to prevent duplicate jobs for the same day if the scheduler restarts.
      await analyticsQueue.add(
        'generate-daily-report',
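The date-based job ID works because BullMQ treats a custom `jobId` as a deduplication key: adding a job whose ID already exists in the queue is ignored while that job is still present. A minimal sketch of the idea (the payload shape here is illustrative):

// Sketch: two adds with the same jobId enqueue only one job for the day.
const reportDate = getCurrentDateISOString(); // e.g. '2024-05-01'
const dailyJobId = `daily-report-${reportDate}`;

await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId: dailyJobId });
await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId: dailyJobId }); // no-op: deduplicated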
@@ -6,6 +6,7 @@ import {
  UniqueConstraintError,
  ForeignKeyConstraintError,
  NotFoundError,
  ForbiddenError,
  ValidationError,
  FileUploadError,
  NotNullConstraintError,
@@ -89,6 +90,25 @@ describe('Custom Database and Application Errors', () => {
    });
  });

  describe('ForbiddenError', () => {
    it('should create an error with a default message and status 403', () => {
      const error = new ForbiddenError();

      expect(error).toBeInstanceOf(Error);
      expect(error).toBeInstanceOf(RepositoryError);
      expect(error).toBeInstanceOf(ForbiddenError);
      expect(error.message).toBe('Access denied.');
      expect(error.status).toBe(403);
      expect(error.name).toBe('ForbiddenError');
    });

    it('should create an error with a custom message', () => {
      const message = 'You shall not pass.';
      const error = new ForbiddenError(message);
      expect(error.message).toBe(message);
    });
  });

  describe('ValidationError', () => {
    it('should create an error with a default message, status 400, and validation errors array', () => {
      const validationIssues = [{ path: ['email'], message: 'Invalid email' }];
@@ -86,6 +86,16 @@ export class NotFoundError extends RepositoryError {
  }
}

/**
 * Thrown when the user does not have permission to access the resource.
 */
export class ForbiddenError extends RepositoryError {
  constructor(message = 'Access denied.') {
    super(message, 403); // 403 Forbidden
    this.name = 'ForbiddenError';
  }
}

/**
 * Defines the structure for a single validation issue, often from a library like Zod.
 */
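Because every `RepositoryError` subclass carries a `status`, a single error handler can translate thrown errors into HTTP responses. The repo's actual handler isn't part of this diff; a minimal sketch assuming Express (file paths and wiring are assumptions):

// Hypothetical Express error handler; the real one is not shown in this diff.
import type { NextFunction, Request, Response } from 'express';
import { RepositoryError } from './errors.db';

export function errorHandler(err: unknown, _req: Request, res: Response, _next: NextFunction) {
  if (err instanceof RepositoryError) {
    // ForbiddenError -> 403, NotFoundError -> 404, ValidationError -> 400, etc.
    return res.status(err.status).json({ message: err.message });
  }
  return res.status(500).json({ message: 'Internal server error.' });
}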
@@ -282,6 +282,95 @@ describe('User DB Service', () => {
    });
  });

  describe('_createUser (private)', () => {
    it('should execute queries in order and return a full user profile', async () => {
      const mockUser = {
        user_id: 'private-user-id',
        email: 'private@example.com',
      };
      const mockDbProfile = {
        user_id: 'private-user-id',
        email: 'private@example.com',
        role: 'user',
        full_name: 'Private User',
        avatar_url: null,
        points: 0,
        preferences: null,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
        user_created_at: new Date().toISOString(),
        user_updated_at: new Date().toISOString(),
      };
      const expectedProfile: UserProfile = {
        user: {
          user_id: mockDbProfile.user_id,
          email: mockDbProfile.email,
          created_at: mockDbProfile.user_created_at,
          updated_at: mockDbProfile.user_updated_at,
        },
        full_name: 'Private User',
        avatar_url: null,
        role: 'user',
        points: 0,
        preferences: null,
        created_at: mockDbProfile.created_at,
        updated_at: mockDbProfile.updated_at,
      };

      // Mock the sequence of queries on the client
      (mockPoolInstance.query as Mock)
        .mockResolvedValueOnce({ rows: [] }) // set_config
        .mockResolvedValueOnce({ rows: [mockUser] }) // INSERT user
        .mockResolvedValueOnce({ rows: [mockDbProfile] }); // SELECT profile

      // Access private method for testing
      const result = await (userRepo as any)._createUser(
        mockPoolInstance, // Pass the mock client
        'private@example.com',
        'hashedpass',
        { full_name: 'Private User' },
        mockLogger,
      );

      expect(result).toEqual(expectedProfile);
      expect(mockPoolInstance.query).toHaveBeenCalledTimes(3);
      expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
        1,
        "SELECT set_config('my_app.user_metadata', $1, true)",
        [JSON.stringify({ full_name: 'Private User' })],
      );
      expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
        2,
        'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
        ['private@example.com', 'hashedpass'],
      );
      expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
        3,
        expect.stringContaining('FROM public.users u'),
        ['private-user-id'],
      );
    });

    it('should throw an error if profile is not found after user creation', async () => {
      const mockUser = { user_id: 'no-profile-user', email: 'no-profile@example.com' };

      (mockPoolInstance.query as Mock)
        .mockResolvedValueOnce({ rows: [] }) // set_config
        .mockResolvedValueOnce({ rows: [mockUser] }) // INSERT user
        .mockResolvedValueOnce({ rows: [] }); // SELECT profile returns nothing

      await expect(
        (userRepo as any)._createUser(
          mockPoolInstance,
          'no-profile@example.com',
          'pass',
          {},
          mockLogger,
        ),
      ).rejects.toThrow('Failed to create or retrieve user profile after registration.');
    });
  });

  describe('findUserWithProfileByEmail', () => {
    it('should query for a user and their profile by email', async () => {
      const mockDbResult: any = {
@@ -61,6 +61,64 @@ export class UserRepository {
    }
  }

  /**
   * The internal logic for creating a user. This method assumes it is being run
   * within a database transaction and operates on a single PoolClient.
   */
  private async _createUser(
    dbClient: PoolClient,
    email: string,
    passwordHash: string | null,
    profileData: { full_name?: string; avatar_url?: string },
    logger: Logger,
  ): Promise<UserProfile> {
    logger.debug(`[DB _createUser] Starting user creation for email: ${email}`);

    await dbClient.query("SELECT set_config('my_app.user_metadata', $1, true)", [
      JSON.stringify(profileData ?? {}),
    ]);
    logger.debug(`[DB _createUser] Session metadata set for ${email}.`);

    const userInsertRes = await dbClient.query<{ user_id: string; email: string }>(
      'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
      [email, passwordHash],
    );
    const newUserId = userInsertRes.rows[0].user_id;
    logger.debug(`[DB _createUser] Inserted into users table. New user ID: ${newUserId}`);

    const profileQuery = `
      SELECT u.user_id, u.email, u.created_at as user_created_at, u.updated_at as user_updated_at, p.full_name, p.avatar_url, p.role, p.points, p.preferences, p.created_at, p.updated_at
      FROM public.users u
      JOIN public.profiles p ON u.user_id = p.user_id
      WHERE u.user_id = $1;
    `;
    const finalProfileRes = await dbClient.query(profileQuery, [newUserId]);
    const flatProfile = finalProfileRes.rows[0];

    if (!flatProfile) {
      throw new Error('Failed to create or retrieve user profile after registration.');
    }

    const fullUserProfile: UserProfile = {
      user: {
        user_id: flatProfile.user_id,
        email: flatProfile.email,
        created_at: flatProfile.user_created_at,
        updated_at: flatProfile.user_updated_at,
      },
      full_name: flatProfile.full_name,
      avatar_url: flatProfile.avatar_url,
      role: flatProfile.role,
      points: flatProfile.points,
      preferences: flatProfile.preferences,
      created_at: flatProfile.created_at,
      updated_at: flatProfile.updated_at,
    };

    logger.debug({ user: fullUserProfile }, `[DB _createUser] Fetched full profile for new user:`);
    return fullUserProfile;
  }

  /**
   * Creates a new user in the public.users table.
   * This method expects to be run within a transaction, so it requires a PoolClient.
@@ -74,60 +132,18 @@ export class UserRepository {
    passwordHash: string | null,
    profileData: { full_name?: string; avatar_url?: string },
    logger: Logger,
    // Allow passing a transactional client
    client: Pool | PoolClient = this.db,
  ): Promise<UserProfile> {
    // This method is now a wrapper that ensures the core logic runs within a transaction.
    try {
      logger.debug(`[DB createUser] Starting user creation for email: ${email}`);

      // Use 'set_config' to safely pass parameters to a configuration variable.
      await client.query("SELECT set_config('my_app.user_metadata', $1, true)", [
        JSON.stringify(profileData),
      ]);
      logger.debug(`[DB createUser] Session metadata set for ${email}.`);

      // Insert the new user into the 'users' table. This will fire the trigger.
      const userInsertRes = await client.query<{ user_id: string }>(
        'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
        [email, passwordHash],
      );
      const newUserId = userInsertRes.rows[0].user_id;
      logger.debug(`[DB createUser] Inserted into users table. New user ID: ${newUserId}`);

      // After the trigger has run, fetch the complete profile data.
      const profileQuery = `
        SELECT u.user_id, u.email, u.created_at as user_created_at, u.updated_at as user_updated_at, p.full_name, p.avatar_url, p.role, p.points, p.preferences, p.created_at, p.updated_at
        FROM public.users u
        JOIN public.profiles p ON u.user_id = p.user_id
        WHERE u.user_id = $1;
      `;
      const finalProfileRes = await client.query(profileQuery, [newUserId]);
      const flatProfile = finalProfileRes.rows[0];

      if (!flatProfile) {
        throw new Error('Failed to create or retrieve user profile after registration.');
      // If this.db has a 'connect' method, it's a Pool. We must start a transaction.
      if ('connect' in this.db) {
        return await withTransaction(async (client) => {
          return this._createUser(client, email, passwordHash, profileData, logger);
        });
      } else {
        // If this.db is already a PoolClient, we're inside a transaction. Use it directly.
        return await this._createUser(this.db as PoolClient, email, passwordHash, profileData, logger);
      }

      // Construct the nested UserProfile object to match the type definition.
      const fullUserProfile: UserProfile = {
        // user_id is now correctly part of the nested user object, not at the top level.
        user: {
          user_id: flatProfile.user_id,
          email: flatProfile.email,
          created_at: flatProfile.user_created_at,
          updated_at: flatProfile.user_updated_at,
        },
        full_name: flatProfile.full_name,
        avatar_url: flatProfile.avatar_url,
        role: flatProfile.role,
        points: flatProfile.points,
        preferences: flatProfile.preferences,
        created_at: flatProfile.created_at,
        updated_at: flatProfile.updated_at,
      };

      logger.debug({ user: fullUserProfile }, `[DB createUser] Fetched full profile for new user:`);
      return fullUserProfile;
    } catch (error) {
      handleDbError(error, logger, 'Error during createUser', { email }, {
        uniqueMessage: 'A user with this email address already exists.',
@@ -136,6 +152,7 @@ export class UserRepository {
    }
  }

  /**
   * Finds a user by their email and joins their profile data.
   * This is used by the LocalStrategy to get all necessary data for authentication and session creation in one query.
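The `withTransaction` helper the wrapper now delegates to is imported from elsewhere in the codebase and isn't shown in this diff. From the call site it takes only a callback, so it presumably closes over a shared pool; a minimal sketch of the usual `pg` pattern it likely follows (the pool parameter below is our addition for self-containment):

// Hypothetical sketch of a withTransaction helper over a pg Pool.
import type { Pool, PoolClient } from 'pg';

export async function withTransaction<T>(
  pool: Pool,
  fn: (client: PoolClient) => Promise<T>,
): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await fn(client); // all queries inside fn share one connection
    await client.query('COMMIT');
    return result;
  } catch (err) {
    await client.query('ROLLBACK'); // undo partial work on any failure
    throw err;
  } finally {
    client.release();
  }
}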
@@ -5,7 +5,6 @@ import type { Logger } from 'pino';
import type { FlyerInsert, FlyerItemInsert } from '../types';
import type { AiProcessorResult } from './flyerAiProcessor.server'; // Keep this import for AiProcessorResult
import { AiFlyerDataSchema } from '../types/ai'; // Import consolidated schema
import { generateFlyerIcon } from '../utils/imageProcessor';
import { TransformationError } from './processingErrors';
import { parsePriceToCents } from '../utils/priceParser';

@@ -48,35 +47,21 @@ export class FlyerDataTransformer {
    };
  }

  /**
   * Generates a 64x64 icon for the flyer's first page.
   * @param firstImage The path to the first image of the flyer.
   * @param logger The logger instance.
   * @returns The filename of the generated icon.
   */
  private async _generateIcon(firstImage: string, logger: Logger): Promise<string> {
    const iconFileName = await generateFlyerIcon(
      firstImage,
      path.join(path.dirname(firstImage), 'icons'),
      logger,
    );
    return iconFileName;
  }

  /**
   * Constructs the full public URLs for the flyer image and its icon.
   * @param firstImage The path to the first image of the flyer.
   * @param imageFileName The filename of the main processed image.
   * @param iconFileName The filename of the generated icon.
   * @param baseUrl The base URL from the job payload.
   * @param logger The logger instance.
   * @returns An object containing the full image_url and icon_url.
   */
  private _buildUrls(
    firstImage: string,
    imageFileName: string,
    iconFileName: string,
    baseUrl: string | undefined,
    logger: Logger,
  ): { imageUrl: string; iconUrl: string } {
    logger.debug({ imageFileName, iconFileName, baseUrl }, 'Building URLs');
    let finalBaseUrl = baseUrl;
    if (!finalBaseUrl) {
      const port = process.env.PORT || 3000;
@@ -84,8 +69,9 @@ export class FlyerDataTransformer {
      logger.warn(`Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`);
    }
    finalBaseUrl = finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;
    const imageUrl = `${finalBaseUrl}/flyer-images/${path.basename(firstImage)}`;
    const imageUrl = `${finalBaseUrl}/flyer-images/${imageFileName}`;
    const iconUrl = `${finalBaseUrl}/flyer-images/icons/${iconFileName}`;
    logger.debug({ imageUrl, iconUrl }, 'Constructed URLs');
    return { imageUrl, iconUrl };
  }

@@ -101,8 +87,9 @@ export class FlyerDataTransformer {
   */
  async transform(
    aiResult: AiProcessorResult,
    imagePaths: { path: string; mimetype: string }[],
    originalFileName: string,
    imageFileName: string,
    iconFileName: string,
    checksum: string,
    userId: string | undefined,
    logger: Logger,
@@ -113,9 +100,7 @@ export class FlyerDataTransformer {
    try {
      const { data: extractedData, needsReview } = aiResult;

      const firstImage = imagePaths[0].path;
      const iconFileName = await this._generateIcon(firstImage, logger);
      const { imageUrl, iconUrl } = this._buildUrls(firstImage, iconFileName, baseUrl, logger);
      const { imageUrl, iconUrl } = this._buildUrls(imageFileName, iconFileName, baseUrl, logger);

      const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => this._normalizeItem(item));
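`generateFlyerIcon` lives in `../utils/imageProcessor` and is not part of this diff; the old doc comment only tells us it produces a 64x64 icon and returns the filename. A plausible sketch, assuming the project uses `sharp` (the library choice and the icon naming scheme are assumptions):

// Hypothetical sketch of generateFlyerIcon; the real implementation is not shown in this diff.
import path from 'path';
import { promises as fs } from 'fs';
import sharp from 'sharp';
import type { Logger } from 'pino';

export async function generateFlyerIcon(
  sourceImage: string,
  outputDir: string,
  logger: Logger,
): Promise<string> {
  await fs.mkdir(outputDir, { recursive: true });
  const baseName = path.basename(sourceImage, path.extname(sourceImage));
  const iconFileName = `icon-${baseName}.webp`; // naming assumed from the 'icon-*.webp' values in the tests
  // Resize to a 64x64 webp thumbnail and write it into the icons directory.
  await sharp(sourceImage)
    .resize(64, 64, { fit: 'cover' })
    .webp()
    .toFile(path.join(outputDir, iconFileName));
  logger.debug({ iconFileName }, 'Generated flyer icon');
  return iconFileName;
}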
@@ -36,13 +36,20 @@ import {
  PdfConversionError,
  UnsupportedFileTypeError,
  TransformationError,
  DatabaseError,
} from './processingErrors';
import { NotFoundError } from './db/errors.db';
import { FlyerFileHandler } from './flyerFileHandler.server';
import { FlyerAiProcessor } from './flyerAiProcessor.server';
import type { IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
import { generateFlyerIcon } from '../utils/imageProcessor';
import type { AIService } from './aiService.server';

// Mock image processor functions
vi.mock('../utils/imageProcessor', () => ({
  generateFlyerIcon: vi.fn(),
}));

// Mock dependencies
vi.mock('./aiService.server', () => ({
  aiService: {
@@ -171,6 +178,9 @@ describe('FlyerProcessingService', () => {
    // FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
    vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
  });
  beforeEach(() => {
    vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer.webp');
  });

  const createMockJob = (data: Partial<FlyerJobData>): Job<FlyerJobData> => {
    return {
@@ -202,19 +212,54 @@ describe('FlyerProcessingService', () => {
  it('should process an image file successfully and enqueue a cleanup job', async () => {
    const job = createMockJob({ filePath: '/tmp/flyer.jpg', originalFileName: 'flyer.jpg' });

    // Arrange: Mock dependencies to simulate a successful run
    mockFileHandler.prepareImageInputs.mockResolvedValue({
      imagePaths: [{ path: '/tmp/flyer-processed.jpeg', mimetype: 'image/jpeg' }],
      createdImagePaths: ['/tmp/flyer-processed.jpeg'],
    });
    vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer.webp');

    const result = await service.processJob(job);

    expect(result).toEqual({ flyerId: 1 });

    // 1. File handler was called
    expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(job.data.filePath, job, expect.any(Object));

    // 2. AI processor was called
    expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
    // Verify that the transaction function was called.

    // 3. Icon was generated from the processed image
    expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-processed.jpeg', '/tmp/icons', expect.any(Object));

    // 4. Transformer was called with the correct filenames
    expect(FlyerDataTransformer.prototype.transform).toHaveBeenCalledWith(
      expect.any(Object), // aiResult
      'flyer.jpg', // originalFileName
      'flyer-processed.jpeg', // imageFileName
      'icon-flyer.webp', // iconFileName
      'checksum-123', // checksum
      undefined, // userId
      expect.any(Object), // logger
      'http://localhost:3000', // baseUrl
    );

    // 5. DB transaction was initiated
    expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
    // Verify that the functions inside the transaction were called.
    expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
    expect(mocks.mockAdminLogActivity).toHaveBeenCalledTimes(1);

    // 6. Cleanup job was enqueued with all generated files
    expect(mockCleanupQueue.add).toHaveBeenCalledWith(
      'cleanup-flyer-files',
      { flyerId: 1, paths: ['/tmp/flyer.jpg'] },
      {
        flyerId: 1,
        paths: [
          '/tmp/flyer.jpg', // original job path
          '/tmp/flyer-processed.jpeg', // from prepareImageInputs
          '/tmp/icons/icon-flyer.webp', // from generateFlyerIcon
        ],
      },
      expect.any(Object),
    );
  });
@@ -225,9 +270,13 @@ describe('FlyerProcessingService', () => {
    // Mock the file handler to return multiple created paths
    const createdPaths = ['/tmp/flyer-1.jpg', '/tmp/flyer-2.jpg'];
    mockFileHandler.prepareImageInputs.mockResolvedValue({
      imagePaths: createdPaths.map(p => ({ path: p, mimetype: 'image/jpeg' })),
      imagePaths: [
        { path: '/tmp/flyer-1.jpg', mimetype: 'image/jpeg' },
        { path: '/tmp/flyer-2.jpg', mimetype: 'image/jpeg' },
      ],
      createdImagePaths: createdPaths,
    });
    vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer-1.webp');

    await service.processJob(job);

@@ -236,15 +285,18 @@ describe('FlyerProcessingService', () => {
    expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.pdf', job, expect.any(Object));
    expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
    expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
    // Verify cleanup job includes original PDF and both generated images
    // Verify icon generation was called for the first page
    expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-1.jpg', '/tmp/icons', expect.any(Object));
    // Verify cleanup job includes original PDF and all generated/processed images
    expect(mockCleanupQueue.add).toHaveBeenCalledWith(
      'cleanup-flyer-files',
      {
        flyerId: 1,
        paths: [
          '/tmp/flyer.pdf',
          '/tmp/flyer-1.jpg',
          '/tmp/flyer-2.jpg',
          '/tmp/flyer.pdf', // original job path
          '/tmp/flyer-1.jpg', // from prepareImageInputs
          '/tmp/flyer-2.jpg', // from prepareImageInputs
          '/tmp/icons/icon-flyer-1.webp', // from generateFlyerIcon
        ],
      },
      expect.any(Object),
@@ -377,6 +429,7 @@ describe('FlyerProcessingService', () => {
      imagePaths: [{ path: convertedPath, mimetype: 'image/png' }],
      createdImagePaths: [convertedPath],
    });
    vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer-converted.webp');

    await service.processJob(job);

@@ -384,9 +437,18 @@ describe('FlyerProcessingService', () => {
    expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
    expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.gif', job, expect.any(Object));
    expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
    // Verify icon generation was called for the converted image
    expect(generateFlyerIcon).toHaveBeenCalledWith(convertedPath, '/tmp/icons', expect.any(Object));
    expect(mockCleanupQueue.add).toHaveBeenCalledWith(
      'cleanup-flyer-files',
      { flyerId: 1, paths: ['/tmp/flyer.gif', convertedPath] },
      {
        flyerId: 1,
        paths: [
          '/tmp/flyer.gif', // original job path
          convertedPath, // from prepareImageInputs
          '/tmp/icons/icon-flyer-converted.webp', // from generateFlyerIcon
        ],
      },
      expect.any(Object),
    );
  });
@@ -400,8 +462,8 @@ describe('FlyerProcessingService', () => {
    // This is more realistic than mocking the inner function `createFlyerAndItems`.
    vi.mocked(mockedDb.withTransaction).mockRejectedValue(dbError);

    // The service wraps the generic DB error in a DatabaseError, but _reportErrorAndThrow re-throws the original.
    await expect(service.processJob(job)).rejects.toThrow(dbError);
    // The service wraps the generic DB error in a DatabaseError.
    await expect(service.processJob(job)).rejects.toThrow(DatabaseError);

    // The final progress update should reflect the structured DatabaseError.
    expect(job.updateProgress).toHaveBeenLastCalledWith({
@@ -443,17 +505,14 @@ describe('FlyerProcessingService', () => {
  it('should delegate to _reportErrorAndThrow if icon generation fails', async () => {
    const job = createMockJob({});
    const { logger } = await import('./logger.server');
    const transformationError = new TransformationError('Icon generation failed.');
    // The `transform` method calls `generateFlyerIcon`. In `beforeEach`, `transform` is mocked
    // to always succeed. For this test, we override that mock to simulate a failure
    // bubbling up from the icon generation step.
    vi.spyOn(FlyerDataTransformer.prototype, 'transform').mockRejectedValue(transformationError);
    const iconGenError = new Error('Icon generation failed.');
    vi.mocked(generateFlyerIcon).mockRejectedValue(iconGenError);

    const reportErrorSpy = vi.spyOn(service as any, '_reportErrorAndThrow');

    await expect(service.processJob(job)).rejects.toThrow('Icon generation failed.');

    expect(reportErrorSpy).toHaveBeenCalledWith(transformationError, job, expect.any(Object), expect.any(Array));
    expect(reportErrorSpy).toHaveBeenCalledWith(iconGenError, job, expect.any(Object), expect.any(Array));
    expect(mockCleanupQueue.add).not.toHaveBeenCalled();
    expect(logger.warn).toHaveBeenCalledWith(
      'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
@@ -632,5 +691,30 @@ describe('FlyerProcessingService', () => {
        'Job received no paths and could not derive any from the database. Skipping.',
      );
    });

    it('should derive paths from DB and delete files if job paths are empty', async () => {
      const job = createMockCleanupJob({ flyerId: 1, paths: [] }); // Empty paths
      const mockFlyer = createMockFlyer({
        image_url: 'http://localhost:3000/flyer-images/flyer-abc.jpg',
        icon_url: 'http://localhost:3000/flyer-images/icons/icon-flyer-abc.webp',
      });
      // Mock DB call to return a flyer
      vi.mocked(mockedDb.flyerRepo.getFlyerById).mockResolvedValue(mockFlyer);
      mocks.unlink.mockResolvedValue(undefined);

      // Mock process.env.STORAGE_PATH
      vi.stubEnv('STORAGE_PATH', '/var/www/app/flyer-images');

      const result = await service.processCleanupJob(job);

      expect(result).toEqual({ status: 'success', deletedCount: 2 });
      expect(mocks.unlink).toHaveBeenCalledTimes(2);
      expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/flyer-abc.jpg');
      expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/icons/icon-flyer-abc.webp');
      const { logger } = await import('./logger.server');
      expect(logger.warn).toHaveBeenCalledWith(
        'Cleanup job for flyer 1 received no paths. Attempting to derive paths from DB.',
      );
    });
  });
});
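The derivation this test exercises maps a stored public URL back to a file under `STORAGE_PATH`. The service's actual helper isn't shown in the diff; one way it could work, as a sketch (the helper name is ours):

// Hypothetical sketch of deriving a local file path from a stored image URL.
import path from 'path';

function urlToStoragePath(imageUrl: string, storageRoot: string): string {
  // e.g. 'http://localhost:3000/flyer-images/icons/icon-flyer-abc.webp'
  //   -> '<storageRoot>/icons/icon-flyer-abc.webp'
  const pathname = new URL(imageUrl).pathname; // '/flyer-images/icons/icon-flyer-abc.webp'
  const relative = pathname.replace(/^\/flyer-images\//, '');
  return path.join(storageRoot, relative);
}

// urlToStoragePath(mockFlyer.image_url, '/var/www/app/flyer-images')
//   === '/var/www/app/flyer-images/flyer-abc.jpg', matching the unlink assertions above.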
@@ -1,6 +1,5 @@
// src/services/flyerProcessingService.server.ts
import type { Job, Queue } from 'bullmq';
import { UnrecoverableError } from 'bullmq';
import { UnrecoverableError, type Job, type Queue } from 'bullmq';
import path from 'path';
import type { Logger } from 'pino';
import type { FlyerFileHandler, IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
@@ -18,7 +17,8 @@ import {
} from './processingErrors';
import { NotFoundError } from './db/errors.db';
import { createFlyerAndItems } from './db/flyer.db';
import { logger as globalLogger } from './logger.server';
import { logger as globalLogger } from './logger.server'; // This was a duplicate, fixed.
import { generateFlyerIcon } from '../utils/imageProcessor';

// Define ProcessingStage locally as it's not exported from the types file.
export type ProcessingStage = {
@@ -92,10 +92,22 @@ export class FlyerProcessingService {
      stages[2].status = 'in-progress';
      await job.updateProgress({ stages });

      // The fileHandler has already prepared the primary image (e.g., by stripping EXIF data).
      // We now generate an icon from it and prepare the filenames for the transformer.
      const primaryImagePath = imagePaths[0].path;
      const imageFileName = path.basename(primaryImagePath);
      const iconsDir = path.join(path.dirname(primaryImagePath), 'icons');
      const iconFileName = await generateFlyerIcon(primaryImagePath, iconsDir, logger);

      // Add the newly generated icon to the list of files to be cleaned up.
      // The main processed image path is already in `allFilePaths` via `createdImagePaths`.
      allFilePaths.push(path.join(iconsDir, iconFileName));

      const { flyerData, itemsForDb } = await this.transformer.transform(
        aiResult,
        imagePaths,
        job.data.originalFileName,
        imageFileName,
        iconFileName,
        job.data.checksum,
        job.data.userId,
        logger,
src/tests/e2e/admin-authorization.e2e.test.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
// src/tests/e2e/admin-authorization.e2e.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
import { createAndLoginUser } from '../utils/testHelpers';
import type { UserProfile } from '../../types';

/**
 * @vitest-environment node
 */
describe('Admin Route Authorization', () => {
  let regularUser: UserProfile;
  let regularUserAuthToken: string;

  beforeAll(async () => {
    // Create a standard user for testing authorization
    const { user, token } = await createAndLoginUser({
      email: `e2e-authz-user-${Date.now()}@example.com`,
      fullName: 'E2E AuthZ User',
    });
    regularUser = user;
    regularUserAuthToken = token;
  });

  afterAll(async () => {
    // Cleanup the created user
    if (regularUser?.user.user_id) {
      await cleanupDb({ userIds: [regularUser.user.user_id] });
    }
  });

  // Define a list of admin-only endpoints to test
  const adminEndpoints = [
    { method: 'GET', path: '/admin/stats', action: (token: string) => apiClient.getApplicationStats(token) },
    { method: 'GET', path: '/admin/users', action: (token: string) => apiClient.authedGet('/admin/users', { tokenOverride: token }) },
    { method: 'GET', path: '/admin/corrections', action: (token: string) => apiClient.getSuggestedCorrections(token) },
    { method: 'POST', path: '/admin/corrections/1/approve', action: (token: string) => apiClient.approveCorrection(1, token) },
    { method: 'POST', path: '/admin/trigger/daily-deal-check', action: (token: string) => apiClient.authedPostEmpty('/admin/trigger/daily-deal-check', { tokenOverride: token }) },
    { method: 'GET', path: '/admin/queues/status', action: (token: string) => apiClient.authedGet('/admin/queues/status', { tokenOverride: token }) },
  ];

  it.each(adminEndpoints)('should return 403 Forbidden for a regular user trying to access $method $path', async ({ action }) => {
    // Act: Attempt to access the admin endpoint with the regular user's token
    const response = await action(regularUserAuthToken);

    // Assert: The request should be forbidden
    expect(response.status).toBe(403);
    const errorData = await response.json();
    expect(errorData.message).toBe('Forbidden: Administrator access required.');
  });
});
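The guard these table-driven tests exercise isn't included in the diff; a minimal sketch of an admin middleware that would produce the asserted 403 body, assuming Express and a `req.user` populated by an upstream auth layer:

// Hypothetical admin guard; the repo's actual middleware is not shown here.
import type { NextFunction, Request, Response } from 'express';

export function requireAdmin(req: Request, res: Response, next: NextFunction) {
  const role = (req as any).user?.role; // assumes upstream auth middleware sets req.user
  if (role !== 'admin') {
    return res.status(403).json({ message: 'Forbidden: Administrator access required.' });
  }
  next();
}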
@@ -1,15 +1,14 @@
// src/tests/e2e/admin-dashboard.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import * as apiClient from '../../services/apiClient';
import { getPool } from '../../services/db/connection.db';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';

/**
 * @vitest-environment node
 */

const request = supertest(app);

describe('E2E Admin Dashboard Flow', () => {
  // Use a unique email for every run to avoid collisions
  const uniqueId = Date.now();
@@ -21,25 +20,18 @@ describe('E2E Admin Dashboard Flow', () => {

  afterAll(async () => {
    // Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
    if (adminUserId) {
      try {
        await getPool().query('DELETE FROM public.users WHERE user_id = $1', [adminUserId]);
      } catch (err) {
        console.error('Error cleaning up E2E admin user:', err);
      }
    }
    await cleanupDb({
      userIds: [adminUserId],
    });
  });

  it('should allow an admin to log in and access dashboard features', async () => {
    // 1. Register a new user (initially a regular user)
    const registerResponse = await request.post('/api/auth/register').send({
      email: adminEmail,
      password: adminPassword,
      full_name: 'E2E Admin User',
    });
    const registerResponse = await apiClient.registerUser(adminEmail, adminPassword, 'E2E Admin User');

    expect(registerResponse.status).toBe(201);
    const registeredUser = registerResponse.body.userprofile.user;
    const registerData = await registerResponse.json();
    const registeredUser = registerData.userprofile.user;
    adminUserId = registeredUser.user_id;
    expect(adminUserId).toBeDefined();

@@ -50,46 +42,55 @@ describe('E2E Admin Dashboard Flow', () => {
    ]);

    // 3. Login to get the access token (now with admin privileges)
    const loginResponse = await request.post('/api/auth/login').send({
      email: adminEmail,
      password: adminPassword,
    });
    // We poll because the direct DB write above runs in a separate transaction
    // from the login API call. Due to PostgreSQL's `Read Committed` transaction
    // isolation, the API might read the user's role before the test's update
    // transaction is fully committed and visible. Polling makes the test resilient to this race condition.
    const { response: loginResponse, data: loginData } = await poll(
      async () => {
        const response = await apiClient.loginUser(adminEmail, adminPassword, false);
        // Clone to read body without consuming the original response stream
        const data = response.ok ? await response.clone().json() : {};
        return { response, data };
      },
      (result) => result.response.ok && result.data?.userprofile?.role === 'admin',
      { timeout: 10000, interval: 1000, description: 'user login with admin role' },
    );

    expect(loginResponse.status).toBe(200);
    authToken = loginResponse.body.token;
    authToken = loginData.token;
    expect(authToken).toBeDefined();
    // Verify the role returned in the login response is now 'admin'
    expect(loginResponse.body.userprofile.role).toBe('admin');
    expect(loginData.userprofile.role).toBe('admin');

    // 4. Fetch System Stats (Protected Admin Route)
    const statsResponse = await request
      .get('/api/admin/stats')
      .set('Authorization', `Bearer ${authToken}`);
    const statsResponse = await apiClient.getApplicationStats(authToken);

    expect(statsResponse.status).toBe(200);
    expect(statsResponse.body).toHaveProperty('userCount');
    expect(statsResponse.body).toHaveProperty('flyerCount');
    const statsData = await statsResponse.json();
    expect(statsData).toHaveProperty('userCount');
    expect(statsData).toHaveProperty('flyerCount');

    // 5. Fetch User List (Protected Admin Route)
    const usersResponse = await request
      .get('/api/admin/users')
      .set('Authorization', `Bearer ${authToken}`);
    const usersResponse = await apiClient.authedGet('/admin/users', { tokenOverride: authToken });

    expect(usersResponse.status).toBe(200);
    expect(Array.isArray(usersResponse.body)).toBe(true);
    const usersData = await usersResponse.json();
    expect(Array.isArray(usersData)).toBe(true);
    // The list should contain the admin user we just created
    const self = usersResponse.body.find((u: any) => u.user_id === adminUserId);
    const self = usersData.find((u: any) => u.user_id === adminUserId);
    expect(self).toBeDefined();

    // 6. Check Queue Status (Protected Admin Route)
    const queueResponse = await request
      .get('/api/admin/queues/status')
      .set('Authorization', `Bearer ${authToken}`);
    const queueResponse = await apiClient.authedGet('/admin/queues/status', {
      tokenOverride: authToken,
    });

    expect(queueResponse.status).toBe(200);
    expect(Array.isArray(queueResponse.body)).toBe(true);
    const queueData = await queueResponse.json();
    expect(Array.isArray(queueData)).toBe(true);
    // Verify that the 'flyer-processing' queue is present in the status report
    const flyerQueue = queueResponse.body.find((q: any) => q.name === 'flyer-processing');
    const flyerQueue = queueData.find((q: any) => q.name === 'flyer-processing');
    expect(flyerQueue).toBeDefined();
    expect(flyerQueue.counts).toBeDefined();
  });
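The `poll` utility these suites now import from `../utils/poll` is not included in the diff. From the call sites it takes an async producer, a predicate, and `{ timeout, interval, description }`, and resolves with the first value that satisfies the predicate; a sketch consistent with that usage (details inferred, not confirmed):

// Hypothetical sketch of the poll utility, inferred from how the tests call it.
interface PollOptions {
  timeout: number; // total time budget in ms
  interval: number; // delay between attempts in ms
  description?: string; // used in the timeout error message
}

export async function poll<T>(
  fn: () => Promise<T>,
  predicate: (value: T) => boolean,
  { timeout, interval, description = 'condition' }: PollOptions,
): Promise<T> {
  const deadline = Date.now() + timeout;
  do {
    const value = await fn();
    if (predicate(value)) return value;
    await new Promise((resolve) => setTimeout(resolve, interval));
  } while (Date.now() < deadline);
  throw new Error(`Timed out after ${timeout}ms waiting for ${description}`);
}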
@@ -2,6 +2,7 @@
import { describe, it, expect, afterAll, beforeAll } from 'vitest';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
import type { UserProfile } from '../../types';

@@ -11,15 +12,17 @@ import type { UserProfile } from '../../types';

describe('Authentication E2E Flow', () => {
  let testUser: UserProfile;
  let testUserAuthToken: string;
  const createdUserIds: string[] = [];

  beforeAll(async () => {
    // Create a user that can be used for login-related tests in this suite.
    try {
      const { user } = await createAndLoginUser({
      const { user, token } = await createAndLoginUser({
        email: `e2e-login-user-${Date.now()}@example.com`,
        fullName: 'E2E Login User',
      });
      testUserAuthToken = token;
      testUser = user;
      createdUserIds.push(user.user.user_id);
    } catch (error) {
@@ -118,12 +121,8 @@ describe('Authentication E2E Flow', () => {
  });

  it('should be able to access a protected route after logging in', async () => {
    // Arrange: Log in to get a token
    const loginResponse = await apiClient.loginUser(testUser.user.email, TEST_PASSWORD, false);
    const loginData = await loginResponse.json();
    const token = loginData.token;

    expect(loginResponse.status).toBe(200);
    // Arrange: Use the token from the beforeAll hook
    const token = testUserAuthToken;
    expect(token).toBeDefined();

    // Act: Use the token to access a protected route
@@ -139,11 +138,9 @@ describe('Authentication E2E Flow', () => {
  });

  it('should allow an authenticated user to update their profile', async () => {
    // Arrange: Log in to get a token
    const loginResponse = await apiClient.loginUser(testUser.user.email, TEST_PASSWORD, false);
    const loginData = await loginResponse.json();
    const token = loginData.token;
    expect(loginResponse.status).toBe(200);
    // Arrange: Use the token from the beforeAll hook
    const token = testUserAuthToken;
    expect(token).toBeDefined();

    const profileUpdates = {
      full_name: 'E2E Updated Name',
@@ -178,34 +175,26 @@ describe('Authentication E2E Flow', () => {
    expect(registerResponse.status).toBe(201);
    createdUserIds.push(registerData.userprofile.user.user_id);

    // Instead of a fixed delay, poll by attempting to log in. This is more robust
    // and confirms the user record is committed and readable by subsequent transactions.
    let loginSuccess = false;
    for (let i = 0; i < 10; i++) {
      // Poll for up to 10 seconds
      const loginResponse = await apiClient.loginUser(email, TEST_PASSWORD, false);
      if (loginResponse.ok) {
        loginSuccess = true;
        break;
      }
      await new Promise((resolve) => setTimeout(resolve, 1000));
    }
    expect(loginSuccess, 'User should be able to log in after registration. DB might be lagging.').toBe(true);
    // Poll until the user can log in, confirming the record has propagated.
    await poll(
      () => apiClient.loginUser(email, TEST_PASSWORD, false),
      (response) => response.ok,
      { timeout: 10000, interval: 1000, description: 'user login after registration' },
    );

    // Act 1: Request a password reset
    const forgotResponse = await apiClient.requestPasswordReset(email);
    const forgotData = await forgotResponse.json();
    const resetToken = forgotData.token;

    // --- DEBUG SECTION FOR FAILURE ---
    if (!resetToken) {
      console.error(' [DEBUG FAILURE] Token missing in response:', JSON.stringify(forgotData, null, 2));
      console.error(' [DEBUG FAILURE] This usually means the backend hit a DB error or is not in NODE_ENV=test mode.');
    }
    // ---------------------------------
    // Poll for the password reset token.
    const { response: forgotResponse, token: resetToken } = await poll(
      async () => {
        const response = await apiClient.requestPasswordReset(email);
        // Clone to read body without consuming the original response stream
        const data = response.ok ? await response.clone().json() : {};
        return { response, token: data.token };
      },
      (result) => !!result.token,
      { timeout: 10000, interval: 1000, description: 'password reset token generation' },
    );

    // Assert 1: Check that we received a token.
    expect(forgotResponse.status).toBe(200);
    expect(resetToken, 'Backend returned 200 but no token. Check backend logs for "Connection terminated" errors.').toBeDefined();
    expect(resetToken).toBeTypeOf('string');

@@ -236,4 +225,47 @@ describe('Authentication E2E Flow', () => {
    expect(data.token).toBeUndefined();
  });
});

describe('Token Refresh Flow', () => {
  it('should allow an authenticated user to refresh their access token and use it', async () => {
    // 1. Log in to get the refresh token cookie and an initial access token.
    const loginResponse = await apiClient.loginUser(testUser.user.email, TEST_PASSWORD, false);
    expect(loginResponse.status).toBe(200);
    const loginData = await loginResponse.json();
    const initialAccessToken = loginData.token;

    // 2. Extract the refresh token from the 'set-cookie' header.
    const setCookieHeader = loginResponse.headers.get('set-cookie');
    expect(setCookieHeader, 'Set-Cookie header should be present in login response').toBeDefined();
    // A typical Set-Cookie header might be 'refreshToken=...; Path=/; HttpOnly; Max-Age=...'. We just need the 'refreshToken=...' part.
    const refreshTokenCookie = setCookieHeader!.split(';')[0];

    // 3. Call the refresh token endpoint, passing the cookie.
    // This assumes a new method in apiClient to handle this specific request.
    const refreshResponse = await apiClient.refreshToken(refreshTokenCookie);

    // 4. Assert the refresh was successful and we got a new token.
    expect(refreshResponse.status).toBe(200);
    const refreshData = await refreshResponse.json();
    const newAccessToken = refreshData.token;
    expect(newAccessToken).toBeDefined();
    expect(newAccessToken).not.toBe(initialAccessToken);

    // 5. Use the new access token to access a protected route.
    const profileResponse = await apiClient.getAuthenticatedUserProfile({ tokenOverride: newAccessToken });
    expect(profileResponse.status).toBe(200);
    const profileData = await profileResponse.json();
    expect(profileData.user.user_id).toBe(testUser.user.user_id);
  });

  it('should fail to refresh with an invalid or missing token', async () => {
    // Case 1: No cookie provided. This assumes refreshToken can handle an empty string.
    const noCookieResponse = await apiClient.refreshToken('');
    expect(noCookieResponse.status).toBe(401);

    // Case 2: Invalid cookie provided
    const invalidCookieResponse = await apiClient.refreshToken('refreshToken=invalid-garbage-token');
    expect(invalidCookieResponse.status).toBe(403);
  });
});
});
@@ -1,18 +1,16 @@
// src/tests/e2e/flyer-upload.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import crypto from 'crypto';
import * as apiClient from '../../services/apiClient';
import path from 'path';
import fs from 'fs';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';

/**
 * @vitest-environment node
 */

const request = supertest(app);

describe('E2E Flyer Upload and Processing Workflow', () => {
  const uniqueId = Date.now();
  const userEmail = `e2e-uploader-${uniqueId}@example.com`;
@@ -23,33 +21,24 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
  let flyerId: number | null = null;

  afterAll(async () => {
    // Cleanup: Delete the flyer and user created during the test
    const pool = getPool();
    if (flyerId) {
      await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [flyerId]);
    }
    if (userId) {
      await pool.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
    }
    // Use the centralized cleanup utility for robustness.
    await cleanupDb({
      userIds: [userId],
      flyerIds: [flyerId],
    });
  });

  it('should allow a user to upload a flyer and wait for processing to complete', async () => {
    // 1. Register a new user
    const registerResponse = await request.post('/api/auth/register').send({
      email: userEmail,
      password: userPassword,
      full_name: 'E2E Flyer Uploader',
    });
    const registerResponse = await apiClient.registerUser(userEmail, userPassword, 'E2E Flyer Uploader');
    expect(registerResponse.status).toBe(201);

    // 2. Login to get the access token
    const loginResponse = await request.post('/api/auth/login').send({
      email: userEmail,
      password: userPassword,
    });
    const loginResponse = await apiClient.loginUser(userEmail, userPassword, false);
    expect(loginResponse.status).toBe(200);
    authToken = loginResponse.body.token;
    userId = loginResponse.body.userprofile.user.user_id;
    const loginData = await loginResponse.json();
    authToken = loginData.token;
    userId = loginData.userprofile.user.user_id;
    expect(authToken).toBeDefined();

    // 3. Prepare the flyer file
@@ -73,34 +62,37 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
      ]);
    }

    // Create a File object for the apiClient
    // FIX: The Node.js `Buffer` type can be incompatible with the web `File` API's
    // expected `BlobPart` type in some TypeScript configurations. Explicitly creating
    // a `Uint8Array` from the buffer ensures compatibility and resolves the type error.
    // `Uint8Array` is a valid `BufferSource`, which is a valid `BlobPart`.
    const flyerFile = new File([new Uint8Array(fileBuffer)], fileName, { type: 'image/jpeg' });

    // Calculate checksum (required by the API)
    const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');

    // 4. Upload the flyer
    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .set('Authorization', `Bearer ${authToken}`)
      .field('checksum', checksum)
      .attach('flyerFile', fileBuffer, fileName);
    const uploadResponse = await apiClient.uploadAndProcessFlyer(flyerFile, checksum, authToken);

    expect(uploadResponse.status).toBe(202);
    const jobId = uploadResponse.body.jobId;
    const uploadData = await uploadResponse.json();
    const jobId = uploadData.jobId;
    expect(jobId).toBeDefined();

    // 5. Poll for job completion
    let jobStatus;
    const maxRetries = 60; // Poll for up to 180 seconds
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3s

      const statusResponse = await request
        .get(`/api/ai/jobs/${jobId}/status`)
        .set('Authorization', `Bearer ${authToken}`);

      jobStatus = statusResponse.body;
      if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
        break;
      }
    // 5. Poll for job completion using the new utility
    const jobStatus = await poll(
      async () => {
        const statusResponse = await apiClient.getJobStatus(jobId, authToken);
        return statusResponse.json();
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 180000, interval: 3000, description: 'flyer processing job completion' },
    );

    if (jobStatus.state === 'failed') {
      // Log the failure reason for easier debugging in CI/CD environments.
      console.error('E2E flyer processing job failed. Reason:', jobStatus.failedReason);
    }

    expect(jobStatus.state).toBe('completed');
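`uploadAndProcessFlyer` replaces the supertest `.field(...).attach(...)` call, but its implementation isn't shown. A sketch of the multipart request it presumably builds; the endpoint and field names are taken from the removed supertest code, the base URL is an assumption:

// Hypothetical sketch of apiClient.uploadAndProcessFlyer.
export async function uploadAndProcessFlyer(
  flyerFile: File,
  checksum: string,
  token: string,
): Promise<Response> {
  const form = new FormData();
  form.append('checksum', checksum); // same field name the old supertest call used
  form.append('flyerFile', flyerFile, flyerFile.name);
  return fetch('http://localhost:3000/api/ai/upload-and-process', {
    method: 'POST',
    headers: { Authorization: `Bearer ${token}` },
    body: form,
  });
}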
@@ -1,15 +1,13 @@
// src/tests/e2e/user-journey.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';

/**
 * @vitest-environment node
 */

const request = supertest(app);

describe('E2E User Journey', () => {
  // Use a unique email for every run to avoid collisions
  const uniqueId = Date.now();
@@ -23,65 +21,64 @@ describe('E2E User Journey', () => {
  afterAll(async () => {
    // Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
    // If the test succeeds, the user deletes their own account, so this acts as a fallback.
    if (userId) {
      try {
        await getPool().query('DELETE FROM public.users WHERE user_id = $1', [userId]);
      } catch (err) {
        console.error('Error cleaning up E2E test user:', err);
      }
    }
    await cleanupDb({
      userIds: [userId],
    });
  });

  it('should complete a full user lifecycle: Register -> Login -> Manage List -> Delete Account', async () => {
    // 1. Register a new user
    const registerResponse = await request.post('/api/auth/register').send({
      email: userEmail,
      password: userPassword,
      full_name: 'E2E Traveler',
    });
    const registerResponse = await apiClient.registerUser(userEmail, userPassword, 'E2E Traveler');

    expect(registerResponse.status).toBe(201);
    expect(registerResponse.body.message).toBe('User registered successfully!');
    const registerData = await registerResponse.json();
    expect(registerData.message).toBe('User registered successfully!');

    // 2. Login to get the access token
    const loginResponse = await request.post('/api/auth/login').send({
      email: userEmail,
      password: userPassword,
    });
    // 2. Login to get the access token.
    // We poll here because even between two API calls (register and login),
    // there can be a small delay before the newly created user record is visible
    // to the transaction started by the login request. This prevents flaky test failures.
    const { response: loginResponse, data: loginData } = await poll(
      async () => {
        const response = await apiClient.loginUser(userEmail, userPassword, false);
        const data = response.ok ? await response.clone().json() : {};
        return { response, data };
      },
      (result) => result.response.ok,
      { timeout: 10000, interval: 1000, description: 'user login after registration' },
    );

    expect(loginResponse.status).toBe(200);
    authToken = loginResponse.body.token;
    userId = loginResponse.body.userprofile.user.user_id;
    authToken = loginData.token;
    userId = loginData.userprofile.user.user_id;

    expect(authToken).toBeDefined();
    expect(userId).toBeDefined();

    // 3. Create a Shopping List
    const createListResponse = await request
      .post('/api/users/shopping-lists')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ name: 'E2E Party List' });
    const createListResponse = await apiClient.createShoppingList('E2E Party List', authToken);

    expect(createListResponse.status).toBe(201);
    shoppingListId = createListResponse.body.shopping_list_id;
    const createListData = await createListResponse.json();
    shoppingListId = createListData.shopping_list_id;
    expect(shoppingListId).toBeDefined();

    // 4. Add an item to the list
    const addItemResponse = await request
      .post(`/api/users/shopping-lists/${shoppingListId}/items`)
      .set('Authorization', `Bearer ${authToken}`)
      .send({ customItemName: 'Chips' });
    const addItemResponse = await apiClient.addShoppingListItem(
      shoppingListId,
      { customItemName: 'Chips' },
      authToken,
    );

    expect(addItemResponse.status).toBe(201);
    expect(addItemResponse.body.custom_item_name).toBe('Chips');
    const addItemData = await addItemResponse.json();
    expect(addItemData.custom_item_name).toBe('Chips');

    // 5. Verify the list and item exist via GET
    const getListsResponse = await request
      .get('/api/users/shopping-lists')
      .set('Authorization', `Bearer ${authToken}`);
    const getListsResponse = await apiClient.fetchShoppingLists(authToken);

    expect(getListsResponse.status).toBe(200);
    const myLists = getListsResponse.body;
    const myLists = await getListsResponse.json();
    const targetList = myLists.find((l: any) => l.shopping_list_id === shoppingListId);

    expect(targetList).toBeDefined();
@@ -89,19 +86,16 @@ describe('E2E User Journey', () => {
    expect(targetList.items[0].custom_item_name).toBe('Chips');

    // 6. Delete the User Account (Self-Service)
    const deleteAccountResponse = await request
      .delete('/api/users/account')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ password: userPassword });
    const deleteAccountResponse = await apiClient.deleteUserAccount(userPassword, {
      tokenOverride: authToken,
    });

    expect(deleteAccountResponse.status).toBe(200);
    expect(deleteAccountResponse.body.message).toBe('Account deleted successfully.');
    const deleteData = await deleteAccountResponse.json();
    expect(deleteData.message).toBe('Account deleted successfully.');

    // 7. Verify Login is no longer possible
    const failLoginResponse = await request.post('/api/auth/login').send({
      email: userEmail,
      password: userPassword,
    });
    const failLoginResponse = await apiClient.loginUser(userEmail, userPassword, false);

    expect(failLoginResponse.status).toBe(401);
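Note on the apiClient helpers used above: the module src/services/apiClient is not part of this diff, but the call sites (response.ok, response.clone().json(), explicit token arguments) imply thin fetch-based wrappers that return standard Response objects rather than supertest responses. A minimal sketch of what such a wrapper plausibly looks like; the BASE_URL handling and exact signatures are assumptions inferred from the tests, not code from the repo:

// Hypothetical sketch — names and signatures inferred from the test call sites above.
const BASE_URL = process.env.API_BASE_URL ?? 'http://localhost:3000'; // assumed

export async function loginUser(
  email: string,
  password: string,
  rememberMe: boolean,
): Promise<Response> {
  // Returning the raw fetch Response is what lets tests check response.ok,
  // response.status, and call response.clone().json() without consuming the body.
  return fetch(`${BASE_URL}/api/auth/login`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email, password, rememberMe }),
  });
}

export async function createShoppingList(name: string, token: string): Promise<Response> {
  return fetch(`${BASE_URL}/api/users/shopping-lists`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
    body: JSON.stringify({ name }),
  });
}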
@@ -6,6 +6,7 @@ import { getPool } from '../../services/db/connection.db';
import { logger } from '../../services/logger.server';
import type { UserProfile } from '../../types';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';

describe('Database Service Integration Tests', () => {
  let testUser: UserProfile;
@@ -26,6 +27,13 @@ describe('Database Service Integration Tests', () => {
      { full_name: fullName },
      logger,
    );

    // Poll to ensure the user record is findable before tests run.
    await poll(
      () => db.userRepo.findUserByEmail(testUserEmail, logger),
      (foundUser) => !!foundUser,
      { timeout: 5000, interval: 500, description: `user ${testUserEmail} to be findable` },
    );
  });

  afterEach(async () => {
@@ -1,5 +1,5 @@
// src/tests/integration/flyer-processing.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { describe, it, expect, beforeAll, afterAll, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import fs from 'node:fs/promises';
@@ -11,11 +11,11 @@ import { logger } from '../../services/logger.server';
import type { UserProfile, ExtractedFlyerItem } from '../../types';
import { createAndLoginUser } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { cleanupFiles } from '../utils/cleanupFiles';
import piexif from 'piexifjs';
import exifParser from 'exif-parser';
import sharp from 'sharp';
import { createFlyerAndItems } from '../../services/db/flyer.db';

/**
@@ -39,13 +39,13 @@ vi.mock('../../services/aiService.server', async (importOriginal) => {
  return actual;
});

// Mock the database service to allow for simulating DB failures.
// Mock the main DB service to allow for simulating transaction failures.
// By default, it will use the real implementation.
vi.mock('../../services/db/flyer.db', async (importOriginal) => {
  const actual = await importOriginal<typeof import('../../services/db/flyer.db')>();
vi.mock('../../services/db/index.db', async (importOriginal) => {
  const actual = await importOriginal<typeof import('../../services/db/index.db')>();
  return {
    ...actual,
    createFlyerAndItems: vi.fn().mockImplementation(actual.createFlyerAndItems),
    withTransaction: vi.fn().mockImplementation(actual.withTransaction),
  };
});
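The mock setup above follows Vitest's partial-mock pattern: the module is replaced wholesale, but every export defaults to its real implementation via importOriginal, and only the function of interest is wrapped in vi.fn() so individual tests can swap it out. The generic shape, sketched with placeholder names rather than anything from this repo:

import { vi } from 'vitest';

// Generic shape of the "real by default, overridable per test" pattern used above.
// 'some/module' and doWork are placeholders, not names from this repo.
vi.mock('some/module', async (importOriginal) => {
  const actual = await importOriginal<typeof import('some/module')>();
  return {
    ...actual,
    // Wrapping in vi.fn() keeps the real behaviour but lets a single test override it:
    //   vi.mocked(doWork).mockRejectedValueOnce(new Error('boom'));
    doWork: vi.fn().mockImplementation(actual.doWork),
  };
});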
@@ -55,7 +55,16 @@ describe('Flyer Processing Background Job Integration Test', () => {
  const createdFilePaths: string[] = [];

  beforeAll(async () => {
    // Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
    // FIX: Stub FRONTEND_URL to ensure valid absolute URLs (http://...) are generated
    // for the database, satisfying the 'url_check' constraint.
    vi.stubEnv('FRONTEND_URL', 'http://localhost:3000');
  });

  // FIX: Reset mocks before each test to ensure isolation.
  // This prevents "happy path" mocks from leaking into error handling tests and vice versa.
  beforeEach(async () => {
    // 1. Reset AI Service Mock to default success state
    mockExtractCoreData.mockReset();
    mockExtractCoreData.mockResolvedValue({
      store_name: 'Mock Store',
      valid_from: null,
@@ -71,9 +80,18 @@ describe('Flyer Processing Background Job Integration Test', () => {
        },
      ],
    });

    // 2. Restore DB Service Mock to real implementation
    // This ensures that unless a test specifically mocks a failure, the DB logic works as expected.
    const { withTransaction } = await import('../../services/db/index.db');
    const actualDb = await vi.importActual<typeof import('../../services/db/index.db')>('../../services/db/index.db');
    vi.mocked(withTransaction).mockReset();
    vi.mocked(withTransaction).mockImplementation(actualDb.withTransaction);
  });

  afterAll(async () => {
    vi.unstubAllEnvs(); // Clean up env stubs

    // Use the centralized cleanup utility.
    await cleanupDb({
      userIds: createdUserIds,
@@ -96,7 +114,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
    // This prevents a 409 Conflict error when the second test runs.
    const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
    const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
    const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
    const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track created files for cleanup
@@ -110,6 +128,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
    const uploadReq = request
      .post('/api/ai/upload-and-process')
      .field('checksum', checksum)
      // Pass the baseUrl directly in the form data to ensure the worker receives it,
      // bypassing issues with vi.stubEnv in multi-threaded test environments.
      .field('baseUrl', 'http://localhost:3000')
      .attach('flyerFile', uniqueContent, uniqueFileName);
    if (token) {
      uploadReq.set('Authorization', `Bearer ${token}`);
@@ -120,25 +141,19 @@ describe('Flyer Processing Background Job Integration Test', () => {
    // Assert 1: Check that a job ID was returned.
    expect(jobId).toBeTypeOf('string');

    // Act 2: Poll for the job status until it completes.
    let jobStatus;
    // Poll for up to 210 seconds (70 * 3s). This should be greater than the worker's
    // lockDuration (120s) to patiently wait for long-running jobs.
    const maxRetries = 70;
    for (let i = 0; i < maxRetries; i++) {
      console.log(`Polling attempt ${i + 1}...`);
      await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3 seconds between polls
      const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
      if (token) {
        statusReq.set('Authorization', `Bearer ${token}`);
      }
      const statusResponse = await statusReq;
      jobStatus = statusResponse.body;
      console.log(`Job status: ${JSON.stringify(jobStatus)}`);
      if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
        break;
      }
    }
    // Act 2: Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
        if (token) {
          statusReq.set('Authorization', `Bearer ${token}`);
        }
        const statusResponse = await statusReq;
        return statusResponse.body;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 210000, interval: 3000, description: 'flyer processing' },
    );

    // Assert 2: Check that the job completed successfully.
    if (jobStatus?.state === 'failed') {
@@ -220,7 +235,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
    const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary');

    const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`;
    const mockImageFile = new File([imageWithExifBuffer], uniqueFileName, { type: 'image/jpeg' });
    const mockImageFile = new File([new Uint8Array(imageWithExifBuffer)], uniqueFileName, { type: 'image/jpeg' });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track original and derived files for cleanup
@@ -233,25 +248,24 @@ describe('Flyer Processing Background Job Integration Test', () => {
    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .set('Authorization', `Bearer ${token}`)
      .field('baseUrl', 'http://localhost:3000')
      .field('checksum', checksum)
      .attach('flyerFile', imageWithExifBuffer, uniqueFileName);

    const { jobId } = uploadResponse.body;
    expect(jobId).toBeTypeOf('string');

    // Poll for job completion
    let jobStatus;
    const maxRetries = 60; // Poll for up to 180 seconds
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000));
      const statusResponse = await request
        .get(`/api/ai/jobs/${jobId}/status`)
        .set('Authorization', `Bearer ${token}`);
      jobStatus = statusResponse.body;
      if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
        break;
      }
    }
    // Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusResponse = await request
          .get(`/api/ai/jobs/${jobId}/status`)
          .set('Authorization', `Bearer ${token}`);
        return statusResponse.body;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 180000, interval: 3000, description: 'EXIF stripping job' },
    );

    // 3. Assert
    if (jobStatus?.state === 'failed') {
@@ -306,7 +320,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
      .toBuffer();

    const uniqueFileName = `test-flyer-with-metadata-${Date.now()}.png`;
    const mockImageFile = new File([Buffer.from(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' });
    const mockImageFile = new File([new Uint8Array(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track files for cleanup
@@ -319,25 +333,24 @@ describe('Flyer Processing Background Job Integration Test', () => {
    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .set('Authorization', `Bearer ${token}`)
      .field('baseUrl', 'http://localhost:3000')
      .field('checksum', checksum)
      .attach('flyerFile', imageWithMetadataBuffer, uniqueFileName);

    const { jobId } = uploadResponse.body;
    expect(jobId).toBeTypeOf('string');

    // Poll for job completion
    let jobStatus;
    const maxRetries = 60; // Poll for up to 180 seconds
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000));
      const statusResponse = await request
        .get(`/api/ai/jobs/${jobId}/status`)
        .set('Authorization', `Bearer ${token}`);
      jobStatus = statusResponse.body;
      if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
        break;
      }
    }
    // Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusResponse = await request
          .get(`/api/ai/jobs/${jobId}/status`)
          .set('Authorization', `Bearer ${token}`);
        return statusResponse.body;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 180000, interval: 3000, description: 'PNG metadata stripping job' },
    );

    // 3. Assert job completion
    if (jobStatus?.state === 'failed') {
@@ -369,14 +382,14 @@ it(
  async () => {
    // Arrange: Mock the AI service to throw an error for this specific test.
    const aiError = new Error('AI model failed to extract data.');
    mockExtractCoreData.mockRejectedValueOnce(aiError);
    mockExtractCoreData.mockRejectedValue(aiError);

    // Arrange: Prepare a unique flyer file for upload.
    const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
    const imageBuffer = await fs.readFile(imagePath);
    const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`fail-test-${Date.now()}`)]);
    const uniqueFileName = `ai-fail-test-${Date.now()}.jpg`;
    const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
    const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track created files for cleanup
@@ -386,23 +399,22 @@ it(
    // Act 1: Upload the file to start the background job.
    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .field('baseUrl', 'http://localhost:3000')
      .field('checksum', checksum)
      .attach('flyerFile', uniqueContent, uniqueFileName);

    const { jobId } = uploadResponse.body;
    expect(jobId).toBeTypeOf('string');

    // Act 2: Poll for the job status until it completes or fails.
    let jobStatus;
    const maxRetries = 60;
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000));
      const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
      jobStatus = statusResponse.body;
      if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
        break;
      }
    }
    // Act 2: Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
        return statusResponse.body;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 180000, interval: 3000, description: 'AI failure test job' },
    );

    // Assert 1: Check that the job failed.
    expect(jobStatus?.state).toBe('failed');
@@ -418,16 +430,18 @@ it(
it(
  'should handle a database failure during flyer creation',
  async () => {
    // Arrange: Mock the database creation function to throw an error for this specific test.
    // Arrange: Mock the database transaction function to throw an error.
    // This is a more realistic simulation of a DB failure than mocking the inner createFlyerAndItems function.
    const dbError = new Error('DB transaction failed');
    vi.mocked(createFlyerAndItems).mockRejectedValueOnce(dbError);
    const { withTransaction } = await import('../../services/db/index.db');
    vi.mocked(withTransaction).mockRejectedValue(dbError);

    // Arrange: Prepare a unique flyer file for upload.
    const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
    const imageBuffer = await fs.readFile(imagePath);
    const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`db-fail-test-${Date.now()}`)]);
    const uniqueFileName = `db-fail-test-${Date.now()}.jpg`;
    const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
    const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track created files for cleanup
@@ -437,23 +451,22 @@ it(
    // Act 1: Upload the file to start the background job.
    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .field('baseUrl', 'http://localhost:3000')
      .field('checksum', checksum)
      .attach('flyerFile', uniqueContent, uniqueFileName);

    const { jobId } = uploadResponse.body;
    expect(jobId).toBeTypeOf('string');

    // Act 2: Poll for the job status until it completes or fails.
    let jobStatus;
    const maxRetries = 60;
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000));
      const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
      jobStatus = statusResponse.body;
      if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
        break;
      }
    }
    // Act 2: Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
        return statusResponse.body;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 180000, interval: 3000, description: 'DB failure test job' },
    );

    // Assert 1: Check that the job failed.
    expect(jobStatus?.state).toBe('failed');
@@ -471,7 +484,7 @@ it(
  async () => {
    // Arrange: Mock the AI service to throw an error, causing the job to fail.
    const aiError = new Error('Simulated AI failure for cleanup test.');
    mockExtractCoreData.mockRejectedValueOnce(aiError);
    mockExtractCoreData.mockRejectedValue(aiError);

    // Arrange: Prepare a unique flyer file for upload.
    const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
@@ -481,7 +494,7 @@ it(
      Buffer.from(`cleanup-fail-test-${Date.now()}`),
    ]);
    const uniqueFileName = `cleanup-fail-test-${Date.now()}.jpg`;
    const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
    const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track the path of the file that will be created in the uploads directory.
@@ -492,23 +505,22 @@ it(
    // Act 1: Upload the file to start the background job.
    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .field('baseUrl', 'http://localhost:3000')
      .field('checksum', checksum)
      .attach('flyerFile', uniqueContent, uniqueFileName);

    const { jobId } = uploadResponse.body;
    expect(jobId).toBeTypeOf('string');

    // Act 2: Poll for the job status until it fails.
    let jobStatus;
    const maxRetries = 60;
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000));
      const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
      jobStatus = statusResponse.body;
      if (jobStatus.state === 'failed') {
        break;
      }
    }
    // Act 2: Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
        return statusResponse.body;
      },
      (status) => status.state === 'failed', // We expect this one to fail
      { timeout: 180000, interval: 3000, description: 'file cleanup failure test job' },
    );

    // Assert 1: Check that the job actually failed.
    expect(jobStatus?.state).toBe('failed');
@@ -11,6 +11,7 @@ import * as db from '../../services/db/index.db';
import { cleanupDb } from '../utils/cleanup';
import { logger } from '../../services/logger.server';
import * as imageProcessor from '../../utils/imageProcessor';
import { poll } from '../utils/poll';
import type {
  UserProfile,
  UserAchievement,
@@ -66,6 +67,10 @@ describe('Gamification Flow Integration Test', () => {
    request,
  }));

  // Stub environment variables for URL generation in the background worker.
  // This needs to be in beforeAll to ensure it's set before any code that might use it is imported.
  vi.stubEnv('FRONTEND_URL', 'http://localhost:3001');

  // Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
  mockExtractCoreData.mockResolvedValue({
    store_name: 'Gamification Test Store',
@@ -96,16 +101,12 @@ describe('Gamification Flow Integration Test', () => {
  it(
    'should award the "First Upload" achievement after a user successfully uploads and processes their first flyer',
    async () => {
      // --- Arrange: Stub environment variables for URL generation in the background worker ---
      const testBaseUrl = 'http://localhost:3001'; // Use a fixed port for predictability
      vi.stubEnv('FRONTEND_URL', testBaseUrl);

      // --- Arrange: Prepare a unique flyer file for upload ---
      const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
      const uniqueFileName = `gamification-test-flyer-${Date.now()}.jpg`;
      const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
      const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
      const checksum = await generateFileChecksum(mockImageFile);

      // Track created files for cleanup
@@ -124,20 +125,19 @@ describe('Gamification Flow Integration Test', () => {
      const { jobId } = uploadResponse.body;
      expect(jobId).toBeTypeOf('string');

      // --- Act 2: Poll for job completion ---
      let jobStatus;
      const maxRetries = 60; // Poll for up to 180 seconds
      for (let i = 0; i < maxRetries; i++) {
        await new Promise((resolve) => setTimeout(resolve, 3000));
        const statusResponse = await request
          .get(`/api/ai/jobs/${jobId}/status`)
          .set('Authorization', `Bearer ${authToken}`);
        jobStatus = statusResponse.body;
        if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
          break;
        }
      }
      if (!jobStatus) {
      // --- Act 2: Poll for job completion using the new utility ---
      const jobStatus = await poll(
        async () => {
          const statusResponse = await request
            .get(`/api/ai/jobs/${jobId}/status`)
            .set('Authorization', `Bearer ${authToken}`);
          return statusResponse.body;
        },
        (status) => status.state === 'completed' || status.state === 'failed',
        { timeout: 180000, interval: 3000, description: 'gamification flyer processing' },
      );

      if (!jobStatus) {
        console.error('[DEBUG] Gamification test job timed out: No job status received.');
        throw new Error('Gamification test job timed out: No job status received.');
      }
@@ -187,8 +187,6 @@ describe('Gamification Flow Integration Test', () => {
        firstUploadAchievement!.points_value,
      );

      // --- Cleanup ---
      vi.unstubAllEnvs();
    },
    240000, // Increase timeout to 240s to match other long-running processing tests
  );
@@ -196,10 +194,6 @@ describe('Gamification Flow Integration Test', () => {
  describe('Legacy Flyer Upload', () => {
    it('should process a legacy upload and save fully qualified URLs to the database', async () => {
      // --- Arrange ---
      // 1. Stub environment variables to have a predictable base URL for the test.
      const testBaseUrl = 'https://cdn.example.com';
      vi.stubEnv('FRONTEND_URL', testBaseUrl);

      // 2. Mock the icon generator to return a predictable filename.
      vi.mocked(imageProcessor.generateFlyerIcon).mockResolvedValue('legacy-icon.webp');

@@ -207,7 +201,7 @@ describe('Gamification Flow Integration Test', () => {
      const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueFileName = `legacy-upload-test-${Date.now()}.jpg`;
      const mockImageFile = new File([imageBuffer], uniqueFileName, { type: 'image/jpeg' });
      const mockImageFile = new File([new Uint8Array(imageBuffer)], uniqueFileName, { type: 'image/jpeg' });
      const checksum = await generateFileChecksum(mockImageFile);

      // Track created files for cleanup.
@@ -257,11 +251,9 @@ describe('Gamification Flow Integration Test', () => {
      createdStoreIds.push(savedFlyer.store_id!); // Add for cleanup.

      // 8. Assert that the URLs are fully qualified.
      expect(savedFlyer.image_url).to.equal(`${testBaseUrl}/flyer-images/${uniqueFileName}`);
      expect(savedFlyer.icon_url).to.equal(`${testBaseUrl}/flyer-images/icons/legacy-icon.webp`);

      // --- Cleanup ---
      vi.unstubAllEnvs();
      expect(savedFlyer.image_url).to.equal(newFlyer.image_url);
      expect(savedFlyer.icon_url).to.equal(newFlyer.icon_url);
      expect(newFlyer.image_url).toContain('http://localhost:3001/flyer-images/');
    });
  });
});
@@ -13,6 +13,7 @@ import type {
} from '../../types';
import { getPool } from '../../services/db/connection.db';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { createAndLoginUser } from '../utils/testHelpers';

/**
@@ -42,27 +43,12 @@ describe('Public API Routes Integration Tests', () => {
    });
    testUser = createdUser;

    // DEBUG: Verify user existence in DB
    console.log(`[DEBUG] createAndLoginUser returned user ID: ${testUser.user.user_id}`);
    const userCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
    console.log(`[DEBUG] DB check for user found ${userCheck.rowCount ?? 0} rows.`);
    if (!userCheck.rowCount) {
      console.error(`[DEBUG] CRITICAL: User ${testUser.user.user_id} does not exist in public.users table! Attempting to wait...`);
      // Wait loop to ensure user persistence if there's a race condition
      for (let i = 0; i < 5; i++) {
        await new Promise((resolve) => setTimeout(resolve, 500));
        const retryCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
        if (retryCheck.rowCount && retryCheck.rowCount > 0) {
          console.log(`[DEBUG] User found after retry ${i + 1}`);
          break;
        }
      }
    }
    // Final check before proceeding to avoid FK error
    const finalCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
    if (!finalCheck.rowCount) {
      throw new Error(`User ${testUser.user.user_id} failed to persist in DB. Cannot continue test.`);
    }
    // Poll to ensure the user record has propagated before creating dependent records.
    await poll(
      () => pool.query('SELECT 1 FROM public.users WHERE user_id = $1', [testUser.user.user_id]),
      (result) => (result.rowCount ?? 0) > 0,
      { timeout: 5000, interval: 500, description: `user ${testUser.user.user_id} to persist` },
    );

    // Create a recipe
    const recipeRes = await pool.query(
@@ -5,6 +5,7 @@ import * as bcrypt from 'bcrypt';
import { getPool } from '../../services/db/connection.db';
import type { ShoppingList } from '../../types';
import { logger } from '../../services/logger.server';
import { poll } from '../utils/poll';

describe('Shopping List DB Service Tests', () => {
  it('should create and retrieve a shopping list for a user', async ({ onTestFinished }) => {
@@ -19,6 +20,12 @@ describe('Shopping List DB Service Tests', () => {
    );
    const testUserId = userprofile.user.user_id;

    // Poll to ensure the user record has propagated before creating dependent records.
    await poll(
      () => getPool().query('SELECT 1 FROM public.users WHERE user_id = $1', [testUserId]),
      (result) => (result.rowCount ?? 0) > 0,
      { timeout: 5000, interval: 500, description: `user ${testUserId} to persist` },
    );
    onTestFinished(async () => {
      await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUserId]);
    });
@@ -51,6 +58,13 @@ describe('Shopping List DB Service Tests', () => {
    );
    const testUserId = userprofile.user.user_id;

    // Poll to ensure the user record has propagated before creating dependent records.
    await poll(
      () => getPool().query('SELECT 1 FROM public.users WHERE user_id = $1', [testUserId]),
      (result) => (result.rowCount ?? 0) > 0,
      { timeout: 5000, interval: 500, description: `user ${testUserId} to persist` },
    );

    onTestFinished(async () => {
      await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUserId]);
    });
@@ -1,12 +1,15 @@
// src/tests/integration/user.integration.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import supertest from 'supertest';
import path from 'path';
import fs from 'node:fs/promises';
import app from '../../../server';
import { logger } from '../../services/logger.server';
import { getPool } from '../../services/db/connection.db';
import type { UserProfile, MasterGroceryItem, ShoppingList } from '../../types';
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { cleanupFiles } from '../utils/cleanupFiles';

/**
 * @vitest-environment node
@@ -33,6 +36,25 @@ describe('User API Routes Integration Tests', () => {
  // This now cleans up ALL users created by this test suite to prevent pollution.
  afterAll(async () => {
    await cleanupDb({ userIds: createdUserIds });

    // Safeguard to clean up any avatar files created during tests.
    const uploadDir = path.resolve(__dirname, '../../../uploads/avatars');
    try {
      const allFiles = await fs.readdir(uploadDir);
      // Filter for any file that contains any of the user IDs created in this test suite.
      const testFiles = allFiles
        .filter((f) => createdUserIds.some((userId) => userId && f.includes(userId)))
        .map((f) => path.join(uploadDir, f));

      if (testFiles.length > 0) {
        await cleanupFiles(testFiles);
      }
    } catch (error) {
      // Ignore if the directory doesn't exist, but log other errors.
      if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
        console.error('Error during user integration test avatar file cleanup:', error);
      }
    }
  });

  it('should fetch the authenticated user profile via GET /api/users/profile', async () => {
@@ -295,4 +317,64 @@ describe('User API Routes Integration Tests', () => {
    );
  });
});

  it('should allow a user to upload an avatar image and update their profile', async () => {
    // Arrange: Path to a dummy image file
    const dummyImagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');

    // Act: Make the POST request to upload the avatar
    const response = await request
      .post('/api/users/profile/avatar')
      .set('Authorization', `Bearer ${authToken}`)
      .attach('avatar', dummyImagePath);

    // Assert: Check the response
    expect(response.status).toBe(200);
    const updatedProfile = response.body;
    expect(updatedProfile.avatar_url).toBeDefined();
    expect(updatedProfile.avatar_url).not.toBeNull();
    expect(updatedProfile.avatar_url).toContain('/uploads/avatars/test-avatar');

    // Assert (Verification): Fetch the profile again to ensure the change was persisted
    const verifyResponse = await request
      .get('/api/users/profile')
      .set('Authorization', `Bearer ${authToken}`);
    const refetchedProfile = verifyResponse.body;
    expect(refetchedProfile.avatar_url).toBe(updatedProfile.avatar_url);
  });

  it('should reject avatar upload for an invalid file type', async () => {
    // Arrange: Create a buffer representing a text file.
    const invalidFileBuffer = Buffer.from('This is not an image file.');
    const invalidFileName = 'test.txt';

    // Act: Attempt to upload the text file to the avatar endpoint.
    const response = await request
      .post('/api/users/profile/avatar')
      .set('Authorization', `Bearer ${authToken}`)
      .attach('avatar', invalidFileBuffer, invalidFileName);

    // Assert: Check for a 400 Bad Request response.
    // This error comes from the multer fileFilter configuration in the route.
    expect(response.status).toBe(400);
    expect(response.body.message).toBe('Only image files are allowed!');
  });

  it('should reject avatar upload for a file that is too large', async () => {
    // Arrange: Create a buffer larger than the configured limit (e.g., > 1MB).
    // The limit is set in the multer middleware in `user.routes.ts`.
    // We'll create a 2MB buffer to be safe.
    const largeFileBuffer = Buffer.alloc(2 * 1024 * 1024, 'a');
    const largeFileName = 'large-avatar.jpg';

    // Act: Attempt to upload the large file.
    const response = await request
      .post('/api/users/profile/avatar')
      .set('Authorization', `Bearer ${authToken}`)
      .attach('avatar', largeFileBuffer, largeFileName);

    // Assert: Check for a 400 Bad Request response from the multer error handler.
    expect(response.status).toBe(400);
    expect(response.body.message).toBe('File upload error: File too large');
  });
});
@@ -1,85 +1,57 @@
// src/tests/utils/cleanup.ts
import { getPool } from '../../services/db/connection.db';
import { logger } from '../../services/logger.server';
import fs from 'node:fs/promises';
import path from 'path';

export interface TestResourceIds {
  userIds?: string[];
  flyerIds?: number[];
  storeIds?: number[];
  recipeIds?: number[];
  masterItemIds?: number[];
  budgetIds?: number[];
interface CleanupOptions {
  userIds?: (string | null | undefined)[];
  flyerIds?: (number | null | undefined)[];
  storeIds?: (number | null | undefined)[];
  recipeIds?: (number | null | undefined)[];
  budgetIds?: (number | null | undefined)[];
}

/**
 * A robust cleanup utility for integration tests.
 * It deletes entities in the correct order to avoid foreign key violations.
 * It's designed to be called in an `afterAll` hook.
 *
 * @param ids An object containing arrays of IDs for each resource type to clean up.
 * A centralized utility to clean up database records created during tests.
 * It deletes records in an order that respects foreign key constraints.
 * It performs operations on a single client connection but does not use a
 * transaction, ensuring that a failure to delete from one table does not
 * prevent cleanup attempts on others.
 */
export const cleanupDb = async (ids: TestResourceIds) => {
export const cleanupDb = async (options: CleanupOptions) => {
  const pool = getPool();
  logger.info('[Test Cleanup] Starting database resource cleanup...');

  const {
    userIds = [],
    flyerIds = [],
    storeIds = [],
    recipeIds = [],
    masterItemIds = [],
    budgetIds = [],
  } = ids;
  const client = await pool.connect();

  try {
    // --- Stage 1: Delete most dependent records ---
    // These records depend on users, recipes, flyers, etc.
    if (userIds.length > 0) {
      await pool.query('DELETE FROM public.recipe_comments WHERE user_id = ANY($1::uuid[])', [userIds]);
      await pool.query('DELETE FROM public.suggested_corrections WHERE user_id = ANY($1::uuid[])', [userIds]);
      await pool.query('DELETE FROM public.shopping_lists WHERE user_id = ANY($1::uuid[])', [userIds]); // Assumes shopping_list_items cascades
      await pool.query('DELETE FROM public.user_watched_items WHERE user_id = ANY($1::uuid[])', [userIds]);
      await pool.query('DELETE FROM public.user_achievements WHERE user_id = ANY($1::uuid[])', [userIds]);
      await pool.query('DELETE FROM public.activity_log WHERE user_id = ANY($1::uuid[])', [userIds]);
    // Order of deletion matters to avoid foreign key violations.
    // Children entities first, then parents.

    if (options.budgetIds?.filter(Boolean).length) {
      await client.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [options.budgetIds]);
      logger.debug(`Cleaned up ${options.budgetIds.length} budget(s).`);
    }

    // --- Stage 2: Delete parent records that other things depend on ---
    if (recipeIds.length > 0) {
      await pool.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [recipeIds]);
    if (options.recipeIds?.filter(Boolean).length) {
      await client.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [options.recipeIds]);
      logger.debug(`Cleaned up ${options.recipeIds.length} recipe(s).`);
    }

    // Flyers might be created by users, but we clean them up separately.
    // flyer_items should cascade from this.
    if (flyerIds.length > 0) {
      await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::bigint[])', [flyerIds]);
    if (options.flyerIds?.filter(Boolean).length) {
      await client.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::int[])', [options.flyerIds]);
      logger.debug(`Cleaned up ${options.flyerIds.length} flyer(s).`);
    }

    // Stores are parents of flyers, so they come after.
    if (storeIds.length > 0) {
      await pool.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [storeIds]);
    if (options.storeIds?.filter(Boolean).length) {
      await client.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [options.storeIds]);
      logger.debug(`Cleaned up ${options.storeIds.length} store(s).`);
    }

    // Master items are parents of flyer_items and watched_items.
    if (masterItemIds.length > 0) {
      await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = ANY($1::int[])', [masterItemIds]);
    if (options.userIds?.filter(Boolean).length) {
      await client.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [options.userIds]);
      logger.debug(`Cleaned up ${options.userIds.length} user(s).`);
    }

    // Budgets are parents of nothing, but depend on users.
    if (budgetIds.length > 0) {
      await pool.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [budgetIds]);
    }

    // --- Stage 3: Delete the root user records ---
    if (userIds.length > 0) {
      const { rowCount } = await pool.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]);
      logger.info(`[Test Cleanup] Cleaned up ${rowCount} user(s).`);
    }

    logger.info('[Test Cleanup] Finished database resource cleanup successfully.');
  } catch (error) {
    logger.error({ error }, '[Test Cleanup] CRITICAL: An error occurred during database cleanup.');
    throw error; // Re-throw to fail the test suite
    logger.error({ error, options }, 'A database cleanup operation failed.');
  } finally {
    client.release();
  }
};
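Because the new signature tolerates null/undefined entries, callers can pass their tracking arrays directly without filtering first. A usage sketch mirroring the suites above (the ID arrays stand in for whatever a suite accumulated during its run):

import { afterAll } from 'vitest';

// Typical afterAll hook; createdUserIds etc. are suite-level tracking arrays.
afterAll(async () => {
  await cleanupDb({
    userIds: createdUserIds,   // (string | null | undefined)[] is accepted as-is
    flyerIds: createdFlyerIds,
    storeIds: createdStoreIds,
  });
});

Skipping the transaction is deliberate: each DELETE runs independently on the shared client, so one failing statement is logged but does not abort the remaining cleanup.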
@@ -1,48 +1,30 @@
// src/tests/utils/cleanupFiles.ts
import fs from 'node:fs/promises';
import path from 'path';
import { logger } from '../../services/logger.server';

/**
 * Safely cleans up files from the filesystem.
 * Designed to be used in `afterAll` or `afterEach` hooks in integration tests.
 *
 * @param filePaths An array of file paths to clean up.
 * A centralized utility to clean up files created during tests.
 * It iterates through a list of file paths and attempts to delete each one.
 * It gracefully handles errors for files that don't exist (e.g., already deleted
 * or never created due to a test failure).
 */
export const cleanupFiles = async (filePaths: string[]) => {
  if (!filePaths || filePaths.length === 0) {
    logger.info('[Test Cleanup] No file paths provided for cleanup.');
export const cleanupFiles = async (filePaths: (string | undefined | null)[]) => {
  const validPaths = filePaths.filter((p): p is string => !!p);
  if (validPaths.length === 0) {
    return;
  }

  logger.info(`[Test Cleanup] Starting filesystem cleanup for ${filePaths.length} file(s)...`);
  logger.debug(`Cleaning up ${validPaths.length} test-created file(s)...`);

  try {
    await Promise.all(
      filePaths.map(async (filePath) => {
        try {
          await fs.unlink(filePath);
          logger.debug(`[Test Cleanup] Successfully deleted file: ${filePath}`);
        } catch (err: any) {
          // Ignore "file not found" errors, but log other errors.
          if (err.code === 'ENOENT') {
            logger.debug(`[Test Cleanup] File not found, skipping: ${filePath}`);
          } else {
            logger.warn(
              { err, filePath },
              '[Test Cleanup] Failed to clean up file from filesystem.',
            );
          }
        }
      }),
    );
  const cleanupPromises = validPaths.map(async (filePath) => {
    try {
      await fs.unlink(filePath);
    } catch (error) {
      if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
        logger.error({ error, filePath }, 'Failed to delete test file during cleanup.');
      }
    }
  });

    logger.info('[Test Cleanup] Finished filesystem cleanup successfully.');
  } catch (error) {
    logger.error(
      { error },
      '[Test Cleanup] CRITICAL: An error occurred during filesystem cleanup.',
    );
    throw error; // Re-throw to fail the test suite if cleanup fails
  }
  await Promise.allSettled(cleanupPromises);
};
src/tests/utils/poll.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
// src/tests/utils/poll.ts

interface PollOptions {
  /** The maximum time to wait in milliseconds. Defaults to 10000 (10 seconds). */
  timeout?: number;
  /** The interval between attempts in milliseconds. Defaults to 500. */
  interval?: number;
  /** A description of the operation for better error messages. */
  description?: string;
}

/**
 * A generic polling utility for asynchronous operations in tests.
 *
 * @param fn The async function to execute on each attempt.
 * @param validate A function that returns `true` if the result is satisfactory, ending the poll.
 * @param options Polling options like timeout and interval.
 * @returns A promise that resolves with the first valid result from `fn`.
 * @throws An error if the timeout is reached before `validate` returns `true`.
 */
export async function poll<T>(
  fn: () => Promise<T>,
  validate: (result: T) => boolean,
  options: PollOptions = {},
): Promise<T> {
  const { timeout = 10000, interval = 500, description = 'operation' } = options;
  const startTime = Date.now();

  while (Date.now() - startTime < timeout) {
    const result = await fn();
    if (validate(result)) return result;
    await new Promise((resolve) => setTimeout(resolve, interval));
  }

  throw new Error(`Polling timed out for ${description} after ${timeout}ms.`);
}
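A quick illustration of the contract: poll keeps calling fn until validate accepts the result or the timeout elapses, then throws an error carrying the supplied description. A minimal sketch, with a hypothetical status producer standing in for a real API call:

// Hypothetical status producer for illustration: reports 'active' twice, then 'completed'.
let attempts = 0;
const fetchJobState = async () => ({ state: ++attempts < 3 ? 'active' : 'completed' });

// Resolves with the first result that passes the predicate, or throws after 5s.
const status = await poll(
  fetchJobState,
  (s) => s.state === 'completed' || s.state === 'failed',
  { timeout: 5000, interval: 250, description: 'job to reach a terminal state' },
);
// status.state === 'completed' here; total wait was roughly 2 * 250ms.

One caveat worth knowing: because the final call to fn can begin just before the deadline, total wall time can slightly exceed the configured timeout.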
@@ -1,9 +1,15 @@
|
||||
// src/utils/dateUtils.test.ts
|
||||
import { describe, it, expect, vi, afterEach, beforeEach } from 'vitest';
|
||||
import { getSimpleWeekAndYear } from './dateUtils';
|
||||
import {
|
||||
calculateSimpleWeekAndYear,
|
||||
formatShortDate,
|
||||
calculateDaysBetween,
|
||||
formatDateRange,
|
||||
getCurrentDateISOString,
|
||||
} from './dateUtils';
|
||||
|
||||
describe('dateUtils', () => {
|
||||
describe('getSimpleWeekAndYear', () => {
|
||||
describe('calculateSimpleWeekAndYear', () => {
|
||||
beforeEach(() => {
|
||||
// Use fake timers to control the current date in tests
|
||||
vi.useFakeTimers();
|
||||
@@ -16,35 +22,35 @@ describe('dateUtils', () => {
|
||||
|
||||
it('should return week 1 for the first day of the year', () => {
|
||||
const date = new Date('2024-01-01T12:00:00Z');
|
||||
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
|
||||
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
|
||||
});
|
||||
|
||||
it('should return week 1 for the 7th day of the year', () => {
|
||||
const date = new Date('2024-01-07T12:00:00Z');
|
||||
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
|
||||
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
|
||||
});
|
||||
|
||||
it('should return week 2 for the 8th day of the year', () => {
|
||||
const date = new Date('2024-01-08T12:00:00Z');
|
||||
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 2 });
|
||||
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 2 });
|
||||
});
|
||||
|
||||
it('should correctly calculate the week for a date in the middle of the year', () => {
|
||||
// July 1st is the 183rd day of a leap year. 182 / 7 = 26. So it's week 27.
|
||||
const date = new Date('2024-07-01T12:00:00Z');
|
||||
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 27 });
|
||||
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 27 });
|
||||
});
|
||||
|
||||
it('should correctly calculate the week for the last day of a non-leap year', () => {
|
||||
// Dec 31, 2023 is the 365th day. 364 / 7 = 52. So it's week 53.
|
||||
const date = new Date('2023-12-31T12:00:00Z');
|
||||
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2023, week: 53 });
|
||||
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2023, week: 53 });
|
||||
});
|
||||
|
||||
it('should correctly calculate the week for the last day of a leap year', () => {
|
||||
// Dec 31, 2024 is the 366th day. 365 / 7 = 52.14. floor(52.14) + 1 = 53.
|
||||
const date = new Date('2024-12-31T12:00:00Z');
|
||||
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 53 });
|
||||
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 53 });
|
||||
});
|
||||
|
||||
it('should use the current date if no date is provided', () => {
|
||||
@@ -52,7 +58,172 @@ describe('dateUtils', () => {
|
||||
vi.setSystemTime(fakeCurrentDate);
|
||||
|
||||
// 40 / 7 = 5.71. floor(5.71) + 1 = 6.
|
||||
expect(getSimpleWeekAndYear()).toEqual({ year: 2025, week: 6 });
|
||||
expect(calculateSimpleWeekAndYear()).toEqual({ year: 2025, week: 6 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatShortDate', () => {
|
||||
it('should format a valid YYYY-MM-DD date string correctly', () => {
|
||||
expect(formatShortDate('2024-07-26')).toBe('Jul 26');
|
||||
});
|
||||
|
||||
it('should handle single-digit days correctly', () => {
|
||||
expect(formatShortDate('2025-01-05')).toBe('Jan 5');
|
||||
});
|
||||
|
||||
it('should handle dates at the end of the year', () => {
|
||||
expect(formatShortDate('2023-12-31')).toBe('Dec 31');
|
||||
});
|
||||
|
||||
it('should return null for a null input', () => {
|
||||
expect(formatShortDate(null)).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for an undefined input', () => {
|
||||
expect(formatShortDate(undefined)).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for an empty string input', () => {
|
||||
expect(formatShortDate('')).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for an invalid date string', () => {
|
||||
expect(formatShortDate('not-a-real-date')).toBeNull();
|
||||
});
|
||||
|
||||
it('should return null for a malformed date string', () => {
|
||||
expect(formatShortDate('2024-13-01')).toBeNull(); // Invalid month
|
||||
});
|
||||
|
||||
it('should correctly format a full ISO string with time and timezone', () => {
|
||||
expect(formatShortDate('2024-12-25T10:00:00Z')).toBe('Dec 25');
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateDaysBetween', () => {
|
||||
it('should calculate the difference in days between two valid date strings', () => {
|
||||
expect(calculateDaysBetween('2023-01-01', '2023-01-05')).toBe(4);
|
||||
});
|
||||
|
||||
it('should return a negative number if the end date is before the start date', () => {
|
||||
expect(calculateDaysBetween('2023-01-05', '2023-01-01')).toBe(-4);
|
||||
});
|
||||
|
||||
it('should handle Date objects', () => {
|
||||
const start = new Date('2023-01-01');
|
||||
const end = new Date('2023-01-10');
|
||||
      expect(calculateDaysBetween(start, end)).toBe(9);
    });

    it('should return null if either date is null or undefined', () => {
      expect(calculateDaysBetween(null, '2023-01-01')).toBeNull();
      expect(calculateDaysBetween('2023-01-01', undefined)).toBeNull();
    });

    it('should return null if either date is invalid', () => {
      expect(calculateDaysBetween('invalid', '2023-01-01')).toBeNull();
      expect(calculateDaysBetween('2023-01-01', 'invalid')).toBeNull();
    });
  });

  describe('formatDateRange', () => {
    it('should format a range with two different valid dates', () => {
      expect(formatDateRange('2023-01-01', '2023-01-05')).toBe('Jan 1 - Jan 5');
    });

    it('should format a range with the same start and end date as a single date', () => {
      expect(formatDateRange('2023-01-01', '2023-01-01')).toBe('Jan 1');
    });

    it('should return only the start date if end date is missing', () => {
      expect(formatDateRange('2023-01-01', null)).toBe('Jan 1');
      expect(formatDateRange('2023-01-01', undefined)).toBe('Jan 1');
    });

    it('should return only the end date if start date is missing', () => {
      expect(formatDateRange(null, '2023-01-05')).toBe('Jan 5');
      expect(formatDateRange(undefined, '2023-01-05')).toBe('Jan 5');
    });

    it('should return null if both dates are missing or invalid', () => {
      expect(formatDateRange(null, null)).toBeNull();
      expect(formatDateRange(undefined, undefined)).toBeNull();
      expect(formatDateRange('invalid', 'invalid')).toBeNull();
    });

    it('should handle one valid and one invalid date by showing only the valid one', () => {
      expect(formatDateRange('2023-01-01', 'invalid')).toBe('Jan 1');
      expect(formatDateRange('invalid', '2023-01-05')).toBe('Jan 5');
    });

    it('should handle empty strings as invalid dates', () => {
      expect(formatDateRange('', '2023-01-05')).toBe('Jan 5');
      expect(formatDateRange('2023-01-05', '')).toBe('Jan 5');
      expect(formatDateRange('', '')).toBeNull();
    });

    it('should handle garbage strings as invalid dates', () => {
      expect(formatDateRange('garbage', '2023-01-05')).toBe('Jan 5');
      expect(formatDateRange('2023-01-05', 'garbage')).toBe('Jan 5');
    });

    it('should handle start date being after end date (raw format)', () => {
      // The function currently doesn't validate order; it just formats what it's given.
      // This test ensures it doesn't crash or behave unexpectedly.
      expect(formatDateRange('2023-02-01', '2023-01-01')).toBe('Feb 1 - Jan 1');
    });

    it('should handle dates with time components correctly', () => {
      // parseISO should handle the time component and formatShortDate should strip it.
      expect(formatDateRange('2023-01-01T10:00:00', '2023-01-05T15:30:00')).toBe(
        'Jan 1 - Jan 5',
      );
    });

    describe('verbose mode', () => {
      it('should format a range with two different valid dates verbosely', () => {
        expect(formatDateRange('2023-01-01', '2023-01-05', { verbose: true })).toBe(
          'Deals valid from January 1, 2023 to January 5, 2023',
        );
      });

      it('should format a range with the same start and end date verbosely', () => {
        expect(formatDateRange('2023-01-01', '2023-01-01', { verbose: true })).toBe(
          'Valid on January 1, 2023',
        );
      });

      it('should format only the start date verbosely', () => {
        expect(formatDateRange('2023-01-01', null, { verbose: true })).toBe(
          'Deals start January 1, 2023',
        );
      });

      it('should format only the end date verbosely', () => {
        expect(formatDateRange(null, '2023-01-05', { verbose: true })).toBe(
          'Deals end January 5, 2023',
        );
      });

      it('should handle one valid and one invalid date verbosely', () => {
        expect(formatDateRange('2023-01-01', 'invalid', { verbose: true })).toBe(
          'Deals start January 1, 2023',
        );
      });

      it('should handle start date being after end date verbosely', () => {
        expect(formatDateRange('2023-02-01', '2023-01-01', { verbose: true })).toBe(
          'Deals valid from February 1, 2023 to January 1, 2023',
        );
      });
    });
  });

  describe('getCurrentDateISOString', () => {
    it('should return the current date in YYYY-MM-DD format', () => {
      const fakeDate = new Date('2025-12-25T10:00:00Z');
      vi.setSystemTime(fakeDate);
      expect(getCurrentDateISOString()).toBe('2025-12-25');
    });
  });
});
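Note: vi.setSystemTime above is normally paired with fake-timer lifecycle hooks, since depending on the Vitest version it only controls `new Date()` while fake timers are active. A minimal sketch of that setup (the hook placement is an assumption; the suite's actual hooks aren't shown in this diff):

// Hypothetical hooks supporting the getCurrentDateISOString test above.
import { beforeEach, afterEach, vi } from 'vitest';

beforeEach(() => {
  vi.useFakeTimers(); // let vi.setSystemTime control what `new Date()` returns
});

afterEach(() => {
  vi.useRealTimers(); // restore the real clock so other tests are unaffected
});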
@@ -1,4 +1,5 @@
// src/utils/dateUtils.ts
import { parseISO, format, isValid, differenceInDays } from 'date-fns';

/**
 * Calculates the current year and a simplified week number.
@@ -7,16 +8,93 @@
 * For true ISO 8601 week numbers (where week 1 is the first week with a Thursday),
 * a dedicated library like `date-fns` (`getISOWeek` and `getISOWeekYear`) is recommended.
 *
 * @param date The date to calculate the week for. Defaults to the current date.
 * @param date The date to calculate the simple week for. Defaults to the current date.
 * @returns An object containing the year and week number.
 */
export function getSimpleWeekAndYear(date: Date = new Date()): { year: number; week: number } {
export function calculateSimpleWeekAndYear(date: Date = new Date()): { year: number; week: number } {
  const year = date.getFullYear();
  const startOfYear = new Date(year, 0, 1);
  // Calculate the difference in days from the start of the year (day 0 to 364/365)
  const dayOfYear = (date.getTime() - startOfYear.getTime()) / (1000 * 60 * 60 * 24);
  // Use UTC dates to calculate the difference in days.
  // This avoids issues with Daylight Saving Time (DST) where a day might have 23 or 25 hours,
  // which can cause the millisecond-based calculation to be slightly off (e.g., 149.96 days).
  const startOfYear = Date.UTC(year, 0, 1);
  const current = Date.UTC(year, date.getMonth(), date.getDate());
  const msPerDay = 1000 * 60 * 60 * 24;
  const dayOfYear = (current - startOfYear) / msPerDay;
  // Divide by 7, take the floor to get the zero-based week, and add 1 for a one-based week number.
  const week = Math.floor(dayOfYear / 7) + 1;

  return { year, week };
}

export const formatShortDate = (dateString: string | null | undefined): string | null => {
  if (!dateString) return null;
  // Using `parseISO` from date-fns is more reliable than `new Date()` for YYYY-MM-DD strings.
  // It correctly interprets the string as a local date, avoiding timezone-related "off-by-one" errors.
  const date = parseISO(dateString);
  if (isValid(date)) {
    return format(date, 'MMM d');
  }
  return null;
};

export const calculateDaysBetween = (
  startDate: string | Date | null | undefined,
  endDate: string | Date | null | undefined,
): number | null => {
  if (!startDate || !endDate) return null;

  const start = typeof startDate === 'string' ? parseISO(startDate) : startDate;
  const end = typeof endDate === 'string' ? parseISO(endDate) : endDate;

  if (!isValid(start) || !isValid(end)) return null;

  return differenceInDays(end, start);
};

interface DateRangeOptions {
  verbose?: boolean;
}

export const formatDateRange = (
  startDate: string | null | undefined,
  endDate: string | null | undefined,
  options?: DateRangeOptions,
): string | null => {
  if (!options?.verbose) {
    const start = formatShortDate(startDate);
    const end = formatShortDate(endDate);

    if (start && end) {
      return start === end ? start : `${start} - ${end}`;
    }
    return start || end || null;
  }

  // Verbose format logic
  const dateFormat = 'MMMM d, yyyy';
  const formatFn = (dateStr: string | null | undefined) => {
    if (!dateStr) return null;
    const date = parseISO(dateStr);
    return isValid(date) ? format(date, dateFormat) : null;
  };

  const start = formatFn(startDate);
  const end = formatFn(endDate);

  if (start && end) {
    return start === end ? `Valid on ${start}` : `Deals valid from ${start} to ${end}`;
  }
  if (start) return `Deals start ${start}`;
  if (end) return `Deals end ${end}`;
  return null;
};

/**
 * Returns the current date as an ISO 8601 string (YYYY-MM-DD).
 * Useful for getting "today" without the time component.
 */
export const getCurrentDateISOString = (): string => {
  return format(new Date(), 'yyyy-MM-dd');
};

export { calculateSimpleWeekAndYear as getSimpleWeekAndYear };
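The UTC-based day count above matters in any timezone that observes DST: a local-time subtraction picks up the missing or extra hour, while Date.UTC does not. A small illustration (assumes a DST-observing zone such as America/New_York; exact fractions vary by zone):

// Local math: Jan 1 is UTC-5 but Jun 1 is UTC-4, so the difference is 151 days minus 1 hour.
const localDiff =
  (new Date(2023, 5, 1).getTime() - new Date(2023, 0, 1).getTime()) / 86_400_000; // 150.9583...
// UTC math: always a whole number of days.
const utcDiff = (Date.UTC(2023, 5, 1) - Date.UTC(2023, 0, 1)) / 86_400_000; // exactly 151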
33
src/utils/formatUtils.test.ts
Normal file
@@ -0,0 +1,33 @@
// src/utils/formatUtils.test.ts
import { describe, it, expect } from 'vitest';
import { formatCurrency } from './formatUtils';

describe('formatCurrency', () => {
  it('should format a positive integer of cents correctly', () => {
    expect(formatCurrency(199)).toBe('$1.99');
  });

  it('should format a larger number of cents correctly', () => {
    expect(formatCurrency(12345)).toBe('$123.45');
  });

  it('should handle single-digit cents correctly', () => {
    expect(formatCurrency(5)).toBe('$0.05');
  });

  it('should handle zero cents correctly', () => {
    expect(formatCurrency(0)).toBe('$0.00');
  });

  it('should return "N/A" for a null input', () => {
    expect(formatCurrency(null)).toBe('N/A');
  });

  it('should return "N/A" for an undefined input', () => {
    expect(formatCurrency(undefined)).toBe('N/A');
  });

  it('should handle negative cents correctly', () => {
    expect(formatCurrency(-500)).toBe('-$5.00');
  });
});
14
src/utils/formatUtils.ts
Normal file
@@ -0,0 +1,14 @@
// src/utils/formatUtils.ts

/**
 * Formats a numeric value in cents into a currency string (e.g., $4.99).
 * Uses Intl.NumberFormat with the en-US locale and USD currency.
 *
 * @param amountInCents The amount in cents to format. Can be null or undefined.
 * @returns A formatted currency string (e.g., "$4.99"), or 'N/A' if the input is null/undefined.
 */
export const formatCurrency = (amountInCents: number | null | undefined): string => {
  if (amountInCents === null || amountInCents === undefined) return 'N/A';

  return new Intl.NumberFormat('en-US', { style: 'currency', currency: 'USD' }).format(amountInCents / 100);
};
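Because the helper takes integer cents and converts to dollars only at display time, callers avoid floating-point drift in stored amounts. Quick usage of the function above:

formatCurrency(499);    // '$4.99'
formatCurrency(-500);   // '-$5.00'
formatCurrency(123456); // '$1,234.56' (Intl.NumberFormat inserts grouping separators)
formatCurrency(null);   // 'N/A'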
@@ -71,11 +71,11 @@ describe('generateFlyerIcon', () => {
    expect(mocks.mkdir).toHaveBeenCalledWith(iconsDirectory, { recursive: true });

    // Check that sharp was called with the correct source
    expect(mocks.sharp).toHaveBeenCalledWith(sourceImagePath);
    expect(mocks.sharp).toHaveBeenCalledWith(sourceImagePath, { failOn: 'none' });

    // Check the processing chain
    expect(mocks.resize).toHaveBeenCalledWith(64, 64, { fit: 'cover' });
    expect(mocks.webp).toHaveBeenCalledWith({ quality: 80 });
    expect(mocks.resize).toHaveBeenCalledWith({ width: 128, height: 128, fit: 'inside' });
    expect(mocks.webp).toHaveBeenCalledWith({ quality: 75 });
    expect(mocks.toFile).toHaveBeenCalledWith('/path/to/icons/icon-flyer-image-1.webp');

    // Check the returned filename
@@ -87,8 +87,11 @@ describe('generateFlyerIcon', () => {
    mocks.toFile.mockRejectedValue(sharpError);

    await expect(
      generateFlyerIcon('/path/to/bad-image.jpg', '/path/to/icons', logger),
    ).rejects.toThrow('Icon generation failed.');
    expect(logger.error).toHaveBeenCalledWith(expect.any(Object), 'Failed to generate flyer icon:');
      generateFlyerIcon('/path/to/bad-image.jpg', '/path/to/icons', logger),
    ).rejects.toThrow('Failed to generate icon for /path/to/bad-image.jpg.');
    expect(logger.error).toHaveBeenCalledWith(
      { err: sharpError, sourcePath: '/path/to/bad-image.jpg', outputPath: '/path/to/icons/icon-bad-image.webp' },
      'An error occurred during icon generation.',
    );
  });
});
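The assertions above imply a chainable sharp mock in which resize and webp return the chain and toFile resolves. A minimal sketch of such a mock, assuming Vitest's vi.hoisted/vi.mock (the real test file's mock block isn't shown in this diff):

// Hypothetical mock setup consistent with the expectations above.
import { vi } from 'vitest';

const mocks = vi.hoisted(() => {
  const toFile = vi.fn().mockResolvedValue(undefined);
  const webp = vi.fn().mockReturnThis();   // returns the chain when called as a method
  const resize = vi.fn().mockReturnThis(); // ditto
  const sharp = vi.fn(() => ({ resize, webp, toFile }));
  return { sharp, resize, webp, toFile, mkdir: vi.fn() };
});

vi.mock('sharp', () => ({ default: mocks.sharp }));
vi.mock('node:fs/promises', () => ({ default: { mkdir: mocks.mkdir }, mkdir: mocks.mkdir }));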
@@ -6,46 +6,91 @@ import type { Logger } from 'pino';
import { sanitizeFilename } from './stringUtils';

/**
 * Generates a 64x64 square icon from a source image.
 * @param sourceImagePath The full path to the original image file.
 * @param iconsDirectory The directory where the icon should be saved.
 * @returns A promise that resolves to the filename of the newly created icon.
 * @param logger The request-scoped logger instance, as per ADR-004.
 * @throws An error if the icon generation fails.
 * Processes an uploaded image file by stripping all metadata (like EXIF)
 * and optimizing it for web use.
 *
 * @param sourcePath The path to the temporary uploaded file.
 * @param destinationDir The directory where the final image should be saved.
 * @param originalFileName The original name of the file, used to determine the output name.
 * @param logger A pino logger instance for logging.
 * @returns The file name of the newly processed image.
 */
export async function generateFlyerIcon(
  sourceImagePath: string,
  iconsDirectory: string,
export async function processAndSaveImage(
  sourcePath: string,
  destinationDir: string,
  originalFileName: string,
  logger: Logger,
): Promise<string> {
  // Create a unique-ish filename to avoid collisions, but keep the original extension.
  const fileExt = path.extname(originalFileName);
  const fileBase = path.basename(originalFileName, fileExt);
  const outputFileName = `${fileBase}-${Date.now()}${fileExt}`;
  const outputPath = path.join(destinationDir, outputFileName);

  try {
    // 1. Create a new filename, standardizing the extension to .webp for consistency and performance.
    // We sanitize the original filename to remove spaces and special characters, ensuring URL safety.
    // The sourceImagePath is already sanitized by multer, but we apply it here again for robustness
    // in case this function is ever called from a different context.
    const sanitizedBaseName = sanitizeFilename(path.basename(sourceImagePath));
    const originalFileName = path.parse(sanitizedBaseName).name;
    const iconFileName = `icon-${originalFileName}.webp`;
    const iconOutputPath = path.join(iconsDirectory, iconFileName);
    // Ensure the destination directory exists.
    await fs.mkdir(destinationDir, { recursive: true });

    // Ensure the icons subdirectory exists.
    await fs.mkdir(iconsDirectory, { recursive: true });
    logger.debug({ sourcePath, outputPath }, 'Starting image processing: stripping metadata and optimizing.');

    // 2. Use sharp to process the image.
    await sharp(sourceImagePath)
      // Use `resize` with a `fit` strategy to prevent distortion.
      // `sharp.fit.cover` will resize to fill 64x64 and crop any excess,
      // ensuring the icon is always a non-distorted square.
      .resize(64, 64, { fit: sharp.fit.cover })
      // 3. Convert the output to WebP format.
      // The `quality` option is a good balance between size and clarity.
      .webp({ quality: 80 })
      .toFile(iconOutputPath);
    // Use sharp to process the image.
    // sharp strips EXIF and other metadata by default; we deliberately do not call
    // .withMetadata(), which would preserve the input metadata in the output.
    // .jpeg() and .png() apply format-specific optimizations; `force: false` ensures
    // each only applies when the input already has that format, so the file keeps
    // its original extension accurately.
    await sharp(sourcePath, { failOn: 'none' })
      .jpeg({ quality: 85, mozjpeg: true, force: false }) // Optimize JPEGs
      .png({ compressionLevel: 8, quality: 85, force: false }) // Optimize PNGs
      .toFile(outputPath);

    logger.info(`Generated 64x64 icon: ${iconFileName}`);
    return iconFileName;
    logger.info(`Successfully processed image and saved to ${outputPath}`);
    return outputFileName;
  } catch (error) {
    logger.error({ error, sourceImagePath }, 'Failed to generate flyer icon:');
    throw new Error('Icon generation failed.');
    logger.error(
      { err: error, sourcePath, outputPath },
      'An error occurred during image processing and saving.',
    );
    // Re-throw the error to be handled by the calling service (e.g., the worker).
    throw new Error(`Failed to process image ${originalFileName}.`);
  }
}

/**
 * Generates a small WebP icon from a source image.
 *
 * @param sourcePath The path to the source image (can be the original upload or the processed image).
 * @param outputDir The directory to save the icon in (e.g., 'flyer-images/icons').
 * @param logger A pino logger instance for logging.
 * @returns The file name of the generated icon.
 */
export async function generateFlyerIcon(
  sourcePath: string,
  outputDir: string,
  logger: Logger,
): Promise<string> {
  // Sanitize the base name of the source file to create a clean icon name.
  const sourceBaseName = path.parse(sourcePath).name;
  const iconFileName = `icon-${sanitizeFilename(sourceBaseName)}.webp`;
  const outputPath = path.join(outputDir, iconFileName);

  try {
    // Ensure the output directory exists.
    await fs.mkdir(outputDir, { recursive: true });

    logger.debug({ sourcePath, outputPath }, 'Starting icon generation.');

    await sharp(sourcePath, { failOn: 'none' })
      .resize({ width: 128, height: 128, fit: 'inside' })
      .webp({ quality: 75 }) // Slightly lower quality for icons is acceptable.
      .toFile(outputPath);

    logger.info(`Successfully generated icon: ${outputPath}`);
    return iconFileName;
  } catch (error) {
    logger.error(
      { err: error, sourcePath, outputPath },
      'An error occurred during icon generation.',
    );
    // Re-throw the error to be handled by the calling service.
    throw new Error(`Failed to generate icon for ${sourcePath}.`);
  }
}
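Taken together, an upload pipeline would typically run processAndSaveImage first and derive the icon from the processed file rather than the raw upload. A minimal sketch (the handler shape, directory names, and the './imageUtils' module path are assumptions, not taken from this diff):

// Hypothetical composition of the two helpers above.
import type { Logger } from 'pino';
import { processAndSaveImage, generateFlyerIcon } from './imageUtils';

export async function handleFlyerUpload(
  tmpPath: string,      // e.g., the temporary path assigned by the upload middleware
  originalName: string, // the client-supplied file name
  logger: Logger,
): Promise<{ imageFileName: string; iconFileName: string }> {
  // 1. Strip metadata and optimize, saving into the public images directory.
  const imageFileName = await processAndSaveImage(tmpPath, 'flyer-images', originalName, logger);
  // 2. Generate the small WebP icon from the processed image, not the raw upload.
  const iconFileName = await generateFlyerIcon(
    `flyer-images/${imageFileName}`,
    'flyer-images/icons',
    logger,
  );
  return { imageFileName, iconFileName };
}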