Compare commits

...

6 Commits

Author         SHA1        Message                               Date
Gitea Actions  4c70905950  ci: Bump version to 0.9.26 [skip ci]  2026-01-05 14:51:27 +05:00
               0b4884ff2a  even more and more test fixes         2026-01-05 01:50:54 -08:00
                           (All checks were successful: Deploy to Test Environment / deploy-to-test (push), Successful in 26m1s)
Gitea Actions  e4acab77c8  ci: Bump version to 0.9.25 [skip ci]  2026-01-05 14:26:57 +05:00
               4e20b1b430  even more and more test fixes         2026-01-05 01:26:12 -08:00
                           (Some checks failed: Deploy to Test Environment / deploy-to-test (push), Failing after 54s)
Gitea Actions  15747ac942  ci: Bump version to 0.9.24 [skip ci]  2026-01-05 12:37:56 +05:00
               e5fa89ef17  even more and more test fixes         2026-01-04 23:36:56 -08:00
                           (All checks were successful: Deploy to Test Environment / deploy-to-test (push), Successful in 27m55s)
38 changed files with 1293 additions and 936 deletions

View File

@@ -335,7 +335,8 @@ jobs:
fi
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
# Sanitize commit message to prevent shell injection or build breaks (removes double quotes, backticks, backslashes, and dollar signs)
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s | tr -d '"`\\$')
PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
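
The sanitization step strips exactly four characters before the message is interpolated into the build. As a rough illustration (not part of the workflow), the same filter expressed in TypeScript:

// Same effect as `tr -d '"`\\$'`: drop double quotes, backticks,
// backslashes, and dollar signs from the commit message.
const sanitizeCommitMessage = (msg: string): string => msg.replace(/["`\\$]/g, '');

console.log(sanitizeCommitMessage('fix: escape `$PATH` in "build" step'));
// -> fix: escape PATH in build step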

View File

@@ -16,6 +16,27 @@ if (missingSecrets.length > 0) {
console.log('[ecosystem.config.cjs] ✅ Critical environment variables are present.');
}
// --- Shared Environment Variables ---
// Define common variables to reduce duplication and ensure consistency across apps.
const sharedEnv = {
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
};
module.exports = {
apps: [
{
@@ -25,6 +46,11 @@ module.exports = {
script: './node_modules/.bin/tsx',
args: 'server.ts',
max_memory_restart: '500M',
// Production Optimization: Run in cluster mode to utilize all CPU cores
instances: 'max',
exec_mode: 'cluster',
kill_timeout: 5000, // Allow 5s for graceful shutdown of API requests
log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
// Restart Logic
max_restarts: 40,
@@ -36,46 +62,16 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-api',
cwd: '/var/www/flyer-crawler.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
...sharedEnv,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'test',
name: 'flyer-crawler-api-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
...sharedEnv,
},
// Development Environment Settings
env_development: {
@@ -83,23 +79,8 @@ module.exports = {
name: 'flyer-crawler-api-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
WORKER_LOCK_DURATION: '120000',
...sharedEnv,
},
},
{
@@ -108,6 +89,8 @@ module.exports = {
script: './node_modules/.bin/tsx',
args: 'src/services/worker.ts',
max_memory_restart: '1G',
kill_timeout: 10000, // Workers may need more time to complete a job
log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
// Restart Logic
max_restarts: 40,
@@ -119,44 +102,14 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-worker',
cwd: '/var/www/flyer-crawler.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'test',
name: 'flyer-crawler-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Development Environment Settings
env_development: {
@@ -164,22 +117,7 @@ module.exports = {
name: 'flyer-crawler-worker-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
},
{
@@ -188,6 +126,8 @@ module.exports = {
script: './node_modules/.bin/tsx',
args: 'src/services/worker.ts',
max_memory_restart: '1G',
kill_timeout: 10000,
log_date_format: 'YYYY-MM-DD HH:mm:ss Z',
// Restart Logic
max_restarts: 40,
@@ -199,44 +139,14 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-analytics-worker',
cwd: '/var/www/flyer-crawler.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'test',
name: 'flyer-crawler-analytics-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
// Development Environment Settings
env_development: {
@@ -244,22 +154,7 @@ module.exports = {
name: 'flyer-crawler-analytics-worker-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
...sharedEnv,
},
},
],
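
A note on the new kill_timeout settings: PM2 sends SIGINT on stop/reload and escalates to SIGKILL once kill_timeout elapses, so the 5s (API) and 10s (worker) windows only pay off if the process drains work on SIGINT. A minimal sketch of the server-side handling this assumes (not code from this repo):

import http from 'node:http';

const server = http.createServer((_req, res) => res.end('ok'));
server.listen(process.env.PORT ?? 3000);

process.on('SIGINT', () => {
  // Stop accepting new connections; in-flight requests finish normally.
  server.close(() => process.exit(0));
  // Safety net: force-exit just before PM2's SIGKILL would land.
  setTimeout(() => process.exit(1), 4500).unref();
});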

package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "flyer-crawler",
"version": "0.9.23",
"version": "0.9.26",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.9.23",
"version": "0.9.26",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",

View File

@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.9.23",
"version": "0.9.26",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",

View File

@@ -1,8 +1,8 @@
// src/features/flyer/FlyerDisplay.tsx
import React from 'react';
import { ScanIcon } from '../../components/icons/ScanIcon';
import { formatDateRange } from '../../utils/dateUtils';
import type { Store } from '../../types';
import { formatDateRange } from './dateUtils';
import { ScanIcon } from '../../components/icons/ScanIcon';
export interface FlyerDisplayProps {
imageUrl: string | null;

View File

@@ -3,7 +3,7 @@ import React from 'react';
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach, afterEach, type Mocked } from 'vitest';
import { FlyerList } from './FlyerList';
import { formatShortDate } from './dateUtils';
import { formatShortDate } from '../../utils/dateUtils';
import type { Flyer, UserProfile } from '../../types';
import { createMockUserProfile } from '../../tests/utils/mockFactories';
import { createMockFlyer } from '../../tests/utils/mockFactories';

View File

@@ -7,7 +7,7 @@ import { parseISO, format, isValid } from 'date-fns';
import { MapPinIcon, Trash2Icon } from 'lucide-react';
import { logger } from '../../services/logger.client';
import * as apiClient from '../../services/apiClient';
import { calculateDaysBetween, formatDateRange } from './dateUtils';
import { calculateDaysBetween, formatDateRange, getCurrentDateISOString } from '../../utils/dateUtils';
interface FlyerListProps {
flyers: Flyer[];
@@ -54,7 +54,7 @@ export const FlyerList: React.FC<FlyerListProps> = ({
verbose: true,
});
const daysLeft = calculateDaysBetween(format(new Date(), 'yyyy-MM-dd'), flyer.valid_to);
const daysLeft = calculateDaysBetween(getCurrentDateISOString(), flyer.valid_to);
let daysLeftText = '';
let daysLeftColor = '';
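
getCurrentDateISOString is imported from src/utils/dateUtils, whose implementation is not part of this diff. A plausible sketch, assuming it mirrors the format(new Date(), 'yyyy-MM-dd') call it replaces:

import { format } from 'date-fns';

// Assumed shape of the helper: a single, easily mockable source of "today"
// as YYYY-MM-DD, replacing scattered format(new Date(), ...) calls.
export const getCurrentDateISOString = (): string => format(new Date(), 'yyyy-MM-dd');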

View File

@@ -1,130 +0,0 @@
// src/features/flyer/dateUtils.test.ts
import { describe, it, expect } from 'vitest';
import { formatShortDate, calculateDaysBetween, formatDateRange } from './dateUtils';
describe('formatShortDate', () => {
it('should format a valid YYYY-MM-DD date string correctly', () => {
expect(formatShortDate('2024-07-26')).toBe('Jul 26');
});
it('should handle single-digit days correctly', () => {
expect(formatShortDate('2025-01-05')).toBe('Jan 5');
});
it('should handle dates at the end of the year', () => {
expect(formatShortDate('2023-12-31')).toBe('Dec 31');
});
it('should return null for a null input', () => {
expect(formatShortDate(null)).toBeNull();
});
it('should return null for an undefined input', () => {
expect(formatShortDate(undefined)).toBeNull();
});
it('should return null for an empty string input', () => {
expect(formatShortDate('')).toBeNull();
});
it('should return null for an invalid date string', () => {
expect(formatShortDate('not-a-real-date')).toBeNull();
});
it('should return null for a malformed date string', () => {
expect(formatShortDate('2024-13-01')).toBeNull(); // Invalid month
});
it('should correctly format a full ISO string with time and timezone', () => {
expect(formatShortDate('2024-12-25T10:00:00Z')).toBe('Dec 25');
});
});
describe('calculateDaysBetween', () => {
it('should calculate the difference in days between two valid date strings', () => {
expect(calculateDaysBetween('2023-01-01', '2023-01-05')).toBe(4);
});
it('should return a negative number if the end date is before the start date', () => {
expect(calculateDaysBetween('2023-01-05', '2023-01-01')).toBe(-4);
});
it('should handle Date objects', () => {
const start = new Date('2023-01-01');
const end = new Date('2023-01-10');
expect(calculateDaysBetween(start, end)).toBe(9);
});
it('should return null if either date is null or undefined', () => {
expect(calculateDaysBetween(null, '2023-01-01')).toBeNull();
expect(calculateDaysBetween('2023-01-01', undefined)).toBeNull();
});
it('should return null if either date is invalid', () => {
expect(calculateDaysBetween('invalid', '2023-01-01')).toBeNull();
expect(calculateDaysBetween('2023-01-01', 'invalid')).toBeNull();
});
});
describe('formatDateRange', () => {
it('should format a range with two different valid dates', () => {
expect(formatDateRange('2023-01-01', '2023-01-05')).toBe('Jan 1 - Jan 5');
});
it('should format a range with the same start and end date as a single date', () => {
expect(formatDateRange('2023-01-01', '2023-01-01')).toBe('Jan 1');
});
it('should return only the start date if end date is missing', () => {
expect(formatDateRange('2023-01-01', null)).toBe('Jan 1');
expect(formatDateRange('2023-01-01', undefined)).toBe('Jan 1');
});
it('should return only the end date if start date is missing', () => {
expect(formatDateRange(null, '2023-01-05')).toBe('Jan 5');
expect(formatDateRange(undefined, '2023-01-05')).toBe('Jan 5');
});
it('should return null if both dates are missing or invalid', () => {
expect(formatDateRange(null, null)).toBeNull();
expect(formatDateRange(undefined, undefined)).toBeNull();
expect(formatDateRange('invalid', 'invalid')).toBeNull();
});
it('should handle one valid and one invalid date by showing only the valid one', () => {
expect(formatDateRange('2023-01-01', 'invalid')).toBe('Jan 1');
expect(formatDateRange('invalid', '2023-01-05')).toBe('Jan 5');
});
describe('verbose mode', () => {
it('should format a range with two different valid dates verbosely', () => {
expect(formatDateRange('2023-01-01', '2023-01-05', { verbose: true })).toBe(
'Deals valid from January 1, 2023 to January 5, 2023',
);
});
it('should format a range with the same start and end date verbosely', () => {
expect(formatDateRange('2023-01-01', '2023-01-01', { verbose: true })).toBe(
'Valid on January 1, 2023',
);
});
it('should format only the start date verbosely', () => {
expect(formatDateRange('2023-01-01', null, { verbose: true })).toBe(
'Deals start January 1, 2023',
);
});
it('should format only the end date verbosely', () => {
expect(formatDateRange(null, '2023-01-05', { verbose: true })).toBe(
'Deals end January 5, 2023',
);
});
it('should handle one valid and one invalid date verbosely', () => {
expect(formatDateRange('2023-01-01', 'invalid', { verbose: true })).toBe(
'Deals start January 1, 2023',
);
});
});
});

View File

@@ -1,65 +0,0 @@
// src/features/flyer/dateUtils.ts
import { parseISO, format, isValid, differenceInDays } from 'date-fns';
export const formatShortDate = (dateString: string | null | undefined): string | null => {
if (!dateString) return null;
// Using `parseISO` from date-fns is more reliable than `new Date()` for YYYY-MM-DD strings.
// It correctly interprets the string as a local date, avoiding timezone-related "off-by-one" errors.
const date = parseISO(dateString);
if (isValid(date)) {
return format(date, 'MMM d');
}
return null;
};
export const calculateDaysBetween = (
startDate: string | Date | null | undefined,
endDate: string | Date | null | undefined,
): number | null => {
if (!startDate || !endDate) return null;
const start = typeof startDate === 'string' ? parseISO(startDate) : startDate;
const end = typeof endDate === 'string' ? parseISO(endDate) : endDate;
if (!isValid(start) || !isValid(end)) return null;
return differenceInDays(end, start);
};
interface DateRangeOptions {
verbose?: boolean;
}
export const formatDateRange = (
startDate: string | null | undefined,
endDate: string | null | undefined,
options?: DateRangeOptions,
): string | null => {
if (!options?.verbose) {
const start = formatShortDate(startDate);
const end = formatShortDate(endDate);
if (start && end) {
return start === end ? start : `${start} - ${end}`;
}
return start || end || null;
}
// Verbose format logic
const dateFormat = 'MMMM d, yyyy';
const formatFn = (dateStr: string | null | undefined) => {
if (!dateStr) return null;
const date = parseISO(dateStr);
return isValid(date) ? format(date, dateFormat) : null;
};
const start = formatFn(startDate);
const end = formatFn(endDate);
if (start && end) {
return start === end ? `Valid on ${start}` : `Deals valid from ${start} to ${end}`;
}
if (start) return `Deals start ${start}`;
if (end) return `Deals end ${end}`;
return null;
};
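
The parseISO comment above is worth a concrete demonstration: date-only strings passed to new Date() are interpreted as UTC midnight, which shifts the rendered day in UTC-negative timezones:

import { format, parseISO } from 'date-fns';

// new Date('2024-07-26') parses as 2024-07-26T00:00:00Z, so formatting it
// in a UTC-negative timezone (e.g. -08:00) renders the previous day.
console.log(format(new Date('2024-07-26'), 'MMM d')); // 'Jul 25' west of UTC
console.log(format(parseISO('2024-07-26'), 'MMM d')); // 'Jul 26' everywhere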

View File

@@ -13,7 +13,7 @@ vi.mock('../services/db/index.db', () => ({
getFlyerItems: vi.fn(),
getFlyerItemsForFlyers: vi.fn(),
countFlyerItemsForFlyers: vi.fn(),
trackFlyerItemInteraction: vi.fn(),
trackFlyerItemInteraction: vi.fn().mockResolvedValue(undefined),
},
}));
@@ -165,7 +165,7 @@ describe('Flyer Routes (/api/flyers)', () => {
expect(response.status).toBe(500);
expect(response.body.message).toBe('DB Error');
expect(mockLogger.error).toHaveBeenCalledWith(
{ error: dbError, flyerId: '123' },
{ error: dbError, flyerId: 123 },
'Error fetching flyer items in /api/flyers/:id/items:',
);
});
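
The expectation now pins flyerId to the number 123 rather than the string '123', which implies the route parses req.params.id before it reaches the log context. A hypothetical sketch of that route-side parsing (the route file is not shown in this compare):

import type { Request } from 'express';

// Hypothetical: '123' from req.params becomes 123 before logging.
export function parseFlyerId(req: Request): number {
  const flyerId = Number.parseInt(req.params.id, 10);
  if (Number.isNaN(flyerId)) throw new Error('Invalid flyer id');
  return flyerId;
}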

View File

@@ -23,8 +23,8 @@ import * as db from './db/index.db';
import { flyerQueue } from './queueService.server';
import type { Job } from 'bullmq';
import { createFlyerAndItems } from './db/flyer.db';
import { getBaseUrl } from '../utils/serverUtils';
import { generateFlyerIcon } from '../utils/imageProcessor';
import { getBaseUrl } from '../utils/serverUtils'; // This was a duplicate, fixed.
import { generateFlyerIcon, processAndSaveImage } from '../utils/imageProcessor';
import { AdminRepository } from './db/admin.db';
import path from 'path';
import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError
@@ -215,7 +215,11 @@ export class AIService {
this.logger.warn(
'[AIService] Mock generateContent called. This should only happen in tests when no API key is available.',
);
return { text: '[]' } as unknown as GenerateContentResponse;
// Return a minimal valid JSON object structure to prevent downstream parsing errors.
const mockResponse = { store_name: 'Mock Store', items: [] };
return {
text: JSON.stringify(mockResponse),
} as unknown as GenerateContentResponse;
},
};
}
@@ -369,62 +373,43 @@ export class AIService {
* @returns The parsed JSON object, or null if parsing fails.
*/
private _parseJsonFromAiResponse<T>(responseText: string | undefined, logger: Logger): T | null {
// --- START HYPER-DIAGNOSTIC LOGGING ---
console.log('\n--- DIAGNOSING _parseJsonFromAiResponse ---');
console.log(
`1. Initial responseText (Type: ${typeof responseText}):`,
JSON.stringify(responseText),
// --- START EXTENSIVE DEBUG LOGGING ---
logger.debug(
{
responseText_type: typeof responseText,
responseText_length: responseText?.length,
responseText_preview: responseText?.substring(0, 200),
},
'[_parseJsonFromAiResponse] Starting JSON parsing.',
);
// --- END HYPER-DIAGNOSTIC LOGGING ---
if (!responseText) {
logger.warn(
'[_parseJsonFromAiResponse] Response text is empty or undefined. Returning null.',
);
console.log('2. responseText is falsy. ABORTING.');
console.log('--- END DIAGNOSIS ---\n');
logger.warn('[_parseJsonFromAiResponse] Response text is empty or undefined. Aborting parsing.');
return null;
}
// Find the start of the JSON, which can be inside a markdown block
const markdownRegex = /```(json)?\s*([\s\S]*?)\s*```/;
const markdownMatch = responseText.match(markdownRegex);
console.log('2. Regex Result (markdownMatch):', markdownMatch);
let jsonString;
if (markdownMatch && markdownMatch[2] !== undefined) {
// Check for capture group
console.log('3. Regex matched. Processing Captured Group.');
console.log(
` - Captured content (Type: ${typeof markdownMatch[2]}, Length: ${markdownMatch[2].length}):`,
JSON.stringify(markdownMatch[2]),
);
logger.debug(
{ rawCapture: markdownMatch[2] },
{ capturedLength: markdownMatch[2].length },
'[_parseJsonFromAiResponse] Found JSON content within markdown code block.',
);
jsonString = markdownMatch[2].trim();
console.log(
`4. After trimming, jsonString is (Type: ${typeof jsonString}, Length: ${jsonString.length}):`,
JSON.stringify(jsonString),
);
logger.debug(
{ trimmedJsonString: jsonString },
'[_parseJsonFromAiResponse] Trimmed extracted JSON string.',
);
} else {
console.log(
'3. Regex did NOT match or capture group 2 is undefined. Will attempt to parse entire responseText.',
);
logger.debug('[_parseJsonFromAiResponse] No markdown code block found. Using raw response text.');
jsonString = responseText;
}
// Find the first '{' or '[' and the last '}' or ']' to isolate the JSON object.
const firstBrace = jsonString.indexOf('{');
const firstBracket = jsonString.indexOf('[');
console.log(
`5. Index search on jsonString: firstBrace=${firstBrace}, firstBracket=${firstBracket}`,
logger.debug(
{ firstBrace, firstBracket },
'[_parseJsonFromAiResponse] Searching for start of JSON.',
);
// Determine the starting point of the JSON content
@@ -432,37 +417,44 @@ export class AIService {
firstBrace === -1 || (firstBracket !== -1 && firstBracket < firstBrace)
? firstBracket
: firstBrace;
console.log('6. Calculated startIndex:', startIndex);
if (startIndex === -1) {
logger.error(
{ responseText },
"[_parseJsonFromAiResponse] Could not find starting '{' or '[' in response.",
);
console.log('7. startIndex is -1. ABORTING.');
console.log('--- END DIAGNOSIS ---\n');
return null;
}
const jsonSlice = jsonString.substring(startIndex);
console.log(
`8. Sliced string to be parsed (jsonSlice) (Length: ${jsonSlice.length}):`,
JSON.stringify(jsonSlice),
// Find the last '}' or ']' so that any trailing text the AI appends after the JSON is ignored.
const lastBrace = jsonString.lastIndexOf('}');
const lastBracket = jsonString.lastIndexOf(']');
const endIndex = Math.max(lastBrace, lastBracket);
if (endIndex === -1) {
logger.error(
{ responseText },
"[_parseJsonFromAiResponse] Could not find ending '}' or ']' in response.",
);
return null;
}
const jsonSlice = jsonString.substring(startIndex, endIndex + 1);
logger.debug(
{ sliceLength: jsonSlice.length },
'[_parseJsonFromAiResponse] Extracted JSON slice for parsing.',
);
try {
console.log('9. Attempting JSON.parse on jsonSlice...');
const parsed = JSON.parse(jsonSlice) as T;
console.log('10. SUCCESS: JSON.parse succeeded.');
console.log('--- END DIAGNOSIS (SUCCESS) ---\n');
logger.info('[_parseJsonFromAiResponse] Successfully parsed JSON from AI response.');
return parsed;
} catch (e) {
logger.error(
{ jsonSlice, error: e, errorMessage: (e as Error).message, stack: (e as Error).stack },
'[_parseJsonFromAiResponse] Failed to parse JSON slice.',
);
console.error('10. FAILURE: JSON.parse FAILED. Error:', e);
console.log('--- END DIAGNOSIS (FAILURE) ---\n');
return null;
}
}
@@ -795,6 +787,18 @@ async enqueueFlyerProcessing(
}
const baseUrl = getBaseUrl(logger);
// --- START DEBUGGING ---
// Add a fail-fast check to ensure the baseUrl is a valid URL before enqueuing.
// This will make the test fail at the upload step if the URL is the problem,
// which is easier to debug than a worker failure.
if (!baseUrl || !baseUrl.startsWith('http')) {
const errorMessage = `[aiService] FATAL: The generated baseUrl is not a valid absolute URL. Value: "${baseUrl}". This will cause the flyer processing worker to fail. Check the FRONTEND_URL environment variable.`;
logger.error(errorMessage);
// Throw a standard error that the calling route can handle.
throw new Error(errorMessage);
}
logger.info({ baseUrl }, '[aiService] Enqueuing job with valid baseUrl.');
// --- END DEBUGGING ---
// 3. Add job to the queue
const job = await flyerQueue.add('process-flyer', {
@@ -818,6 +822,7 @@ async enqueueFlyerProcessing(
body: any,
logger: Logger,
): { parsed: FlyerProcessPayload; extractedData: Partial<ExtractedCoreData> | null | undefined } {
logger.debug({ body, type: typeof body }, '[AIService] Starting _parseLegacyPayload');
let parsed: FlyerProcessPayload = {};
try {
@@ -826,6 +831,7 @@ async enqueueFlyerProcessing(
logger.warn({ error: errMsg(e) }, '[AIService] Failed to parse top-level request body string.');
return { parsed: {}, extractedData: {} };
}
logger.debug({ parsed }, '[AIService] Parsed top-level body');
// If the real payload is nested inside a 'data' property (which could be a string),
// we parse it out but keep the original `parsed` object for top-level properties like checksum.
@@ -841,13 +847,16 @@ async enqueueFlyerProcessing(
potentialPayload = parsed.data;
}
}
logger.debug({ potentialPayload }, '[AIService] Potential payload after checking "data" property');
// The extracted data is either in an `extractedData` key or is the payload itself.
const extractedData = potentialPayload.extractedData ?? potentialPayload;
logger.debug({ extractedData: !!extractedData }, '[AIService] Extracted data object');
// Merge for checksum lookup: properties in the outer `parsed` object (like a top-level checksum)
// take precedence over any same-named properties inside `potentialPayload`.
const finalParsed = { ...potentialPayload, ...parsed };
logger.debug({ finalParsed }, '[AIService] Final parsed object for checksum lookup');
return { parsed: finalParsed, extractedData };
}
@@ -858,10 +867,12 @@ async enqueueFlyerProcessing(
userProfile: UserProfile | undefined,
logger: Logger,
): Promise<Flyer> {
logger.debug({ body, file }, '[AIService] Starting processLegacyFlyerUpload');
const { parsed, extractedData: initialExtractedData } = this._parseLegacyPayload(body, logger);
let extractedData = initialExtractedData;
const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
logger.debug({ checksum, parsed }, '[AIService] Extracted checksum from legacy payload');
if (!checksum) {
throw new ValidationError([], 'Checksum is required.');
}
@@ -896,12 +907,24 @@ async enqueueFlyerProcessing(
logger.warn('extractedData.store_name missing; using fallback store name.');
}
const iconsDir = path.join(path.dirname(file.path), 'icons');
const iconFileName = await generateFlyerIcon(file.path, iconsDir, logger);
// Process the uploaded image to strip metadata and optimize it.
const flyerImageDir = path.dirname(file.path);
const processedImageFileName = await processAndSaveImage(
file.path,
flyerImageDir,
originalFileName,
logger,
);
const processedImagePath = path.join(flyerImageDir, processedImageFileName);
// Generate the icon from the newly processed (and cleaned) image.
const iconsDir = path.join(flyerImageDir, 'icons');
const iconFileName = await generateFlyerIcon(processedImagePath, iconsDir, logger);
const baseUrl = getBaseUrl(logger);
const iconUrl = `${baseUrl}/flyer-images/icons/${iconFileName}`;
const imageUrl = `${baseUrl}/flyer-images/${file.filename}`;
const imageUrl = `${baseUrl}/flyer-images/${processedImageFileName}`;
logger.debug({ imageUrl, iconUrl }, 'Constructed URLs for legacy upload');
const flyerData: FlyerInsert = {
file_name: originalFileName,
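
The rewritten _parseJsonFromAiResponse boils down to one slicing technique: strip an optional markdown fence, then keep only the span from the first opening delimiter to the last closing one. A self-contained sketch of that technique:

// Standalone sketch of the slicing used in _parseJsonFromAiResponse: strip an
// optional ``` fence, then keep the span from the first '{' or '[' to the
// last '}' or ']' so leading/trailing chatter around the JSON is ignored.
export function extractJson<T>(responseText: string): T | null {
  const fenced = responseText.match(/```(?:json)?\s*([\s\S]*?)\s*```/);
  const body = fenced?.[1]?.trim() ?? responseText;

  const firstBrace = body.indexOf('{');
  const firstBracket = body.indexOf('[');
  const start =
    firstBrace === -1 || (firstBracket !== -1 && firstBracket < firstBrace)
      ? firstBracket
      : firstBrace;
  const end = Math.max(body.lastIndexOf('}'), body.lastIndexOf(']'));
  if (start === -1 || end === -1) return null;

  try {
    return JSON.parse(body.slice(start, end + 1)) as T;
  } catch {
    return null;
  }
}

// extractJson('Here you go:\n```json\n{"store_name":"A","items":[]}\n```')
// -> { store_name: 'A', items: [] }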

View File

@@ -134,7 +134,6 @@ describe('AuthService', () => {
'hashed-password',
{ full_name: 'Test User', avatar_url: undefined },
reqLog,
{},
);
expect(transactionalAdminRepoMocks.logActivity).toHaveBeenCalledWith(
expect.objectContaining({

View File

@@ -40,7 +40,6 @@ class AuthService {
hashedPassword,
{ full_name: fullName, avatar_url: avatarUrl },
reqLog,
client, // Pass the transactional client
);
logger.info(`Successfully created new user in DB: ${newUser.user.email} (ID: ${newUser.user.user_id})`);

View File

@@ -24,6 +24,16 @@ vi.mock('../services/logger.server', () => ({
// Mock the date utility to control the output for the weekly analytics job
vi.mock('../utils/dateUtils', () => ({
getSimpleWeekAndYear: vi.fn(() => ({ year: 2024, week: 42 })),
getCurrentDateISOString: vi.fn(() => '2024-10-18'),
}));
vi.mock('../services/queueService.server', () => ({
analyticsQueue: {
add: vi.fn(),
},
weeklyAnalyticsQueue: {
add: vi.fn(),
},
}));
import { BackgroundJobService, startBackgroundJobs } from './backgroundJobService';
@@ -32,6 +42,7 @@ import type { PersonalizationRepository } from './db/personalization.db';
import type { NotificationRepository } from './db/notification.db';
import { createMockWatchedItemDeal } from '../tests/utils/mockFactories';
import { logger as globalMockLogger } from '../services/logger.server'; // Import the mocked logger
import { analyticsQueue, weeklyAnalyticsQueue } from '../services/queueService.server';
describe('Background Job Service', () => {
// Create mock dependencies that will be injected into the service
@@ -118,6 +129,35 @@ describe('Background Job Service', () => {
mockServiceLogger,
);
describe('Manual Triggers', () => {
it('triggerAnalyticsReport should add a daily report job to the queue', async () => {
vi.mocked(analyticsQueue.add).mockResolvedValue({ id: 'manual-job-1' } as any);
const jobId = await service.triggerAnalyticsReport();
expect(jobId).toContain('manual-report-');
expect(analyticsQueue.add).toHaveBeenCalledWith(
'generate-daily-report',
{ reportDate: '2024-10-18' },
{ jobId: expect.stringContaining('manual-report-') },
);
});
it('triggerWeeklyAnalyticsReport should add a weekly report job to the queue', async () => {
vi.mocked(weeklyAnalyticsQueue.add).mockResolvedValue({ id: 'manual-weekly-job-1' } as any);
const jobId = await service.triggerWeeklyAnalyticsReport();
expect(jobId).toContain('manual-weekly-report-');
expect(weeklyAnalyticsQueue.add).toHaveBeenCalledWith(
'generate-weekly-report',
{
reportYear: 2024, // From mocked dateUtils
reportWeek: 42, // From mocked dateUtils
},
{ jobId: expect.stringContaining('manual-weekly-report-') },
);
});
});
it('should do nothing if no deals are found for any user', async () => {
mockPersonalizationRepo.getBestSalePricesForAllUsers.mockResolvedValue([]);
await service.runDailyDealCheck();
@@ -153,24 +193,27 @@ describe('Background Job Service', () => {
// Check that in-app notifications were created for both users
expect(mockNotificationRepo.createBulkNotifications).toHaveBeenCalledTimes(1);
const notificationPayload = mockNotificationRepo.createBulkNotifications.mock.calls[0][0];
expect(notificationPayload).toHaveLength(2);
// Use expect.arrayContaining to be order-agnostic.
expect(notificationPayload).toEqual(
expect.arrayContaining([
{
user_id: 'user-1',
content: 'You have 1 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
{
user_id: 'user-2',
content: 'You have 2 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
]),
// Sort by user_id to ensure a consistent order for a direct `toEqual` comparison.
// This provides a clearer diff on failure than `expect.arrayContaining`.
const sortedPayload = [...notificationPayload].sort((a, b) =>
a.user_id.localeCompare(b.user_id),
);
expect(sortedPayload).toEqual([
{
user_id: 'user-1',
content: 'You have 1 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
{
user_id: 'user-2',
content: 'You have 2 new deal(s) on your watched items!',
link_url: '/dashboard/deals',
updated_at: expect.any(String),
},
]);
});
it('should handle and log errors for individual users without stopping the process', async () => {
@@ -252,7 +295,7 @@ describe('Background Job Service', () => {
vi.mocked(mockWeeklyAnalyticsQueue.add).mockClear();
});
it('should schedule three cron jobs with the correct schedules', () => {
it('should schedule four cron jobs with the correct schedules', () => {
startBackgroundJobs(
mockBackgroundJobService,
mockAnalyticsQueue,

View File

@@ -2,8 +2,9 @@
import cron from 'node-cron';
import type { Logger } from 'pino';
import type { Queue } from 'bullmq';
import { Notification, WatchedItemDeal } from '../types';
import { getSimpleWeekAndYear } from '../utils/dateUtils';
import { formatCurrency } from '../utils/formatUtils';
import { getSimpleWeekAndYear, getCurrentDateISOString } from '../utils/dateUtils';
import type { Notification, WatchedItemDeal } from '../types';
// Import types for repositories from their source files
import type { PersonalizationRepository } from './db/personalization.db';
import type { NotificationRepository } from './db/notification.db';
@@ -25,7 +26,7 @@ export class BackgroundJobService {
) {}
public async triggerAnalyticsReport(): Promise<string> {
const reportDate = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
const reportDate = getCurrentDateISOString(); // YYYY-MM-DD
const jobId = `manual-report-${reportDate}-${Date.now()}`;
const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
return job.id!;
@@ -57,14 +58,16 @@ export class BackgroundJobService {
const dealsListHtml = deals
.map(
(deal) =>
`<li><strong>${deal.item_name}</strong> is on sale for <strong>$${(deal.best_price_in_cents / 100).toFixed(2)}</strong> at ${deal.store_name}!</li>`,
`<li><strong>${deal.item_name}</strong> is on sale for <strong>${formatCurrency(
deal.best_price_in_cents,
)}</strong> at ${deal.store_name}!</li>`,
)
.join('');
const html = `<p>Hi ${recipientName},</p><p>We found some great deals on items you're watching:</p><ul>${dealsListHtml}</ul>`;
const text = `Hi ${recipientName},\n\nWe found some great deals on items you're watching. Visit the deals page on the site to learn more.`;
// Use a predictable Job ID to prevent duplicate email notifications for the same user on the same day.
const today = new Date().toISOString().split('T')[0];
const today = getCurrentDateISOString();
const jobId = `deal-email-${userProfile.user_id}-${today}`;
return {
@@ -82,12 +85,11 @@ export class BackgroundJobService {
private _prepareInAppNotification(
userId: string,
dealCount: number,
): Omit<Notification, 'notification_id' | 'is_read' | 'created_at'> {
): Omit<Notification, 'notification_id' | 'is_read' | 'created_at' | 'updated_at'> {
return {
user_id: userId,
content: `You have ${dealCount} new deal(s) on your watched items!`,
link_url: '/dashboard/deals', // A link to the future "My Deals" page
updated_at: new Date().toISOString(),
};
}
@@ -129,8 +131,10 @@ export class BackgroundJobService {
return acc;
}, {});
const allNotifications: Omit<Notification, 'notification_id' | 'is_read' | 'created_at'>[] =
[];
const allNotifications: Omit<
Notification,
'notification_id' | 'is_read' | 'created_at' | 'updated_at'
>[] = [];
// 3. Process each user's deals in parallel.
const userProcessingPromises = Object.values(dealsByUser).map(
@@ -171,7 +175,11 @@ export class BackgroundJobService {
// 7. Bulk insert all in-app notifications in a single query.
if (allNotifications.length > 0) {
await this.notificationRepo.createBulkNotifications(allNotifications, this.logger);
const notificationsForDb = allNotifications.map((n) => ({
...n,
updated_at: new Date().toISOString(),
}));
await this.notificationRepo.createBulkNotifications(notificationsForDb, this.logger);
this.logger.info(
`[BackgroundJob] Successfully created ${allNotifications.length} in-app notifications.`,
);
@@ -244,7 +252,7 @@ export function startBackgroundJobs(
(async () => {
logger.info('[BackgroundJob] Enqueuing daily analytics report generation job.');
try {
const reportDate = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
const reportDate = getCurrentDateISOString(); // YYYY-MM-DD
// We use a unique job ID to prevent duplicate jobs for the same day if the scheduler restarts.
await analyticsQueue.add(
'generate-daily-report',
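
The predictable deal-email-${userId}-${today} job ID is what makes the enqueue idempotent: BullMQ treats a second add() with an existing jobId as a no-op. A minimal sketch of the pattern (queue name and connection are placeholders):

import { Queue } from 'bullmq';

const emailQueue = new Queue('email', { connection: { host: 'localhost', port: 6379 } });

// A repeat add() with the same jobId is skipped, so at most one
// deal email job exists per user per day.
export async function enqueueDealEmail(userId: string, today: string, payload: object) {
  await emailQueue.add('deal-email', payload, {
    jobId: `deal-email-${userId}-${today}`,
  });
}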

View File

@@ -282,6 +282,95 @@ describe('User DB Service', () => {
});
});
describe('_createUser (private)', () => {
it('should execute queries in order and return a full user profile', async () => {
const mockUser = {
user_id: 'private-user-id',
email: 'private@example.com',
};
const mockDbProfile = {
user_id: 'private-user-id',
email: 'private@example.com',
role: 'user',
full_name: 'Private User',
avatar_url: null,
points: 0,
preferences: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
user_created_at: new Date().toISOString(),
user_updated_at: new Date().toISOString(),
};
const expectedProfile: UserProfile = {
user: {
user_id: mockDbProfile.user_id,
email: mockDbProfile.email,
created_at: mockDbProfile.user_created_at,
updated_at: mockDbProfile.user_updated_at,
},
full_name: 'Private User',
avatar_url: null,
role: 'user',
points: 0,
preferences: null,
created_at: mockDbProfile.created_at,
updated_at: mockDbProfile.updated_at,
};
// Mock the sequence of queries on the client
(mockPoolInstance.query as Mock)
.mockResolvedValueOnce({ rows: [] }) // set_config
.mockResolvedValueOnce({ rows: [mockUser] }) // INSERT user
.mockResolvedValueOnce({ rows: [mockDbProfile] }); // SELECT profile
// Access private method for testing
const result = await (userRepo as any)._createUser(
mockPoolInstance, // Pass the mock client
'private@example.com',
'hashedpass',
{ full_name: 'Private User' },
mockLogger,
);
expect(result).toEqual(expectedProfile);
expect(mockPoolInstance.query).toHaveBeenCalledTimes(3);
expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
1,
"SELECT set_config('my_app.user_metadata', $1, true)",
[JSON.stringify({ full_name: 'Private User' })],
);
expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
2,
'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
['private@example.com', 'hashedpass'],
);
expect(mockPoolInstance.query).toHaveBeenNthCalledWith(
3,
expect.stringContaining('FROM public.users u'),
['private-user-id'],
);
});
it('should throw an error if profile is not found after user creation', async () => {
const mockUser = { user_id: 'no-profile-user', email: 'no-profile@example.com' };
(mockPoolInstance.query as Mock)
.mockResolvedValueOnce({ rows: [] }) // set_config
.mockResolvedValueOnce({ rows: [mockUser] }) // INSERT user
.mockResolvedValueOnce({ rows: [] }); // SELECT profile returns nothing
await expect(
(userRepo as any)._createUser(
mockPoolInstance,
'no-profile@example.com',
'pass',
{},
mockLogger,
),
).rejects.toThrow('Failed to create or retrieve user profile after registration.');
});
});
describe('findUserWithProfileByEmail', () => {
it('should query for a user and their profile by email', async () => {
const mockDbResult: any = {

View File

@@ -61,6 +61,64 @@ export class UserRepository {
}
}
/**
* The internal logic for creating a user. This method assumes it is being run
* within a database transaction and operates on a single PoolClient.
*/
private async _createUser(
dbClient: PoolClient,
email: string,
passwordHash: string | null,
profileData: { full_name?: string; avatar_url?: string },
logger: Logger,
): Promise<UserProfile> {
logger.debug(`[DB _createUser] Starting user creation for email: ${email}`);
await dbClient.query("SELECT set_config('my_app.user_metadata', $1, true)", [
JSON.stringify(profileData ?? {}),
]);
logger.debug(`[DB _createUser] Session metadata set for ${email}.`);
const userInsertRes = await dbClient.query<{ user_id: string; email: string }>(
'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
[email, passwordHash],
);
const newUserId = userInsertRes.rows[0].user_id;
logger.debug(`[DB _createUser] Inserted into users table. New user ID: ${newUserId}`);
const profileQuery = `
SELECT u.user_id, u.email, u.created_at as user_created_at, u.updated_at as user_updated_at, p.full_name, p.avatar_url, p.role, p.points, p.preferences, p.created_at, p.updated_at
FROM public.users u
JOIN public.profiles p ON u.user_id = p.user_id
WHERE u.user_id = $1;
`;
const finalProfileRes = await dbClient.query(profileQuery, [newUserId]);
const flatProfile = finalProfileRes.rows[0];
if (!flatProfile) {
throw new Error('Failed to create or retrieve user profile after registration.');
}
const fullUserProfile: UserProfile = {
user: {
user_id: flatProfile.user_id,
email: flatProfile.email,
created_at: flatProfile.user_created_at,
updated_at: flatProfile.user_updated_at,
},
full_name: flatProfile.full_name,
avatar_url: flatProfile.avatar_url,
role: flatProfile.role,
points: flatProfile.points,
preferences: flatProfile.preferences,
created_at: flatProfile.created_at,
updated_at: flatProfile.updated_at,
};
logger.debug({ user: fullUserProfile }, `[DB _createUser] Fetched full profile for new user:`);
return fullUserProfile;
}
/**
* Creates a new user in the public.users table.
* This method expects to be run within a transaction, so it requires a PoolClient.
@@ -74,60 +132,18 @@ export class UserRepository {
passwordHash: string | null,
profileData: { full_name?: string; avatar_url?: string },
logger: Logger,
// Allow passing a transactional client
client: Pool | PoolClient = this.db,
): Promise<UserProfile> {
// This method is now a wrapper that ensures the core logic runs within a transaction.
try {
logger.debug(`[DB createUser] Starting user creation for email: ${email}`);
// Use 'set_config' to safely pass parameters to a configuration variable.
await client.query("SELECT set_config('my_app.user_metadata', $1, true)", [
JSON.stringify(profileData),
]);
logger.debug(`[DB createUser] Session metadata set for ${email}.`);
// Insert the new user into the 'users' table. This will fire the trigger.
const userInsertRes = await client.query<{ user_id: string }>(
'INSERT INTO public.users (email, password_hash) VALUES ($1, $2) RETURNING user_id, email',
[email, passwordHash],
);
const newUserId = userInsertRes.rows[0].user_id;
logger.debug(`[DB createUser] Inserted into users table. New user ID: ${newUserId}`);
// After the trigger has run, fetch the complete profile data.
const profileQuery = `
SELECT u.user_id, u.email, u.created_at as user_created_at, u.updated_at as user_updated_at, p.full_name, p.avatar_url, p.role, p.points, p.preferences, p.created_at, p.updated_at
FROM public.users u
JOIN public.profiles p ON u.user_id = p.user_id
WHERE u.user_id = $1;
`;
const finalProfileRes = await client.query(profileQuery, [newUserId]);
const flatProfile = finalProfileRes.rows[0];
if (!flatProfile) {
throw new Error('Failed to create or retrieve user profile after registration.');
// Distinguish a transactional PoolClient from a Pool. A PoolClient exposes
// 'release', which a Pool does not (checking for 'connect' would misclassify
// a client, since pg clients implement connect() too).
if ('release' in this.db) {
// this.db is already a PoolClient, so we're inside a transaction. Use it directly.
return await this._createUser(this.db as PoolClient, email, passwordHash, profileData, logger);
} else {
// Otherwise this.db is a Pool, so start a new transaction.
return await withTransaction(async (client) => {
return this._createUser(client, email, passwordHash, profileData, logger);
});
}
// Construct the nested UserProfile object to match the type definition.
const fullUserProfile: UserProfile = {
// user_id is now correctly part of the nested user object, not at the top level.
user: {
user_id: flatProfile.user_id,
email: flatProfile.email,
created_at: flatProfile.user_created_at,
updated_at: flatProfile.user_updated_at,
},
full_name: flatProfile.full_name,
avatar_url: flatProfile.avatar_url,
role: flatProfile.role,
points: flatProfile.points,
preferences: flatProfile.preferences,
created_at: flatProfile.created_at,
updated_at: flatProfile.updated_at,
};
logger.debug({ user: fullUserProfile }, `[DB createUser] Fetched full profile for new user:`);
return fullUserProfile;
} catch (error) {
handleDbError(error, logger, 'Error during createUser', { email }, {
uniqueMessage: 'A user with this email address already exists.',
@@ -136,6 +152,7 @@ export class UserRepository {
}
}
/**
* Finds a user by their email and joins their profile data.
* This is used by the LocalStrategy to get all necessary data for authentication and session creation in one query.
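
The withTransaction helper that createUser now delegates to is imported from elsewhere and not shown in this compare. A minimal sketch of the usual node-postgres pattern, assuming a module-level pool as in connection.db:

import { Pool, PoolClient } from 'pg';

const pool = new Pool(); // assumed module-level pool, as in connection.db

// Sketch of withTransaction: BEGIN/COMMIT around the callback, ROLLBACK on
// any error, and the client is always released back to the pool.
export async function withTransaction<T>(fn: (client: PoolClient) => Promise<T>): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await fn(client);
    await client.query('COMMIT');
    return result;
  } catch (err) {
    await client.query('ROLLBACK');
    throw err;
  } finally {
    client.release();
  }
}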

View File

@@ -77,6 +77,7 @@ export class FlyerDataTransformer {
baseUrl: string | undefined,
logger: Logger,
): { imageUrl: string; iconUrl: string } {
logger.debug({ firstImage, iconFileName, baseUrl }, 'Building URLs');
let finalBaseUrl = baseUrl;
if (!finalBaseUrl) {
const port = process.env.PORT || 3000;
@@ -84,8 +85,10 @@ export class FlyerDataTransformer {
logger.warn(`Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`);
}
finalBaseUrl = finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;
const imageUrl = `${finalBaseUrl}/flyer-images/${path.basename(firstImage)}`;
const imageBasename = path.basename(firstImage);
const imageUrl = `${finalBaseUrl}/flyer-images/${imageBasename}`;
const iconUrl = `${finalBaseUrl}/flyer-images/icons/${iconFileName}`;
logger.debug({ imageUrl, iconUrl, imageBasename }, 'Constructed URLs');
return { imageUrl, iconUrl };
}

View File

@@ -25,6 +25,12 @@ vi.mock('node:fs/promises', async (importOriginal) => {
};
});
// Mock image processor functions
vi.mock('../utils/imageProcessor', () => ({
processAndSaveImage: vi.fn(),
generateFlyerIcon: vi.fn(),
}));
// Import service and dependencies (FlyerJobData already imported from types above)
import { FlyerProcessingService } from './flyerProcessingService.server';
import * as db from './db/index.db';
@@ -42,6 +48,7 @@ import { NotFoundError } from './db/errors.db';
import { FlyerFileHandler } from './flyerFileHandler.server';
import { FlyerAiProcessor } from './flyerAiProcessor.server';
import type { IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
import { processAndSaveImage, generateFlyerIcon } from '../utils/imageProcessor';
import type { AIService } from './aiService.server';
// Mock dependencies
@@ -172,6 +179,10 @@ describe('FlyerProcessingService', () => {
// FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
});
beforeEach(() => {
vi.mocked(processAndSaveImage).mockResolvedValue('processed-flyer.jpg');
vi.mocked(generateFlyerIcon).mockResolvedValue('icon-flyer.webp');
});
const createMockJob = (data: Partial<FlyerJobData>): Job<FlyerJobData> => {
return {
@@ -200,22 +211,49 @@ describe('FlyerProcessingService', () => {
};
describe('processJob (Orchestrator)', () => {
it('should process an image file successfully and enqueue a cleanup job', async () => {
const job = createMockJob({ filePath: '/tmp/flyer.jpg', originalFileName: 'flyer.jpg' });
it('should process an image file successfully, using processed image URLs, and enqueue a cleanup job', async () => {
const job = createMockJob({ filePath: '/tmp/flyer.jpg', originalFileName: 'flyer.jpg', baseUrl: 'http://test.com' });
// Simulate the file handler processing the image and returning the path to the new, cleaned file.
mockFileHandler.prepareImageInputs.mockResolvedValue({
imagePaths: [{ path: '/tmp/flyer-processed.jpeg', mimetype: 'image/jpeg' }],
createdImagePaths: ['/tmp/flyer-processed.jpeg'],
});
const result = await service.processJob(job);
expect(result).toEqual({ flyerId: 1 });
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(job.data.filePath, job, expect.any(Object));
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
// Verify that the transaction function was called.
expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
// Verify that the functions inside the transaction were called.
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
// Assert that the image processing functions were called correctly
// The first image path from prepareImageInputs is the *processed* one.
expect(processAndSaveImage).toHaveBeenCalledWith('/tmp/flyer-processed.jpeg', '/tmp', 'flyer.jpg', expect.any(Object));
// The icon is generated from the *newly processed* image from processAndSaveImage
expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/processed-flyer.jpg', '/tmp/icons', expect.any(Object));
// Assert that createFlyerAndItems was called with the CORRECT, overwritten URLs
const createFlyerAndItemsCall = vi.mocked(createFlyerAndItems).mock.calls[0];
const flyerDataArg = createFlyerAndItemsCall[0]; // The flyerData object
expect(flyerDataArg.image_url).toBe('http://test.com/flyer-images/processed-flyer.jpg');
expect(flyerDataArg.icon_url).toBe('http://test.com/flyer-images/icons/icon-flyer.webp');
expect(mocks.mockAdminLogActivity).toHaveBeenCalledTimes(1);
// Assert that the cleanup job includes all original and generated files
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
'cleanup-flyer-files',
{ flyerId: 1, paths: ['/tmp/flyer.jpg'] },
{
flyerId: 1,
paths: [
'/tmp/flyer.jpg', // original job path
'/tmp/flyer-processed.jpeg', // from prepareImageInputs
'/tmp/processed-flyer.jpg', // from processAndSaveImage
'/tmp/icons/icon-flyer.webp', // from generateFlyerIcon
],
},
expect.any(Object),
);
});
@@ -226,7 +264,10 @@ describe('FlyerProcessingService', () => {
// Mock the file handler to return multiple created paths
const createdPaths = ['/tmp/flyer-1.jpg', '/tmp/flyer-2.jpg'];
mockFileHandler.prepareImageInputs.mockResolvedValue({
imagePaths: createdPaths.map(p => ({ path: p, mimetype: 'image/jpeg' })),
imagePaths: [
{ path: '/tmp/flyer-1.jpg', mimetype: 'image/jpeg' },
{ path: '/tmp/flyer-2.jpg', mimetype: 'image/jpeg' },
],
createdImagePaths: createdPaths,
});
@@ -237,15 +278,17 @@ describe('FlyerProcessingService', () => {
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.pdf', job, expect.any(Object));
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
// Verify cleanup job includes original PDF and both generated images
// Verify cleanup job includes original PDF and all generated/processed images
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
'cleanup-flyer-files',
{
flyerId: 1,
paths: [
'/tmp/flyer.pdf',
'/tmp/flyer-1.jpg',
'/tmp/flyer-2.jpg',
'/tmp/flyer.pdf', // original job path
'/tmp/flyer-1.jpg', // from prepareImageInputs
'/tmp/flyer-2.jpg', // from prepareImageInputs
'/tmp/processed-flyer.jpg', // from processAndSaveImage
'/tmp/icons/icon-flyer.webp', // from generateFlyerIcon
],
},
expect.any(Object),
@@ -387,7 +430,15 @@ describe('FlyerProcessingService', () => {
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
'cleanup-flyer-files',
{ flyerId: 1, paths: ['/tmp/flyer.gif', convertedPath] },
{
flyerId: 1,
paths: [
'/tmp/flyer.gif', // original job path
convertedPath, // from prepareImageInputs
'/tmp/processed-flyer.jpg', // from processAndSaveImage
'/tmp/icons/icon-flyer.webp', // from generateFlyerIcon
],
},
expect.any(Object),
);
});
@@ -633,5 +684,30 @@ describe('FlyerProcessingService', () => {
'Job received no paths and could not derive any from the database. Skipping.',
);
});
it('should derive paths from DB and delete files if job paths are empty', async () => {
const job = createMockCleanupJob({ flyerId: 1, paths: [] }); // Empty paths
const mockFlyer = createMockFlyer({
image_url: 'http://localhost:3000/flyer-images/flyer-abc.jpg',
icon_url: 'http://localhost:3000/flyer-images/icons/icon-flyer-abc.webp',
});
// Mock DB call to return a flyer
vi.mocked(mockedDb.flyerRepo.getFlyerById).mockResolvedValue(mockFlyer);
mocks.unlink.mockResolvedValue(undefined);
// Mock process.env.STORAGE_PATH
vi.stubEnv('STORAGE_PATH', '/var/www/app/flyer-images');
const result = await service.processCleanupJob(job);
expect(result).toEqual({ status: 'success', deletedCount: 2 });
expect(mocks.unlink).toHaveBeenCalledTimes(2);
expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/flyer-abc.jpg');
expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/icons/icon-flyer-abc.webp');
const { logger } = await import('./logger.server');
expect(logger.warn).toHaveBeenCalledWith(
'Cleanup job for flyer 1 received no paths. Attempting to derive paths from DB.',
);
});
});
});

View File

@@ -1,6 +1,5 @@
// src/services/flyerProcessingService.server.ts
import type { Job, Queue } from 'bullmq';
import { UnrecoverableError } from 'bullmq';
import { UnrecoverableError, type Job, type Queue } from 'bullmq';
import path from 'path';
import type { Logger } from 'pino';
import type { FlyerFileHandler, IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
@@ -19,6 +18,7 @@ import {
import { NotFoundError } from './db/errors.db';
import { createFlyerAndItems } from './db/flyer.db';
import { logger as globalLogger } from './logger.server';
import { processAndSaveImage, generateFlyerIcon } from '../utils/imageProcessor';
// Define ProcessingStage locally as it's not exported from the types file.
export type ProcessingStage = {
@@ -80,6 +80,31 @@ export class FlyerProcessingService {
stages[0].detail = `${imagePaths.length} page(s) ready for AI.`;
await job.updateProgress({ stages });
// --- START FIX for Integration Tests ---
// The integration tests upload single-page images (JPG/PNG). We assume the first
// image is the primary one to be processed for metadata stripping and icon generation.
const primaryImage = imagePaths[0];
if (!primaryImage) {
throw new FlyerProcessingError('No processable image found after preparation stage.', 'INPUT_ERROR');
}
const flyerImageDir = path.dirname(primaryImage.path);
// Process the main image to strip metadata and optimize it. This creates a new file.
const processedImageFileName = await processAndSaveImage(
primaryImage.path,
flyerImageDir,
job.data.originalFileName,
logger,
);
const processedImagePath = path.join(flyerImageDir, processedImageFileName);
allFilePaths.push(processedImagePath); // Track the new file for cleanup.
// Generate the icon from the NEWLY PROCESSED image.
const iconsDir = path.join(flyerImageDir, 'icons');
const iconFileName = await generateFlyerIcon(processedImagePath, iconsDir, logger);
allFilePaths.push(path.join(iconsDir, iconFileName)); // Track icon for cleanup.
// Stage 2: Extract Data with AI
stages[1].status = 'in-progress';
await job.updateProgress({ stages });
@@ -101,6 +126,11 @@ export class FlyerProcessingService {
logger,
job.data.baseUrl,
);
// Overwrite the URLs generated by the transformer to point to our processed files.
// This ensures the correct, metadata-stripped image is referenced in the database.
flyerData.image_url = `${job.data.baseUrl}/flyer-images/${processedImageFileName}`;
flyerData.icon_url = `${job.data.baseUrl}/flyer-images/icons/${iconFileName}`;
stages[2].status = 'completed';
await job.updateProgress({ stages });
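
processAndSaveImage comes from src/utils/imageProcessor, which is not included in this compare. A hypothetical sketch of what "strip metadata and optimize" likely means in practice with sharp (re-encoding drops EXIF/ICC data unless .withMetadata() is called):

import path from 'node:path';
import sharp from 'sharp';
import type { Logger } from 'pino';

// Hypothetical sketch only: re-encode the upload so the stored file carries
// no EXIF/ICC metadata, returning the new file name as the callers expect.
export async function processAndSaveImage(
  inputPath: string,
  outputDir: string,
  originalFileName: string,
  logger: Logger,
): Promise<string> {
  const outputFileName = `${path.parse(originalFileName).name}-processed.jpg`;
  await sharp(inputPath)
    .rotate() // apply EXIF orientation before the metadata is dropped
    .jpeg({ quality: 80, mozjpeg: true })
    .toFile(path.join(outputDir, outputFileName));
  logger.debug({ outputFileName }, 'Processed image written');
  return outputFileName; // callers join this with outputDir, as in the diff
}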

View File

@@ -1,15 +1,13 @@
// src/tests/e2e/admin-dashboard.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import * as apiClient from '../../services/apiClient';
import { getPool } from '../../services/db/connection.db';
import { cleanupDb } from '../utils/cleanup';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E Admin Dashboard Flow', () => {
// Use a unique email for every run to avoid collisions
const uniqueId = Date.now();
@@ -21,25 +19,18 @@ describe('E2E Admin Dashboard Flow', () => {
afterAll(async () => {
// Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
if (adminUserId) {
try {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [adminUserId]);
} catch (err) {
console.error('Error cleaning up E2E admin user:', err);
}
}
await cleanupDb({
userIds: [adminUserId],
});
});
it('should allow an admin to log in and access dashboard features', async () => {
// 1. Register a new user (initially a regular user)
const registerResponse = await request.post('/api/auth/register').send({
email: adminEmail,
password: adminPassword,
full_name: 'E2E Admin User',
});
const registerResponse = await apiClient.registerUser(adminEmail, adminPassword, 'E2E Admin User');
expect(registerResponse.status).toBe(201);
const registeredUser = registerResponse.body.userprofile.user;
const registerData = await registerResponse.json();
const registeredUser = registerData.userprofile.user;
adminUserId = registeredUser.user_id;
expect(adminUserId).toBeDefined();
@@ -50,46 +41,43 @@ describe('E2E Admin Dashboard Flow', () => {
]);
// 3. Login to get the access token (now with admin privileges)
const loginResponse = await request.post('/api/auth/login').send({
email: adminEmail,
password: adminPassword,
});
const loginResponse = await apiClient.loginUser(adminEmail, adminPassword, false);
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
const loginData = await loginResponse.json();
authToken = loginData.token;
expect(authToken).toBeDefined();
// Verify the role returned in the login response is now 'admin'
expect(loginResponse.body.userprofile.role).toBe('admin');
expect(loginData.userprofile.role).toBe('admin');
// 4. Fetch System Stats (Protected Admin Route)
const statsResponse = await request
.get('/api/admin/stats')
.set('Authorization', `Bearer ${authToken}`);
const statsResponse = await apiClient.getApplicationStats(authToken);
expect(statsResponse.status).toBe(200);
expect(statsResponse.body).toHaveProperty('userCount');
expect(statsResponse.body).toHaveProperty('flyerCount');
const statsData = await statsResponse.json();
expect(statsData).toHaveProperty('userCount');
expect(statsData).toHaveProperty('flyerCount');
// 5. Fetch User List (Protected Admin Route)
const usersResponse = await request
.get('/api/admin/users')
.set('Authorization', `Bearer ${authToken}`);
const usersResponse = await apiClient.authedGet('/admin/users', { tokenOverride: authToken });
expect(usersResponse.status).toBe(200);
expect(Array.isArray(usersResponse.body)).toBe(true);
const usersData = await usersResponse.json();
expect(Array.isArray(usersData)).toBe(true);
// The list should contain the admin user we just created
const self = usersResponse.body.find((u: any) => u.user_id === adminUserId);
const self = usersData.find((u: any) => u.user_id === adminUserId);
expect(self).toBeDefined();
// 6. Check Queue Status (Protected Admin Route)
const queueResponse = await request
.get('/api/admin/queues/status')
.set('Authorization', `Bearer ${authToken}`);
const queueResponse = await apiClient.authedGet('/admin/queues/status', {
tokenOverride: authToken,
});
expect(queueResponse.status).toBe(200);
expect(Array.isArray(queueResponse.body)).toBe(true);
const queueData = await queueResponse.json();
expect(Array.isArray(queueData)).toBe(true);
// Verify that the 'flyer-processing' queue is present in the status report
const flyerQueue = queueResponse.body.find((q: any) => q.name === 'flyer-processing');
const flyerQueue = queueData.find((q: any) => q.name === 'flyer-processing');
expect(flyerQueue).toBeDefined();
expect(flyerQueue.counts).toBeDefined();
});

View File

@@ -2,6 +2,7 @@
import { describe, it, expect, afterAll, beforeAll } from 'vitest';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
import type { UserProfile } from '../../types';
@@ -178,34 +179,26 @@ describe('Authentication E2E Flow', () => {
expect(registerResponse.status).toBe(201);
createdUserIds.push(registerData.userprofile.user.user_id);
// Instead of a fixed delay, poll by attempting to log in. This is more robust
// and confirms the user record is committed and readable by subsequent transactions.
let loginSuccess = false;
for (let i = 0; i < 10; i++) {
// Poll for up to 10 seconds
const loginResponse = await apiClient.loginUser(email, TEST_PASSWORD, false);
if (loginResponse.ok) {
loginSuccess = true;
break;
}
await new Promise((resolve) => setTimeout(resolve, 1000));
}
expect(loginSuccess, 'User should be able to log in after registration. DB might be lagging.').toBe(true);
// Poll until the user can log in, confirming the record has propagated.
await poll(
() => apiClient.loginUser(email, TEST_PASSWORD, false),
(response) => response.ok,
{ timeout: 10000, interval: 1000, description: 'user login after registration' },
);
// Act 1: Request a password reset
const forgotResponse = await apiClient.requestPasswordReset(email);
const forgotData = await forgotResponse.json();
const resetToken = forgotData.token;
// --- DEBUG SECTION FOR FAILURE ---
if (!resetToken) {
console.error(' [DEBUG FAILURE] Token missing in response:', JSON.stringify(forgotData, null, 2));
console.error(' [DEBUG FAILURE] This usually means the backend hit a DB error or is not in NODE_ENV=test mode.');
}
// ---------------------------------
// Poll for the password reset token.
const { response: forgotResponse, token: resetToken } = await poll(
async () => {
const response = await apiClient.requestPasswordReset(email);
// Clone to read body without consuming the original response stream
const data = response.ok ? await response.clone().json() : {};
return { response, token: data.token };
},
(result) => !!result.token,
{ timeout: 10000, interval: 1000, description: 'password reset token generation' },
);
// Assert 1: Check that we received a token.
expect(forgotResponse.status).toBe(200);
expect(resetToken, 'Backend returned 200 but no token. Check backend logs for "Connection terminated" errors.').toBeDefined();
expect(resetToken).toBeTypeOf('string');

View File

@@ -1,18 +1,16 @@
// src/tests/e2e/flyer-upload.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import crypto from 'crypto';
import * as apiClient from '../../services/apiClient';
import path from 'path';
import fs from 'fs';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E Flyer Upload and Processing Workflow', () => {
const uniqueId = Date.now();
const userEmail = `e2e-uploader-${uniqueId}@example.com`;
@@ -23,33 +21,24 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
let flyerId: number | null = null;
afterAll(async () => {
// Cleanup: Delete the flyer and user created during the test
const pool = getPool();
if (flyerId) {
await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [flyerId]);
}
if (userId) {
await pool.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
}
// Use the centralized cleanup utility for robustness.
await cleanupDb({
userIds: [userId],
flyerIds: [flyerId],
});
});
it('should allow a user to upload a flyer and wait for processing to complete', async () => {
// 1. Register a new user
const registerResponse = await request.post('/api/auth/register').send({
email: userEmail,
password: userPassword,
full_name: 'E2E Flyer Uploader',
});
const registerResponse = await apiClient.registerUser(userEmail, userPassword, 'E2E Flyer Uploader');
expect(registerResponse.status).toBe(201);
// 2. Login to get the access token
const loginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
const loginResponse = await apiClient.loginUser(userEmail, userPassword, false);
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
userId = loginResponse.body.userprofile.user.user_id;
const loginData = await loginResponse.json();
authToken = loginData.token;
userId = loginData.userprofile.user.user_id;
expect(authToken).toBeDefined();
// 3. Prepare the flyer file
@@ -73,34 +62,37 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
]);
}
// Create a File object for the apiClient
// FIX: The Node.js `Buffer` type can be incompatible with the web `File` API's
// expected `BlobPart` type in some TypeScript configurations. Explicitly creating
// a `Uint8Array` from the buffer ensures compatibility and resolves the type error.
// `Uint8Array` is a valid `BufferSource`, which is a valid `BlobPart`.
const flyerFile = new File([new Uint8Array(fileBuffer)], fileName, { type: 'image/jpeg' });
// Calculate checksum (required by the API)
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
// 4. Upload the flyer
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${authToken}`)
.field('checksum', checksum)
.attach('flyerFile', fileBuffer, fileName);
const uploadResponse = await apiClient.uploadAndProcessFlyer(flyerFile, checksum, authToken);
expect(uploadResponse.status).toBe(202);
const jobId = uploadResponse.body.jobId;
const uploadData = await uploadResponse.json();
const jobId = uploadData.jobId;
expect(jobId).toBeDefined();
// 5. Poll for job completion
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3s
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
// 5. Poll for job completion using the new utility
const jobStatus = await poll(
async () => {
const statusResponse = await apiClient.getJobStatus(jobId, authToken);
return statusResponse.json();
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'flyer processing job completion' },
);
if (jobStatus.state === 'failed') {
// Log the failure reason for easier debugging in CI/CD environments.
console.error('E2E flyer processing job failed. Reason:', jobStatus.failedReason);
}
expect(jobStatus.state).toBe('completed');

View File

@@ -1,15 +1,12 @@
// src/tests/e2e/user-journey.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import * as apiClient from '../../services/apiClient';
import { cleanupDb } from '../utils/cleanup';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E User Journey', () => {
// Use a unique email for every run to avoid collisions
const uniqueId = Date.now();
@@ -23,65 +20,54 @@ describe('E2E User Journey', () => {
afterAll(async () => {
// Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
// If the test succeeds, the user deletes their own account, so this acts as a fallback.
if (userId) {
try {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [userId]);
} catch (err) {
console.error('Error cleaning up E2E test user:', err);
}
}
await cleanupDb({
userIds: [userId],
});
});
it('should complete a full user lifecycle: Register -> Login -> Manage List -> Delete Account', async () => {
// 1. Register a new user
const registerResponse = await request.post('/api/auth/register').send({
email: userEmail,
password: userPassword,
full_name: 'E2E Traveler',
});
const registerResponse = await apiClient.registerUser(userEmail, userPassword, 'E2E Traveler');
expect(registerResponse.status).toBe(201);
expect(registerResponse.body.message).toBe('User registered successfully!');
const registerData = await registerResponse.json();
expect(registerData.message).toBe('User registered successfully!');
// 2. Login to get the access token
const loginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
const loginResponse = await apiClient.loginUser(userEmail, userPassword, false);
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
userId = loginResponse.body.userprofile.user.user_id;
const loginData = await loginResponse.json();
authToken = loginData.token;
userId = loginData.userprofile.user.user_id;
expect(authToken).toBeDefined();
expect(userId).toBeDefined();
// 3. Create a Shopping List
const createListResponse = await request
.post('/api/users/shopping-lists')
.set('Authorization', `Bearer ${authToken}`)
.send({ name: 'E2E Party List' });
const createListResponse = await apiClient.createShoppingList('E2E Party List', authToken);
expect(createListResponse.status).toBe(201);
shoppingListId = createListResponse.body.shopping_list_id;
const createListData = await createListResponse.json();
shoppingListId = createListData.shopping_list_id;
expect(shoppingListId).toBeDefined();
// 4. Add an item to the list
const addItemResponse = await request
.post(`/api/users/shopping-lists/${shoppingListId}/items`)
.set('Authorization', `Bearer ${authToken}`)
.send({ customItemName: 'Chips' });
const addItemResponse = await apiClient.addShoppingListItem(
shoppingListId,
{ customItemName: 'Chips' },
authToken,
);
expect(addItemResponse.status).toBe(201);
expect(addItemResponse.body.custom_item_name).toBe('Chips');
const addItemData = await addItemResponse.json();
expect(addItemData.custom_item_name).toBe('Chips');
// 5. Verify the list and item exist via GET
const getListsResponse = await request
.get('/api/users/shopping-lists')
.set('Authorization', `Bearer ${authToken}`);
const getListsResponse = await apiClient.fetchShoppingLists(authToken);
expect(getListsResponse.status).toBe(200);
const myLists = getListsResponse.body;
const myLists = await getListsResponse.json();
const targetList = myLists.find((l: any) => l.shopping_list_id === shoppingListId);
expect(targetList).toBeDefined();
@@ -89,19 +75,16 @@ describe('E2E User Journey', () => {
expect(targetList.items[0].custom_item_name).toBe('Chips');
// 6. Delete the User Account (Self-Service)
const deleteAccountResponse = await request
.delete('/api/users/account')
.set('Authorization', `Bearer ${authToken}`)
.send({ password: userPassword });
const deleteAccountResponse = await apiClient.deleteUserAccount(userPassword, {
tokenOverride: authToken,
});
expect(deleteAccountResponse.status).toBe(200);
expect(deleteAccountResponse.body.message).toBe('Account deleted successfully.');
const deleteData = await deleteAccountResponse.json();
expect(deleteData.message).toBe('Account deleted successfully.');
// 7. Verify Login is no longer possible
const failLoginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
const failLoginResponse = await apiClient.loginUser(userEmail, userPassword, false);
expect(failLoginResponse.status).toBe(401);

View File

@@ -6,6 +6,7 @@ import { getPool } from '../../services/db/connection.db';
import { logger } from '../../services/logger.server';
import type { UserProfile } from '../../types';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
describe('Database Service Integration Tests', () => {
let testUser: UserProfile;
@@ -26,6 +27,13 @@ describe('Database Service Integration Tests', () => {
{ full_name: fullName },
logger,
);
// Poll to ensure the user record is findable before tests run.
await poll(
() => db.userRepo.findUserByEmail(testUserEmail, logger),
(foundUser) => !!foundUser,
{ timeout: 5000, interval: 500, description: `user ${testUserEmail} to be findable` },
);
});
afterEach(async () => {

View File

@@ -1,5 +1,5 @@
// src/tests/integration/flyer-processing.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { describe, it, expect, beforeAll, afterAll, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import fs from 'node:fs/promises';
@@ -11,6 +11,7 @@ import { logger } from '../../services/logger.server';
import type { UserProfile, ExtractedFlyerItem } from '../../types';
import { createAndLoginUser } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { cleanupFiles } from '../utils/cleanupFiles';
import piexif from 'piexifjs';
import exifParser from 'exif-parser';
@@ -55,7 +56,16 @@ describe('Flyer Processing Background Job Integration Test', () => {
const createdFilePaths: string[] = [];
beforeAll(async () => {
// Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
// FIX: Stub FRONTEND_URL to ensure valid absolute URLs (http://...) are generated
// for the database, satisfying the 'url_check' constraint.
vi.stubEnv('FRONTEND_URL', 'http://localhost:3000');
});
// FIX: Reset mocks before each test to ensure isolation.
// This prevents "happy path" mocks from leaking into error handling tests and vice versa.
beforeEach(async () => {
// 1. Reset AI Service Mock to default success state
mockExtractCoreData.mockReset();
mockExtractCoreData.mockResolvedValue({
store_name: 'Mock Store',
valid_from: null,
@@ -71,9 +81,17 @@ describe('Flyer Processing Background Job Integration Test', () => {
},
],
});
// 2. Restore DB Service Mock to real implementation
// This ensures that unless a test specifically mocks a failure, the DB logic works as expected.
const actual = await vi.importActual<typeof import('../../services/db/flyer.db')>('../../services/db/flyer.db');
vi.mocked(createFlyerAndItems).mockReset();
vi.mocked(createFlyerAndItems).mockImplementation(actual.createFlyerAndItems);
});
afterAll(async () => {
vi.unstubAllEnvs(); // Clean up env stubs
// Use the centralized cleanup utility.
await cleanupDb({
userIds: createdUserIds,
@@ -96,7 +114,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
// This prevents a 409 Conflict error when the second test runs.
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -120,25 +138,19 @@ describe('Flyer Processing Background Job Integration Test', () => {
// Assert 1: Check that a job ID was returned.
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it completes.
let jobStatus;
// Poll for up to 210 seconds (70 * 3s). This should be greater than the worker's
// lockDuration (120s) to patiently wait for long-running jobs.
const maxRetries = 70;
for (let i = 0; i < maxRetries; i++) {
console.log(`Polling attempt ${i + 1}...`);
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3 seconds between polls
const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
if (token) {
statusReq.set('Authorization', `Bearer ${token}`);
}
const statusResponse = await statusReq;
jobStatus = statusResponse.body;
console.log(`Job status: ${JSON.stringify(jobStatus)}`);
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
if (token) {
statusReq.set('Authorization', `Bearer ${token}`);
}
const statusResponse = await statusReq;
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 210000, interval: 3000, description: 'flyer processing' },
);
// Assert 2: Check that the job completed successfully.
if (jobStatus?.state === 'failed') {
@@ -220,7 +232,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary');
const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`;
const mockImageFile = new File([imageWithExifBuffer], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(imageWithExifBuffer)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track original and derived files for cleanup
@@ -239,19 +251,17 @@ describe('Flyer Processing Background Job Integration Test', () => {
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Poll for job completion
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'EXIF stripping job' },
);
// 3. Assert
if (jobStatus?.state === 'failed') {
@@ -306,7 +316,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
.toBuffer();
const uniqueFileName = `test-flyer-with-metadata-${Date.now()}.png`;
const mockImageFile = new File([Buffer.from(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' });
const mockImageFile = new File([new Uint8Array(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' });
const checksum = await generateFileChecksum(mockImageFile);
// Track files for cleanup
@@ -325,19 +335,17 @@ describe('Flyer Processing Background Job Integration Test', () => {
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Poll for job completion
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${token}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'PNG metadata stripping job' },
);
// 3. Assert job completion
if (jobStatus?.state === 'failed') {
@@ -376,7 +384,7 @@ it(
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`fail-test-${Date.now()}`)]);
const uniqueFileName = `ai-fail-test-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -392,17 +400,15 @@ it(
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it completes or fails.
let jobStatus;
const maxRetries = 60;
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'AI failure test job' },
);
// Assert 1: Check that the job failed.
expect(jobStatus?.state).toBe('failed');
@@ -427,7 +433,7 @@ it(
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`db-fail-test-${Date.now()}`)]);
const uniqueFileName = `db-fail-test-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -443,17 +449,15 @@ it(
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it completes or fails.
let jobStatus;
const maxRetries = 60;
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'DB failure test job' },
);
// Assert 1: Check that the job failed.
expect(jobStatus?.state).toBe('failed');
@@ -481,7 +485,7 @@ it(
Buffer.from(`cleanup-fail-test-${Date.now()}`),
]);
const uniqueFileName = `cleanup-fail-test-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track the path of the file that will be created in the uploads directory.
@@ -498,17 +502,15 @@ it(
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// Act 2: Poll for the job status until it fails.
let jobStatus;
const maxRetries = 60;
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'failed') {
break;
}
}
// Act 2: Poll for job completion using the new utility.
const jobStatus = await poll(
async () => {
const statusResponse = await request.get(`/api/ai/jobs/${jobId}/status`);
return statusResponse.body;
},
(status) => status.state === 'failed', // We expect this one to fail
{ timeout: 180000, interval: 3000, description: 'file cleanup failure test job' },
);
// Assert 1: Check that the job actually failed.
expect(jobStatus?.state).toBe('failed');

View File

@@ -11,6 +11,7 @@ import * as db from '../../services/db/index.db';
import { cleanupDb } from '../utils/cleanup';
import { logger } from '../../services/logger.server';
import * as imageProcessor from '../../utils/imageProcessor';
import { poll } from '../utils/poll';
import type {
UserProfile,
UserAchievement,
@@ -66,6 +67,10 @@ describe('Gamification Flow Integration Test', () => {
request,
}));
// Stub environment variables for URL generation in the background worker.
    // This lives in beforeAll so the stub is in place before any test in the suite
    // (or the background worker it triggers) reads the variable at runtime.
vi.stubEnv('FRONTEND_URL', 'http://localhost:3001');
// Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
mockExtractCoreData.mockResolvedValue({
store_name: 'Gamification Test Store',
@@ -96,16 +101,12 @@ describe('Gamification Flow Integration Test', () => {
it(
'should award the "First Upload" achievement after a user successfully uploads and processes their first flyer',
async () => {
// --- Arrange: Stub environment variables for URL generation in the background worker ---
const testBaseUrl = 'http://localhost:3001'; // Use a fixed port for predictability
vi.stubEnv('FRONTEND_URL', testBaseUrl);
// --- Arrange: Prepare a unique flyer file for upload ---
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
const uniqueFileName = `gamification-test-flyer-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup
@@ -124,20 +125,19 @@ describe('Gamification Flow Integration Test', () => {
const { jobId } = uploadResponse.body;
expect(jobId).toBeTypeOf('string');
// --- Act 2: Poll for job completion ---
let jobStatus;
const maxRetries = 60; // Poll for up to 180 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000));
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
if (!jobStatus) {
// --- Act 2: Poll for job completion using the new utility ---
const jobStatus = await poll(
async () => {
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
return statusResponse.body;
},
(status) => status.state === 'completed' || status.state === 'failed',
{ timeout: 180000, interval: 3000, description: 'gamification flyer processing' },
);
if (!jobStatus) {
console.error('[DEBUG] Gamification test job timed out: No job status received.');
throw new Error('Gamification test job timed out: No job status received.');
}
@@ -187,8 +187,6 @@ describe('Gamification Flow Integration Test', () => {
firstUploadAchievement!.points_value,
);
// --- Cleanup ---
vi.unstubAllEnvs();
},
240000, // Increase timeout to 240s to match other long-running processing tests
);
@@ -196,10 +194,6 @@ describe('Gamification Flow Integration Test', () => {
describe('Legacy Flyer Upload', () => {
it('should process a legacy upload and save fully qualified URLs to the database', async () => {
// --- Arrange ---
// 1. Stub environment variables to have a predictable base URL for the test.
const testBaseUrl = 'https://cdn.example.com';
vi.stubEnv('FRONTEND_URL', testBaseUrl);
// 2. Mock the icon generator to return a predictable filename.
vi.mocked(imageProcessor.generateFlyerIcon).mockResolvedValue('legacy-icon.webp');
@@ -207,7 +201,7 @@ describe('Gamification Flow Integration Test', () => {
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const uniqueFileName = `legacy-upload-test-${Date.now()}.jpg`;
const mockImageFile = new File([imageBuffer], uniqueFileName, { type: 'image/jpeg' });
const mockImageFile = new File([new Uint8Array(imageBuffer)], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Track created files for cleanup.
@@ -257,11 +251,9 @@ describe('Gamification Flow Integration Test', () => {
createdStoreIds.push(savedFlyer.store_id!); // Add for cleanup.
// 8. Assert that the URLs are fully qualified.
expect(savedFlyer.image_url).to.equal(`${testBaseUrl}/flyer-images/${uniqueFileName}`);
expect(savedFlyer.icon_url).to.equal(`${testBaseUrl}/flyer-images/icons/legacy-icon.webp`);
// --- Cleanup ---
vi.unstubAllEnvs();
expect(savedFlyer.image_url).to.equal(newFlyer.image_url);
expect(savedFlyer.icon_url).to.equal(newFlyer.icon_url);
expect(newFlyer.image_url).toContain('http://localhost:3001/flyer-images/');
});
});
});

View File

@@ -13,6 +13,7 @@ import type {
} from '../../types';
import { getPool } from '../../services/db/connection.db';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { createAndLoginUser } from '../utils/testHelpers';
/**
@@ -42,27 +43,12 @@ describe('Public API Routes Integration Tests', () => {
});
testUser = createdUser;
// DEBUG: Verify user existence in DB
console.log(`[DEBUG] createAndLoginUser returned user ID: ${testUser.user.user_id}`);
const userCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
console.log(`[DEBUG] DB check for user found ${userCheck.rowCount ?? 0} rows.`);
if (!userCheck.rowCount) {
console.error(`[DEBUG] CRITICAL: User ${testUser.user.user_id} does not exist in public.users table! Attempting to wait...`);
// Wait loop to ensure user persistence if there's a race condition
for (let i = 0; i < 5; i++) {
await new Promise((resolve) => setTimeout(resolve, 500));
const retryCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
if (retryCheck.rowCount && retryCheck.rowCount > 0) {
console.log(`[DEBUG] User found after retry ${i + 1}`);
break;
}
}
}
// Final check before proceeding to avoid FK error
const finalCheck = await pool.query('SELECT user_id FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
if (!finalCheck.rowCount) {
throw new Error(`User ${testUser.user.user_id} failed to persist in DB. Cannot continue test.`);
}
// Poll to ensure the user record has propagated before creating dependent records.
await poll(
() => pool.query('SELECT 1 FROM public.users WHERE user_id = $1', [testUser.user.user_id]),
(result) => (result.rowCount ?? 0) > 0,
{ timeout: 5000, interval: 500, description: `user ${testUser.user.user_id} to persist` },
);
// Create a recipe
const recipeRes = await pool.query(

View File

@@ -5,6 +5,7 @@ import * as bcrypt from 'bcrypt';
import { getPool } from '../../services/db/connection.db';
import type { ShoppingList } from '../../types';
import { logger } from '../../services/logger.server';
import { poll } from '../utils/poll';
describe('Shopping List DB Service Tests', () => {
it('should create and retrieve a shopping list for a user', async ({ onTestFinished }) => {
@@ -19,6 +20,12 @@ describe('Shopping List DB Service Tests', () => {
);
const testUserId = userprofile.user.user_id;
// Poll to ensure the user record has propagated before creating dependent records.
await poll(
() => getPool().query('SELECT 1 FROM public.users WHERE user_id = $1', [testUserId]),
(result) => (result.rowCount ?? 0) > 0,
{ timeout: 5000, interval: 500, description: `user ${testUserId} to persist` },
);
onTestFinished(async () => {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUserId]);
});
@@ -51,6 +58,13 @@ describe('Shopping List DB Service Tests', () => {
);
const testUserId = userprofile.user.user_id;
// Poll to ensure the user record has propagated before creating dependent records.
await poll(
() => getPool().query('SELECT 1 FROM public.users WHERE user_id = $1', [testUserId]),
(result) => (result.rowCount ?? 0) > 0,
{ timeout: 5000, interval: 500, description: `user ${testUserId} to persist` },
);
onTestFinished(async () => {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUserId]);
});

View File

@@ -1,12 +1,15 @@
// src/tests/integration/user.integration.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import supertest from 'supertest';
import path from 'path';
import fs from 'node:fs/promises';
import app from '../../../server';
import { logger } from '../../services/logger.server';
import { getPool } from '../../services/db/connection.db';
import type { UserProfile, MasterGroceryItem, ShoppingList } from '../../types';
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { cleanupFiles } from '../utils/cleanupFiles';
/**
* @vitest-environment node
@@ -33,6 +36,25 @@ describe('User API Routes Integration Tests', () => {
// This now cleans up ALL users created by this test suite to prevent pollution.
afterAll(async () => {
await cleanupDb({ userIds: createdUserIds });
// Safeguard to clean up any avatar files created during tests.
const uploadDir = path.resolve(__dirname, '../../../uploads/avatars');
try {
const allFiles = await fs.readdir(uploadDir);
// Filter for any file that contains any of the user IDs created in this test suite.
const testFiles = allFiles
.filter((f) => createdUserIds.some((userId) => userId && f.includes(userId)))
.map((f) => path.join(uploadDir, f));
if (testFiles.length > 0) {
await cleanupFiles(testFiles);
}
} catch (error) {
// Ignore if the directory doesn't exist, but log other errors.
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
console.error('Error during user integration test avatar file cleanup:', error);
}
}
});
it('should fetch the authenticated user profile via GET /api/users/profile', async () => {
@@ -295,4 +317,64 @@ describe('User API Routes Integration Tests', () => {
);
});
});
it('should allow a user to upload an avatar image and update their profile', async () => {
// Arrange: Path to a dummy image file
const dummyImagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
// Act: Make the POST request to upload the avatar
const response = await request
.post('/api/users/profile/avatar')
.set('Authorization', `Bearer ${authToken}`)
.attach('avatar', dummyImagePath);
// Assert: Check the response
expect(response.status).toBe(200);
const updatedProfile = response.body;
expect(updatedProfile.avatar_url).toBeDefined();
expect(updatedProfile.avatar_url).not.toBeNull();
expect(updatedProfile.avatar_url).toContain('/uploads/avatars/avatar-');
// Assert (Verification): Fetch the profile again to ensure the change was persisted
const verifyResponse = await request
.get('/api/users/profile')
.set('Authorization', `Bearer ${authToken}`);
const refetchedProfile = verifyResponse.body;
expect(refetchedProfile.avatar_url).toBe(updatedProfile.avatar_url);
});
it('should reject avatar upload for an invalid file type', async () => {
// Arrange: Create a buffer representing a text file.
const invalidFileBuffer = Buffer.from('This is not an image file.');
const invalidFileName = 'test.txt';
// Act: Attempt to upload the text file to the avatar endpoint.
const response = await request
.post('/api/users/profile/avatar')
.set('Authorization', `Bearer ${authToken}`)
.attach('avatar', invalidFileBuffer, invalidFileName);
// Assert: Check for a 400 Bad Request response.
// This error comes from the multer fileFilter configuration in the route.
expect(response.status).toBe(400);
expect(response.body.message).toBe('Only image files are allowed!');
});
it('should reject avatar upload for a file that is too large', async () => {
// Arrange: Create a buffer larger than the configured limit (e.g., > 1MB).
// The limit is set in the multer middleware in `user.routes.ts`.
// We'll create a 2MB buffer to be safe.
const largeFileBuffer = Buffer.alloc(2 * 1024 * 1024, 'a');
const largeFileName = 'large-avatar.jpg';
// Act: Attempt to upload the large file.
const response = await request
.post('/api/users/profile/avatar')
.set('Authorization', `Bearer ${authToken}`)
.attach('avatar', largeFileBuffer, largeFileName);
// Assert: Check for a 400 Bad Request response from the multer error handler.
expect(response.status).toBe(400);
expect(response.body.message).toBe('File too large');
});
});
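
A minimal sketch of the multer setup these two rejection tests assume. The real configuration lives in `user.routes.ts`; the middleware name and the exact 1 MB limit here are illustrative assumptions, not part of this diff:

// Hypothetical configuration matching the test expectations above.
import multer from 'multer';

const avatarUpload = multer({
  storage: multer.memoryStorage(),
  // Exceeding this limit makes multer emit its built-in 'File too large' error,
  // which the route's error handler surfaces as a 400 response.
  limits: { fileSize: 1 * 1024 * 1024 },
  fileFilter: (_req, file, cb) => {
    if (!file.mimetype.startsWith('image/')) {
      // Rejecting here produces the 400 'Only image files are allowed!' response.
      return cb(new Error('Only image files are allowed!'));
    }
    cb(null, true);
  },
});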

View File

@@ -1,85 +1,57 @@
// src/tests/utils/cleanup.ts
import { getPool } from '../../services/db/connection.db';
import { logger } from '../../services/logger.server';
import fs from 'node:fs/promises';
import path from 'path';
export interface TestResourceIds {
userIds?: string[];
flyerIds?: number[];
storeIds?: number[];
recipeIds?: number[];
masterItemIds?: number[];
budgetIds?: number[];
interface CleanupOptions {
userIds?: (string | null | undefined)[];
flyerIds?: (number | null | undefined)[];
storeIds?: (number | null | undefined)[];
recipeIds?: (number | null | undefined)[];
budgetIds?: (number | null | undefined)[];
}
/**
* A robust cleanup utility for integration tests.
* It deletes entities in the correct order to avoid foreign key violations.
* It's designed to be called in an `afterAll` hook.
*
* @param ids An object containing arrays of IDs for each resource type to clean up.
* A centralized utility to clean up database records created during tests.
* It deletes records in an order that respects foreign key constraints.
 * It performs all deletes on a single client connection without a transaction;
 * if a delete fails, the error is logged rather than re-thrown, so a cleanup
 * failure does not fail the test suite (the remaining deletes in that call are
 * skipped).
*/
export const cleanupDb = async (ids: TestResourceIds) => {
export const cleanupDb = async (options: CleanupOptions) => {
const pool = getPool();
logger.info('[Test Cleanup] Starting database resource cleanup...');
const {
userIds = [],
flyerIds = [],
storeIds = [],
recipeIds = [],
masterItemIds = [],
budgetIds = [],
} = ids;
const client = await pool.connect();
try {
// --- Stage 1: Delete most dependent records ---
// These records depend on users, recipes, flyers, etc.
if (userIds.length > 0) {
await pool.query('DELETE FROM public.recipe_comments WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.suggested_corrections WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.shopping_lists WHERE user_id = ANY($1::uuid[])', [userIds]); // Assumes shopping_list_items cascades
await pool.query('DELETE FROM public.user_watched_items WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.user_achievements WHERE user_id = ANY($1::uuid[])', [userIds]);
await pool.query('DELETE FROM public.activity_log WHERE user_id = ANY($1::uuid[])', [userIds]);
// Order of deletion matters to avoid foreign key violations.
// Children entities first, then parents.
if (options.budgetIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [options.budgetIds]);
logger.debug(`Cleaned up ${options.budgetIds.length} budget(s).`);
}
// --- Stage 2: Delete parent records that other things depend on ---
if (recipeIds.length > 0) {
await pool.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [recipeIds]);
if (options.recipeIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [options.recipeIds]);
logger.debug(`Cleaned up ${options.recipeIds.length} recipe(s).`);
}
// Flyers might be created by users, but we clean them up separately.
// flyer_items should cascade from this.
if (flyerIds.length > 0) {
await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::bigint[])', [flyerIds]);
if (options.flyerIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::int[])', [options.flyerIds]);
logger.debug(`Cleaned up ${options.flyerIds.length} flyer(s).`);
}
// Stores are parents of flyers, so they come after.
if (storeIds.length > 0) {
await pool.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [storeIds]);
if (options.storeIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [options.storeIds]);
logger.debug(`Cleaned up ${options.storeIds.length} store(s).`);
}
// Master items are parents of flyer_items and watched_items.
if (masterItemIds.length > 0) {
await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = ANY($1::int[])', [masterItemIds]);
if (options.userIds?.filter(Boolean).length) {
await client.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [options.userIds]);
logger.debug(`Cleaned up ${options.userIds.length} user(s).`);
}
// Budgets are parents of nothing, but depend on users.
if (budgetIds.length > 0) {
await pool.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [budgetIds]);
}
// --- Stage 3: Delete the root user records ---
if (userIds.length > 0) {
const { rowCount } = await pool.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]);
logger.info(`[Test Cleanup] Cleaned up ${rowCount} user(s).`);
}
logger.info('[Test Cleanup] Finished database resource cleanup successfully.');
} catch (error) {
logger.error({ error }, '[Test Cleanup] CRITICAL: An error occurred during database cleanup.');
throw error; // Re-throw to fail the test suite
logger.error({ error, options }, 'A database cleanup operation failed.');
} finally {
client.release();
}
};
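
If per-table fault isolation is preferred (so one failed delete does not skip the remaining tables), each statement could be routed through a small guard. A sketch under that assumption; `tryDelete` is hypothetical and not in this commit:

// Hypothetical variant: run one delete, log and continue on failure.
// `logger` is the module-level logger imported at the top of cleanup.ts.
const tryDelete = async (
  client: import('pg').PoolClient,
  sql: string,
  ids: unknown[],
): Promise<void> => {
  if (ids.length === 0) return;
  try {
    await client.query(sql, [ids]);
  } catch (error) {
    logger.error({ error, sql }, 'Cleanup statement failed; continuing with the rest.');
  }
};

// Inside cleanupDb, each block would then collapse to one call, e.g.:
// await tryDelete(client, 'DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', options.budgetIds?.filter(Boolean) ?? []);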

View File

@@ -1,48 +1,30 @@
// src/tests/utils/cleanupFiles.ts
import fs from 'node:fs/promises';
import path from 'path';
import { logger } from '../../services/logger.server';
/**
* Safely cleans up files from the filesystem.
* Designed to be used in `afterAll` or `afterEach` hooks in integration tests.
*
* @param filePaths An array of file paths to clean up.
* A centralized utility to clean up files created during tests.
* It iterates through a list of file paths and attempts to delete each one.
* It gracefully handles errors for files that don't exist (e.g., already deleted
* or never created due to a test failure).
*/
export const cleanupFiles = async (filePaths: string[]) => {
if (!filePaths || filePaths.length === 0) {
logger.info('[Test Cleanup] No file paths provided for cleanup.');
export const cleanupFiles = async (filePaths: (string | undefined | null)[]) => {
const validPaths = filePaths.filter((p): p is string => !!p);
if (validPaths.length === 0) {
return;
}
logger.info(`[Test Cleanup] Starting filesystem cleanup for ${filePaths.length} file(s)...`);
logger.debug(`Cleaning up ${validPaths.length} test-created file(s)...`);
try {
await Promise.all(
filePaths.map(async (filePath) => {
try {
await fs.unlink(filePath);
logger.debug(`[Test Cleanup] Successfully deleted file: ${filePath}`);
} catch (err: any) {
// Ignore "file not found" errors, but log other errors.
if (err.code === 'ENOENT') {
logger.debug(`[Test Cleanup] File not found, skipping: ${filePath}`);
} else {
logger.warn(
{ err, filePath },
'[Test Cleanup] Failed to clean up file from filesystem.',
);
}
}
}),
);
const cleanupPromises = validPaths.map(async (filePath) => {
try {
await fs.unlink(filePath);
} catch (error) {
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
logger.error({ error, filePath }, 'Failed to delete test file during cleanup.');
}
}
});
logger.info('[Test Cleanup] Finished filesystem cleanup successfully.');
} catch (error) {
logger.error(
{ error },
'[Test Cleanup] CRITICAL: An error occurred during filesystem cleanup.',
);
throw error; // Re-throw to fail the test suite if cleanup fails
}
await Promise.allSettled(cleanupPromises);
};

src/tests/utils/poll.ts (new file, +36 lines)
View File

@@ -0,0 +1,36 @@
// src/tests/utils/poll.ts
interface PollOptions {
/** The maximum time to wait in milliseconds. Defaults to 10000 (10 seconds). */
timeout?: number;
/** The interval between attempts in milliseconds. Defaults to 500. */
interval?: number;
/** A description of the operation for better error messages. */
description?: string;
}
/**
* A generic polling utility for asynchronous operations in tests.
*
* @param fn The async function to execute on each attempt.
* @param validate A function that returns `true` if the result is satisfactory, ending the poll.
* @param options Polling options like timeout and interval.
* @returns A promise that resolves with the first valid result from `fn`.
* @throws An error if the timeout is reached before `validate` returns `true`.
*/
export async function poll<T>(
fn: () => Promise<T>,
validate: (result: T) => boolean,
options: PollOptions = {},
): Promise<T> {
const { timeout = 10000, interval = 500, description = 'operation' } = options;
const startTime = Date.now();
while (Date.now() - startTime < timeout) {
const result = await fn();
if (validate(result)) return result;
await new Promise((resolve) => setTimeout(resolve, interval));
}
throw new Error(`Polling timed out for ${description} after ${timeout}ms.`);
}
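
For reference, a minimal usage sketch at the definition site (`getJobStatus` is a placeholder for any async status fetch). Note that time spent inside `fn` counts against the timeout, and no final attempt is made once the deadline passes:

// Wait for a background job to reach a terminal state, then assert on the result.
const jobStatus = await poll(
  () => getJobStatus(jobId),
  (status) => status.state === 'completed' || status.state === 'failed',
  { timeout: 60000, interval: 2000, description: 'job completion' },
);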

View File

@@ -1,9 +1,15 @@
// src/utils/dateUtils.test.ts
import { describe, it, expect, vi, afterEach, beforeEach } from 'vitest';
import { getSimpleWeekAndYear } from './dateUtils';
import {
calculateSimpleWeekAndYear,
formatShortDate,
calculateDaysBetween,
formatDateRange,
getCurrentDateISOString,
} from './dateUtils';
describe('dateUtils', () => {
describe('getSimpleWeekAndYear', () => {
describe('calculateSimpleWeekAndYear', () => {
beforeEach(() => {
// Use fake timers to control the current date in tests
vi.useFakeTimers();
@@ -16,35 +22,35 @@ describe('dateUtils', () => {
it('should return week 1 for the first day of the year', () => {
const date = new Date('2024-01-01T12:00:00Z');
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
});
it('should return week 1 for the 7th day of the year', () => {
const date = new Date('2024-01-07T12:00:00Z');
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 1 });
});
it('should return week 2 for the 8th day of the year', () => {
const date = new Date('2024-01-08T12:00:00Z');
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 2 });
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 2 });
});
it('should correctly calculate the week for a date in the middle of the year', () => {
// July 1st is the 183rd day of a leap year. 182 / 7 = 26. So it's week 27.
const date = new Date('2024-07-01T12:00:00Z');
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 27 });
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 27 });
});
it('should correctly calculate the week for the last day of a non-leap year', () => {
// Dec 31, 2023 is the 365th day. 364 / 7 = 52. So it's week 53.
const date = new Date('2023-12-31T12:00:00Z');
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2023, week: 53 });
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2023, week: 53 });
});
it('should correctly calculate the week for the last day of a leap year', () => {
// Dec 31, 2024 is the 366th day. 365 / 7 = 52.14. floor(52.14) + 1 = 53.
const date = new Date('2024-12-31T12:00:00Z');
expect(getSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 53 });
expect(calculateSimpleWeekAndYear(date)).toEqual({ year: 2024, week: 53 });
});
it('should use the current date if no date is provided', () => {
@@ -52,7 +58,172 @@ describe('dateUtils', () => {
vi.setSystemTime(fakeCurrentDate);
// 40 / 7 = 5.71. floor(5.71) + 1 = 6.
expect(getSimpleWeekAndYear()).toEqual({ year: 2025, week: 6 });
expect(calculateSimpleWeekAndYear()).toEqual({ year: 2025, week: 6 });
});
});
describe('formatShortDate', () => {
it('should format a valid YYYY-MM-DD date string correctly', () => {
expect(formatShortDate('2024-07-26')).toBe('Jul 26');
});
it('should handle single-digit days correctly', () => {
expect(formatShortDate('2025-01-05')).toBe('Jan 5');
});
it('should handle dates at the end of the year', () => {
expect(formatShortDate('2023-12-31')).toBe('Dec 31');
});
it('should return null for a null input', () => {
expect(formatShortDate(null)).toBeNull();
});
it('should return null for an undefined input', () => {
expect(formatShortDate(undefined)).toBeNull();
});
it('should return null for an empty string input', () => {
expect(formatShortDate('')).toBeNull();
});
it('should return null for an invalid date string', () => {
expect(formatShortDate('not-a-real-date')).toBeNull();
});
it('should return null for a malformed date string', () => {
expect(formatShortDate('2024-13-01')).toBeNull(); // Invalid month
});
it('should correctly format a full ISO string with time and timezone', () => {
expect(formatShortDate('2024-12-25T10:00:00Z')).toBe('Dec 25');
});
});
describe('calculateDaysBetween', () => {
it('should calculate the difference in days between two valid date strings', () => {
expect(calculateDaysBetween('2023-01-01', '2023-01-05')).toBe(4);
});
it('should return a negative number if the end date is before the start date', () => {
expect(calculateDaysBetween('2023-01-05', '2023-01-01')).toBe(-4);
});
it('should handle Date objects', () => {
const start = new Date('2023-01-01');
const end = new Date('2023-01-10');
expect(calculateDaysBetween(start, end)).toBe(9);
});
it('should return null if either date is null or undefined', () => {
expect(calculateDaysBetween(null, '2023-01-01')).toBeNull();
expect(calculateDaysBetween('2023-01-01', undefined)).toBeNull();
});
it('should return null if either date is invalid', () => {
expect(calculateDaysBetween('invalid', '2023-01-01')).toBeNull();
expect(calculateDaysBetween('2023-01-01', 'invalid')).toBeNull();
});
});
describe('formatDateRange', () => {
it('should format a range with two different valid dates', () => {
expect(formatDateRange('2023-01-01', '2023-01-05')).toBe('Jan 1 - Jan 5');
});
it('should format a range with the same start and end date as a single date', () => {
expect(formatDateRange('2023-01-01', '2023-01-01')).toBe('Jan 1');
});
it('should return only the start date if end date is missing', () => {
expect(formatDateRange('2023-01-01', null)).toBe('Jan 1');
expect(formatDateRange('2023-01-01', undefined)).toBe('Jan 1');
});
it('should return only the end date if start date is missing', () => {
expect(formatDateRange(null, '2023-01-05')).toBe('Jan 5');
expect(formatDateRange(undefined, '2023-01-05')).toBe('Jan 5');
});
it('should return null if both dates are missing or invalid', () => {
expect(formatDateRange(null, null)).toBeNull();
expect(formatDateRange(undefined, undefined)).toBeNull();
expect(formatDateRange('invalid', 'invalid')).toBeNull();
});
it('should handle one valid and one invalid date by showing only the valid one', () => {
expect(formatDateRange('2023-01-01', 'invalid')).toBe('Jan 1');
expect(formatDateRange('invalid', '2023-01-05')).toBe('Jan 5');
});
it('should handle empty strings as invalid dates', () => {
expect(formatDateRange('', '2023-01-05')).toBe('Jan 5');
expect(formatDateRange('2023-01-05', '')).toBe('Jan 5');
expect(formatDateRange('', '')).toBeNull();
});
it('should handle garbage strings as invalid dates', () => {
expect(formatDateRange('garbage', '2023-01-05')).toBe('Jan 5');
expect(formatDateRange('2023-01-05', 'garbage')).toBe('Jan 5');
});
it('should handle start date being after end date (raw format)', () => {
// The function currently doesn't validate order, it just formats what it's given.
// This test ensures it doesn't crash or behave unexpectedly.
expect(formatDateRange('2023-02-01', '2023-01-01')).toBe('Feb 1 - Jan 1');
});
it('should handle dates with time components correctly', () => {
// parseISO should handle the time component and formatShortDate should strip it
expect(formatDateRange('2023-01-01T10:00:00', '2023-01-05T15:30:00')).toBe(
'Jan 1 - Jan 5',
);
});
describe('verbose mode', () => {
it('should format a range with two different valid dates verbosely', () => {
expect(formatDateRange('2023-01-01', '2023-01-05', { verbose: true })).toBe(
'Deals valid from January 1, 2023 to January 5, 2023',
);
});
it('should format a range with the same start and end date verbosely', () => {
expect(formatDateRange('2023-01-01', '2023-01-01', { verbose: true })).toBe(
'Valid on January 1, 2023',
);
});
it('should format only the start date verbosely', () => {
expect(formatDateRange('2023-01-01', null, { verbose: true })).toBe(
'Deals start January 1, 2023',
);
});
it('should format only the end date verbosely', () => {
expect(formatDateRange(null, '2023-01-05', { verbose: true })).toBe(
'Deals end January 5, 2023',
);
});
it('should handle one valid and one invalid date verbosely', () => {
expect(formatDateRange('2023-01-01', 'invalid', { verbose: true })).toBe(
'Deals start January 1, 2023',
);
});
it('should handle start date being after end date verbosely', () => {
expect(formatDateRange('2023-02-01', '2023-01-01', { verbose: true })).toBe(
'Deals valid from February 1, 2023 to January 1, 2023',
);
});
});
});
describe('getCurrentDateISOString', () => {
it('should return the current date in YYYY-MM-DD format', () => {
const fakeDate = new Date('2025-12-25T10:00:00Z');
vi.setSystemTime(fakeDate);
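// Note: format() renders the *local* date, so this assertion assumes the test
// runner's zone is east of UTC-10 (at 10:00 UTC the local date is still Dec 25
// there); a runner at UTC-11 or UTC-12 would see Dec 24.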
expect(getCurrentDateISOString()).toBe('2025-12-25');
});
});
});

View File

@@ -1,4 +1,5 @@
// src/utils/dateUtils.ts
import { parseISO, format, isValid, differenceInDays } from 'date-fns';
/**
* Calculates the current year and a simplified week number.
@@ -7,16 +8,93 @@
* For true ISO 8601 week numbers (where week 1 is the first week with a Thursday),
* a dedicated library like `date-fns` (`getISOWeek` and `getISOWeekYear`) is recommended.
*
* @param date The date to calculate the week for. Defaults to the current date.
* @param date The date to calculate the simple week for. Defaults to the current date.
* @returns An object containing the year and week number.
*/
export function getSimpleWeekAndYear(date: Date = new Date()): { year: number; week: number } {
export function calculateSimpleWeekAndYear(date: Date = new Date()): { year: number; week: number } {
const year = date.getFullYear();
const startOfYear = new Date(year, 0, 1);
// Calculate the difference in days from the start of the year (day 0 to 364/365)
const dayOfYear = (date.getTime() - startOfYear.getTime()) / (1000 * 60 * 60 * 24);
// Use UTC dates to calculate the difference in days.
// This avoids issues with Daylight Saving Time (DST) where a day might have 23 or 25 hours,
// which can cause the millisecond-based calculation to be slightly off (e.g., 149.96 days).
const startOfYear = Date.UTC(year, 0, 1);
const current = Date.UTC(year, date.getMonth(), date.getDate());
const msPerDay = 1000 * 60 * 60 * 24;
const dayOfYear = (current - startOfYear) / msPerDay;
// Divide by 7, take the floor to get the zero-based week, and add 1 for a one-based week number.
const week = Math.floor(dayOfYear / 7) + 1;
return { year, week };
}
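// For reference, a minimal sketch of the ISO-8601 variant the docstring above
// recommends (getISOWeek / getISOWeekYear would need to be added to the date-fns
// import at the top of the file; `getIsoWeekAndYear` is a hypothetical name, not
// part of this module):
export function getIsoWeekAndYear(date: Date = new Date()): { year: number; week: number } {
// ISO week 1 is the first week containing a Thursday, so near year boundaries the
// ISO year can differ from the calendar year: 2021-01-01 (a Friday) falls in week 53
// of ISO year 2020, whereas calculateSimpleWeekAndYear reports week 1 of 2021.
return { year: getISOWeekYear(date), week: getISOWeek(date) };
}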
export const formatShortDate = (dateString: string | null | undefined): string | null => {
if (!dateString) return null;
// Using `parseISO` from date-fns is more reliable than `new Date()` for YYYY-MM-DD strings.
// It correctly interprets the string as a local date, avoiding timezone-related "off-by-one" errors.
const date = parseISO(dateString);
if (isValid(date)) {
return format(date, 'MMM d');
}
return null;
};
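// To make the off-by-one hazard above concrete (illustration only; the 'Dec 24'
// result assumes a runner in a zone west of UTC, e.g. UTC-8):
// format(new Date('2024-12-25'), 'MMM d');  // 'Dec 24' — parsed as midnight UTC
// format(parseISO('2024-12-25'), 'MMM d');  // 'Dec 25' — parsed as local midnight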
export const calculateDaysBetween = (
startDate: string | Date | null | undefined,
endDate: string | Date | null | undefined,
): number | null => {
if (!startDate || !endDate) return null;
const start = typeof startDate === 'string' ? parseISO(startDate) : startDate;
const end = typeof endDate === 'string' ? parseISO(endDate) : endDate;
if (!isValid(start) || !isValid(end)) return null;
return differenceInDays(end, start);
};
interface DateRangeOptions {
verbose?: boolean;
}
export const formatDateRange = (
startDate: string | null | undefined,
endDate: string | null | undefined,
options?: DateRangeOptions,
): string | null => {
if (!options?.verbose) {
const start = formatShortDate(startDate);
const end = formatShortDate(endDate);
if (start && end) {
return start === end ? start : `${start} - ${end}`;
}
return start || end || null;
}
// Verbose format logic
const dateFormat = 'MMMM d, yyyy';
const formatFn = (dateStr: string | null | undefined) => {
if (!dateStr) return null;
const date = parseISO(dateStr);
return isValid(date) ? format(date, dateFormat) : null;
};
const start = formatFn(startDate);
const end = formatFn(endDate);
if (start && end) {
return start === end ? `Valid on ${start}` : `Deals valid from ${start} to ${end}`;
}
if (start) return `Deals start ${start}`;
if (end) return `Deals end ${end}`;
return null;
};
/**
* Returns the current date as an ISO 8601 string (YYYY-MM-DD).
* Useful for getting "today" without the time component.
*/
export const getCurrentDateISOString = (): string => {
return format(new Date(), 'yyyy-MM-dd');
};
export { calculateSimpleWeekAndYear as getSimpleWeekAndYear };

View File

@@ -0,0 +1,33 @@
// src/utils/formatUtils.test.ts
import { describe, it, expect } from 'vitest';
import { formatCurrency } from './formatUtils';
describe('formatCurrency', () => {
it('should format a positive integer of cents correctly', () => {
expect(formatCurrency(199)).toBe('$1.99');
});
it('should format a larger number of cents correctly', () => {
expect(formatCurrency(12345)).toBe('$123.45');
});
it('should handle single-digit cents correctly', () => {
expect(formatCurrency(5)).toBe('$0.05');
});
it('should handle zero cents correctly', () => {
expect(formatCurrency(0)).toBe('$0.00');
});
it('should return "N/A" for a null input', () => {
expect(formatCurrency(null)).toBe('N/A');
});
it('should return "N/A" for an undefined input', () => {
expect(formatCurrency(undefined)).toBe('N/A');
});
it('should handle negative cents correctly', () => {
expect(formatCurrency(-500)).toBe('-$5.00');
});
});

14 src/utils/formatUtils.ts Normal file
View File

@@ -0,0 +1,14 @@
// src/utils/formatUtils.ts
/**
 * Formats a numeric value in cents into a currency string (e.g., $4.99).
 * Currently fixed to the en-US locale and USD; a locale-aware variant is sketched below.
*
* @param amountInCents The amount in cents to format. Can be null or undefined.
* @returns A formatted currency string (e.g., "$4.99"), or 'N/A' if the input is null/undefined.
*/
export const formatCurrency = (amountInCents: number | null | undefined): string => {
if (amountInCents === null || amountInCents === undefined) return 'N/A';
return new Intl.NumberFormat('en-US', { style: 'currency', currency: 'USD' }).format(amountInCents / 100);
};
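// Should locale flexibility ever be needed, a sketch of a parameterized variant
// (`formatCurrencyIn` is a hypothetical helper, not part of this commit):
export const formatCurrencyIn = (
amountInCents: number | null | undefined,
locale = 'en-US',
currency = 'USD',
): string => {
if (amountInCents === null || amountInCents === undefined) return 'N/A';
return new Intl.NumberFormat(locale, { style: 'currency', currency }).format(amountInCents / 100);
};
// e.g. formatCurrencyIn(12345, 'de-DE', 'EUR') -> '123,45 €'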

View File

@@ -3,49 +3,92 @@ import sharp from 'sharp';
import path from 'path';
import fs from 'node:fs/promises';
import type { Logger } from 'pino';
import { sanitizeFilename } from './stringUtils';
/**
* Generates a 64x64 square icon from a source image.
* @param sourceImagePath The full path to the original image file.
* @param iconsDirectory The directory where the icon should be saved.
* @returns A promise that resolves to the filename of the newly created icon.
* @param logger The request-scoped logger instance, as per ADR-004.
* @throws An error if the icon generation fails.
* Processes an uploaded image file by stripping all metadata (like EXIF)
* and optimizing it for web use.
*
* @param sourcePath The path to the temporary uploaded file.
* @param destinationDir The directory where the final image should be saved.
* @param originalFileName The original name of the file, used to determine the output name.
* @param logger A pino logger instance for logging.
* @returns The file name of the newly processed image.
*/
export async function generateFlyerIcon(
sourceImagePath: string,
iconsDirectory: string,
export async function processAndSaveImage(
sourcePath: string,
destinationDir: string,
originalFileName: string,
logger: Logger,
): Promise<string> {
// Create a reasonably unique filename to avoid collisions, while keeping the original extension.
const fileExt = path.extname(originalFileName);
const fileBase = path.basename(originalFileName, fileExt);
const outputFileName = `${fileBase}-${Date.now()}${fileExt}`;
const outputPath = path.join(destinationDir, outputFileName);
try {
// 1. Create a new filename, standardizing the extension to .webp for consistency and performance.
// We sanitize the original filename to remove spaces and special characters, ensuring URL safety.
// The sourceImagePath is already sanitized by multer, but we apply it here again for robustness
// in case this function is ever called from a different context.
const sanitizedBaseName = sanitizeFilename(path.basename(sourceImagePath));
const originalFileName = path.parse(sanitizedBaseName).name;
const iconFileName = `icon-${originalFileName}.webp`;
const iconOutputPath = path.join(iconsDirectory, iconFileName);
// Ensure the destination directory exists.
await fs.mkdir(destinationDir, { recursive: true });
// Ensure the icons subdirectory exists.
await fs.mkdir(iconsDirectory, { recursive: true });
logger.debug({ sourcePath, outputPath }, 'Starting image processing: stripping metadata and optimizing.');
// 2. Use sharp to process the image.
await sharp(sourceImagePath)
// Use `resize` with a `fit` strategy to prevent distortion.
// `sharp.fit.cover` will resize to fill 64x64 and crop any excess,
// ensuring the icon is always a non-distorted square.
.resize(64, 64, { fit: sharp.fit.cover })
// 3. Convert the output to WebP format.
// The `quality` option is a good balance between size and clarity.
.webp({ quality: 80 })
.toFile(iconOutputPath);
// Use sharp to process the image.
// sharp strips EXIF and all other metadata by default; calling .withMetadata()
// would *preserve* it, so it is deliberately omitted here.
// Apply the optimization that matches the file's extension: chaining .jpeg() and
// .png() unconditionally would force every output into the last format called.
const pipeline = sharp(sourcePath, { failOn: 'none' });
if (/\.jpe?g$/i.test(fileExt)) {
pipeline.jpeg({ quality: 85, mozjpeg: true }); // Optimize JPEGs
} else if (/\.png$/i.test(fileExt)) {
pipeline.png({ compressionLevel: 8 }); // Optimize PNGs
}
await pipeline.toFile(outputPath);
logger.info(`Generated 64x64 icon: ${iconFileName}`);
return iconFileName;
logger.info(`Successfully processed image and saved to ${outputPath}`);
return outputFileName;
} catch (error) {
logger.error({ error, sourceImagePath }, 'Failed to generate flyer icon:');
throw new Error('Icon generation failed.');
logger.error(
{ err: error, sourcePath, outputPath },
'An error occurred during image processing and saving.',
);
// Re-throw the error to be handled by the calling service (e.g., the worker).
throw new Error(`Failed to process image ${originalFileName}.`);
}
}
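// A sanity check for the metadata stripping above (a test sketch, not part of this
// commit; `tmpUpload`, `outDir`, and `logger` are assumed fixtures from test setup):
// it('strips EXIF from processed images', async () => {
//   const outName = await processAndSaveImage(tmpUpload, outDir, 'photo.jpg', logger);
//   const meta = await sharp(path.join(outDir, outName)).metadata();
//   expect(meta.exif).toBeUndefined(); // sharp only sets `exif` when metadata survives
// });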
/**
* Generates a small WebP icon from a source image.
*
* @param sourcePath The path to the source image (can be the original upload or the processed image).
* @param outputDir The directory to save the icon in (e.g., 'flyer-images/icons').
* @param logger A pino logger instance for logging.
* @returns The file name of the generated icon.
*/
export async function generateFlyerIcon(
sourcePath: string,
outputDir: string,
logger: Logger,
): Promise<string> {
// Use the source file's name (without extension) to create the icon name.
const iconFileName = `icon-${path.parse(sourcePath).name}.webp`;
const outputPath = path.join(outputDir, iconFileName);
try {
// Ensure the output directory exists.
await fs.mkdir(outputDir, { recursive: true });
logger.debug({ sourcePath, outputPath }, 'Starting icon generation.');
await sharp(sourcePath, { failOn: 'none' })
.resize({ width: 128, height: 128, fit: 'inside' })
.webp({ quality: 75 }) // Slightly lower quality for icons is acceptable.
.toFile(outputPath);
logger.info(`Successfully generated icon: ${outputPath}`);
return iconFileName;
} catch (error) {
logger.error(
{ err: error, sourcePath, outputPath },
'An error occurred during icon generation.',
);
// Re-throw the error to be handled by the calling service.
throw new Error(`Failed to generate icon for ${sourcePath}.`);
}
}
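// How the two helpers might be wired together in an upload worker (a sketch; the
// `upload` object and directory layout are assumptions, not code from this commit):
// const imagesDir = 'flyer-images';
// const fileName = await processAndSaveImage(upload.tempPath, imagesDir, upload.originalName, logger);
// // Build the icon from the already-processed image so stripped metadata stays stripped.
// const iconName = await generateFlyerIcon(path.join(imagesDir, fileName), path.join(imagesDir, 'icons'), logger);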