Compare commits

34 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | ab63f83f50 |  |
|  | b546a55eaf |  |
|  | dfa53a93dd |  |
|  | f30464cd0e |  |
|  | 2d2fa3c2c8 |  |
|  | 58cb391f4b |  |
|  | 0ebe2f0806 |  |
|  | 7867abc5bc |  |
|  | cc4c8e2839 |  |
|  | 33ee2eeac9 |  |
|  | e0b13f26fb |  |
|  | eee7f36756 |  |
|  | 622c919733 |  |
|  | c7f6b6369a |  |
|  | 879d956003 |  |
|  | 27eaac7ea8 |  |
|  | 93618c57e5 |  |
|  | 7f043ef704 |  |
|  | 62e35deddc |  |
|  | 59f6f43d03 |  |
|  | e675c1a73c |  |
|  | 3c19084a0a |  |
|  | e2049c6b9f |  |
|  | a3839c2f0d |  |
|  | c1df3d7b1b |  |
|  | 94782f030d |  |
|  | 1c25b79251 |  |
|  | 0b0fa8294d |  |
|  | f49f3a75fb |  |
|  | 8f14044ae6 |  |
|  | 55e1e425f4 |  |
|  | 68b16ad2e8 |  |
|  | 6a28934692 |  |
|  | 78c4a5fee6 |  |
package-lock.json (4 changes, generated)

@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.41",
+  "version": "0.9.58",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.41",
+      "version": "0.9.58",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",

package.json

@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.41",
+  "version": "0.9.58",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",

@@ -74,6 +74,18 @@ const createShoppingListSchema = z.object({
   body: z.object({ name: requiredString("Field 'name' is required.") }),
 });
 
+const createRecipeSchema = z.object({
+  body: z.object({
+    name: requiredString("Field 'name' is required."),
+    instructions: requiredString("Field 'instructions' is required."),
+    description: z.string().trim().optional(),
+    prep_time_minutes: z.number().int().nonnegative().optional(),
+    cook_time_minutes: z.number().int().nonnegative().optional(),
+    servings: z.number().int().positive().optional(),
+    photo_url: z.string().trim().url().optional(),
+  }),
+});
+
 // Apply the JWT authentication middleware to all routes in this file.
 const notificationQuerySchema = z.object({
   query: z.object({

@@ -769,6 +781,26 @@ router.put(
   },
 );
 
+/**
+ * POST /api/users/recipes - Create a new recipe.
+ */
+router.post(
+  '/recipes',
+  userUpdateLimiter,
+  validateRequest(createRecipeSchema),
+  async (req, res, next) => {
+    const userProfile = req.user as UserProfile;
+    const { body } = req as unknown as z.infer<typeof createRecipeSchema>;
+    try {
+      const recipe = await db.recipeRepo.createRecipe(userProfile.user.user_id, body, req.log);
+      res.status(201).json(recipe);
+    } catch (error) {
+      logger.error({ error }, 'Error creating recipe');
+      next(error);
+    }
+  },
+);
+
 /**
  * DELETE /api/users/recipes/:recipeId - Delete a recipe created by the user.
  */
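Note: the new schema can be exercised without Express. A minimal sketch, assuming only zod and a local stand-in for the route file's `requiredString` helper (whose real implementation lives elsewhere in that file):

```ts
import { z } from 'zod';

// Hypothetical stand-in for the route file's requiredString helper.
const requiredString = (msg: string) => z.string().trim().min(1, msg);

const createRecipeSchema = z.object({
  body: z.object({
    name: requiredString("Field 'name' is required."),
    instructions: requiredString("Field 'instructions' is required."),
    servings: z.number().int().positive().optional(),
  }),
});

// A payload missing `instructions` is rejected before the handler runs.
const result = createRecipeSchema.safeParse({ body: { name: 'Soup' } });
console.log(result.success); // false
```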
@@ -32,6 +32,7 @@ export const uploadAndProcessFlyer = async (
   formData.append('checksum', checksum);
 
   logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
+  console.error(`[aiApiClient] uploadAndProcessFlyer: Uploading file '${file.name}' with checksum '${checksum}'`);
 
   const response = await authedPostForm('/ai/upload-and-process', formData, { tokenOverride });
 
@@ -94,6 +95,7 @@ export const getJobStatus = async (
   jobId: string,
   tokenOverride?: string,
 ): Promise<JobStatus> => {
+  console.error(`[aiApiClient] getJobStatus: Fetching status for job '${jobId}'`);
   const response = await authedGet(`/ai/jobs/${jobId}/status`, { tokenOverride });
 
   // Handle non-OK responses first, as they might not have a JSON body.

@@ -197,15 +197,17 @@ describe('AI Service (Server)', () => {
       const service = new AIService(mockLoggerInstance);
 
       // Assert: Check that the warning was logged and the mock client is in use
-      expect(mockLoggerInstance.warn).toHaveBeenCalledWith(
-        '[AIService] GoogleGenAI client could not be initialized (likely missing API key in test environment). Using mock placeholder.',
+      expect(mockLoggerInstance.info).toHaveBeenCalledWith(
+        '[AIService Constructor] Test environment detected. Using internal mock for AI client to prevent real API calls in INTEGRATION TESTS.',
       );
       await expect(
-        (service as any).aiClient.generateContent({ contents: [] }),
+        (service as any).aiClient.generateContent({ contents: [], useLiteModels: false }),
       ).resolves.toBeDefined();
     });
 
     it('should use the adapter to call generateContent when using real GoogleGenAI client', async () => {
       vi.stubEnv('NODE_ENV', 'production');
+      vi.stubEnv('VITEST_POOL_ID', '');
       vi.stubEnv('GEMINI_API_KEY', 'test-key');
       // We need to force the constructor to use the real client logic, not the injected mock.
       // So we instantiate AIService without passing aiClient.

@@ -229,6 +231,8 @@ describe('AI Service (Server)', () => {
     });
 
     it('should throw error if adapter is called without content', async () => {
+      vi.stubEnv('NODE_ENV', 'production');
+      vi.stubEnv('VITEST_POOL_ID', '');
       vi.stubEnv('GEMINI_API_KEY', 'test-key');
       vi.resetModules();
       const { AIService } = await import('./aiService.server');

@@ -244,6 +248,8 @@ describe('AI Service (Server)', () => {
   describe('Model Fallback Logic', () => {
     beforeEach(() => {
       vi.unstubAllEnvs();
+      vi.stubEnv('NODE_ENV', 'production');
+      vi.stubEnv('VITEST_POOL_ID', '');
       vi.stubEnv('GEMINI_API_KEY', 'test-key');
       vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
       mockGenerateContent.mockReset();

@@ -322,9 +328,8 @@ describe('AI Service (Server)', () => {
       // Check that a warning was logged
       expect(logger.warn).toHaveBeenCalledWith(
-        // The warning should be for the model that failed ('gemini-2.5-flash'), not the next one.
+        // The warning should be for the model that failed, not the next one.
         expect.stringContaining(
-          `Model '${models[0]}' failed due to quota/rate limit. Trying next model.`,
+          `Model '${models[0]}' failed due to quota/rate limit/overload. Trying next model.`,
         ),
       );
     });

@@ -500,7 +505,7 @@ describe('AI Service (Server)', () => {
       expect(mockGenerateContent).toHaveBeenCalledTimes(2);
       expect(mockGenerateContent).toHaveBeenNthCalledWith(1, { model: models[0], ...request });
       expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { model: models[1], ...request });
-      expect(logger.warn).toHaveBeenCalledWith(expect.stringContaining(`Model '${models[0]}' failed due to quota/rate limit.`));
+      expect(logger.warn).toHaveBeenCalledWith(expect.stringContaining(`Model '${models[0]}' failed due to quota/rate limit/overload.`));
     });
 
     it('should fail immediately on a 400 Bad Request error without retrying', async () => {
@@ -136,85 +136,81 @@ export class AIService {
     "gemma-3n-e2b-it" // Corrected name from JSON
   ];
 
+  // Helper to return valid mock data for tests
+  private getMockFlyerData() {
+    return {
+      store_name: 'Mock Store from AIService',
+      valid_from: '2025-01-01',
+      valid_to: '2025-01-07',
+      store_address: '123 Mock St',
+      items: [
+        {
+          item: 'Mocked Integration Item',
+          price_display: '$1.99',
+          price_in_cents: 199,
+          quantity: 'each',
+          category_name: 'Mock Category',
+          master_item_id: null,
+        },
+      ],
+    };
+  }
+
   constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
     this.logger = logger;
     this.logger.info('---------------- [AIService] Constructor Start ----------------');
 
+    const isTestEnvironment = process.env.NODE_ENV === 'test' || !!process.env.VITEST_POOL_ID;
+
     if (aiClient) {
       this.logger.info(
-        '[AIService Constructor] Using provided mock AI client. This indicates a TEST environment.',
+        '[AIService Constructor] Using provided mock AI client. This indicates a UNIT TEST environment.',
       );
       this.aiClient = aiClient;
+    } else if (isTestEnvironment) {
+      this.logger.info(
+        '[AIService Constructor] Test environment detected. Using internal mock for AI client to prevent real API calls in INTEGRATION TESTS.',
+      );
+      this.aiClient = {
+        generateContent: async (request) => {
+          this.logger.info(
+            { useLiteModels: request.useLiteModels },
+            '[AIService] Mock generateContent called in test environment.',
+          );
+          const mockData = this.getMockFlyerData();
+          return {
+            text: JSON.stringify(mockData),
+          } as unknown as GenerateContentResponse;
+        },
+      };
     } else {
       this.logger.info(
-        '[AIService Constructor] No mock client provided. Initializing Google GenAI client for PRODUCTION-LIKE environment.',
+        '[AIService Constructor] No mock client provided and not a test environment. Initializing Google GenAI client for PRODUCTION.',
       );
-      // Determine if we are in any kind of test environment.
-      // VITEST_POOL_ID is reliably set by Vitest during test runs.
-      const isTestEnvironment = process.env.NODE_ENV === 'test' || !!process.env.VITEST_POOL_ID;
       this.logger.info(
         {
           isTestEnvironment,
           nodeEnv: process.env.NODE_ENV,
           vitestPoolId: process.env.VITEST_POOL_ID,
           hasApiKey: !!process.env.GEMINI_API_KEY,
        },
         '[AIService Constructor] Environment check',
       );
 
       const apiKey = process.env.GEMINI_API_KEY;
       if (!apiKey) {
         this.logger.warn('[AIService] GEMINI_API_KEY is not set.');
-        // Allow initialization without key in test/build environments if strictly needed
-        if (!isTestEnvironment) {
-          this.logger.error('[AIService] GEMINI_API_KEY is required in non-test environments.');
-          throw new Error('GEMINI_API_KEY environment variable not set for server-side AI calls.');
-        } else {
-          this.logger.warn(
-            '[AIService Constructor] GEMINI_API_KEY is missing, but this is a test environment, so proceeding.',
-          );
-        }
+        this.logger.error('[AIService] GEMINI_API_KEY is required in non-test environments.');
+        throw new Error('GEMINI_API_KEY environment variable not set for server-side AI calls.');
       }
-      // In test mode without injected client, we might not have a key.
-      // The stubs below protect against calling the undefined client.
       // This is the correct modern SDK pattern. We instantiate the main client.
-      const genAI = apiKey ? new GoogleGenAI({ apiKey }) : null;
-      if (!genAI) {
-        this.logger.warn(
-          '[AIService] GoogleGenAI client could not be initialized (likely missing API key in test environment). Using mock placeholder.',
-        );
-      }
+      const genAI = new GoogleGenAI({ apiKey });
 
       // We create a shim/adapter that matches the old structure but uses the new SDK call pattern.
       // This preserves the dependency injection pattern used throughout the class.
-      this.aiClient = genAI
-        ? {
-            generateContent: async (request) => {
-              if (!request.contents || request.contents.length === 0) {
-                this.logger.error(
-                  { request },
-                  '[AIService Adapter] generateContent called with no content, which is invalid.',
-                );
-                throw new Error('AIService.generateContent requires at least one content element.');
-              }
-
-              const { useLiteModels, ...apiReq } = request;
-              const models = useLiteModels ? this.models_lite : this.models;
-              return this._generateWithFallback(genAI, apiReq, models);
-            },
-          }
-        : {
-            // This is the updated mock for testing, matching the new response shape.
-            generateContent: async () => {
-              this.logger.warn(
-                '[AIService] Mock generateContent called. This should only happen in tests when no API key is available.',
-              );
-              // Return a minimal valid JSON object structure to prevent downstream parsing errors.
-              const mockResponse = { store_name: 'Mock Store', items: [] };
-              return {
-                text: JSON.stringify(mockResponse),
-              } as unknown as GenerateContentResponse;
-            },
-          };
+      this.aiClient = {
+        generateContent: async (request) => {
+          if (!request.contents || request.contents.length === 0) {
+            this.logger.error(
+              { request },
+              '[AIService Adapter] generateContent called with no content, which is invalid.',
+            );
+            throw new Error('AIService.generateContent requires at least one content element.');
+          }
+
+          const { useLiteModels, ...apiReq } = request;
+          const models = useLiteModels ? this.models_lite : this.models;
+          return this._generateWithFallback(genAI, apiReq, models);
+        },
+      };
     }
 
     this.fs = fs || fsPromises;
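Note: the constructor now picks one of three clients in a fixed order. A condensed, self-contained sketch of that selection (`selectAiClient`, `internalMockClient`, and `makeRealAdapter` are illustrative names, not the class's actual members):

```ts
type AiClientLike = { generateContent: (req: unknown) => Promise<unknown> };

// Stand-in for the internal integration-test mock shown in the diff.
const internalMockClient: AiClientLike = {
  generateContent: async () => ({ text: '{"store_name":"Mock Store from AIService","items":[]}' }),
};

function selectAiClient(
  injected: AiClientLike | undefined,
  makeRealAdapter: (apiKey: string) => AiClientLike,
): AiClientLike {
  const isTestEnvironment = process.env.NODE_ENV === 'test' || !!process.env.VITEST_POOL_ID;
  if (injected) return injected;                     // unit tests inject their own mock
  if (isTestEnvironment) return internalMockClient;  // integration tests get canned flyer data
  const apiKey = process.env.GEMINI_API_KEY;
  if (!apiKey) {
    throw new Error('GEMINI_API_KEY environment variable not set for server-side AI calls.');
  }
  return makeRealAdapter(apiKey); // production: real GoogleGenAI behind the adapter
}
```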
@@ -254,19 +250,37 @@ export class AIService {
         // If the call succeeds, return the result immediately.
         return result;
       } catch (error: unknown) {
-        lastError = error instanceof Error ? error : new Error(String(error));
-        const errorMessage = (lastError.message || '').toLowerCase(); // Make case-insensitive
+        // Robust error message extraction to handle various error shapes (Error objects, JSON responses, etc.)
+        let errorMsg = '';
+        if (error instanceof Error) {
+          lastError = error;
+          errorMsg = error.message;
+        } else {
+          try {
+            if (typeof error === 'object' && error !== null && 'message' in error) {
+              errorMsg = String((error as any).message);
+            } else {
+              errorMsg = JSON.stringify(error);
+            }
+          } catch {
+            errorMsg = String(error);
+          }
+          lastError = new Error(errorMsg);
+        }
+        const lowerErrorMsg = errorMsg.toLowerCase();
 
         // Check for specific error messages indicating quota issues or model unavailability.
         if (
-          errorMessage.includes('quota') ||
-          errorMessage.includes('429') || // HTTP 429 Too Many Requests
-          errorMessage.includes('resource_exhausted') || // Make case-insensitive
-          errorMessage.includes('model is overloaded') ||
-          errorMessage.includes('not found') // Also retry if model is not found (e.g., regional availability or API version issue)
+          lowerErrorMsg.includes('quota') ||
+          lowerErrorMsg.includes('429') || // HTTP 429 Too Many Requests
+          lowerErrorMsg.includes('503') || // HTTP 503 Service Unavailable
+          lowerErrorMsg.includes('resource_exhausted') ||
+          lowerErrorMsg.includes('overloaded') || // Covers "model is overloaded"
+          lowerErrorMsg.includes('unavailable') || // Covers "Service Unavailable"
+          lowerErrorMsg.includes('not found') // Also retry if model is not found (e.g., regional availability or API version issue)
         ) {
           this.logger.warn(
-            `[AIService Adapter] Model '${modelName}' failed due to quota/rate limit. Trying next model. Error: ${errorMessage}`,
+            `[AIService Adapter] Model '${modelName}' failed due to quota/rate limit/overload. Trying next model. Error: ${errorMsg}`,
           );
           continue; // Try the next model in the list.
         } else {
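Note: stripped of logging, the retry loop after this change has roughly this shape — a self-contained sketch, not the actual `_generateWithFallback`:

```ts
const RETRYABLE = ['quota', '429', '503', 'resource_exhausted', 'overloaded', 'unavailable', 'not found'];

async function generateWithFallback<T>(
  models: string[],
  call: (model: string) => Promise<T>,
): Promise<T> {
  let lastError: Error | undefined;
  for (const model of models) {
    try {
      return await call(model); // first success wins
    } catch (error) {
      const msg = error instanceof Error ? error.message : JSON.stringify(error);
      lastError = error instanceof Error ? error : new Error(msg);
      if (RETRYABLE.some((p) => msg.toLowerCase().includes(p))) continue; // next model
      throw lastError; // non-retryable (e.g., 400 Bad Request): fail fast
    }
  }
  throw lastError ?? new Error('No models configured.');
}
```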
@@ -529,6 +543,7 @@ export class AIService {
     logger.info(
       `[extractCoreDataFromFlyerImage] Entering method with ${imagePaths.length} image(s).`,
     );
+
     const prompt = this._buildFlyerExtractionPrompt(masterItems, submitterIp, userProfileAddress);
 
     const imageParts = await Promise.all(

@@ -782,6 +797,7 @@ async enqueueFlyerProcessing(
 
   const baseUrl = baseUrlOverride || getBaseUrl(logger);
   // --- START DEBUGGING ---
+  console.error(`[DEBUG] aiService.enqueueFlyerProcessing resolved baseUrl: "${baseUrl}"`);
   // Add a fail-fast check to ensure the baseUrl is a valid URL before enqueuing.
   // This will make the test fail at the upload step if the URL is the problem,
   // which is easier to debug than a worker failure.

@@ -887,8 +903,8 @@ async enqueueFlyerProcessing(
   const itemsArray = Array.isArray(rawItems) ? rawItems : typeof rawItems === 'string' ? JSON.parse(rawItems) : [];
   const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
     ...item,
-    // Ensure price_display is never null to satisfy database constraints.
-    price_display: item.price_display ?? '',
+    // Ensure empty or nullish price_display is stored as NULL to satisfy database constraints.
+    price_display: item.price_display || null,
     master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
     quantity: item.quantity ?? 1,
     view_count: 0,

@@ -86,6 +86,33 @@ describe('AnalyticsService', () => {
         'Daily analytics job failed.',
       );
     });
 
+    it('should handle non-Error objects thrown during processing', async () => {
+      const job = createMockJob<AnalyticsJobData>({ reportDate: '2023-10-27' } as AnalyticsJobData);
+
+      mockLoggerInstance.info
+        .mockImplementationOnce(() => {}) // "Picked up..."
+        .mockImplementationOnce(() => {
+          throw 'A string error';
+        });
+
+      const promise = service.processDailyReportJob(job);
+
+      // Capture the expectation promise BEFORE triggering the rejection via timer advancement.
+      const expectation = expect(promise).rejects.toThrow('A string error');
+
+      await vi.advanceTimersByTimeAsync(10000);
+
+      await expectation;
+
+      expect(mockLoggerInstance.error).toHaveBeenCalledWith(
+        expect.objectContaining({
+          err: expect.objectContaining({ message: 'A string error' }),
+          attemptsMade: 1,
+        }),
+        'Daily analytics job failed.',
+      );
+    });
   });
 
   describe('processWeeklyReportJob', () => {

@@ -149,5 +176,35 @@ describe('AnalyticsService', () => {
         'Weekly analytics job failed.',
       );
     });
 
+    it('should handle non-Error objects thrown during processing', async () => {
+      const job = createMockJob<WeeklyAnalyticsJobData>({
+        reportYear: 2023,
+        reportWeek: 43,
+      } as WeeklyAnalyticsJobData);
+
+      mockLoggerInstance.info
+        .mockImplementationOnce(() => {}) // "Picked up..."
+        .mockImplementationOnce(() => {
+          throw 'A string error';
+        });
+
+      const promise = service.processWeeklyReportJob(job);
+
+      // Capture the expectation promise BEFORE triggering the rejection via timer advancement.
+      const expectation = expect(promise).rejects.toThrow('A string error');
+
+      await vi.advanceTimersByTimeAsync(30000);
+
+      await expectation;
+
+      expect(mockLoggerInstance.error).toHaveBeenCalledWith(
+        expect.objectContaining({
+          err: expect.objectContaining({ message: 'A string error' }),
+          attemptsMade: 1,
+        }),
+        'Weekly analytics job failed.',
+      );
+    });
   });
 });

@@ -947,7 +947,10 @@ describe('API Client', () => {
 
   it('trackFlyerItemInteraction should log a warning on failure', async () => {
     const apiError = new Error('Network failed');
-    vi.mocked(global.fetch).mockRejectedValue(apiError);
+    // Mock global.fetch to throw an error directly to ensure the catch block is hit.
+    vi.spyOn(global, 'fetch').mockImplementationOnce(() => {
+      throw apiError;
+    });
     const { logger } = await import('./logger.client');
 
     // We can now await this properly because we added 'return' in apiClient.ts

@@ -959,7 +962,10 @@ describe('API Client', () => {
 
   it('logSearchQuery should log a warning on failure', async () => {
     const apiError = new Error('Network failed');
-    vi.mocked(global.fetch).mockRejectedValue(apiError);
+    // Mock global.fetch to throw an error directly to ensure the catch block is hit.
+    vi.spyOn(global, 'fetch').mockImplementationOnce(() => {
+      throw apiError;
+    });
     const { logger } = await import('./logger.client');
 
     const queryData = createMockSearchQueryPayload({

@@ -95,6 +95,7 @@ export const apiFetch = async (
   const fullUrl = url.startsWith('http') ? url : joinUrl(API_BASE_URL, url);
 
   logger.debug(`apiFetch: ${options.method || 'GET'} ${fullUrl}`);
+  console.error(`[apiClient] apiFetch Request: ${options.method || 'GET'} ${fullUrl}`);
 
   // Create a new headers object to avoid mutating the original options.
   const headers = new Headers(options.headers || {});

@@ -35,6 +35,7 @@ describe('AuthService', () => {
   let DatabaseError: typeof import('./processingErrors').DatabaseError;
   let UniqueConstraintError: typeof import('./db/errors.db').UniqueConstraintError;
   let RepositoryError: typeof import('./db/errors.db').RepositoryError;
+  let ValidationError: typeof import('./db/errors.db').ValidationError;
   let withTransaction: typeof import('./db/index.db').withTransaction;
 
   const reqLog = {}; // Mock request logger object

@@ -109,6 +110,7 @@ describe('AuthService', () => {
     DatabaseError = (await import('./processingErrors')).DatabaseError;
     UniqueConstraintError = (await import('./db/errors.db')).UniqueConstraintError;
     RepositoryError = (await import('./db/errors.db')).RepositoryError;
+    ValidationError = (await import('./db/errors.db')).ValidationError;
   });
 
   afterEach(() => {

@@ -168,6 +170,15 @@ describe('AuthService', () => {
 
       expect(logger.error).toHaveBeenCalledWith({ error, email: 'test@example.com' }, `User registration failed with an unexpected error.`);
     });
 
+    it('should throw ValidationError if password is weak', async () => {
+      const { validatePasswordStrength } = await import('../utils/authUtils');
+      vi.mocked(validatePasswordStrength).mockReturnValue({ isValid: false, feedback: 'Password too weak' });
+
+      await expect(
+        authService.registerUser('test@example.com', 'weak', 'Test User', undefined, reqLog),
+      ).rejects.toThrow(ValidationError);
+    });
   });
 
   describe('registerAndLoginUser', () => {

@@ -285,6 +296,25 @@ describe('AuthService', () => {
       );
       expect(logger.error).toHaveBeenCalled();
     });
 
+    it('should log error if sending email fails but still return token', async () => {
+      vi.mocked(userRepo.findUserByEmail).mockResolvedValue(mockUser);
+      vi.mocked(bcrypt.hash).mockImplementation(async () => 'hashed-token');
+      const emailError = new Error('Email failed');
+      vi.mocked(sendPasswordResetEmail).mockRejectedValue(emailError);
+
+      const result = await authService.resetPassword('test@example.com', reqLog);
+
+      expect(logger.error).toHaveBeenCalledWith({ emailError }, `Email send failure during password reset for user`);
+      expect(result).toBe('mocked_random_id');
+    });
+
+    it('should re-throw RepositoryError', async () => {
+      const repoError = new RepositoryError('Repo error', 500);
+      vi.mocked(userRepo.findUserByEmail).mockRejectedValue(repoError);
+
+      await expect(authService.resetPassword('test@example.com', reqLog)).rejects.toThrow(repoError);
+    });
   });
 
   describe('updatePassword', () => {

@@ -334,6 +364,22 @@ describe('AuthService', () => {
       expect(transactionalUserRepoMocks.updateUserPassword).not.toHaveBeenCalled();
       expect(result).toBeNull();
     });
 
+    it('should throw ValidationError if new password is weak', async () => {
+      const { validatePasswordStrength } = await import('../utils/authUtils');
+      vi.mocked(validatePasswordStrength).mockReturnValue({ isValid: false, feedback: 'Password too weak' });
+
+      await expect(
+        authService.updatePassword('token', 'weak', reqLog),
+      ).rejects.toThrow(ValidationError);
+    });
+
+    it('should re-throw RepositoryError from transaction', async () => {
+      const repoError = new RepositoryError('Repo error', 500);
+      vi.mocked(withTransaction).mockRejectedValue(repoError);
+
+      await expect(authService.updatePassword('token', 'newPass', reqLog)).rejects.toThrow(repoError);
+    });
   });
 
   describe('getUserByRefreshToken', () => {

@@ -161,6 +161,13 @@ describe('Background Job Service', () => {
       { jobId: expect.stringContaining('manual-weekly-report-') },
     );
   });
 
+  it('should throw if job ID is not returned from the queue', async () => {
+    // Mock the queue to return a job object without an 'id' property
+    vi.mocked(weeklyAnalyticsQueue.add).mockResolvedValue({ name: 'test-job' } as any);
+
+    await expect(service.triggerWeeklyAnalyticsReport()).rejects.toThrow();
+  });
 });
 
   it('should do nothing if no deals are found for any user', async () => {

@@ -177,6 +184,35 @@ describe('Background Job Service', () => {
     expect(mockNotificationRepo.createBulkNotifications).not.toHaveBeenCalled();
   });
 
+  it('should process a single user successfully and log notification creation', async () => {
+    const singleUserDeal = [
+      {
+        ...createMockWatchedItemDeal({
+          master_item_id: 1,
+          item_name: 'Apples',
+          best_price_in_cents: 199,
+        }),
+        user_id: 'user-1',
+        email: 'user1@test.com',
+        full_name: 'User One',
+      },
+    ];
+    mockPersonalizationRepo.getBestSalePricesForAllUsers.mockResolvedValue(singleUserDeal);
+    mockEmailQueue.add.mockResolvedValue({ id: 'job-1' });
+
+    await service.runDailyDealCheck();
+
+    expect(mockEmailQueue.add).toHaveBeenCalledTimes(1);
+    expect(mockNotificationRepo.createBulkNotifications).toHaveBeenCalledTimes(1);
+    const notificationPayload = mockNotificationRepo.createBulkNotifications.mock.calls[0][0];
+    expect(notificationPayload).toHaveLength(1);
+
+    // This assertion specifically targets line 180
+    expect(mockServiceLogger.info).toHaveBeenCalledWith(
+      `[BackgroundJob] Successfully created 1 in-app notifications.`,
+    );
+  });
+
   it('should create notifications and enqueue emails when deals are found', async () => {
     mockPersonalizationRepo.getBestSalePricesForAllUsers.mockResolvedValue(mockDealsForAllUsers);

@@ -34,7 +34,10 @@ export class BackgroundJobService {
     const reportDate = getCurrentDateISOString(); // YYYY-MM-DD
     const jobId = `manual-report-${reportDate}-${Date.now()}`;
     const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
-    return job.id!;
+    if (!job.id) {
+      throw new Error('Failed to enqueue daily report job: No job ID returned');
+    }
+    return job.id;
   }
 
   public async triggerWeeklyAnalyticsReport(): Promise<string> {

@@ -45,7 +48,10 @@ export class BackgroundJobService {
       { reportYear, reportWeek },
       { jobId },
     );
-    return job.id!;
+    if (!job.id) {
+      throw new Error('Failed to enqueue weekly report job: No job ID returned');
+    }
+    return job.id;
   }
 
   /**

@@ -360,6 +360,58 @@ describe('Flyer DB Service', () => {
         'Database error in insertFlyerItems',
       );
     });
 
+    it('should sanitize empty or whitespace-only price_display to "N/A"', async () => {
+      const itemsData: FlyerItemInsert[] = [
+        {
+          item: 'Free Item',
+          price_display: '', // Empty string
+          price_in_cents: 0,
+          quantity: '1',
+          category_name: 'Promo',
+          view_count: 0,
+          click_count: 0,
+        },
+        {
+          item: 'Whitespace Item',
+          price_display: '   ', // Whitespace only
+          price_in_cents: null,
+          quantity: '1',
+          category_name: 'Promo',
+          view_count: 0,
+          click_count: 0,
+        },
+      ];
+      const mockItems = itemsData.map((item, i) =>
+        createMockFlyerItem({ ...item, flyer_item_id: i + 1, flyer_id: 1 }),
+      );
+      mockPoolInstance.query.mockResolvedValue({ rows: mockItems });
+
+      await flyerRepo.insertFlyerItems(1, itemsData, mockLogger);
+
+      expect(mockPoolInstance.query).toHaveBeenCalledTimes(1);
+
+      // Check that the values array passed to the query has null for price_display
+      const queryValues = mockPoolInstance.query.mock.calls[0][1];
+      expect(queryValues).toEqual([
+        1, // flyerId for item 1
+        'Free Item',
+        "N/A", // Sanitized price_display for item 1
+        0,
+        '1',
+        'Promo',
+        0,
+        0,
+        1, // flyerId for item 2
+        'Whitespace Item',
+        "N/A", // Sanitized price_display for item 2
+        null,
+        '1',
+        'Promo',
+        0,
+        0,
+      ]);
+    });
   });
 
   describe('createFlyerAndItems', () => {

@@ -433,6 +485,34 @@ describe('Flyer DB Service', () => {
       );
     });
 
+    it('should create a flyer with no items if items array is empty', async () => {
+      const flyerData: FlyerInsert = {
+        file_name: 'empty.jpg',
+        store_name: 'Empty Store',
+      } as FlyerInsert;
+      const itemsData: FlyerItemInsert[] = [];
+      const mockFlyer = createMockFlyer({ ...flyerData, flyer_id: 100, store_id: 2 });
+
+      const mockClient = { query: vi.fn() };
+      mockClient.query
+        .mockResolvedValueOnce({ rows: [], rowCount: 0 }) // findOrCreateStore (insert)
+        .mockResolvedValueOnce({ rows: [{ store_id: 2 }] }) // findOrCreateStore (select)
+        .mockResolvedValueOnce({ rows: [mockFlyer] }); // insertFlyer
+
+      const result = await createFlyerAndItems(
+        flyerData,
+        itemsData,
+        mockLogger,
+        mockClient as unknown as PoolClient,
+      );
+
+      expect(result).toEqual({
+        flyer: mockFlyer,
+        items: [],
+      });
+      expect(mockClient.query).toHaveBeenCalledTimes(3);
+    });
+
     it('should propagate an error if any step fails', async () => {
       const flyerData: FlyerInsert = {
         file_name: 'fail.jpg',

@@ -63,7 +63,36 @@ export class FlyerRepository {
    * @returns The newly created flyer record with its ID.
    */
   async insertFlyer(flyerData: FlyerDbInsert, logger: Logger): Promise<Flyer> {
+    console.error('[DB DEBUG] FlyerRepository.insertFlyer called with:', JSON.stringify(flyerData, null, 2));
+    // Sanitize icon_url: Ensure empty strings become NULL to avoid regex constraint violations
+    let iconUrl = flyerData.icon_url && flyerData.icon_url.trim() !== '' ? flyerData.icon_url : null;
+    let imageUrl = flyerData.image_url || 'placeholder.jpg';
+
     try {
+      // Fallback for tests/workers sending relative URLs to satisfy DB 'url_check' constraint
+      const rawBaseUrl = process.env.FRONTEND_URL || 'https://example.com';
+      const baseUrl = rawBaseUrl.endsWith('/') ? rawBaseUrl.slice(0, -1) : rawBaseUrl;
+
+      // [DEBUG] Log URL transformation for debugging test failures
+      if ((imageUrl && !imageUrl.startsWith('http')) || (iconUrl && !iconUrl.startsWith('http'))) {
+        console.error('[DB DEBUG] Transforming relative URLs:', {
+          baseUrl,
+          originalImage: imageUrl,
+          originalIcon: iconUrl,
+        });
+      }
+
+      if (imageUrl && !imageUrl.startsWith('http')) {
+        const cleanPath = imageUrl.startsWith('/') ? imageUrl.substring(1) : imageUrl;
+        imageUrl = `${baseUrl}/${cleanPath}`;
+      }
+      if (iconUrl && !iconUrl.startsWith('http')) {
+        const cleanPath = iconUrl.startsWith('/') ? iconUrl.substring(1) : iconUrl;
+        iconUrl = `${baseUrl}/${cleanPath}`;
+      }
+
+      console.error('[DB DEBUG] Final URLs for insert:', { imageUrl, iconUrl });
+
       const query = `
         INSERT INTO flyers (
           file_name, image_url, icon_url, checksum, store_id, valid_from, valid_to, store_address,

@@ -74,8 +103,8 @@ export class FlyerRepository {
       `;
       const values = [
         flyerData.file_name, // $1
-        flyerData.image_url, // $2
-        flyerData.icon_url, // $3
+        imageUrl, // $2
+        iconUrl, // $3
         flyerData.checksum, // $4
         flyerData.store_id, // $5
         flyerData.valid_from, // $6

@@ -94,16 +123,32 @@ export class FlyerRepository {
       const result = await this.db.query<Flyer>(query, values);
       return result.rows[0];
     } catch (error) {
+      console.error('[DB DEBUG] insertFlyer caught error:', error);
       const errorMessage = error instanceof Error ? error.message : '';
       let checkMsg = 'A database check constraint failed.';
 
+      // [ENHANCED LOGGING]
+      if (errorMessage.includes('url_check')) {
+        logger.error(
+          {
+            error: errorMessage,
+            offendingData: {
+              image_url: flyerData.image_url,
+              icon_url: flyerData.icon_url, // Log raw input
+              sanitized_icon_url: flyerData.icon_url && flyerData.icon_url.trim() !== '' ? flyerData.icon_url : null
+            }
+          },
+          '[DB ERROR] URL Check Constraint Failed. Inspecting URLs.'
+        );
+      }
+
       if (errorMessage.includes('flyers_checksum_check')) {
         checkMsg =
           'The provided checksum is invalid or does not meet format requirements (e.g., must be a 64-character SHA-256 hash).';
       } else if (errorMessage.includes('flyers_status_check')) {
         checkMsg = 'Invalid status provided for flyer.';
       } else if (errorMessage.includes('url_check')) {
-        checkMsg = 'Invalid URL format provided for image or icon.';
+        checkMsg = `[URL_CHECK_FAIL] Invalid URL format. Image: '${imageUrl}', Icon: '${iconUrl}'`;
      }
 
       handleDbError(error, logger, 'Database error in insertFlyer', { flyerData }, {

@@ -139,10 +184,18 @@ export class FlyerRepository {
       valueStrings.push(
         `($${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++})`,
       );
 
+      // Sanitize price_display. The database requires a non-empty string.
+      // We provide a default value if the input is null, undefined, or an empty string.
+      const priceDisplay =
+        item.price_display && item.price_display.trim() !== ''
+          ? item.price_display
+          : 'N/A';
+
       values.push(
         flyerId,
         item.item,
-        item.price_display,
+        priceDisplay,
         item.price_in_cents ?? null,
         item.quantity ?? '',
         item.category_name ?? null,
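Note: the repository-side coercion above, pulled out as a pure helper for illustration (the diff inlines it):

```ts
// Empty, whitespace-only, null, and undefined all collapse to the 'N/A' placeholder.
const sanitizePriceDisplay = (v: string | null | undefined): string =>
  v && v.trim() !== '' ? v : 'N/A';

sanitizePriceDisplay('$1.99'); // '$1.99'
sanitizePriceDisplay('');      // 'N/A'
sanitizePriceDisplay('   ');   // 'N/A'
sanitizePriceDisplay(null);    // 'N/A'
```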
@@ -152,6 +152,34 @@ export class RecipeRepository {
     }
   }
 
+  /**
+   * Creates a new recipe.
+   * @param userId The ID of the user creating the recipe.
+   * @param recipeData The data for the new recipe.
+   * @returns A promise that resolves to the newly created Recipe object.
+   */
+  async createRecipe(
+    userId: string,
+    recipeData: Pick<Recipe, 'name' | 'instructions' | 'description' | 'prep_time_minutes' | 'cook_time_minutes' | 'servings' | 'photo_url'>,
+    logger: Logger
+  ): Promise<Recipe> {
+    try {
+      const { name, instructions, description, prep_time_minutes, cook_time_minutes, servings, photo_url } = recipeData;
+      const res = await this.db.query<Recipe>(
+        `INSERT INTO public.recipes
+           (user_id, name, instructions, description, prep_time_minutes, cook_time_minutes, servings, photo_url, status)
+         VALUES ($1, $2, $3, $4, $5, $6, $7, $8, 'public')
+         RETURNING *`,
+        [userId, name, instructions, description, prep_time_minutes, cook_time_minutes, servings, photo_url]
+      );
+      return res.rows[0];
+    } catch (error) {
+      handleDbError(error, logger, 'Database error in createRecipe', { userId, recipeData }, {
+        defaultMessage: 'Failed to create recipe.',
+      });
+    }
+  }
+
   /**
    * Deletes a recipe, ensuring ownership.
    * @param recipeId The ID of the recipe to delete.

@@ -415,8 +415,12 @@ export class UserRepository {
   // prettier-ignore
   async deleteUserById(userId: string, logger: Logger): Promise<void> {
     try {
-      await this.db.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
-    } catch (error) { // This was a duplicate, fixed.
+      const res = await this.db.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
+      if (res.rowCount === 0) {
+        throw new NotFoundError(`User with ID ${userId} not found.`);
+      }
+    } catch (error) {
+      if (error instanceof NotFoundError) throw error;
       handleDbError(error, logger, 'Database error in deleteUserById', { userId }, {
         defaultMessage: 'Failed to delete user from database.',
       });
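Note: the delete-and-verify pattern in isolation — a self-contained sketch, assuming a pg-style query result where `rowCount` may be `number | null`:

```ts
class NotFoundError extends Error {}

type QueryFn = (sql: string, params: unknown[]) => Promise<{ rowCount: number | null }>;

async function deleteUserById(query: QueryFn, userId: string): Promise<void> {
  const res = await query('DELETE FROM public.users WHERE user_id = $1', [userId]);
  if (res.rowCount === 0) {
    throw new NotFoundError(`User with ID ${userId} not found.`);
  }
}

// With a stub that reports no rows deleted, the call now rejects instead of silently succeeding:
deleteUserById(async () => ({ rowCount: 0 }), 'missing-id').catch((e) => console.log(e.message));
```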
@@ -50,6 +50,7 @@ describe('Email Service (Server)', () => {
   beforeEach(async () => {
     console.log('[TEST SETUP] Setting up Email Service mocks');
     vi.clearAllMocks();
+    vi.stubEnv('FRONTEND_URL', 'https://test.flyer.com');
     // Reset to default successful implementation
     mocks.sendMail.mockImplementation((mailOptions: { to: string }) => {
       console.log('[TEST DEBUG] mockSendMail (default) called with:', mailOptions?.to);

@@ -60,12 +61,17 @@ describe('Email Service (Server)', () => {
     });
   });
 
-  describe('sendPasswordResetEmail', () => {
-    it('should call sendMail with the correct recipient, subject, and link', async () => {
-      const to = 'test@example.com';
-      const resetLink = 'http://localhost:3000/reset/mock-token-123';
-
-      await sendPasswordResetEmail(to, resetLink, logger);
+  afterEach(() => {
+    vi.unstubAllEnvs();
+  });
+
+  describe('sendPasswordResetEmail', () => {
+    it('should call sendMail with the correct recipient, subject, and constructed link', async () => {
+      const to = 'test@example.com';
+      const token = 'mock-token-123';
+      const expectedResetUrl = `https://test.flyer.com/reset-password?token=${token}`;
+
+      await sendPasswordResetEmail(to, token, logger);
 
       expect(mocks.sendMail).toHaveBeenCalledTimes(1);
       const mailOptions = mocks.sendMail.mock.calls[0][0] as {

@@ -77,9 +83,8 @@ describe('Email Service (Server)', () => {
 
       expect(mailOptions.to).toBe(to);
       expect(mailOptions.subject).toBe('Your Password Reset Request');
-      expect(mailOptions.text).toContain(resetLink);
-      // The implementation constructs the link, so we check that our mock link is present inside the href
-      expect(mailOptions.html).toContain(resetLink);
+      expect(mailOptions.text).toContain(expectedResetUrl);
+      expect(mailOptions.html).toContain(`href="${expectedResetUrl}"`);
     });
   });

@@ -269,5 +274,22 @@ describe('Email Service (Server)', () => {
         'Email job failed.',
       );
     });
 
+    it('should handle non-Error objects thrown during processing', async () => {
+      const job = createMockJob(mockJobData);
+      const emailErrorString = 'SMTP Connection Failed as a string';
+      mocks.sendMail.mockRejectedValue(emailErrorString);
+
+      await expect(processEmailJob(job)).rejects.toThrow(emailErrorString);
+
+      expect(logger.error).toHaveBeenCalledWith(
+        {
+          err: expect.objectContaining({ message: emailErrorString }),
+          jobData: mockJobData,
+          attemptsMade: 1,
+        },
+        'Email job failed.',
+      );
+    });
   });
 });

@@ -103,6 +103,7 @@ export class FlyerAiProcessor {
     jobData: FlyerJobData,
     logger: Logger,
   ): Promise<AiProcessorResult> {
+    console.error(`[WORKER DEBUG] FlyerAiProcessor: extractAndValidateData called with ${imagePaths.length} images`);
     logger.info(`Starting AI data extraction for ${imagePaths.length} pages.`);
     const { submitterIp, userProfileAddress } = jobData;
     const masterItems = await this.personalizationRepo.getAllMasterItems(logger);

@@ -159,6 +160,7 @@ export class FlyerAiProcessor {
     }
 
     logger.info(`Batch processing complete. Total items extracted: ${mergedData.items.length}`);
+    console.error(`[WORKER DEBUG] FlyerAiProcessor: Merged AI Data:`, JSON.stringify(mergedData, null, 2));
 
     // Validate the final merged dataset
     return this._validateAiData(mergedData, logger);

@@ -62,10 +62,13 @@ export class FlyerDataTransformer {
     baseUrl: string,
     logger: Logger,
   ): { imageUrl: string; iconUrl: string } {
+    console.error('[DEBUG] FlyerDataTransformer._buildUrls inputs:', { imageFileName, iconFileName, baseUrl });
     logger.debug({ imageFileName, iconFileName, baseUrl }, 'Building URLs');
     const finalBaseUrl = baseUrl || getBaseUrl(logger);
+    console.error('[DEBUG] FlyerDataTransformer._buildUrls finalBaseUrl resolved to:', finalBaseUrl);
     const imageUrl = `${finalBaseUrl}/flyer-images/${imageFileName}`;
     const iconUrl = `${finalBaseUrl}/flyer-images/icons/${iconFileName}`;
+    console.error('[DEBUG] FlyerDataTransformer._buildUrls constructed:', { imageUrl, iconUrl });
     logger.debug({ imageUrl, iconUrl }, 'Constructed URLs');
     return { imageUrl, iconUrl };
   }

@@ -90,6 +93,7 @@ export class FlyerDataTransformer {
     logger: Logger,
     baseUrl: string,
   ): Promise<{ flyerData: FlyerInsert; itemsForDb: FlyerItemInsert[] }> {
+    console.error('[DEBUG] FlyerDataTransformer.transform called with baseUrl:', baseUrl);
     logger.info('Starting data transformation from AI output to database format.');
 
     try {

@@ -11,6 +11,7 @@ import type { FlyerJobData } from '../types/job-data';
 // Mock dependencies
 vi.mock('sharp', () => {
   const mockSharpInstance = {
     resize: vi.fn().mockReturnThis(),
+    jpeg: vi.fn().mockReturnThis(),
     png: vi.fn().mockReturnThis(),
     toFile: vi.fn().mockResolvedValue({}),

@@ -55,6 +56,7 @@ describe('FlyerFileHandler', () => {
     mockFs = {
       readdir: vi.fn().mockResolvedValue([]),
       unlink: vi.fn(),
+      rename: vi.fn(),
     };
     mockExec = vi.fn().mockResolvedValue({ stdout: 'success', stderr: '' });

@@ -182,4 +184,20 @@ describe('FlyerFileHandler', () => {
       await expect(service.prepareImageInputs('/tmp/flyer.png', job, logger)).rejects.toThrow(ImageConversionError);
     });
   });
 
+  describe('optimizeImages', () => {
+    it('should optimize images and rename them', async () => {
+      const imagePaths = [{ path: '/tmp/image1.jpg', mimetype: 'image/jpeg' }];
+      const mockSharpInstance = sharp('/tmp/image1.jpg');
+      vi.mocked(mockSharpInstance.toFile).mockResolvedValue({} as any);
+
+      await service.optimizeImages(imagePaths, logger);
+
+      expect(sharp).toHaveBeenCalledWith('/tmp/image1.jpg');
+      expect(mockSharpInstance.resize).toHaveBeenCalledWith({ width: 2000, withoutEnlargement: true });
+      expect(mockSharpInstance.jpeg).toHaveBeenCalledWith({ quality: 80, mozjpeg: true });
+      expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/image1.jpg.tmp');
+      expect(mockFs.rename).toHaveBeenCalledWith('/tmp/image1.jpg.tmp', '/tmp/image1.jpg');
+    });
+  });
 });

@@ -14,6 +14,7 @@ const CONVERTIBLE_IMAGE_EXTENSIONS = ['.gif', '.tiff', '.svg', '.bmp'];
 export interface IFileSystem {
   readdir(path: string, options: { withFileTypes: true }): Promise<Dirent[]>;
   unlink(path: string): Promise<void>;
+  rename(oldPath: string, newPath: string): Promise<void>;
 }
 
 export interface ICommandExecutor {

@@ -253,7 +254,9 @@ export class FlyerFileHandler {
     job: Job<FlyerJobData>,
     logger: Logger,
   ): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
+    console.error(`[WORKER DEBUG] FlyerFileHandler: prepareImageInputs called for ${filePath}`);
     const fileExt = path.extname(filePath).toLowerCase();
+    console.error(`[WORKER DEBUG] FlyerFileHandler: Detected extension: ${fileExt}`);
 
     if (fileExt === '.pdf') {
       return this._handlePdfInput(filePath, job, logger);

@@ -267,4 +270,33 @@ export class FlyerFileHandler {
 
     return this._handleUnsupportedInput(fileExt, job.data.originalFileName, logger);
   }
 
+  /**
+   * Optimizes images for web delivery (compression, resizing).
+   * This is a distinct processing stage.
+   */
+  public async optimizeImages(
+    imagePaths: { path: string; mimetype: string }[],
+    logger: Logger,
+  ): Promise<void> {
+    logger.info(`Starting image optimization for ${imagePaths.length} images.`);
+
+    for (const image of imagePaths) {
+      const tempPath = `${image.path}.tmp`;
+      try {
+        // Optimize: Resize to max width 2000px (preserving aspect ratio) and compress
+        await sharp(image.path)
+          .resize({ width: 2000, withoutEnlargement: true })
+          .jpeg({ quality: 80, mozjpeg: true }) // Use mozjpeg for better compression
+          .toFile(tempPath);
+
+        // Replace the original file with the optimized version
+        await this.fs.rename(tempPath, image.path);
+      } catch (error) {
+        logger.error({ err: error, path: image.path }, 'Failed to optimize image.');
+        throw new ImageConversionError(`Image optimization failed for ${path.basename(image.path)}.`);
+      }
+    }
+    logger.info('Image optimization complete.');
+  }
 }
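Note: the write-to-temp-then-rename detour exists because sharp refuses to write output over its own input file. The core of the new stage, as a self-contained sketch:

```ts
import { rename } from 'node:fs/promises';
import sharp from 'sharp';

async function optimizeInPlace(filePath: string): Promise<void> {
  const tempPath = `${filePath}.tmp`;
  await sharp(filePath)
    .resize({ width: 2000, withoutEnlargement: true }) // cap width, never upscale
    .jpeg({ quality: 80, mozjpeg: true })              // recompress via mozjpeg
    .toFile(tempPath);                                 // sharp cannot write onto its input
  await rename(tempPath, filePath);                    // swap the optimized file into place
}
```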
src/services/flyerPersistenceService.server.test.ts (new file, 160 lines)

@@ -0,0 +1,160 @@
+// src/services/flyerPersistenceService.server.test.ts
+import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { FlyerPersistenceService } from './flyerPersistenceService.server';
+import { withTransaction } from './db/connection.db';
+import { createFlyerAndItems } from './db/flyer.db';
+import { AdminRepository } from './db/admin.db';
+import { GamificationRepository } from './db/gamification.db';
+import type { FlyerInsert, FlyerItemInsert, Flyer } from '../types';
+import type { Logger } from 'pino';
+import type { PoolClient } from 'pg';
+
+// Mock dependencies
+vi.mock('./db/connection.db', () => ({
+  withTransaction: vi.fn(),
+}));
+
+vi.mock('./db/flyer.db', () => ({
+  createFlyerAndItems: vi.fn(),
+}));
+
+vi.mock('./db/admin.db', () => ({
+  AdminRepository: vi.fn(),
+}));
+
+vi.mock('./db/gamification.db', () => ({
+  GamificationRepository: vi.fn(),
+}));
+
+describe('FlyerPersistenceService', () => {
+  let service: FlyerPersistenceService;
+  let mockLogger: Logger;
+  let mockClient: PoolClient;
+
+  beforeEach(() => {
+    vi.clearAllMocks();
+    service = new FlyerPersistenceService();
+
+    mockLogger = {
+      info: vi.fn(),
+      error: vi.fn(),
+      warn: vi.fn(),
+      debug: vi.fn(),
+      child: vi.fn().mockReturnThis(),
+    } as unknown as Logger;
+
+    mockClient = { query: vi.fn() } as unknown as PoolClient;
+
+    // Mock withTransaction to execute the callback immediately with a mock client
+    vi.mocked(withTransaction).mockImplementation(async (callback) => {
+      return callback(mockClient);
+    });
+  });
+
+  describe('saveFlyer', () => {
+    const mockFlyerData = {
+      file_name: 'test.jpg',
+      store_name: 'Test Store',
+      image_url: 'http://example.com/image.jpg',
+      icon_url: 'http://example.com/icon.jpg',
+      checksum: 'abc',
+      status: 'processed',
+      item_count: 0,
+      valid_from: '2024-01-01',
+      valid_to: '2024-01-07',
+      store_address: '123 Test St',
+    } as FlyerInsert;
+
+    const mockItemsForDb: FlyerItemInsert[] = [];
+
+    const mockCreatedFlyer = {
+      flyer_id: 1,
+      file_name: 'test.jpg',
+      store_id: 10,
+      // ... other fields
+    } as Flyer;
+
+    const mockCreatedItems: any[] = [];
+
+    it('should save flyer and items, and log activity if userId is provided', async () => {
+      const userId = 'user-123';
+
+      vi.mocked(createFlyerAndItems).mockResolvedValue({
+        flyer: mockCreatedFlyer,
+        items: mockCreatedItems,
+      });
+
+      const mockLogActivity = vi.fn();
+      // Mock the AdminRepository constructor to return an object with logActivity
+      vi.mocked(AdminRepository).mockImplementation(function () {
+        return { logActivity: mockLogActivity } as any;
+      });
+
+      const mockAwardAchievement = vi.fn();
+      vi.mocked(GamificationRepository).mockImplementation(function () {
+        return { awardAchievement: mockAwardAchievement } as any;
+      });
+
+      const result = await service.saveFlyer(mockFlyerData, mockItemsForDb, userId, mockLogger);
+
+      expect(withTransaction).toHaveBeenCalled();
+      expect(createFlyerAndItems).toHaveBeenCalledWith(
+        mockFlyerData,
+        mockItemsForDb,
+        mockLogger,
+        mockClient
+      );
+      expect(mockLogger.info).toHaveBeenCalledWith(
+        expect.stringContaining('Successfully processed flyer')
+      );
+
+      // Verify AdminRepository usage
+      expect(AdminRepository).toHaveBeenCalledWith(mockClient);
+      expect(mockLogActivity).toHaveBeenCalledWith(
+        expect.objectContaining({
+          userId,
+          action: 'flyer_processed',
+          displayText: `Processed a new flyer for ${mockFlyerData.store_name}.`,
+          details: { flyerId: mockCreatedFlyer.flyer_id, storeName: mockFlyerData.store_name },
+        }),
+        mockLogger
+      );
+
+      // Verify GamificationRepository usage
+      expect(GamificationRepository).toHaveBeenCalledWith(mockClient);
+      expect(mockAwardAchievement).toHaveBeenCalledWith(userId, 'First-Upload', mockLogger);
+
+      expect(result).toEqual(mockCreatedFlyer);
+    });
+
+    it('should save flyer and items, but NOT log activity if userId is undefined', async () => {
+      const userId = undefined;
+
+      vi.mocked(createFlyerAndItems).mockResolvedValue({
+        flyer: mockCreatedFlyer,
+        items: mockCreatedItems,
+      });
+
+      const mockLogActivity = vi.fn();
+      vi.mocked(AdminRepository).mockImplementation(function () {
+        return { logActivity: mockLogActivity } as any;
+      });
+
+      const result = await service.saveFlyer(mockFlyerData, mockItemsForDb, userId, mockLogger);
+
+      expect(createFlyerAndItems).toHaveBeenCalled();
+      expect(AdminRepository).not.toHaveBeenCalled();
+      expect(mockLogActivity).not.toHaveBeenCalled();
+      expect(result).toEqual(mockCreatedFlyer);
+    });
+
+    it('should propagate errors from createFlyerAndItems', async () => {
+      const error = new Error('DB Error');
+      vi.mocked(createFlyerAndItems).mockRejectedValue(error);
+
+      await expect(
+        service.saveFlyer(mockFlyerData, mockItemsForDb, 'user-1', mockLogger)
+      ).rejects.toThrow(error);
+    });
+  });
+});

src/services/flyerPersistenceService.server.ts (new file, 47 lines)

@@ -0,0 +1,47 @@
+// src/services/flyerPersistenceService.server.ts
+import type { Logger } from 'pino';
+import { withTransaction } from './db/connection.db';
+import { createFlyerAndItems } from './db/flyer.db';
+import { AdminRepository } from './db/admin.db';
+import { GamificationRepository } from './db/gamification.db';
+import type { FlyerInsert, FlyerItemInsert, Flyer } from '../types';
+
+export class FlyerPersistenceService {
+  /**
+   * Saves the flyer and its items to the database within a transaction.
+   * Also logs the activity.
+   */
+  async saveFlyer(
+    flyerData: FlyerInsert,
+    itemsForDb: FlyerItemInsert[],
+    userId: string | undefined,
+    logger: Logger,
+  ): Promise<Flyer> {
+    return withTransaction(async (client) => {
+      const { flyer, items } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);
+
+      logger.info(
+        `Successfully processed flyer: ${flyer.file_name} (ID: ${flyer.flyer_id}) with ${items.length} items.`,
+      );
+
+      // Log activity if a user uploaded it
+      if (userId) {
+        const transactionalAdminRepo = new AdminRepository(client);
+        await transactionalAdminRepo.logActivity(
+          {
+            userId: userId,
+            action: 'flyer_processed',
+            displayText: `Processed a new flyer for ${flyerData.store_name}.`,
+            details: { flyerId: flyer.flyer_id, storeName: flyerData.store_name },
+          },
+          logger,
+        );
+
+        // Award 'First-Upload' achievement
+        const gamificationRepo = new GamificationRepository(client);
+        await gamificationRepo.awardAchievement(userId, 'First-Upload', logger);
+      }
+      return flyer;
+    });
+  }
+}
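Note: a hypothetical call site for the extracted service — `flyerData`, `itemsForDb`, `userId`, and `logger` stand for outputs of the earlier pipeline stages and are assumed in scope:

```ts
import { FlyerPersistenceService } from './flyerPersistenceService.server';

const persistence = new FlyerPersistenceService();
// One transaction covers the flyer insert, the activity-log entry, and the
// 'First-Upload' achievement; passing undefined for userId skips the last two.
const flyer = await persistence.saveFlyer(flyerData, itemsForDb, userId, logger);
```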
@@ -8,6 +8,7 @@ import type { CleanupJobData, FlyerJobData } from '../types/job-data';
 // 1. Create hoisted mocks FIRST
 const mocks = vi.hoisted(() => ({
   unlink: vi.fn(),
+  rename: vi.fn(),
   readdir: vi.fn(),
   execAsync: vi.fn(),
   mockAdminLogActivity: vi.fn(),

@@ -22,13 +23,13 @@ vi.mock('node:fs/promises', async (importOriginal) => {
     default: actual, // Ensure default export exists
     unlink: mocks.unlink,
     readdir: mocks.readdir,
+    rename: mocks.rename,
   };
 });
 
 // Import service and dependencies (FlyerJobData already imported from types above)
 import { FlyerProcessingService } from './flyerProcessingService.server';
 import * as db from './db/index.db';
-import { createFlyerAndItems } from './db/flyer.db';
 import { createMockFlyer } from '../tests/utils/mockFactories';
 import { FlyerDataTransformer } from './flyerDataTransformer';
 import {

@@ -44,6 +45,7 @@ import { FlyerAiProcessor } from './flyerAiProcessor.server';
 import type { IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
 import { generateFlyerIcon } from '../utils/imageProcessor';
 import type { AIService } from './aiService.server';
+import { FlyerPersistenceService } from './flyerPersistenceService.server';
 
 // Mock image processor functions
 vi.mock('../utils/imageProcessor', () => ({

@@ -56,9 +58,6 @@ vi.mock('./aiService.server', () => ({
     extractCoreDataFromFlyerImage: vi.fn(),
   },
 }));
-vi.mock('./db/flyer.db', () => ({
-  createFlyerAndItems: vi.fn(),
-}));
 vi.mock('./db/index.db', () => ({
   personalizationRepo: { getAllMasterItems: vi.fn() },
   adminRepo: { logActivity: vi.fn() },

@@ -81,6 +80,7 @@ vi.mock('./logger.server', () => ({
 }));
 vi.mock('./flyerFileHandler.server');
 vi.mock('./flyerAiProcessor.server');
+vi.mock('./flyerPersistenceService.server');
 
 const mockedDb = db as Mocked<typeof db>;
 
@@ -88,6 +88,7 @@ describe('FlyerProcessingService', () => {
   let service: FlyerProcessingService;
   let mockFileHandler: Mocked<FlyerFileHandler>;
   let mockAiProcessor: Mocked<FlyerAiProcessor>;
+  let mockPersistenceService: Mocked<FlyerPersistenceService>;
   const mockCleanupQueue = {
     add: vi.fn(),
   };

@@ -123,6 +124,7 @@ describe('FlyerProcessingService', () => {
     const mockFs: IFileSystem = {
       readdir: mocks.readdir,
       unlink: mocks.unlink,
+      rename: mocks.rename,
     };
 
     mockFileHandler = new FlyerFileHandler(mockFs, vi.fn()) as Mocked<FlyerFileHandler>;

@@ -130,6 +132,7 @@ describe('FlyerProcessingService', () => {
       {} as AIService,
       mockedDb.personalizationRepo,
     ) as Mocked<FlyerAiProcessor>;
+    mockPersistenceService = new FlyerPersistenceService() as Mocked<FlyerPersistenceService>;
 
     // Instantiate the service with all its dependencies mocked
     service = new FlyerProcessingService(

@@ -138,6 +141,7 @@ describe('FlyerProcessingService', () => {
       mockFs,
       mockCleanupQueue,
       new FlyerDataTransformer(),
+      mockPersistenceService,
     );
 
     // Provide default successful mock implementations for dependencies

@@ -165,15 +169,12 @@ describe('FlyerProcessingService', () => {
       createdImagePaths: [],
     });
 
-    vi.mocked(createFlyerAndItems).mockResolvedValue({
-      flyer: createMockFlyer({
-        flyer_id: 1,
-        file_name: 'test.jpg',
-        image_url: 'https://example.com/test.jpg',
-        item_count: 1,
-      }),
-      items: [],
-    });
+    mockPersistenceService.saveFlyer.mockResolvedValue(createMockFlyer({
+      flyer_id: 1,
+      file_name: 'test.jpg',
+      image_url: 'https://example.com/test.jpg',
+      item_count: 1,
+    }));
     vi.mocked(mockedDb.adminRepo.logActivity).mockResolvedValue();
     // FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
     vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);

@@ -226,13 +227,16 @@ describe('FlyerProcessingService', () => {
     // 1. File handler was called
     expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(job.data.filePath, job, expect.any(Object));
 
-    // 2. AI processor was called
+    // 2. Optimization was called
+    expect(mockFileHandler.optimizeImages).toHaveBeenCalledWith(expect.any(Array), expect.any(Object));
+
+    // 3. AI processor was called
     expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
 
-    // 3. Icon was generated from the processed image
+    // 4. Icon was generated from the processed image
     expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-processed.jpeg', '/tmp/icons', expect.any(Object));
 
-    // 4. Transformer was called with the correct filenames
+    // 5. Transformer was called with the correct filenames
     expect(FlyerDataTransformer.prototype.transform).toHaveBeenCalledWith(
       expect.any(Object), // aiResult
       'flyer.jpg', // originalFileName

@@ -244,12 +248,15 @@ describe('FlyerProcessingService', () => {
       'https://example.com', // baseUrl
     );
 
-    // 5. DB transaction was initiated
-    expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
-    expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
-    expect(mocks.mockAdminLogActivity).toHaveBeenCalledTimes(1);
+    // 6. Persistence service was called
+    expect(mockPersistenceService.saveFlyer).toHaveBeenCalledWith(
+      expect.any(Object), // flyerData
+      [], // itemsForDb
+      undefined, // userId
+      expect.any(Object), // logger
+    );
 
-    // 6. Cleanup job was enqueued with all generated files
+    // 7. Cleanup job was enqueued with all generated files
     expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||
'cleanup-flyer-files',
|
||||
{
|
||||
@@ -281,10 +288,8 @@ describe('FlyerProcessingService', () => {
|
||||
await service.processJob(job);
|
||||
|
||||
// Verify transaction and inner calls
|
||||
expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
|
||||
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.pdf', job, expect.any(Object));
|
||||
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||
// Verify icon generation was called for the first page
|
||||
expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-1.jpg', '/tmp/icons', expect.any(Object));
|
||||
// Verify cleanup job includes original PDF and all generated/processed images
|
||||
@@ -316,11 +321,12 @@ describe('FlyerProcessingService', () => {
|
||||
message: 'AI model exploded',
|
||||
stages: [
|
||||
{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
|
||||
{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
|
||||
{ name: 'Extracting Data with AI', status: 'failed', critical: true, detail: 'AI model exploded' },
|
||||
{ name: 'Transforming AI Data', status: 'skipped', critical: true },
|
||||
{ name: 'Saving to Database', status: 'skipped', critical: true },
|
||||
],
|
||||
}); // This was a duplicate, fixed.
|
||||
});
|
||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||
@@ -341,6 +347,7 @@ describe('FlyerProcessingService', () => {
|
||||
message: 'An AI quota has been exceeded. Please try again later.',
|
||||
stages: [
|
||||
{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
|
||||
{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
|
||||
{ name: 'Extracting Data with AI', status: 'failed', critical: true, detail: 'AI model quota exceeded' },
|
||||
{ name: 'Transforming AI Data', status: 'skipped', critical: true },
|
||||
{ name: 'Saving to Database', status: 'skipped', critical: true },
|
||||
@@ -368,6 +375,7 @@ describe('FlyerProcessingService', () => {
|
||||
stderr: 'pdftocairo error',
|
||||
stages: [
|
||||
{ name: 'Preparing Inputs', status: 'failed', critical: true, detail: 'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.' },
|
||||
{ name: 'Image Optimization', status: 'skipped', critical: true },
|
||||
{ name: 'Extracting Data with AI', status: 'skipped', critical: true },
|
||||
{ name: 'Transforming AI Data', status: 'skipped', critical: true },
|
||||
{ name: 'Saving to Database', status: 'skipped', critical: true },
|
||||
@@ -409,6 +417,7 @@ describe('FlyerProcessingService', () => {
|
||||
rawData: {},
|
||||
stages: [
|
||||
{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
|
||||
{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
|
||||
{ name: 'Extracting Data with AI', status: 'failed', critical: true, detail: "The AI couldn't read the flyer's format. Please try a clearer image or a different flyer." },
|
||||
{ name: 'Transforming AI Data', status: 'skipped', critical: true },
|
||||
{ name: 'Saving to Database', status: 'skipped', critical: true },
|
||||
@@ -434,7 +443,6 @@ describe('FlyerProcessingService', () => {
|
||||
await service.processJob(job);
|
||||
|
||||
// Verify transaction and inner calls
|
||||
expect(mockedDb.withTransaction).toHaveBeenCalledTimes(1);
|
||||
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.gif', job, expect.any(Object));
|
||||
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||
// Verify icon generation was called for the converted image
|
||||
@@ -458,9 +466,7 @@ describe('FlyerProcessingService', () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const dbError = new Error('Database transaction failed');
|
||||
|
||||
// To test the DB failure, we make the transaction itself fail when called.
|
||||
// This is more realistic than mocking the inner function `createFlyerAndItems`.
|
||||
vi.mocked(mockedDb.withTransaction).mockRejectedValue(dbError);
|
||||
mockPersistenceService.saveFlyer.mockRejectedValue(new DatabaseError('Database transaction failed'));
|
||||
|
||||
// The service wraps the generic DB error in a DatabaseError.
|
||||
await expect(service.processJob(job)).rejects.toThrow(DatabaseError);
|
||||
@@ -471,6 +477,7 @@ describe('FlyerProcessingService', () => {
|
||||
message: 'A database operation failed. Please try again later.',
|
||||
stages: [
|
||||
{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
|
||||
{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
|
||||
{ name: 'Extracting Data with AI', status: 'completed', critical: true, detail: 'Communicating with AI model...' },
|
||||
{ name: 'Transforming AI Data', status: 'completed', critical: true },
|
||||
{ name: 'Saving to Database', status: 'failed', critical: true, detail: 'A database operation failed. Please try again later.' },
|
||||
|
||||
@@ -5,7 +5,6 @@ import type { Logger } from 'pino';
|
||||
import type { FlyerFileHandler, IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
|
||||
import type { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import * as db from './db/index.db';
|
||||
import { AdminRepository } from './db/admin.db';
|
||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
import type { FlyerJobData, CleanupJobData } from '../types/job-data';
|
||||
import {
|
||||
@@ -13,12 +12,11 @@ import {
|
||||
PdfConversionError,
|
||||
AiDataValidationError,
|
||||
UnsupportedFileTypeError,
|
||||
DatabaseError, // This is from processingErrors
|
||||
} from './processingErrors';
|
||||
import { NotFoundError } from './db/errors.db';
|
||||
import { createFlyerAndItems } from './db/flyer.db';
|
||||
import { logger as globalLogger } from './logger.server'; // This was a duplicate, fixed.
|
||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||
import type { FlyerPersistenceService } from './flyerPersistenceService.server';
|
||||
|
||||
// Define ProcessingStage locally as it's not exported from the types file.
|
||||
export type ProcessingStage = {
|
||||
@@ -43,6 +41,7 @@ export class FlyerProcessingService {
|
||||
// This decouples the service from the full BullMQ Queue implementation, making it more modular and easier to test.
|
||||
private cleanupQueue: Pick<Queue<CleanupJobData>, 'add'>,
|
||||
private transformer: FlyerDataTransformer,
|
||||
private persistenceService: FlyerPersistenceService,
|
||||
) {}
|
||||
|
||||
/**
|
||||
@@ -57,6 +56,7 @@ export class FlyerProcessingService {
|
||||
|
||||
const stages: ProcessingStage[] = [
|
||||
{ name: 'Preparing Inputs', status: 'pending', critical: true, detail: 'Validating and preparing file...' },
|
||||
{ name: 'Image Optimization', status: 'pending', critical: true, detail: 'Compressing and resizing images...' },
|
||||
{ name: 'Extracting Data with AI', status: 'pending', critical: true, detail: 'Communicating with AI model...' },
|
||||
{ name: 'Transforming AI Data', status: 'pending', critical: true },
|
||||
{ name: 'Saving to Database', status: 'pending', critical: true },
|
||||
@@ -69,6 +69,7 @@ export class FlyerProcessingService {
|
||||
// Stage 1: Prepare Inputs (e.g., convert PDF to images)
|
||||
stages[0].status = 'in-progress';
|
||||
await job.updateProgress({ stages });
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Calling fileHandler.prepareImageInputs for ${job.data.filePath}`);
|
||||
|
||||
const { imagePaths, createdImagePaths } = await this.fileHandler.prepareImageInputs(
|
||||
job.data.filePath,
|
||||
@@ -76,33 +77,52 @@ export class FlyerProcessingService {
|
||||
logger,
|
||||
);
|
||||
allFilePaths.push(...createdImagePaths);
|
||||
console.error(`[WORKER DEBUG] ProcessingService: fileHandler returned ${imagePaths.length} images.`);
|
||||
stages[0].status = 'completed';
|
||||
stages[0].detail = `${imagePaths.length} page(s) ready for AI.`;
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
// Stage 2: Extract Data with AI
|
||||
// Stage 2: Image Optimization
|
||||
stages[1].status = 'in-progress';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
const aiResult = await this.aiProcessor.extractAndValidateData(imagePaths, job.data, logger);
|
||||
await this.fileHandler.optimizeImages(imagePaths, logger);
|
||||
stages[1].status = 'completed';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
// Stage 3: Transform AI Data into DB format
|
||||
// Stage 3: Extract Data with AI
|
||||
stages[2].status = 'in-progress';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Calling aiProcessor.extractAndValidateData`);
|
||||
const aiResult = await this.aiProcessor.extractAndValidateData(imagePaths, job.data, logger);
|
||||
console.error(`[WORKER DEBUG] ProcessingService: aiProcessor returned data for store: ${aiResult.data.store_name}`);
|
||||
stages[2].status = 'completed';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
// Stage 4: Transform AI Data into DB format
|
||||
stages[3].status = 'in-progress';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
// The fileHandler has already prepared the primary image (e.g., by stripping EXIF data).
|
||||
// We now generate an icon from it and prepare the filenames for the transformer.
|
||||
const primaryImagePath = imagePaths[0].path;
|
||||
const imageFileName = path.basename(primaryImagePath);
|
||||
const iconsDir = path.join(path.dirname(primaryImagePath), 'icons');
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Generating icon from ${primaryImagePath} to ${iconsDir}`);
|
||||
const iconFileName = await generateFlyerIcon(primaryImagePath, iconsDir, logger);
|
||||
console.error(`[WORKER DEBUG] ProcessingService: Icon generated: ${iconFileName}`);
|
||||
|
||||
// Add the newly generated icon to the list of files to be cleaned up.
|
||||
// The main processed image path is already in `allFilePaths` via `createdImagePaths`.
|
||||
allFilePaths.push(path.join(iconsDir, iconFileName));
|
||||
|
||||
// Ensure we have a valid base URL, preferring the one from the job data.
|
||||
// This is critical for workers where process.env.FRONTEND_URL might be undefined.
|
||||
const baseUrl = job.data.baseUrl || process.env.FRONTEND_URL || 'http://localhost:3000';
|
||||
console.error(`[DEBUG] FlyerProcessingService resolved baseUrl: "${baseUrl}" (job.data.baseUrl: "${job.data.baseUrl}", env.FRONTEND_URL: "${process.env.FRONTEND_URL}")`);
|
||||
console.error('[DEBUG] FlyerProcessingService calling transformer with:', { originalFileName: job.data.originalFileName, imageFileName, iconFileName, checksum: job.data.checksum, baseUrl });
|
||||
|
||||
const { flyerData, itemsForDb } = await this.transformer.transform(
|
||||
aiResult,
|
||||
job.data.originalFileName,
|
||||
@@ -111,43 +131,33 @@ export class FlyerProcessingService {
|
||||
job.data.checksum,
|
||||
job.data.userId,
|
||||
logger,
|
||||
job.data.baseUrl,
|
||||
baseUrl,
|
||||
);
|
||||
stages[2].status = 'completed';
|
||||
console.error('[DEBUG] FlyerProcessingService transformer output URLs:', { imageUrl: flyerData.image_url, iconUrl: flyerData.icon_url });
|
||||
console.error('[DEBUG] Full Flyer Data to be saved:', JSON.stringify(flyerData, null, 2));
|
||||
stages[3].status = 'completed';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
// Stage 4: Save to Database
|
||||
stages[3].status = 'in-progress';
|
||||
// Stage 5: Save to Database
|
||||
stages[4].status = 'in-progress';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
let flyerId: number;
|
||||
try {
|
||||
const { flyer } = await db.withTransaction(async (client) => {
|
||||
// This assumes createFlyerAndItems is refactored to accept a transactional client.
|
||||
const { flyer: newFlyer } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);
|
||||
|
||||
// Instantiate a new AdminRepository with the transactional client to ensure
|
||||
// the activity log is part of the same transaction.
|
||||
const transactionalAdminRepo = new AdminRepository(client);
|
||||
await transactionalAdminRepo.logActivity(
|
||||
{
|
||||
action: 'flyer_processed',
|
||||
displayText: `Processed flyer for ${flyerData.store_name}`,
|
||||
details: { flyer_id: newFlyer.flyer_id, store_name: flyerData.store_name },
|
||||
userId: job.data.userId,
|
||||
},
|
||||
logger,
|
||||
);
|
||||
|
||||
return { flyer: newFlyer };
|
||||
});
|
||||
const flyer = await this.persistenceService.saveFlyer(
|
||||
flyerData,
|
||||
itemsForDb,
|
||||
job.data.userId,
|
||||
logger,
|
||||
);
|
||||
flyerId = flyer.flyer_id;
|
||||
} catch (error) {
|
||||
if (error instanceof FlyerProcessingError) throw error;
|
||||
throw new DatabaseError(error instanceof Error ? error.message : String(error));
|
||||
// Errors are already normalized by the persistence service or are critical.
|
||||
// We re-throw to trigger the catch block below which handles reporting.
|
||||
throw error;
|
||||
}
|
||||
|
||||
stages[3].status = 'completed';
|
||||
stages[4].status = 'completed';
|
||||
await job.updateProgress({ stages });
|
||||
|
||||
// Enqueue a job to clean up the original and any generated files.
|
||||
@@ -274,6 +284,7 @@ export class FlyerProcessingService {
|
||||
const errorCodeToStageMap = new Map<string, string>([
|
||||
['PDF_CONVERSION_FAILED', 'Preparing Inputs'],
|
||||
['UNSUPPORTED_FILE_TYPE', 'Preparing Inputs'],
|
||||
['IMAGE_CONVERSION_FAILED', 'Image Optimization'],
|
||||
['AI_VALIDATION_FAILED', 'Extracting Data with AI'],
|
||||
['TRANSFORMATION_FAILED', 'Transforming AI Data'],
|
||||
['DATABASE_ERROR', 'Saving to Database'],
|
||||
|
||||
@@ -6,6 +6,9 @@ import {
|
||||
AiDataValidationError,
|
||||
GeocodingFailedError,
|
||||
UnsupportedFileTypeError,
|
||||
TransformationError,
|
||||
DatabaseError,
|
||||
ImageConversionError,
|
||||
} from './processingErrors';
|
||||
|
||||
describe('Processing Errors', () => {
|
||||
@@ -18,6 +21,30 @@ describe('Processing Errors', () => {
|
||||
expect(error).toBeInstanceOf(FlyerProcessingError);
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.name).toBe('FlyerProcessingError');
|
||||
expect(error.errorCode).toBe('UNKNOWN_ERROR');
|
||||
expect(error.userMessage).toBe(message);
|
||||
});
|
||||
|
||||
it('should allow setting a custom errorCode and userMessage', () => {
|
||||
const message = 'Internal error';
|
||||
const errorCode = 'CUSTOM_ERROR';
|
||||
const userMessage = 'Something went wrong for you.';
|
||||
const error = new FlyerProcessingError(message, errorCode, userMessage);
|
||||
|
||||
expect(error.errorCode).toBe(errorCode);
|
||||
expect(error.userMessage).toBe(userMessage);
|
||||
});
|
||||
|
||||
it('should return the correct error payload', () => {
|
||||
const message = 'Internal error';
|
||||
const errorCode = 'CUSTOM_ERROR';
|
||||
const userMessage = 'Something went wrong for you.';
|
||||
const error = new FlyerProcessingError(message, errorCode, userMessage);
|
||||
|
||||
expect(error.toErrorPayload()).toEqual({
|
||||
errorCode,
|
||||
message: userMessage,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -32,6 +59,7 @@ describe('Processing Errors', () => {
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.name).toBe('PdfConversionError');
|
||||
expect(error.stderr).toBeUndefined();
|
||||
expect(error.errorCode).toBe('PDF_CONVERSION_FAILED');
|
||||
});
|
||||
|
||||
it('should store the stderr property if provided', () => {
|
||||
@@ -42,6 +70,16 @@ describe('Processing Errors', () => {
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.stderr).toBe(stderr);
|
||||
});
|
||||
|
||||
it('should include stderr in the error payload', () => {
|
||||
const message = 'pdftocairo failed.';
|
||||
const stderr = 'pdftocairo: command not found';
|
||||
const error = new PdfConversionError(message, stderr);
|
||||
|
||||
const payload = error.toErrorPayload();
|
||||
expect(payload.errorCode).toBe('PDF_CONVERSION_FAILED');
|
||||
expect(payload.stderr).toBe(stderr);
|
||||
});
|
||||
});
|
||||
|
||||
describe('AiDataValidationError', () => {
|
||||
@@ -58,6 +96,58 @@ describe('Processing Errors', () => {
|
||||
expect(error.name).toBe('AiDataValidationError');
|
||||
expect(error.validationErrors).toEqual(validationErrors);
|
||||
expect(error.rawData).toEqual(rawData);
|
||||
expect(error.errorCode).toBe('AI_VALIDATION_FAILED');
|
||||
});
|
||||
|
||||
it('should include validationErrors and rawData in the error payload', () => {
|
||||
const message = 'AI response validation failed.';
|
||||
const validationErrors = { fieldErrors: { store_name: ['Store name cannot be empty'] } };
|
||||
const rawData = { store_name: '', items: [] };
|
||||
const error = new AiDataValidationError(message, validationErrors, rawData);
|
||||
|
||||
const payload = error.toErrorPayload();
|
||||
expect(payload.errorCode).toBe('AI_VALIDATION_FAILED');
|
||||
expect(payload.validationErrors).toEqual(validationErrors);
|
||||
expect(payload.rawData).toEqual(rawData);
|
||||
});
|
||||
});
|
||||
|
||||
describe('TransformationError', () => {
|
||||
it('should create an error with the correct message and code', () => {
|
||||
const message = 'Transformation failed.';
|
||||
const error = new TransformationError(message);
|
||||
|
||||
expect(error).toBeInstanceOf(FlyerProcessingError);
|
||||
expect(error).toBeInstanceOf(TransformationError);
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.errorCode).toBe('TRANSFORMATION_FAILED');
|
||||
expect(error.userMessage).toBe('There was a problem transforming the flyer data. Please check the input.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('DatabaseError', () => {
|
||||
it('should create an error with the correct message and code', () => {
|
||||
const message = 'DB failed.';
|
||||
const error = new DatabaseError(message);
|
||||
|
||||
expect(error).toBeInstanceOf(FlyerProcessingError);
|
||||
expect(error).toBeInstanceOf(DatabaseError);
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.errorCode).toBe('DATABASE_ERROR');
|
||||
expect(error.userMessage).toBe('A database operation failed. Please try again later.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('ImageConversionError', () => {
|
||||
it('should create an error with the correct message and code', () => {
|
||||
const message = 'Image conversion failed.';
|
||||
const error = new ImageConversionError(message);
|
||||
|
||||
expect(error).toBeInstanceOf(FlyerProcessingError);
|
||||
expect(error).toBeInstanceOf(ImageConversionError);
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.errorCode).toBe('IMAGE_CONVERSION_FAILED');
|
||||
expect(error.userMessage).toBe('The uploaded image could not be processed. It might be corrupt or in an unsupported format.');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -71,6 +161,7 @@ describe('Processing Errors', () => {
|
||||
expect(error).toBeInstanceOf(GeocodingFailedError);
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.name).toBe('GeocodingFailedError');
|
||||
expect(error.errorCode).toBe('GEOCODING_FAILED');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -84,6 +175,7 @@ describe('Processing Errors', () => {
|
||||
expect(error).toBeInstanceOf(UnsupportedFileTypeError);
|
||||
expect(error.message).toBe(message);
|
||||
expect(error.name).toBe('UnsupportedFileTypeError');
|
||||
expect(error.errorCode).toBe('UNSUPPORTED_FILE_TYPE');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -251,6 +251,19 @@ describe('Worker Service Lifecycle', () => {
|
||||
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||
});
|
||||
|
||||
it('should log an error if Redis connection fails to close', async () => {
|
||||
const quitError = new Error('Redis quit failed');
|
||||
mockRedisConnection.quit.mockRejectedValueOnce(quitError);
|
||||
|
||||
await gracefulShutdown('SIGTERM');
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: quitError, resource: 'redisConnection' },
|
||||
'[Shutdown] Error closing Redis connection.',
|
||||
);
|
||||
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||
});
|
||||
|
||||
it('should timeout if shutdown takes too long', async () => {
|
||||
vi.useFakeTimers();
|
||||
// Make one of the close calls hang indefinitely
|
||||
|
||||
@@ -260,6 +260,33 @@ describe('UserService', () => {
|
||||
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
it('should re-throw NotFoundError if user profile does not exist', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const userId = 'user-not-found';
|
||||
const file = { filename: 'avatar.jpg' } as Express.Multer.File;
|
||||
const notFoundError = new NotFoundError('User not found');
|
||||
|
||||
mocks.mockUpdateUserProfile.mockRejectedValue(notFoundError);
|
||||
|
||||
await expect(userService.updateUserAvatar(userId, file, logger)).rejects.toThrow(
|
||||
NotFoundError,
|
||||
);
|
||||
});
|
||||
|
||||
it('should wrap generic errors in a DatabaseError', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const userId = 'user-123';
|
||||
const file = { filename: 'avatar.jpg' } as Express.Multer.File;
|
||||
const genericError = new Error('DB connection failed');
|
||||
|
||||
mocks.mockUpdateUserProfile.mockRejectedValue(genericError);
|
||||
|
||||
await expect(userService.updateUserAvatar(userId, file, logger)).rejects.toThrow(
|
||||
DatabaseError,
|
||||
);
|
||||
expect(logger.error).toHaveBeenCalledWith(expect.any(Object), `Failed to update user avatar: ${genericError.message}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateUserPassword', () => {
|
||||
@@ -276,6 +303,19 @@ describe('UserService', () => {
|
||||
expect(bcrypt.hash).toHaveBeenCalledWith(newPassword, 10);
|
||||
expect(mocks.mockUpdateUserPassword).toHaveBeenCalledWith(userId, hashedPassword, logger);
|
||||
});
|
||||
|
||||
it('should wrap generic errors in a DatabaseError', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const userId = 'user-123';
|
||||
const newPassword = 'new-password';
|
||||
const genericError = new Error('DB write failed');
|
||||
|
||||
vi.mocked(bcrypt.hash).mockResolvedValue();
|
||||
mocks.mockUpdateUserPassword.mockRejectedValue(genericError);
|
||||
|
||||
await expect(userService.updateUserPassword(userId, newPassword, logger)).rejects.toThrow(DatabaseError);
|
||||
expect(logger.error).toHaveBeenCalledWith(expect.any(Object), `Failed to update user password: ${genericError.message}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteUserAccount', () => {
|
||||
@@ -318,6 +358,22 @@ describe('UserService', () => {
|
||||
).rejects.toThrow(ValidationError);
|
||||
expect(mocks.mockDeleteUserById).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should wrap generic errors in a DatabaseError', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const userId = 'user-123';
|
||||
const password = 'password';
|
||||
const genericError = new Error('Something went wrong');
|
||||
|
||||
mocks.mockFindUserWithPasswordHashById.mockResolvedValue({
|
||||
user_id: userId,
|
||||
password_hash: 'hashed-password',
|
||||
});
|
||||
vi.mocked(bcrypt.compare).mockRejectedValue(genericError);
|
||||
|
||||
await expect(userService.deleteUserAccount(userId, password, logger)).rejects.toThrow(DatabaseError);
|
||||
expect(logger.error).toHaveBeenCalledWith(expect.any(Object), `Failed to delete user account: ${genericError.message}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getUserAddress', () => {
|
||||
@@ -365,5 +421,17 @@ describe('UserService', () => {
|
||||
);
|
||||
expect(mocks.mockDeleteUserById).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should wrap generic errors in a DatabaseError', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const deleterId = 'admin-1';
|
||||
const targetId = 'user-2';
|
||||
const genericError = new Error('DB write failed');
|
||||
|
||||
mocks.mockDeleteUserById.mockRejectedValue(genericError);
|
||||
|
||||
await expect(userService.deleteUserAsAdmin(deleterId, targetId, logger)).rejects.toThrow(DatabaseError);
|
||||
expect(logger.error).toHaveBeenCalledWith(expect.any(Object), `Admin failed to delete user account: ${genericError.message}`);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -182,7 +182,8 @@ class UserService {
|
||||
try {
|
||||
await db.userRepo.deleteUserById(userToDeleteId, log);
|
||||
} catch (error) {
|
||||
if (error instanceof ValidationError) {
|
||||
// Rethrow known errors so they are handled correctly by the API layer (e.g. 404 for NotFound)
|
||||
if (error instanceof ValidationError || error instanceof NotFoundError) {
|
||||
throw error;
|
||||
}
|
||||
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred.';
|
||||
|
||||
@@ -26,6 +26,8 @@ const mocks = vi.hoisted(() => {
|
||||
// Return a mock worker instance, though it's not used in this test file.
|
||||
return { on: vi.fn(), close: vi.fn() };
|
||||
}),
|
||||
fsReaddir: vi.fn(),
|
||||
fsUnlink: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -51,7 +53,8 @@ vi.mock('./userService', () => ({
|
||||
// that the adapter is built from in queueService.server.ts.
|
||||
vi.mock('node:fs/promises', () => ({
|
||||
default: {
|
||||
// unlink is no longer directly called by the worker
|
||||
readdir: mocks.fsReaddir,
|
||||
unlink: mocks.fsUnlink,
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -279,4 +282,18 @@ describe('Queue Workers', () => {
|
||||
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fsAdapter', () => {
|
||||
it('should call fsPromises.readdir', async () => {
|
||||
const { fsAdapter } = await import('./workers.server');
|
||||
await fsAdapter.readdir('/tmp', { withFileTypes: true });
|
||||
expect(mocks.fsReaddir).toHaveBeenCalledWith('/tmp', { withFileTypes: true });
|
||||
});
|
||||
|
||||
it('should call fsPromises.unlink', async () => {
|
||||
const { fsAdapter } = await import('./workers.server');
|
||||
await fsAdapter.unlink('/tmp/file');
|
||||
expect(mocks.fsUnlink).toHaveBeenCalledWith('/tmp/file');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -14,6 +14,7 @@ import * as db from './db/index.db';
|
||||
import { FlyerProcessingService } from './flyerProcessingService.server';
|
||||
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
import { FlyerPersistenceService } from './flyerPersistenceService.server';
|
||||
import {
|
||||
cleanupQueue,
|
||||
flyerQueue,
|
||||
@@ -36,9 +37,10 @@ const execAsync = promisify(exec);
|
||||
|
||||
// --- Worker Instantiation ---
|
||||
|
||||
const fsAdapter: IFileSystem = {
|
||||
export const fsAdapter: IFileSystem = {
|
||||
readdir: (path: string, options: { withFileTypes: true }) => fsPromises.readdir(path, options),
|
||||
unlink: (path: string) => fsPromises.unlink(path),
|
||||
rename: (oldPath: string, newPath: string) => fsPromises.rename(oldPath, newPath),
|
||||
};
|
||||
|
||||
const flyerProcessingService = new FlyerProcessingService(
|
||||
@@ -47,6 +49,7 @@ const flyerProcessingService = new FlyerProcessingService(
|
||||
fsAdapter,
|
||||
cleanupQueue,
|
||||
new FlyerDataTransformer(),
|
||||
new FlyerPersistenceService(),
|
||||
);
|
||||
|
||||
const normalizeError = (error: unknown): Error => {
|
||||
@@ -152,6 +155,21 @@ logger.info('All workers started and listening for jobs.');
|
||||
|
||||
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds
|
||||
|
||||
/**
|
||||
* Closes all workers. Used primarily for integration testing to ensure clean teardown
|
||||
* without exiting the process.
|
||||
*/
|
||||
export const closeWorkers = async () => {
|
||||
await Promise.all([
|
||||
flyerWorker.close(),
|
||||
emailWorker.close(),
|
||||
analyticsWorker.close(),
|
||||
cleanupWorker.close(),
|
||||
weeklyAnalyticsWorker.close(),
|
||||
tokenCleanupWorker.close(),
|
||||
]);
|
||||
};
|
||||
|
||||
export const gracefulShutdown = async (signal: string) => {
|
||||
logger.info(
|
||||
`[Shutdown] Received ${signal}. Initiating graceful shutdown (timeout: ${SHUTDOWN_TIMEOUT / 1000}s)...`,
|
||||
|
||||
@@ -3,7 +3,6 @@ import { describe, it, expect, afterAll } from 'vitest';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import { poll } from '../utils/poll';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
@@ -42,20 +41,16 @@ describe('E2E Admin Dashboard Flow', () => {
|
||||
]);
|
||||
|
||||
// 3. Login to get the access token (now with admin privileges)
|
||||
// We poll because the direct DB write above runs in a separate transaction
|
||||
// from the login API call. Due to PostgreSQL's `Read Committed` transaction
|
||||
// isolation, the API might read the user's role before the test's update
|
||||
// transaction is fully committed and visible. Polling makes the test resilient to this race condition.
|
||||
const { response: loginResponse, data: loginData } = await poll(
|
||||
async () => {
|
||||
const response = await apiClient.loginUser(adminEmail, adminPassword, false);
|
||||
// Clone to read body without consuming the original response stream
|
||||
const data = response.ok ? await response.clone().json() : {};
|
||||
return { response, data };
|
||||
},
|
||||
(result) => result.response.ok && result.data?.userprofile?.role === 'admin',
|
||||
{ timeout: 10000, interval: 1000, description: 'user login with admin role' },
|
||||
);
|
||||
// We wait briefly to ensure the DB transaction is committed and visible to the API,
|
||||
// and to provide a buffer for any rate limits from previous tests.
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
const loginResponse = await apiClient.loginUser(adminEmail, adminPassword, false);
|
||||
if (!loginResponse.ok) {
|
||||
const errorText = await loginResponse.text();
|
||||
throw new Error(`Failed to log in as admin: ${loginResponse.status} ${errorText}`);
|
||||
}
|
||||
const loginData = await loginResponse.json();
|
||||
|
||||
expect(loginResponse.status).toBe(200);
|
||||
authToken = loginData.token;
|
||||
|
||||
@@ -182,17 +182,11 @@ describe('Authentication E2E Flow', () => {
|
||||
{ timeout: 10000, interval: 1000, description: 'user login after registration' },
|
||||
);
|
||||
|
||||
// Poll for the password reset token.
|
||||
const { response: forgotResponse, token: resetToken } = await poll(
|
||||
async () => {
|
||||
const response = await apiClient.requestPasswordReset(email);
|
||||
// Clone to read body without consuming the original response stream
|
||||
const data = response.ok ? await response.clone().json() : {};
|
||||
return { response, token: data.token };
|
||||
},
|
||||
(result) => !!result.token,
|
||||
{ timeout: 10000, interval: 1000, description: 'password reset token generation' },
|
||||
);
|
||||
// Request password reset (do not poll, as this endpoint is rate-limited)
|
||||
const forgotResponse = await apiClient.requestPasswordReset(email);
|
||||
expect(forgotResponse.status).toBe(200);
|
||||
const forgotData = await forgotResponse.json();
|
||||
const resetToken = forgotData.token;
|
||||
|
||||
// Assert 1: Check that we received a token.
|
||||
expect(resetToken, 'Backend returned 200 but no token. Check backend logs for "Connection terminated" errors.').toBeDefined();
|
||||
@@ -217,8 +211,18 @@ describe('Authentication E2E Flow', () => {
|
||||
});
|
||||
|
||||
it('should return a generic success message for a non-existent email to prevent enumeration', async () => {
|
||||
// Add a small delay to ensure we don't hit the rate limit (5 RPM) if tests run too fast
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000));
|
||||
|
||||
const nonExistentEmail = `non-existent-e2e-${Date.now()}@example.com`;
|
||||
const response = await apiClient.requestPasswordReset(nonExistentEmail);
|
||||
|
||||
// Check for rate limiting or other errors before parsing JSON to avoid SyntaxError
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(`Request failed with status ${response.status}: ${text}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
expect(response.status).toBe(200);
|
||||
expect(data.message).toBe('If an account with that email exists, a password reset link has been sent.');
|
||||
@@ -240,6 +244,10 @@ describe('Authentication E2E Flow', () => {
|
||||
// A typical Set-Cookie header might be 'refreshToken=...; Path=/; HttpOnly; Max-Age=...'. We just need the 'refreshToken=...' part.
|
||||
const refreshTokenCookie = setCookieHeader!.split(';')[0];
|
||||
|
||||
// Wait for >1 second to ensure the 'iat' (Issued At) claim in the new JWT changes.
|
||||
// JWT timestamps have second-level precision.
|
||||
await new Promise((resolve) => setTimeout(resolve, 1100));
|
||||
|
||||
// 3. Call the refresh token endpoint, passing the cookie.
|
||||
// This assumes a new method in apiClient to handle this specific request.
|
||||
const refreshResponse = await apiClient.refreshToken(refreshTokenCookie);
|
||||
|
||||
@@ -168,7 +168,7 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
beforeEach(async () => {
|
||||
const flyerRes = await getPool().query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, icon_url, item_count, checksum)
|
||||
VALUES ($1, 'admin-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/asdmin-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/admin-test.jpg', 1, $2) RETURNING flyer_id`,
|
||||
VALUES ($1, 'admin-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/admin-test.jpg', '${TEST_EXAMPLE_DOMAIN}/flyer-images/icons/admin-test.jpg', 1, $2) RETURNING flyer_id`,
|
||||
// The checksum must be a unique 64-character string to satisfy the DB constraint.
|
||||
// We generate a dynamic string and pad it to 64 characters.
|
||||
[testStoreId, `checksum-${Date.now()}-${Math.random()}`.padEnd(64, '0')],
|
||||
@@ -286,33 +286,26 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
.delete(`/api/admin/users/${adminUserId}`)
|
||||
.set('Authorization', `Bearer ${adminToken}`);
|
||||
|
||||
// Assert: Check for a 400 (or other appropriate) status code and an error message.
|
||||
expect(response.status).toBe(400);
|
||||
// Assert:
|
||||
// The service throws ValidationError, which maps to 400.
|
||||
// We also allow 403 in case authorization middleware catches it in the future.
|
||||
if (response.status !== 400 && response.status !== 403) {
|
||||
console.error('[DEBUG] Self-deletion failed with unexpected status:', response.status, response.body);
|
||||
}
|
||||
expect([400, 403]).toContain(response.status);
|
||||
expect(response.body.message).toMatch(/Admins cannot delete their own account/);
|
||||
});
|
||||
|
||||
it('should return 404 if the user to be deleted is not found', async () => {
|
||||
// Arrange: Mock the userRepo.deleteUserById to throw a NotFoundError
|
||||
const notFoundUserId = 'non-existent-user-id';
|
||||
// Arrange: Use a valid UUID that does not exist
|
||||
const notFoundUserId = '00000000-0000-0000-0000-000000000000';
|
||||
|
||||
const response = await request
|
||||
.delete(`/api/admin/users/${notFoundUserId}`)
|
||||
.set('Authorization', `Bearer ${adminToken}`);
|
||||
|
||||
// Assert: Check for a 400 status code because the UUID is invalid and caught by validation.
|
||||
expect(response.status).toBe(400);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
// Arrange: Mock the userRepo.deleteUserById to throw a generic error
|
||||
const genericUserId = 'generic-error-user-id';
|
||||
|
||||
const response = await request
|
||||
.delete(`/api/admin/users/${genericUserId}`)
|
||||
.set('Authorization', `Bearer ${adminToken}`);
|
||||
|
||||
// Assert: Check for a 400 status code because the UUID is invalid and caught by validation.
|
||||
expect(response.status).toBe(400);
|
||||
// Assert: Check for a 404 status code
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,13 +8,59 @@ import { getPool } from '../../services/db/connection.db';
|
||||
import { generateFileChecksum } from '../../utils/checksum';
|
||||
import { logger } from '../../services/logger.server';
|
||||
import type { UserProfile, ExtractedFlyerItem } from '../../types';
|
||||
import { createAndLoginUser, getTestBaseUrl } from '../utils/testHelpers';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import { poll } from '../utils/poll';
|
||||
import { cleanupFiles } from '../utils/cleanupFiles';
|
||||
import piexif from 'piexifjs';
|
||||
import exifParser from 'exif-parser';
|
||||
import sharp from 'sharp';
|
||||
import * as imageProcessor from '../../utils/imageProcessor';
|
||||
|
||||
// Mock the image processor to ensure safe filenames for DB constraints
|
||||
vi.mock('../../utils/imageProcessor', async () => {
|
||||
const actual = await vi.importActual<typeof import('../../utils/imageProcessor')>('../../utils/imageProcessor');
|
||||
return {
|
||||
...actual,
|
||||
generateFlyerIcon: vi.fn().mockResolvedValue('mock-icon-safe.webp'),
|
||||
};
|
||||
});
|
||||
|
||||
// FIX: Mock storageService to return valid URLs (for DB) and write files to disk (for test verification)
|
||||
vi.mock('../../services/storage/storageService', () => {
|
||||
const fs = require('node:fs/promises');
|
||||
const path = require('path');
|
||||
// Match the directory used in the test helpers
|
||||
const uploadDir = path.join(process.cwd(), 'flyer-images');
|
||||
|
||||
return {
|
||||
storageService: {
|
||||
upload: vi.fn().mockImplementation(async (fileData, fileName) => {
|
||||
const name = fileName || (fileData && fileData.name) || (typeof fileData === 'string' ? path.basename(fileData) : `upload-${Date.now()}.jpg`);
|
||||
|
||||
await fs.mkdir(uploadDir, { recursive: true });
|
||||
const destPath = path.join(uploadDir, name);
|
||||
|
||||
let content = Buffer.from('');
|
||||
if (Buffer.isBuffer(fileData)) {
|
||||
content = fileData as any;
|
||||
} else if (typeof fileData === 'string') {
|
||||
try { content = await fs.readFile(fileData); } catch (e) {}
|
||||
} else if (fileData && fileData.path) {
|
||||
try { content = await fs.readFile(fileData.path); } catch (e) {}
|
||||
}
|
||||
await fs.writeFile(destPath, content);
|
||||
|
||||
// Return a valid URL to satisfy the 'url_check' DB constraint
|
||||
return `https://example.com/uploads/${name}`;
|
||||
}),
|
||||
delete: vi.fn().mockResolvedValue(undefined),
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// FIX: Import the singleton instance directly to spy on it
|
||||
import { aiService } from '../../services/aiService.server';
|
||||
|
||||
|
||||
/**
|
||||
@@ -25,21 +71,11 @@ const { mockExtractCoreData } = vi.hoisted(() => ({
|
||||
mockExtractCoreData: vi.fn(),
|
||||
}));
|
||||
|
||||
// Mock the AI service to prevent real API calls during integration tests.
|
||||
// This is crucial for making the tests reliable and fast. We don't want to
|
||||
// depend on the external Gemini API.
|
||||
vi.mock('../../services/aiService.server', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../../services/aiService.server')>();
|
||||
// To preserve the class instance methods of `aiService`, we must modify the
|
||||
// instance directly rather than creating a new plain object with spread syntax.
|
||||
actual.aiService.extractCoreDataFromFlyerImage = mockExtractCoreData;
|
||||
return actual;
|
||||
});
|
||||
|
||||
// Mock the main DB service to allow for simulating transaction failures.
|
||||
// By default, it will use the real implementation.
|
||||
vi.mock('../../services/db/index.db', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../../services/db/index.db')>();
|
||||
// Mock the connection DB service to intercept withTransaction.
|
||||
// This is crucial because FlyerPersistenceService imports directly from connection.db,
|
||||
// so mocking index.db is insufficient.
|
||||
vi.mock('../../services/db/connection.db', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('../../services/db/connection.db')>();
|
||||
return {
|
||||
...actual,
|
||||
withTransaction: vi.fn().mockImplementation(actual.withTransaction),
|
||||
@@ -51,12 +87,26 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const createdUserIds: string[] = [];
|
||||
const createdFlyerIds: number[] = [];
|
||||
const createdFilePaths: string[] = [];
|
||||
let workersModule: typeof import('../../services/workers.server');
|
||||
|
||||
const originalFrontendUrl = process.env.FRONTEND_URL;
|
||||
|
||||
beforeAll(async () => {
|
||||
// FIX: Stub FRONTEND_URL to ensure valid absolute URLs (http://...) are generated
|
||||
// for the database, satisfying the 'url_check' constraint.
|
||||
// IMPORTANT: This must run BEFORE the app is imported so workers inherit the env var.
|
||||
vi.stubEnv('FRONTEND_URL', 'https://example.com');
|
||||
process.env.FRONTEND_URL = 'https://example.com';
|
||||
console.error('[TEST SETUP] FRONTEND_URL stubbed to:', process.env.FRONTEND_URL);
|
||||
|
||||
// FIX: Spy on the actual singleton instance. This ensures that when the worker
|
||||
// imports 'aiService', it gets the instance we are controlling here.
|
||||
vi.spyOn(aiService, 'extractCoreDataFromFlyerImage').mockImplementation(mockExtractCoreData);
|
||||
|
||||
// NEW: Import workers to start them IN-PROCESS.
|
||||
// This ensures they run in the same memory space as our mocks.
|
||||
console.error('[TEST SETUP] Starting in-process workers...');
|
||||
workersModule = await import('../../services/workers.server');
|
||||
|
||||
const appModule = await import('../../../server');
|
||||
const app = appModule.default;
|
||||
@@ -66,13 +116,14 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
// FIX: Reset mocks before each test to ensure isolation.
|
||||
// This prevents "happy path" mocks from leaking into error handling tests and vice versa.
|
||||
beforeEach(async () => {
|
||||
console.error('[TEST SETUP] Resetting mocks before test execution');
|
||||
// 1. Reset AI Service Mock to default success state
|
||||
mockExtractCoreData.mockReset();
|
||||
mockExtractCoreData.mockResolvedValue({
|
||||
store_name: 'Mock Store',
|
||||
valid_from: null,
|
||||
valid_to: null,
|
||||
store_address: null,
|
||||
valid_from: '2025-01-01',
|
||||
valid_to: '2025-01-07',
|
||||
store_address: '123 Mock St',
|
||||
items: [
|
||||
{
|
||||
item: 'Mocked Integration Item',
|
||||
@@ -86,14 +137,19 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
|
||||
// 2. Restore DB Service Mock to real implementation
|
||||
// This ensures that unless a test specifically mocks a failure, the DB logic works as expected.
|
||||
const { withTransaction } = await import('../../services/db/index.db');
|
||||
const actualDb = await vi.importActual<typeof import('../../services/db/index.db')>('../../services/db/index.db');
|
||||
const { withTransaction } = await import('../../services/db/connection.db');
|
||||
// We need to get the actual implementation again to restore it
|
||||
const actualDb = await vi.importActual<typeof import('../../services/db/connection.db')>('../../services/db/connection.db');
|
||||
vi.mocked(withTransaction).mockReset();
|
||||
vi.mocked(withTransaction).mockImplementation(actualDb.withTransaction);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Restore original value
|
||||
process.env.FRONTEND_URL = originalFrontendUrl;
|
||||
|
||||
vi.unstubAllEnvs(); // Clean up env stubs
|
||||
vi.restoreAllMocks(); // Restore the AI spy
|
||||
|
||||
// Use the centralized cleanup utility.
|
||||
await cleanupDb({
|
||||
@@ -103,6 +159,16 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
|
||||
// Use the centralized file cleanup utility.
|
||||
await cleanupFiles(createdFilePaths);
|
||||
|
||||
// NEW: Clean up workers and Redis connection to prevent tests from hanging.
|
||||
if (workersModule) {
|
||||
console.error('[TEST TEARDOWN] Closing in-process workers...');
|
||||
await workersModule.closeWorkers();
|
||||
}
|
||||
|
||||
// Close the shared redis connection used by the workers/queues
|
||||
const { connection } = await import('../../services/redis.server');
|
||||
await connection.quit();
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -110,7 +176,10 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
* It uploads a file, polls for completion, and verifies the result in the database.
|
||||
*/
|
||||
const runBackgroundProcessingTest = async (user?: UserProfile, token?: string) => {
|
||||
console.error(`[TEST START] runBackgroundProcessingTest. User: ${user?.user.email ?? 'ANONYMOUS'}`);
|
||||
// Arrange: Load a mock flyer PDF.
|
||||
console.error('[TEST] about to read test-flyer-image.jpg')
|
||||
|
||||
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
|
||||
const imageBuffer = await fs.readFile(imagePath);
|
||||
// Create a unique buffer and filename for each test run to ensure a unique checksum.
|
||||
@@ -119,26 +188,34 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
|
||||
const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
console.error('[TEST] mockImageFile created with uniqueFileName: ', uniqueFileName)
|
||||
console.error('[TEST DATA] Generated checksum for test:', checksum);
|
||||
|
||||
// Track created files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
console.error('[TEST] createdFilesPaths after 1st push: ', createdFilePaths)
|
||||
// The icon name is derived from the original filename.
|
||||
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
|
||||
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
|
||||
|
||||
// Act 1: Upload the file to start the background job.
|
||||
const testBaseUrl = 'https://example.com';
|
||||
console.error('[TEST ACTION] Uploading file with baseUrl:', testBaseUrl);
|
||||
|
||||
const uploadReq = request
|
||||
.post('/api/ai/upload-and-process')
|
||||
.field('checksum', checksum)
|
||||
// Pass the baseUrl directly in the form data to ensure the worker receives it,
|
||||
// bypassing issues with vi.stubEnv in multi-threaded test environments.
|
||||
.field('baseUrl', getTestBaseUrl())
|
||||
.field('baseUrl', testBaseUrl)
|
||||
.attach('flyerFile', uniqueContent, uniqueFileName);
|
||||
if (token) {
|
||||
uploadReq.set('Authorization', `Bearer ${token}`);
|
||||
}
|
||||
const uploadResponse = await uploadReq;
|
||||
console.error('[TEST RESPONSE] Upload status:', uploadResponse.status);
|
||||
console.error('[TEST RESPONSE] Upload body:', JSON.stringify(uploadResponse.body));
|
||||
const { jobId } = uploadResponse.body;
|
||||
|
||||
// Assert 1: Check that a job ID was returned.
|
||||
@@ -152,6 +229,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
statusReq.set('Authorization', `Bearer ${token}`);
|
||||
}
|
||||
const statusResponse = await statusReq;
|
||||
console.error(`[TEST POLL] Job ${jobId} current state:`, statusResponse.body?.state);
|
||||
return statusResponse.body;
|
||||
},
|
||||
(status) => status.state === 'completed' || status.state === 'failed',
|
||||
@@ -162,6 +240,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
if (jobStatus?.state === 'failed') {
|
||||
console.error('[DEBUG] Job failed with reason:', jobStatus.failedReason);
|
||||
console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
|
||||
console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
|
||||
console.error('[DEBUG] Full Job Status:', JSON.stringify(jobStatus, null, 2));
|
||||
}
|
||||
expect(jobStatus?.state).toBe('completed');
|
||||
@@ -251,7 +330,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const uploadResponse = await request
|
||||
.post('/api/ai/upload-and-process')
|
||||
.set('Authorization', `Bearer ${token}`)
|
||||
.field('baseUrl', getTestBaseUrl())
|
||||
.field('baseUrl', 'https://example.com')
|
||||
.field('checksum', checksum)
|
||||
.attach('flyerFile', imageWithExifBuffer, uniqueFileName);
|
||||
|
||||
@@ -273,6 +352,8 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
// 3. Assert
|
||||
if (jobStatus?.state === 'failed') {
|
||||
console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason);
|
||||
console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
|
||||
console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
|
||||
}
|
||||
expect(jobStatus?.state).toBe('completed');
|
||||
const flyerId = jobStatus?.returnValue?.flyerId;
|
||||
@@ -290,6 +371,10 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const parser = exifParser.create(savedImageBuffer);
|
||||
const exifResult = parser.parse();
|
||||
|
||||
console.error('[TEST] savedImagePath during EXIF data stripping: ', savedImagePath)
|
||||
console.error('[TEST] exifResult.tags: ', exifResult.tags)
|
||||
|
||||
|
||||
// The `tags` object will be empty if no EXIF data is found.
|
||||
expect(exifResult.tags).toEqual({});
|
||||
expect(exifResult.tags.Software).toBeUndefined();
|
||||
@@ -336,7 +421,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
      const uploadResponse = await request
        .post('/api/ai/upload-and-process')
        .set('Authorization', `Bearer ${token}`)
        .field('baseUrl', getTestBaseUrl())
        .field('baseUrl', 'https://example.com')
        .field('checksum', checksum)
        .attach('flyerFile', imageWithMetadataBuffer, uniqueFileName);

@@ -358,6 +443,8 @@ describe('Flyer Processing Background Job Integration Test', () => {
      // 3. Assert job completion
      if (jobStatus?.state === 'failed') {
        console.error('[DEBUG] PNG metadata test job failed:', jobStatus.failedReason);
        console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
        console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
      }
      expect(jobStatus?.state).toBe('completed');
      const flyerId = jobStatus?.returnValue?.flyerId;

@@ -371,6 +458,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
      const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
      createdFilePaths.push(savedImagePath); // Add final path for cleanup

      console.error('[TEST] savedImagePath during PNG metadata stripping: ', savedImagePath);

      const savedImageMetadata = await sharp(savedImagePath).metadata();

      // The test should fail here initially because PNGs are not processed.
@@ -378,6 +468,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
      expect(savedImageMetadata.exif).toBeUndefined();
    },
    240000,
  );

  it(
@@ -385,13 +476,14 @@ it(
    async () => {
      // Arrange: Mock the AI service to throw an error for this specific test.
      const aiError = new Error('AI model failed to extract data.');
      // Update the spy implementation to reject
      mockExtractCoreData.mockRejectedValue(aiError);

      // Arrange: Prepare a unique flyer file for upload.
      const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`fail-test-${Date.now()}`)]);
      const uniqueFileName = `ai-fail-test-${Date.now()}.jpg`;
      const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`ai-error-test-${Date.now()}`)]);
      const uniqueFileName = `ai-error-test-${Date.now()}.jpg`;
      const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
      const checksum = await generateFileChecksum(mockImageFile);
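
generateFileChecksum itself is not shown in this diff; a plausible sketch, assuming a SHA-256 digest over the file bytes via the Web Crypto API (the helper's shape and algorithm are assumptions, not the project's confirmed implementation):

// Hypothetical shape of generateFileChecksum: hash the file contents with
// SHA-256 and hex-encode the digest so it can be sent as a form field.
export async function generateFileChecksum(file: File): Promise<string> {
  const digest = await crypto.subtle.digest('SHA-256', await file.arrayBuffer());
  return Array.from(new Uint8Array(digest))
    .map((byte) => byte.toString(16).padStart(2, '0'))
    .join('');
}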
@@ -402,7 +494,7 @@ it(
      // Act 1: Upload the file to start the background job.
      const uploadResponse = await request
        .post('/api/ai/upload-and-process')
        .field('baseUrl', getTestBaseUrl())
        .field('baseUrl', 'https://example.com')
        .field('checksum', checksum)
        .attach('flyerFile', uniqueContent, uniqueFileName);

@@ -420,6 +512,10 @@ it(
      );

      // Assert 1: Check that the job failed.
      if (jobStatus?.state === 'failed') {
        console.error('[TEST DEBUG] AI Failure Test - Job Failed Reason:', jobStatus.failedReason);
        console.error('[TEST DEBUG] AI Failure Test - Job Stack:', jobStatus.stacktrace);
      }
      expect(jobStatus?.state).toBe('failed');
      expect(jobStatus?.failedReason).toContain('AI model failed to extract data.');

@@ -436,14 +532,14 @@ it(
      // Arrange: Mock the database transaction function to throw an error.
      // This is a more realistic simulation of a DB failure than mocking the inner createFlyerAndItems function.
      const dbError = new Error('DB transaction failed');
      const { withTransaction } = await import('../../services/db/index.db');
      const { withTransaction } = await import('../../services/db/connection.db');
      vi.mocked(withTransaction).mockRejectedValue(dbError);

      // Arrange: Prepare a unique flyer file for upload.
      const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`db-fail-test-${Date.now()}`)]);
      const uniqueFileName = `db-fail-test-${Date.now()}.jpg`;
      const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`db-error-test-${Date.now()}`)]);
      const uniqueFileName = `db-error-test-${Date.now()}.jpg`;
      const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
      const checksum = await generateFileChecksum(mockImageFile);

@@ -454,7 +550,7 @@ it(
      // Act 1: Upload the file to start the background job.
      const uploadResponse = await request
        .post('/api/ai/upload-and-process')
        .field('baseUrl', getTestBaseUrl())
        .field('baseUrl', 'https://example.com')
        .field('checksum', checksum)
        .attach('flyerFile', uniqueContent, uniqueFileName);

@@ -494,9 +590,9 @@ it(
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueContent = Buffer.concat([
        imageBuffer,
        Buffer.from(`cleanup-fail-test-${Date.now()}`),
        Buffer.from(`cleanup-test-${Date.now()}`),
      ]);
      const uniqueFileName = `cleanup-fail-test-${Date.now()}.jpg`;
      const uniqueFileName = `cleanup-test-${Date.now()}.jpg`;
      const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
      const checksum = await generateFileChecksum(mockImageFile);

@@ -508,7 +604,7 @@ it(
      // Act 1: Upload the file to start the background job.
      const uploadResponse = await request
        .post('/api/ai/upload-and-process')
        .field('baseUrl', getTestBaseUrl())
        .field('baseUrl', 'https://example.com')
        .field('checksum', checksum)
        .attach('flyerFile', uniqueContent, uniqueFileName);

@@ -534,6 +630,7 @@ it(
      await expect(fs.access(tempFilePath), 'Expected temporary file to exist after job failure, but it was deleted.').resolves.toBeUndefined();
    },
    240000,
  );
});

@@ -20,6 +20,7 @@ import type {
} from '../../types';
import type { Flyer } from '../../types';
import { cleanupFiles } from '../utils/cleanupFiles';
import { aiService } from '../../services/aiService.server';

/**
 * @vitest-environment node
@@ -29,23 +30,12 @@ const { mockExtractCoreData } = vi.hoisted(() => ({
  mockExtractCoreData: vi.fn(),
}));

// Mock the AI service to prevent real API calls during integration tests.
// This is crucial for making the tests reliable and fast. We don't want to
// depend on the external Gemini API.
vi.mock('../../services/aiService.server', async (importOriginal) => {
  const actual = await importOriginal<typeof import('../../services/aiService.server')>();
  // To preserve the class instance methods of `aiService`, we must modify the
  // instance directly rather than creating a new plain object with spread syntax.
  actual.aiService.extractCoreDataFromFlyerImage = mockExtractCoreData;
  return actual;
});
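
The comment about preserving class instance methods is worth unpacking: spreading a class instance copies only its own enumerable properties, so methods defined on the prototype are lost. A minimal standalone illustration (not project code):

class Greeter {
  greet() {
    return 'hi';
  }
}

const instance = new Greeter();
const spreadCopy = { ...instance };

// Prototype methods are not own enumerable properties, so they vanish:
console.log(typeof instance.greet); // "function"
console.log(typeof (spreadCopy as Partial<Greeter>).greet); // "undefined"

This is why the mock patches the singleton in place instead of returning a spread copy.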
// Mock the image processor to control icon generation for legacy uploads
vi.mock('../../utils/imageProcessor', async () => {
  const actual = await vi.importActual<typeof imageProcessor>('../../utils/imageProcessor');
  return {
    ...actual,
    generateFlyerIcon: vi.fn(),
    generateFlyerIcon: vi.fn().mockResolvedValue('mock-icon.webp'),
  };
});

@@ -56,11 +46,21 @@ describe('Gamification Flow Integration Test', () => {
  const createdFlyerIds: number[] = [];
  const createdFilePaths: string[] = [];
  const createdStoreIds: number[] = [];
  let workersModule: typeof import('../../services/workers.server');

  beforeAll(async () => {
    // Stub environment variables for URL generation in the background worker.
    // This needs to be in beforeAll to ensure it's set before any code that might use it is imported.
    vi.stubEnv('FRONTEND_URL', 'https://example.com');

    // Spy on the actual singleton instance. This ensures that when the worker
    // imports 'aiService', it gets the instance we are controlling here.
    vi.spyOn(aiService, 'extractCoreDataFromFlyerImage').mockImplementation(mockExtractCoreData);

    // Import workers to start them IN-PROCESS.
    // This ensures they run in the same memory space as our mocks.
    workersModule = await import('../../services/workers.server');

    const app = (await import('../../../server')).default;
    request = supertest(app);

@@ -91,12 +91,23 @@ describe('Gamification Flow Integration Test', () => {

  afterAll(async () => {
    vi.unstubAllEnvs();
    vi.restoreAllMocks(); // Restore the AI spy

    await cleanupDb({
      userIds: testUser ? [testUser.user.user_id] : [],
      flyerIds: createdFlyerIds,
      storeIds: createdStoreIds,
    });
    await cleanupFiles(createdFilePaths);

    // Clean up workers and Redis connection to prevent tests from hanging.
    if (workersModule) {
      await workersModule.closeWorkers();
    }

    // Close the shared redis connection used by the workers/queues
    const { connection } = await import('../../services/redis.server');
    await connection.quit();
  });

  it(
@@ -117,14 +128,28 @@ describe('Gamification Flow Integration Test', () => {
    createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));

    // --- Act 1: Upload the flyer to trigger the background job ---
    const testBaseUrl = 'https://example.com';
    console.error('--------------------------------------------------------------------------------');
    console.error('[TEST DEBUG] STARTING UPLOAD STEP');
    console.error(`[TEST DEBUG] Env FRONTEND_URL: "${process.env.FRONTEND_URL}"`);
    console.error(`[TEST DEBUG] Sending baseUrl field: "${testBaseUrl}"`);
    console.error('--------------------------------------------------------------------------------');

    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .set('Authorization', `Bearer ${authToken}`)
      .field('checksum', checksum)
      .field('baseUrl', testBaseUrl)
      .attach('flyerFile', uniqueContent, uniqueFileName);

    console.error('--------------------------------------------------------------------------------');
    console.error(`[TEST DEBUG] Upload Response Status: ${uploadResponse.status}`);
    console.error(`[TEST DEBUG] Upload Response Body: ${JSON.stringify(uploadResponse.body, null, 2)}`);
    console.error('--------------------------------------------------------------------------------');

    const { jobId } = uploadResponse.body;
    expect(jobId).toBeTypeOf('string');
    console.error(`[TEST DEBUG] Job ID received: ${jobId}`);

    // --- Act 2: Poll for job completion using the new utility ---
    const jobStatus = await poll(
@@ -132,6 +157,7 @@ describe('Gamification Flow Integration Test', () => {
        const statusResponse = await request
          .get(`/api/ai/jobs/${jobId}/status`)
          .set('Authorization', `Bearer ${authToken}`);
        console.error(`[TEST DEBUG] Polling status for ${jobId}: ${statusResponse.body?.state}`);
        return statusResponse.body;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
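
The poll utility referenced here is not shown in this diff; a minimal sketch of what such a helper could look like, consistent with how the tests call it (the signature and option names are assumptions):

// Hypothetical shape of the test `poll` utility: repeatedly invoke an async
// fetcher until a predicate accepts the result or a timeout elapses.
export async function poll<T>(
  fetcher: () => Promise<T>,
  isDone: (value: T) => boolean,
  { timeoutMs = 240_000, intervalMs = 1_000 } = {},
): Promise<T | undefined> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const value = await fetcher();
    if (isDone(value)) return value;
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  return undefined; // callers treat a missing status as a timeout
}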
@@ -143,9 +169,22 @@ describe('Gamification Flow Integration Test', () => {
      throw new Error('Gamification test job timed out: No job status received.');
    }

    console.error('--------------------------------------------------------------------------------');
    console.error('[TEST DEBUG] Final Job Status Object:', JSON.stringify(jobStatus, null, 2));
    if (jobStatus.state === 'failed') {
      console.error(`[TEST DEBUG] Job Failed Reason: ${jobStatus.failedReason}`);
      // If there is a progress object with error details, log it
      if (jobStatus.progress) {
        console.error(`[TEST DEBUG] Job Progress/Error Details:`, JSON.stringify(jobStatus.progress, null, 2));
      }
    }
    console.error('--------------------------------------------------------------------------------');

    // --- Assert 1: Verify the job completed successfully ---
    if (jobStatus?.state === 'failed') {
      console.error('[DEBUG] Gamification test job failed:', jobStatus.failedReason);
      console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
      console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
    }
    expect(jobStatus?.state).toBe('completed');

@@ -166,10 +205,17 @@ describe('Gamification Flow Integration Test', () => {
    const achievementsResponse = await request
      .get('/api/achievements/me')
      .set('Authorization', `Bearer ${authToken}`);
    const userAchievements: (UserAchievement & Achievement)[] = achievementsResponse.body;

    // --- Assert 2: Verify the "First-Upload" achievement was awarded ---
    // The 'user_registered' achievement is awarded on creation, so we expect at least two.
    // Wait for the asynchronous achievement event to process
    await vi.waitUntil(async () => {
      const achievements = await db.gamificationRepo.getUserAchievements(testUser.user.user_id, logger);
      return achievements.length >= 2;
    }, { timeout: 5000, interval: 200 });

    // Final assertion and retrieval
    const userAchievements = await db.gamificationRepo.getUserAchievements(testUser.user.user_id, logger);
    expect(userAchievements.length).toBeGreaterThanOrEqual(2);
    const firstUploadAchievement = userAchievements.find((ach) => ach.name === 'First-Upload');
    expect(firstUploadAchievement).toBeDefined();
@@ -216,7 +262,7 @@ describe('Gamification Flow Integration Test', () => {
      checksum: checksum,
      extractedData: {
        store_name: storeName,
        items: [{ item: 'Legacy Milk', price_in_cents: 250 }],
        items: [{ item: 'Legacy Milk', price_in_cents: 250, price_display: '$2.50' }],
      },
    };

@@ -254,7 +300,7 @@ describe('Gamification Flow Integration Test', () => {
    // 8. Assert that the URLs are fully qualified.
    expect(savedFlyer.image_url).to.equal(newFlyer.image_url);
    expect(savedFlyer.icon_url).to.equal(newFlyer.icon_url);
    const expectedBaseUrl = getTestBaseUrl();
    const expectedBaseUrl = 'https://example.com';
    expect(newFlyer.image_url).toContain(`${expectedBaseUrl}/flyer-images/`);
  });
});
@@ -2,7 +2,9 @@
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import supertest from 'supertest';
import { getPool } from '../../services/db/connection.db';
import { TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';
import { TEST_EXAMPLE_DOMAIN, createAndLoginUser } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import type { UserProfile } from '../../types';

/**
 * @vitest-environment node
@@ -10,6 +12,9 @@ import { TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';

describe('Price History API Integration Test (/api/price-history)', () => {
  let request: ReturnType<typeof supertest>;
  let authToken: string;
  let testUser: UserProfile;
  const createdUserIds: string[] = [];
  let masterItemId: number;
  let storeId: number;
  let flyerId1: number;
@@ -21,6 +26,15 @@ describe('Price History API Integration Test (/api/price-history)', () => {
    const app = (await import('../../../server')).default;
    request = supertest(app);

    // Create a user for the tests
    const email = `price-test-${Date.now()}@example.com`;
    ({ user: testUser, token: authToken } = await createAndLoginUser({
      email,
      fullName: 'Price Test User',
      request,
    }));
    createdUserIds.push(testUser.user.user_id);

    const pool = getPool();

    // 1. Create a master grocery item
@@ -74,6 +88,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {

  afterAll(async () => {
    vi.unstubAllEnvs();
    await cleanupDb({ userIds: createdUserIds });
    const pool = getPool();
    // The CASCADE on the tables should handle flyer_items.
    // The delete on flyers cascades to flyer_items, which fires a trigger `recalculate_price_history_on_flyer_item_delete`.
@@ -97,7 +112,9 @@ describe('Price History API Integration Test (/api/price-history)', () => {
  });

  it('should return the correct price history for a given master item ID', async () => {
    const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [masterItemId] });
    const response = await request.post('/api/price-history')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ masterItemIds: [masterItemId] });

    expect(response.status).toBe(200);
    expect(response.body).toBeInstanceOf(Array);
@@ -111,7 +128,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
  it('should respect the limit parameter', async () => {
    const response = await request
      .post('/api/price-history')
      .set('Authorization', 'Bearer ${token}')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ masterItemIds: [masterItemId], limit: 2 });

    expect(response.status).toBe(200);
@@ -123,7 +140,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
  it('should respect the offset parameter', async () => {
    const response = await request
      .post('/api/price-history')
      .set('Authorization', 'Bearer ${token}')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ masterItemIds: [masterItemId], limit: 2, offset: 1 });

    expect(response.status).toBe(200);
@@ -133,7 +150,9 @@ describe('Price History API Integration Test (/api/price-history)', () => {
  });

  it('should return price history sorted by date in ascending order', async () => {
    const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [masterItemId] });
    const response = await request.post('/api/price-history')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ masterItemIds: [masterItemId] });

    expect(response.status).toBe(200);
    const history = response.body;
@@ -148,7 +167,9 @@ describe('Price History API Integration Test (/api/price-history)', () => {
  });

  it('should return an empty array for a master item ID with no price history', async () => {
    const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [999999] });
    const response = await request.post('/api/price-history')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ masterItemIds: [999999] });
    expect(response.status).toBe(200);
    expect(response.body).toEqual([]);
  });
@@ -227,24 +227,24 @@ describe('Public API Routes Integration Tests', () => {

  describe('Rate Limiting on Public Routes', () => {
    it('should block requests to /api/personalization/master-items after exceeding the limit', async () => {
      const limit = 100; // Matches publicReadLimiter config
      // We only need to verify it blocks eventually, but running 100 requests in a test is slow.
      // Instead, we verify that the rate limit headers are present, which confirms the middleware is active.

      const response = await request
        .get('/api/personalization/master-items')
        .set('X-Test-Rate-Limit-Enable', 'true'); // Opt-in to rate limiting
      // The limit might be higher than 5. We loop enough times to ensure we hit the rate limit.
      const maxRequests = 120; // Increased to ensure we hit the limit (likely 60 or 100)
      let blockedResponse: any;

      expect(response.status).toBe(200);
      expect(response.headers).toHaveProperty('x-ratelimit-limit');
      expect(response.headers).toHaveProperty('x-ratelimit-remaining');

      // Verify the limit matches our config
      expect(parseInt(response.headers['x-ratelimit-limit'])).toBe(limit);

      // Verify we consumed one
      const remaining = parseInt(response.headers['x-ratelimit-remaining']);
      expect(remaining).toBeLessThan(limit);
      for (let i = 0; i < maxRequests; i++) {
        const response = await request
          .get('/api/personalization/master-items')
          .set('X-Test-Rate-Limit-Enable', 'true'); // Enable rate limiter middleware

        if (response.status === 429) {
          blockedResponse = response;
          break;
        }
        expect(response.status).toBe(200);
      }

      expect(blockedResponse).toBeDefined();
      expect(blockedResponse.status).toBe(429);
    });
  });
});
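
The publicReadLimiter configuration itself is not part of this diff; a sketch of how such a limiter might be wired with express-rate-limit (the window, the limit value, and the import path are assumptions — the old test asserted 100 while the new comment hedges "likely 60 or 100"):

import rateLimit from 'express-rate-limit';
import { shouldSkipRateLimit } from '../utils/rateLimit';

// Hypothetical wiring: skip() lets the X-Test-Rate-Limit-Enable opt-in from
// the tests above bypass or re-enable limiting in the test environment.
export const publicReadLimiter = rateLimit({
  windowMs: 15 * 60 * 1000, // assumed window
  max: 100, // assumed limit
  skip: shouldSkipRateLimit,
});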
@@ -74,10 +74,7 @@ describe('Recipe API Routes Integration Tests', () => {
    });
  });

  // Placeholder for future tests
  // Skipping this test as the POST /api/recipes endpoint for creation does not appear to be implemented.
  // The test currently fails with a 404 Not Found.
  it.skip('should allow an authenticated user to create a new recipe', async () => {
  it('should allow an authenticated user to create a new recipe', async () => {
    const newRecipeData = {
      name: 'My New Awesome Recipe',
      instructions: '1. Be awesome. 2. Make recipe.',
@@ -85,7 +82,7 @@ describe('Recipe API Routes Integration Tests', () => {
    };

    const response = await request
      .post('/api/recipes') // This endpoint does not exist, causing a 404.
      .post('/api/users/recipes')
      .set('Authorization', `Bearer ${authToken}`)
      .send(newRecipeData);

@@ -22,6 +22,11 @@ const getPool = () => {
 * and then rebuilds it from the master rollup script.
 */
export async function setup() {
  // Ensure we are in the correct environment for these tests.
  process.env.NODE_ENV = 'test';
  // Set the FRONTEND_URL globally for any scripts or processes spawned here.
  process.env.FRONTEND_URL = process.env.FRONTEND_URL || 'https://example.com';

  // --- START DEBUG LOGGING ---
  // Log the database connection details being used by the Vitest GLOBAL SETUP process.
  // These variables are inherited from the CI environment.

@@ -43,6 +43,7 @@ export async function processAndSaveImage(
      .toFile(outputPath);

    logger.info(`Successfully processed image and saved to ${outputPath}`);
    console.log('[DEBUG] processAndSaveImage returning:', outputFileName);
    return outputFileName;
  } catch (error) {
    logger.error(
@@ -84,6 +85,7 @@ export async function generateFlyerIcon(
      .toFile(outputPath);

    logger.info(`Successfully generated icon: ${outputPath}`);
    console.log('[DEBUG] generateFlyerIcon returning:', iconFileName);
    return iconFileName;
  } catch (error) {
    logger.error(
59
src/utils/rateLimit.test.ts
Normal file
@@ -0,0 +1,59 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Request } from 'express';

describe('rateLimit utils', () => {
  beforeEach(() => {
    vi.resetModules();
    vi.unstubAllEnvs();
  });

  afterEach(() => {
    vi.unstubAllEnvs();
  });

  describe('shouldSkipRateLimit', () => {
    it('should return false (do not skip) when NODE_ENV is "production"', async () => {
      vi.stubEnv('NODE_ENV', 'production');
      const { shouldSkipRateLimit } = await import('./rateLimit');

      const req = { headers: {} } as Request;
      expect(shouldSkipRateLimit(req)).toBe(false);
    });

    it('should return false (do not skip) when NODE_ENV is "development"', async () => {
      vi.stubEnv('NODE_ENV', 'development');
      const { shouldSkipRateLimit } = await import('./rateLimit');

      const req = { headers: {} } as Request;
      expect(shouldSkipRateLimit(req)).toBe(false);
    });

    it('should return true (skip) when NODE_ENV is "test" and header is missing', async () => {
      vi.stubEnv('NODE_ENV', 'test');
      const { shouldSkipRateLimit } = await import('./rateLimit');

      const req = { headers: {} } as Request;
      expect(shouldSkipRateLimit(req)).toBe(true);
    });

    it('should return false (do not skip) when NODE_ENV is "test" and header is "true"', async () => {
      vi.stubEnv('NODE_ENV', 'test');
      const { shouldSkipRateLimit } = await import('./rateLimit');

      const req = {
        headers: { 'x-test-rate-limit-enable': 'true' },
      } as unknown as Request;
      expect(shouldSkipRateLimit(req)).toBe(false);
    });

    it('should return true (skip) when NODE_ENV is "test" and header is "false"', async () => {
      vi.stubEnv('NODE_ENV', 'test');
      const { shouldSkipRateLimit } = await import('./rateLimit');

      const req = {
        headers: { 'x-test-rate-limit-enable': 'false' },
      } as unknown as Request;
      expect(shouldSkipRateLimit(req)).toBe(true);
    });
  });
});
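
These tests pin down the helper's contract; a minimal implementation consistent with them might look like the following (the actual module is not shown in this diff and may differ):

import type { Request } from 'express';

// Skip rate limiting only in the test environment, unless the request opts
// back in via the X-Test-Rate-Limit-Enable header (Express lower-cases
// incoming header names).
export function shouldSkipRateLimit(req: Request): boolean {
  if (process.env.NODE_ENV !== 'test') {
    return false; // production and development always enforce limits
  }
  return req.headers['x-test-rate-limit-enable'] !== 'true';
}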
@@ -48,6 +48,7 @@ const finalConfig = mergeConfig(
    env: {
      NODE_ENV: 'test',
      BASE_URL: 'https://example.com', // Use a standard domain to pass strict URL validation
      FRONTEND_URL: 'https://example.com',
      PORT: '3000',
    },
    // This setup script starts the backend server before tests run.