// src/tests/integration/flyer-processing.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import fs from 'node:fs/promises';
import path from 'path';
import * as db from '../../services/db/index.db';
import { generateFileChecksum } from '../../utils/checksum';
import { logger } from '../../services/logger.server';
import type { UserProfile } from '../../types';
import { createAndLoginUser, getFlyerBaseUrl } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
import { poll } from '../utils/poll';
import { cleanupFiles } from '../utils/cleanupFiles';
import piexif from 'piexifjs';
import exifParser from 'exif-parser';
import sharp from 'sharp';

const FLYER_BASE_URL = getFlyerBaseUrl();

// NOTE: STORAGE_PATH is set via the CI environment (deploy-to-test.yml).
// This ensures multer and flyerProcessingService use the test runner's directory
// instead of the production path (/var/www/.../flyer-images).
// The testStoragePath variable is used for constructing paths in test assertions.
const testStoragePath =
  process.env.STORAGE_PATH || path.resolve(__dirname, '../../../flyer-images');

// Mock the image processor to ensure safe filenames for DB constraints
vi.mock('../../utils/imageProcessor', async () => {
  const actual = await vi.importActual<typeof import('../../utils/imageProcessor')>(
    '../../utils/imageProcessor',
  );
  // eslint-disable-next-line @typescript-eslint/no-require-imports
  const pathModule = require('path');
  return {
    ...actual,
    // Return a realistic icon filename based on the source file
    generateFlyerIcon: vi.fn().mockImplementation(async (sourcePath: string) => {
      const baseName = pathModule.parse(pathModule.basename(sourcePath)).name;
      return `icon-${baseName}.webp`;
    }),
  };
});

// FIX: Mock storageService to return valid URLs (for DB) and write files to disk (for test verification)
// NOTE: We use process.env.STORAGE_PATH which is set by the global setup to the temp directory.
vi.mock('../../services/storage/storageService', () => {
  // eslint-disable-next-line @typescript-eslint/no-require-imports
  const fsModule = require('node:fs/promises');
  // eslint-disable-next-line @typescript-eslint/no-require-imports
  const pathModule = require('path');

  return {
    storageService: {
      upload: vi
        .fn()
        .mockImplementation(
          async (
            fileData: Buffer | string | { name?: string; path?: string },
            fileName?: string,
          ) => {
            const name =
              fileName ||
              (fileData && typeof fileData === 'object' && 'name' in fileData && fileData.name) ||
              (typeof fileData === 'string'
                ? pathModule.basename(fileData)
                : `upload-${Date.now()}.jpg`);

            // Use the STORAGE_PATH from the environment (set by global setup to temp directory)
            const uploadDir =
              process.env.STORAGE_PATH || pathModule.join(process.cwd(), 'flyer-images');
            await fsModule.mkdir(uploadDir, { recursive: true });
            const destPath = pathModule.join(uploadDir, name);

            let content: Buffer = Buffer.from('');
            if (Buffer.isBuffer(fileData)) {
              content = Buffer.from(fileData);
            } else if (typeof fileData === 'string') {
              try {
                content = await fsModule.readFile(fileData);
              } catch {
                /* ignore */
              }
            } else if (
              fileData &&
              typeof fileData === 'object' &&
              'path' in fileData &&
              fileData.path
            ) {
              try {
                content = await fsModule.readFile(fileData.path);
              } catch {
                /* ignore */
              }
            }
            await fsModule.writeFile(destPath, content);

            // Return a valid URL to satisfy the 'url_check' DB constraint
            return `${FLYER_BASE_URL}/flyer-images/${name}`;
          },
        ),
      delete: vi.fn().mockResolvedValue(undefined),
    },
  };
});

/**
 * @vitest-environment node
 */

// NOTE ON MOCKING STRATEGY:
// Vitest creates separate module instances for test files vs global setup, which breaks
// dependency injection approaches. For failure tests, we use vi.spyOn(aiService, ...)
// which modifies the actual singleton object and works across module boundaries.
// For happy path tests, the beforeEach hook sets up default mocks via DI which still works
// because the workers are already loaded with the same module instance.
import type { AiProcessorResult } from '../../services/flyerAiProcessor.server';

describe('Flyer Processing Background Job Integration Test', () => {
  let request: ReturnType<typeof supertest>;
  const createdUserIds: string[] = [];
  const createdFlyerIds: number[] = [];
  const createdFilePaths: string[] = [];
  const createdStoreIds: number[] = [];
  // IMPORTANT: We get flyerProcessingService from monitoringService rather than importing
  // workers.server.ts directly. This ensures we get the SAME instance that the workers use,
  // since monitoringService is already imported by the server (via ai.routes.ts).
  // Importing workers.server.ts directly creates a NEW module instance with different objects.
  let flyerProcessingService: typeof import('../../services/workers.server').flyerProcessingService;

  const originalFrontendUrl = process.env.FRONTEND_URL;

  beforeAll(async () => {
    // FIX: Stub FRONTEND_URL to ensure valid absolute URLs (http://...) are generated
    // for the database, satisfying the 'url_check' constraint.
    // IMPORTANT: This must run BEFORE the app is imported so workers inherit the env var.
    vi.stubEnv('FRONTEND_URL', 'https://example.com');

    // STORAGE_PATH is primarily set via CI environment (deploy-to-test.yml).
    // This stubEnv call serves as a fallback for local development runs.
    // It ensures multer and flyerProcessingService use the test directory, not production path.
    vi.stubEnv('STORAGE_PATH', testStoragePath);
    console.error('[TEST SETUP] STORAGE_PATH:', testStoragePath);
    process.env.FRONTEND_URL = 'https://example.com';
    console.error('[TEST SETUP] FRONTEND_URL stubbed to:', process.env.FRONTEND_URL);

    // NOTE: The aiService mock is now set up via vi.mock() at the module level (above).
    // This ensures workers get the mocked version when they import aiService.

    const appModule = await import('../../../server');
    const app = appModule.default;
    request = supertest(app);

    // CRITICAL: Import flyerProcessingService from monitoringService, NOT from workers.server.
    // The server has already imported monitoringService (via ai.routes.ts), which imports workers.server.
    // By importing from monitoringService, we get the SAME flyerProcessingService instance
    // that the workers are using. This allows our mock injections to work correctly.
    const monitoringModule = await import('../../services/monitoringService.server');
    flyerProcessingService = monitoringModule.flyerProcessingService;
    console.error(
      '[TEST SETUP] Got flyerProcessingService from monitoringService (shared instance)',
    );
  });

  // Helper function to create default mock AI response
  const createDefaultMockAiResult = (): AiProcessorResult => ({
    data: {
      store_name: 'Mock Store',
      valid_from: '2025-01-01',
      valid_to: '2025-01-07',
      store_address: '123 Mock St',
      items: [
        {
          item: 'Mocked Integration Item',
          price_display: '$1.99',
          price_in_cents: 199,
          quantity: 'each',
          category_name: 'Mock Category',
        },
      ],
    },
    needsReview: false,
  });

  // FIX: Reset mocks before each test to ensure isolation.
  // This prevents "happy path" mocks from leaking into error handling tests and vice versa.
  beforeEach(async () => {
    console.error('[TEST SETUP] Resetting mocks before test execution');

    if (flyerProcessingService) {
      // 1. Reset AI Processor to default success state via dependency injection
      // This replaces the vi.mock approach which didn't work across module boundaries
      flyerProcessingService
        ._getAiProcessor()
        ._setExtractAndValidateData(async () => createDefaultMockAiResult());
      console.error('[TEST SETUP] AI processor mock set to default success state via DI');

      // 2. Restore withTransaction to real implementation via dependency injection
      // This ensures that unless a test specifically injects a mock, the DB logic works as expected.
      const { withTransaction } = await import('../../services/db/connection.db');
      flyerProcessingService._getPersistenceService()._setWithTransaction(withTransaction);
      console.error('[TEST SETUP] withTransaction restored to real implementation via DI');

      // 3. Restore cleanup queue to real implementation
      // Some tests replace it with a no-op to prevent file cleanup during verification
      const { cleanupQueue } = await import('../../services/queues.server');
      flyerProcessingService._setCleanupQueue(cleanupQueue);
      console.error('[TEST SETUP] cleanupQueue restored to real implementation via DI');
    }
  });

  afterAll(async () => {
    // Restore original value
    process.env.FRONTEND_URL = originalFrontendUrl;

    vi.unstubAllEnvs(); // Clean up env stubs
    vi.restoreAllMocks(); // Restore the AI spy

    // CRITICAL: Close workers FIRST before any cleanup to ensure no pending jobs
    // are trying to access files or databases during cleanup.
    // This prevents the Node.js async hooks crash that occurs when fs operations
    // are rejected during process shutdown.
    // NOTE: We import workers.server here for the closeWorkers function.
    // This is safe because the server has already loaded this module.
    try {
      console.error('[TEST TEARDOWN] Closing in-process workers...');
      const { closeWorkers } = await import('../../services/workers.server');
      await closeWorkers();
      // Give workers a moment to fully release resources
      await new Promise((resolve) => setTimeout(resolve, 100));
    } catch (error) {
      console.error('[TEST TEARDOWN] Error closing workers:', error);
    }

    // Close the shared redis connection used by the workers/queues
    const { connection } = await import('../../services/redis.server');
    await connection.quit();

    // Use the centralized cleanup utility.
    await cleanupDb({
      userIds: createdUserIds,
      flyerIds: createdFlyerIds,
      storeIds: createdStoreIds,
    });

    // Use the centralized file cleanup utility.
    await cleanupFiles(createdFilePaths);

    // Final delay to let any remaining async operations settle
    // This helps prevent the Node.js async context assertion failure
    await new Promise((resolve) => setTimeout(resolve, 50));
  });

  /**
   * This is the end-to-end test for the background job processing flow.
   * It uploads a file, polls for completion, and verifies the result in the database.
   */
  const runBackgroundProcessingTest = async (user?: UserProfile, token?: string) => {
    console.error(
      `[TEST START] runBackgroundProcessingTest. User: ${user?.user.email ?? 'ANONYMOUS'}`,
    );
    // Arrange: Load a mock flyer image.
    console.error('[TEST] about to read test-flyer-image.jpg');

    const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
    const imageBuffer = await fs.readFile(imagePath);
    // Create a unique buffer and filename for each test run to ensure a unique checksum.
    // This prevents a 409 Conflict error when the second test runs.
    const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
    const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
    const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
      type: 'image/jpeg',
    });
    const checksum = await generateFileChecksum(mockImageFile);
    console.error('[TEST] mockImageFile created with uniqueFileName: ', uniqueFileName);
    console.error('[TEST DATA] Generated checksum for test:', checksum);

    // Track created files for cleanup
    const uploadDir = testStoragePath;
    createdFilePaths.push(path.join(uploadDir, uniqueFileName));
    console.error('[TEST] createdFilePaths after 1st push: ', createdFilePaths);
    // The icon name is derived from the original filename.
    const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
    createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));

    // Act 1: Upload the file to start the background job.
    const testBaseUrl = 'https://example.com';
    console.error('[TEST ACTION] Uploading file with baseUrl:', testBaseUrl);

    const uploadReq = request
      .post('/api/v1/ai/upload-and-process')
      .field('checksum', checksum)
      // Pass the baseUrl directly in the form data to ensure the worker receives it,
      // bypassing issues with vi.stubEnv in multi-threaded test environments.
      .field('baseUrl', testBaseUrl)
      .attach('flyerFile', uniqueContent, uniqueFileName);
    if (token) {
      uploadReq.set('Authorization', `Bearer ${token}`);
    }
    const uploadResponse = await uploadReq;
    console.error('[TEST RESPONSE] Upload status:', uploadResponse.status);
    console.error('[TEST RESPONSE] Upload body:', JSON.stringify(uploadResponse.body));
    const { jobId } = uploadResponse.body.data;

    // Assert 1: Check that a job ID was returned.
    expect(jobId).toBeTypeOf('string');

    // Act 2: Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusReq = request.get(`/api/v1/ai/jobs/${jobId}/status`);
        if (token) {
          statusReq.set('Authorization', `Bearer ${token}`);
        }
        const statusResponse = await statusReq;
        console.error(`[TEST POLL] Job ${jobId} current state:`, statusResponse.body?.data?.state);
        return statusResponse.body.data;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 210000, interval: 3000, description: 'flyer processing' },
    );

    // Assert 2: Check that the job completed successfully.
    if (jobStatus?.state === 'failed') {
      console.error('[DEBUG] Job failed with reason:', jobStatus.failedReason);
      console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
      console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
      console.error('[DEBUG] Full Job Status:', JSON.stringify(jobStatus, null, 2));
    }
    expect(jobStatus?.state).toBe('completed');
    const flyerId = jobStatus?.returnValue?.flyerId;
    expect(flyerId).toBeTypeOf('number');
    createdFlyerIds.push(flyerId); // Track for cleanup

    // Assert 3: Verify the flyer and its items were actually saved in the database.
    const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
    expect(savedFlyer).toBeDefined();
    expect(savedFlyer?.flyer_id).toBe(flyerId);
    if (savedFlyer?.store_id) {
      createdStoreIds.push(savedFlyer.store_id);
    }
    expect(savedFlyer?.file_name).toBe(uniqueFileName);
    // Also add the final processed image path to the cleanup list.
    // This is important because JPEGs are re-processed to strip EXIF data, creating a new file.
    const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
    createdFilePaths.push(savedImagePath);

    const items = await db.flyerRepo.getFlyerItems(flyerId, logger);
    // The stubbed AI response returns items, so we expect them to be here.
    expect(items.length).toBeGreaterThan(0);
    expect(items[0].item).toBeTypeOf('string');

    // Assert 4: Verify user association is correct.
    if (token) {
      expect(savedFlyer?.uploaded_by).toBe(user?.user.user_id);
    } else {
      expect(savedFlyer?.uploaded_by).toBe(null);
    }
  };

  it('should successfully process a flyer for an AUTHENTICATED user via the background queue', async () => {
    // Arrange: Create a new user specifically for this test.
    const email = `auth-flyer-user-${Date.now()}@example.com`;
    const { user: authUser, token } = await createAndLoginUser({
      email,
      fullName: 'Flyer Uploader',
      request,
    });
    createdUserIds.push(authUser.user.user_id); // Track for cleanup

    // Act & Assert
    await runBackgroundProcessingTest(authUser, token);
  }, 240000); // Increase timeout to 240 seconds for this long-running test

  it('should successfully process a flyer for an ANONYMOUS user via the background queue', async () => {
    // Act & Assert: Call the test helper without a user or token.
    await runBackgroundProcessingTest();
  }, 240000); // Increase timeout to 240 seconds for this long-running test

  it('should strip EXIF data from uploaded JPEG images during processing', async () => {
    // Arrange: Spy on the cleanup queue to prevent file deletion before we can verify.
    // We use vi.spyOn instead of DI because the worker uses a different module instance
    // due to Vitest's VM isolation. Spying on the queue's add method works across boundaries.
    const { cleanupQueue } = await import('../../services/queues.server');

    // Drain the cleanup queue and pause it to prevent any jobs from being processed during this test.
    // The cleanup worker runs in a separate module instance, so we need to pause at the queue level.
    await cleanupQueue.drain();
    await cleanupQueue.pause();
    console.error('[EXIF TEST DEBUG] Cleanup queue drained and paused');

    const cleanupQueueSpy = vi
      .spyOn(cleanupQueue, 'add')
      .mockResolvedValue({ id: 'noop-spy' } as never);

    // Arrange: Create a user for this test
    const { user: authUser, token } = await createAndLoginUser({
      email: `exif-user-${Date.now()}@example.com`,
      fullName: 'EXIF Tester',
      request,
    });
    createdUserIds.push(authUser.user.user_id);

    // 1. Create an image buffer with EXIF data
    const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
    const imageBuffer = await fs.readFile(imagePath);
    const jpegDataAsString = imageBuffer.toString('binary');

    const exifObj = {
      '0th': { [piexif.ImageIFD.Software]: 'Gemini Code Assist Test' },
      Exif: { [piexif.ExifIFD.DateTimeOriginal]: '2025:12:25 10:00:00' },
    };
    const exifBytes = piexif.dump(exifObj);
    const jpegWithExif = piexif.insert(exifBytes, jpegDataAsString);
    const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary');

    const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`;
    const mockImageFile = new File([new Uint8Array(imageWithExifBuffer)], uniqueFileName, {
      type: 'image/jpeg',
    });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track original file for cleanup - the actual processed filename will be determined
    // after the job completes by looking at the saved flyer record
    const uploadDir = testStoragePath;
    createdFilePaths.push(path.join(uploadDir, uniqueFileName));

    // 2. Act: Upload the file and wait for processing
    const uploadResponse = await request
      .post('/api/v1/ai/upload-and-process')
      .set('Authorization', `Bearer ${token}`)
      .field('baseUrl', 'https://example.com')
      .field('checksum', checksum)
      .attach('flyerFile', imageWithExifBuffer, uniqueFileName);

    const { jobId } = uploadResponse.body.data;
    expect(jobId).toBeTypeOf('string');

    // Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusResponse = await request
          .get(`/api/v1/ai/jobs/${jobId}/status`)
          .set('Authorization', `Bearer ${token}`);
        return statusResponse.body.data;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 180000, interval: 3000, description: 'EXIF stripping job' },
    );

    // 3. Assert
    if (jobStatus?.state === 'failed') {
      console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason);
      console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
      console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
    }
    expect(jobStatus?.state).toBe('completed');
    const flyerId = jobStatus?.returnValue?.flyerId;
    expect(flyerId).toBeTypeOf('number');
    createdFlyerIds.push(flyerId);

    // 4. Verify EXIF data is stripped from the saved file
    const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
    expect(savedFlyer).toBeDefined();
    if (savedFlyer?.store_id) {
      createdStoreIds.push(savedFlyer.store_id);
    }

    // Extract the actual processed filename from the saved flyer's image_url
    // The URL format is: ${FLYER_BASE_URL}/flyer-images/filename.ext
    const imageUrlPath = new URL(savedFlyer!.image_url).pathname;
    const processedFileName = path.basename(imageUrlPath);
    const savedImagePath = path.join(uploadDir, processedFileName);
    console.error('[TEST] savedImagePath during EXIF data stripping: ', savedImagePath);

    // Track the processed file for cleanup
    createdFilePaths.push(savedImagePath);
    // Also track the icon if it exists
    const iconFileName = `icon-${path.parse(processedFileName).name}.webp`;
    createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));

    const savedImageBuffer = await fs.readFile(savedImagePath);
    const parser = exifParser.create(savedImageBuffer);
    const exifResult = parser.parse();

    console.error('[TEST] exifResult.tags: ', exifResult.tags);

    // The `tags` object will be empty if no EXIF data is found.
    expect(exifResult.tags).toEqual({});
    expect(exifResult.tags.Software).toBeUndefined();

    // Cleanup: Restore the spy and resume the queue
    cleanupQueueSpy.mockRestore();
    await cleanupQueue.resume();
    console.error('[EXIF TEST DEBUG] Cleanup queue resumed');
  }, 240000);

  it('should strip metadata from uploaded PNG images during processing', async () => {
    // Arrange: Spy on the cleanup queue to prevent file deletion before we can verify.
    // We use vi.spyOn instead of DI because the worker uses a different module instance
    // due to Vitest's VM isolation. Spying on the queue's add method works across boundaries.
    const { cleanupQueue } = await import('../../services/queues.server');

    // Drain the cleanup queue and pause it to prevent any jobs from being processed during this test.
    // We need to drain first because there might be jobs already in the queue from setup or previous tests.
    await cleanupQueue.drain();
    await cleanupQueue.pause();
    console.error('[PNG TEST DEBUG] Cleanup queue drained and paused');

    const cleanupQueueSpy = vi.spyOn(cleanupQueue, 'add').mockImplementation(async (...args) => {
      console.error(
        '[PNG TEST DEBUG] cleanupQueue.add was called via spy! Args:',
        JSON.stringify(args),
      );
      return { id: 'noop-spy' } as never;
    });
    console.error('[PNG TEST DEBUG] Cleanup queue.add spied to return no-op');
    console.error('[PNG TEST DEBUG] testStoragePath:', testStoragePath);
    console.error('[PNG TEST DEBUG] process.env.STORAGE_PATH:', process.env.STORAGE_PATH);

    // Arrange: Create a user for this test
    const { user: authUser, token } = await createAndLoginUser({
      email: `png-meta-user-${Date.now()}@example.com`,
      fullName: 'PNG Metadata Tester',
      request,
    });
    createdUserIds.push(authUser.user.user_id);

    // 1. Create a PNG image buffer with custom metadata using sharp
    const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');

    const imageWithMetadataBuffer = await sharp(imagePath)
      .png() // Convert to PNG
      .withMetadata({
        exif: {
          IFD0: {
            Copyright: 'Gemini Code Assist PNG Test',
          },
        },
      })
      .toBuffer();

    const uniqueFileName = `test-flyer-with-metadata-${Date.now()}.png`;
    const mockImageFile = new File([new Uint8Array(imageWithMetadataBuffer)], uniqueFileName, {
      type: 'image/png',
    });
    const checksum = await generateFileChecksum(mockImageFile);

    // Track original file for cleanup - the actual processed filename will be determined
    // after the job completes by looking at the saved flyer record
    const uploadDir = testStoragePath;
    createdFilePaths.push(path.join(uploadDir, uniqueFileName));

    // 2. Act: Upload the file and wait for processing
    const uploadResponse = await request
      .post('/api/v1/ai/upload-and-process')
      .set('Authorization', `Bearer ${token}`)
      .field('baseUrl', 'https://example.com')
      .field('checksum', checksum)
      .attach('flyerFile', imageWithMetadataBuffer, uniqueFileName);

    const { jobId } = uploadResponse.body.data;
    expect(jobId).toBeTypeOf('string');

    // Debug: Check files right after upload
    const filesAfterUpload = await fs.readdir(uploadDir);
    console.error('[PNG TEST DEBUG] Files right after upload:', filesAfterUpload);

    // Poll for job completion using the new utility.
    const jobStatus = await poll(
      async () => {
        const statusResponse = await request
          .get(`/api/v1/ai/jobs/${jobId}/status`)
          .set('Authorization', `Bearer ${token}`);
        return statusResponse.body.data;
      },
      (status) => status.state === 'completed' || status.state === 'failed',
      { timeout: 180000, interval: 3000, description: 'PNG metadata stripping job' },
    );

    // 3. Assert job completion
    if (jobStatus?.state === 'failed') {
      console.error('[DEBUG] PNG metadata test job failed:', jobStatus.failedReason);
      console.error('[DEBUG] Job stack trace:', jobStatus.stacktrace);
      console.error('[DEBUG] Job return value:', JSON.stringify(jobStatus.returnValue, null, 2));
    }
    expect(jobStatus?.state).toBe('completed');
    const flyerId = jobStatus?.returnValue?.flyerId;
    expect(flyerId).toBeTypeOf('number');
    createdFlyerIds.push(flyerId);

    // 4. Verify metadata is stripped from the saved file
    const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
    expect(savedFlyer).toBeDefined();
    if (savedFlyer?.store_id) {
      createdStoreIds.push(savedFlyer.store_id);
    }

    // Extract the actual processed filename from the saved flyer's image_url
    // The URL format is: ${FLYER_BASE_URL}/flyer-images/filename.ext
    const imageUrlPath = new URL(savedFlyer!.image_url).pathname;
    const processedFileName = path.basename(imageUrlPath);
    const savedImagePath = path.join(uploadDir, processedFileName);
    console.error('[TEST] savedImagePath during PNG metadata stripping: ', savedImagePath);

    // Track the processed file for cleanup
    createdFilePaths.push(savedImagePath);
    // Also track the icon if it exists
    const iconFileName = `icon-${path.parse(processedFileName).name}.webp`;
    createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));

    // Debug: List files in the upload directory to verify what exists
    const filesInUploadDir = await fs.readdir(uploadDir);
    console.error('[PNG TEST DEBUG] Files in upload directory:', filesInUploadDir);
    console.error('[PNG TEST DEBUG] Looking for file:', processedFileName);
    console.error('[PNG TEST DEBUG] Full path:', savedImagePath);

    // Check if the file exists before trying to read metadata
    try {
      await fs.access(savedImagePath);
      console.error('[PNG TEST DEBUG] File exists at path');
      // Verify the file is actually readable
      const fileStats = await fs.stat(savedImagePath);
      console.error('[PNG TEST DEBUG] File stats:', {
        size: fileStats.size,
        isFile: fileStats.isFile(),
      });
    } catch (err) {
      console.error('[PNG TEST DEBUG] File does NOT exist at path!', err);
      // List all files that might be the processed file
      const matchingFiles = filesInUploadDir.filter((f) => f.includes('-processed.'));
      console.error('[PNG TEST DEBUG] Files containing "-processed.":', matchingFiles);
    }

    // Small delay to ensure file is fully written
    await new Promise((resolve) => setTimeout(resolve, 100));

    const savedImageMetadata = await sharp(savedImagePath).metadata();

    // The `exif` property should be undefined after stripping.
    expect(savedImageMetadata.exif).toBeUndefined();

    // Cleanup: Restore the spy and resume the queue
    cleanupQueueSpy.mockRestore();
    await cleanupQueue.resume();
    console.error('[PNG TEST DEBUG] Cleanup queue resumed');
  }, 240000);

  // TODO: This test cannot inject mocks into the worker's service instance because Vitest's
  // globalSetup runs in a separate Node.js context from test files. The flyerProcessingService
  // singleton is created in the globalSetup context, while tests run in a different context.
  // To fix this, we'd need either:
  // 1. A test-only API endpoint to inject mocks into the running server
  // 2. A file-based or Redis-based mock injection mechanism
  // 3. Running tests in the same process as the server (not supported by Vitest globalSetup)
  it.todo(
    'should handle a failure from the AI service gracefully - requires mock injection mechanism',
    async () => {
      // Arrange: Use the global flyerProcessingService singleton to inject a failing AI function.
      // This works because workers.server.ts stores the service instance on `global.__flyerProcessingService_singleton__`,
      // which is shared across all module contexts (test file, global setup, and worker).
      // We access the FlyerAiProcessor through the service and use its DI method.
      const { flyerProcessingService } = await import('../../services/workers.server');
      const aiProcessor = flyerProcessingService._getAiProcessor();

      const aiError = new Error('AI model failed to extract data.');
      aiProcessor._setExtractAndValidateData(async () => {
        console.error('[AI FAILURE TEST] Mock AI function called - throwing error');
        throw aiError;
      });
      console.error('[AI FAILURE TEST] AI processor mock function injected via DI');

      // Arrange: Prepare a unique flyer file for upload.
      const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueContent = Buffer.concat([
        imageBuffer,
        Buffer.from(`ai-error-test-${Date.now()}`),
      ]);
      const uniqueFileName = `ai-error-test-${Date.now()}.jpg`;
      const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
        type: 'image/jpeg',
      });
      const checksum = await generateFileChecksum(mockImageFile);

      // Track created files for cleanup
      const uploadDir = testStoragePath;
      createdFilePaths.push(path.join(uploadDir, uniqueFileName));

      // Act 1: Upload the file to start the background job.
      const uploadResponse = await request
        .post('/api/v1/ai/upload-and-process')
        .field('baseUrl', 'https://example.com')
        .field('checksum', checksum)
        .attach('flyerFile', uniqueContent, uniqueFileName);

      const { jobId } = uploadResponse.body.data;
      expect(jobId).toBeTypeOf('string');

      // Act 2: Poll for job completion using the new utility.
      const jobStatus = await poll(
        async () => {
          const statusResponse = await request.get(`/api/v1/ai/jobs/${jobId}/status`);
          return statusResponse.body.data;
        },
        (status) => status.state === 'completed' || status.state === 'failed',
        { timeout: 180000, interval: 3000, description: 'AI failure test job' },
      );

      // Assert 1: Check that the job failed.
      if (jobStatus?.state === 'failed') {
        console.error('[TEST DEBUG] AI Failure Test - Job Failed Reason:', jobStatus.failedReason);
        console.error('[TEST DEBUG] AI Failure Test - Job Stack:', jobStatus.stacktrace);
      }
      expect(jobStatus?.state).toBe('failed');
      expect(jobStatus?.failedReason).toContain('AI model failed to extract data.');

      // Assert 2: Verify the flyer was NOT saved in the database.
      const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
      expect(savedFlyer).toBeUndefined();

      // Cleanup: Reset the DI function to restore normal behavior
      aiProcessor._setExtractAndValidateData(null);
      console.error('[AI FAILURE TEST] AI processor DI function reset');
    },
    240000,
  );

  // TODO: Same issue as AI failure test - cannot inject mocks across Vitest's globalSetup boundary.
  it.todo(
    'should handle a database failure during flyer creation - requires mock injection mechanism',
    async () => {
      // Arrange: Use the global flyerProcessingService singleton for DI.
      // Same approach as the AI failure test - access through global singleton.
      const { flyerProcessingService } = await import('../../services/workers.server');
      const aiProcessor = flyerProcessingService._getAiProcessor();

      // Mock AI to return valid data (we need AI to succeed but DB to fail)
      aiProcessor._setExtractAndValidateData(async () => {
        console.error('[DB FAILURE TEST] Mock AI function called - returning valid data');
        return {
          data: {
            store_name: 'DB Failure Test Store',
            valid_from: '2025-01-01',
            valid_to: '2025-01-07',
            store_address: '123 Test St',
            items: [{ item: 'Test Item', price_display: '$1.99', price_in_cents: 199 }],
          },
          needsReview: false,
        };
      });
      console.error('[DB FAILURE TEST] AI processor mock function injected');

      // Inject a failing withTransaction function
      const dbError = new Error('DB transaction failed');
      const failingWithTransaction = vi.fn().mockRejectedValue(dbError);
      console.error('[DB FAILURE TEST] About to inject failingWithTransaction mock');
      flyerProcessingService._getPersistenceService()._setWithTransaction(failingWithTransaction);
      console.error('[DB FAILURE TEST] failingWithTransaction mock injected successfully');

      // Arrange: Prepare a unique flyer file for upload.
      const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueContent = Buffer.concat([
        imageBuffer,
        Buffer.from(`db-error-test-${Date.now()}`),
      ]);
      const uniqueFileName = `db-error-test-${Date.now()}.jpg`;
      const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
        type: 'image/jpeg',
      });
      const checksum = await generateFileChecksum(mockImageFile);

      // Track created files for cleanup
      const uploadDir = testStoragePath;
      createdFilePaths.push(path.join(uploadDir, uniqueFileName));

      // Act 1: Upload the file to start the background job.
      const uploadResponse = await request
        .post('/api/v1/ai/upload-and-process')
        .field('baseUrl', 'https://example.com')
        .field('checksum', checksum)
        .attach('flyerFile', uniqueContent, uniqueFileName);

      const { jobId } = uploadResponse.body.data;
      expect(jobId).toBeTypeOf('string');

      // Act 2: Poll for job completion using the new utility.
      const jobStatus = await poll(
        async () => {
          const statusResponse = await request.get(`/api/v1/ai/jobs/${jobId}/status`);
          return statusResponse.body.data;
        },
        (status) => status.state === 'completed' || status.state === 'failed',
        { timeout: 180000, interval: 3000, description: 'DB failure test job' },
      );

      // Assert 1: Check that the job failed.
      expect(jobStatus?.state).toBe('failed');
      expect(jobStatus?.failedReason).toContain('DB transaction failed');

      // Assert 2: Verify the flyer was NOT saved in the database.
      const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
      expect(savedFlyer).toBeUndefined();

      // Cleanup: Reset the DI functions to restore normal behavior
      aiProcessor._setExtractAndValidateData(null);
      flyerProcessingService._getPersistenceService()._setWithTransaction(null);
      console.error('[DB FAILURE TEST] DI functions reset');
    },
    240000,
  );

  // TODO: Same issue as AI failure test - cannot inject mocks across Vitest's globalSetup boundary.
  it.todo(
    'should NOT clean up temporary files when a job fails - requires mock injection mechanism',
    async () => {
      // Arrange: Use the global flyerProcessingService singleton for DI.
      // Same approach as the AI failure test - access through global singleton.
      const { flyerProcessingService } = await import('../../services/workers.server');
      const aiProcessor = flyerProcessingService._getAiProcessor();

      const aiError = new Error('Simulated AI failure for cleanup test.');
      aiProcessor._setExtractAndValidateData(async () => {
        console.error('[CLEANUP TEST] Mock AI function called - throwing error');
        throw aiError;
      });
      console.error('[CLEANUP TEST] AI processor mock function injected via DI');

      // Arrange: Prepare a unique flyer file for upload.
      const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
      const imageBuffer = await fs.readFile(imagePath);
      const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(`cleanup-test-${Date.now()}`)]);
      const uniqueFileName = `cleanup-test-${Date.now()}.jpg`;
      const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, {
        type: 'image/jpeg',
      });
      const checksum = await generateFileChecksum(mockImageFile);

      // Track the path of the file that will be created in the uploads directory.
      const uploadDir = testStoragePath;
      const tempFilePath = path.join(uploadDir, uniqueFileName);
      createdFilePaths.push(tempFilePath);

      // Act 1: Upload the file to start the background job.
      const uploadResponse = await request
        .post('/api/v1/ai/upload-and-process')
        .field('baseUrl', 'https://example.com')
        .field('checksum', checksum)
        .attach('flyerFile', uniqueContent, uniqueFileName);

      const { jobId } = uploadResponse.body.data;
      expect(jobId).toBeTypeOf('string');

      // Act 2: Poll for job completion using the new utility.
      const jobStatus = await poll(
        async () => {
          const statusResponse = await request.get(`/api/v1/ai/jobs/${jobId}/status`);
          return statusResponse.body.data;
        },
        (status) => status.state === 'completed' || status.state === 'failed',
        { timeout: 180000, interval: 3000, description: 'file cleanup failure test job' },
      );

      // Assert 1: Check that the job actually failed.
      expect(jobStatus?.state).toBe('failed');
      expect(jobStatus?.failedReason).toContain('Simulated AI failure for cleanup test.');

      // Assert 2: Verify the temporary file was NOT deleted.
      // fs.access throws if the file doesn't exist, so we expect it NOT to throw.
      await expect(fs.access(tempFilePath)).resolves.toBeUndefined();

      // Cleanup: Reset the DI function to restore normal behavior
      aiProcessor._setExtractAndValidateData(null);
      console.error('[CLEANUP TEST] AI processor DI function reset');
    },
    240000,
  );
});