Compare commits
12 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e2049c6b9f | |
| | a3839c2f0d | |
| | c1df3d7b1b | |
| | 94782f030d | |
| | 1c25b79251 | |
| | 0b0fa8294d | |
| | f49f3a75fb | |
| | 8f14044ae6 | |
| | 55e1e425f4 | |
| | 68b16ad2e8 | |
| | 6a28934692 | |
| | 78c4a5fee6 | |
package-lock.json (generated, 4 changed lines):
```diff
@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.41",
+  "version": "0.9.47",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.41",
+      "version": "0.9.47",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
```
package.json:

```diff
@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.41",
+  "version": "0.9.47",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
```
AI Service (Server) tests:

```diff
@@ -197,15 +197,17 @@ describe('AI Service (Server)', () => {
       const service = new AIService(mockLoggerInstance);

       // Assert: Check that the warning was logged and the mock client is in use
-      expect(mockLoggerInstance.warn).toHaveBeenCalledWith(
-        '[AIService] GoogleGenAI client could not be initialized (likely missing API key in test environment). Using mock placeholder.',
+      expect(mockLoggerInstance.info).toHaveBeenCalledWith(
+        '[AIService Constructor] Test environment detected. Using internal mock for AI client to prevent real API calls in INTEGRATION TESTS.',
       );
       await expect(
-        (service as any).aiClient.generateContent({ contents: [] }),
+        (service as any).aiClient.generateContent({ contents: [], useLiteModels: false }),
       ).resolves.toBeDefined();
     });

     it('should use the adapter to call generateContent when using real GoogleGenAI client', async () => {
       vi.stubEnv('NODE_ENV', 'production');
       vi.stubEnv('VITEST_POOL_ID', '');
+      vi.stubEnv('GEMINI_API_KEY', 'test-key');
       // We need to force the constructor to use the real client logic, not the injected mock.
       // So we instantiate AIService without passing aiClient.
@@ -229,6 +231,8 @@ describe('AI Service (Server)', () => {
     });

     it('should throw error if adapter is called without content', async () => {
       vi.stubEnv('NODE_ENV', 'production');
       vi.stubEnv('VITEST_POOL_ID', '');
+      vi.stubEnv('GEMINI_API_KEY', 'test-key');
+      vi.resetModules();
       const { AIService } = await import('./aiService.server');
@@ -244,6 +248,8 @@ describe('AI Service (Server)', () => {
   describe('Model Fallback Logic', () => {
     beforeEach(() => {
       vi.unstubAllEnvs();
       vi.stubEnv('NODE_ENV', 'production');
       vi.stubEnv('VITEST_POOL_ID', '');
+      vi.stubEnv('GEMINI_API_KEY', 'test-key');
+      vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
       mockGenerateContent.mockReset();
```
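The updated tests stub `GEMINI_API_KEY` and call `vi.resetModules()` before re-importing the service so the constructor re-reads the environment. A standalone sketch (not from the repository) of that stub-then-construct pattern, using a hypothetical `EnvSensitiveClient` in place of `AIService`:

```typescript
import { describe, it, expect, vi } from 'vitest';

// Hypothetical class for illustration: it reads the environment when constructed,
// the same way AIService checks NODE_ENV / VITEST_POOL_ID / GEMINI_API_KEY.
class EnvSensitiveClient {
  readonly hasKey = !!process.env.GEMINI_API_KEY;
}

describe('env stubbing with vi.stubEnv', () => {
  it('sees the stubbed key only after the stub is applied', () => {
    vi.stubEnv('GEMINI_API_KEY', 'test-key');
    // vi.resetModules() is what the diff adds for state captured at module load time:
    // it forces the next import to re-evaluate the module under the new env vars.
    const client = new EnvSensitiveClient();
    expect(client.hasKey).toBe(true);
    vi.unstubAllEnvs();
  });
});
```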
AIService class (aiService.server):

```diff
@@ -136,85 +136,81 @@ export class AIService {
     "gemma-3n-e2b-it" // Corrected name from JSON
   ];

+  // Helper to return valid mock data for tests
+  private getMockFlyerData() {
+    return {
+      store_name: 'Mock Store from AIService',
+      valid_from: '2025-01-01',
+      valid_to: '2025-01-07',
+      store_address: '123 Mock St',
+      items: [
+        {
+          item: 'Mocked Integration Item',
+          price_display: '$1.99',
+          price_in_cents: 199,
+          quantity: 'each',
+          category_name: 'Mock Category',
+          master_item_id: null,
+        },
+      ],
+    };
+  }

   constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
     this.logger = logger;
     this.logger.info('---------------- [AIService] Constructor Start ----------------');

+    const isTestEnvironment = process.env.NODE_ENV === 'test' || !!process.env.VITEST_POOL_ID;

     if (aiClient) {
       this.logger.info(
-        '[AIService Constructor] Using provided mock AI client. This indicates a TEST environment.',
+        '[AIService Constructor] Using provided mock AI client. This indicates a UNIT TEST environment.',
       );
       this.aiClient = aiClient;
+    } else if (isTestEnvironment) {
+      this.logger.info(
+        '[AIService Constructor] Test environment detected. Using internal mock for AI client to prevent real API calls in INTEGRATION TESTS.',
+      );
+      this.aiClient = {
+        generateContent: async (request) => {
+          this.logger.info(
+            { useLiteModels: request.useLiteModels },
+            '[AIService] Mock generateContent called in test environment.',
+          );
+          const mockData = this.getMockFlyerData();
+          return {
+            text: JSON.stringify(mockData),
+          } as unknown as GenerateContentResponse;
+        },
+      };
     } else {
       this.logger.info(
-        '[AIService Constructor] No mock client provided. Initializing Google GenAI client for PRODUCTION-LIKE environment.',
+        '[AIService Constructor] No mock client provided and not a test environment. Initializing Google GenAI client for PRODUCTION.',
       );
-      // Determine if we are in any kind of test environment.
-      // VITEST_POOL_ID is reliably set by Vitest during test runs.
-      const isTestEnvironment = process.env.NODE_ENV === 'test' || !!process.env.VITEST_POOL_ID;
       this.logger.info(
         {
           isTestEnvironment,
           nodeEnv: process.env.NODE_ENV,
           vitestPoolId: process.env.VITEST_POOL_ID,
           hasApiKey: !!process.env.GEMINI_API_KEY,
         },
         '[AIService Constructor] Environment check',
       );

       const apiKey = process.env.GEMINI_API_KEY;
       if (!apiKey) {
         this.logger.warn('[AIService] GEMINI_API_KEY is not set.');
-        // Allow initialization without key in test/build environments if strictly needed
-        if (!isTestEnvironment) {
-          this.logger.error('[AIService] GEMINI_API_KEY is required in non-test environments.');
-          throw new Error('GEMINI_API_KEY environment variable not set for server-side AI calls.');
-        } else {
-          this.logger.warn(
-            '[AIService Constructor] GEMINI_API_KEY is missing, but this is a test environment, so proceeding.',
-          );
-        }
-      }
-      // In test mode without injected client, we might not have a key.
-      // The stubs below protect against calling the undefined client.
-      // This is the correct modern SDK pattern. We instantiate the main client.
-      const genAI = apiKey ? new GoogleGenAI({ apiKey }) : null;
-      if (!genAI) {
-        this.logger.warn(
-          '[AIService] GoogleGenAI client could not be initialized (likely missing API key in test environment). Using mock placeholder.',
-        );
+        this.logger.error('[AIService] GEMINI_API_KEY is required in non-test environments.');
+        throw new Error('GEMINI_API_KEY environment variable not set for server-side AI calls.');
       }
+      const genAI = new GoogleGenAI({ apiKey });

       // We create a shim/adapter that matches the old structure but uses the new SDK call pattern.
       // This preserves the dependency injection pattern used throughout the class.
-      this.aiClient = genAI
-        ? {
-            generateContent: async (request) => {
-              if (!request.contents || request.contents.length === 0) {
-                this.logger.error(
-                  { request },
-                  '[AIService Adapter] generateContent called with no content, which is invalid.',
-                );
-                throw new Error('AIService.generateContent requires at least one content element.');
-              }
-
-              const { useLiteModels, ...apiReq } = request;
-              const models = useLiteModels ? this.models_lite : this.models;
-              return this._generateWithFallback(genAI, apiReq, models);
-            },
+      this.aiClient = {
+        generateContent: async (request) => {
+          if (!request.contents || request.contents.length === 0) {
+            this.logger.error(
+              { request },
+              '[AIService Adapter] generateContent called with no content, which is invalid.',
+            );
+            throw new Error('AIService.generateContent requires at least one content element.');
           }
-        : {
-            // This is the updated mock for testing, matching the new response shape.
-            generateContent: async () => {
-              this.logger.warn(
-                '[AIService] Mock generateContent called. This should only happen in tests when no API key is available.',
-              );
-              // Return a minimal valid JSON object structure to prevent downstream parsing errors.
-              const mockResponse = { store_name: 'Mock Store', items: [] };
-              return {
-                text: JSON.stringify(mockResponse),
-              } as unknown as GenerateContentResponse;
-            },
-          };
+
+          const { useLiteModels, ...apiReq } = request;
+          const models = useLiteModels ? this.models_lite : this.models;
+          return this._generateWithFallback(genAI, apiReq, models);
+        },
+      };
     }

     this.fs = fs || fsPromises;
```
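The reworked constructor now chooses between three clients: an injected mock (unit tests), an internal mock whenever a test environment is detected (integration tests), and the real `GoogleGenAI` client, for which a missing `GEMINI_API_KEY` is now a hard error. A simplified, self-contained sketch of that selection logic; the types and the `selectAiClient` helper are stand-ins, not the project's actual `IAiClient` interface:

```typescript
// Stand-in types; the real project defines IAiClient, Logger, GenerateContentResponse, etc.
interface AiRequest { contents: unknown[]; useLiteModels?: boolean }
interface AiClient { generateContent(req: AiRequest): Promise<{ text: string }> }

function selectAiClient(injected?: AiClient): AiClient {
  const isTestEnvironment =
    process.env.NODE_ENV === 'test' || !!process.env.VITEST_POOL_ID;

  // 1. A client injected by the caller wins (unit tests pass a mock here).
  if (injected) return injected;

  // 2. Detected test environment: return an internal mock so integration
  //    tests never hit the real Gemini API.
  if (isTestEnvironment) {
    return {
      generateContent: async () =>
        ({ text: JSON.stringify({ store_name: 'Mock Store', items: [] }) }),
    };
  }

  // 3. Production: the key is mandatory, matching the new fail-fast behaviour.
  if (!process.env.GEMINI_API_KEY) {
    throw new Error('GEMINI_API_KEY environment variable not set for server-side AI calls.');
  }
  return {
    // Placeholder for the real adapter that wraps the GoogleGenAI SDK call.
    generateContent: async (req) => {
      if (!req.contents || req.contents.length === 0) {
        throw new Error('generateContent requires at least one content element.');
      }
      return { text: '' }; // the real adapter delegates to the SDK with model fallback
    },
  };
}
```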
aiService.server, enqueueFlyerProcessing:

```diff
@@ -887,8 +883,8 @@ async enqueueFlyerProcessing(
     const itemsArray = Array.isArray(rawItems) ? rawItems : typeof rawItems === 'string' ? JSON.parse(rawItems) : [];
     const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
       ...item,
-      // Ensure price_display is never null to satisfy database constraints.
-      price_display: item.price_display ?? '',
+      // Ensure empty or nullish price_display is stored as NULL to satisfy database constraints.
+      price_display: item.price_display || null,
       master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
       quantity: item.quantity ?? 1,
       view_count: 0,
```
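The swap from `?? ''` to `|| null` changes how empty strings are handled: `??` only falls back on `null`/`undefined`, while `||` also treats `''` as falsy, so an empty `price_display` is now stored as NULL rather than as an empty string. A small standalone illustration:

```typescript
// '??' falls back only on null/undefined; '||' also falls back on '' (and other falsy values).
const samples: (string | null | undefined)[] = ['$1.99', '', null, undefined];

for (const priceDisplay of samples) {
  const oldBehaviour = priceDisplay ?? '';    // '' stays ''
  const newBehaviour = priceDisplay || null;  // '' becomes null
  console.log({ priceDisplay, oldBehaviour, newBehaviour });
}
// { priceDisplay: '', oldBehaviour: '', newBehaviour: null }  <-- the case this change targets
```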
Flyer DB Service tests:

```diff
@@ -360,6 +360,58 @@ describe('Flyer DB Service', () => {
         'Database error in insertFlyerItems',
       );
     });
+
+    it('should sanitize empty or whitespace-only price_display to null', async () => {
+      const itemsData: FlyerItemInsert[] = [
+        {
+          item: 'Free Item',
+          price_display: '', // Empty string
+          price_in_cents: 0,
+          quantity: '1',
+          category_name: 'Promo',
+          view_count: 0,
+          click_count: 0,
+        },
+        {
+          item: 'Whitespace Item',
+          price_display: ' ', // Whitespace only
+          price_in_cents: null,
+          quantity: '1',
+          category_name: 'Promo',
+          view_count: 0,
+          click_count: 0,
+        },
+      ];
+      const mockItems = itemsData.map((item, i) =>
+        createMockFlyerItem({ ...item, flyer_item_id: i + 1, flyer_id: 1 }),
+      );
+      mockPoolInstance.query.mockResolvedValue({ rows: mockItems });
+
+      await flyerRepo.insertFlyerItems(1, itemsData, mockLogger);
+
+      expect(mockPoolInstance.query).toHaveBeenCalledTimes(1);
+
+      // Check that the values array passed to the query has null for price_display
+      const queryValues = mockPoolInstance.query.mock.calls[0][1];
+      expect(queryValues).toEqual([
+        1, // flyerId for item 1
+        'Free Item',
+        null, // Sanitized price_display for item 1
+        0,
+        '1',
+        'Promo',
+        0,
+        0,
+        1, // flyerId for item 2
+        'Whitespace Item',
+        null, // Sanitized price_display for item 2
+        null,
+        '1',
+        'Promo',
+        0,
+        0,
+      ]);
+    });
   });

   describe('createFlyerAndItems', () => {
@@ -433,6 +485,34 @@ describe('Flyer DB Service', () => {
       );
     });
+
+    it('should create a flyer with no items if items array is empty', async () => {
+      const flyerData: FlyerInsert = {
+        file_name: 'empty.jpg',
+        store_name: 'Empty Store',
+      } as FlyerInsert;
+      const itemsData: FlyerItemInsert[] = [];
+      const mockFlyer = createMockFlyer({ ...flyerData, flyer_id: 100, store_id: 2 });
+
+      const mockClient = { query: vi.fn() };
+      mockClient.query
+        .mockResolvedValueOnce({ rows: [], rowCount: 0 }) // findOrCreateStore (insert)
+        .mockResolvedValueOnce({ rows: [{ store_id: 2 }] }) // findOrCreateStore (select)
+        .mockResolvedValueOnce({ rows: [mockFlyer] }); // insertFlyer
+
+      const result = await createFlyerAndItems(
+        flyerData,
+        itemsData,
+        mockLogger,
+        mockClient as unknown as PoolClient,
+      );
+
+      expect(result).toEqual({
+        flyer: mockFlyer,
+        items: [],
+      });
+      expect(mockClient.query).toHaveBeenCalledTimes(3);
+    });

     it('should propagate an error if any step fails', async () => {
       const flyerData: FlyerInsert = {
         file_name: 'fail.jpg',
```
FlyerRepository:

```diff
@@ -63,6 +63,7 @@ export class FlyerRepository {
    * @returns The newly created flyer record with its ID.
    */
   async insertFlyer(flyerData: FlyerDbInsert, logger: Logger): Promise<Flyer> {
+    console.error('[DEBUG] FlyerRepository.insertFlyer called with:', JSON.stringify(flyerData, null, 2));
     try {
       const query = `
         INSERT INTO flyers (
@@ -139,10 +140,18 @@ export class FlyerRepository {
       valueStrings.push(
         `($${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++}, $${paramIndex++})`,
       );
+
+      // Sanitize price_display. The database requires a non-empty string.
+      // We provide a default value if the input is null, undefined, or an empty string.
+      const priceDisplay =
+        item.price_display && item.price_display.trim() !== ''
+          ? item.price_display
+          : 'N/A';
+
       values.push(
         flyerId,
         item.item,
-        item.price_display,
+        priceDisplay,
         item.price_in_cents ?? null,
         item.quantity ?? '',
         item.category_name ?? null,
```
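At the repository layer, the added lines replace a nullish, empty, or whitespace-only `price_display` with the literal `'N/A'` before binding the query parameters. A standalone sketch of that check; the helper name is illustrative, the condition mirrors the diff:

```typescript
// Mirrors the sanitization added in insertFlyerItems; the helper name is not from the repository.
function sanitizePriceDisplay(priceDisplay: string | null | undefined): string {
  return priceDisplay && priceDisplay.trim() !== '' ? priceDisplay : 'N/A';
}

console.log(sanitizePriceDisplay('$1.99')); // '$1.99'
console.log(sanitizePriceDisplay(''));      // 'N/A'
console.log(sanitizePriceDisplay('   '));   // 'N/A'
console.log(sanitizePriceDisplay(null));    // 'N/A'
```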
FlyerDataTransformer:

```diff
@@ -62,10 +62,13 @@ export class FlyerDataTransformer {
     baseUrl: string,
     logger: Logger,
   ): { imageUrl: string; iconUrl: string } {
+    console.log('[DEBUG] FlyerDataTransformer._buildUrls inputs:', { imageFileName, iconFileName, baseUrl });
     logger.debug({ imageFileName, iconFileName, baseUrl }, 'Building URLs');
     const finalBaseUrl = baseUrl || getBaseUrl(logger);
+    console.log('[DEBUG] FlyerDataTransformer._buildUrls finalBaseUrl resolved to:', finalBaseUrl);
     const imageUrl = `${finalBaseUrl}/flyer-images/${imageFileName}`;
     const iconUrl = `${finalBaseUrl}/flyer-images/icons/${iconFileName}`;
+    console.log('[DEBUG] FlyerDataTransformer._buildUrls constructed:', { imageUrl, iconUrl });
     logger.debug({ imageUrl, iconUrl }, 'Constructed URLs');
     return { imageUrl, iconUrl };
   }
@@ -90,6 +93,7 @@ export class FlyerDataTransformer {
     logger: Logger,
     baseUrl: string,
   ): Promise<{ flyerData: FlyerInsert; itemsForDb: FlyerItemInsert[] }> {
+    console.log('[DEBUG] FlyerDataTransformer.transform called with baseUrl:', baseUrl);
     logger.info('Starting data transformation from AI output to database format.');

     try {
```
FlyerProcessingService:

```diff
@@ -103,6 +103,8 @@ export class FlyerProcessingService {
     // The main processed image path is already in `allFilePaths` via `createdImagePaths`.
     allFilePaths.push(path.join(iconsDir, iconFileName));

+    console.log('[DEBUG] FlyerProcessingService calling transformer with:', { originalFileName: job.data.originalFileName, imageFileName, iconFileName, checksum: job.data.checksum, baseUrl: job.data.baseUrl });
+
     const { flyerData, itemsForDb } = await this.transformer.transform(
       aiResult,
       job.data.originalFileName,
```
Flyer Processing Background Job Integration Test:

```diff
@@ -15,6 +15,8 @@ import { cleanupFiles } from '../utils/cleanupFiles';
 import piexif from 'piexifjs';
 import exifParser from 'exif-parser';
 import sharp from 'sharp';
+// FIX: Import the singleton instance directly to spy on it
+import { aiService } from '../../services/aiService.server';


 /**
@@ -25,16 +27,8 @@ const { mockExtractCoreData } = vi.hoisted(() => ({
   mockExtractCoreData: vi.fn(),
 }));

-// Mock the AI service to prevent real API calls during integration tests.
-// This is crucial for making the tests reliable and fast. We don't want to
-// depend on the external Gemini API.
-vi.mock('../../services/aiService.server', async (importOriginal) => {
-  const actual = await importOriginal<typeof import('../../services/aiService.server')>();
-  // To preserve the class instance methods of `aiService`, we must modify the
-  // instance directly rather than creating a new plain object with spread syntax.
-  actual.aiService.extractCoreDataFromFlyerImage = mockExtractCoreData;
-  return actual;
-});
+// REMOVED: vi.mock('../../services/aiService.server', ...)
+// The previous mock was not effectively intercepting the singleton instance used by the worker.

 // Mock the main DB service to allow for simulating transaction failures.
 // By default, it will use the real implementation.
@@ -57,6 +51,11 @@ describe('Flyer Processing Background Job Integration Test', () => {
     // for the database, satisfying the 'url_check' constraint.
     // IMPORTANT: This must run BEFORE the app is imported so workers inherit the env var.
     vi.stubEnv('FRONTEND_URL', 'https://example.com');
+    console.log('[TEST SETUP] FRONTEND_URL stubbed to:', process.env.FRONTEND_URL);
+
+    // FIX: Spy on the actual singleton instance. This ensures that when the worker
+    // imports 'aiService', it gets the instance we are controlling here.
+    vi.spyOn(aiService, 'extractCoreDataFromFlyerImage').mockImplementation(mockExtractCoreData);

     const appModule = await import('../../../server');
     const app = appModule.default;
@@ -66,13 +65,14 @@ describe('Flyer Processing Background Job Integration Test', () => {
   // FIX: Reset mocks before each test to ensure isolation.
   // This prevents "happy path" mocks from leaking into error handling tests and vice versa.
   beforeEach(async () => {
+    console.log('[TEST SETUP] Resetting mocks before test execution');
     // 1. Reset AI Service Mock to default success state
     mockExtractCoreData.mockReset();
     mockExtractCoreData.mockResolvedValue({
       store_name: 'Mock Store',
-      valid_from: null,
-      valid_to: null,
-      store_address: null,
+      valid_from: '2025-01-01',
+      valid_to: '2025-01-07',
+      store_address: '123 Mock St',
       items: [
         {
           item: 'Mocked Integration Item',
@@ -94,6 +94,7 @@ describe('Flyer Processing Background Job Integration Test', () => {

   afterAll(async () => {
     vi.unstubAllEnvs(); // Clean up env stubs
+    vi.restoreAllMocks(); // Restore the AI spy

     // Use the centralized cleanup utility.
     await cleanupDb({
@@ -110,7 +111,10 @@ describe('Flyer Processing Background Job Integration Test', () => {
    * It uploads a file, polls for completion, and verifies the result in the database.
    */
   const runBackgroundProcessingTest = async (user?: UserProfile, token?: string) => {
+    console.log(`[TEST START] runBackgroundProcessingTest. User: ${user?.user.email ?? 'ANONYMOUS'}`);
     // Arrange: Load a mock flyer PDF.
+    console.log('[TEST] about to read test-flyer-image.jpg')

     const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
     const imageBuffer = await fs.readFile(imagePath);
     // Create a unique buffer and filename for each test run to ensure a unique checksum.
@@ -119,26 +123,34 @@ describe('Flyer Processing Background Job Integration Test', () => {
     const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
     const mockImageFile = new File([new Uint8Array(uniqueContent)], uniqueFileName, { type: 'image/jpeg' });
     const checksum = await generateFileChecksum(mockImageFile);
+    console.log('[TEST] mockImageFile created with uniqueFileName: ', uniqueFileName)
+    console.log('[TEST DATA] Generated checksum for test:', checksum);

     // Track created files for cleanup
     const uploadDir = path.resolve(__dirname, '../../../flyer-images');
     createdFilePaths.push(path.join(uploadDir, uniqueFileName));
+    console.log('[TEST] createdFilesPaths after 1st push: ', createdFilePaths)
     // The icon name is derived from the original filename.
     const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
     createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));

     // Act 1: Upload the file to start the background job.
+    const testBaseUrl = getTestBaseUrl();
+    console.log('[TEST ACTION] Uploading file with baseUrl:', testBaseUrl);

     const uploadReq = request
       .post('/api/ai/upload-and-process')
       .field('checksum', checksum)
       // Pass the baseUrl directly in the form data to ensure the worker receives it,
       // bypassing issues with vi.stubEnv in multi-threaded test environments.
-      .field('baseUrl', getTestBaseUrl())
+      .field('baseUrl', testBaseUrl)
       .attach('flyerFile', uniqueContent, uniqueFileName);
     if (token) {
       uploadReq.set('Authorization', `Bearer ${token}`);
     }
     const uploadResponse = await uploadReq;
+    console.log('[TEST RESPONSE] Upload status:', uploadResponse.status);
+    console.log('[TEST RESPONSE] Upload body:', JSON.stringify(uploadResponse.body));
     const { jobId } = uploadResponse.body;

     // Assert 1: Check that a job ID was returned.
@@ -152,6 +164,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
         statusReq.set('Authorization', `Bearer ${token}`);
       }
       const statusResponse = await statusReq;
+      console.log(`[TEST POLL] Job ${jobId} current state:`, statusResponse.body?.state);
       return statusResponse.body;
     },
     (status) => status.state === 'completed' || status.state === 'failed',
@@ -290,6 +303,10 @@ describe('Flyer Processing Background Job Integration Test', () => {
     const parser = exifParser.create(savedImageBuffer);
     const exifResult = parser.parse();

+    console.log('[TEST] savedImagePath during EXIF data stripping: ', savedImagePath)
+    console.log('[TEST] exifResult.tags: ', exifResult.tags)
+
+
     // The `tags` object will be empty if no EXIF data is found.
     expect(exifResult.tags).toEqual({});
     expect(exifResult.tags.Software).toBeUndefined();
@@ -371,6 +388,9 @@ describe('Flyer Processing Background Job Integration Test', () => {
     const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
     createdFilePaths.push(savedImagePath); // Add final path for cleanup

+    console.log('[TEST] savedImagePath during PNG metadata stripping: ', savedImagePath)
+
+
     const savedImageMetadata = await sharp(savedImagePath).metadata();

     // The test should fail here initially because PNGs are not processed.
@@ -378,6 +398,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
       expect(savedImageMetadata.exif).toBeUndefined();
     },
+    240000,

   );

   it(
@@ -385,6 +406,7 @@ it(
     async () => {
       // Arrange: Mock the AI service to throw an error for this specific test.
       const aiError = new Error('AI model failed to extract data.');
+      // Update the spy implementation to reject
       mockExtractCoreData.mockRejectedValue(aiError);

       // Arrange: Prepare a unique flyer file for upload.
@@ -534,6 +556,7 @@ it(
       await expect(fs.access(tempFilePath), 'Expected temporary file to exist after job failure, but it was deleted.');
     },
+    240000,

   );

 });
```
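The integration test replaces the `vi.mock` factory with `vi.spyOn` on the exported singleton, so the exact instance the worker imports is the one whose method gets mocked. A minimal sketch of that pattern, with a hypothetical in-file singleton standing in for the real `aiService`:

```typescript
import { describe, it, expect, vi, afterAll } from 'vitest';

// Hypothetical singleton standing in for the real aiService export.
const aiService = {
  async extractCoreDataFromFlyerImage(_path: string) {
    return { store_name: 'Real Store', items: [] };
  },
};

const mockExtractCoreData = vi.fn().mockResolvedValue({ store_name: 'Mock Store', items: [] });

describe('spying on a shared singleton', () => {
  it('replaces the method on the instance every consumer imports', async () => {
    // Because the spy patches the instance itself, any module holding a reference
    // to the same singleton sees the mock -- unlike a vi.mock factory, which only
    // affects modules resolved after the mock is registered.
    vi.spyOn(aiService, 'extractCoreDataFromFlyerImage').mockImplementation(mockExtractCoreData);

    const result = await aiService.extractCoreDataFromFlyerImage('flyer.jpg');
    expect(result.store_name).toBe('Mock Store');
    expect(mockExtractCoreData).toHaveBeenCalledTimes(1);
  });

  afterAll(() => {
    vi.restoreAllMocks();
  });
});
```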
Gamification Flow Integration Test:

```diff
@@ -216,7 +216,7 @@ describe('Gamification Flow Integration Test', () => {
       checksum: checksum,
       extractedData: {
         store_name: storeName,
-        items: [{ item: 'Legacy Milk', price_in_cents: 250 }],
+        items: [{ item: 'Legacy Milk', price_in_cents: 250, price_display: '$2.50' }],
       },
     };
```
Price History API Integration Test (/api/price-history):

```diff
@@ -2,7 +2,9 @@
 import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
 import supertest from 'supertest';
 import { getPool } from '../../services/db/connection.db';
-import { TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';
+import { TEST_EXAMPLE_DOMAIN, createAndLoginUser } from '../utils/testHelpers';
 import { cleanupDb } from '../utils/cleanup';
+import type { UserProfile } from '../../types';

 /**
  * @vitest-environment node
@@ -10,6 +12,9 @@ import { TEST_EXAMPLE_DOMAIN } from '../utils/testHelpers';

 describe('Price History API Integration Test (/api/price-history)', () => {
   let request: ReturnType<typeof supertest>;
+  let authToken: string;
+  let testUser: UserProfile;
+  const createdUserIds: string[] = [];
   let masterItemId: number;
   let storeId: number;
   let flyerId1: number;
@@ -21,6 +26,15 @@ describe('Price History API Integration Test (/api/price-history)', () => {
     const app = (await import('../../../server')).default;
     request = supertest(app);

+    // Create a user for the tests
+    const email = `price-test-${Date.now()}@example.com`;
+    ({ user: testUser, token: authToken } = await createAndLoginUser({
+      email,
+      fullName: 'Price Test User',
+      request,
+    }));
+    createdUserIds.push(testUser.user.user_id);
+
     const pool = getPool();

     // 1. Create a master grocery item
@@ -74,6 +88,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {

   afterAll(async () => {
     vi.unstubAllEnvs();
+    await cleanupDb({ userIds: createdUserIds });
     const pool = getPool();
     // The CASCADE on the tables should handle flyer_items.
     // The delete on flyers cascades to flyer_items, which fires a trigger `recalculate_price_history_on_flyer_item_delete`.
@@ -97,7 +112,9 @@ describe('Price History API Integration Test (/api/price-history)', () => {
   });

   it('should return the correct price history for a given master item ID', async () => {
-    const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [masterItemId] });
+    const response = await request.post('/api/price-history')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ masterItemIds: [masterItemId] });

     expect(response.status).toBe(200);
     expect(response.body).toBeInstanceOf(Array);
@@ -111,7 +128,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
   it('should respect the limit parameter', async () => {
     const response = await request
       .post('/api/price-history')
-      .set('Authorization', 'Bearer ${token}')
+      .set('Authorization', `Bearer ${authToken}`)
       .send({ masterItemIds: [masterItemId], limit: 2 });

     expect(response.status).toBe(200);
@@ -123,7 +140,7 @@ describe('Price History API Integration Test (/api/price-history)', () => {
   it('should respect the offset parameter', async () => {
     const response = await request
       .post('/api/price-history')
-      .set('Authorization', 'Bearer ${token}')
+      .set('Authorization', `Bearer ${authToken}`)
       .send({ masterItemIds: [masterItemId], limit: 2, offset: 1 });

     expect(response.status).toBe(200);
@@ -133,7 +150,9 @@ describe('Price History API Integration Test (/api/price-history)', () => {
   });

   it('should return price history sorted by date in ascending order', async () => {
-    const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [masterItemId] });
+    const response = await request.post('/api/price-history')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ masterItemIds: [masterItemId] });

     expect(response.status).toBe(200);
     const history = response.body;
@@ -148,7 +167,9 @@ describe('Price History API Integration Test (/api/price-history)', () => {
   });

   it('should return an empty array for a master item ID with no price history', async () => {
-    const response = await request.post('/api/price-history').set('Authorization', 'Bearer ${token}').send({ masterItemIds: [999999] });
+    const response = await request.post('/api/price-history')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ masterItemIds: [999999] });
     expect(response.status).toBe(200);
     expect(response.body).toEqual([]);
   });
```
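The main fix in this suite is quoting: `'Bearer ${token}'` in single quotes sends the literal text `${token}`, while a backtick template literal interpolates the variable (now the `authToken` created in `beforeAll`). A two-line illustration:

```typescript
const authToken = 'abc123';
console.log('Bearer ${authToken}');  // "Bearer ${authToken}" -- no interpolation, the request is effectively unauthenticated
console.log(`Bearer ${authToken}`);  // "Bearer abc123" -- what the route actually needs
```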
Public API Routes Integration Tests:

```diff
@@ -227,24 +227,26 @@ describe('Public API Routes Integration Tests', () => {

   describe('Rate Limiting on Public Routes', () => {
     it('should block requests to /api/personalization/master-items after exceeding the limit', async () => {
-      const limit = 100; // Matches publicReadLimiter config
-      // We only need to verify it blocks eventually, but running 100 requests in a test is slow.
-      // Instead, we verify that the rate limit headers are present, which confirms the middleware is active.
-
-      const response = await request
-        .get('/api/personalization/master-items')
-        .set('X-Test-Rate-Limit-Enable', 'true'); // Opt-in to rate limiting
+      // The limit might be higher than 5. We loop enough times to ensure we hit the rate limit.
+      const maxRequests = 30;
+      let blockedResponse: any;

-      expect(response.status).toBe(200);
-      expect(response.headers).toHaveProperty('x-ratelimit-limit');
-      expect(response.headers).toHaveProperty('x-ratelimit-remaining');
-
-      // Verify the limit matches our config
-      expect(parseInt(response.headers['x-ratelimit-limit'])).toBe(limit);
-
-      // Verify we consumed one
-      const remaining = parseInt(response.headers['x-ratelimit-remaining']);
-      expect(remaining).toBeLessThan(limit);
+      for (let i = 0; i < maxRequests; i++) {
+        const response = await request
+          .get('/api/personalization/master-items')
+          .set('X-Test-Rate-Limit-Enable', 'true'); // Enable rate limiter middleware
+
+        if (response.status === 429) {
+          blockedResponse = response;
+          break;
+        }
+        expect(response.status).toBe(200);
+      }
+
+      expect(blockedResponse).toBeDefined();
+      expect(blockedResponse.status).toBe(429);
+      expect(blockedResponse.headers).toHaveProperty('x-ratelimit-limit');
+      expect(blockedResponse.headers).toHaveProperty('x-ratelimit-remaining');
     });
   });
 });
```
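Rather than asserting on rate-limit headers after a single request, the test now loops until the middleware returns 429. The same idea, sketched as a reusable helper; the helper itself is not part of the repository, while the endpoint and the `X-Test-Rate-Limit-Enable` header come from the diff:

```typescript
import type supertest from 'supertest';

// Generic helper: send GET requests until the server responds 429 or maxRequests is reached.
// `request` is assumed to be a supertest agent like the one used in these integration tests.
async function requestUntilRateLimited(
  request: ReturnType<typeof supertest>,
  url: string,
  maxRequests = 30,
) {
  for (let i = 0; i < maxRequests; i++) {
    const response = await request.get(url).set('X-Test-Rate-Limit-Enable', 'true');
    if (response.status === 429) return response;
  }
  return undefined; // limiter never triggered within maxRequests
}

// Usage inside a test:
// const blocked = await requestUntilRateLimited(request, '/api/personalization/master-items');
// expect(blocked).toBeDefined();
// expect(blocked!.status).toBe(429);
```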
Vitest global setup:

```diff
@@ -22,6 +22,11 @@ const getPool = () => {
  * and then rebuilds it from the master rollup script.
  */
 export async function setup() {
+  // Ensure we are in the correct environment for these tests.
+  process.env.NODE_ENV = 'test';
+  // Set the FRONTEND_URL globally for any scripts or processes spawned here.
+  process.env.FRONTEND_URL = process.env.FRONTEND_URL || 'https://example.com';
+
   // --- START DEBUG LOGGING ---
   // Log the database connection details being used by the Vitest GLOBAL SETUP process.
   // These variables are inherited from the CI environment.
```
Image processing utilities (processAndSaveImage, generateFlyerIcon):

```diff
@@ -43,6 +43,7 @@ export async function processAndSaveImage(
       .toFile(outputPath);

     logger.info(`Successfully processed image and saved to ${outputPath}`);
+    console.log('[DEBUG] processAndSaveImage returning:', outputFileName);
     return outputFileName;
   } catch (error) {
     logger.error(
@@ -84,6 +85,7 @@ export async function generateFlyerIcon(
       .toFile(outputPath);

     logger.info(`Successfully generated icon: ${outputPath}`);
+    console.log('[DEBUG] generateFlyerIcon returning:', iconFileName);
     return iconFileName;
   } catch (error) {
     logger.error(
```