Compare commits

4 Commits

| Author | SHA1 | Date |
|---|---|---|
| | cb453aa949 | |
| | 2651bd16ae | |
| | 91e0f0c46f | |
| | e6986d512b | |
package-lock.json (generated, 4 changed lines)

@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.15",
+  "version": "0.9.17",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.15",
+      "version": "0.9.17",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
package.json

@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.15",
+  "version": "0.9.17",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
FlyerReviewPage tests

@@ -59,21 +59,21 @@ describe('FlyerReviewPage', () => {
       file_name: 'flyer1.jpg',
       created_at: '2023-01-01T00:00:00Z',
       store: { name: 'Store A' },
-      icon_url: 'icon1.jpg',
+      icon_url: 'http://example.com/icon1.jpg',
     },
     {
       flyer_id: 2,
       file_name: 'flyer2.jpg',
       created_at: '2023-01-02T00:00:00Z',
       store: { name: 'Store B' },
-      icon_url: 'icon2.jpg',
+      icon_url: 'http://example.com/icon2.jpg',
     },
     {
       flyer_id: 3,
       file_name: 'flyer3.jpg',
       created_at: '2023-01-03T00:00:00Z',
       store: null,
-      icon_url: null,
+      icon_url: 'http://example.com/icon2.jpg',
     },
   ];

AI Service (Server) tests

@@ -203,12 +203,13 @@ describe('AI Service (Server)', () => {

       // Access the private aiClient (which is now the adapter)
       const adapter = (service as any).aiClient;
+      const models = (service as any).models;

       const request = { contents: [{ parts: [{ text: 'test' }] }] };
       await adapter.generateContent(request);

       expect(mockGenerateContent).toHaveBeenCalledWith({
-        model: 'gemini-3-flash-preview',
+        model: models[0],
         ...request,
       });
     });
@@ -238,11 +239,44 @@ describe('AI Service (Server)', () => {
       vi.unstubAllEnvs();
     });

+    it('should use lite models when useLiteModels is true', async () => {
+      // Arrange
+      const { AIService } = await import('./aiService.server');
+      const { logger } = await import('./logger.server');
+      const serviceWithFallback = new AIService(logger);
+      const models_lite = (serviceWithFallback as any).models_lite;
+      const successResponse = { text: 'Success from lite model', candidates: [] };
+
+      mockGenerateContent.mockResolvedValue(successResponse);
+
+      const request = {
+        contents: [{ parts: [{ text: 'test prompt' }] }],
+        useLiteModels: true,
+      };
+      // The adapter strips `useLiteModels` before calling the underlying client,
+      // so we prepare the expected request shape for our assertions.
+      const { useLiteModels, ...apiReq } = request;
+
+      // Act
+      const result = await (serviceWithFallback as any).aiClient.generateContent(request);
+
+      // Assert
+      expect(result).toEqual(successResponse);
+      expect(mockGenerateContent).toHaveBeenCalledTimes(1);
+
+      // Check that the first model from the lite list was used
+      expect(mockGenerateContent).toHaveBeenCalledWith({
+        model: models_lite[0],
+        ...apiReq,
+      });
+    });
+
     it('should try the next model if the first one fails with a quota error', async () => {
       // Arrange
       const { AIService } = await import('./aiService.server');
       const { logger } = await import('./logger.server');
       const serviceWithFallback = new AIService(logger);
+      const models = (serviceWithFallback as any).models;

       const quotaError = new Error('User rate limit exceeded due to quota');
       const successResponse = { text: 'Success from fallback model', candidates: [] };
@@ -260,22 +294,23 @@ describe('AI Service (Server)', () => {
       expect(mockGenerateContent).toHaveBeenCalledTimes(2);

       // Check first call
-      expect(mockGenerateContent).toHaveBeenNthCalledWith(1, { // The first model in the list is now 'gemini-3-flash-preview'
-        model: 'gemini-3-flash-preview',
+      expect(mockGenerateContent).toHaveBeenNthCalledWith(1, { // The first model in the list
+        model: models[0],
         ...request,
       });

       // Check second call
-      expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { // The second model in the list is 'gemini-2.5-pro'
-        model: 'gemini-2.5-pro',
+      expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { // The second model in the list
+        model: models[1],
         ...request,
       });

       // Check that a warning was logged
       expect(logger.warn).toHaveBeenCalledWith(
-        // The warning should be for the model that failed ('gemini-3-flash-preview'), not the next one.
+        // The warning should be for the model that failed ('gemini-2.5-flash'), not the next one.
+        // The warning should be for the model that failed, not the next one.
         expect.stringContaining(
-          "Model 'gemini-3-flash-preview' failed due to quota/rate limit. Trying next model.",
+          `Model '${models[0]}' failed due to quota/rate limit. Trying next model.`,
         ),
       );
     });
@@ -285,6 +320,7 @@ describe('AI Service (Server)', () => {
       const { AIService } = await import('./aiService.server');
       const { logger } = await import('./logger.server');
       const serviceWithFallback = new AIService(logger);
+      const models = (serviceWithFallback as any).models;

       const nonRetriableError = new Error('Invalid API Key');
       mockGenerateContent.mockRejectedValueOnce(nonRetriableError);
@@ -298,8 +334,10 @@ describe('AI Service (Server)', () => {

       expect(mockGenerateContent).toHaveBeenCalledTimes(1);
       expect(logger.error).toHaveBeenCalledWith(
-        { error: nonRetriableError }, // The first model in the list is now 'gemini-3-flash-preview'
-        `[AIService Adapter] Model 'gemini-3-flash-preview' failed with a non-retriable error.`,
+        { error: nonRetriableError }, // The first model in the list is now 'gemini-2.5-flash'
+        `[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
+        { error: nonRetriableError }, // The first model in the list
+        `[AIService Adapter] Model '${models[0]}' failed with a non-retriable error.`,
       );
     });

@@ -839,6 +877,23 @@ describe('AI Service (Server)', () => {
     });
   });

+  describe('generateRecipeSuggestion', () => {
+    it('should call generateContent with useLiteModels set to true', async () => {
+      const ingredients = ['carrots', 'onions'];
+      const expectedPrompt = `Suggest a simple recipe using these ingredients: ${ingredients.join(
+        ', ',
+      )}. Keep it brief.`;
+      mockAiClient.generateContent.mockResolvedValue({ text: 'Some recipe', candidates: [] });
+
+      await aiServiceInstance.generateRecipeSuggestion(ingredients, mockLoggerInstance);
+
+      expect(mockAiClient.generateContent).toHaveBeenCalledWith({
+        contents: [{ parts: [{ text: expectedPrompt }] }],
+        useLiteModels: true,
+      });
+    });
+  });
+
   describe('planTripWithMaps', () => {
     const mockUserLocation: GeolocationCoordinates = {
       latitude: 45,
@@ -949,6 +1004,7 @@ describe('AI Service (Server)', () => {
         userId: 'user123',
         submitterIp: '127.0.0.1',
         userProfileAddress: '123 St, City, Country', // Partial address match based on filter(Boolean)
+        baseUrl: 'http://localhost:3000',
       });
       expect(result.id).toBe('job123');
     });
@@ -970,6 +1026,7 @@ describe('AI Service (Server)', () => {
         expect.objectContaining({
           userId: undefined,
           userProfileAddress: undefined,
+          baseUrl: 'http://localhost:3000',
         }),
       );
     });
AIService implementation (aiService.server)

@@ -23,6 +23,7 @@ import * as db from './db/index.db';
 import { flyerQueue } from './queueService.server';
 import type { Job } from 'bullmq';
 import { createFlyerAndItems } from './db/flyer.db';
+import { getBaseUrl } from '../utils/serverUtils';
 import { generateFlyerIcon } from '../utils/imageProcessor';
 import path from 'path';
 import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError
@@ -91,11 +92,55 @@ export class AIService {
   private fs: IFileSystem;
   private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
   private logger: Logger;
-  // The fallback list is ordered by preference (speed/cost vs. power).
-  // We try the fastest models first, then the more powerful 'pro' model as a high-quality fallback,
-  // and finally the 'lite' model as a last resort.
-  private readonly models = [ 'gemini-3-flash-preview','gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite','gemini-2.0-flash-001','gemini-2.0-flash','gemini-2.0-flash-exp','gemini-2.0-flash-lite-001','gemini-2.0-flash-lite', 'gemma-3-27b-it', 'gemma-3-12b-it'];
-  private readonly models_lite = ["gemma-3-4b-it", "gemma-3-2b-it", "gemma-3-1b-it"];
+
+  // OPTIMIZED: Flyer Image Processing (Vision + Long Output)
+  // PRIORITIES:
+  // 1. Output Limit: Must be 65k+ (Gemini 2.5/3.0) to avoid cutting off data.
+  // 2. Intelligence: 'Pro' models handle messy layouts better.
+  // 3. Quota Management: 'Preview' and 'Exp' models are added as fallbacks to tap into separate rate limits.
+  private readonly models = [
+    // --- TIER A: The Happy Path (Fast & Stable) ---
+    'gemini-2.5-flash', // Primary workhorse. 65k output.
+    'gemini-2.5-flash-lite', // Cost-saver. 65k output.
+
+    // --- TIER B: The Heavy Lifters (Complex Layouts) ---
+    'gemini-2.5-pro', // High IQ for messy flyers. 65k output.
+
+    // --- TIER C: Separate Quota Buckets (Previews) ---
+    'gemini-3-flash-preview', // Newer/Faster. Separate 'Preview' quota. 65k output.
+    'gemini-3-pro-preview', // High IQ. Separate 'Preview' quota. 65k output.
+
+    // --- TIER D: Experimental Buckets (High Capacity) ---
+    'gemini-exp-1206', // Excellent reasoning. Separate 'Experimental' quota. 65k output.
+
+    // --- TIER E: Last Resorts (Lower Capacity/Local) ---
+    'gemma-3-27b-it', // Open model fallback.
+    'gemini-2.0-flash-exp' // Exp fallback. WARNING: 8k output limit. Good for small flyers only.
+  ];
+
+  // OPTIMIZED: Simple Text Tasks (Recipes, Shopping Lists, Summaries)
+  // PRIORITIES:
+  // 1. Cost/Speed: These tasks are simple.
+  // 2. Output Limit: The 8k limit of Gemini 2.0 is perfectly fine here.
+  private readonly models_lite = [
+    // --- Best Value (Smart + Cheap) ---
+    "gemini-2.5-flash-lite", // Current generation efficiency king.
+
+    // --- The "Recycled" Gemini 2.0 Models (Perfect for Text) ---
+    "gemini-2.0-flash-lite-001", // Extremely cheap, very capable for text.
+    "gemini-2.0-flash-001", // Smarter than Lite, good for complex recipes.
+
+    // --- Open Models (Good for simple categorization) ---
+    "gemma-3-12b-it", // Solid reasoning for an open model.
+    "gemma-3-4b-it", // Very fast.
+
+    // --- Quota Fallbacks (Experimental/Preview) ---
+    "gemini-2.0-flash-exp", // Use this separate quota bucket if others are exhausted.
+
+    // --- Edge/Nano Models (Simple string manipulation only) ---
+    "gemma-3n-e4b-it", // Corrected name from JSON
+    "gemma-3n-e2b-it" // Corrected name from JSON
+  ];

   constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
     this.logger = logger;
@@ -780,6 +825,8 @@ async enqueueFlyerProcessing(
         .join(', ');
     }

+    const baseUrl = getBaseUrl(logger);
+
     // 3. Add job to the queue
     const job = await flyerQueue.add('process-flyer', {
       filePath: file.path,
@@ -788,6 +835,7 @@ async enqueueFlyerProcessing(
       userId: userProfile?.user.user_id,
       submitterIp: submitterIp,
       userProfileAddress: userProfileAddress,
+      baseUrl: baseUrl,
     });

     logger.info(
@@ -882,20 +930,7 @@ async enqueueFlyerProcessing(
     const iconsDir = path.join(path.dirname(file.path), 'icons');
     const iconFileName = await generateFlyerIcon(file.path, iconsDir, logger);

-    // Construct proper URLs including protocol and host to satisfy DB constraints.
-    let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
-    if (!baseUrl || !baseUrl.startsWith('http')) {
-      const port = process.env.PORT || 3000;
-      const fallbackUrl = `http://localhost:${port}`;
-      if (baseUrl) {
-        logger.warn(
-          `FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
-        );
-      }
-      baseUrl = fallbackUrl;
-    }
-    baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
+    const baseUrl = getBaseUrl(logger);

     const iconUrl = `${baseUrl}/flyer-images/icons/${iconFileName}`;
     const imageUrl = `${baseUrl}/flyer-images/${file.filename}`;
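For context, the tests earlier in this compare pin down the adapter's fallback contract around these priority lists: a quota or rate-limit failure logs "failed due to quota/rate limit. Trying next model." and moves to the next entry in `models` (or `models_lite` when `useLiteModels` is set), while any other error logs "failed with a non-retriable error." and rethrows. The real adapter lives inside `AIService` and is not shown in full here; the following is only a minimal sketch of that loop, with `callModel` and the quota check as illustrative stand-ins rather than the project's actual helpers.

```typescript
// Sketch only: the real adapter inside AIService may differ in detail.
type GenerateRequest = { contents: unknown[] };
type GenerateFn = (req: { model: string } & GenerateRequest) => Promise<unknown>;
type MinimalLogger = {
  warn: (msg: string) => void;
  error: (ctx: object, msg: string) => void;
};

async function generateWithFallback(
  models: string[],
  request: GenerateRequest,
  callModel: GenerateFn, // stand-in for the underlying model client call
  logger: MinimalLogger,
): Promise<unknown> {
  for (let i = 0; i < models.length; i++) {
    const model = models[i];
    try {
      // Pin the model for this attempt and forward the caller's request as-is.
      return await callModel({ model, ...request });
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      const isQuotaError = /quota|rate limit/i.test(message);
      if (isQuotaError && i < models.length - 1) {
        logger.warn(`Model '${model}' failed due to quota/rate limit. Trying next model.`);
        continue; // fall through to the next model in the priority list
      }
      logger.error(
        { error },
        `[AIService Adapter] Model '${model}' failed with a non-retriable error.`,
      );
      throw error;
    }
  }
  throw new Error('All configured models were exhausted.');
}
```

The tiered ordering means the cheap, high-output-limit models absorb normal traffic, and the preview/experimental entries only matter once earlier quota buckets are exhausted.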
Flyer job mock data (test helper)

@@ -21,6 +21,7 @@ const createMockJobData = (data: Partial<FlyerJobData>): FlyerJobData => ({
   filePath: '/tmp/flyer.jpg',
   originalFileName: 'flyer.jpg',
   checksum: 'checksum-123',
+  baseUrl: 'http://localhost:3000',
   ...data,
 });

FlyerDataTransformer tests

@@ -64,6 +64,7 @@ describe('FlyerDataTransformer', () => {
       const originalFileName = 'my-flyer.pdf';
       const checksum = 'checksum-abc-123';
       const userId = 'user-xyz-456';
+      const baseUrl = 'http://test.host';

       // Act
       const { flyerData, itemsForDb } = await transformer.transform(
@@ -73,11 +74,9 @@ describe('FlyerDataTransformer', () => {
         checksum,
         userId,
         mockLogger,
+        baseUrl,
       );

-      // Dynamically construct the expected base URL, mirroring the logic in the transformer.
-      const expectedBaseUrl = `http://localhost:3000`;
-
       // Assert
       // 0. Check logging
       expect(mockLogger.info).toHaveBeenCalledWith(
@@ -91,8 +90,8 @@ describe('FlyerDataTransformer', () => {
       // 1. Check flyer data
       expect(flyerData).toEqual({
         file_name: originalFileName,
-        image_url: `${expectedBaseUrl}/flyer-images/flyer-page-1.jpg`,
-        icon_url: `${expectedBaseUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
+        image_url: `${baseUrl}/flyer-images/flyer-page-1.jpg`,
+        icon_url: `${baseUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
         checksum,
         store_name: 'Test Store',
         valid_from: '2024-01-01',
@@ -157,11 +156,9 @@ describe('FlyerDataTransformer', () => {
         checksum,
         undefined,
         mockLogger,
+        'http://another.host',
       );

-      // Dynamically construct the expected base URL, mirroring the logic in the transformer.
-      const expectedBaseUrl = `http://localhost:3000`;
-
       // Assert
       // 0. Check logging
       expect(mockLogger.info).toHaveBeenCalledWith(
@@ -178,8 +175,8 @@ describe('FlyerDataTransformer', () => {
       expect(itemsForDb).toHaveLength(0);
       expect(flyerData).toEqual({
         file_name: originalFileName,
-        image_url: `${expectedBaseUrl}/flyer-images/another.png`,
-        icon_url: `${expectedBaseUrl}/flyer-images/icons/icon-another.webp`,
+        image_url: `http://another.host/flyer-images/another.png`,
+        icon_url: `http://another.host/flyer-images/icons/icon-another.webp`,
         checksum,
         store_name: 'Unknown Store (auto)', // Should use fallback
         valid_from: null,
@@ -232,6 +229,7 @@ describe('FlyerDataTransformer', () => {
         'checksum',
         'user-1',
         mockLogger,
+        'http://normalize.host',
       );

       // Assert
@@ -251,4 +249,47 @@ describe('FlyerDataTransformer', () => {
         }),
       );
     });
+
+    it('should use fallback baseUrl if none is provided and log a warning', async () => {
+      // Arrange
+      const aiResult: AiProcessorResult = {
+        data: {
+          store_name: 'Test Store',
+          valid_from: '2024-01-01',
+          valid_to: '2024-01-07',
+          store_address: '123 Test St',
+          items: [],
+        },
+        needsReview: false,
+      };
+      const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+      const baseUrl = undefined; // Explicitly pass undefined for this test
+
+      // The fallback logic uses process.env.PORT || 3000.
+      // The beforeEach sets PORT to '', so it should fallback to 3000.
+      const expectedFallbackUrl = 'http://localhost:3000';
+
+      // Act
+      const { flyerData } = await transformer.transform(
+        aiResult,
+        imagePaths,
+        'my-flyer.pdf',
+        'checksum-abc-123',
+        'user-xyz-456',
+        mockLogger,
+        baseUrl, // Pass undefined here
+      );
+
+      // Assert
+      // 1. Check that a warning was logged
+      expect(mockLogger.warn).toHaveBeenCalledWith(
+        `Base URL not provided in job data. Falling back to default local URL: ${expectedFallbackUrl}`,
+      );
+
+      // 2. Check that the URLs were constructed with the fallback
+      expect(flyerData.image_url).toBe(`${expectedFallbackUrl}/flyer-images/flyer-page-1.jpg`);
+      expect(flyerData.icon_url).toBe(
+        `${expectedFallbackUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
+      );
+    });
   });
FlyerDataTransformer implementation

@@ -55,6 +55,7 @@ export class FlyerDataTransformer {
     checksum: string,
     userId: string | undefined,
     logger: Logger,
+    baseUrl?: string,
   ): Promise<{ flyerData: FlyerInsert; itemsForDb: FlyerItemInsert[] }> {
     logger.info('Starting data transformation from AI output to database format.');

@@ -75,37 +76,29 @@ export class FlyerDataTransformer {
       logger.warn('AI did not return a store name. Using fallback "Unknown Store (auto)".');
     }

-    // Construct proper URLs including protocol and host to satisfy DB constraints.
-    // This logic is made more robust to handle cases where env vars might be present but invalid (e.g., whitespace or missing protocol).
-    let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
-    if (!baseUrl || !baseUrl.startsWith('http')) {
+    // The baseUrl is passed from the job payload to ensure the worker has the correct environment context.
+    // If it's missing for any reason, we fall back to a sensible default for local development.
+    let finalBaseUrl = baseUrl;
+    if (!finalBaseUrl) {
       const port = process.env.PORT || 3000;
-      const fallbackUrl = `http://localhost:${port}`;
-      if (baseUrl) {
-        // It was set but invalid
-        logger.warn(
-          `FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
-        );
-      }
-      baseUrl = fallbackUrl;
+      finalBaseUrl = `http://localhost:${port}`;
+      logger.warn(
+        `Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`,
+      );
     }

-    baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
+    finalBaseUrl = finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;

     const flyerData: FlyerInsert = {
       file_name: originalFileName,
-      image_url: `${baseUrl}/flyer-images/${path.basename(firstImage)}`,
-      icon_url: `${baseUrl}/flyer-images/icons/${iconFileName}`,
+      image_url: `${finalBaseUrl}/flyer-images/${path.basename(firstImage)}`,
+      icon_url: `${finalBaseUrl}/flyer-images/icons/${iconFileName}`,
       checksum,
       store_name: storeName,
       valid_from: extractedData.valid_from,
       valid_to: extractedData.valid_to,
-      store_address: extractedData.store_address, // The number of items is now calculated directly from the transformed data.
+      store_address: extractedData.store_address,
       item_count: itemsForDb.length,
-      // Defensively handle the userId. An empty string ('') is not a valid UUID,
-      // but `null` is. This ensures that any falsy value for userId (undefined, null, '')
-      // is converted to `null` for the database, preventing a 22P02 error.
       uploaded_by: userId ? userId : null,
       status: needsReview ? 'needs_review' : 'processed',
     };
FlyerProcessingService tests

@@ -166,6 +166,7 @@ describe('FlyerProcessingService', () => {
       filePath: '/tmp/flyer.jpg',
       originalFileName: 'flyer.jpg',
       checksum: 'checksum-123',
+      baseUrl: 'http://localhost:3000',
       ...data,
     },
     updateProgress: vi.fn(),
FlyerProcessingService implementation

@@ -99,35 +99,11 @@ export class FlyerProcessingService {
       job.data.checksum,
       job.data.userId,
       logger,
+      job.data.baseUrl,
     );
     stages[2].status = 'completed';
     await job.updateProgress({ stages });

-    // Sanitize URLs before database insertion to prevent constraint violations,
-    // especially in test environments where a base URL might not be configured.
-    const sanitizeUrl = (url: string): string => {
-      if (url.startsWith('http')) {
-        return url;
-      }
-      // If it's a relative path, build an absolute URL.
-      let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
-      if (!baseUrl || !baseUrl.startsWith('http')) {
-        const port = process.env.PORT || 3000;
-        const fallbackUrl = `http://localhost:${port}`;
-        if (baseUrl) {
-          logger.warn(
-            `URL Sanitization: FRONTEND_URL/BASE_URL is invalid ('${baseUrl}'). Falling back to ${fallbackUrl}.`,
-          );
-        }
-        baseUrl = fallbackUrl;
-      }
-      baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
-      return `${baseUrl}${url.startsWith('/') ? url : `/${url}`}`;
-    };
-
-    flyerData.image_url = sanitizeUrl(flyerData.image_url);
-    flyerData.icon_url = sanitizeUrl(flyerData.icon_url);
-
     // Stage 4: Save to Database
     stages[3].status = 'in-progress';
     await job.updateProgress({ stages });
src/tests/e2e/auth.e2e.test.ts

@@ -1,5 +1,5 @@
 // src/tests/e2e/auth.e2e.test.ts
 import { describe, it, expect, afterAll, beforeAll } from 'vitest';
 import * as apiClient from '../../services/apiClient';
 import { cleanupDb } from '../utils/cleanup';
 import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
@@ -13,15 +13,19 @@ describe('Authentication E2E Flow', () => {
   let testUser: UserProfile;
   const createdUserIds: string[] = [];

   beforeAll(async () => {
     // Create a user that can be used for login-related tests in this suite.
-    const { user } = await createAndLoginUser({
-      email: `e2e-login-user-${Date.now()}@example.com`,
-      fullName: 'E2E Login User',
-      // E2E tests use apiClient which doesn't need the `request` object.
-    });
-    testUser = user;
-    createdUserIds.push(user.user.user_id);
+    try {
+      const { user } = await createAndLoginUser({
+        email: `e2e-login-user-${Date.now()}@example.com`,
+        fullName: 'E2E Login User',
+      });
+      testUser = user;
+      createdUserIds.push(user.user.user_id);
+    } catch (error) {
+      console.error('[FATAL] Setup failed. DB might be down.', error);
+      throw error;
+    }
   });

   afterAll(async () => {
@@ -70,7 +74,7 @@ describe('Authentication E2E Flow', () => {
     const firstResponse = await apiClient.registerUser(email, TEST_PASSWORD, 'Duplicate User');
     const firstData = await firstResponse.json();
     expect(firstResponse.status).toBe(201);
-    createdUserIds.push(firstData.userprofile.user.user_id); // Add for cleanup
+    createdUserIds.push(firstData.userprofile.user.user_id);

     // Act 2: Attempt to register the same user again
     const secondResponse = await apiClient.registerUser(email, TEST_PASSWORD, 'Duplicate User');
@@ -186,17 +190,23 @@ describe('Authentication E2E Flow', () => {
       }
       await new Promise((resolve) => setTimeout(resolve, 1000));
     }
-    expect(loginSuccess, 'User should be able to log in after registration before password reset is attempted.').toBe(true);
+    expect(loginSuccess, 'User should be able to log in after registration. DB might be lagging.').toBe(true);

-    // Act 1: Request a password reset.
-    // The test environment returns the token directly in the response for E2E testing.
+    // Act 1: Request a password reset
     const forgotResponse = await apiClient.requestPasswordReset(email);
     const forgotData = await forgotResponse.json();
     const resetToken = forgotData.token;

+    // --- DEBUG SECTION FOR FAILURE ---
+    if (!resetToken) {
+      console.error(' [DEBUG FAILURE] Token missing in response:', JSON.stringify(forgotData, null, 2));
+      console.error(' [DEBUG FAILURE] This usually means the backend hit a DB error or is not in NODE_ENV=test mode.');
+    }
+    // ---------------------------------
+
     // Assert 1: Check that we received a token.
     expect(forgotResponse.status).toBe(200);
-    expect(resetToken).toBeDefined();
+    expect(resetToken, 'Backend returned 200 but no token. Check backend logs for "Connection terminated" errors.').toBeDefined();
     expect(resetToken).toBeTypeOf('string');

     // Act 2: Use the token to set a new password.
@@ -208,7 +218,7 @@ describe('Authentication E2E Flow', () => {
     expect(resetResponse.status).toBe(200);
     expect(resetData.message).toBe('Password has been reset successfully.');

-    // Act 3 & Assert 3 (Verification): Log in with the NEW password to confirm the change.
+    // Act 3: Log in with the NEW password
     const loginResponse = await apiClient.loginUser(email, newPassword, false);
     const loginData = await loginResponse.json();

src/types/job-data.ts

@@ -1,8 +1,8 @@
 // src/types/job-data.ts

 /**
- * Defines the data structure for a flyer processing job.
- * This is the information passed to the worker when a new flyer is uploaded.
+ * Defines the shape of the data payload for a flyer processing job.
+ * This is the data that gets passed to the BullMQ worker.
  */
 export interface FlyerJobData {
   filePath: string;
@@ -11,44 +11,13 @@ export interface FlyerJobData {
   userId?: string;
   submitterIp?: string;
   userProfileAddress?: string;
+  baseUrl: string;
 }

 /**
- * Defines the data structure for an email sending job.
- */
-export interface EmailJobData {
-  to: string;
-  subject: string;
-  text: string;
-  html: string;
-}
-
-/**
- * Defines the data structure for a daily analytics reporting job.
- */
-export interface AnalyticsJobData {
-  reportDate: string; // e.g., '2024-10-26'
-}
-
-/**
- * Defines the data structure for a weekly analytics reporting job.
- */
-export interface WeeklyAnalyticsJobData {
-  reportYear: number;
-  reportWeek: number; // ISO week number (1-53)
-}
-
-/**
- * Defines the data structure for a file cleanup job, which runs after a flyer is successfully processed.
+ * Defines the shape of the data payload for a file cleanup job.
  */
 export interface CleanupJobData {
   flyerId: number;
-  paths?: string[];
+  paths: string[];
 }
-
-/**
- * Defines the data structure for the job that cleans up expired password reset tokens.
- */
-export interface TokenCleanupJobData {
-  timestamp: string;
-}
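With `baseUrl` now a required field, every producer of this job type has to supply it; the `enqueueFlyerProcessing` change above does so via `getBaseUrl`. A hypothetical payload matching the fields visible in this diff is sketched below, assuming the required fields are the ones the test fixtures default (filePath, originalFileName, checksum, baseUrl); all values and the import path are illustrative.

```typescript
// Illustrative only: values are made up; the field set follows FlyerJobData above.
import type { FlyerJobData } from './job-data'; // path assumed — adjust to your layout

const exampleJob: FlyerJobData = {
  filePath: '/tmp/uploads/flyer.jpg',
  originalFileName: 'flyer.jpg',
  checksum: 'checksum-123',
  submitterIp: '127.0.0.1',         // optional; userId and userProfileAddress omitted here
  baseUrl: 'http://localhost:3000', // now required; the worker builds image/icon URLs from it
};
```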
src/utils/serverUtils.ts (new file, 26 lines)

@@ -0,0 +1,26 @@
+// src/utils/serverUtils.ts
+import type { Logger } from 'pino';
+
+/**
+ * Constructs a fully qualified base URL for generating absolute URLs.
+ * It prioritizes `FRONTEND_URL`, then `BASE_URL`, and falls back to a localhost URL
+ * based on the `PORT` environment variable. It also logs a warning if the provided
+ * URL is invalid or missing.
+ *
+ * @param logger - The logger instance to use for warnings.
+ * @returns A validated, fully qualified base URL without a trailing slash.
+ */
+export function getBaseUrl(logger: Logger): string {
+  let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
+  if (!baseUrl || !baseUrl.startsWith('http')) {
+    const port = process.env.PORT || 3000;
+    const fallbackUrl = `http://localhost:${port}`;
+    if (baseUrl) {
+      logger.warn(
+        `[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
+      );
+    }
+    baseUrl = fallbackUrl;
+  }
+  return baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
+}
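As a rough illustration of how this helper behaves, assuming only the environment handling shown above and that `pino` is available (the helper already imports its Logger type):

```typescript
import pino from 'pino';
import { getBaseUrl } from '../utils/serverUtils';

// Assumed behaviour, derived from the implementation above:
//   FRONTEND_URL and BASE_URL unset, PORT=8080                  -> 'http://localhost:8080'
//   FRONTEND_URL='ftp://bad-value' (no http prefix)             -> warning logged, 'http://localhost:<PORT or 3000>'
//   FRONTEND_URL unset, BASE_URL='https://flyers.example.com/'  -> 'https://flyers.example.com' (trailing slash stripped)
const baseUrl = getBaseUrl(pino());
const imageUrl = `${baseUrl}/flyer-images/flyer-page-1.jpg`;
```

Centralizing this in one utility is what lets the duplicated env-probing blocks be deleted from `AIService` and `FlyerProcessingService` in the hunks above.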