Compare commits
9 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7a1c14ce89 | ||
| 6fafc3d089 | |||
|
|
4316866bce | ||
| 356c1a1894 | |||
|
|
2a310648ca | ||
| 8592633c22 | |||
|
|
0a9cdb8709 | ||
| 0d21e098f8 | |||
| b6799ed167 |
@@ -390,8 +390,15 @@ jobs:
|
||||
|
||||
run: |
|
||||
# Fail-fast check to ensure secrets are configured in Gitea.
|
||||
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
||||
echo "ERROR: One or more test database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_TEST) are not set in Gitea repository settings."
|
||||
MISSING_SECRETS=""
|
||||
if [ -z "$DB_HOST" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_HOST"; fi
|
||||
if [ -z "$DB_USER" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_USER"; fi
|
||||
if [ -z "$DB_PASSWORD" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_PASSWORD"; fi
|
||||
if [ -z "$DB_NAME" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_NAME"; fi
|
||||
if [ -z "$JWT_SECRET" ]; then MISSING_SECRETS="${MISSING_SECRETS} JWT_SECRET"; fi
|
||||
|
||||
if [ ! -z "$MISSING_SECRETS" ]; then
|
||||
echo "ERROR: The following required secrets are missing in Gitea:${MISSING_SECRETS}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ if (missingSecrets.length > 0) {
|
||||
console.warn('\n[ecosystem.config.cjs] ⚠️ WARNING: The following environment variables are MISSING in the shell:');
|
||||
missingSecrets.forEach(key => console.warn(` - ${key}`));
|
||||
console.warn('[ecosystem.config.cjs] The application may crash if these are required for startup.\n');
|
||||
process.exit(1); // Fail fast so PM2 doesn't attempt to start a broken app
|
||||
} else {
|
||||
console.log('[ecosystem.config.cjs] ✅ Critical environment variables are present.');
|
||||
}
|
||||
|
||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "flyer-crawler",
|
||||
"version": "0.2.11",
|
||||
"version": "0.2.15",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "flyer-crawler",
|
||||
"version": "0.2.11",
|
||||
"version": "0.2.15",
|
||||
"dependencies": {
|
||||
"@bull-board/api": "^6.14.2",
|
||||
"@bull-board/express": "^6.14.2",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "flyer-crawler",
|
||||
"private": true,
|
||||
"version": "0.2.11",
|
||||
"version": "0.2.15",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
||||
|
||||
@@ -260,6 +260,13 @@ const jwtOptions = {
|
||||
secretOrKey: JWT_SECRET,
|
||||
};
|
||||
|
||||
// --- DEBUG LOGGING FOR JWT SECRET ---
|
||||
if (!JWT_SECRET) {
|
||||
logger.fatal('[Passport] CRITICAL: JWT_SECRET is missing or empty in environment variables! JwtStrategy will fail.');
|
||||
} else {
|
||||
logger.info(`[Passport] JWT_SECRET loaded successfully (length: ${JWT_SECRET.length}).`);
|
||||
}
|
||||
|
||||
passport.use(
|
||||
new JwtStrategy(jwtOptions, async (jwt_payload, done) => {
|
||||
logger.debug(
|
||||
|
||||
@@ -19,6 +19,7 @@ vi.mock('./logger.client', () => ({
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -285,9 +286,25 @@ describe('AI API Client (Network Mocking with MSW)', () => {
|
||||
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow('Job not found');
|
||||
});
|
||||
|
||||
it('should throw a generic error if the API response is not valid JSON', async () => {
|
||||
server.use(http.get(`http://localhost/api/ai/jobs/${jobId}/status`, () => HttpResponse.text('Invalid JSON')));
|
||||
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow(expect.any(SyntaxError));
|
||||
it('should throw a specific error if a 200 OK response is not valid JSON', async () => {
|
||||
server.use(
|
||||
http.get(`http://localhost/api/ai/jobs/${jobId}/status`, () => {
|
||||
// A 200 OK response that is not JSON is a server-side contract violation.
|
||||
return HttpResponse.text('This should have been JSON', { status: 200 });
|
||||
}),
|
||||
);
|
||||
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow(
|
||||
'Failed to parse job status from a successful API response.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error with status text if the non-ok API response is not valid JSON', async () => {
|
||||
server.use(
|
||||
http.get(`http://localhost/api/ai/jobs/${jobId}/status`, () => {
|
||||
return HttpResponse.text('Gateway Timeout', { status: 504, statusText: 'Gateway Timeout' });
|
||||
}),
|
||||
);
|
||||
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow('API Error: 504 Gateway Timeout');
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ import type {
|
||||
GroundedResponse,
|
||||
} from '../types';
|
||||
import { logger } from './logger.client';
|
||||
import { apiFetch } from './apiClient';
|
||||
import { apiFetch, authedGet, authedPost, authedPostForm } from './apiClient';
|
||||
|
||||
/**
|
||||
* Uploads a flyer file to the backend to be processed asynchronously.
|
||||
@@ -33,14 +33,7 @@ export const uploadAndProcessFlyer = async (
|
||||
|
||||
logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
|
||||
|
||||
const response = await apiFetch(
|
||||
'/ai/upload-and-process',
|
||||
{
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
},
|
||||
{ tokenOverride },
|
||||
);
|
||||
const response = await authedPostForm('/ai/upload-and-process', formData, { tokenOverride });
|
||||
|
||||
if (!response.ok) {
|
||||
let errorBody;
|
||||
@@ -101,18 +94,29 @@ export const getJobStatus = async (
|
||||
jobId: string,
|
||||
tokenOverride?: string,
|
||||
): Promise<JobStatus> => {
|
||||
const response = await apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
|
||||
const response = await authedGet(`/ai/jobs/${jobId}/status`, { tokenOverride });
|
||||
|
||||
// Handle non-OK responses first, as they might not have a JSON body.
|
||||
if (!response.ok) {
|
||||
let errorMessage = `API Error: ${response.status} ${response.statusText}`;
|
||||
try {
|
||||
// Try to get a more specific message from the body.
|
||||
const errorData = await response.json();
|
||||
if (errorData.message) {
|
||||
errorMessage = errorData.message;
|
||||
}
|
||||
} catch (e) {
|
||||
// The body was not JSON, which is fine for a server error page.
|
||||
// The default message is sufficient.
|
||||
logger.warn('getJobStatus received a non-JSON error response.', { status: response.status });
|
||||
}
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
// If we get here, the response is OK (2xx). Now parse the body.
|
||||
try {
|
||||
const statusData: JobStatus = await response.json();
|
||||
|
||||
if (!response.ok) {
|
||||
// If the HTTP response itself is an error (e.g., 404, 500), throw an error.
|
||||
// Use the message from the JSON body if available.
|
||||
const errorMessage = (statusData as any).message || `API Error: ${response.status}`;
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
// If the job itself has failed, we should treat this as an error condition
|
||||
// for the polling logic by rejecting the promise. This will stop the polling loop.
|
||||
if (statusData.state === 'failed') {
|
||||
@@ -130,9 +134,13 @@ export const getJobStatus = async (
|
||||
|
||||
return statusData;
|
||||
} catch (error) {
|
||||
// This block catches errors from `response.json()` (if the body is not valid JSON)
|
||||
// and also re-throws the errors we created above.
|
||||
throw error;
|
||||
// If it's the specific error we threw, just re-throw it.
|
||||
if (error instanceof JobFailedError) {
|
||||
throw error;
|
||||
}
|
||||
// This now primarily catches JSON parsing errors on an OK response, which is unexpected.
|
||||
logger.error('getJobStatus failed to parse a successful API response.', { error });
|
||||
throw new Error('Failed to parse job status from a successful API response.');
|
||||
}
|
||||
};
|
||||
|
||||
@@ -145,14 +153,7 @@ export const isImageAFlyer = (
|
||||
|
||||
// Use apiFetchWithAuth for FormData to let the browser set the correct Content-Type.
|
||||
// The URL must be relative, as the helper constructs the full path.
|
||||
return apiFetch(
|
||||
'/ai/check-flyer',
|
||||
{
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
},
|
||||
{ tokenOverride },
|
||||
);
|
||||
return authedPostForm('/ai/check-flyer', formData, { tokenOverride });
|
||||
};
|
||||
|
||||
export const extractAddressFromImage = (
|
||||
@@ -162,14 +163,7 @@ export const extractAddressFromImage = (
|
||||
const formData = new FormData();
|
||||
formData.append('image', imageFile);
|
||||
|
||||
return apiFetch(
|
||||
'/ai/extract-address',
|
||||
{
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
},
|
||||
{ tokenOverride },
|
||||
);
|
||||
return authedPostForm('/ai/extract-address', formData, { tokenOverride });
|
||||
};
|
||||
|
||||
export const extractLogoFromImage = (
|
||||
@@ -181,14 +175,7 @@ export const extractLogoFromImage = (
|
||||
formData.append('images', file);
|
||||
});
|
||||
|
||||
return apiFetch(
|
||||
'/ai/extract-logo',
|
||||
{
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
},
|
||||
{ tokenOverride },
|
||||
);
|
||||
return authedPostForm('/ai/extract-logo', formData, { tokenOverride });
|
||||
};
|
||||
|
||||
export const getQuickInsights = (
|
||||
@@ -196,16 +183,7 @@ export const getQuickInsights = (
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
return apiFetch(
|
||||
'/ai/quick-insights',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ items }),
|
||||
signal,
|
||||
},
|
||||
{ tokenOverride, signal },
|
||||
);
|
||||
return authedPost('/ai/quick-insights', { items }, { tokenOverride, signal });
|
||||
};
|
||||
|
||||
export const getDeepDiveAnalysis = (
|
||||
@@ -213,16 +191,7 @@ export const getDeepDiveAnalysis = (
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
return apiFetch(
|
||||
'/ai/deep-dive',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ items }),
|
||||
signal,
|
||||
},
|
||||
{ tokenOverride, signal },
|
||||
);
|
||||
return authedPost('/ai/deep-dive', { items }, { tokenOverride, signal });
|
||||
};
|
||||
|
||||
export const searchWeb = (
|
||||
@@ -230,16 +199,7 @@ export const searchWeb = (
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
return apiFetch(
|
||||
'/ai/search-web',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ query }),
|
||||
signal,
|
||||
},
|
||||
{ tokenOverride, signal },
|
||||
);
|
||||
return authedPost('/ai/search-web', { query }, { tokenOverride, signal });
|
||||
};
|
||||
|
||||
// ============================================================================
|
||||
@@ -254,15 +214,7 @@ export const planTripWithMaps = async (
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
logger.debug('Stub: planTripWithMaps called with location:', { userLocation });
|
||||
return apiFetch(
|
||||
'/ai/plan-trip',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ items, store, userLocation }),
|
||||
},
|
||||
{ signal, tokenOverride },
|
||||
);
|
||||
return authedPost('/ai/plan-trip', { items, store, userLocation }, { signal, tokenOverride });
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -276,16 +228,7 @@ export const generateImageFromText = (
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
logger.debug('Stub: generateImageFromText called with prompt:', { prompt });
|
||||
return apiFetch(
|
||||
'/ai/generate-image',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ prompt }),
|
||||
signal,
|
||||
},
|
||||
{ tokenOverride, signal },
|
||||
);
|
||||
return authedPost('/ai/generate-image', { prompt }, { tokenOverride, signal });
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -299,16 +242,7 @@ export const generateSpeechFromText = (
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
logger.debug('Stub: generateSpeechFromText called with text:', { text });
|
||||
return apiFetch(
|
||||
'/ai/generate-speech',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ text }),
|
||||
signal,
|
||||
},
|
||||
{ tokenOverride, signal },
|
||||
);
|
||||
return authedPost('/ai/generate-speech', { text }, { tokenOverride, signal });
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -361,11 +295,7 @@ export const rescanImageArea = (
|
||||
formData.append('cropArea', JSON.stringify(cropArea));
|
||||
formData.append('extractionType', extractionType);
|
||||
|
||||
return apiFetch(
|
||||
'/ai/rescan-area',
|
||||
{ method: 'POST', body: formData },
|
||||
{ tokenOverride },
|
||||
);
|
||||
return authedPostForm('/ai/rescan-area', formData, { tokenOverride });
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -379,12 +309,5 @@ export const compareWatchedItemPrices = (
|
||||
): Promise<Response> => {
|
||||
// Use the apiFetch wrapper for consistency with other API calls in this file.
|
||||
// This centralizes token handling and base URL logic.
|
||||
return apiFetch(
|
||||
'/ai/compare-prices',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ items: watchedItems }),
|
||||
},
|
||||
{ signal },
|
||||
)};
|
||||
return authedPost('/ai/compare-prices', { items: watchedItems }, { signal });
|
||||
};
|
||||
|
||||
@@ -596,40 +596,6 @@ describe('AI Service (Server)', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('_normalizeExtractedItems (private method)', () => {
|
||||
it('should replace null or undefined fields with default values', () => {
|
||||
const rawItems: {
|
||||
item: string;
|
||||
price_display: null;
|
||||
quantity: undefined;
|
||||
category_name: null;
|
||||
master_item_id: null;
|
||||
}[] = [
|
||||
{
|
||||
item: 'Test',
|
||||
price_display: null,
|
||||
quantity: undefined,
|
||||
category_name: null,
|
||||
master_item_id: null,
|
||||
},
|
||||
];
|
||||
const [normalized] = (
|
||||
aiServiceInstance as unknown as {
|
||||
_normalizeExtractedItems: (items: typeof rawItems) => {
|
||||
price_display: string;
|
||||
quantity: string;
|
||||
category_name: string;
|
||||
master_item_id: undefined;
|
||||
}[];
|
||||
}
|
||||
)._normalizeExtractedItems(rawItems);
|
||||
expect(normalized.price_display).toBe('');
|
||||
expect(normalized.quantity).toBe('');
|
||||
expect(normalized.category_name).toBe('Other/Miscellaneous');
|
||||
expect(normalized.master_item_id).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractTextFromImageArea', () => {
|
||||
it('should call sharp to crop the image and call the AI with the correct prompt', async () => {
|
||||
console.log("TEST START: 'should call sharp to crop...'");
|
||||
|
||||
@@ -7,6 +7,17 @@ import { http, HttpResponse } from 'msw';
|
||||
vi.unmock('./apiClient');
|
||||
|
||||
import * as apiClient from './apiClient';
|
||||
import {
|
||||
createMockAddressPayload,
|
||||
createMockBudget,
|
||||
createMockLoginPayload,
|
||||
createMockProfileUpdatePayload,
|
||||
createMockRecipeCommentPayload,
|
||||
createMockRegisterUserPayload,
|
||||
createMockSearchQueryPayload,
|
||||
createMockShoppingListItemPayload,
|
||||
createMockWatchedItemPayload,
|
||||
} from '../tests/utils/mockFactories';
|
||||
|
||||
// Mock the logger to keep test output clean and verifiable.
|
||||
vi.mock('./logger', () => ({
|
||||
@@ -229,33 +240,6 @@ describe('API Client', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Analytics API Functions', () => {
|
||||
it('trackFlyerItemInteraction should log a warning on failure', async () => {
|
||||
const { logger } = await import('./logger.client');
|
||||
const apiError = new Error('Network failed');
|
||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
||||
|
||||
// We can now await this properly because we added 'return' in apiClient.ts
|
||||
await apiClient.trackFlyerItemInteraction(123, 'click');
|
||||
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
|
||||
error: apiError,
|
||||
});
|
||||
});
|
||||
|
||||
it('logSearchQuery should log a warning on failure', async () => {
|
||||
const { logger } = await import('./logger.client');
|
||||
const apiError = new Error('Network failed');
|
||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
||||
|
||||
await apiClient.logSearchQuery({
|
||||
query_text: 'test',
|
||||
result_count: 0,
|
||||
was_successful: false,
|
||||
});
|
||||
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
|
||||
});
|
||||
});
|
||||
|
||||
describe('apiFetch (with FormData)', () => {
|
||||
it('should handle FormData correctly by not setting Content-Type', async () => {
|
||||
localStorage.setItem('authToken', 'form-data-token');
|
||||
@@ -317,10 +301,11 @@ describe('API Client', () => {
|
||||
});
|
||||
|
||||
it('addWatchedItem should send a POST request with the correct body', async () => {
|
||||
await apiClient.addWatchedItem('Apples', 'Produce');
|
||||
const watchedItemData = createMockWatchedItemPayload({ itemName: 'Apples', category: 'Produce' });
|
||||
await apiClient.addWatchedItem(watchedItemData.itemName, watchedItemData.category);
|
||||
|
||||
expect(capturedUrl?.pathname).toBe('/api/users/watched-items');
|
||||
expect(capturedBody).toEqual({ itemName: 'Apples', category: 'Produce' });
|
||||
expect(capturedBody).toEqual(watchedItemData);
|
||||
});
|
||||
|
||||
it('removeWatchedItem should send a DELETE request to the correct URL', async () => {
|
||||
@@ -337,12 +322,12 @@ describe('API Client', () => {
|
||||
});
|
||||
|
||||
it('createBudget should send a POST request with budget data', async () => {
|
||||
const budgetData = {
|
||||
const budgetData = createMockBudget({
|
||||
name: 'Groceries',
|
||||
amount_cents: 50000,
|
||||
period: 'monthly' as const,
|
||||
period: 'monthly',
|
||||
start_date: '2024-01-01',
|
||||
};
|
||||
});
|
||||
await apiClient.createBudget(budgetData);
|
||||
|
||||
expect(capturedUrl?.pathname).toBe('/api/budgets');
|
||||
@@ -461,7 +446,7 @@ describe('API Client', () => {
|
||||
|
||||
it('addShoppingListItem should send a POST request with item data', async () => {
|
||||
const listId = 42;
|
||||
const itemData = { customItemName: 'Paper Towels' };
|
||||
const itemData = createMockShoppingListItemPayload({ customItemName: 'Paper Towels' });
|
||||
await apiClient.addShoppingListItem(listId, itemData);
|
||||
|
||||
expect(capturedUrl?.pathname).toBe(`/api/users/shopping-lists/${listId}/items`);
|
||||
@@ -547,7 +532,7 @@ describe('API Client', () => {
|
||||
|
||||
it('addRecipeComment should send a POST request with content and optional parentId', async () => {
|
||||
const recipeId = 456;
|
||||
const commentData = { content: 'This is a reply', parentCommentId: 789 };
|
||||
const commentData = createMockRecipeCommentPayload({ content: 'This is a reply', parentCommentId: 789 });
|
||||
await apiClient.addRecipeComment(recipeId, commentData.content, commentData.parentCommentId);
|
||||
expect(capturedUrl?.pathname).toBe(`/api/recipes/${recipeId}/comments`);
|
||||
expect(capturedBody).toEqual(commentData);
|
||||
@@ -563,7 +548,7 @@ describe('API Client', () => {
|
||||
describe('User Profile and Settings API Functions', () => {
|
||||
it('updateUserProfile should send a PUT request with profile data', async () => {
|
||||
localStorage.setItem('authToken', 'user-settings-token');
|
||||
const profileData = { full_name: 'John Doe' };
|
||||
const profileData = createMockProfileUpdatePayload({ full_name: 'John Doe' });
|
||||
await apiClient.updateUserProfile(profileData, { tokenOverride: 'override-token' });
|
||||
expect(capturedUrl?.pathname).toBe('/api/users/profile');
|
||||
expect(capturedBody).toEqual(profileData);
|
||||
@@ -619,14 +604,14 @@ describe('API Client', () => {
|
||||
});
|
||||
|
||||
it('registerUser should send a POST request with user data', async () => {
|
||||
await apiClient.registerUser('test@example.com', 'password123', 'Test User');
|
||||
expect(capturedUrl?.pathname).toBe('/api/auth/register');
|
||||
expect(capturedBody).toEqual({
|
||||
const userData = createMockRegisterUserPayload({
|
||||
email: 'test@example.com',
|
||||
password: 'password123',
|
||||
full_name: 'Test User',
|
||||
avatar_url: undefined,
|
||||
});
|
||||
await apiClient.registerUser(userData.email, userData.password, userData.full_name);
|
||||
expect(capturedUrl?.pathname).toBe('/api/auth/register');
|
||||
expect(capturedBody).toEqual(userData);
|
||||
});
|
||||
|
||||
it('deleteUserAccount should send a DELETE request with the confirmation password', async () => {
|
||||
@@ -654,7 +639,7 @@ describe('API Client', () => {
|
||||
});
|
||||
|
||||
it('updateUserAddress should send a PUT request with address data', async () => {
|
||||
const addressData = { address_line_1: '123 Main St', city: 'Anytown' };
|
||||
const addressData = createMockAddressPayload({ address_line_1: '123 Main St', city: 'Anytown' });
|
||||
await apiClient.updateUserAddress(addressData);
|
||||
expect(capturedUrl?.pathname).toBe('/api/users/profile/address');
|
||||
expect(capturedBody).toEqual(addressData);
|
||||
@@ -942,53 +927,49 @@ describe('API Client', () => {
|
||||
});
|
||||
|
||||
it('logSearchQuery should send a POST request with query data', async () => {
|
||||
const queryData = { query_text: 'apples', result_count: 10, was_successful: true };
|
||||
const queryData = createMockSearchQueryPayload({ query_text: 'apples', result_count: 10, was_successful: true });
|
||||
await apiClient.logSearchQuery(queryData);
|
||||
expect(capturedUrl?.pathname).toBe('/api/search/log');
|
||||
expect(capturedBody).toEqual(queryData);
|
||||
});
|
||||
|
||||
it('trackFlyerItemInteraction should log a warning on failure', async () => {
|
||||
const { logger } = await import('./logger.client');
|
||||
const apiError = new Error('Network failed');
|
||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
||||
const { logger } = await import('./logger.client');
|
||||
|
||||
// We can now await this properly because we added 'return' in apiClient.ts
|
||||
await apiClient.trackFlyerItemInteraction(123, 'click');
|
||||
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
|
||||
error: apiError,
|
||||
});
|
||||
|
||||
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
|
||||
error: apiError,
|
||||
});
|
||||
});
|
||||
|
||||
it('logSearchQuery should log a warning on failure', async () => {
|
||||
const { logger } = await import('./logger.client');
|
||||
const apiError = new Error('Network failed');
|
||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
||||
const { logger } = await import('./logger.client');
|
||||
|
||||
await apiClient.logSearchQuery({
|
||||
const queryData = createMockSearchQueryPayload({
|
||||
query_text: 'test',
|
||||
result_count: 0,
|
||||
was_successful: false,
|
||||
});
|
||||
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
|
||||
|
||||
await apiClient.logSearchQuery(queryData);
|
||||
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
|
||||
});
|
||||
});
|
||||
|
||||
describe('Authentication API Functions', () => {
|
||||
it('loginUser should send a POST request with credentials', async () => {
|
||||
await apiClient.loginUser('test@example.com', 'password123', true);
|
||||
expect(capturedUrl?.pathname).toBe('/api/auth/login');
|
||||
expect(capturedBody).toEqual({
|
||||
const loginData = createMockLoginPayload({
|
||||
email: 'test@example.com',
|
||||
password: 'password123',
|
||||
rememberMe: true,
|
||||
});
|
||||
await apiClient.loginUser(loginData.email, loginData.password, loginData.rememberMe);
|
||||
expect(capturedUrl?.pathname).toBe('/api/auth/login');
|
||||
expect(capturedBody).toEqual(loginData);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -109,16 +109,22 @@ export const sendDealNotificationEmail = async (
|
||||
|
||||
const text = `Hi ${recipientName},\n\nWe found some great deals on items you're watching. Visit the deals page on the site to learn more.\n\nFlyer Crawler`;
|
||||
|
||||
// Use the generic sendEmail function to send the composed email
|
||||
await sendEmail(
|
||||
{
|
||||
to,
|
||||
subject,
|
||||
text,
|
||||
html,
|
||||
},
|
||||
logger,
|
||||
);
|
||||
try {
|
||||
// Use the generic sendEmail function to send the composed email
|
||||
await sendEmail(
|
||||
{
|
||||
to,
|
||||
subject,
|
||||
text,
|
||||
html,
|
||||
},
|
||||
logger,
|
||||
);
|
||||
} catch (err) {
|
||||
const error = err instanceof Error ? err : new Error(String(err));
|
||||
logger.error({ err: error, to, subject }, 'Failed to send email.');
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
75
src/services/flyerAiProcessor.server.test.ts
Normal file
75
src/services/flyerAiProcessor.server.test.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
// src/services/flyerAiProcessor.server.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import { AiDataValidationError } from './processingErrors';
|
||||
import { logger } from './logger.server';
|
||||
import type { AIService } from './aiService.server';
|
||||
import type { PersonalizationRepository } from './db/personalization.db';
|
||||
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||
|
||||
vi.mock('./logger.server', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
child: vi.fn().mockReturnThis(),
|
||||
},
|
||||
}));
|
||||
|
||||
const createMockJobData = (data: Partial<FlyerJobData>): FlyerJobData => ({
|
||||
filePath: '/tmp/flyer.jpg',
|
||||
originalFileName: 'flyer.jpg',
|
||||
checksum: 'checksum-123',
|
||||
...data,
|
||||
});
|
||||
|
||||
describe('FlyerAiProcessor', () => {
|
||||
let service: FlyerAiProcessor;
|
||||
let mockAiService: AIService;
|
||||
let mockPersonalizationRepo: PersonalizationRepository;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
mockAiService = {
|
||||
extractCoreDataFromFlyerImage: vi.fn(),
|
||||
} as unknown as AIService;
|
||||
mockPersonalizationRepo = {
|
||||
getAllMasterItems: vi.fn().mockResolvedValue([]),
|
||||
} as unknown as PersonalizationRepository;
|
||||
|
||||
service = new FlyerAiProcessor(mockAiService, mockPersonalizationRepo);
|
||||
});
|
||||
|
||||
it('should call AI service and return validated data on success', async () => {
|
||||
const jobData = createMockJobData({});
|
||||
const mockAiResponse = {
|
||||
store_name: 'AI Store',
|
||||
valid_from: '2024-01-01',
|
||||
valid_to: '2024-01-07',
|
||||
store_address: '123 AI St',
|
||||
items: [],
|
||||
};
|
||||
vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
|
||||
|
||||
const result = await service.extractAndValidateData([], jobData, logger);
|
||||
|
||||
expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(1);
|
||||
expect(mockPersonalizationRepo.getAllMasterItems).toHaveBeenCalledTimes(1);
|
||||
expect(result).toEqual(mockAiResponse);
|
||||
});
|
||||
|
||||
it('should throw AiDataValidationError if AI response validation fails', async () => {
|
||||
const jobData = createMockJobData({});
|
||||
// Mock AI to return data missing a required field ('store_name')
|
||||
vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue({
|
||||
valid_from: '2024-01-01',
|
||||
items: [],
|
||||
} as any);
|
||||
|
||||
await expect(service.extractAndValidateData([], jobData, logger)).rejects.toThrow(
|
||||
AiDataValidationError,
|
||||
);
|
||||
});
|
||||
});
|
||||
88
src/services/flyerAiProcessor.server.ts
Normal file
88
src/services/flyerAiProcessor.server.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
// src/services/flyerAiProcessor.server.ts
|
||||
import { z } from 'zod';
|
||||
import type { Logger } from 'pino';
|
||||
import type { AIService } from './aiService.server';
|
||||
import type { PersonalizationRepository } from './db/personalization.db';
|
||||
import { AiDataValidationError } from './processingErrors';
|
||||
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for AI Response Validation ---
|
||||
const ExtractedFlyerItemSchema = z.object({
|
||||
item: z.string().nullable(),
|
||||
price_display: z.string().nullable(),
|
||||
price_in_cents: z.number().nullable(),
|
||||
quantity: z.string().nullable(),
|
||||
category_name: z.string().nullable(),
|
||||
master_item_id: z.number().nullish(),
|
||||
});
|
||||
|
||||
export const AiFlyerDataSchema = z.object({
|
||||
store_name: z.string().nullable(),
|
||||
valid_from: z.string().nullable(),
|
||||
valid_to: z.string().nullable(),
|
||||
store_address: z.string().nullable(),
|
||||
items: z.array(ExtractedFlyerItemSchema),
|
||||
});
|
||||
|
||||
export type ValidatedAiDataType = z.infer<typeof AiFlyerDataSchema>;
|
||||
|
||||
/**
|
||||
* This class encapsulates the logic for interacting with the AI service
|
||||
* to extract and validate data from flyer images.
|
||||
*/
|
||||
export class FlyerAiProcessor {
|
||||
constructor(
|
||||
private ai: AIService,
|
||||
private personalizationRepo: PersonalizationRepository,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Validates the raw data from the AI against the Zod schema.
|
||||
*/
|
||||
private _validateAiData(
|
||||
extractedData: unknown,
|
||||
logger: Logger,
|
||||
): ValidatedAiDataType {
|
||||
const validationResult = AiFlyerDataSchema.safeParse(extractedData);
|
||||
if (!validationResult.success) {
|
||||
const errors = validationResult.error.flatten();
|
||||
logger.error({ errors, rawData: extractedData }, 'AI response failed validation.');
|
||||
throw new AiDataValidationError(
|
||||
'AI response validation failed. The returned data structure is incorrect.',
|
||||
errors,
|
||||
extractedData,
|
||||
);
|
||||
}
|
||||
|
||||
logger.info(`AI extracted ${validationResult.data.items.length} items.`);
|
||||
return validationResult.data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls the AI service to extract structured data from the flyer images and validates the response.
|
||||
*/
|
||||
public async extractAndValidateData(
|
||||
imagePaths: { path: string; mimetype: string }[],
|
||||
jobData: FlyerJobData,
|
||||
logger: Logger,
|
||||
): Promise<ValidatedAiDataType> {
|
||||
logger.info(`Starting AI data extraction.`);
|
||||
const { submitterIp, userProfileAddress } = jobData;
|
||||
const masterItems = await this.personalizationRepo.getAllMasterItems(logger);
|
||||
logger.debug(`Retrieved ${masterItems.length} master items for AI matching.`);
|
||||
|
||||
const extractedData = await this.ai.extractCoreDataFromFlyerImage(
|
||||
imagePaths,
|
||||
masterItems,
|
||||
submitterIp,
|
||||
userProfileAddress,
|
||||
logger,
|
||||
);
|
||||
|
||||
return this._validateAiData(extractedData, logger);
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,7 @@ import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
import { logger as mockLogger } from './logger.server';
|
||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||
import type { z } from 'zod';
|
||||
import type { AiFlyerDataSchema } from './flyerProcessingService.server';
|
||||
import type { AiFlyerDataSchema } from './flyerAiProcessor.server';
|
||||
import type { FlyerItemInsert } from '../types';
|
||||
|
||||
// Mock the dependencies
|
||||
@@ -109,9 +109,6 @@ describe('FlyerDataTransformer', () => {
|
||||
view_count: 0,
|
||||
click_count: 0,
|
||||
}),
|
||||
); // Use a more specific type assertion to check for the added property.
|
||||
expect((itemsForDb[0] as FlyerItemInsert & { updated_at: string }).updated_at).toBeTypeOf(
|
||||
'string',
|
||||
);
|
||||
|
||||
// 3. Check that generateFlyerIcon was called correctly
|
||||
@@ -175,4 +172,62 @@ describe('FlyerDataTransformer', () => {
|
||||
uploaded_by: undefined, // Should be undefined
|
||||
});
|
||||
});
|
||||
|
||||
it('should correctly normalize item fields with null, undefined, or empty values', async () => {
|
||||
// Arrange
|
||||
const extractedData: z.infer<typeof AiFlyerDataSchema> = {
|
||||
store_name: 'Test Store',
|
||||
valid_from: '2024-01-01',
|
||||
valid_to: '2024-01-07',
|
||||
store_address: '123 Test St',
|
||||
items: [
|
||||
// Case 1: All fields are null or undefined
|
||||
{
|
||||
item: null,
|
||||
price_display: null,
|
||||
price_in_cents: null,
|
||||
quantity: null,
|
||||
category_name: null,
|
||||
master_item_id: null,
|
||||
},
|
||||
// Case 2: Fields are empty strings
|
||||
{
|
||||
item: ' ', // whitespace only
|
||||
price_display: '',
|
||||
price_in_cents: 200,
|
||||
quantity: '',
|
||||
category_name: '',
|
||||
master_item_id: 20,
|
||||
},
|
||||
],
|
||||
};
|
||||
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
|
||||
|
||||
// Act
|
||||
const { itemsForDb } = await transformer.transform(
|
||||
extractedData,
|
||||
imagePaths,
|
||||
'file.pdf',
|
||||
'checksum',
|
||||
'user-1',
|
||||
mockLogger,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(itemsForDb).toHaveLength(2);
|
||||
|
||||
// Check Case 1 (null/undefined values)
|
||||
expect(itemsForDb[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
item: 'Unknown Item', price_display: '', quantity: '', category_name: 'Other/Miscellaneous', master_item_id: undefined,
|
||||
}),
|
||||
);
|
||||
|
||||
// Check Case 2 (empty string values)
|
||||
expect(itemsForDb[1]).toEqual(
|
||||
expect.objectContaining({
|
||||
item: 'Unknown Item', price_display: '', quantity: '', category_name: 'Other/Miscellaneous', master_item_id: 20,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@ import path from 'path';
|
||||
import type { z } from 'zod';
|
||||
import type { Logger } from 'pino';
|
||||
import type { FlyerInsert, FlyerItemInsert } from '../types';
|
||||
import type { AiFlyerDataSchema } from './flyerProcessingService.server';
|
||||
import type { AiFlyerDataSchema } from './flyerAiProcessor.server';
|
||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||
|
||||
/**
|
||||
@@ -11,6 +11,31 @@ import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||
* into the structured format required for database insertion (FlyerInsert and FlyerItemInsert).
|
||||
*/
|
||||
export class FlyerDataTransformer {
|
||||
/**
|
||||
* Normalizes a single raw item from the AI, providing default values for nullable fields.
|
||||
* @param item The raw item object from the AI.
|
||||
* @returns A normalized item object ready for database insertion.
|
||||
*/
|
||||
private _normalizeItem(
|
||||
item: z.infer<typeof AiFlyerDataSchema>['items'][number],
|
||||
): FlyerItemInsert {
|
||||
return {
|
||||
...item,
|
||||
// Use logical OR to default falsy values (null, undefined, '') to a fallback.
|
||||
// The trim is important for cases where the AI returns only whitespace.
|
||||
item: String(item.item || '').trim() || 'Unknown Item',
|
||||
// Use nullish coalescing to default only null/undefined to an empty string.
|
||||
price_display: String(item.price_display ?? ''),
|
||||
quantity: String(item.quantity ?? ''),
|
||||
// Use logical OR to default falsy category names (null, undefined, '') to a fallback.
|
||||
category_name: String(item.category_name || 'Other/Miscellaneous'),
|
||||
// Use nullish coalescing to convert null to undefined for the database.
|
||||
master_item_id: item.master_item_id ?? undefined,
|
||||
view_count: 0,
|
||||
click_count: 0,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms AI-extracted data into database-ready flyer and item records.
|
||||
* @param extractedData The validated data from the AI.
|
||||
@@ -38,27 +63,7 @@ export class FlyerDataTransformer {
|
||||
logger,
|
||||
);
|
||||
|
||||
const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => ({
|
||||
...item,
|
||||
// Ensure 'item' is always a string, defaulting to 'Unknown Item' if null/undefined/empty.
|
||||
item:
|
||||
item.item === null || item.item === undefined || String(item.item).trim() === ''
|
||||
? 'Unknown Item'
|
||||
: String(item.item),
|
||||
// Ensure 'price_display' is always a string, defaulting to empty if null/undefined.
|
||||
price_display:
|
||||
item.price_display === null || item.price_display === undefined
|
||||
? ''
|
||||
: String(item.price_display),
|
||||
// Ensure 'quantity' is always a string, defaulting to empty if null/undefined.
|
||||
quantity: item.quantity === null || item.quantity === undefined ? '' : String(item.quantity),
|
||||
// Ensure 'category_name' is always a string, defaulting to 'Other/Miscellaneous' if null/undefined.
|
||||
category_name: item.category_name === null || item.category_name === undefined ? 'Other/Miscellaneous' : String(item.category_name),
|
||||
master_item_id: item.master_item_id === null ? undefined : item.master_item_id, // Convert null to undefined
|
||||
view_count: 0,
|
||||
click_count: 0,
|
||||
updated_at: new Date().toISOString(),
|
||||
}));
|
||||
const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => this._normalizeItem(item));
|
||||
|
||||
const storeName = extractedData.store_name || 'Unknown Store (auto)';
|
||||
if (!extractedData.store_name) {
|
||||
|
||||
129
src/services/flyerFileHandler.server.test.ts
Normal file
129
src/services/flyerFileHandler.server.test.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
// src/services/flyerFileHandler.server.test.ts
//
// Unit tests for FlyerFileHandler. The file-system and command-executor
// dependencies are injected as plain mocks, so no real files are read and no
// external binaries (pdftocairo, sharp's native codecs) are ever invoked.
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { Job } from 'bullmq';
import type { Dirent } from 'node:fs';
import sharp from 'sharp';
import { FlyerFileHandler, ICommandExecutor, IFileSystem } from './flyerFileHandler.server';
import { PdfConversionError, UnsupportedFileTypeError } from './processingErrors';
import { logger } from './logger.server';
import type { FlyerJobData } from './flyerProcessingService.server';

// Mock dependencies
// sharp is mocked so no real image encoding happens; the factory always hands
// back one shared instance, letting tests assert on its png()/toFile() calls.
vi.mock('sharp', () => {
  const mockSharpInstance = {
    png: vi.fn().mockReturnThis(),
    toFile: vi.fn().mockResolvedValue({}),
  };
  return {
    __esModule: true,
    default: vi.fn(() => mockSharpInstance),
  };
});

// Silence the real logger; child() returns the same mock so job-scoped child
// loggers behave like the root one.
vi.mock('./logger.server', () => ({
  logger: {
    info: vi.fn(),
    error: vi.fn(),
    warn: vi.fn(),
    debug: vi.fn(),
    child: vi.fn().mockReturnThis(),
  },
}));

// Builds a minimal BullMQ Job with sensible defaults; individual tests
// override only the fields they care about (usually filePath).
const createMockJob = (data: Partial<FlyerJobData>): Job<FlyerJobData> => {
  return {
    id: 'job-1',
    data: {
      filePath: '/tmp/flyer.jpg',
      originalFileName: 'flyer.jpg',
      checksum: 'checksum-123',
      ...data,
    },
    updateProgress: vi.fn(),
  } as unknown as Job<FlyerJobData>;
};

describe('FlyerFileHandler', () => {
  let service: FlyerFileHandler;
  let mockFs: IFileSystem;
  let mockExec: ICommandExecutor;

  beforeEach(() => {
    vi.clearAllMocks();

    // Fresh fs/exec doubles per test: empty directory listing, successful command.
    mockFs = {
      readdir: vi.fn().mockResolvedValue([]),
      unlink: vi.fn(),
    };
    mockExec = vi.fn().mockResolvedValue({ stdout: 'success', stderr: '' });

    service = new FlyerFileHandler(mockFs, mockExec);
  });

  it('should convert a PDF and return image paths', async () => {
    const job = createMockJob({ filePath: '/tmp/flyer.pdf' });
    // Simulate pdftocairo having produced two JPEG pages in the output directory.
    vi.mocked(mockFs.readdir).mockResolvedValue([
      { name: 'flyer-1.jpg' },
      { name: 'flyer-2.jpg' },
    ] as Dirent[]);

    const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
      '/tmp/flyer.pdf',
      job,
      logger,
    );

    expect(mockExec).toHaveBeenCalledWith('pdftocairo -jpeg -r 150 "/tmp/flyer.pdf" "/tmp/flyer"');
    expect(imagePaths).toHaveLength(2);
    expect(imagePaths[0].path).toContain('flyer-1.jpg');
    // Generated pages must be reported so the cleanup job can delete them later.
    expect(createdImagePaths).toHaveLength(2);
  });

  it('should throw PdfConversionError if PDF conversion yields no images', async () => {
    const job = createMockJob({ filePath: '/tmp/flyer.pdf' });
    vi.mocked(mockFs.readdir).mockResolvedValue([]); // No images found

    await expect(service.prepareImageInputs('/tmp/flyer.pdf', job, logger)).rejects.toThrow(
      PdfConversionError,
    );
  });

  it('should handle supported image types directly', async () => {
    const job = createMockJob({ filePath: '/tmp/flyer.jpg' });
    const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
      '/tmp/flyer.jpg',
      job,
      logger,
    );

    // A natively supported format passes through untouched: no shell command,
    // no sharp conversion, and nothing new to clean up.
    expect(imagePaths).toEqual([{ path: '/tmp/flyer.jpg', mimetype: 'image/jpeg' }]);
    expect(createdImagePaths).toEqual([]);
    expect(mockExec).not.toHaveBeenCalled();
    expect(sharp).not.toHaveBeenCalled();
  });

  it('should convert convertible image types to PNG', async () => {
    const job = createMockJob({ filePath: '/tmp/flyer.gif' });
    // The mocked sharp factory always returns the same instance, so calling it
    // here just retrieves that shared instance for the assertions below.
    const mockSharpInstance = sharp('/tmp/flyer.gif');
    vi.mocked(mockSharpInstance.toFile).mockResolvedValue({} as any);

    const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
      '/tmp/flyer.gif',
      job,
      logger,
    );

    expect(sharp).toHaveBeenCalledWith('/tmp/flyer.gif');
    expect(mockSharpInstance.png).toHaveBeenCalled();
    expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-converted.png');
    expect(imagePaths).toEqual([{ path: '/tmp/flyer-converted.png', mimetype: 'image/png' }]);
    // The converted PNG is a temporary artifact and must be flagged for cleanup.
    expect(createdImagePaths).toEqual(['/tmp/flyer-converted.png']);
  });

  it('should throw UnsupportedFileTypeError for unsupported types', async () => {
    const job = createMockJob({ filePath: '/tmp/document.txt' });
    await expect(service.prepareImageInputs('/tmp/document.txt', job, logger)).rejects.toThrow(
      UnsupportedFileTypeError,
    );
  });
});
|
||||
207
src/services/flyerFileHandler.server.ts
Normal file
207
src/services/flyerFileHandler.server.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
// src/services/flyerFileHandler.server.ts
|
||||
import path from 'path';
|
||||
import sharp from 'sharp';
|
||||
import type { Dirent } from 'node:fs';
|
||||
import type { Job } from 'bullmq';
|
||||
import type { Logger } from 'pino';
|
||||
import { PdfConversionError, UnsupportedFileTypeError } from './processingErrors';
|
||||
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||
|
||||
// Define the image formats supported by the AI model
const SUPPORTED_IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.webp', '.heic', '.heif'];
// Define image formats that are not directly supported but can be converted to PNG.
const CONVERTIBLE_IMAGE_EXTENSIONS = ['.gif', '.tiff', '.svg', '.bmp'];

// Minimal file-system surface the handler needs; injected so tests can mock it.
export interface IFileSystem {
  // Must be invoked with withFileTypes: true so Dirent objects come back.
  readdir(path: string, options: { withFileTypes: true }): Promise<Dirent[]>;
  unlink(path: string): Promise<void>;
}

// Shell-command runner (e.g. a promisified child_process.exec); injected for testability.
export interface ICommandExecutor {
  (command: string): Promise<{ stdout: string; stderr: string }>;
}
|
||||
|
||||
/**
|
||||
* This class encapsulates the logic for handling different file types (PDF, images)
|
||||
* and preparing them for AI processing.
|
||||
*/
|
||||
export class FlyerFileHandler {
|
||||
constructor(
|
||||
private fs: IFileSystem,
|
||||
private exec: ICommandExecutor,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Executes the pdftocairo command to convert the PDF.
|
||||
*/
|
||||
private async _executePdfConversion(
|
||||
filePath: string,
|
||||
outputFilePrefix: string,
|
||||
logger: Logger,
|
||||
): Promise<{ stdout: string; stderr: string }> {
|
||||
const command = `pdftocairo -jpeg -r 150 "${filePath}" "${outputFilePrefix}"`;
|
||||
logger.info(`Executing PDF conversion command`);
|
||||
logger.debug({ command });
|
||||
try {
|
||||
const { stdout, stderr } = await this.exec(command);
|
||||
if (stdout) logger.debug({ stdout }, `[Worker] pdftocairo stdout for ${filePath}:`);
|
||||
if (stderr) logger.warn({ stderr }, `[Worker] pdftocairo stderr for ${filePath}:`);
|
||||
return { stdout, stderr };
|
||||
} catch (error) {
|
||||
const execError = error as Error & { stderr?: string };
|
||||
const errorMessage = `The pdftocairo command failed for file: ${filePath}.`;
|
||||
logger.error({ err: execError, stderr: execError.stderr }, errorMessage);
|
||||
throw new PdfConversionError(errorMessage, execError.stderr);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans the output directory for generated JPEG images and returns their paths.
|
||||
*/
|
||||
private async _collectGeneratedImages(
|
||||
outputDir: string,
|
||||
outputFilePrefix: string,
|
||||
logger: Logger,
|
||||
): Promise<string[]> {
|
||||
logger.debug(`[Worker] Reading contents of output directory: ${outputDir}`);
|
||||
const filesInDir = await this.fs.readdir(outputDir, { withFileTypes: true });
|
||||
logger.debug(`[Worker] Found ${filesInDir.length} total entries in output directory.`);
|
||||
|
||||
const generatedImages = filesInDir
|
||||
.filter((f) => f.name.startsWith(path.basename(outputFilePrefix)) && f.name.endsWith('.jpg'))
|
||||
.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true }));
|
||||
|
||||
logger.debug(
|
||||
{ imageNames: generatedImages.map((f) => f.name) },
|
||||
`Filtered down to ${generatedImages.length} generated JPGs.`,
|
||||
);
|
||||
|
||||
return generatedImages.map((img) => path.join(outputDir, img.name));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a PDF file to a series of JPEG images using an external tool.
|
||||
*/
|
||||
private async _convertPdfToImages(
|
||||
filePath: string,
|
||||
job: Job<FlyerJobData>,
|
||||
logger: Logger,
|
||||
): Promise<string[]> {
|
||||
logger.info(`Starting PDF conversion for: ${filePath}`);
|
||||
await job.updateProgress({ message: 'Converting PDF to images...' });
|
||||
|
||||
const outputDir = path.dirname(filePath);
|
||||
const outputFilePrefix = path.join(outputDir, path.basename(filePath, '.pdf'));
|
||||
logger.debug({ outputDir, outputFilePrefix }, `PDF output details`);
|
||||
|
||||
const { stderr } = await this._executePdfConversion(filePath, outputFilePrefix, logger);
|
||||
|
||||
const imagePaths = await this._collectGeneratedImages(outputDir, outputFilePrefix, logger);
|
||||
|
||||
if (imagePaths.length === 0) {
|
||||
const errorMessage = `PDF conversion resulted in 0 images for file: ${filePath}. The PDF might be blank or corrupt.`;
|
||||
logger.error({ stderr }, `PdfConversionError: ${errorMessage}`);
|
||||
throw new PdfConversionError(errorMessage, stderr);
|
||||
}
|
||||
|
||||
return imagePaths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an image file (e.g., GIF, TIFF) to a PNG format that the AI can process.
|
||||
*/
|
||||
private async _convertImageToPng(filePath: string, logger: Logger): Promise<string> {
|
||||
const outputDir = path.dirname(filePath);
|
||||
const originalFileName = path.parse(path.basename(filePath)).name;
|
||||
const newFileName = `${originalFileName}-converted.png`;
|
||||
const outputPath = path.join(outputDir, newFileName);
|
||||
|
||||
logger.info({ from: filePath, to: outputPath }, 'Converting unsupported image format to PNG.');
|
||||
|
||||
try {
|
||||
await sharp(filePath).png().toFile(outputPath);
|
||||
return outputPath;
|
||||
} catch (error) {
|
||||
logger.error({ err: error, filePath }, 'Failed to convert image to PNG using sharp.');
|
||||
throw new Error(`Image conversion to PNG failed for ${path.basename(filePath)}.`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles PDF files by converting them to a series of JPEG images.
|
||||
*/
|
||||
private async _handlePdfInput(
|
||||
filePath: string,
|
||||
job: Job<FlyerJobData>,
|
||||
logger: Logger,
|
||||
): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
|
||||
const createdImagePaths = await this._convertPdfToImages(filePath, job, logger);
|
||||
const imagePaths = createdImagePaths.map((p) => ({ path: p, mimetype: 'image/jpeg' }));
|
||||
logger.info(`Converted PDF to ${imagePaths.length} images.`);
|
||||
return { imagePaths, createdImagePaths };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles image files that are directly supported by the AI.
|
||||
*/
|
||||
private async _handleSupportedImageInput(
|
||||
filePath: string,
|
||||
fileExt: string,
|
||||
logger: Logger,
|
||||
): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
|
||||
logger.info(`Processing as a single image file: ${filePath}`);
|
||||
const mimetype =
|
||||
fileExt === '.jpg' || fileExt === '.jpeg' ? 'image/jpeg' : `image/${fileExt.slice(1)}`;
|
||||
const imagePaths = [{ path: filePath, mimetype }];
|
||||
return { imagePaths, createdImagePaths: [] };
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles image files that need to be converted to PNG before AI processing.
|
||||
*/
|
||||
private async _handleConvertibleImageInput(
|
||||
filePath: string,
|
||||
logger: Logger,
|
||||
): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
|
||||
const createdPngPath = await this._convertImageToPng(filePath, logger);
|
||||
const imagePaths = [{ path: createdPngPath, mimetype: 'image/png' }];
|
||||
const createdImagePaths = [createdPngPath];
|
||||
return { imagePaths, createdImagePaths };
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error for unsupported file types.
|
||||
*/
|
||||
private _handleUnsupportedInput(
|
||||
fileExt: string,
|
||||
originalFileName: string,
|
||||
logger: Logger,
|
||||
): never {
|
||||
const errorMessage = `Unsupported file type: ${fileExt}. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.`;
|
||||
logger.error({ originalFileName, fileExt }, errorMessage);
|
||||
throw new UnsupportedFileTypeError(errorMessage);
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the input images for the AI service. If the input is a PDF, it's converted to images.
|
||||
*/
|
||||
public async prepareImageInputs(
|
||||
filePath: string,
|
||||
job: Job<FlyerJobData>,
|
||||
logger: Logger,
|
||||
): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
|
||||
const fileExt = path.extname(filePath).toLowerCase();
|
||||
|
||||
if (fileExt === '.pdf') {
|
||||
return this._handlePdfInput(filePath, job, logger);
|
||||
}
|
||||
if (SUPPORTED_IMAGE_EXTENSIONS.includes(fileExt)) {
|
||||
return this._handleSupportedImageInput(filePath, fileExt, logger);
|
||||
}
|
||||
if (CONVERTIBLE_IMAGE_EXTENSIONS.includes(fileExt)) {
|
||||
return this._handleConvertibleImageInput(filePath, logger);
|
||||
}
|
||||
|
||||
return this._handleUnsupportedInput(fileExt, job.data.originalFileName, logger);
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,7 @@ import { Job, UnrecoverableError } from 'bullmq';
|
||||
import type { Dirent } from 'node:fs';
|
||||
import type { Logger } from 'pino';
|
||||
import { z } from 'zod';
|
||||
import { AiFlyerDataSchema } from './flyerProcessingService.server';
|
||||
import { AiFlyerDataSchema } from './flyerAiProcessor.server';
|
||||
import type { Flyer, FlyerInsert, FlyerItemInsert } from '../types';
|
||||
import type { CleanupJobData } from './flyerProcessingService.server';
|
||||
export interface FlyerJobData {
|
||||
@@ -36,29 +36,21 @@ vi.mock('node:fs/promises', async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
// Mock sharp for the new image conversion logic
|
||||
const mockSharpInstance = {
|
||||
png: vi.fn(() => mockSharpInstance),
|
||||
toFile: vi.fn().mockResolvedValue({}),
|
||||
};
|
||||
vi.mock('sharp', () => ({
|
||||
__esModule: true,
|
||||
default: vi.fn(() => mockSharpInstance),
|
||||
}));
|
||||
|
||||
// Import service and dependencies (FlyerJobData already imported from types above)
|
||||
import { FlyerProcessingService } from './flyerProcessingService.server';
|
||||
import * as aiService from './aiService.server';
|
||||
import * as db from './db/index.db';
|
||||
import { createFlyerAndItems } from './db/flyer.db';
|
||||
import * as imageProcessor from '../utils/imageProcessor';
|
||||
import { createMockFlyer } from '../tests/utils/mockFactories';
|
||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
import {
|
||||
AiDataValidationError, // This was a duplicate, fixed.
|
||||
AiDataValidationError,
|
||||
PdfConversionError,
|
||||
UnsupportedFileTypeError,
|
||||
} from './processingErrors';
|
||||
import { FlyerFileHandler } from './flyerFileHandler.server';
|
||||
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import type { IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
|
||||
import type { AIService } from './aiService.server';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('./aiService.server', () => ({
|
||||
@@ -73,9 +65,6 @@ vi.mock('./db/index.db', () => ({
|
||||
personalizationRepo: { getAllMasterItems: vi.fn() },
|
||||
adminRepo: { logActivity: vi.fn() },
|
||||
}));
|
||||
vi.mock('../utils/imageProcessor', () => ({
|
||||
generateFlyerIcon: vi.fn().mockResolvedValue('icon-test.webp'),
|
||||
}));
|
||||
vi.mock('./logger.server', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
@@ -85,13 +74,15 @@ vi.mock('./logger.server', () => ({
|
||||
child: vi.fn().mockReturnThis(),
|
||||
},
|
||||
}));
|
||||
vi.mock('./flyerFileHandler.server');
|
||||
vi.mock('./flyerAiProcessor.server');
|
||||
|
||||
const mockedAiService = aiService as Mocked<typeof aiService>;
|
||||
const mockedDb = db as Mocked<typeof db>;
|
||||
const mockedImageProcessor = imageProcessor as Mocked<typeof imageProcessor>;
|
||||
|
||||
describe('FlyerProcessingService', () => {
|
||||
let service: FlyerProcessingService;
|
||||
let mockFileHandler: Mocked<FlyerFileHandler>;
|
||||
let mockAiProcessor: Mocked<FlyerAiProcessor>;
|
||||
const mockCleanupQueue = {
|
||||
add: vi.fn(),
|
||||
};
|
||||
@@ -112,30 +103,35 @@ describe('FlyerProcessingService', () => {
|
||||
itemsForDb: [],
|
||||
});
|
||||
|
||||
// Default mock implementation for the promisified exec
|
||||
mocks.execAsync.mockResolvedValue({ stdout: 'success', stderr: '' });
|
||||
|
||||
// Default mock for readdir returns an empty array of Dirent-like objects.
|
||||
mocks.readdir.mockResolvedValue([]);
|
||||
|
||||
// Mock the file system adapter that will be passed to the service
|
||||
const mockFs = {
|
||||
const mockFs: IFileSystem = {
|
||||
readdir: mocks.readdir,
|
||||
unlink: mocks.unlink,
|
||||
};
|
||||
|
||||
mockFileHandler = new FlyerFileHandler(mockFs, vi.fn()) as Mocked<FlyerFileHandler>;
|
||||
mockAiProcessor = new FlyerAiProcessor(
|
||||
{} as AIService,
|
||||
mockedDb.personalizationRepo,
|
||||
) as Mocked<FlyerAiProcessor>;
|
||||
|
||||
// Instantiate the service with all its dependencies mocked
|
||||
service = new FlyerProcessingService(
|
||||
mockedAiService.aiService,
|
||||
{} as AIService,
|
||||
mockFileHandler,
|
||||
mockAiProcessor,
|
||||
mockedDb,
|
||||
mockFs,
|
||||
mocks.execAsync,
|
||||
vi.fn(),
|
||||
mockCleanupQueue,
|
||||
new FlyerDataTransformer(),
|
||||
);
|
||||
|
||||
// Provide default successful mock implementations for dependencies
|
||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockResolvedValue({
|
||||
mockAiProcessor.extractAndValidateData.mockResolvedValue({
|
||||
store_name: 'Mock Store',
|
||||
valid_from: '2024-01-01',
|
||||
valid_to: '2024-01-07',
|
||||
@@ -151,6 +147,11 @@ describe('FlyerProcessingService', () => {
|
||||
},
|
||||
],
|
||||
});
|
||||
mockFileHandler.prepareImageInputs.mockResolvedValue({
|
||||
imagePaths: [{ path: '/tmp/flyer.jpg', mimetype: 'image/jpeg' }],
|
||||
createdImagePaths: [],
|
||||
});
|
||||
|
||||
vi.mocked(createFlyerAndItems).mockResolvedValue({
|
||||
flyer: createMockFlyer({
|
||||
flyer_id: 1,
|
||||
@@ -160,7 +161,6 @@ describe('FlyerProcessingService', () => {
|
||||
}),
|
||||
items: [],
|
||||
});
|
||||
mockedImageProcessor.generateFlyerIcon.mockResolvedValue('icon-test.jpg');
|
||||
vi.mocked(mockedDb.adminRepo.logActivity).mockResolvedValue();
|
||||
// FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
|
||||
vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
|
||||
@@ -198,10 +198,10 @@ describe('FlyerProcessingService', () => {
|
||||
const result = await service.processJob(job);
|
||||
|
||||
expect(result).toEqual({ flyerId: 1 });
|
||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(1);
|
||||
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(job.data.filePath, job, expect.any(Object));
|
||||
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||
expect(mockedDb.adminRepo.logActivity).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.execAsync).not.toHaveBeenCalled();
|
||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||
'cleanup-flyer-files',
|
||||
{ flyerId: 1, paths: ['/tmp/flyer.jpg'] },
|
||||
@@ -212,29 +212,17 @@ describe('FlyerProcessingService', () => {
|
||||
it('should convert a PDF, process its images, and enqueue a cleanup job for all files', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.pdf', originalFileName: 'flyer.pdf' });
|
||||
|
||||
// Mock readdir to return Dirent-like objects for the converted files
|
||||
mocks.readdir.mockResolvedValue([
|
||||
{ name: 'flyer-1.jpg' },
|
||||
{ name: 'flyer-2.jpg' },
|
||||
] as Dirent[]);
|
||||
// Mock the file handler to return multiple created paths
|
||||
const createdPaths = ['/tmp/flyer-1.jpg', '/tmp/flyer-2.jpg'];
|
||||
mockFileHandler.prepareImageInputs.mockResolvedValue({
|
||||
imagePaths: createdPaths.map(p => ({ path: p, mimetype: 'image/jpeg' })),
|
||||
createdImagePaths: createdPaths,
|
||||
});
|
||||
|
||||
await service.processJob(job);
|
||||
|
||||
// Verify that pdftocairo was called
|
||||
expect(mocks.execAsync).toHaveBeenCalledWith(
|
||||
expect.stringContaining('pdftocairo -jpeg -r 150'),
|
||||
);
|
||||
// Verify AI service was called with the converted images
|
||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ path: expect.stringContaining('flyer-1.jpg') }),
|
||||
expect.objectContaining({ path: expect.stringContaining('flyer-2.jpg') }),
|
||||
]),
|
||||
expect.any(Array),
|
||||
undefined, // submitterIp
|
||||
undefined, // userProfileAddress
|
||||
expect.any(Object), // The job-specific logger
|
||||
);
|
||||
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.pdf', job, expect.any(Object));
|
||||
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||
// Verify cleanup job includes original PDF and both generated images
|
||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||
@@ -243,8 +231,8 @@ describe('FlyerProcessingService', () => {
|
||||
flyerId: 1,
|
||||
paths: [
|
||||
'/tmp/flyer.pdf',
|
||||
expect.stringContaining('flyer-1.jpg'),
|
||||
expect.stringContaining('flyer-2.jpg'),
|
||||
'/tmp/flyer-1.jpg',
|
||||
'/tmp/flyer-2.jpg',
|
||||
],
|
||||
},
|
||||
expect.any(Object),
|
||||
@@ -255,7 +243,7 @@ describe('FlyerProcessingService', () => {
|
||||
const job = createMockJob({});
|
||||
const { logger } = await import('./logger.server');
|
||||
const aiError = new Error('AI model exploded');
|
||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockRejectedValue(aiError);
|
||||
mockAiProcessor.extractAndValidateData.mockRejectedValue(aiError);
|
||||
|
||||
await expect(service.processJob(job)).rejects.toThrow('AI model exploded');
|
||||
|
||||
@@ -274,9 +262,7 @@ describe('FlyerProcessingService', () => {
|
||||
// Simulate an AI error that contains a keyword for unrecoverable errors
|
||||
const quotaError = new Error('AI model quota exceeded');
|
||||
const { logger } = await import('./logger.server');
|
||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockRejectedValue(
|
||||
quotaError,
|
||||
);
|
||||
mockAiProcessor.extractAndValidateData.mockRejectedValue(quotaError);
|
||||
|
||||
await expect(service.processJob(job)).rejects.toThrow(UnrecoverableError);
|
||||
|
||||
@@ -294,8 +280,7 @@ describe('FlyerProcessingService', () => {
|
||||
const job = createMockJob({ filePath: '/tmp/bad.pdf', originalFileName: 'bad.pdf' });
|
||||
const { logger } = await import('./logger.server');
|
||||
const conversionError = new PdfConversionError('Conversion failed', 'pdftocairo error');
|
||||
// Make the conversion step fail
|
||||
mocks.execAsync.mockRejectedValue(conversionError);
|
||||
mockFileHandler.prepareImageInputs.mockRejectedValue(conversionError);
|
||||
|
||||
await expect(service.processJob(job)).rejects.toThrow(conversionError);
|
||||
|
||||
@@ -314,10 +299,7 @@ describe('FlyerProcessingService', () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({});
|
||||
const validationError = new AiDataValidationError('Validation failed', {}, {});
|
||||
// Make the AI extraction step fail with a validation error
|
||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockRejectedValue(
|
||||
validationError,
|
||||
);
|
||||
mockAiProcessor.extractAndValidateData.mockRejectedValue(validationError);
|
||||
|
||||
await expect(service.processJob(job)).rejects.toThrow(validationError);
|
||||
|
||||
@@ -337,63 +319,23 @@ describe('FlyerProcessingService', () => {
|
||||
);
|
||||
});
|
||||
|
||||
// FIX: This test was incorrect. The service *does* support GIF conversion.
|
||||
// It is now a success case, verifying that conversion works as intended.
|
||||
it('should convert a GIF image to PNG and then process it', async () => {
|
||||
console.log('\n--- [TEST LOG] ---: Starting GIF conversion success test...');
|
||||
it('should handle convertible image types and include original and converted files in cleanup', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.gif', originalFileName: 'flyer.gif' });
|
||||
const convertedPath = '/tmp/flyer-converted.png';
|
||||
|
||||
// Mock the file handler to return the converted path
|
||||
mockFileHandler.prepareImageInputs.mockResolvedValue({
|
||||
imagePaths: [{ path: convertedPath, mimetype: 'image/png' }],
|
||||
createdImagePaths: [convertedPath],
|
||||
});
|
||||
|
||||
await service.processJob(job);
|
||||
|
||||
console.log('--- [TEST LOG] ---: Verifying sharp conversion for GIF...');
|
||||
expect(sharp).toHaveBeenCalledWith('/tmp/flyer.gif');
|
||||
expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-converted.png');
|
||||
|
||||
console.log('--- [TEST LOG] ---: Verifying AI service call and cleanup for GIF...');
|
||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(
|
||||
[{ path: '/tmp/flyer-converted.png', mimetype: 'image/png' }],
|
||||
[],
|
||||
undefined,
|
||||
undefined,
|
||||
expect.any(Object),
|
||||
);
|
||||
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.gif', job, expect.any(Object));
|
||||
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||
'cleanup-flyer-files',
|
||||
{ flyerId: 1, paths: ['/tmp/flyer.gif', '/tmp/flyer-converted.png'] },
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
|
||||
it('should convert a TIFF image to PNG and then process it', async () => {
|
||||
console.log('\n--- [TEST LOG] ---: Starting TIFF conversion success test...');
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.tiff', originalFileName: 'flyer.tiff' });
|
||||
|
||||
await service.processJob(job);
|
||||
|
||||
expect(sharp).toHaveBeenCalledWith('/tmp/flyer.tiff');
|
||||
expect(mockSharpInstance.png).toHaveBeenCalled();
|
||||
expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-converted.png');
|
||||
|
||||
console.log('--- [DEBUG] ---: In TIFF test, logging actual AI call arguments:');
|
||||
console.log(
|
||||
JSON.stringify(
|
||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mock.calls[0],
|
||||
null,
|
||||
2,
|
||||
),
|
||||
);
|
||||
|
||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(
|
||||
[{ path: '/tmp/flyer-converted.png', mimetype: 'image/png' }], // masterItems is mocked to []
|
||||
[], // submitterIp is undefined in the mock job
|
||||
undefined, // userProfileAddress is undefined in the mock job
|
||||
undefined, // The job-specific logger
|
||||
expect.any(Object),
|
||||
);
|
||||
|
||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||
'cleanup-flyer-files',
|
||||
{ flyerId: 1, paths: ['/tmp/flyer.tiff', '/tmp/flyer-converted.png'] },
|
||||
{ flyerId: 1, paths: ['/tmp/flyer.gif', convertedPath] },
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
@@ -421,13 +363,14 @@ describe('FlyerProcessingService', () => {
|
||||
filePath: '/tmp/document.txt',
|
||||
originalFileName: 'document.txt',
|
||||
});
|
||||
const fileTypeError = new UnsupportedFileTypeError('Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.');
|
||||
mockFileHandler.prepareImageInputs.mockRejectedValue(fileTypeError);
|
||||
const { logger } = await import('./logger.server');
|
||||
|
||||
await expect(service.processJob(job)).rejects.toThrow(UnsupportedFileTypeError);
|
||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||
errorCode: 'UNSUPPORTED_FILE_TYPE',
|
||||
message:
|
||||
'Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.', // This was a duplicate, fixed.
|
||||
message: 'Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.',
|
||||
});
|
||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
@@ -457,327 +400,14 @@ describe('FlyerProcessingService', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('_prepareImageInputs (private method)', () => {
|
||||
it('should throw UnsupportedFileTypeError for an unsupported file type', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({
|
||||
filePath: '/tmp/unsupported.doc',
|
||||
originalFileName: 'unsupported.doc',
|
||||
});
|
||||
const privateMethod = (service as any)._prepareImageInputs;
|
||||
|
||||
await expect(privateMethod('/tmp/unsupported.doc', job, logger)).rejects.toThrow(
|
||||
UnsupportedFileTypeError,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_convertImageToPng (private method)', () => {
|
||||
it('should throw an error if sharp fails', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const sharpError = new Error('Sharp failed');
|
||||
vi.mocked(mockSharpInstance.toFile).mockRejectedValue(sharpError);
|
||||
const privateMethod = (service as any)._convertImageToPng;
|
||||
|
||||
await expect(privateMethod('/tmp/image.gif', logger)).rejects.toThrow(
|
||||
'Image conversion to PNG failed for image.gif',
|
||||
);
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
{ err: sharpError, filePath: '/tmp/image.gif' },
|
||||
'Failed to convert image to PNG using sharp.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_extractFlyerDataWithAI (private method)', () => {
|
||||
it('should call AI service and return validated data on success', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const jobData = createMockJob({}).data;
|
||||
const mockAiResponse = {
|
||||
store_name: 'AI Store',
|
||||
valid_from: '2024-01-01',
|
||||
valid_to: '2024-01-07',
|
||||
store_address: '123 AI St',
|
||||
items: [],
|
||||
};
|
||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockResolvedValue(
|
||||
mockAiResponse,
|
||||
);
|
||||
|
||||
const result = await (service as any)._extractFlyerDataWithAI([], jobData, logger);
|
||||
|
||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(1);
|
||||
expect(result).toEqual(mockAiResponse);
|
||||
});
|
||||
|
||||
it('should throw AiDataValidationError if AI response validation fails', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const jobData = createMockJob({}).data;
|
||||
// Mock AI to return data missing a required field ('store_name')
|
||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockResolvedValue({
|
||||
valid_from: '2024-01-01',
|
||||
items: [],
|
||||
} as any);
|
||||
|
||||
await expect((service as any)._extractFlyerDataWithAI([], jobData, logger)).rejects.toThrow(
|
||||
AiDataValidationError,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_validateAiData (private method)', () => {
|
||||
it('should return validated data on success', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const validData = {
|
||||
store_name: 'Test Store',
|
||||
valid_from: '2024-01-01',
|
||||
valid_to: '2024-01-07',
|
||||
store_address: '123 Test St',
|
||||
items: [
|
||||
{ item: 'Apple', price_display: '$1', price_in_cents: 100, quantity: '1', category_name: 'Produce' },
|
||||
],
|
||||
};
|
||||
|
||||
const result = (service as any)._validateAiData(validData, logger);
|
||||
|
||||
expect(result).toEqual(validData);
|
||||
expect(logger.info).toHaveBeenCalledWith('AI extracted 1 items.');
|
||||
});
|
||||
|
||||
it('should throw AiDataValidationError on invalid data', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const invalidData = {
|
||||
// store_name is missing, which will fail validation
|
||||
items: [],
|
||||
};
|
||||
|
||||
expect(() => (service as any)._validateAiData(invalidData, logger)).toThrow(
|
||||
AiDataValidationError,
|
||||
);
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
errors: expect.any(Object),
|
||||
rawData: invalidData,
|
||||
}),
|
||||
'AI response failed validation.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_enqueueCleanup (private method)', () => {
|
||||
it('should enqueue a cleanup job with the correct parameters', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const flyerId = 42;
|
||||
const paths = ['/tmp/file1.jpg', '/tmp/file2.pdf'];
|
||||
|
||||
// Access and call the private method for testing
|
||||
await (
|
||||
service as unknown as {
|
||||
_enqueueCleanup: (flyerId: number, paths: string[], logger: Logger) => Promise<void>;
|
||||
}
|
||||
)._enqueueCleanup(flyerId, paths, logger);
|
||||
|
||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||
'cleanup-flyer-files',
|
||||
{ flyerId, paths },
|
||||
{ jobId: `cleanup-flyer-${flyerId}`, removeOnComplete: true },
|
||||
);
|
||||
});
|
||||
|
||||
it('should not call the queue if the paths array is empty', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
// Access and call the private method with an empty array
|
||||
await (
|
||||
service as unknown as {
|
||||
_enqueueCleanup: (flyerId: number, paths: string[], logger: Logger) => Promise<void>;
|
||||
}
|
||||
)._enqueueCleanup(123, [], logger);
|
||||
|
||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('_saveProcessedFlyerData (private method)', () => {
|
||||
it('should save flyer to DB and log activity', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
// Arrange
|
||||
const mockFlyerData: FlyerInsert = {
|
||||
file_name: 'flyer.jpg',
|
||||
image_url: '/flyer-images/flyer.jpg',
|
||||
icon_url: '/flyer-images/icons/icon-flyer.webp',
|
||||
checksum: 'checksum-123',
|
||||
store_name: 'Test Store',
|
||||
item_count: 1,
|
||||
valid_from: null,
|
||||
valid_to: null,
|
||||
store_address: null,
|
||||
uploaded_by: 'user-abc',
|
||||
};
|
||||
const mockItemsForDb: FlyerItemInsert[] = [
|
||||
{
|
||||
item: 'Test Item',
|
||||
price_display: '$1.99',
|
||||
price_in_cents: 199,
|
||||
quantity: 'each',
|
||||
category_name: 'Test Category',
|
||||
master_item_id: 1,
|
||||
view_count: 0,
|
||||
click_count: 0,
|
||||
},
|
||||
];
|
||||
const userId = 'user-abc';
|
||||
|
||||
// The DB create function is also mocked in beforeEach.
|
||||
// Create a complete mock that satisfies the Flyer type.
|
||||
const mockNewFlyer = createMockFlyer({
|
||||
flyer_id: 1,
|
||||
store: { name: 'Test Store' }, // Match the logActivity expectation
|
||||
});
|
||||
vi.mocked(createFlyerAndItems).mockResolvedValue({ flyer: mockNewFlyer, items: [] });
|
||||
|
||||
// Act: Access and call the private method for testing
|
||||
const result = await (
|
||||
service as unknown as {
|
||||
_saveProcessedFlyerData: (
|
||||
flyerData: FlyerInsert,
|
||||
itemsForDb: FlyerItemInsert[],
|
||||
userId: string | undefined,
|
||||
logger: Logger,
|
||||
) => Promise<Flyer>;
|
||||
}
|
||||
)._saveProcessedFlyerData(mockFlyerData, mockItemsForDb, userId, logger);
|
||||
|
||||
// Assert
|
||||
// 1. Transformer should NOT be called from this method anymore.
|
||||
expect(FlyerDataTransformer.prototype.transform).not.toHaveBeenCalled();
|
||||
|
||||
// 2. DB function was called with the transformed data
|
||||
expect(createFlyerAndItems).toHaveBeenCalledWith(
|
||||
mockFlyerData,
|
||||
mockItemsForDb,
|
||||
logger,
|
||||
);
|
||||
|
||||
// 3. Activity was logged with all expected fields
|
||||
expect(mockedDb.adminRepo.logActivity).toHaveBeenCalledWith(
|
||||
{
|
||||
userId: 'user-abc',
|
||||
action: 'flyer_processed' as const,
|
||||
displayText: 'Processed a new flyer for Test Store.',
|
||||
details: { flyerId: 1, storeName: 'Test Store' },
|
||||
},
|
||||
logger,
|
||||
);
|
||||
|
||||
// 4. The method returned the new flyer
|
||||
expect(result).toEqual(mockNewFlyer);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_logFlyerProcessedActivity (private method)', () => {
|
||||
it('should log the flyer processing activity with a user ID', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const mockFlyer = createMockFlyer({ flyer_id: 1, store: { name: 'Test Store' } });
|
||||
const userId = 'user-123';
|
||||
|
||||
// Access and call the private method for testing
|
||||
await (service as any)._logFlyerProcessedActivity(mockFlyer, userId, logger);
|
||||
|
||||
expect(mockedDb.adminRepo.logActivity).toHaveBeenCalledWith(
|
||||
{
|
||||
userId: 'user-123',
|
||||
action: 'flyer_processed',
|
||||
displayText: 'Processed a new flyer for Test Store.',
|
||||
details: { flyerId: 1, storeName: 'Test Store' },
|
||||
},
|
||||
logger,
|
||||
);
|
||||
});
|
||||
|
||||
it('should log the activity without a user ID for anonymous uploads', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const mockFlyer = createMockFlyer({ flyer_id: 2, store: { name: 'Another Store' } });
|
||||
|
||||
// Call with undefined userId
|
||||
await (service as any)._logFlyerProcessedActivity(mockFlyer, undefined, logger);
|
||||
|
||||
expect(mockedDb.adminRepo.logActivity).toHaveBeenCalledWith(
|
||||
{
|
||||
userId: undefined,
|
||||
action: 'flyer_processed',
|
||||
displayText: 'Processed a new flyer for Another Store.',
|
||||
details: { flyerId: 2, storeName: 'Another Store' },
|
||||
},
|
||||
logger,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_convertPdfToImages (private method)', () => {
|
||||
it('should call pdftocairo and return sorted image paths on success', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({ filePath: '/tmp/test.pdf' });
|
||||
// Mock readdir to return unsorted Dirent-like objects
|
||||
mocks.readdir.mockResolvedValue([
|
||||
{ name: 'test-10.jpg' },
|
||||
{ name: 'test-1.jpg' },
|
||||
{ name: 'test-2.jpg' },
|
||||
{ name: 'other-file.txt' },
|
||||
] as Dirent[]);
|
||||
|
||||
// Access and call the private method for testing
|
||||
const imagePaths = await (
|
||||
service as unknown as {
|
||||
_convertPdfToImages: (filePath: string, job: Job, logger: Logger) => Promise<string[]>;
|
||||
}
|
||||
)._convertPdfToImages('/tmp/test.pdf', job, logger);
|
||||
|
||||
expect(mocks.execAsync).toHaveBeenCalledWith(
|
||||
'pdftocairo -jpeg -r 150 "/tmp/test.pdf" "/tmp/test"',
|
||||
);
|
||||
expect(job.updateProgress).toHaveBeenCalledWith({ message: 'Converting PDF to images...' });
|
||||
// Verify that the paths are correctly sorted numerically
|
||||
expect(imagePaths).toEqual(['/tmp/test-1.jpg', '/tmp/test-2.jpg', '/tmp/test-10.jpg']);
|
||||
});
|
||||
|
||||
it('should throw PdfConversionError if no images are generated', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({ filePath: '/tmp/empty.pdf' });
|
||||
// Mock readdir to return no matching files
|
||||
mocks.readdir.mockResolvedValue([]);
|
||||
|
||||
await expect(
|
||||
(
|
||||
service as unknown as {
|
||||
_convertPdfToImages: (filePath: string, job: Job, logger: Logger) => Promise<string[]>;
|
||||
}
|
||||
)._convertPdfToImages('/tmp/empty.pdf', job, logger),
|
||||
).rejects.toThrow('PDF conversion resulted in 0 images for file: /tmp/empty.pdf');
|
||||
});
|
||||
|
||||
it('should re-throw an error if the exec command fails', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({ filePath: '/tmp/bad.pdf' });
|
||||
const commandError = new Error('pdftocairo not found');
|
||||
mocks.execAsync.mockRejectedValue(commandError);
|
||||
|
||||
await expect(
|
||||
(
|
||||
service as unknown as {
|
||||
_convertPdfToImages: (filePath: string, job: Job, logger: Logger) => Promise<string[]>;
|
||||
}
|
||||
)._convertPdfToImages('/tmp/bad.pdf', job, logger),
|
||||
).rejects.toThrow(commandError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_reportErrorAndThrow (private method)', () => {
|
||||
it('should update progress and throw UnrecoverableError for quota messages', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({});
|
||||
const quotaError = new Error('RESOURCE_EXHAUSTED');
|
||||
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||
|
||||
await expect(privateMethod(quotaError, job, {} as Logger)).rejects.toThrow(
|
||||
await expect(privateMethod(quotaError, job, logger)).rejects.toThrow(
|
||||
UnrecoverableError,
|
||||
);
|
||||
|
||||
@@ -787,12 +417,37 @@ describe('FlyerProcessingService', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should use toErrorPayload for FlyerProcessingError instances', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({});
|
||||
const validationError = new AiDataValidationError(
|
||||
'Validation failed',
|
||||
{ foo: 'bar' },
|
||||
{ raw: 'data' },
|
||||
);
|
||||
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||
|
||||
await expect(privateMethod(validationError, job, logger)).rejects.toThrow(
|
||||
validationError,
|
||||
);
|
||||
|
||||
// The payload should now come from the error's `toErrorPayload` method
|
||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||
errorCode: 'AI_VALIDATION_FAILED',
|
||||
message:
|
||||
"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
|
||||
validationErrors: { foo: 'bar' },
|
||||
rawData: { raw: 'data' },
|
||||
});
|
||||
});
|
||||
|
||||
it('should update progress and re-throw standard errors', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({});
|
||||
const genericError = new Error('A standard failure');
|
||||
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||
|
||||
await expect(privateMethod(genericError, job, {} as Logger)).rejects.toThrow(genericError);
|
||||
await expect(privateMethod(genericError, job, logger)).rejects.toThrow(genericError);
|
||||
|
||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||
errorCode: 'UNKNOWN_ERROR',
|
||||
@@ -801,11 +456,12 @@ describe('FlyerProcessingService', () => {
|
||||
});
|
||||
|
||||
it('should wrap and throw non-Error objects', async () => {
|
||||
const { logger } = await import('./logger.server');
|
||||
const job = createMockJob({});
|
||||
const nonError = 'just a string error';
|
||||
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||
|
||||
await expect(privateMethod(nonError, job, {} as Logger)).rejects.toThrow('just a string error');
|
||||
await expect(privateMethod(nonError, job, logger)).rejects.toThrow('just a string error');
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,44 +1,25 @@
|
||||
// src/services/flyerProcessingService.server.ts
|
||||
import { Job, JobsOptions, UnrecoverableError } from 'bullmq';
|
||||
import sharp from 'sharp';
|
||||
import path from 'path';
|
||||
import type { Dirent } from 'node:fs';
|
||||
import { z } from 'zod';
|
||||
|
||||
import type { AIService } from './aiService.server';
|
||||
import * as db from './db/index.db';
|
||||
import { createFlyerAndItems } from './db/flyer.db';
|
||||
import {
|
||||
PdfConversionError,
|
||||
AiDataValidationError,
|
||||
UnsupportedFileTypeError,
|
||||
FlyerProcessingError,
|
||||
PdfConversionError,
|
||||
} from './processingErrors';
|
||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
import { logger as globalLogger } from './logger.server';
|
||||
import type { Logger } from 'pino';
|
||||
import type { Flyer, FlyerInsert, FlyerItemInsert } from '../types';
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// Define the image formats supported by the AI model
|
||||
const SUPPORTED_IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.webp', '.heic', '.heif'];
|
||||
|
||||
// Define image formats that are not directly supported but can be converted to PNG.
|
||||
const CONVERTIBLE_IMAGE_EXTENSIONS = ['.gif', '.tiff', '.svg', '.bmp'];
|
||||
import { FlyerFileHandler, ICommandExecutor, IFileSystem } from './flyerFileHandler.server';
|
||||
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
|
||||
// --- Start: Interfaces for Dependency Injection ---
|
||||
|
||||
export interface IFileSystem {
|
||||
readdir(path: string, options: { withFileTypes: true }): Promise<Dirent[]>;
|
||||
unlink(path: string): Promise<void>;
|
||||
}
|
||||
|
||||
export interface ICommandExecutor {
|
||||
(command: string): Promise<{ stdout: string; stderr: string }>;
|
||||
}
|
||||
|
||||
export interface FlyerJobData {
|
||||
filePath: string;
|
||||
originalFileName: string;
|
||||
@@ -62,24 +43,6 @@ interface ICleanupQueue {
|
||||
add(name: string, data: CleanupJobData, opts?: JobsOptions): Promise<Job<CleanupJobData>>;
|
||||
}
|
||||
|
||||
// --- Zod Schemas for AI Response Validation (exported for the transformer) ---
|
||||
const ExtractedFlyerItemSchema = z.object({
|
||||
item: z.string().nullable(), // AI might return null or empty, normalize later
|
||||
price_display: z.string().nullable(), // AI might return null or empty, normalize later
|
||||
price_in_cents: z.number().nullable(),
|
||||
quantity: z.string().nullable(), // AI might return null or empty, normalize later
|
||||
category_name: z.string().nullable(), // AI might return null or empty, normalize later
|
||||
master_item_id: z.number().nullish(), // .nullish() allows null or undefined
|
||||
});
|
||||
|
||||
export const AiFlyerDataSchema = z.object({
|
||||
store_name: z.string().nullable(), // AI might return null or empty, normalize later
|
||||
valid_from: z.string().nullable(),
|
||||
valid_to: z.string().nullable(),
|
||||
store_address: z.string().nullable(),
|
||||
items: z.array(ExtractedFlyerItemSchema),
|
||||
});
|
||||
|
||||
/**
|
||||
* This class encapsulates the business logic for processing a flyer from a file.
|
||||
* It handles PDF conversion, AI data extraction, and saving the results to the database.
|
||||
@@ -87,6 +50,8 @@ export const AiFlyerDataSchema = z.object({
|
||||
export class FlyerProcessingService {
|
||||
constructor(
|
||||
private ai: AIService,
|
||||
private fileHandler: FlyerFileHandler,
|
||||
private aiProcessor: FlyerAiProcessor,
|
||||
private database: typeof db,
|
||||
private fs: IFileSystem,
|
||||
private exec: ICommandExecutor,
|
||||
@@ -94,199 +59,6 @@ export class FlyerProcessingService {
|
||||
private transformer: FlyerDataTransformer,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Executes the pdftocairo command to convert the PDF.
|
||||
*/
|
||||
private async _executePdfConversion(
|
||||
filePath: string,
|
||||
outputFilePrefix: string,
|
||||
logger: Logger,
|
||||
): Promise<{ stdout: string; stderr: string }> {
|
||||
const command = `pdftocairo -jpeg -r 150 "${filePath}" "${outputFilePrefix}"`;
|
||||
logger.info(`Executing PDF conversion command`);
|
||||
logger.debug({ command });
|
||||
const { stdout, stderr } = await this.exec(command);
|
||||
|
||||
if (stdout) logger.debug({ stdout }, `[Worker] pdftocairo stdout for ${filePath}:`);
|
||||
if (stderr) logger.warn({ stderr }, `[Worker] pdftocairo stderr for ${filePath}:`);
|
||||
|
||||
return { stdout, stderr };
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans the output directory for generated JPEG images and returns their paths.
|
||||
*/
|
||||
private async _collectGeneratedImages(
|
||||
outputDir: string,
|
||||
outputFilePrefix: string,
|
||||
logger: Logger,
|
||||
): Promise<string[]> {
|
||||
logger.debug(`[Worker] Reading contents of output directory: ${outputDir}`);
|
||||
const filesInDir = await this.fs.readdir(outputDir, { withFileTypes: true });
|
||||
logger.debug(`[Worker] Found ${filesInDir.length} total entries in output directory.`);
|
||||
|
||||
const generatedImages = filesInDir
|
||||
.filter((f) => f.name.startsWith(path.basename(outputFilePrefix)) && f.name.endsWith('.jpg'))
|
||||
.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true }));
|
||||
|
||||
logger.debug(
|
||||
{ imageNames: generatedImages.map((f) => f.name) },
|
||||
`Filtered down to ${generatedImages.length} generated JPGs.`,
|
||||
);
|
||||
|
||||
return generatedImages.map((img) => path.join(outputDir, img.name));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a PDF file to a series of JPEG images using an external tool.
|
||||
* @param filePath The path to the PDF file.
|
||||
* @param job The BullMQ job instance for progress updates.
|
||||
* @returns A promise that resolves to an array of paths to the created image files.
|
||||
*/
|
||||
private async _convertPdfToImages(
|
||||
filePath: string,
|
||||
job: Job<FlyerJobData>,
|
||||
logger: Logger,
|
||||
): Promise<string[]> {
|
||||
logger.info(`Starting PDF conversion for: ${filePath}`);
|
||||
await job.updateProgress({ message: 'Converting PDF to images...' });
|
||||
|
||||
const outputDir = path.dirname(filePath);
|
||||
const outputFilePrefix = path.join(outputDir, path.basename(filePath, '.pdf'));
|
||||
logger.debug({ outputDir, outputFilePrefix }, `PDF output details`);
|
||||
|
||||
const { stderr } = await this._executePdfConversion(filePath, outputFilePrefix, logger);
|
||||
|
||||
const imagePaths = await this._collectGeneratedImages(outputDir, outputFilePrefix, logger);
|
||||
|
||||
if (imagePaths.length === 0) {
|
||||
const errorMessage = `PDF conversion resulted in 0 images for file: ${filePath}. The PDF might be blank or corrupt.`;
|
||||
logger.error({ stderr }, `PdfConversionError: ${errorMessage}`);
|
||||
throw new PdfConversionError(errorMessage, stderr);
|
||||
}
|
||||
|
||||
return imagePaths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an image file (e.g., GIF, TIFF) to a PNG format that the AI can process.
|
||||
* @param filePath The path to the source image file.
|
||||
* @param logger A logger instance.
|
||||
* @returns The path to the newly created PNG file.
|
||||
*/
|
||||
private async _convertImageToPng(filePath: string, logger: Logger): Promise<string> {
|
||||
const outputDir = path.dirname(filePath);
|
||||
const originalFileName = path.parse(path.basename(filePath)).name;
|
||||
const newFileName = `${originalFileName}-converted.png`;
|
||||
const outputPath = path.join(outputDir, newFileName);
|
||||
|
||||
logger.info({ from: filePath, to: outputPath }, 'Converting unsupported image format to PNG.');
|
||||
|
||||
try {
|
||||
await sharp(filePath).png().toFile(outputPath);
|
||||
return outputPath;
|
||||
} catch (error) {
|
||||
logger.error({ err: error, filePath }, 'Failed to convert image to PNG using sharp.');
|
||||
throw new Error(`Image conversion to PNG failed for ${path.basename(filePath)}.`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Prepares the input images for the AI service. If the input is a PDF, it's converted to images.
|
||||
* @param filePath The path to the original uploaded file.
|
||||
* @param job The BullMQ job instance.
|
||||
* @returns An object containing the final image paths for the AI and a list of any newly created image files.
|
||||
*/
|
||||
private async _prepareImageInputs(
|
||||
filePath: string,
|
||||
job: Job<FlyerJobData>,
|
||||
logger: Logger,
|
||||
): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
|
||||
const fileExt = path.extname(filePath).toLowerCase();
|
||||
let imagePaths: { path: string; mimetype: string }[] = [];
|
||||
let createdImagePaths: string[] = [];
|
||||
|
||||
// Handle PDF conversion separately
|
||||
if (fileExt === '.pdf') {
|
||||
createdImagePaths = await this._convertPdfToImages(filePath, job, logger);
|
||||
imagePaths = createdImagePaths.map((p) => ({ path: p, mimetype: 'image/jpeg' }));
|
||||
logger.info(`Converted PDF to ${imagePaths.length} images.`);
|
||||
// Handle directly supported single-image formats
|
||||
} else if (SUPPORTED_IMAGE_EXTENSIONS.includes(fileExt)) {
|
||||
logger.info(`Processing as a single image file: ${filePath}`);
|
||||
// Normalize .jpg to image/jpeg for consistency
|
||||
const mimetype =
|
||||
fileExt === '.jpg' || fileExt === '.jpeg' ? 'image/jpeg' : `image/${fileExt.slice(1)}`;
|
||||
imagePaths = [{ path: filePath, mimetype }];
|
||||
// No new images created, so createdImagePaths remains empty.
|
||||
// Handle convertible image formats
|
||||
} else if (CONVERTIBLE_IMAGE_EXTENSIONS.includes(fileExt)) {
|
||||
const createdPngPath = await this._convertImageToPng(filePath, logger);
|
||||
imagePaths = [{ path: createdPngPath, mimetype: 'image/png' }];
|
||||
// The new PNG is a temporary file that needs to be cleaned up.
|
||||
createdImagePaths = [createdPngPath];
|
||||
} else {
|
||||
// If the file is neither a PDF nor a supported image, throw an error.
|
||||
const errorMessage = `Unsupported file type: ${fileExt}. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.`;
|
||||
logger.error({ originalFileName: job.data.originalFileName, fileExt }, errorMessage);
|
||||
throw new UnsupportedFileTypeError(errorMessage);
|
||||
}
|
||||
|
||||
return { imagePaths, createdImagePaths };
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates the raw data from the AI against the Zod schema.
|
||||
* @param extractedData The raw, unknown data from the AI service.
|
||||
* @param logger The job-specific logger instance.
|
||||
* @returns The validated and typed data.
|
||||
* @throws {AiDataValidationError} If the data does not conform to the schema.
|
||||
*/
|
||||
private _validateAiData(
|
||||
extractedData: unknown,
|
||||
logger: Logger,
|
||||
): z.infer<typeof AiFlyerDataSchema> {
|
||||
const validationResult = AiFlyerDataSchema.safeParse(extractedData);
|
||||
if (!validationResult.success) {
|
||||
const errors = validationResult.error.flatten();
|
||||
logger.error({ errors, rawData: extractedData }, 'AI response failed validation.');
|
||||
throw new AiDataValidationError(
|
||||
'AI response validation failed. The returned data structure is incorrect.',
|
||||
errors,
|
||||
extractedData,
|
||||
);
|
||||
}
|
||||
|
||||
logger.info(`AI extracted ${validationResult.data.items.length} items.`);
|
||||
return validationResult.data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls the AI service to extract structured data from the flyer images.
|
||||
* @param imagePaths An array of paths and mimetypes for the images.
|
||||
* @param jobData The data from the BullMQ job.
|
||||
* @returns A promise that resolves to the validated, structured flyer data.
|
||||
*/
|
||||
private async _extractFlyerDataWithAI(
|
||||
imagePaths: { path: string; mimetype: string }[],
|
||||
jobData: FlyerJobData,
|
||||
logger: Logger,
|
||||
): Promise<z.infer<typeof AiFlyerDataSchema>> {
|
||||
logger.info(`Starting AI data extraction.`);
|
||||
const { submitterIp, userProfileAddress } = jobData;
|
||||
const masterItems = await this.database.personalizationRepo.getAllMasterItems(logger);
|
||||
logger.debug(`Retrieved ${masterItems.length} master items for AI matching.`);
|
||||
|
||||
const extractedData = await this.ai.extractCoreDataFromFlyerImage(
|
||||
imagePaths,
|
||||
masterItems,
|
||||
submitterIp, // Pass the job-specific logger
|
||||
userProfileAddress, // Pass the job-specific logger
|
||||
logger,
|
||||
);
|
||||
|
||||
return this._validateAiData(extractedData, logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves the extracted flyer data to the database.
|
||||
* @param extractedData The structured data from the AI.
|
||||
@@ -369,13 +141,13 @@ export class FlyerProcessingService {
|
||||
logger: Logger,
|
||||
): Promise<never> {
|
||||
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||
const errorMessage = wrappedError.message || '';
|
||||
const errorMessage = wrappedError.message || 'An unknown error occurred.';
|
||||
|
||||
// First, check for unrecoverable quota-related errors.
|
||||
if (
|
||||
errorMessage.includes('quota') ||
|
||||
errorMessage.includes('429') ||
|
||||
errorMessage.includes('RESOURCE_EXHAUSTED')
|
||||
errorMessage.toLowerCase().includes('resource_exhausted')
|
||||
) {
|
||||
logger.error(
|
||||
{ err: wrappedError, jobId: job.id },
|
||||
@@ -389,42 +161,34 @@ export class FlyerProcessingService {
|
||||
throw new UnrecoverableError(errorMessage);
|
||||
}
|
||||
|
||||
// Define a structured error payload for job progress updates.
|
||||
// This allows the frontend to provide more specific feedback.
|
||||
let errorPayload = {
|
||||
errorCode: 'UNKNOWN_ERROR',
|
||||
message: 'An unexpected error occurred during processing.',
|
||||
};
|
||||
let errorPayload: { errorCode: string; message: string; [key: string]: any };
|
||||
|
||||
if (error instanceof UnsupportedFileTypeError) {
|
||||
logger.error({ err: error }, `Unsupported file type error.`);
|
||||
errorPayload = {
|
||||
errorCode: 'UNSUPPORTED_FILE_TYPE',
|
||||
message: error.message, // The message is already user-friendly
|
||||
};
|
||||
} else if (error instanceof PdfConversionError) {
|
||||
logger.error({ err: error, stderr: error.stderr }, `PDF Conversion failed.`);
|
||||
errorPayload = {
|
||||
errorCode: 'PDF_CONVERSION_FAILED',
|
||||
message:
|
||||
'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.',
|
||||
};
|
||||
} else if (error instanceof AiDataValidationError) {
|
||||
// Handle our custom, structured processing errors.
|
||||
if (wrappedError instanceof FlyerProcessingError) {
|
||||
// Use the properties from the custom error itself.
|
||||
errorPayload = wrappedError.toErrorPayload();
|
||||
// Log with specific details based on the error type
|
||||
if (wrappedError instanceof AiDataValidationError) {
|
||||
logger.error(
|
||||
{ err: wrappedError, validationErrors: wrappedError.validationErrors, rawData: wrappedError.rawData },
|
||||
`AI Data Validation failed.`,
|
||||
);
|
||||
} else if (wrappedError instanceof PdfConversionError) {
|
||||
logger.error({ err: wrappedError, stderr: wrappedError.stderr }, `PDF Conversion failed.`);
|
||||
} else {
|
||||
// Generic log for other FlyerProcessingErrors like UnsupportedFileTypeError
|
||||
logger.error({ err: wrappedError }, `${wrappedError.name} occurred during processing.`);
|
||||
}
|
||||
} else {
|
||||
// Handle generic/unknown errors.
|
||||
logger.error(
|
||||
{ err: error, validationErrors: error.validationErrors, rawData: error.rawData },
|
||||
`AI Data Validation failed.`,
|
||||
);
|
||||
errorPayload = {
|
||||
errorCode: 'AI_VALIDATION_FAILED',
|
||||
message:
|
||||
"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
|
||||
};
|
||||
} else if (error instanceof Error) {
|
||||
logger.error(
|
||||
{ err: error, attemptsMade: job.attemptsMade, totalAttempts: job.opts.attempts },
|
||||
{ err: wrappedError, attemptsMade: job.attemptsMade, totalAttempts: job.opts.attempts },
|
||||
`A generic error occurred in job.`,
|
||||
);
|
||||
errorPayload.message = error.message;
|
||||
errorPayload = {
|
||||
errorCode: 'UNKNOWN_ERROR',
|
||||
message: errorMessage,
|
||||
};
|
||||
}
|
||||
|
||||
await job.updateProgress(errorPayload);
|
||||
@@ -443,18 +207,17 @@ export class FlyerProcessingService {
|
||||
logger: Logger,
|
||||
): Promise<{ flyerId: number }> {
|
||||
const { filePath } = job.data;
|
||||
const createdImagePaths: string[] = [];
|
||||
|
||||
// Step 1: Prepare image inputs (convert PDF, etc.)
|
||||
await job.updateProgress({ message: 'Starting process...' });
|
||||
const { imagePaths, createdImagePaths: tempImagePaths } = await this._prepareImageInputs(
|
||||
const { imagePaths, createdImagePaths } = await this.fileHandler.prepareImageInputs(
|
||||
filePath,
|
||||
job,
|
||||
logger,
|
||||
);
|
||||
createdImagePaths.push(...tempImagePaths);
|
||||
|
||||
await job.updateProgress({ message: 'Extracting data...' });
|
||||
const extractedData = await this._extractFlyerDataWithAI(imagePaths, job.data, logger);
|
||||
const extractedData = await this.aiProcessor.extractAndValidateData(imagePaths, job.data, logger);
|
||||
|
||||
await job.updateProgress({ message: 'Transforming data...' });
|
||||
const { flyerData, itemsForDb } = await this.transformer.transform(
|
||||
@@ -475,7 +238,7 @@ export class FlyerProcessingService {
|
||||
);
|
||||
logger.info({ flyerId: newFlyer.flyer_id }, `Job processed successfully.`);
|
||||
|
||||
// On success, enqueue the cleanup job for all temporary files.
|
||||
// Step 3: On success, enqueue a cleanup job for all temporary files.
|
||||
const pathsToClean = [filePath, ...createdImagePaths];
|
||||
await this._enqueueCleanup(newFlyer.flyer_id, pathsToClean, logger);
|
||||
|
||||
@@ -518,41 +281,53 @@ export class FlyerProcessingService {
|
||||
|
||||
logger.info({ paths }, `Picked up file cleanup job.`);
|
||||
|
||||
try {
|
||||
if (!paths || paths.length === 0) {
|
||||
logger.warn(`Job received no paths to clean. Skipping.`);
|
||||
return { status: 'skipped', reason: 'no paths' };
|
||||
}
|
||||
if (!paths?.length) {
|
||||
logger.warn(`Job received no paths to clean. Skipping.`);
|
||||
return { status: 'skipped', reason: 'no paths' };
|
||||
}
|
||||
|
||||
for (const filePath of paths) {
|
||||
try {
|
||||
await this.fs.unlink(filePath);
|
||||
// Use Promise.allSettled to attempt deleting all files and collect results.
|
||||
// This is more robust than a for-loop as it attempts to delete all files
|
||||
// even if one of them fails, and then reports on the collective result.
|
||||
const deletionPromises = paths.map((path) => this.fs.unlink(path));
|
||||
const results = await Promise.allSettled(deletionPromises);
|
||||
|
||||
// Process results using reduce for a more functional approach, avoiding mutable variables.
|
||||
const { deletedCount, failedDeletions } = results.reduce(
|
||||
(acc, result, index) => {
|
||||
const filePath = paths[index];
|
||||
if (result.status === 'fulfilled') {
|
||||
logger.info(`Deleted temporary file: ${filePath}`);
|
||||
} catch (unlinkError: unknown) {
|
||||
acc.deletedCount++;
|
||||
} else {
|
||||
const unlinkError = result.reason;
|
||||
if (
|
||||
unlinkError instanceof Error &&
|
||||
'code' in unlinkError &&
|
||||
(unlinkError as NodeJS.ErrnoException).code === 'ENOENT'
|
||||
) {
|
||||
logger.warn(`File not found during cleanup (already deleted?): ${filePath}`);
|
||||
acc.deletedCount++; // Still counts as a success for the job's purpose.
|
||||
} else {
|
||||
// Re-throw other errors to be caught by the outer catch block
|
||||
throw unlinkError;
|
||||
logger.error({ err: unlinkError, path: filePath }, 'Failed to delete temporary file.');
|
||||
acc.failedDeletions.push({ path: filePath, reason: unlinkError });
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.info(`Successfully cleaned up ${paths.length} file(s).`);
|
||||
return { status: 'success', deletedCount: paths.length };
|
||||
} catch (error) {
|
||||
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||
logger.error(
|
||||
{
|
||||
err: wrappedError,
|
||||
attemptsMade: job.attemptsMade,
|
||||
},
|
||||
`File cleanup job failed.`,
|
||||
);
|
||||
throw wrappedError;
|
||||
return acc;
|
||||
},
|
||||
{ deletedCount: 0, failedDeletions: [] as { path: string; reason: unknown }[] },
|
||||
);
|
||||
|
||||
// If any deletions failed for reasons other than 'file not found', fail the job.
|
||||
if (failedDeletions.length > 0) {
|
||||
const failedPaths = failedDeletions.map(({ path }) => path).join(', ');
|
||||
const errorMessage = `Failed to delete ${failedDeletions.length} file(s): ${failedPaths}`;
|
||||
// Throw an error to make the job fail and be retried by BullMQ.
|
||||
// The individual errors have already been logged.
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
logger.info(`Successfully cleaned up ${deletedCount} file(s).`);
|
||||
return { status: 'success', deletedCount };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,12 +70,17 @@ vi.mock('bullmq', () => ({
|
||||
}));
|
||||
|
||||
// Mock flyerProcessingService.server as flyerWorker and cleanupWorker depend on it
|
||||
vi.mock('./flyerProcessingService.server', () => ({
|
||||
FlyerProcessingService: class {
|
||||
processJob = mocks.processFlyerJob;
|
||||
processCleanupJob = mocks.processCleanupJob;
|
||||
},
|
||||
}));
|
||||
vi.mock('./flyerProcessingService.server', () => {
|
||||
// Mock the constructor to return an object with the mocked methods
|
||||
return {
|
||||
FlyerProcessingService: vi.fn().mockImplementation(function () {
|
||||
return {
|
||||
processJob: mocks.processFlyerJob,
|
||||
processCleanupJob: mocks.processCleanupJob,
|
||||
};
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
|
||||
vi.mock('./flyerDataTransformer', () => ({
|
||||
|
||||
@@ -6,13 +6,17 @@ import type { Job } from 'bullmq';
|
||||
const mocks = vi.hoisted(() => {
|
||||
// This object will store the processor functions captured from the worker constructors.
|
||||
const capturedProcessors: Record<string, (job: Job) => Promise<unknown>> = {};
|
||||
|
||||
return {
|
||||
sendEmail: vi.fn(),
|
||||
unlink: vi.fn(),
|
||||
// Service method mocks
|
||||
processFlyerJob: vi.fn(),
|
||||
processCleanupJob: vi.fn(),
|
||||
processEmailJob: vi.fn(),
|
||||
processDailyReportJob: vi.fn(),
|
||||
processWeeklyReportJob: vi.fn(),
|
||||
processTokenCleanupJob: vi.fn(),
|
||||
|
||||
// Test utilities
|
||||
capturedProcessors,
|
||||
deleteExpiredResetTokens: vi.fn(),
|
||||
// Mock the Worker constructor to capture the processor function. It must be a
|
||||
// `function` and not an arrow function so it can be called with `new`.
|
||||
MockWorker: vi.fn(function (name: string, processor: (job: Job) => Promise<unknown>) {
|
||||
@@ -26,23 +30,28 @@ const mocks = vi.hoisted(() => {
|
||||
});
|
||||
|
||||
// --- Mock Modules ---
|
||||
vi.mock('./emailService.server', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('./emailService.server')>();
|
||||
return {
|
||||
...actual,
|
||||
// We only need to mock the specific function being called by the worker.
|
||||
// The rest of the module can retain its original implementation if needed elsewhere.
|
||||
sendEmail: mocks.sendEmail,
|
||||
};
|
||||
});
|
||||
vi.mock('./emailService.server', () => ({
|
||||
processEmailJob: mocks.processEmailJob,
|
||||
}));
|
||||
|
||||
vi.mock('./analyticsService.server', () => ({
|
||||
analyticsService: {
|
||||
processDailyReportJob: mocks.processDailyReportJob,
|
||||
processWeeklyReportJob: mocks.processWeeklyReportJob,
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('./userService', () => ({
|
||||
userService: {
|
||||
processTokenCleanupJob: mocks.processTokenCleanupJob,
|
||||
},
|
||||
}));
|
||||
|
||||
// The workers use an `fsAdapter`. We can mock the underlying `fsPromises`
|
||||
// that the adapter is built from in queueService.server.ts.
|
||||
vi.mock('node:fs/promises', () => ({
|
||||
default: {
|
||||
unlink: mocks.unlink,
|
||||
// Add other fs functions if needed by other tests
|
||||
readdir: vi.fn(),
|
||||
// unlink is no longer directly called by the worker
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -56,28 +65,29 @@ vi.mock('./logger.server', () => ({
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('./db/index.db', () => ({
|
||||
userRepo: {
|
||||
deleteExpiredResetTokens: mocks.deleteExpiredResetTokens,
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock bullmq to capture the processor functions passed to the Worker constructor
|
||||
import { logger as mockLogger } from './logger.server';
|
||||
vi.mock('bullmq', () => ({
|
||||
Worker: mocks.MockWorker,
|
||||
// FIX: Use a standard function for the mock constructor to allow `new Queue(...)` to work.
|
||||
Queue: vi.fn(function () {
|
||||
return { add: vi.fn() };
|
||||
}),
|
||||
// Add UnrecoverableError to the mock so it can be used in tests
|
||||
UnrecoverableError: class UnrecoverableError extends Error {},
|
||||
}));
|
||||
|
||||
// Mock flyerProcessingService.server as flyerWorker depends on it
|
||||
vi.mock('./flyerProcessingService.server', () => ({
|
||||
FlyerProcessingService: class {
|
||||
processJob = mocks.processFlyerJob;
|
||||
},
|
||||
}));
|
||||
vi.mock('./flyerProcessingService.server', () => {
|
||||
// Mock the constructor to return an object with the mocked methods
|
||||
return {
|
||||
FlyerProcessingService: vi.fn().mockImplementation(function () {
|
||||
return {
|
||||
processJob: mocks.processFlyerJob,
|
||||
processCleanupJob: mocks.processCleanupJob,
|
||||
};
|
||||
}),
|
||||
};
|
||||
});
|
||||
|
||||
// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
|
||||
vi.mock('./flyerDataTransformer', () => ({
|
||||
@@ -112,12 +122,13 @@ describe('Queue Workers', () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Reset default mock implementations for hoisted mocks
|
||||
mocks.sendEmail.mockResolvedValue(undefined);
|
||||
mocks.unlink.mockResolvedValue(undefined);
|
||||
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 }); // Default success for flyer processing
|
||||
mocks.deleteExpiredResetTokens.mockResolvedValue(5);
|
||||
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 });
|
||||
mocks.processCleanupJob.mockResolvedValue({ status: 'success' });
|
||||
mocks.processEmailJob.mockResolvedValue(undefined);
|
||||
mocks.processDailyReportJob.mockResolvedValue({ status: 'success' });
|
||||
mocks.processWeeklyReportJob.mockResolvedValue({ status: 'success' });
|
||||
mocks.processTokenCleanupJob.mockResolvedValue({ deletedCount: 5 });
|
||||
|
||||
// Reset modules to re-evaluate the workers.server.ts file with fresh mocks.
|
||||
// This ensures that new worker instances are created and their processors are captured for each test.
|
||||
@@ -162,10 +173,24 @@ describe('Queue Workers', () => {
|
||||
|
||||
await expect(flyerProcessor(job)).rejects.toThrow('Flyer processing failed');
|
||||
});
|
||||
|
||||
it('should re-throw UnrecoverableError from the service layer', async () => {
|
||||
const { UnrecoverableError } = await import('bullmq');
|
||||
const job = createMockJob({
|
||||
filePath: '/tmp/fail.pdf',
|
||||
originalFileName: 'fail.pdf',
|
||||
checksum: 'def',
|
||||
});
|
||||
const unrecoverableError = new UnrecoverableError('Quota exceeded');
|
||||
mocks.processFlyerJob.mockRejectedValue(unrecoverableError);
|
||||
|
||||
// The worker should just let this specific error type pass through.
|
||||
await expect(flyerProcessor(job)).rejects.toThrow(unrecoverableError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('emailWorker', () => {
|
||||
it('should call emailService.sendEmail with the job data', async () => {
|
||||
it('should call emailService.processEmailJob with the job', async () => {
|
||||
const jobData = {
|
||||
to: 'test@example.com',
|
||||
subject: 'Test Email',
|
||||
@@ -173,173 +198,84 @@ describe('Queue Workers', () => {
|
||||
text: 'Hello',
|
||||
};
|
||||
const job = createMockJob(jobData);
|
||||
|
||||
await emailProcessor(job);
|
||||
|
||||
expect(mocks.sendEmail).toHaveBeenCalledTimes(1);
|
||||
// The implementation passes the logger as the second argument
|
||||
expect(mocks.sendEmail).toHaveBeenCalledWith(jobData, expect.anything());
|
||||
expect(mocks.processEmailJob).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.processEmailJob).toHaveBeenCalledWith(job);
|
||||
});
|
||||
|
||||
it('should log and re-throw an error if sendEmail fails with a non-Error object', async () => {
|
||||
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
||||
const emailError = 'SMTP server is down'; // Reject with a string
|
||||
mocks.sendEmail.mockRejectedValue(emailError);
|
||||
|
||||
await expect(emailProcessor(job)).rejects.toThrow(emailError);
|
||||
|
||||
// The worker should wrap the string in an Error object for logging
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: new Error(emailError), jobData: job.data },
|
||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||
);
|
||||
});
|
||||
|
||||
it('should re-throw an error if sendEmail fails', async () => {
|
||||
it('should re-throw an error if processEmailJob fails', async () => {
|
||||
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
||||
const emailError = new Error('SMTP server is down');
|
||||
mocks.sendEmail.mockRejectedValue(emailError);
|
||||
|
||||
mocks.processEmailJob.mockRejectedValue(emailError);
|
||||
await expect(emailProcessor(job)).rejects.toThrow('SMTP server is down');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: emailError, jobData: job.data },
|
||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('analyticsWorker', () => {
|
||||
it('should complete successfully for a valid report date', async () => {
|
||||
vi.useFakeTimers();
|
||||
it('should call analyticsService.processDailyReportJob with the job', async () => {
|
||||
const job = createMockJob({ reportDate: '2024-01-01' });
|
||||
|
||||
const promise = analyticsProcessor(job);
|
||||
// Advance timers to simulate the 10-second task completing
|
||||
await vi.advanceTimersByTimeAsync(10000);
|
||||
await promise; // Wait for the promise to resolve
|
||||
|
||||
// No error should be thrown
|
||||
expect(true).toBe(true);
|
||||
vi.useRealTimers();
|
||||
await analyticsProcessor(job);
|
||||
expect(mocks.processDailyReportJob).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.processDailyReportJob).toHaveBeenCalledWith(job);
|
||||
});
|
||||
|
||||
it('should throw an error if reportDate is "FAIL"', async () => {
|
||||
it('should re-throw an error if processDailyReportJob fails', async () => {
|
||||
const job = createMockJob({ reportDate: 'FAIL' });
|
||||
|
||||
await expect(analyticsProcessor(job)).rejects.toThrow(
|
||||
'This is a test failure for the analytics job.',
|
||||
);
|
||||
const analyticsError = new Error('Analytics processing failed');
|
||||
mocks.processDailyReportJob.mockRejectedValue(analyticsError);
|
||||
await expect(analyticsProcessor(job)).rejects.toThrow('Analytics processing failed');
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanupWorker', () => {
|
||||
it('should call unlink for each path provided in the job data', async () => {
|
||||
it('should call flyerProcessingService.processCleanupJob with the job', async () => {
|
||||
const jobData = {
|
||||
flyerId: 123,
|
||||
paths: ['/tmp/file1.jpg', '/tmp/file2.pdf'],
|
||||
};
|
||||
const job = createMockJob(jobData);
|
||||
mocks.unlink.mockResolvedValue(undefined);
|
||||
|
||||
await cleanupProcessor(job);
|
||||
|
||||
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
||||
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file1.jpg');
|
||||
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file2.pdf');
|
||||
expect(mocks.processCleanupJob).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.processCleanupJob).toHaveBeenCalledWith(job);
|
||||
});
|
||||
|
||||
it('should not throw an error if a file is already deleted (ENOENT)', async () => {
|
||||
const jobData = {
|
||||
flyerId: 123,
|
||||
paths: ['/tmp/existing.jpg', '/tmp/already-deleted.jpg'],
|
||||
};
|
||||
it('should re-throw an error if processCleanupJob fails', async () => {
|
||||
const jobData = { flyerId: 123, paths: ['/tmp/protected-file.jpg'] };
|
||||
const job = createMockJob(jobData);
|
||||
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
||||
const enoentError: NodeJS.ErrnoException = new Error('File not found');
|
||||
enoentError.code = 'ENOENT';
|
||||
|
||||
// First call succeeds, second call fails with ENOENT
|
||||
mocks.unlink.mockResolvedValueOnce(undefined).mockRejectedValueOnce(enoentError);
|
||||
|
||||
// The processor should complete without throwing
|
||||
await expect(cleanupProcessor(job)).resolves.toBeUndefined();
|
||||
|
||||
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should re-throw an error for issues other than ENOENT (e.g., permissions)', async () => {
|
||||
const jobData = {
|
||||
flyerId: 123,
|
||||
paths: ['/tmp/protected-file.jpg'],
|
||||
};
|
||||
const job = createMockJob(jobData);
|
||||
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
||||
const permissionError: NodeJS.ErrnoException = new Error('Permission denied');
|
||||
permissionError.code = 'EACCES';
|
||||
|
||||
mocks.unlink.mockRejectedValue(permissionError);
|
||||
|
||||
const cleanupError = new Error('Permission denied');
|
||||
mocks.processCleanupJob.mockRejectedValue(cleanupError);
|
||||
await expect(cleanupProcessor(job)).rejects.toThrow('Permission denied');
|
||||
|
||||
// Verify the error was logged by the worker's catch block
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: permissionError },
|
||||
expect.stringContaining(
|
||||
`[CleanupWorker] Job ${job.id} for flyer ${job.data.flyerId} failed.`,
|
||||
),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('weeklyAnalyticsWorker', () => {
|
||||
it('should complete successfully for a valid report date', async () => {
|
||||
vi.useFakeTimers();
|
||||
it('should call analyticsService.processWeeklyReportJob with the job', async () => {
|
||||
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||
|
||||
const promise = weeklyAnalyticsProcessor(job);
|
||||
// Advance timers to simulate the 30-second task completing
|
||||
await vi.advanceTimersByTimeAsync(30000);
|
||||
await promise; // Wait for the promise to resolve
|
||||
|
||||
// No error should be thrown
|
||||
expect(true).toBe(true);
|
||||
vi.useRealTimers();
|
||||
await weeklyAnalyticsProcessor(job);
|
||||
expect(mocks.processWeeklyReportJob).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.processWeeklyReportJob).toHaveBeenCalledWith(job);
|
||||
});
|
||||
|
||||
it('should re-throw an error if the job fails', async () => {
|
||||
vi.useFakeTimers();
|
||||
it('should re-throw an error if processWeeklyReportJob fails', async () => {
|
||||
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||
// Mock the internal logic to throw an error
|
||||
const originalSetTimeout = setTimeout;
|
||||
vi.spyOn(global, 'setTimeout').mockImplementation((callback, ms) => {
|
||||
if (ms === 30000) {
|
||||
// Target the simulated delay
|
||||
throw new Error('Weekly analytics job failed');
|
||||
}
|
||||
return originalSetTimeout(callback, ms);
|
||||
});
|
||||
|
||||
const weeklyError = new Error('Weekly analytics job failed');
|
||||
mocks.processWeeklyReportJob.mockRejectedValue(weeklyError);
|
||||
await expect(weeklyAnalyticsProcessor(job)).rejects.toThrow('Weekly analytics job failed');
|
||||
vi.useRealTimers();
|
||||
vi.restoreAllMocks(); // Restore setTimeout mock
|
||||
});
|
||||
});
|
||||
|
||||
describe('tokenCleanupWorker', () => {
|
||||
it('should call userRepo.deleteExpiredResetTokens and return the count', async () => {
|
||||
it('should call userService.processTokenCleanupJob with the job', async () => {
|
||||
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||
mocks.deleteExpiredResetTokens.mockResolvedValue(10);
|
||||
|
||||
const result = await tokenCleanupProcessor(job);
|
||||
|
||||
expect(mocks.deleteExpiredResetTokens).toHaveBeenCalledTimes(1);
|
||||
expect(result).toEqual({ deletedCount: 10 });
|
||||
await tokenCleanupProcessor(job);
|
||||
expect(mocks.processTokenCleanupJob).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.processTokenCleanupJob).toHaveBeenCalledWith(job);
|
||||
});
|
||||
|
||||
it('should re-throw an error if the database call fails', async () => {
|
||||
it('should re-throw an error if processTokenCleanupJob fails', async () => {
|
||||
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||
const dbError = new Error('DB cleanup failed');
|
||||
mocks.deleteExpiredResetTokens.mockRejectedValue(dbError);
|
||||
mocks.processTokenCleanupJob.mockRejectedValue(dbError);
|
||||
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -13,8 +13,9 @@ import * as db from './db/index.db';
|
||||
import {
|
||||
FlyerProcessingService,
|
||||
type FlyerJobData,
|
||||
type IFileSystem,
|
||||
} from './flyerProcessingService.server';
|
||||
import { FlyerFileHandler, type IFileSystem } from './flyerFileHandler.server';
|
||||
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||
import {
|
||||
flyerQueue,
|
||||
@@ -41,6 +42,8 @@ const fsAdapter: IFileSystem = {
|
||||
|
||||
const flyerProcessingService = new FlyerProcessingService(
|
||||
aiService,
|
||||
new FlyerFileHandler(fsAdapter, execAsync),
|
||||
new FlyerAiProcessor(aiService, db.personalizationRepo),
|
||||
db,
|
||||
fsAdapter,
|
||||
execAsync,
|
||||
@@ -52,6 +55,25 @@ const normalizeError = (error: unknown): Error => {
|
||||
return error instanceof Error ? error : new Error(String(error));
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a higher-order function to wrap worker processors with common logic.
|
||||
* This includes error normalization to ensure that any thrown value is an Error instance,
|
||||
* which is a best practice for BullMQ workers.
|
||||
* @param processor The core logic for the worker.
|
||||
* @returns An async function that takes a job and executes the processor.
|
||||
*/
|
||||
const createWorkerProcessor = <T>(processor: (job: Job<T>) => Promise<any>) => {
|
||||
return async (job: Job<T>) => {
|
||||
try {
|
||||
return await processor(job);
|
||||
} catch (error: unknown) {
|
||||
// The service layer now handles detailed logging. This block just ensures
|
||||
// any unexpected errors are normalized before BullMQ handles them.
|
||||
throw normalizeError(error);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const attachWorkerEventListeners = (worker: Worker) => {
|
||||
worker.on('completed', (job: Job, returnValue: unknown) => {
|
||||
logger.info({ returnValue }, `[${worker.name}] Job ${job.id} completed successfully.`);
|
||||
@@ -67,17 +89,7 @@ const attachWorkerEventListeners = (worker: Worker) => {
|
||||
|
||||
export const flyerWorker = new Worker<FlyerJobData>(
|
||||
'flyer-processing',
|
||||
async (job) => {
|
||||
try {
|
||||
return await flyerProcessingService.processJob(job);
|
||||
} catch (error: unknown) {
|
||||
// The service layer now handles identifying unrecoverable errors and throws
|
||||
// a `BullMQ.UnrecoverableError` which the worker respects. This catch block
|
||||
// simply ensures any non-Error exceptions are properly wrapped before being
|
||||
// re-thrown for BullMQ to handle retries.
|
||||
throw normalizeError(error);
|
||||
}
|
||||
},
|
||||
createWorkerProcessor((job) => flyerProcessingService.processJob(job)),
|
||||
{
|
||||
connection,
|
||||
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
|
||||
@@ -86,17 +98,7 @@ export const flyerWorker = new Worker<FlyerJobData>(
|
||||
|
||||
export const emailWorker = new Worker<EmailJobData>(
|
||||
'email-sending',
|
||||
async (job: Job<EmailJobData>) => {
|
||||
try {
|
||||
// Delegate all logic to the service layer
|
||||
return await emailService.processEmailJob(job);
|
||||
} catch (error: unknown) {
|
||||
// The service layer now handles logging. This block just ensures
|
||||
// any unexpected errors are normalized before BullMQ handles them.
|
||||
const wrappedError = normalizeError(error);
|
||||
throw wrappedError;
|
||||
}
|
||||
},
|
||||
createWorkerProcessor((job) => emailService.processEmailJob(job)),
|
||||
{
|
||||
connection,
|
||||
concurrency: parseInt(process.env.EMAIL_WORKER_CONCURRENCY || '10', 10),
|
||||
@@ -105,15 +107,7 @@ export const emailWorker = new Worker<EmailJobData>(
|
||||
|
||||
export const analyticsWorker = new Worker<AnalyticsJobData>(
|
||||
'analytics-reporting',
|
||||
async (job: Job<AnalyticsJobData>) => {
|
||||
try {
|
||||
return await analyticsService.processDailyReportJob(job);
|
||||
} catch (error: unknown) {
|
||||
// The service layer now handles logging. This block just ensures
|
||||
// any unexpected errors are normalized before BullMQ handles them.
|
||||
throw normalizeError(error);
|
||||
}
|
||||
},
|
||||
createWorkerProcessor((job) => analyticsService.processDailyReportJob(job)),
|
||||
{
|
||||
connection,
|
||||
concurrency: parseInt(process.env.ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
||||
@@ -122,15 +116,7 @@ export const analyticsWorker = new Worker<AnalyticsJobData>(
|
||||
|
||||
export const cleanupWorker = new Worker<CleanupJobData>(
|
||||
'file-cleanup',
|
||||
async (job: Job<CleanupJobData>) => {
|
||||
try {
|
||||
return await flyerProcessingService.processCleanupJob(job);
|
||||
} catch (error: unknown) {
|
||||
// The service layer now handles logging. This block just ensures
|
||||
// any unexpected errors are normalized before BullMQ handles them.
|
||||
throw normalizeError(error);
|
||||
}
|
||||
},
|
||||
createWorkerProcessor((job) => flyerProcessingService.processCleanupJob(job)),
|
||||
{
|
||||
connection,
|
||||
concurrency: parseInt(process.env.CLEANUP_WORKER_CONCURRENCY || '10', 10),
|
||||
@@ -139,15 +125,7 @@ export const cleanupWorker = new Worker<CleanupJobData>(
|
||||
|
||||
export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
||||
'weekly-analytics-reporting',
|
||||
async (job: Job<WeeklyAnalyticsJobData>) => {
|
||||
try {
|
||||
return await analyticsService.processWeeklyReportJob(job);
|
||||
} catch (error: unknown) {
|
||||
// The service layer now handles logging. This block just ensures
|
||||
// any unexpected errors are normalized before BullMQ handles them.
|
||||
throw normalizeError(error);
|
||||
}
|
||||
},
|
||||
createWorkerProcessor((job) => analyticsService.processWeeklyReportJob(job)),
|
||||
{
|
||||
connection,
|
||||
concurrency: parseInt(process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
||||
@@ -156,15 +134,7 @@ export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
||||
|
||||
export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
|
||||
'token-cleanup',
|
||||
async (job: Job<TokenCleanupJobData>) => {
|
||||
try {
|
||||
return await userService.processTokenCleanupJob(job);
|
||||
} catch (error: unknown) {
|
||||
// The service layer now handles logging. This block just ensures
|
||||
// any unexpected errors are normalized before BullMQ handles them.
|
||||
throw normalizeError(error);
|
||||
}
|
||||
},
|
||||
createWorkerProcessor((job) => userService.processTokenCleanupJob(job)),
|
||||
{
|
||||
connection,
|
||||
concurrency: 1,
|
||||
|
||||
@@ -39,6 +39,7 @@ import {
|
||||
ShoppingTripItem,
|
||||
Receipt,
|
||||
ReceiptItem,
|
||||
SearchQuery,
|
||||
ProcessingStage,
|
||||
UserAlert,
|
||||
UserSubmittedPrice,
|
||||
@@ -1451,3 +1452,66 @@ export const createMockAppliance = (overrides: Partial<Appliance> = {}): Applian
|
||||
...overrides,
|
||||
};
|
||||
};
|
||||
|
||||
// src/tests/utils/mockFactories.ts
|
||||
|
||||
// ... existing factories
|
||||
|
||||
export const createMockShoppingListItemPayload = (overrides: Partial<{ masterItemId: number; customItemName: string }> = {}): { masterItemId?: number; customItemName?: string } => ({
|
||||
customItemName: 'Mock Item',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockRecipeCommentPayload = (overrides: Partial<{ content: string; parentCommentId: number }> = {}): { content: string; parentCommentId?: number } => ({
|
||||
content: 'This is a mock comment.',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockProfileUpdatePayload = (overrides: Partial<Profile> = {}): Partial<Profile> => ({
|
||||
full_name: 'Mock User',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockAddressPayload = (overrides: Partial<Address> = {}): Partial<Address> => ({
|
||||
address_line_1: '123 Mock St',
|
||||
city: 'Mockville',
|
||||
province_state: 'MS',
|
||||
postal_code: '12345',
|
||||
country: 'Mockland',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockSearchQueryPayload = (overrides: Partial<Omit<SearchQuery, 'search_query_id' | 'id' | 'created_at' | 'user_id'>> = {}): Omit<SearchQuery, 'search_query_id' | 'id' | 'created_at' | 'user_id'> => ({
|
||||
query_text: 'mock search',
|
||||
result_count: 5,
|
||||
was_successful: true,
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockWatchedItemPayload = (overrides: Partial<{ itemName: string; category: string }> = {}): { itemName: string; category: string } => ({
|
||||
itemName: 'Mock Watched Item',
|
||||
category: 'Pantry',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockRegisterUserPayload = (
|
||||
overrides: Partial<{
|
||||
email: string;
|
||||
password: string;
|
||||
full_name: string;
|
||||
avatar_url: string | undefined;
|
||||
}> = {},
|
||||
) => ({
|
||||
email: 'mock@example.com',
|
||||
password: 'password123',
|
||||
full_name: 'Mock User',
|
||||
avatar_url: undefined,
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockLoginPayload = (overrides: Partial<{ email: string; password: string; rememberMe: boolean }> = {}) => ({
|
||||
email: 'mock@example.com',
|
||||
password: 'password123',
|
||||
rememberMe: false,
|
||||
...overrides,
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user