Compare commits
9 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2a310648ca | ||
| 8592633c22 | |||
|
|
0a9cdb8709 | ||
| 0d21e098f8 | |||
| b6799ed167 | |||
|
|
be5bda169e | ||
| 4ede403356 | |||
| 5d31605b80 | |||
| ddd4ad024e |
@@ -158,7 +158,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
||||||
fi
|
fi
|
||||||
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
|
||||||
echo "Production backend server reloaded successfully."
|
echo "Production backend server reloaded successfully."
|
||||||
else
|
else
|
||||||
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
||||||
|
|||||||
@@ -406,7 +406,7 @@ jobs:
|
|||||||
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
|
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
|
||||||
# It will START the process if it's not running, or RELOAD it if it is.
|
# It will START the process if it's not running, or RELOAD it if it is.
|
||||||
# We also add `&& pm2 save` to persist the process list across server reboots.
|
# We also add `&& pm2 save` to persist the process list across server reboots.
|
||||||
pm2 startOrReload ecosystem.config.cjs --env test && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env test --update-env && pm2 save
|
||||||
echo "Test backend server reloaded successfully."
|
echo "Test backend server reloaded successfully."
|
||||||
|
|
||||||
# After a successful deployment, update the schema hash in the database.
|
# After a successful deployment, update the schema hash in the database.
|
||||||
|
|||||||
@@ -157,7 +157,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
||||||
fi
|
fi
|
||||||
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
|
||||||
echo "Production backend server reloaded successfully."
|
echo "Production backend server reloaded successfully."
|
||||||
else
|
else
|
||||||
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
||||||
|
|||||||
@@ -3,6 +3,18 @@
|
|||||||
// It allows us to define all the settings for our application in one place.
|
// It allows us to define all the settings for our application in one place.
|
||||||
// The .cjs extension is required because the project's package.json has "type": "module".
|
// The .cjs extension is required because the project's package.json has "type": "module".
|
||||||
|
|
||||||
|
// --- Environment Variable Validation ---
|
||||||
|
const requiredSecrets = ['DB_HOST', 'JWT_SECRET', 'GEMINI_API_KEY'];
|
||||||
|
const missingSecrets = requiredSecrets.filter(key => !process.env[key]);
|
||||||
|
|
||||||
|
if (missingSecrets.length > 0) {
|
||||||
|
console.warn('\n[ecosystem.config.cjs] ⚠️ WARNING: The following environment variables are MISSING in the shell:');
|
||||||
|
missingSecrets.forEach(key => console.warn(` - ${key}`));
|
||||||
|
console.warn('[ecosystem.config.cjs] The application may crash if these are required for startup.\n');
|
||||||
|
} else {
|
||||||
|
console.log('[ecosystem.config.cjs] ✅ Critical environment variables are present.');
|
||||||
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
apps: [
|
apps: [
|
||||||
{
|
{
|
||||||
|
|||||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "flyer-crawler",
|
"name": "flyer-crawler",
|
||||||
"version": "0.2.10",
|
"version": "0.2.13",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "flyer-crawler",
|
"name": "flyer-crawler",
|
||||||
"version": "0.2.10",
|
"version": "0.2.13",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@bull-board/api": "^6.14.2",
|
"@bull-board/api": "^6.14.2",
|
||||||
"@bull-board/express": "^6.14.2",
|
"@bull-board/express": "^6.14.2",
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "flyer-crawler",
|
"name": "flyer-crawler",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "0.2.10",
|
"version": "0.2.13",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ describe('useAppInitialization Hook', () => {
|
|||||||
// Mock localStorage
|
// Mock localStorage
|
||||||
Object.defineProperty(window, 'localStorage', {
|
Object.defineProperty(window, 'localStorage', {
|
||||||
value: {
|
value: {
|
||||||
getItem: vi.fn(),
|
getItem: vi.fn().mockReturnValue(null),
|
||||||
setItem: vi.fn(),
|
setItem: vi.fn(),
|
||||||
removeItem: vi.fn(),
|
removeItem: vi.fn(),
|
||||||
clear: vi.fn(),
|
clear: vi.fn(),
|
||||||
@@ -74,6 +74,7 @@ describe('useAppInitialization Hook', () => {
|
|||||||
matches: false, // default to light mode
|
matches: false, // default to light mode
|
||||||
})),
|
})),
|
||||||
writable: true,
|
writable: true,
|
||||||
|
configurable: true,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ vi.mock('./logger.client', () => ({
|
|||||||
debug: vi.fn(),
|
debug: vi.fn(),
|
||||||
info: vi.fn(),
|
info: vi.fn(),
|
||||||
error: vi.fn(),
|
error: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -285,9 +286,25 @@ describe('AI API Client (Network Mocking with MSW)', () => {
|
|||||||
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow('Job not found');
|
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow('Job not found');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw a generic error if the API response is not valid JSON', async () => {
|
it('should throw a specific error if a 200 OK response is not valid JSON', async () => {
|
||||||
server.use(http.get(`http://localhost/api/ai/jobs/${jobId}/status`, () => HttpResponse.text('Invalid JSON')));
|
server.use(
|
||||||
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow(expect.any(SyntaxError));
|
http.get(`http://localhost/api/ai/jobs/${jobId}/status`, () => {
|
||||||
|
// A 200 OK response that is not JSON is a server-side contract violation.
|
||||||
|
return HttpResponse.text('This should have been JSON', { status: 200 });
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow(
|
||||||
|
'Failed to parse job status from a successful API response.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw a generic error with status text if the non-ok API response is not valid JSON', async () => {
|
||||||
|
server.use(
|
||||||
|
http.get(`http://localhost/api/ai/jobs/${jobId}/status`, () => {
|
||||||
|
return HttpResponse.text('Gateway Timeout', { status: 504, statusText: 'Gateway Timeout' });
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow('API Error: 504 Gateway Timeout');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import type {
|
|||||||
GroundedResponse,
|
GroundedResponse,
|
||||||
} from '../types';
|
} from '../types';
|
||||||
import { logger } from './logger.client';
|
import { logger } from './logger.client';
|
||||||
import { apiFetch } from './apiClient';
|
import { apiFetch, authedGet, authedPost, authedPostForm } from './apiClient';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Uploads a flyer file to the backend to be processed asynchronously.
|
* Uploads a flyer file to the backend to be processed asynchronously.
|
||||||
@@ -33,14 +33,7 @@ export const uploadAndProcessFlyer = async (
|
|||||||
|
|
||||||
logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
|
logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
|
||||||
|
|
||||||
const response = await apiFetch(
|
const response = await authedPostForm('/ai/upload-and-process', formData, { tokenOverride });
|
||||||
'/ai/upload-and-process',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
body: formData,
|
|
||||||
},
|
|
||||||
{ tokenOverride },
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
let errorBody;
|
let errorBody;
|
||||||
@@ -101,18 +94,29 @@ export const getJobStatus = async (
|
|||||||
jobId: string,
|
jobId: string,
|
||||||
tokenOverride?: string,
|
tokenOverride?: string,
|
||||||
): Promise<JobStatus> => {
|
): Promise<JobStatus> => {
|
||||||
const response = await apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
|
const response = await authedGet(`/ai/jobs/${jobId}/status`, { tokenOverride });
|
||||||
|
|
||||||
|
// Handle non-OK responses first, as they might not have a JSON body.
|
||||||
|
if (!response.ok) {
|
||||||
|
let errorMessage = `API Error: ${response.status} ${response.statusText}`;
|
||||||
|
try {
|
||||||
|
// Try to get a more specific message from the body.
|
||||||
|
const errorData = await response.json();
|
||||||
|
if (errorData.message) {
|
||||||
|
errorMessage = errorData.message;
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
// The body was not JSON, which is fine for a server error page.
|
||||||
|
// The default message is sufficient.
|
||||||
|
logger.warn('getJobStatus received a non-JSON error response.', { status: response.status });
|
||||||
|
}
|
||||||
|
throw new Error(errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we get here, the response is OK (2xx). Now parse the body.
|
||||||
try {
|
try {
|
||||||
const statusData: JobStatus = await response.json();
|
const statusData: JobStatus = await response.json();
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
// If the HTTP response itself is an error (e.g., 404, 500), throw an error.
|
|
||||||
// Use the message from the JSON body if available.
|
|
||||||
const errorMessage = (statusData as any).message || `API Error: ${response.status}`;
|
|
||||||
throw new Error(errorMessage);
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the job itself has failed, we should treat this as an error condition
|
// If the job itself has failed, we should treat this as an error condition
|
||||||
// for the polling logic by rejecting the promise. This will stop the polling loop.
|
// for the polling logic by rejecting the promise. This will stop the polling loop.
|
||||||
if (statusData.state === 'failed') {
|
if (statusData.state === 'failed') {
|
||||||
@@ -130,9 +134,13 @@ export const getJobStatus = async (
|
|||||||
|
|
||||||
return statusData;
|
return statusData;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// This block catches errors from `response.json()` (if the body is not valid JSON)
|
// If it's the specific error we threw, just re-throw it.
|
||||||
// and also re-throws the errors we created above.
|
if (error instanceof JobFailedError) {
|
||||||
throw error;
|
throw error;
|
||||||
|
}
|
||||||
|
// This now primarily catches JSON parsing errors on an OK response, which is unexpected.
|
||||||
|
logger.error('getJobStatus failed to parse a successful API response.', { error });
|
||||||
|
throw new Error('Failed to parse job status from a successful API response.');
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -145,14 +153,7 @@ export const isImageAFlyer = (
|
|||||||
|
|
||||||
// Use apiFetchWithAuth for FormData to let the browser set the correct Content-Type.
|
// Use apiFetchWithAuth for FormData to let the browser set the correct Content-Type.
|
||||||
// The URL must be relative, as the helper constructs the full path.
|
// The URL must be relative, as the helper constructs the full path.
|
||||||
return apiFetch(
|
return authedPostForm('/ai/check-flyer', formData, { tokenOverride });
|
||||||
'/ai/check-flyer',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
body: formData,
|
|
||||||
},
|
|
||||||
{ tokenOverride },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export const extractAddressFromImage = (
|
export const extractAddressFromImage = (
|
||||||
@@ -162,14 +163,7 @@ export const extractAddressFromImage = (
|
|||||||
const formData = new FormData();
|
const formData = new FormData();
|
||||||
formData.append('image', imageFile);
|
formData.append('image', imageFile);
|
||||||
|
|
||||||
return apiFetch(
|
return authedPostForm('/ai/extract-address', formData, { tokenOverride });
|
||||||
'/ai/extract-address',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
body: formData,
|
|
||||||
},
|
|
||||||
{ tokenOverride },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export const extractLogoFromImage = (
|
export const extractLogoFromImage = (
|
||||||
@@ -181,14 +175,7 @@ export const extractLogoFromImage = (
|
|||||||
formData.append('images', file);
|
formData.append('images', file);
|
||||||
});
|
});
|
||||||
|
|
||||||
return apiFetch(
|
return authedPostForm('/ai/extract-logo', formData, { tokenOverride });
|
||||||
'/ai/extract-logo',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
body: formData,
|
|
||||||
},
|
|
||||||
{ tokenOverride },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getQuickInsights = (
|
export const getQuickInsights = (
|
||||||
@@ -196,16 +183,7 @@ export const getQuickInsights = (
|
|||||||
signal?: AbortSignal,
|
signal?: AbortSignal,
|
||||||
tokenOverride?: string,
|
tokenOverride?: string,
|
||||||
): Promise<Response> => {
|
): Promise<Response> => {
|
||||||
return apiFetch(
|
return authedPost('/ai/quick-insights', { items }, { tokenOverride, signal });
|
||||||
'/ai/quick-insights',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ items }),
|
|
||||||
signal,
|
|
||||||
},
|
|
||||||
{ tokenOverride, signal },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getDeepDiveAnalysis = (
|
export const getDeepDiveAnalysis = (
|
||||||
@@ -213,16 +191,7 @@ export const getDeepDiveAnalysis = (
|
|||||||
signal?: AbortSignal,
|
signal?: AbortSignal,
|
||||||
tokenOverride?: string,
|
tokenOverride?: string,
|
||||||
): Promise<Response> => {
|
): Promise<Response> => {
|
||||||
return apiFetch(
|
return authedPost('/ai/deep-dive', { items }, { tokenOverride, signal });
|
||||||
'/ai/deep-dive',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ items }),
|
|
||||||
signal,
|
|
||||||
},
|
|
||||||
{ tokenOverride, signal },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
export const searchWeb = (
|
export const searchWeb = (
|
||||||
@@ -230,16 +199,7 @@ export const searchWeb = (
|
|||||||
signal?: AbortSignal,
|
signal?: AbortSignal,
|
||||||
tokenOverride?: string,
|
tokenOverride?: string,
|
||||||
): Promise<Response> => {
|
): Promise<Response> => {
|
||||||
return apiFetch(
|
return authedPost('/ai/search-web', { query }, { tokenOverride, signal });
|
||||||
'/ai/search-web',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ query }),
|
|
||||||
signal,
|
|
||||||
},
|
|
||||||
{ tokenOverride, signal },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -254,15 +214,7 @@ export const planTripWithMaps = async (
|
|||||||
tokenOverride?: string,
|
tokenOverride?: string,
|
||||||
): Promise<Response> => {
|
): Promise<Response> => {
|
||||||
logger.debug('Stub: planTripWithMaps called with location:', { userLocation });
|
logger.debug('Stub: planTripWithMaps called with location:', { userLocation });
|
||||||
return apiFetch(
|
return authedPost('/ai/plan-trip', { items, store, userLocation }, { signal, tokenOverride });
|
||||||
'/ai/plan-trip',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ items, store, userLocation }),
|
|
||||||
},
|
|
||||||
{ signal, tokenOverride },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -276,16 +228,7 @@ export const generateImageFromText = (
|
|||||||
tokenOverride?: string,
|
tokenOverride?: string,
|
||||||
): Promise<Response> => {
|
): Promise<Response> => {
|
||||||
logger.debug('Stub: generateImageFromText called with prompt:', { prompt });
|
logger.debug('Stub: generateImageFromText called with prompt:', { prompt });
|
||||||
return apiFetch(
|
return authedPost('/ai/generate-image', { prompt }, { tokenOverride, signal });
|
||||||
'/ai/generate-image',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ prompt }),
|
|
||||||
signal,
|
|
||||||
},
|
|
||||||
{ tokenOverride, signal },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -299,16 +242,7 @@ export const generateSpeechFromText = (
|
|||||||
tokenOverride?: string,
|
tokenOverride?: string,
|
||||||
): Promise<Response> => {
|
): Promise<Response> => {
|
||||||
logger.debug('Stub: generateSpeechFromText called with text:', { text });
|
logger.debug('Stub: generateSpeechFromText called with text:', { text });
|
||||||
return apiFetch(
|
return authedPost('/ai/generate-speech', { text }, { tokenOverride, signal });
|
||||||
'/ai/generate-speech',
|
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ text }),
|
|
||||||
signal,
|
|
||||||
},
|
|
||||||
{ tokenOverride, signal },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -361,11 +295,7 @@ export const rescanImageArea = (
|
|||||||
formData.append('cropArea', JSON.stringify(cropArea));
|
formData.append('cropArea', JSON.stringify(cropArea));
|
||||||
formData.append('extractionType', extractionType);
|
formData.append('extractionType', extractionType);
|
||||||
|
|
||||||
return apiFetch(
|
return authedPostForm('/ai/rescan-area', formData, { tokenOverride });
|
||||||
'/ai/rescan-area',
|
|
||||||
{ method: 'POST', body: formData },
|
|
||||||
{ tokenOverride },
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -379,12 +309,5 @@ export const compareWatchedItemPrices = (
|
|||||||
): Promise<Response> => {
|
): Promise<Response> => {
|
||||||
// Use the apiFetch wrapper for consistency with other API calls in this file.
|
// Use the apiFetch wrapper for consistency with other API calls in this file.
|
||||||
// This centralizes token handling and base URL logic.
|
// This centralizes token handling and base URL logic.
|
||||||
return apiFetch(
|
return authedPost('/ai/compare-prices', { items: watchedItems }, { signal });
|
||||||
'/ai/compare-prices',
|
};
|
||||||
{
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ items: watchedItems }),
|
|
||||||
},
|
|
||||||
{ signal },
|
|
||||||
)};
|
|
||||||
|
|||||||
@@ -596,40 +596,6 @@ describe('AI Service (Server)', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('_normalizeExtractedItems (private method)', () => {
|
|
||||||
it('should replace null or undefined fields with default values', () => {
|
|
||||||
const rawItems: {
|
|
||||||
item: string;
|
|
||||||
price_display: null;
|
|
||||||
quantity: undefined;
|
|
||||||
category_name: null;
|
|
||||||
master_item_id: null;
|
|
||||||
}[] = [
|
|
||||||
{
|
|
||||||
item: 'Test',
|
|
||||||
price_display: null,
|
|
||||||
quantity: undefined,
|
|
||||||
category_name: null,
|
|
||||||
master_item_id: null,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
const [normalized] = (
|
|
||||||
aiServiceInstance as unknown as {
|
|
||||||
_normalizeExtractedItems: (items: typeof rawItems) => {
|
|
||||||
price_display: string;
|
|
||||||
quantity: string;
|
|
||||||
category_name: string;
|
|
||||||
master_item_id: undefined;
|
|
||||||
}[];
|
|
||||||
}
|
|
||||||
)._normalizeExtractedItems(rawItems);
|
|
||||||
expect(normalized.price_display).toBe('');
|
|
||||||
expect(normalized.quantity).toBe('');
|
|
||||||
expect(normalized.category_name).toBe('Other/Miscellaneous');
|
|
||||||
expect(normalized.master_item_id).toBeUndefined();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('extractTextFromImageArea', () => {
|
describe('extractTextFromImageArea', () => {
|
||||||
it('should call sharp to crop the image and call the AI with the correct prompt', async () => {
|
it('should call sharp to crop the image and call the AI with the correct prompt', async () => {
|
||||||
console.log("TEST START: 'should call sharp to crop...'");
|
console.log("TEST START: 'should call sharp to crop...'");
|
||||||
|
|||||||
79
src/services/analyticsService.server.ts
Normal file
79
src/services/analyticsService.server.ts
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
// src/services/analyticsService.server.ts
|
||||||
|
import type { Job } from 'bullmq';
|
||||||
|
import { logger as globalLogger } from './logger.server';
|
||||||
|
import type { AnalyticsJobData, WeeklyAnalyticsJobData } from './queues.server';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A service class to encapsulate business logic for analytics-related background jobs.
|
||||||
|
*/
|
||||||
|
export class AnalyticsService {
|
||||||
|
/**
|
||||||
|
* Processes a job to generate a daily analytics report.
|
||||||
|
* This is currently a mock implementation.
|
||||||
|
* @param job The BullMQ job object.
|
||||||
|
*/
|
||||||
|
async processDailyReportJob(job: Job<AnalyticsJobData>) {
|
||||||
|
const { reportDate } = job.data;
|
||||||
|
const logger = globalLogger.child({
|
||||||
|
jobId: job.id,
|
||||||
|
jobName: job.name,
|
||||||
|
reportDate,
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`Picked up daily analytics job.`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// This is mock logic, but we keep it in the service
|
||||||
|
if (reportDate === 'FAIL') {
|
||||||
|
throw new Error('This is a test failure for the analytics job.');
|
||||||
|
}
|
||||||
|
// Simulate work
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 10000));
|
||||||
|
logger.info(`Successfully generated report for ${reportDate}.`);
|
||||||
|
return { status: 'success', reportDate };
|
||||||
|
} catch (error) {
|
||||||
|
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||||
|
logger.error(
|
||||||
|
{
|
||||||
|
err: wrappedError,
|
||||||
|
attemptsMade: job.attemptsMade,
|
||||||
|
},
|
||||||
|
`Daily analytics job failed.`,
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Processes a job to generate a weekly analytics report.
|
||||||
|
* This is currently a mock implementation.
|
||||||
|
* @param job The BullMQ job object.
|
||||||
|
*/
|
||||||
|
async processWeeklyReportJob(job: Job<WeeklyAnalyticsJobData>) {
|
||||||
|
const { reportYear, reportWeek } = job.data;
|
||||||
|
const logger = globalLogger.child({
|
||||||
|
jobId: job.id,
|
||||||
|
jobName: job.name,
|
||||||
|
reportYear,
|
||||||
|
reportWeek,
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info(`Picked up weekly analytics job.`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Mock logic
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 30000));
|
||||||
|
logger.info(`Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`);
|
||||||
|
return { status: 'success', reportYear, reportWeek };
|
||||||
|
} catch (error) {
|
||||||
|
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError, attemptsMade: job.attemptsMade },
|
||||||
|
`Weekly analytics job failed.`,
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const analyticsService = new AnalyticsService();
|
||||||
@@ -7,6 +7,17 @@ import { http, HttpResponse } from 'msw';
|
|||||||
vi.unmock('./apiClient');
|
vi.unmock('./apiClient');
|
||||||
|
|
||||||
import * as apiClient from './apiClient';
|
import * as apiClient from './apiClient';
|
||||||
|
import {
|
||||||
|
createMockAddressPayload,
|
||||||
|
createMockBudget,
|
||||||
|
createMockLoginPayload,
|
||||||
|
createMockProfileUpdatePayload,
|
||||||
|
createMockRecipeCommentPayload,
|
||||||
|
createMockRegisterUserPayload,
|
||||||
|
createMockSearchQueryPayload,
|
||||||
|
createMockShoppingListItemPayload,
|
||||||
|
createMockWatchedItemPayload,
|
||||||
|
} from '../tests/utils/mockFactories';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean and verifiable.
|
// Mock the logger to keep test output clean and verifiable.
|
||||||
vi.mock('./logger', () => ({
|
vi.mock('./logger', () => ({
|
||||||
@@ -229,33 +240,6 @@ describe('API Client', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Analytics API Functions', () => {
|
|
||||||
it('trackFlyerItemInteraction should log a warning on failure', async () => {
|
|
||||||
const { logger } = await import('./logger.client');
|
|
||||||
const apiError = new Error('Network failed');
|
|
||||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
|
||||||
|
|
||||||
// We can now await this properly because we added 'return' in apiClient.ts
|
|
||||||
await apiClient.trackFlyerItemInteraction(123, 'click');
|
|
||||||
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
|
|
||||||
error: apiError,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('logSearchQuery should log a warning on failure', async () => {
|
|
||||||
const { logger } = await import('./logger.client');
|
|
||||||
const apiError = new Error('Network failed');
|
|
||||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
|
||||||
|
|
||||||
await apiClient.logSearchQuery({
|
|
||||||
query_text: 'test',
|
|
||||||
result_count: 0,
|
|
||||||
was_successful: false,
|
|
||||||
});
|
|
||||||
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('apiFetch (with FormData)', () => {
|
describe('apiFetch (with FormData)', () => {
|
||||||
it('should handle FormData correctly by not setting Content-Type', async () => {
|
it('should handle FormData correctly by not setting Content-Type', async () => {
|
||||||
localStorage.setItem('authToken', 'form-data-token');
|
localStorage.setItem('authToken', 'form-data-token');
|
||||||
@@ -317,10 +301,11 @@ describe('API Client', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('addWatchedItem should send a POST request with the correct body', async () => {
|
it('addWatchedItem should send a POST request with the correct body', async () => {
|
||||||
await apiClient.addWatchedItem('Apples', 'Produce');
|
const watchedItemData = createMockWatchedItemPayload({ itemName: 'Apples', category: 'Produce' });
|
||||||
|
await apiClient.addWatchedItem(watchedItemData.itemName, watchedItemData.category);
|
||||||
|
|
||||||
expect(capturedUrl?.pathname).toBe('/api/users/watched-items');
|
expect(capturedUrl?.pathname).toBe('/api/users/watched-items');
|
||||||
expect(capturedBody).toEqual({ itemName: 'Apples', category: 'Produce' });
|
expect(capturedBody).toEqual(watchedItemData);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('removeWatchedItem should send a DELETE request to the correct URL', async () => {
|
it('removeWatchedItem should send a DELETE request to the correct URL', async () => {
|
||||||
@@ -337,12 +322,12 @@ describe('API Client', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('createBudget should send a POST request with budget data', async () => {
|
it('createBudget should send a POST request with budget data', async () => {
|
||||||
const budgetData = {
|
const budgetData = createMockBudget({
|
||||||
name: 'Groceries',
|
name: 'Groceries',
|
||||||
amount_cents: 50000,
|
amount_cents: 50000,
|
||||||
period: 'monthly' as const,
|
period: 'monthly',
|
||||||
start_date: '2024-01-01',
|
start_date: '2024-01-01',
|
||||||
};
|
});
|
||||||
await apiClient.createBudget(budgetData);
|
await apiClient.createBudget(budgetData);
|
||||||
|
|
||||||
expect(capturedUrl?.pathname).toBe('/api/budgets');
|
expect(capturedUrl?.pathname).toBe('/api/budgets');
|
||||||
@@ -461,7 +446,7 @@ describe('API Client', () => {
|
|||||||
|
|
||||||
it('addShoppingListItem should send a POST request with item data', async () => {
|
it('addShoppingListItem should send a POST request with item data', async () => {
|
||||||
const listId = 42;
|
const listId = 42;
|
||||||
const itemData = { customItemName: 'Paper Towels' };
|
const itemData = createMockShoppingListItemPayload({ customItemName: 'Paper Towels' });
|
||||||
await apiClient.addShoppingListItem(listId, itemData);
|
await apiClient.addShoppingListItem(listId, itemData);
|
||||||
|
|
||||||
expect(capturedUrl?.pathname).toBe(`/api/users/shopping-lists/${listId}/items`);
|
expect(capturedUrl?.pathname).toBe(`/api/users/shopping-lists/${listId}/items`);
|
||||||
@@ -547,7 +532,7 @@ describe('API Client', () => {
|
|||||||
|
|
||||||
it('addRecipeComment should send a POST request with content and optional parentId', async () => {
|
it('addRecipeComment should send a POST request with content and optional parentId', async () => {
|
||||||
const recipeId = 456;
|
const recipeId = 456;
|
||||||
const commentData = { content: 'This is a reply', parentCommentId: 789 };
|
const commentData = createMockRecipeCommentPayload({ content: 'This is a reply', parentCommentId: 789 });
|
||||||
await apiClient.addRecipeComment(recipeId, commentData.content, commentData.parentCommentId);
|
await apiClient.addRecipeComment(recipeId, commentData.content, commentData.parentCommentId);
|
||||||
expect(capturedUrl?.pathname).toBe(`/api/recipes/${recipeId}/comments`);
|
expect(capturedUrl?.pathname).toBe(`/api/recipes/${recipeId}/comments`);
|
||||||
expect(capturedBody).toEqual(commentData);
|
expect(capturedBody).toEqual(commentData);
|
||||||
@@ -563,7 +548,7 @@ describe('API Client', () => {
|
|||||||
describe('User Profile and Settings API Functions', () => {
|
describe('User Profile and Settings API Functions', () => {
|
||||||
it('updateUserProfile should send a PUT request with profile data', async () => {
|
it('updateUserProfile should send a PUT request with profile data', async () => {
|
||||||
localStorage.setItem('authToken', 'user-settings-token');
|
localStorage.setItem('authToken', 'user-settings-token');
|
||||||
const profileData = { full_name: 'John Doe' };
|
const profileData = createMockProfileUpdatePayload({ full_name: 'John Doe' });
|
||||||
await apiClient.updateUserProfile(profileData, { tokenOverride: 'override-token' });
|
await apiClient.updateUserProfile(profileData, { tokenOverride: 'override-token' });
|
||||||
expect(capturedUrl?.pathname).toBe('/api/users/profile');
|
expect(capturedUrl?.pathname).toBe('/api/users/profile');
|
||||||
expect(capturedBody).toEqual(profileData);
|
expect(capturedBody).toEqual(profileData);
|
||||||
@@ -619,14 +604,14 @@ describe('API Client', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('registerUser should send a POST request with user data', async () => {
|
it('registerUser should send a POST request with user data', async () => {
|
||||||
await apiClient.registerUser('test@example.com', 'password123', 'Test User');
|
const userData = createMockRegisterUserPayload({
|
||||||
expect(capturedUrl?.pathname).toBe('/api/auth/register');
|
|
||||||
expect(capturedBody).toEqual({
|
|
||||||
email: 'test@example.com',
|
email: 'test@example.com',
|
||||||
password: 'password123',
|
password: 'password123',
|
||||||
full_name: 'Test User',
|
full_name: 'Test User',
|
||||||
avatar_url: undefined,
|
|
||||||
});
|
});
|
||||||
|
await apiClient.registerUser(userData.email, userData.password, userData.full_name);
|
||||||
|
expect(capturedUrl?.pathname).toBe('/api/auth/register');
|
||||||
|
expect(capturedBody).toEqual(userData);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('deleteUserAccount should send a DELETE request with the confirmation password', async () => {
|
it('deleteUserAccount should send a DELETE request with the confirmation password', async () => {
|
||||||
@@ -654,7 +639,7 @@ describe('API Client', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('updateUserAddress should send a PUT request with address data', async () => {
|
it('updateUserAddress should send a PUT request with address data', async () => {
|
||||||
const addressData = { address_line_1: '123 Main St', city: 'Anytown' };
|
const addressData = createMockAddressPayload({ address_line_1: '123 Main St', city: 'Anytown' });
|
||||||
await apiClient.updateUserAddress(addressData);
|
await apiClient.updateUserAddress(addressData);
|
||||||
expect(capturedUrl?.pathname).toBe('/api/users/profile/address');
|
expect(capturedUrl?.pathname).toBe('/api/users/profile/address');
|
||||||
expect(capturedBody).toEqual(addressData);
|
expect(capturedBody).toEqual(addressData);
|
||||||
@@ -942,53 +927,49 @@ describe('API Client', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('logSearchQuery should send a POST request with query data', async () => {
|
it('logSearchQuery should send a POST request with query data', async () => {
|
||||||
const queryData = { query_text: 'apples', result_count: 10, was_successful: true };
|
const queryData = createMockSearchQueryPayload({ query_text: 'apples', result_count: 10, was_successful: true });
|
||||||
await apiClient.logSearchQuery(queryData);
|
await apiClient.logSearchQuery(queryData);
|
||||||
expect(capturedUrl?.pathname).toBe('/api/search/log');
|
expect(capturedUrl?.pathname).toBe('/api/search/log');
|
||||||
expect(capturedBody).toEqual(queryData);
|
expect(capturedBody).toEqual(queryData);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('trackFlyerItemInteraction should log a warning on failure', async () => {
|
it('trackFlyerItemInteraction should log a warning on failure', async () => {
|
||||||
const { logger } = await import('./logger.client');
|
|
||||||
const apiError = new Error('Network failed');
|
const apiError = new Error('Network failed');
|
||||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
||||||
|
const { logger } = await import('./logger.client');
|
||||||
|
|
||||||
// We can now await this properly because we added 'return' in apiClient.ts
|
// We can now await this properly because we added 'return' in apiClient.ts
|
||||||
await apiClient.trackFlyerItemInteraction(123, 'click');
|
await apiClient.trackFlyerItemInteraction(123, 'click');
|
||||||
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
|
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
|
||||||
error: apiError,
|
error: apiError,
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
|
|
||||||
error: apiError,
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('logSearchQuery should log a warning on failure', async () => {
|
it('logSearchQuery should log a warning on failure', async () => {
|
||||||
const { logger } = await import('./logger.client');
|
|
||||||
const apiError = new Error('Network failed');
|
const apiError = new Error('Network failed');
|
||||||
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
vi.mocked(global.fetch).mockRejectedValue(apiError);
|
||||||
|
const { logger } = await import('./logger.client');
|
||||||
|
|
||||||
await apiClient.logSearchQuery({
|
const queryData = createMockSearchQueryPayload({
|
||||||
query_text: 'test',
|
query_text: 'test',
|
||||||
result_count: 0,
|
result_count: 0,
|
||||||
was_successful: false,
|
was_successful: false,
|
||||||
});
|
});
|
||||||
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
|
await apiClient.logSearchQuery(queryData);
|
||||||
|
|
||||||
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
|
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Authentication API Functions', () => {
|
describe('Authentication API Functions', () => {
|
||||||
it('loginUser should send a POST request with credentials', async () => {
|
it('loginUser should send a POST request with credentials', async () => {
|
||||||
await apiClient.loginUser('test@example.com', 'password123', true);
|
const loginData = createMockLoginPayload({
|
||||||
expect(capturedUrl?.pathname).toBe('/api/auth/login');
|
|
||||||
expect(capturedBody).toEqual({
|
|
||||||
email: 'test@example.com',
|
email: 'test@example.com',
|
||||||
password: 'password123',
|
password: 'password123',
|
||||||
rememberMe: true,
|
rememberMe: true,
|
||||||
});
|
});
|
||||||
|
await apiClient.loginUser(loginData.email, loginData.password, loginData.rememberMe);
|
||||||
|
expect(capturedUrl?.pathname).toBe('/api/auth/login');
|
||||||
|
expect(capturedBody).toEqual(loginData);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -4,8 +4,11 @@
|
|||||||
* It is configured via environment variables and should only be used on the server.
|
* It is configured via environment variables and should only be used on the server.
|
||||||
*/
|
*/
|
||||||
import nodemailer from 'nodemailer';
|
import nodemailer from 'nodemailer';
|
||||||
|
import type { Job } from 'bullmq';
|
||||||
import type { Logger } from 'pino';
|
import type { Logger } from 'pino';
|
||||||
|
import { logger as globalLogger } from './logger.server';
|
||||||
import { WatchedItemDeal } from '../types';
|
import { WatchedItemDeal } from '../types';
|
||||||
|
import type { EmailJobData } from './queues.server';
|
||||||
|
|
||||||
// 1. Create a Nodemailer transporter using SMTP configuration from environment variables.
|
// 1. Create a Nodemailer transporter using SMTP configuration from environment variables.
|
||||||
// For development, you can use a service like Ethereal (https://ethereal.email/)
|
// For development, you can use a service like Ethereal (https://ethereal.email/)
|
||||||
@@ -20,18 +23,11 @@ const transporter = nodemailer.createTransport({
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
interface EmailOptions {
|
|
||||||
to: string;
|
|
||||||
subject: string;
|
|
||||||
text: string;
|
|
||||||
html: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Sends an email using the pre-configured transporter.
|
* Sends an email using the pre-configured transporter.
|
||||||
* @param options The email options, including recipient, subject, and body.
|
* @param options The email options, including recipient, subject, and body.
|
||||||
*/
|
*/
|
||||||
export const sendEmail = async (options: EmailOptions, logger: Logger) => {
|
export const sendEmail = async (options: EmailJobData, logger: Logger) => {
|
||||||
const mailOptions = {
|
const mailOptions = {
|
||||||
from: `"Flyer Crawler" <${process.env.SMTP_FROM_EMAIL}>`, // sender address
|
from: `"Flyer Crawler" <${process.env.SMTP_FROM_EMAIL}>`, // sender address
|
||||||
to: options.to,
|
to: options.to,
|
||||||
@@ -40,16 +36,37 @@ export const sendEmail = async (options: EmailOptions, logger: Logger) => {
|
|||||||
html: options.html,
|
html: options.html,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const info = await transporter.sendMail(mailOptions);
|
||||||
|
logger.info(
|
||||||
|
{ to: options.to, subject: options.subject, messageId: info.messageId },
|
||||||
|
`Email sent successfully.`,
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Processes an email sending job from the queue.
|
||||||
|
* This is the entry point for the email worker.
|
||||||
|
* It encapsulates logging and error handling for the job.
|
||||||
|
* @param job The BullMQ job object.
|
||||||
|
*/
|
||||||
|
export const processEmailJob = async (job: Job<EmailJobData>) => {
|
||||||
|
const jobLogger = globalLogger.child({
|
||||||
|
jobId: job.id,
|
||||||
|
jobName: job.name,
|
||||||
|
recipient: job.data.to,
|
||||||
|
});
|
||||||
|
|
||||||
|
jobLogger.info(`Picked up email job.`);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const info = await transporter.sendMail(mailOptions);
|
await sendEmail(job.data, jobLogger);
|
||||||
logger.info(
|
|
||||||
{ to: options.to, subject: options.subject, messageId: info.messageId },
|
|
||||||
`Email sent successfully.`,
|
|
||||||
);
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error({ err: error, to: options.to, subject: options.subject }, 'Failed to send email.');
|
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||||
// Re-throwing the error is important so the background job knows it failed.
|
jobLogger.error(
|
||||||
throw error;
|
{ err: wrappedError, jobData: job.data, attemptsMade: job.attemptsMade },
|
||||||
|
`Email job failed.`,
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -92,16 +109,22 @@ export const sendDealNotificationEmail = async (
|
|||||||
|
|
||||||
const text = `Hi ${recipientName},\n\nWe found some great deals on items you're watching. Visit the deals page on the site to learn more.\n\nFlyer Crawler`;
|
const text = `Hi ${recipientName},\n\nWe found some great deals on items you're watching. Visit the deals page on the site to learn more.\n\nFlyer Crawler`;
|
||||||
|
|
||||||
// Use the generic sendEmail function to send the composed email
|
try {
|
||||||
await sendEmail(
|
// Use the generic sendEmail function to send the composed email
|
||||||
{
|
await sendEmail(
|
||||||
to,
|
{
|
||||||
subject,
|
to,
|
||||||
text,
|
subject,
|
||||||
html,
|
text,
|
||||||
},
|
html,
|
||||||
logger,
|
},
|
||||||
);
|
logger,
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
const error = err instanceof Error ? err : new Error(String(err));
|
||||||
|
logger.error({ err: error, to, subject }, 'Failed to send email.');
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
75
src/services/flyerAiProcessor.server.test.ts
Normal file
75
src/services/flyerAiProcessor.server.test.ts
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
// src/services/flyerAiProcessor.server.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||||
|
import { AiDataValidationError } from './processingErrors';
|
||||||
|
import { logger } from './logger.server';
|
||||||
|
import type { AIService } from './aiService.server';
|
||||||
|
import type { PersonalizationRepository } from './db/personalization.db';
|
||||||
|
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||||
|
|
||||||
|
vi.mock('./logger.server', () => ({
|
||||||
|
logger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
child: vi.fn().mockReturnThis(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
const createMockJobData = (data: Partial<FlyerJobData>): FlyerJobData => ({
|
||||||
|
filePath: '/tmp/flyer.jpg',
|
||||||
|
originalFileName: 'flyer.jpg',
|
||||||
|
checksum: 'checksum-123',
|
||||||
|
...data,
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('FlyerAiProcessor', () => {
|
||||||
|
let service: FlyerAiProcessor;
|
||||||
|
let mockAiService: AIService;
|
||||||
|
let mockPersonalizationRepo: PersonalizationRepository;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
mockAiService = {
|
||||||
|
extractCoreDataFromFlyerImage: vi.fn(),
|
||||||
|
} as unknown as AIService;
|
||||||
|
mockPersonalizationRepo = {
|
||||||
|
getAllMasterItems: vi.fn().mockResolvedValue([]),
|
||||||
|
} as unknown as PersonalizationRepository;
|
||||||
|
|
||||||
|
service = new FlyerAiProcessor(mockAiService, mockPersonalizationRepo);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call AI service and return validated data on success', async () => {
|
||||||
|
const jobData = createMockJobData({});
|
||||||
|
const mockAiResponse = {
|
||||||
|
store_name: 'AI Store',
|
||||||
|
valid_from: '2024-01-01',
|
||||||
|
valid_to: '2024-01-07',
|
||||||
|
store_address: '123 AI St',
|
||||||
|
items: [],
|
||||||
|
};
|
||||||
|
vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
|
||||||
|
|
||||||
|
const result = await service.extractAndValidateData([], jobData, logger);
|
||||||
|
|
||||||
|
expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mockPersonalizationRepo.getAllMasterItems).toHaveBeenCalledTimes(1);
|
||||||
|
expect(result).toEqual(mockAiResponse);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw AiDataValidationError if AI response validation fails', async () => {
|
||||||
|
const jobData = createMockJobData({});
|
||||||
|
// Mock AI to return data missing a required field ('store_name')
|
||||||
|
vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue({
|
||||||
|
valid_from: '2024-01-01',
|
||||||
|
items: [],
|
||||||
|
} as any);
|
||||||
|
|
||||||
|
await expect(service.extractAndValidateData([], jobData, logger)).rejects.toThrow(
|
||||||
|
AiDataValidationError,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
88
src/services/flyerAiProcessor.server.ts
Normal file
88
src/services/flyerAiProcessor.server.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
// src/services/flyerAiProcessor.server.ts
|
||||||
|
import { z } from 'zod';
|
||||||
|
import type { Logger } from 'pino';
|
||||||
|
import type { AIService } from './aiService.server';
|
||||||
|
import type { PersonalizationRepository } from './db/personalization.db';
|
||||||
|
import { AiDataValidationError } from './processingErrors';
|
||||||
|
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||||
|
|
||||||
|
// Helper for consistent required string validation (handles missing/null/empty)
|
||||||
|
const requiredString = (message: string) =>
|
||||||
|
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||||
|
|
||||||
|
// --- Zod Schemas for AI Response Validation ---
|
||||||
|
const ExtractedFlyerItemSchema = z.object({
|
||||||
|
item: z.string().nullable(),
|
||||||
|
price_display: z.string().nullable(),
|
||||||
|
price_in_cents: z.number().nullable(),
|
||||||
|
quantity: z.string().nullable(),
|
||||||
|
category_name: z.string().nullable(),
|
||||||
|
master_item_id: z.number().nullish(),
|
||||||
|
});
|
||||||
|
|
||||||
|
export const AiFlyerDataSchema = z.object({
|
||||||
|
store_name: z.string().nullable(),
|
||||||
|
valid_from: z.string().nullable(),
|
||||||
|
valid_to: z.string().nullable(),
|
||||||
|
store_address: z.string().nullable(),
|
||||||
|
items: z.array(ExtractedFlyerItemSchema),
|
||||||
|
});
|
||||||
|
|
||||||
|
export type ValidatedAiDataType = z.infer<typeof AiFlyerDataSchema>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This class encapsulates the logic for interacting with the AI service
|
||||||
|
* to extract and validate data from flyer images.
|
||||||
|
*/
|
||||||
|
export class FlyerAiProcessor {
|
||||||
|
constructor(
|
||||||
|
private ai: AIService,
|
||||||
|
private personalizationRepo: PersonalizationRepository,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates the raw data from the AI against the Zod schema.
|
||||||
|
*/
|
||||||
|
private _validateAiData(
|
||||||
|
extractedData: unknown,
|
||||||
|
logger: Logger,
|
||||||
|
): ValidatedAiDataType {
|
||||||
|
const validationResult = AiFlyerDataSchema.safeParse(extractedData);
|
||||||
|
if (!validationResult.success) {
|
||||||
|
const errors = validationResult.error.flatten();
|
||||||
|
logger.error({ errors, rawData: extractedData }, 'AI response failed validation.');
|
||||||
|
throw new AiDataValidationError(
|
||||||
|
'AI response validation failed. The returned data structure is incorrect.',
|
||||||
|
errors,
|
||||||
|
extractedData,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`AI extracted ${validationResult.data.items.length} items.`);
|
||||||
|
return validationResult.data;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calls the AI service to extract structured data from the flyer images and validates the response.
|
||||||
|
*/
|
||||||
|
public async extractAndValidateData(
|
||||||
|
imagePaths: { path: string; mimetype: string }[],
|
||||||
|
jobData: FlyerJobData,
|
||||||
|
logger: Logger,
|
||||||
|
): Promise<ValidatedAiDataType> {
|
||||||
|
logger.info(`Starting AI data extraction.`);
|
||||||
|
const { submitterIp, userProfileAddress } = jobData;
|
||||||
|
const masterItems = await this.personalizationRepo.getAllMasterItems(logger);
|
||||||
|
logger.debug(`Retrieved ${masterItems.length} master items for AI matching.`);
|
||||||
|
|
||||||
|
const extractedData = await this.ai.extractCoreDataFromFlyerImage(
|
||||||
|
imagePaths,
|
||||||
|
masterItems,
|
||||||
|
submitterIp,
|
||||||
|
userProfileAddress,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
|
|
||||||
|
return this._validateAiData(extractedData, logger);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,7 +4,7 @@ import { FlyerDataTransformer } from './flyerDataTransformer';
|
|||||||
import { logger as mockLogger } from './logger.server';
|
import { logger as mockLogger } from './logger.server';
|
||||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||||
import type { z } from 'zod';
|
import type { z } from 'zod';
|
||||||
import type { AiFlyerDataSchema } from './flyerProcessingService.server';
|
import type { AiFlyerDataSchema } from './flyerAiProcessor.server';
|
||||||
import type { FlyerItemInsert } from '../types';
|
import type { FlyerItemInsert } from '../types';
|
||||||
|
|
||||||
// Mock the dependencies
|
// Mock the dependencies
|
||||||
@@ -109,9 +109,6 @@ describe('FlyerDataTransformer', () => {
|
|||||||
view_count: 0,
|
view_count: 0,
|
||||||
click_count: 0,
|
click_count: 0,
|
||||||
}),
|
}),
|
||||||
); // Use a more specific type assertion to check for the added property.
|
|
||||||
expect((itemsForDb[0] as FlyerItemInsert & { updated_at: string }).updated_at).toBeTypeOf(
|
|
||||||
'string',
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// 3. Check that generateFlyerIcon was called correctly
|
// 3. Check that generateFlyerIcon was called correctly
|
||||||
@@ -153,6 +150,9 @@ describe('FlyerDataTransformer', () => {
|
|||||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||||
'Starting data transformation from AI output to database format.',
|
'Starting data transformation from AI output to database format.',
|
||||||
);
|
);
|
||||||
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
|
'AI did not return a store name. Using fallback "Unknown Store (auto)".',
|
||||||
|
);
|
||||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||||
{ itemCount: 0, storeName: 'Unknown Store (auto)' },
|
{ itemCount: 0, storeName: 'Unknown Store (auto)' },
|
||||||
'Data transformation complete.',
|
'Data transformation complete.',
|
||||||
@@ -172,4 +172,62 @@ describe('FlyerDataTransformer', () => {
|
|||||||
uploaded_by: undefined, // Should be undefined
|
uploaded_by: undefined, // Should be undefined
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should correctly normalize item fields with null, undefined, or empty values', async () => {
|
||||||
|
// Arrange
|
||||||
|
const extractedData: z.infer<typeof AiFlyerDataSchema> = {
|
||||||
|
store_name: 'Test Store',
|
||||||
|
valid_from: '2024-01-01',
|
||||||
|
valid_to: '2024-01-07',
|
||||||
|
store_address: '123 Test St',
|
||||||
|
items: [
|
||||||
|
// Case 1: All fields are null or undefined
|
||||||
|
{
|
||||||
|
item: null,
|
||||||
|
price_display: null,
|
||||||
|
price_in_cents: null,
|
||||||
|
quantity: null,
|
||||||
|
category_name: null,
|
||||||
|
master_item_id: null,
|
||||||
|
},
|
||||||
|
// Case 2: Fields are empty strings
|
||||||
|
{
|
||||||
|
item: ' ', // whitespace only
|
||||||
|
price_display: '',
|
||||||
|
price_in_cents: 200,
|
||||||
|
quantity: '',
|
||||||
|
category_name: '',
|
||||||
|
master_item_id: 20,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
|
||||||
|
|
||||||
|
// Act
|
||||||
|
const { itemsForDb } = await transformer.transform(
|
||||||
|
extractedData,
|
||||||
|
imagePaths,
|
||||||
|
'file.pdf',
|
||||||
|
'checksum',
|
||||||
|
'user-1',
|
||||||
|
mockLogger,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(itemsForDb).toHaveLength(2);
|
||||||
|
|
||||||
|
// Check Case 1 (null/undefined values)
|
||||||
|
expect(itemsForDb[0]).toEqual(
|
||||||
|
expect.objectContaining({
|
||||||
|
item: 'Unknown Item', price_display: '', quantity: '', category_name: 'Other/Miscellaneous', master_item_id: undefined,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check Case 2 (empty string values)
|
||||||
|
expect(itemsForDb[1]).toEqual(
|
||||||
|
expect.objectContaining({
|
||||||
|
item: 'Unknown Item', price_display: '', quantity: '', category_name: 'Other/Miscellaneous', master_item_id: 20,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import path from 'path';
|
|||||||
import type { z } from 'zod';
|
import type { z } from 'zod';
|
||||||
import type { Logger } from 'pino';
|
import type { Logger } from 'pino';
|
||||||
import type { FlyerInsert, FlyerItemInsert } from '../types';
|
import type { FlyerInsert, FlyerItemInsert } from '../types';
|
||||||
import type { AiFlyerDataSchema } from './flyerProcessingService.server';
|
import type { AiFlyerDataSchema } from './flyerAiProcessor.server';
|
||||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -11,6 +11,31 @@ import { generateFlyerIcon } from '../utils/imageProcessor';
|
|||||||
* into the structured format required for database insertion (FlyerInsert and FlyerItemInsert).
|
* into the structured format required for database insertion (FlyerInsert and FlyerItemInsert).
|
||||||
*/
|
*/
|
||||||
export class FlyerDataTransformer {
|
export class FlyerDataTransformer {
|
||||||
|
/**
|
||||||
|
* Normalizes a single raw item from the AI, providing default values for nullable fields.
|
||||||
|
* @param item The raw item object from the AI.
|
||||||
|
* @returns A normalized item object ready for database insertion.
|
||||||
|
*/
|
||||||
|
private _normalizeItem(
|
||||||
|
item: z.infer<typeof AiFlyerDataSchema>['items'][number],
|
||||||
|
): FlyerItemInsert {
|
||||||
|
return {
|
||||||
|
...item,
|
||||||
|
// Use logical OR to default falsy values (null, undefined, '') to a fallback.
|
||||||
|
// The trim is important for cases where the AI returns only whitespace.
|
||||||
|
item: String(item.item || '').trim() || 'Unknown Item',
|
||||||
|
// Use nullish coalescing to default only null/undefined to an empty string.
|
||||||
|
price_display: String(item.price_display ?? ''),
|
||||||
|
quantity: String(item.quantity ?? ''),
|
||||||
|
// Use logical OR to default falsy category names (null, undefined, '') to a fallback.
|
||||||
|
category_name: String(item.category_name || 'Other/Miscellaneous'),
|
||||||
|
// Use nullish coalescing to convert null to undefined for the database.
|
||||||
|
master_item_id: item.master_item_id ?? undefined,
|
||||||
|
view_count: 0,
|
||||||
|
click_count: 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Transforms AI-extracted data into database-ready flyer and item records.
|
* Transforms AI-extracted data into database-ready flyer and item records.
|
||||||
* @param extractedData The validated data from the AI.
|
* @param extractedData The validated data from the AI.
|
||||||
@@ -38,34 +63,19 @@ export class FlyerDataTransformer {
|
|||||||
logger,
|
logger,
|
||||||
);
|
);
|
||||||
|
|
||||||
const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => ({
|
const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => this._normalizeItem(item));
|
||||||
...item,
|
|
||||||
// Ensure 'item' is always a string, defaulting to 'Unknown Item' if null/undefined/empty.
|
const storeName = extractedData.store_name || 'Unknown Store (auto)';
|
||||||
item:
|
if (!extractedData.store_name) {
|
||||||
item.item === null || item.item === undefined || String(item.item).trim() === ''
|
logger.warn('AI did not return a store name. Using fallback "Unknown Store (auto)".');
|
||||||
? 'Unknown Item'
|
}
|
||||||
: String(item.item),
|
|
||||||
// Ensure 'price_display' is always a string, defaulting to empty if null/undefined.
|
|
||||||
price_display:
|
|
||||||
item.price_display === null || item.price_display === undefined
|
|
||||||
? ''
|
|
||||||
: String(item.price_display),
|
|
||||||
// Ensure 'quantity' is always a string, defaulting to empty if null/undefined.
|
|
||||||
quantity: item.quantity === null || item.quantity === undefined ? '' : String(item.quantity),
|
|
||||||
// Ensure 'category_name' is always a string, defaulting to 'Other/Miscellaneous' if null/undefined.
|
|
||||||
category_name: item.category_name === null || item.category_name === undefined ? 'Other/Miscellaneous' : String(item.category_name),
|
|
||||||
master_item_id: item.master_item_id === null ? undefined : item.master_item_id, // Convert null to undefined
|
|
||||||
view_count: 0,
|
|
||||||
click_count: 0,
|
|
||||||
updated_at: new Date().toISOString(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
const flyerData: FlyerInsert = {
|
const flyerData: FlyerInsert = {
|
||||||
file_name: originalFileName,
|
file_name: originalFileName,
|
||||||
image_url: `/flyer-images/${path.basename(firstImage)}`,
|
image_url: `/flyer-images/${path.basename(firstImage)}`,
|
||||||
icon_url: `/flyer-images/icons/${iconFileName}`,
|
icon_url: `/flyer-images/icons/${iconFileName}`,
|
||||||
checksum,
|
checksum,
|
||||||
store_name: extractedData.store_name || 'Unknown Store (auto)',
|
store_name: storeName,
|
||||||
valid_from: extractedData.valid_from,
|
valid_from: extractedData.valid_from,
|
||||||
valid_to: extractedData.valid_to,
|
valid_to: extractedData.valid_to,
|
||||||
store_address: extractedData.store_address, // The number of items is now calculated directly from the transformed data.
|
store_address: extractedData.store_address, // The number of items is now calculated directly from the transformed data.
|
||||||
|
|||||||
129
src/services/flyerFileHandler.server.test.ts
Normal file
129
src/services/flyerFileHandler.server.test.ts
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
// src/services/flyerFileHandler.server.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { Job } from 'bullmq';
|
||||||
|
import type { Dirent } from 'node:fs';
|
||||||
|
import sharp from 'sharp';
|
||||||
|
import { FlyerFileHandler, ICommandExecutor, IFileSystem } from './flyerFileHandler.server';
|
||||||
|
import { PdfConversionError, UnsupportedFileTypeError } from './processingErrors';
|
||||||
|
import { logger } from './logger.server';
|
||||||
|
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||||
|
|
||||||
|
// Mock dependencies
|
||||||
|
vi.mock('sharp', () => {
|
||||||
|
const mockSharpInstance = {
|
||||||
|
png: vi.fn().mockReturnThis(),
|
||||||
|
toFile: vi.fn().mockResolvedValue({}),
|
||||||
|
};
|
||||||
|
return {
|
||||||
|
__esModule: true,
|
||||||
|
default: vi.fn(() => mockSharpInstance),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
vi.mock('./logger.server', () => ({
|
||||||
|
logger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
child: vi.fn().mockReturnThis(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
const createMockJob = (data: Partial<FlyerJobData>): Job<FlyerJobData> => {
|
||||||
|
return {
|
||||||
|
id: 'job-1',
|
||||||
|
data: {
|
||||||
|
filePath: '/tmp/flyer.jpg',
|
||||||
|
originalFileName: 'flyer.jpg',
|
||||||
|
checksum: 'checksum-123',
|
||||||
|
...data,
|
||||||
|
},
|
||||||
|
updateProgress: vi.fn(),
|
||||||
|
} as unknown as Job<FlyerJobData>;
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('FlyerFileHandler', () => {
|
||||||
|
let service: FlyerFileHandler;
|
||||||
|
let mockFs: IFileSystem;
|
||||||
|
let mockExec: ICommandExecutor;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
mockFs = {
|
||||||
|
readdir: vi.fn().mockResolvedValue([]),
|
||||||
|
unlink: vi.fn(),
|
||||||
|
};
|
||||||
|
mockExec = vi.fn().mockResolvedValue({ stdout: 'success', stderr: '' });
|
||||||
|
|
||||||
|
service = new FlyerFileHandler(mockFs, mockExec);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should convert a PDF and return image paths', async () => {
|
||||||
|
const job = createMockJob({ filePath: '/tmp/flyer.pdf' });
|
||||||
|
vi.mocked(mockFs.readdir).mockResolvedValue([
|
||||||
|
{ name: 'flyer-1.jpg' },
|
||||||
|
{ name: 'flyer-2.jpg' },
|
||||||
|
] as Dirent[]);
|
||||||
|
|
||||||
|
const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
|
||||||
|
'/tmp/flyer.pdf',
|
||||||
|
job,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(mockExec).toHaveBeenCalledWith('pdftocairo -jpeg -r 150 "/tmp/flyer.pdf" "/tmp/flyer"');
|
||||||
|
expect(imagePaths).toHaveLength(2);
|
||||||
|
expect(imagePaths[0].path).toContain('flyer-1.jpg');
|
||||||
|
expect(createdImagePaths).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw PdfConversionError if PDF conversion yields no images', async () => {
|
||||||
|
const job = createMockJob({ filePath: '/tmp/flyer.pdf' });
|
||||||
|
vi.mocked(mockFs.readdir).mockResolvedValue([]); // No images found
|
||||||
|
|
||||||
|
await expect(service.prepareImageInputs('/tmp/flyer.pdf', job, logger)).rejects.toThrow(
|
||||||
|
PdfConversionError,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle supported image types directly', async () => {
|
||||||
|
const job = createMockJob({ filePath: '/tmp/flyer.jpg' });
|
||||||
|
const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
|
||||||
|
'/tmp/flyer.jpg',
|
||||||
|
job,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(imagePaths).toEqual([{ path: '/tmp/flyer.jpg', mimetype: 'image/jpeg' }]);
|
||||||
|
expect(createdImagePaths).toEqual([]);
|
||||||
|
expect(mockExec).not.toHaveBeenCalled();
|
||||||
|
expect(sharp).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should convert convertible image types to PNG', async () => {
|
||||||
|
const job = createMockJob({ filePath: '/tmp/flyer.gif' });
|
||||||
|
const mockSharpInstance = sharp('/tmp/flyer.gif');
|
||||||
|
vi.mocked(mockSharpInstance.toFile).mockResolvedValue({} as any);
|
||||||
|
|
||||||
|
const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
|
||||||
|
'/tmp/flyer.gif',
|
||||||
|
job,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(sharp).toHaveBeenCalledWith('/tmp/flyer.gif');
|
||||||
|
expect(mockSharpInstance.png).toHaveBeenCalled();
|
||||||
|
expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-converted.png');
|
||||||
|
expect(imagePaths).toEqual([{ path: '/tmp/flyer-converted.png', mimetype: 'image/png' }]);
|
||||||
|
expect(createdImagePaths).toEqual(['/tmp/flyer-converted.png']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw UnsupportedFileTypeError for unsupported types', async () => {
|
||||||
|
const job = createMockJob({ filePath: '/tmp/document.txt' });
|
||||||
|
await expect(service.prepareImageInputs('/tmp/document.txt', job, logger)).rejects.toThrow(
|
||||||
|
UnsupportedFileTypeError,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
207
src/services/flyerFileHandler.server.ts
Normal file
207
src/services/flyerFileHandler.server.ts
Normal file
@@ -0,0 +1,207 @@
|
|||||||
|
// src/services/flyerFileHandler.server.ts
|
||||||
|
import path from 'path';
|
||||||
|
import sharp from 'sharp';
|
||||||
|
import type { Dirent } from 'node:fs';
|
||||||
|
import type { Job } from 'bullmq';
|
||||||
|
import type { Logger } from 'pino';
|
||||||
|
import { PdfConversionError, UnsupportedFileTypeError } from './processingErrors';
|
||||||
|
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||||
|
|
||||||
|
// Define the image formats supported by the AI model.
// These extensions are passed through to the AI untouched (see _handleSupportedImageInput).
const SUPPORTED_IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.webp', '.heic', '.heif'];
// Define image formats that are not directly supported but can be converted to PNG.
// These are routed through sharp in _convertImageToPng before being handed to the AI.
const CONVERTIBLE_IMAGE_EXTENSIONS = ['.gif', '.tiff', '.svg', '.bmp'];

/**
 * Minimal file-system surface used by {@link FlyerFileHandler}.
 * Abstracted behind an interface so tests can inject a mock instead of `node:fs/promises`.
 */
export interface IFileSystem {
  /** Lists directory entries; `withFileTypes: true` is required so callers get `Dirent`s. */
  readdir(path: string, options: { withFileTypes: true }): Promise<Dirent[]>;
  /** Removes a single file. */
  unlink(path: string): Promise<void>;
}

/**
 * Shell-command executor (e.g. a promisified `child_process.exec`), injectable for testing.
 * Resolves with captured stdout/stderr; expected to reject when the command fails.
 */
export interface ICommandExecutor {
  (command: string): Promise<{ stdout: string; stderr: string }>;
}
|
||||||
|
|
||||||
|
/**
 * This class encapsulates the logic for handling different file types (PDF, images)
 * and preparing them for AI processing.
 *
 * Responsibilities:
 *  - PDFs are rasterized to per-page JPEGs via the external `pdftocairo` tool.
 *  - Directly supported image formats are passed through untouched.
 *  - Convertible formats (GIF, TIFF, SVG, BMP) are converted to PNG with sharp.
 *  - Anything else raises {@link UnsupportedFileTypeError}.
 */
export class FlyerFileHandler {
  constructor(
    // Injected file-system adapter (mockable in tests).
    private fs: IFileSystem,
    // Injected shell executor used to run pdftocairo (mockable in tests).
    private exec: ICommandExecutor,
  ) {}

  /**
   * Executes the pdftocairo command to convert the PDF.
   *
   * @param filePath - Absolute path of the source PDF.
   * @param outputFilePrefix - Path prefix pdftocairo appends page numbers to (e.g. `prefix-1.jpg`).
   * @returns Captured stdout/stderr from the command.
   * @throws {PdfConversionError} When the command itself fails (non-zero exit / rejection).
   */
  private async _executePdfConversion(
    filePath: string,
    outputFilePrefix: string,
    logger: Logger,
  ): Promise<{ stdout: string; stderr: string }> {
    // NOTE(review): filePath/outputFilePrefix are interpolated into a shell string; a `"` or
    // backtick in the path would break the quoting. Confirm upstream sanitizes upload paths,
    // or consider an execFile-style executor.
    const command = `pdftocairo -jpeg -r 150 "${filePath}" "${outputFilePrefix}"`;
    logger.info(`Executing PDF conversion command`);
    logger.debug({ command });
    try {
      const { stdout, stderr } = await this.exec(command);
      // pdftocairo writes diagnostics to stderr even on success, so stderr alone is not fatal.
      if (stdout) logger.debug({ stdout }, `[Worker] pdftocairo stdout for ${filePath}:`);
      if (stderr) logger.warn({ stderr }, `[Worker] pdftocairo stderr for ${filePath}:`);
      return { stdout, stderr };
    } catch (error) {
      // Promisified exec rejections carry a `stderr` field; surface it in the typed error.
      const execError = error as Error & { stderr?: string };
      const errorMessage = `The pdftocairo command failed for file: ${filePath}.`;
      logger.error({ err: execError, stderr: execError.stderr }, errorMessage);
      throw new PdfConversionError(errorMessage, execError.stderr);
    }
  }

  /**
   * Scans the output directory for generated JPEG images and returns their paths.
   *
   * @param outputDir - Directory pdftocairo wrote into.
   * @param outputFilePrefix - Prefix used for the conversion; only files whose names start
   *   with its basename and end in `.jpg` are collected.
   * @returns Absolute paths of the generated JPEGs, sorted numerically so page order is preserved
   *   (plain lexicographic sort would order `page-10` before `page-2`).
   */
  private async _collectGeneratedImages(
    outputDir: string,
    outputFilePrefix: string,
    logger: Logger,
  ): Promise<string[]> {
    logger.debug(`[Worker] Reading contents of output directory: ${outputDir}`);
    const filesInDir = await this.fs.readdir(outputDir, { withFileTypes: true });
    logger.debug(`[Worker] Found ${filesInDir.length} total entries in output directory.`);

    const generatedImages = filesInDir
      .filter((f) => f.name.startsWith(path.basename(outputFilePrefix)) && f.name.endsWith('.jpg'))
      // `numeric: true` makes "flyer-2" sort before "flyer-10".
      .sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true }));

    logger.debug(
      { imageNames: generatedImages.map((f) => f.name) },
      `Filtered down to ${generatedImages.length} generated JPGs.`,
    );

    return generatedImages.map((img) => path.join(outputDir, img.name));
  }

  /**
   * Converts a PDF file to a series of JPEG images using an external tool.
   *
   * @returns Paths of the JPEGs created alongside the PDF, one per page.
   * @throws {PdfConversionError} If the tool fails, or succeeds but produces zero images
   *   (blank/corrupt/password-protected PDF).
   */
  private async _convertPdfToImages(
    filePath: string,
    job: Job<FlyerJobData>,
    logger: Logger,
  ): Promise<string[]> {
    logger.info(`Starting PDF conversion for: ${filePath}`);
    // Progress update is user-visible via the BullMQ job.
    await job.updateProgress({ message: 'Converting PDF to images...' });

    // Output images are written next to the source PDF, named after it.
    const outputDir = path.dirname(filePath);
    // NOTE(review): the `'.pdf'` suffix strip is case-sensitive, while prepareImageInputs
    // lowercases the extension before routing here — a `.PDF` file would keep its extension
    // in the prefix. Confirm uploads are normalized to lowercase extensions.
    const outputFilePrefix = path.join(outputDir, path.basename(filePath, '.pdf'));
    logger.debug({ outputDir, outputFilePrefix }, `PDF output details`);

    const { stderr } = await this._executePdfConversion(filePath, outputFilePrefix, logger);

    const imagePaths = await this._collectGeneratedImages(outputDir, outputFilePrefix, logger);

    if (imagePaths.length === 0) {
      const errorMessage = `PDF conversion resulted in 0 images for file: ${filePath}. The PDF might be blank or corrupt.`;
      // Include stderr so the root cause of an "empty" conversion is inspectable.
      logger.error({ stderr }, `PdfConversionError: ${errorMessage}`);
      throw new PdfConversionError(errorMessage, stderr);
    }

    return imagePaths;
  }

  /**
   * Converts an image file (e.g., GIF, TIFF) to a PNG format that the AI can process.
   *
   * @returns Path of the new `<name>-converted.png` file, written next to the original.
   * @throws {Error} If sharp fails to read or write the image.
   */
  private async _convertImageToPng(filePath: string, logger: Logger): Promise<string> {
    const outputDir = path.dirname(filePath);
    // Strip the extension; the converted file is `<original-name>-converted.png`.
    const originalFileName = path.parse(path.basename(filePath)).name;
    const newFileName = `${originalFileName}-converted.png`;
    const outputPath = path.join(outputDir, newFileName);

    logger.info({ from: filePath, to: outputPath }, 'Converting unsupported image format to PNG.');

    try {
      await sharp(filePath).png().toFile(outputPath);
      return outputPath;
    } catch (error) {
      logger.error({ err: error, filePath }, 'Failed to convert image to PNG using sharp.');
      // Re-thrown as a plain Error (not a typed processing error), so callers treat this
      // as an unexpected failure rather than a known conversion category.
      throw new Error(`Image conversion to PNG failed for ${path.basename(filePath)}.`);
    }
  }

  /**
   * Handles PDF files by converting them to a series of JPEG images.
   * All generated files are reported in `createdImagePaths` so the caller can clean them up.
   */
  private async _handlePdfInput(
    filePath: string,
    job: Job<FlyerJobData>,
    logger: Logger,
  ): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
    const createdImagePaths = await this._convertPdfToImages(filePath, job, logger);
    // pdftocairo is invoked with -jpeg, so every generated page is image/jpeg.
    const imagePaths = createdImagePaths.map((p) => ({ path: p, mimetype: 'image/jpeg' }));
    logger.info(`Converted PDF to ${imagePaths.length} images.`);
    return { imagePaths, createdImagePaths };
  }

  /**
   * Handles image files that are directly supported by the AI.
   * No new files are created, so `createdImagePaths` is empty.
   */
  private async _handleSupportedImageInput(
    filePath: string,
    fileExt: string,
    logger: Logger,
  ): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
    logger.info(`Processing as a single image file: ${filePath}`);
    // Derive the MIME type from the extension; .jpg/.jpeg both map to image/jpeg,
    // everything else maps 1:1 (e.g. ".webp" -> "image/webp").
    const mimetype =
      fileExt === '.jpg' || fileExt === '.jpeg' ? 'image/jpeg' : `image/${fileExt.slice(1)}`;
    const imagePaths = [{ path: filePath, mimetype }];
    return { imagePaths, createdImagePaths: [] };
  }

  /**
   * Handles image files that need to be converted to PNG before AI processing.
   * The converted PNG is reported in `createdImagePaths` for later cleanup.
   */
  private async _handleConvertibleImageInput(
    filePath: string,
    logger: Logger,
  ): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
    const createdPngPath = await this._convertImageToPng(filePath, logger);
    const imagePaths = [{ path: createdPngPath, mimetype: 'image/png' }];
    const createdImagePaths = [createdPngPath];
    return { imagePaths, createdImagePaths };
  }

  /**
   * Throws an error for unsupported file types.
   *
   * @throws {UnsupportedFileTypeError} Always; return type `never` lets callers `return` this call.
   */
  private _handleUnsupportedInput(
    fileExt: string,
    originalFileName: string,
    logger: Logger,
  ): never {
    const errorMessage = `Unsupported file type: ${fileExt}. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.`;
    logger.error({ originalFileName, fileExt }, errorMessage);
    throw new UnsupportedFileTypeError(errorMessage);
  }

  /**
   * Prepares the input images for the AI service. If the input is a PDF, it's converted to images.
   *
   * Routing is by (lowercased) file extension, not content sniffing:
   * PDF -> rasterize; supported image -> pass through; convertible image -> PNG; else -> throw.
   *
   * @param filePath - Path of the uploaded file on disk.
   * @param job - The BullMQ job (used for progress updates and the original file name).
   * @returns `imagePaths` to feed the AI plus `createdImagePaths` of any files this call created
   *   (for cleanup by the caller; the original input file is never included).
   * @throws {PdfConversionError} | {UnsupportedFileTypeError} See the individual handlers.
   */
  public async prepareImageInputs(
    filePath: string,
    job: Job<FlyerJobData>,
    logger: Logger,
  ): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
    const fileExt = path.extname(filePath).toLowerCase();

    if (fileExt === '.pdf') {
      return this._handlePdfInput(filePath, job, logger);
    }
    if (SUPPORTED_IMAGE_EXTENSIONS.includes(fileExt)) {
      return this._handleSupportedImageInput(filePath, fileExt, logger);
    }
    if (CONVERTIBLE_IMAGE_EXTENSIONS.includes(fileExt)) {
      return this._handleConvertibleImageInput(filePath, logger);
    }

    return this._handleUnsupportedInput(fileExt, job.data.originalFileName, logger);
  }
}
|
||||||
@@ -1,13 +1,13 @@
|
|||||||
// src/services/flyerProcessingService.server.test.ts
|
// src/services/flyerProcessingService.server.test.ts
|
||||||
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
|
||||||
import sharp from 'sharp';
|
import sharp from 'sharp';
|
||||||
import { Job } from 'bullmq';
|
import { Job, UnrecoverableError } from 'bullmq';
|
||||||
import type { Dirent } from 'node:fs';
|
import type { Dirent } from 'node:fs';
|
||||||
import type { Logger } from 'pino';
|
import type { Logger } from 'pino';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import { AiFlyerDataSchema } from './flyerProcessingService.server';
|
import { AiFlyerDataSchema } from './flyerAiProcessor.server';
|
||||||
import type { Flyer, FlyerInsert } from '../types';
|
import type { Flyer, FlyerInsert, FlyerItemInsert } from '../types';
|
||||||
|
import type { CleanupJobData } from './flyerProcessingService.server';
|
||||||
export interface FlyerJobData {
|
export interface FlyerJobData {
|
||||||
filePath: string;
|
filePath: string;
|
||||||
originalFileName: string;
|
originalFileName: string;
|
||||||
@@ -36,22 +36,10 @@ vi.mock('node:fs/promises', async (importOriginal) => {
|
|||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
// Mock sharp for the new image conversion logic
|
|
||||||
const mockSharpInstance = {
|
|
||||||
png: vi.fn(() => mockSharpInstance),
|
|
||||||
toFile: vi.fn().mockResolvedValue({}),
|
|
||||||
};
|
|
||||||
vi.mock('sharp', () => ({
|
|
||||||
__esModule: true,
|
|
||||||
default: vi.fn(() => mockSharpInstance),
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Import service and dependencies (FlyerJobData already imported from types above)
|
// Import service and dependencies (FlyerJobData already imported from types above)
|
||||||
import { FlyerProcessingService } from './flyerProcessingService.server';
|
import { FlyerProcessingService } from './flyerProcessingService.server';
|
||||||
import * as aiService from './aiService.server';
|
|
||||||
import * as db from './db/index.db';
|
import * as db from './db/index.db';
|
||||||
import { createFlyerAndItems } from './db/flyer.db';
|
import { createFlyerAndItems } from './db/flyer.db';
|
||||||
import * as imageProcessor from '../utils/imageProcessor';
|
|
||||||
import { createMockFlyer } from '../tests/utils/mockFactories';
|
import { createMockFlyer } from '../tests/utils/mockFactories';
|
||||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||||
import {
|
import {
|
||||||
@@ -59,6 +47,10 @@ import {
|
|||||||
PdfConversionError,
|
PdfConversionError,
|
||||||
UnsupportedFileTypeError,
|
UnsupportedFileTypeError,
|
||||||
} from './processingErrors';
|
} from './processingErrors';
|
||||||
|
import { FlyerFileHandler } from './flyerFileHandler.server';
|
||||||
|
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||||
|
import type { IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
|
||||||
|
import type { AIService } from './aiService.server';
|
||||||
|
|
||||||
// Mock dependencies
|
// Mock dependencies
|
||||||
vi.mock('./aiService.server', () => ({
|
vi.mock('./aiService.server', () => ({
|
||||||
@@ -73,9 +65,6 @@ vi.mock('./db/index.db', () => ({
|
|||||||
personalizationRepo: { getAllMasterItems: vi.fn() },
|
personalizationRepo: { getAllMasterItems: vi.fn() },
|
||||||
adminRepo: { logActivity: vi.fn() },
|
adminRepo: { logActivity: vi.fn() },
|
||||||
}));
|
}));
|
||||||
vi.mock('../utils/imageProcessor', () => ({
|
|
||||||
generateFlyerIcon: vi.fn().mockResolvedValue('icon-test.webp'),
|
|
||||||
}));
|
|
||||||
vi.mock('./logger.server', () => ({
|
vi.mock('./logger.server', () => ({
|
||||||
logger: {
|
logger: {
|
||||||
info: vi.fn(),
|
info: vi.fn(),
|
||||||
@@ -85,13 +74,15 @@ vi.mock('./logger.server', () => ({
|
|||||||
child: vi.fn().mockReturnThis(),
|
child: vi.fn().mockReturnThis(),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
vi.mock('./flyerFileHandler.server');
|
||||||
|
vi.mock('./flyerAiProcessor.server');
|
||||||
|
|
||||||
const mockedAiService = aiService as Mocked<typeof aiService>;
|
|
||||||
const mockedDb = db as Mocked<typeof db>;
|
const mockedDb = db as Mocked<typeof db>;
|
||||||
const mockedImageProcessor = imageProcessor as Mocked<typeof imageProcessor>;
|
|
||||||
|
|
||||||
describe('FlyerProcessingService', () => {
|
describe('FlyerProcessingService', () => {
|
||||||
let service: FlyerProcessingService;
|
let service: FlyerProcessingService;
|
||||||
|
let mockFileHandler: Mocked<FlyerFileHandler>;
|
||||||
|
let mockAiProcessor: Mocked<FlyerAiProcessor>;
|
||||||
const mockCleanupQueue = {
|
const mockCleanupQueue = {
|
||||||
add: vi.fn(),
|
add: vi.fn(),
|
||||||
};
|
};
|
||||||
@@ -112,30 +103,35 @@ describe('FlyerProcessingService', () => {
|
|||||||
itemsForDb: [],
|
itemsForDb: [],
|
||||||
});
|
});
|
||||||
|
|
||||||
// Default mock implementation for the promisified exec
|
|
||||||
mocks.execAsync.mockResolvedValue({ stdout: 'success', stderr: '' });
|
|
||||||
|
|
||||||
// Default mock for readdir returns an empty array of Dirent-like objects.
|
// Default mock for readdir returns an empty array of Dirent-like objects.
|
||||||
mocks.readdir.mockResolvedValue([]);
|
mocks.readdir.mockResolvedValue([]);
|
||||||
|
|
||||||
// Mock the file system adapter that will be passed to the service
|
// Mock the file system adapter that will be passed to the service
|
||||||
const mockFs = {
|
const mockFs: IFileSystem = {
|
||||||
readdir: mocks.readdir,
|
readdir: mocks.readdir,
|
||||||
unlink: mocks.unlink,
|
unlink: mocks.unlink,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
mockFileHandler = new FlyerFileHandler(mockFs, vi.fn()) as Mocked<FlyerFileHandler>;
|
||||||
|
mockAiProcessor = new FlyerAiProcessor(
|
||||||
|
{} as AIService,
|
||||||
|
mockedDb.personalizationRepo,
|
||||||
|
) as Mocked<FlyerAiProcessor>;
|
||||||
|
|
||||||
// Instantiate the service with all its dependencies mocked
|
// Instantiate the service with all its dependencies mocked
|
||||||
service = new FlyerProcessingService(
|
service = new FlyerProcessingService(
|
||||||
mockedAiService.aiService,
|
{} as AIService,
|
||||||
|
mockFileHandler,
|
||||||
|
mockAiProcessor,
|
||||||
mockedDb,
|
mockedDb,
|
||||||
mockFs,
|
mockFs,
|
||||||
mocks.execAsync,
|
vi.fn(),
|
||||||
mockCleanupQueue,
|
mockCleanupQueue,
|
||||||
new FlyerDataTransformer(),
|
new FlyerDataTransformer(),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Provide default successful mock implementations for dependencies
|
// Provide default successful mock implementations for dependencies
|
||||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockResolvedValue({
|
mockAiProcessor.extractAndValidateData.mockResolvedValue({
|
||||||
store_name: 'Mock Store',
|
store_name: 'Mock Store',
|
||||||
valid_from: '2024-01-01',
|
valid_from: '2024-01-01',
|
||||||
valid_to: '2024-01-07',
|
valid_to: '2024-01-07',
|
||||||
@@ -151,6 +147,11 @@ describe('FlyerProcessingService', () => {
|
|||||||
},
|
},
|
||||||
],
|
],
|
||||||
});
|
});
|
||||||
|
mockFileHandler.prepareImageInputs.mockResolvedValue({
|
||||||
|
imagePaths: [{ path: '/tmp/flyer.jpg', mimetype: 'image/jpeg' }],
|
||||||
|
createdImagePaths: [],
|
||||||
|
});
|
||||||
|
|
||||||
vi.mocked(createFlyerAndItems).mockResolvedValue({
|
vi.mocked(createFlyerAndItems).mockResolvedValue({
|
||||||
flyer: createMockFlyer({
|
flyer: createMockFlyer({
|
||||||
flyer_id: 1,
|
flyer_id: 1,
|
||||||
@@ -160,7 +161,6 @@ describe('FlyerProcessingService', () => {
|
|||||||
}),
|
}),
|
||||||
items: [],
|
items: [],
|
||||||
});
|
});
|
||||||
mockedImageProcessor.generateFlyerIcon.mockResolvedValue('icon-test.jpg');
|
|
||||||
vi.mocked(mockedDb.adminRepo.logActivity).mockResolvedValue();
|
vi.mocked(mockedDb.adminRepo.logActivity).mockResolvedValue();
|
||||||
// FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
|
// FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
|
||||||
vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
|
vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
|
||||||
@@ -181,6 +181,16 @@ describe('FlyerProcessingService', () => {
|
|||||||
} as unknown as Job<FlyerJobData>;
|
} as unknown as Job<FlyerJobData>;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const createMockCleanupJob = (data: CleanupJobData): Job<CleanupJobData> => {
|
||||||
|
return {
|
||||||
|
id: `cleanup-job-${data.flyerId}`,
|
||||||
|
data,
|
||||||
|
opts: { attempts: 3 },
|
||||||
|
attemptsMade: 1,
|
||||||
|
updateProgress: vi.fn(),
|
||||||
|
} as unknown as Job<CleanupJobData>;
|
||||||
|
};
|
||||||
|
|
||||||
describe('processJob (Orchestrator)', () => {
|
describe('processJob (Orchestrator)', () => {
|
||||||
it('should process an image file successfully and enqueue a cleanup job', async () => {
|
it('should process an image file successfully and enqueue a cleanup job', async () => {
|
||||||
const job = createMockJob({ filePath: '/tmp/flyer.jpg', originalFileName: 'flyer.jpg' });
|
const job = createMockJob({ filePath: '/tmp/flyer.jpg', originalFileName: 'flyer.jpg' });
|
||||||
@@ -188,10 +198,10 @@ describe('FlyerProcessingService', () => {
|
|||||||
const result = await service.processJob(job);
|
const result = await service.processJob(job);
|
||||||
|
|
||||||
expect(result).toEqual({ flyerId: 1 });
|
expect(result).toEqual({ flyerId: 1 });
|
||||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(1);
|
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(job.data.filePath, job, expect.any(Object));
|
||||||
|
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||||
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
|
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||||
expect(mockedDb.adminRepo.logActivity).toHaveBeenCalledTimes(1);
|
expect(mockedDb.adminRepo.logActivity).toHaveBeenCalledTimes(1);
|
||||||
expect(mocks.execAsync).not.toHaveBeenCalled();
|
|
||||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||||
'cleanup-flyer-files',
|
'cleanup-flyer-files',
|
||||||
{ flyerId: 1, paths: ['/tmp/flyer.jpg'] },
|
{ flyerId: 1, paths: ['/tmp/flyer.jpg'] },
|
||||||
@@ -202,29 +212,17 @@ describe('FlyerProcessingService', () => {
|
|||||||
it('should convert a PDF, process its images, and enqueue a cleanup job for all files', async () => {
|
it('should convert a PDF, process its images, and enqueue a cleanup job for all files', async () => {
|
||||||
const job = createMockJob({ filePath: '/tmp/flyer.pdf', originalFileName: 'flyer.pdf' });
|
const job = createMockJob({ filePath: '/tmp/flyer.pdf', originalFileName: 'flyer.pdf' });
|
||||||
|
|
||||||
// Mock readdir to return Dirent-like objects for the converted files
|
// Mock the file handler to return multiple created paths
|
||||||
mocks.readdir.mockResolvedValue([
|
const createdPaths = ['/tmp/flyer-1.jpg', '/tmp/flyer-2.jpg'];
|
||||||
{ name: 'flyer-1.jpg' },
|
mockFileHandler.prepareImageInputs.mockResolvedValue({
|
||||||
{ name: 'flyer-2.jpg' },
|
imagePaths: createdPaths.map(p => ({ path: p, mimetype: 'image/jpeg' })),
|
||||||
] as Dirent[]);
|
createdImagePaths: createdPaths,
|
||||||
|
});
|
||||||
|
|
||||||
await service.processJob(job);
|
await service.processJob(job);
|
||||||
|
|
||||||
// Verify that pdftocairo was called
|
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.pdf', job, expect.any(Object));
|
||||||
expect(mocks.execAsync).toHaveBeenCalledWith(
|
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||||
expect.stringContaining('pdftocairo -jpeg -r 150'),
|
|
||||||
);
|
|
||||||
// Verify AI service was called with the converted images
|
|
||||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(
|
|
||||||
expect.arrayContaining([
|
|
||||||
expect.objectContaining({ path: expect.stringContaining('flyer-1.jpg') }),
|
|
||||||
expect.objectContaining({ path: expect.stringContaining('flyer-2.jpg') }),
|
|
||||||
]),
|
|
||||||
expect.any(Array),
|
|
||||||
undefined, // submitterIp
|
|
||||||
undefined, // userProfileAddress
|
|
||||||
expect.any(Object), // The job-specific logger
|
|
||||||
);
|
|
||||||
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
|
expect(createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||||
// Verify cleanup job includes original PDF and both generated images
|
// Verify cleanup job includes original PDF and both generated images
|
||||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||||
@@ -233,8 +231,8 @@ describe('FlyerProcessingService', () => {
|
|||||||
flyerId: 1,
|
flyerId: 1,
|
||||||
paths: [
|
paths: [
|
||||||
'/tmp/flyer.pdf',
|
'/tmp/flyer.pdf',
|
||||||
expect.stringContaining('flyer-1.jpg'),
|
'/tmp/flyer-1.jpg',
|
||||||
expect.stringContaining('flyer-2.jpg'),
|
'/tmp/flyer-2.jpg',
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
expect.any(Object),
|
expect.any(Object),
|
||||||
@@ -243,42 +241,65 @@ describe('FlyerProcessingService', () => {
|
|||||||
|
|
||||||
it('should throw an error and not enqueue cleanup if the AI service fails', async () => {
|
it('should throw an error and not enqueue cleanup if the AI service fails', async () => {
|
||||||
const job = createMockJob({});
|
const job = createMockJob({});
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
const aiError = new Error('AI model exploded');
|
const aiError = new Error('AI model exploded');
|
||||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockRejectedValue(aiError);
|
mockAiProcessor.extractAndValidateData.mockRejectedValue(aiError);
|
||||||
|
|
||||||
await expect(service.processJob(job)).rejects.toThrow('AI model exploded');
|
await expect(service.processJob(job)).rejects.toThrow('AI model exploded');
|
||||||
|
|
||||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
errorCode: 'UNKNOWN_ERROR',
|
errorCode: 'UNKNOWN_ERROR',
|
||||||
message: 'AI model exploded',
|
message: 'AI model exploded',
|
||||||
|
}); // This was a duplicate, fixed.
|
||||||
|
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw UnrecoverableError for quota issues and not enqueue cleanup', async () => {
|
||||||
|
const job = createMockJob({});
|
||||||
|
// Simulate an AI error that contains a keyword for unrecoverable errors
|
||||||
|
const quotaError = new Error('AI model quota exceeded');
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
|
mockAiProcessor.extractAndValidateData.mockRejectedValue(quotaError);
|
||||||
|
|
||||||
|
await expect(service.processJob(job)).rejects.toThrow(UnrecoverableError);
|
||||||
|
|
||||||
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
|
errorCode: 'QUOTA_EXCEEDED',
|
||||||
|
message: 'An AI quota has been exceeded. Please try again later.',
|
||||||
});
|
});
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw PdfConversionError and not enqueue cleanup if PDF conversion fails', async () => {
|
it('should throw PdfConversionError and not enqueue cleanup if PDF conversion fails', async () => {
|
||||||
const job = createMockJob({ filePath: '/tmp/bad.pdf', originalFileName: 'bad.pdf' });
|
const job = createMockJob({ filePath: '/tmp/bad.pdf', originalFileName: 'bad.pdf' });
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
const conversionError = new PdfConversionError('Conversion failed', 'pdftocairo error');
|
const conversionError = new PdfConversionError('Conversion failed', 'pdftocairo error');
|
||||||
// Make the conversion step fail
|
mockFileHandler.prepareImageInputs.mockRejectedValue(conversionError);
|
||||||
mocks.execAsync.mockRejectedValue(conversionError);
|
|
||||||
|
|
||||||
await expect(service.processJob(job)).rejects.toThrow(conversionError);
|
await expect(service.processJob(job)).rejects.toThrow(conversionError);
|
||||||
|
|
||||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
errorCode: 'PDF_CONVERSION_FAILED',
|
errorCode: 'PDF_CONVERSION_FAILED',
|
||||||
message:
|
message:
|
||||||
'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.',
|
'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.', // This was a duplicate, fixed.
|
||||||
});
|
});
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw AiDataValidationError and not enqueue cleanup if AI validation fails', async () => {
|
it('should throw AiDataValidationError and not enqueue cleanup if AI validation fails', async () => {
|
||||||
const { logger } = await import('./logger.server');
|
const { logger } = await import('./logger.server');
|
||||||
const job = createMockJob({});
|
const job = createMockJob({});
|
||||||
const validationError = new AiDataValidationError('Validation failed', {}, {});
|
const validationError = new AiDataValidationError('Validation failed', {}, {});
|
||||||
// Make the AI extraction step fail with a validation error
|
mockAiProcessor.extractAndValidateData.mockRejectedValue(validationError);
|
||||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockRejectedValue(
|
|
||||||
validationError,
|
|
||||||
);
|
|
||||||
|
|
||||||
await expect(service.processJob(job)).rejects.toThrow(validationError);
|
await expect(service.processJob(job)).rejects.toThrow(validationError);
|
||||||
|
|
||||||
@@ -290,74 +311,38 @@ describe('FlyerProcessingService', () => {
|
|||||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
errorCode: 'AI_VALIDATION_FAILED',
|
errorCode: 'AI_VALIDATION_FAILED',
|
||||||
message:
|
message:
|
||||||
"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
|
"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.", // This was a duplicate, fixed.
|
||||||
});
|
});
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
// FIX: This test was incorrect. The service *does* support GIF conversion.
|
it('should handle convertible image types and include original and converted files in cleanup', async () => {
|
||||||
// It is now a success case, verifying that conversion works as intended.
|
|
||||||
it('should convert a GIF image to PNG and then process it', async () => {
|
|
||||||
console.log('\n--- [TEST LOG] ---: Starting GIF conversion success test...');
|
|
||||||
const job = createMockJob({ filePath: '/tmp/flyer.gif', originalFileName: 'flyer.gif' });
|
const job = createMockJob({ filePath: '/tmp/flyer.gif', originalFileName: 'flyer.gif' });
|
||||||
|
const convertedPath = '/tmp/flyer-converted.png';
|
||||||
|
|
||||||
|
// Mock the file handler to return the converted path
|
||||||
|
mockFileHandler.prepareImageInputs.mockResolvedValue({
|
||||||
|
imagePaths: [{ path: convertedPath, mimetype: 'image/png' }],
|
||||||
|
createdImagePaths: [convertedPath],
|
||||||
|
});
|
||||||
|
|
||||||
await service.processJob(job);
|
await service.processJob(job);
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: Verifying sharp conversion for GIF...');
|
expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.gif', job, expect.any(Object));
|
||||||
expect(sharp).toHaveBeenCalledWith('/tmp/flyer.gif');
|
expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
|
||||||
expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-converted.png');
|
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: Verifying AI service call and cleanup for GIF...');
|
|
||||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(
|
|
||||||
[{ path: '/tmp/flyer-converted.png', mimetype: 'image/png' }],
|
|
||||||
[],
|
|
||||||
undefined,
|
|
||||||
undefined,
|
|
||||||
expect.any(Object),
|
|
||||||
);
|
|
||||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
||||||
'cleanup-flyer-files',
|
'cleanup-flyer-files',
|
||||||
{ flyerId: 1, paths: ['/tmp/flyer.gif', '/tmp/flyer-converted.png'] },
|
{ flyerId: 1, paths: ['/tmp/flyer.gif', convertedPath] },
|
||||||
expect.any(Object),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should convert a TIFF image to PNG and then process it', async () => {
|
|
||||||
console.log('\n--- [TEST LOG] ---: Starting TIFF conversion success test...');
|
|
||||||
const job = createMockJob({ filePath: '/tmp/flyer.tiff', originalFileName: 'flyer.tiff' });
|
|
||||||
|
|
||||||
await service.processJob(job);
|
|
||||||
|
|
||||||
expect(sharp).toHaveBeenCalledWith('/tmp/flyer.tiff');
|
|
||||||
expect(mockSharpInstance.png).toHaveBeenCalled();
|
|
||||||
expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-converted.png');
|
|
||||||
|
|
||||||
console.log('--- [DEBUG] ---: In TIFF test, logging actual AI call arguments:');
|
|
||||||
console.log(
|
|
||||||
JSON.stringify(
|
|
||||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mock.calls[0],
|
|
||||||
null,
|
|
||||||
2,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(mockedAiService.aiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(
|
|
||||||
[{ path: '/tmp/flyer-converted.png', mimetype: 'image/png' }], // masterItems is mocked to []
|
|
||||||
[], // submitterIp is undefined in the mock job
|
|
||||||
undefined, // userProfileAddress is undefined in the mock job
|
|
||||||
undefined, // The job-specific logger
|
|
||||||
expect.any(Object),
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
|
||||||
'cleanup-flyer-files',
|
|
||||||
{ flyerId: 1, paths: ['/tmp/flyer.tiff', '/tmp/flyer-converted.png'] },
|
|
||||||
expect.any(Object),
|
expect.any(Object),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error and not enqueue cleanup if the database service fails', async () => {
|
it('should throw an error and not enqueue cleanup if the database service fails', async () => {
|
||||||
const job = createMockJob({});
|
const job = createMockJob({});
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
const dbError = new Error('Database transaction failed');
|
const dbError = new Error('Database transaction failed');
|
||||||
vi.mocked(createFlyerAndItems).mockRejectedValue(dbError);
|
vi.mocked(createFlyerAndItems).mockRejectedValue(dbError);
|
||||||
|
|
||||||
@@ -366,8 +351,11 @@ describe('FlyerProcessingService', () => {
|
|||||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
errorCode: 'UNKNOWN_ERROR',
|
errorCode: 'UNKNOWN_ERROR',
|
||||||
message: 'Database transaction failed',
|
message: 'Database transaction failed',
|
||||||
});
|
}); // This was a duplicate, fixed.
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw UnsupportedFileTypeError for an unsupported file type', async () => {
|
it('should throw UnsupportedFileTypeError for an unsupported file type', async () => {
|
||||||
@@ -375,25 +363,24 @@ describe('FlyerProcessingService', () => {
|
|||||||
filePath: '/tmp/document.txt',
|
filePath: '/tmp/document.txt',
|
||||||
originalFileName: 'document.txt',
|
originalFileName: 'document.txt',
|
||||||
});
|
});
|
||||||
|
const fileTypeError = new UnsupportedFileTypeError('Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.');
|
||||||
|
mockFileHandler.prepareImageInputs.mockRejectedValue(fileTypeError);
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
|
|
||||||
await expect(service.processJob(job)).rejects.toThrow(UnsupportedFileTypeError);
|
await expect(service.processJob(job)).rejects.toThrow(UnsupportedFileTypeError);
|
||||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
errorCode: 'UNSUPPORTED_FILE_TYPE',
|
errorCode: 'UNSUPPORTED_FILE_TYPE',
|
||||||
message:
|
message: 'Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.',
|
||||||
'Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.',
|
|
||||||
});
|
});
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||||
});
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||||
it('should log a warning and not enqueue cleanup if the job fails but a flyer ID was somehow generated', async () => {
|
);
|
||||||
const job = createMockJob({});
|
|
||||||
vi.mocked(createFlyerAndItems).mockRejectedValue(new Error('DB Error'));
|
|
||||||
await expect(service.processJob(job)).rejects.toThrow();
|
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error and not enqueue cleanup if icon generation fails', async () => {
|
it('should throw an error and not enqueue cleanup if icon generation fails', async () => {
|
||||||
const job = createMockJob({});
|
const job = createMockJob({});
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
const iconError = new Error('Icon generation failed.');
|
const iconError = new Error('Icon generation failed.');
|
||||||
// The `transform` method calls `generateFlyerIcon`. In `beforeEach`, `transform` is mocked
|
// The `transform` method calls `generateFlyerIcon`. In `beforeEach`, `transform` is mocked
|
||||||
// to always succeed. For this test, we override that mock to simulate a failure
|
// to always succeed. For this test, we override that mock to simulate a failure
|
||||||
@@ -405,235 +392,140 @@ describe('FlyerProcessingService', () => {
|
|||||||
expect(job.updateProgress).toHaveBeenCalledWith({
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
errorCode: 'UNKNOWN_ERROR',
|
errorCode: 'UNKNOWN_ERROR',
|
||||||
message: 'Icon generation failed.',
|
message: 'Icon generation failed.',
|
||||||
});
|
}); // This was a duplicate, fixed.
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('_prepareImageInputs (private method)', () => {
|
describe('_reportErrorAndThrow (private method)', () => {
|
||||||
it('should throw UnsupportedFileTypeError for an unsupported file type', async () => {
|
it('should update progress and throw UnrecoverableError for quota messages', async () => {
|
||||||
const { logger } = await import('./logger.server');
|
const { logger } = await import('./logger.server');
|
||||||
const job = createMockJob({
|
const job = createMockJob({});
|
||||||
filePath: '/tmp/unsupported.doc',
|
const quotaError = new Error('RESOURCE_EXHAUSTED');
|
||||||
originalFileName: 'unsupported.doc',
|
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||||
|
|
||||||
|
await expect(privateMethod(quotaError, job, logger)).rejects.toThrow(
|
||||||
|
UnrecoverableError,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
|
errorCode: 'QUOTA_EXCEEDED',
|
||||||
|
message: 'An AI quota has been exceeded. Please try again later.',
|
||||||
});
|
});
|
||||||
const privateMethod = (service as any)._prepareImageInputs;
|
});
|
||||||
|
|
||||||
await expect(privateMethod('/tmp/unsupported.doc', job, logger)).rejects.toThrow(
|
it('should use toErrorPayload for FlyerProcessingError instances', async () => {
|
||||||
UnsupportedFileTypeError,
|
const { logger } = await import('./logger.server');
|
||||||
|
const job = createMockJob({});
|
||||||
|
const validationError = new AiDataValidationError(
|
||||||
|
'Validation failed',
|
||||||
|
{ foo: 'bar' },
|
||||||
|
{ raw: 'data' },
|
||||||
);
|
);
|
||||||
|
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||||
|
|
||||||
|
await expect(privateMethod(validationError, job, logger)).rejects.toThrow(
|
||||||
|
validationError,
|
||||||
|
);
|
||||||
|
|
||||||
|
// The payload should now come from the error's `toErrorPayload` method
|
||||||
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
|
errorCode: 'AI_VALIDATION_FAILED',
|
||||||
|
message:
|
||||||
|
"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
|
||||||
|
validationErrors: { foo: 'bar' },
|
||||||
|
rawData: { raw: 'data' },
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should update progress and re-throw standard errors', async () => {
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
|
const job = createMockJob({});
|
||||||
|
const genericError = new Error('A standard failure');
|
||||||
|
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||||
|
|
||||||
|
await expect(privateMethod(genericError, job, logger)).rejects.toThrow(genericError);
|
||||||
|
|
||||||
|
expect(job.updateProgress).toHaveBeenCalledWith({
|
||||||
|
errorCode: 'UNKNOWN_ERROR',
|
||||||
|
message: 'A standard failure', // This was a duplicate, fixed.
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should wrap and throw non-Error objects', async () => {
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
|
const job = createMockJob({});
|
||||||
|
const nonError = 'just a string error';
|
||||||
|
const privateMethod = (service as any)._reportErrorAndThrow;
|
||||||
|
|
||||||
|
await expect(privateMethod(nonError, job, logger)).rejects.toThrow('just a string error');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('_convertImageToPng (private method)', () => {
|
describe('processCleanupJob', () => {
|
||||||
it('should throw an error if sharp fails', async () => {
|
it('should delete all files successfully', async () => {
|
||||||
const { logger } = await import('./logger.server');
|
const job = createMockCleanupJob({ flyerId: 1, paths: ['/tmp/file1', '/tmp/file2'] });
|
||||||
const sharpError = new Error('Sharp failed');
|
mocks.unlink.mockResolvedValue(undefined);
|
||||||
vi.mocked(mockSharpInstance.toFile).mockRejectedValue(sharpError);
|
|
||||||
const privateMethod = (service as any)._convertImageToPng;
|
|
||||||
|
|
||||||
await expect(privateMethod('/tmp/image.gif', logger)).rejects.toThrow(
|
const result = await service.processCleanupJob(job);
|
||||||
'Image conversion to PNG failed for image.gif',
|
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file1');
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file2');
|
||||||
|
expect(result).toEqual({ status: 'success', deletedCount: 2 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle ENOENT errors gracefully and still succeed', async () => {
|
||||||
|
const job = createMockCleanupJob({ flyerId: 1, paths: ['/tmp/file1', '/tmp/file2'] });
|
||||||
|
const enoentError: NodeJS.ErrnoException = new Error('File not found');
|
||||||
|
enoentError.code = 'ENOENT';
|
||||||
|
|
||||||
|
mocks.unlink.mockResolvedValueOnce(undefined).mockRejectedValueOnce(enoentError);
|
||||||
|
|
||||||
|
const result = await service.processCleanupJob(job);
|
||||||
|
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
||||||
|
expect(result).toEqual({ status: 'success', deletedCount: 2 });
|
||||||
|
// Check that the warning was logged
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'File not found during cleanup (already deleted?): /tmp/file2',
|
||||||
);
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw an aggregate error if a non-ENOENT error occurs', async () => {
|
||||||
|
const job = createMockCleanupJob({
|
||||||
|
flyerId: 1,
|
||||||
|
paths: ['/tmp/file1', '/tmp/permission-denied'],
|
||||||
|
});
|
||||||
|
const permissionError: NodeJS.ErrnoException = new Error('Permission denied');
|
||||||
|
permissionError.code = 'EACCES';
|
||||||
|
|
||||||
|
mocks.unlink.mockResolvedValueOnce(undefined).mockRejectedValueOnce(permissionError);
|
||||||
|
|
||||||
|
await expect(service.processCleanupJob(job)).rejects.toThrow(
|
||||||
|
'Failed to delete 1 file(s): /tmp/permission-denied',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check that the error was logged
|
||||||
|
const { logger } = await import('./logger.server');
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
{ err: sharpError, filePath: '/tmp/image.gif' },
|
expect.objectContaining({ err: permissionError, path: '/tmp/permission-denied' }),
|
||||||
'Failed to convert image to PNG using sharp.',
|
'Failed to delete temporary file.',
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('_extractFlyerDataWithAI (private method)', () => {
|
|
||||||
it('should throw AiDataValidationError if AI response validation fails', async () => {
|
|
||||||
const { logger } = await import('./logger.server');
|
|
||||||
const jobData = createMockJob({}).data;
|
|
||||||
// Mock AI to return data missing a required field ('store_name')
|
|
||||||
vi.mocked(mockedAiService.aiService.extractCoreDataFromFlyerImage).mockResolvedValue({
|
|
||||||
valid_from: '2024-01-01',
|
|
||||||
items: [],
|
|
||||||
} as any);
|
|
||||||
|
|
||||||
await expect((service as any)._extractFlyerDataWithAI([], jobData, logger)).rejects.toThrow(
|
|
||||||
AiDataValidationError,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('_enqueueCleanup (private method)', () => {
|
|
||||||
it('should enqueue a cleanup job with the correct parameters', async () => {
|
|
||||||
const { logger } = await import('./logger.server');
|
|
||||||
const flyerId = 42;
|
|
||||||
const paths = ['/tmp/file1.jpg', '/tmp/file2.pdf'];
|
|
||||||
|
|
||||||
// Access and call the private method for testing
|
|
||||||
await (
|
|
||||||
service as unknown as {
|
|
||||||
_enqueueCleanup: (flyerId: number, paths: string[], logger: Logger) => Promise<void>;
|
|
||||||
}
|
|
||||||
)._enqueueCleanup(flyerId, paths, logger);
|
|
||||||
|
|
||||||
expect(mockCleanupQueue.add).toHaveBeenCalledWith(
|
|
||||||
'cleanup-flyer-files',
|
|
||||||
{ flyerId, paths },
|
|
||||||
{ jobId: `cleanup-flyer-${flyerId}`, removeOnComplete: true },
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not call the queue if the paths array is empty', async () => {
|
it('should skip processing and return "skipped" if paths array is empty', async () => {
|
||||||
|
const job = createMockCleanupJob({ flyerId: 1, paths: [] });
|
||||||
|
const result = await service.processCleanupJob(job);
|
||||||
|
|
||||||
|
expect(mocks.unlink).not.toHaveBeenCalled();
|
||||||
|
expect(result).toEqual({ status: 'skipped', reason: 'no paths' });
|
||||||
const { logger } = await import('./logger.server');
|
const { logger } = await import('./logger.server');
|
||||||
// Access and call the private method with an empty array
|
expect(logger.warn).toHaveBeenCalledWith('Job received no paths to clean. Skipping.');
|
||||||
await (
|
|
||||||
service as unknown as {
|
|
||||||
_enqueueCleanup: (flyerId: number, paths: string[], logger: Logger) => Promise<void>;
|
|
||||||
}
|
|
||||||
)._enqueueCleanup(123, [], logger);
|
|
||||||
|
|
||||||
expect(mockCleanupQueue.add).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('_saveProcessedFlyerData (private method)', () => {
|
|
||||||
it('should transform data, create flyer in DB, and log activity', async () => {
|
|
||||||
const { logger } = await import('./logger.server');
|
|
||||||
// Arrange
|
|
||||||
const mockExtractedData = {
|
|
||||||
store_name: 'Test Store',
|
|
||||||
valid_from: '2024-01-01',
|
|
||||||
valid_to: '2024-01-07',
|
|
||||||
store_address: '123 Mock St',
|
|
||||||
items: [
|
|
||||||
{
|
|
||||||
item: 'Test Item',
|
|
||||||
price_display: '$1.99',
|
|
||||||
price_in_cents: 199,
|
|
||||||
quantity: 'each',
|
|
||||||
category_name: 'Test Category',
|
|
||||||
master_item_id: 1,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
const mockImagePaths = [{ path: '/tmp/flyer.jpg', mimetype: 'image/jpeg' }];
|
|
||||||
const mockJobData = {
|
|
||||||
filePath: '/tmp/flyer.jpg',
|
|
||||||
originalFileName: 'flyer.jpg',
|
|
||||||
checksum: 'checksum-123',
|
|
||||||
userId: 'user-abc',
|
|
||||||
};
|
|
||||||
|
|
||||||
// The DB create function is also mocked in beforeEach.
|
|
||||||
// Create a complete mock that satisfies the Flyer type.
|
|
||||||
const mockNewFlyer = createMockFlyer({
|
|
||||||
flyer_id: 1,
|
|
||||||
file_name: 'flyer.jpg',
|
|
||||||
image_url: '/flyer-images/flyer.jpg',
|
|
||||||
icon_url: '/flyer-images/icons/icon-flyer.webp',
|
|
||||||
checksum: 'checksum-123',
|
|
||||||
store_id: 1,
|
|
||||||
item_count: 1,
|
|
||||||
});
|
|
||||||
vi.mocked(createFlyerAndItems).mockResolvedValue({ flyer: mockNewFlyer, items: [] });
|
|
||||||
|
|
||||||
// Act: Access and call the private method for testing
|
|
||||||
const result = await (
|
|
||||||
service as unknown as {
|
|
||||||
_saveProcessedFlyerData: (
|
|
||||||
extractedData: z.infer<typeof AiFlyerDataSchema>,
|
|
||||||
imagePaths: { path: string; mimetype: string }[],
|
|
||||||
jobData: FlyerJobData,
|
|
||||||
logger: Logger,
|
|
||||||
) => Promise<Flyer>;
|
|
||||||
}
|
|
||||||
)._saveProcessedFlyerData(mockExtractedData, mockImagePaths, mockJobData, logger);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
// 1. Transformer was called correctly
|
|
||||||
expect(FlyerDataTransformer.prototype.transform).toHaveBeenCalledWith(
|
|
||||||
mockExtractedData,
|
|
||||||
mockImagePaths,
|
|
||||||
mockJobData.originalFileName,
|
|
||||||
mockJobData.checksum,
|
|
||||||
mockJobData.userId,
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
|
|
||||||
// 2. DB function was called with the transformed data
|
|
||||||
// The data comes from the mock defined in `beforeEach`.
|
|
||||||
expect(createFlyerAndItems).toHaveBeenCalledWith(
|
|
||||||
expect.objectContaining({ store_name: 'Mock Store', checksum: 'checksum-123' }),
|
|
||||||
[], // itemsForDb from the mock
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
|
|
||||||
// 3. Activity was logged with all expected fields
|
|
||||||
expect(mockedDb.adminRepo.logActivity).toHaveBeenCalledWith(
|
|
||||||
{
|
|
||||||
userId: 'user-abc',
|
|
||||||
action: 'flyer_processed' as const,
|
|
||||||
displayText: 'Processed a new flyer for Mock Store.', // This was a duplicate, fixed.
|
|
||||||
details: { flyerId: 1, storeName: 'Mock Store' },
|
|
||||||
},
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
|
|
||||||
// 4. The method returned the new flyer
|
|
||||||
expect(result).toEqual(mockNewFlyer);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('_convertPdfToImages (private method)', () => {
|
|
||||||
it('should call pdftocairo and return sorted image paths on success', async () => {
|
|
||||||
const { logger } = await import('./logger.server');
|
|
||||||
const job = createMockJob({ filePath: '/tmp/test.pdf' });
|
|
||||||
// Mock readdir to return unsorted Dirent-like objects
|
|
||||||
mocks.readdir.mockResolvedValue([
|
|
||||||
{ name: 'test-10.jpg' },
|
|
||||||
{ name: 'test-1.jpg' },
|
|
||||||
{ name: 'test-2.jpg' },
|
|
||||||
{ name: 'other-file.txt' },
|
|
||||||
] as Dirent[]);
|
|
||||||
|
|
||||||
// Access and call the private method for testing
|
|
||||||
const imagePaths = await (
|
|
||||||
service as unknown as {
|
|
||||||
_convertPdfToImages: (filePath: string, job: Job, logger: Logger) => Promise<string[]>;
|
|
||||||
}
|
|
||||||
)._convertPdfToImages('/tmp/test.pdf', job, logger);
|
|
||||||
|
|
||||||
expect(mocks.execAsync).toHaveBeenCalledWith(
|
|
||||||
'pdftocairo -jpeg -r 150 "/tmp/test.pdf" "/tmp/test"',
|
|
||||||
);
|
|
||||||
expect(job.updateProgress).toHaveBeenCalledWith({ message: 'Converting PDF to images...' });
|
|
||||||
// Verify that the paths are correctly sorted numerically
|
|
||||||
expect(imagePaths).toEqual(['/tmp/test-1.jpg', '/tmp/test-2.jpg', '/tmp/test-10.jpg']);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should throw PdfConversionError if no images are generated', async () => {
|
|
||||||
const { logger } = await import('./logger.server');
|
|
||||||
const job = createMockJob({ filePath: '/tmp/empty.pdf' });
|
|
||||||
// Mock readdir to return no matching files
|
|
||||||
mocks.readdir.mockResolvedValue([]);
|
|
||||||
|
|
||||||
await expect(
|
|
||||||
(
|
|
||||||
service as unknown as {
|
|
||||||
_convertPdfToImages: (filePath: string, job: Job, logger: Logger) => Promise<string[]>;
|
|
||||||
}
|
|
||||||
)._convertPdfToImages('/tmp/empty.pdf', job, logger),
|
|
||||||
).rejects.toThrow('PDF conversion resulted in 0 images for file: /tmp/empty.pdf');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should re-throw an error if the exec command fails', async () => {
|
|
||||||
const { logger } = await import('./logger.server');
|
|
||||||
const job = createMockJob({ filePath: '/tmp/bad.pdf' });
|
|
||||||
const commandError = new Error('pdftocairo not found');
|
|
||||||
mocks.execAsync.mockRejectedValue(commandError);
|
|
||||||
|
|
||||||
await expect(
|
|
||||||
(
|
|
||||||
service as unknown as {
|
|
||||||
_convertPdfToImages: (filePath: string, job: Job, logger: Logger) => Promise<string[]>;
|
|
||||||
}
|
|
||||||
)._convertPdfToImages('/tmp/bad.pdf', job, logger),
|
|
||||||
).rejects.toThrow(commandError);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,43 +1,25 @@
|
|||||||
// src/services/flyerProcessingService.server.ts
|
// src/services/flyerProcessingService.server.ts
|
||||||
import type { Job, JobsOptions } from 'bullmq';
|
import { Job, JobsOptions, UnrecoverableError } from 'bullmq';
|
||||||
import sharp from 'sharp';
|
|
||||||
import path from 'path';
|
|
||||||
import type { Dirent } from 'node:fs';
|
import type { Dirent } from 'node:fs';
|
||||||
import { z } from 'zod';
|
|
||||||
|
|
||||||
import type { AIService } from './aiService.server';
|
import type { AIService } from './aiService.server';
|
||||||
import * as db from './db/index.db';
|
import * as db from './db/index.db';
|
||||||
import { createFlyerAndItems } from './db/flyer.db';
|
import { createFlyerAndItems } from './db/flyer.db';
|
||||||
import {
|
import {
|
||||||
PdfConversionError,
|
|
||||||
AiDataValidationError,
|
AiDataValidationError,
|
||||||
UnsupportedFileTypeError,
|
UnsupportedFileTypeError,
|
||||||
|
FlyerProcessingError,
|
||||||
|
PdfConversionError,
|
||||||
} from './processingErrors';
|
} from './processingErrors';
|
||||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||||
import { logger as globalLogger } from './logger.server';
|
import { logger as globalLogger } from './logger.server';
|
||||||
import type { Logger } from 'pino';
|
import type { Logger } from 'pino';
|
||||||
|
import type { Flyer, FlyerInsert, FlyerItemInsert } from '../types';
|
||||||
// Helper for consistent required string validation (handles missing/null/empty)
|
import { FlyerFileHandler, ICommandExecutor, IFileSystem } from './flyerFileHandler.server';
|
||||||
const requiredString = (message: string) =>
|
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
|
||||||
|
|
||||||
// Define the image formats supported by the AI model
|
|
||||||
const SUPPORTED_IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.webp', '.heic', '.heif'];
|
|
||||||
|
|
||||||
// Define image formats that are not directly supported but can be converted to PNG.
|
|
||||||
const CONVERTIBLE_IMAGE_EXTENSIONS = ['.gif', '.tiff', '.svg', '.bmp'];
|
|
||||||
|
|
||||||
// --- Start: Interfaces for Dependency Injection ---
|
// --- Start: Interfaces for Dependency Injection ---
|
||||||
|
|
||||||
export interface IFileSystem {
|
|
||||||
readdir(path: string, options: { withFileTypes: true }): Promise<Dirent[]>;
|
|
||||||
unlink(path: string): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ICommandExecutor {
|
|
||||||
(command: string): Promise<{ stdout: string; stderr: string }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface FlyerJobData {
|
export interface FlyerJobData {
|
||||||
filePath: string;
|
filePath: string;
|
||||||
originalFileName: string;
|
originalFileName: string;
|
||||||
@@ -47,7 +29,7 @@ export interface FlyerJobData {
|
|||||||
userProfileAddress?: string;
|
userProfileAddress?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface CleanupJobData {
|
export interface CleanupJobData {
|
||||||
flyerId: number;
|
flyerId: number;
|
||||||
// An array of absolute file paths to be deleted. Made optional for manual cleanup triggers.
|
// An array of absolute file paths to be deleted. Made optional for manual cleanup triggers.
|
||||||
paths?: string[];
|
paths?: string[];
|
||||||
@@ -61,24 +43,6 @@ interface ICleanupQueue {
|
|||||||
add(name: string, data: CleanupJobData, opts?: JobsOptions): Promise<Job<CleanupJobData>>;
|
add(name: string, data: CleanupJobData, opts?: JobsOptions): Promise<Job<CleanupJobData>>;
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Zod Schemas for AI Response Validation (exported for the transformer) ---
|
|
||||||
const ExtractedFlyerItemSchema = z.object({
|
|
||||||
item: z.string().nullable(), // AI might return null or empty, normalize later
|
|
||||||
price_display: z.string().nullable(), // AI might return null or empty, normalize later
|
|
||||||
price_in_cents: z.number().nullable(),
|
|
||||||
quantity: z.string().nullable(), // AI might return null or empty, normalize later
|
|
||||||
category_name: z.string().nullable(), // AI might return null or empty, normalize later
|
|
||||||
master_item_id: z.number().nullish(), // .nullish() allows null or undefined
|
|
||||||
});
|
|
||||||
|
|
||||||
export const AiFlyerDataSchema = z.object({
|
|
||||||
store_name: z.string().nullable(), // AI might return null or empty, normalize later
|
|
||||||
valid_from: z.string().nullable(),
|
|
||||||
valid_to: z.string().nullable(),
|
|
||||||
store_address: z.string().nullable(),
|
|
||||||
items: z.array(ExtractedFlyerItemSchema),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This class encapsulates the business logic for processing a flyer from a file.
|
* This class encapsulates the business logic for processing a flyer from a file.
|
||||||
* It handles PDF conversion, AI data extraction, and saving the results to the database.
|
* It handles PDF conversion, AI data extraction, and saving the results to the database.
|
||||||
@@ -86,6 +50,8 @@ export const AiFlyerDataSchema = z.object({
|
|||||||
export class FlyerProcessingService {
|
export class FlyerProcessingService {
|
||||||
constructor(
|
constructor(
|
||||||
private ai: AIService,
|
private ai: AIService,
|
||||||
|
private fileHandler: FlyerFileHandler,
|
||||||
|
private aiProcessor: FlyerAiProcessor,
|
||||||
private database: typeof db,
|
private database: typeof db,
|
||||||
private fs: IFileSystem,
|
private fs: IFileSystem,
|
||||||
private exec: ICommandExecutor,
|
private exec: ICommandExecutor,
|
||||||
@@ -93,156 +59,6 @@ export class FlyerProcessingService {
|
|||||||
private transformer: FlyerDataTransformer,
|
private transformer: FlyerDataTransformer,
|
||||||
) {}
|
) {}
|
||||||
|
|
||||||
/**
|
|
||||||
* Converts a PDF file to a series of JPEG images using an external tool.
|
|
||||||
* @param filePath The path to the PDF file.
|
|
||||||
* @param job The BullMQ job instance for progress updates.
|
|
||||||
* @returns A promise that resolves to an array of paths to the created image files.
|
|
||||||
*/
|
|
||||||
private async _convertPdfToImages(
|
|
||||||
filePath: string,
|
|
||||||
job: Job<FlyerJobData>,
|
|
||||||
logger: Logger,
|
|
||||||
): Promise<string[]> {
|
|
||||||
logger.info(`Starting PDF conversion for: ${filePath}`);
|
|
||||||
await job.updateProgress({ message: 'Converting PDF to images...' });
|
|
||||||
|
|
||||||
const outputDir = path.dirname(filePath);
|
|
||||||
const outputFilePrefix = path.join(outputDir, path.basename(filePath, '.pdf'));
|
|
||||||
logger.debug({ outputDir, outputFilePrefix }, `PDF output details`);
|
|
||||||
|
|
||||||
const command = `pdftocairo -jpeg -r 150 "${filePath}" "${outputFilePrefix}"`;
|
|
||||||
logger.info(`Executing PDF conversion command`);
|
|
||||||
logger.debug({ command });
|
|
||||||
const { stdout, stderr } = await this.exec(command);
|
|
||||||
|
|
||||||
if (stdout) logger.debug({ stdout }, `[Worker] pdftocairo stdout for ${filePath}:`);
|
|
||||||
if (stderr) logger.warn({ stderr }, `[Worker] pdftocairo stderr for ${filePath}:`);
|
|
||||||
|
|
||||||
logger.debug(`[Worker] Reading contents of output directory: ${outputDir}`);
|
|
||||||
const filesInDir = await this.fs.readdir(outputDir, { withFileTypes: true });
|
|
||||||
logger.debug(`[Worker] Found ${filesInDir.length} total entries in output directory.`);
|
|
||||||
|
|
||||||
const generatedImages = filesInDir
|
|
||||||
.filter((f) => f.name.startsWith(path.basename(outputFilePrefix)) && f.name.endsWith('.jpg'))
|
|
||||||
.sort((a, b) => a.name.localeCompare(b.name, undefined, { numeric: true }));
|
|
||||||
|
|
||||||
logger.debug(
|
|
||||||
{ imageNames: generatedImages.map((f) => f.name) },
|
|
||||||
`Filtered down to ${generatedImages.length} generated JPGs.`,
|
|
||||||
);
|
|
||||||
|
|
||||||
if (generatedImages.length === 0) {
|
|
||||||
const errorMessage = `PDF conversion resulted in 0 images for file: ${filePath}. The PDF might be blank or corrupt.`;
|
|
||||||
logger.error({ stderr }, `PdfConversionError: ${errorMessage}`);
|
|
||||||
throw new PdfConversionError(errorMessage, stderr);
|
|
||||||
}
|
|
||||||
|
|
||||||
return generatedImages.map((img) => path.join(outputDir, img.name));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Converts an image file (e.g., GIF, TIFF) to a PNG format that the AI can process.
|
|
||||||
* @param filePath The path to the source image file.
|
|
||||||
* @param logger A logger instance.
|
|
||||||
* @returns The path to the newly created PNG file.
|
|
||||||
*/
|
|
||||||
private async _convertImageToPng(filePath: string, logger: Logger): Promise<string> {
|
|
||||||
const outputDir = path.dirname(filePath);
|
|
||||||
const originalFileName = path.parse(path.basename(filePath)).name;
|
|
||||||
const newFileName = `${originalFileName}-converted.png`;
|
|
||||||
const outputPath = path.join(outputDir, newFileName);
|
|
||||||
|
|
||||||
logger.info({ from: filePath, to: outputPath }, 'Converting unsupported image format to PNG.');
|
|
||||||
|
|
||||||
try {
|
|
||||||
await sharp(filePath).png().toFile(outputPath);
|
|
||||||
return outputPath;
|
|
||||||
} catch (error) {
|
|
||||||
logger.error({ err: error, filePath }, 'Failed to convert image to PNG using sharp.');
|
|
||||||
throw new Error(`Image conversion to PNG failed for ${path.basename(filePath)}.`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Prepares the input images for the AI service. If the input is a PDF, it's converted to images.
|
|
||||||
* @param filePath The path to the original uploaded file.
|
|
||||||
* @param job The BullMQ job instance.
|
|
||||||
* @returns An object containing the final image paths for the AI and a list of any newly created image files.
|
|
||||||
*/
|
|
||||||
private async _prepareImageInputs(
|
|
||||||
filePath: string,
|
|
||||||
job: Job<FlyerJobData>,
|
|
||||||
logger: Logger,
|
|
||||||
): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
|
|
||||||
const fileExt = path.extname(filePath).toLowerCase();
|
|
||||||
|
|
||||||
// Handle PDF conversion separately
|
|
||||||
if (fileExt === '.pdf') {
|
|
||||||
const createdImagePaths = await this._convertPdfToImages(filePath, job, logger);
|
|
||||||
const imagePaths = createdImagePaths.map((p) => ({ path: p, mimetype: 'image/jpeg' }));
|
|
||||||
logger.info(`Converted PDF to ${imagePaths.length} images.`);
|
|
||||||
return { imagePaths, createdImagePaths };
|
|
||||||
// Handle directly supported single-image formats
|
|
||||||
} else if (SUPPORTED_IMAGE_EXTENSIONS.includes(fileExt)) {
|
|
||||||
logger.info(`Processing as a single image file: ${filePath}`);
|
|
||||||
// Normalize .jpg to image/jpeg for consistency
|
|
||||||
const mimetype =
|
|
||||||
fileExt === '.jpg' || fileExt === '.jpeg' ? 'image/jpeg' : `image/${fileExt.slice(1)}`;
|
|
||||||
const imagePaths = [{ path: filePath, mimetype }];
|
|
||||||
return { imagePaths, createdImagePaths: [] };
|
|
||||||
// Handle convertible image formats
|
|
||||||
} else if (CONVERTIBLE_IMAGE_EXTENSIONS.includes(fileExt)) {
|
|
||||||
const createdPngPath = await this._convertImageToPng(filePath, logger);
|
|
||||||
const imagePaths = [{ path: createdPngPath, mimetype: 'image/png' }];
|
|
||||||
// The new PNG is a temporary file that needs to be cleaned up.
|
|
||||||
return { imagePaths, createdImagePaths: [createdPngPath] };
|
|
||||||
} else {
|
|
||||||
// If the file is neither a PDF nor a supported image, throw an error.
|
|
||||||
const errorMessage = `Unsupported file type: ${fileExt}. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.`;
|
|
||||||
logger.error({ originalFileName: job.data.originalFileName, fileExt }, errorMessage);
|
|
||||||
throw new UnsupportedFileTypeError(errorMessage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Calls the AI service to extract structured data from the flyer images.
|
|
||||||
* @param imagePaths An array of paths and mimetypes for the images.
|
|
||||||
* @param jobData The data from the BullMQ job.
|
|
||||||
* @returns A promise that resolves to the validated, structured flyer data.
|
|
||||||
*/
|
|
||||||
private async _extractFlyerDataWithAI(
|
|
||||||
imagePaths: { path: string; mimetype: string }[],
|
|
||||||
jobData: FlyerJobData,
|
|
||||||
logger: Logger,
|
|
||||||
): Promise<z.infer<typeof AiFlyerDataSchema>> {
|
|
||||||
logger.info(`Starting AI data extraction.`);
|
|
||||||
const { submitterIp, userProfileAddress } = jobData;
|
|
||||||
const masterItems = await this.database.personalizationRepo.getAllMasterItems(logger);
|
|
||||||
logger.debug(`Retrieved ${masterItems.length} master items for AI matching.`);
|
|
||||||
|
|
||||||
const extractedData = await this.ai.extractCoreDataFromFlyerImage(
|
|
||||||
imagePaths,
|
|
||||||
masterItems,
|
|
||||||
submitterIp, // Pass the job-specific logger
|
|
||||||
userProfileAddress, // Pass the job-specific logger
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
|
|
||||||
const validationResult = AiFlyerDataSchema.safeParse(extractedData);
|
|
||||||
if (!validationResult.success) {
|
|
||||||
const errors = validationResult.error.flatten();
|
|
||||||
logger.error({ errors, rawData: extractedData }, 'AI response failed validation.');
|
|
||||||
throw new AiDataValidationError(
|
|
||||||
'AI response validation failed. The returned data structure is incorrect.',
|
|
||||||
errors,
|
|
||||||
extractedData,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`AI extracted ${validationResult.data.items.length} items.`);
|
|
||||||
return validationResult.data;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Saves the extracted flyer data to the database.
|
* Saves the extracted flyer data to the database.
|
||||||
* @param extractedData The structured data from the AI.
|
* @param extractedData The structured data from the AI.
|
||||||
@@ -251,47 +67,44 @@ export class FlyerProcessingService {
|
|||||||
* @returns A promise that resolves to the newly created flyer record.
|
* @returns A promise that resolves to the newly created flyer record.
|
||||||
*/
|
*/
|
||||||
private async _saveProcessedFlyerData(
|
private async _saveProcessedFlyerData(
|
||||||
extractedData: z.infer<typeof AiFlyerDataSchema>,
|
flyerData: FlyerInsert,
|
||||||
imagePaths: { path: string; mimetype: string }[],
|
itemsForDb: FlyerItemInsert[],
|
||||||
jobData: FlyerJobData,
|
userId: string | undefined,
|
||||||
logger: Logger,
|
logger: Logger,
|
||||||
) {
|
) {
|
||||||
logger.info(`Preparing to save extracted data to database.`);
|
logger.info(`Preparing to save extracted data to database.`);
|
||||||
|
|
||||||
// Ensure store_name is a non-empty string before passing to the transformer.
|
// 1. Save the transformed data to the database.
|
||||||
// This makes the handling of the nullable store_name explicit in this service.
|
|
||||||
const dataForTransformer = { ...extractedData };
|
|
||||||
if (!dataForTransformer.store_name) {
|
|
||||||
logger.warn('AI did not return a store name. Using fallback "Unknown Store (auto)".');
|
|
||||||
dataForTransformer.store_name = 'Unknown Store (auto)';
|
|
||||||
}
|
|
||||||
|
|
||||||
// 1. Transform the AI data into database-ready records.
|
|
||||||
const { flyerData, itemsForDb } = await this.transformer.transform(
|
|
||||||
dataForTransformer,
|
|
||||||
imagePaths,
|
|
||||||
jobData.originalFileName,
|
|
||||||
jobData.checksum,
|
|
||||||
jobData.userId,
|
|
||||||
// Pass the job-specific logger to the transformer
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
|
|
||||||
// 2. Save the transformed data to the database.
|
|
||||||
const { flyer: newFlyer } = await createFlyerAndItems(flyerData, itemsForDb, logger);
|
const { flyer: newFlyer } = await createFlyerAndItems(flyerData, itemsForDb, logger);
|
||||||
logger.info({ newFlyerId: newFlyer.flyer_id }, `Successfully saved new flyer.`);
|
logger.info({ newFlyerId: newFlyer.flyer_id }, `Successfully saved new flyer.`);
|
||||||
|
|
||||||
|
// 2. Log the activity.
|
||||||
|
await this._logFlyerProcessedActivity(newFlyer, userId, logger);
|
||||||
|
|
||||||
|
return newFlyer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Logs the successful processing of a flyer to the admin activity log.
|
||||||
|
* @param newFlyer The newly created flyer record from the database.
|
||||||
|
* @param userId The ID of the user who uploaded the flyer, if available.
|
||||||
|
* @param logger The job-specific logger instance.
|
||||||
|
*/
|
||||||
|
private async _logFlyerProcessedActivity(
|
||||||
|
newFlyer: Flyer,
|
||||||
|
userId: string | undefined,
|
||||||
|
logger: Logger,
|
||||||
|
) {
|
||||||
|
const storeName = newFlyer.store?.name || 'Unknown Store';
|
||||||
await this.database.adminRepo.logActivity(
|
await this.database.adminRepo.logActivity(
|
||||||
{
|
{
|
||||||
userId: jobData.userId,
|
userId: userId,
|
||||||
action: 'flyer_processed',
|
action: 'flyer_processed',
|
||||||
displayText: `Processed a new flyer for ${flyerData.store_name}.`,
|
displayText: `Processed a new flyer for ${storeName}.`,
|
||||||
details: { flyerId: newFlyer.flyer_id, storeName: flyerData.store_name },
|
details: { flyerId: newFlyer.flyer_id, storeName },
|
||||||
},
|
},
|
||||||
logger,
|
logger,
|
||||||
);
|
);
|
||||||
|
|
||||||
return newFlyer;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -313,10 +126,127 @@ export class FlyerProcessingService {
|
|||||||
logger.info({ flyerId }, `Enqueued cleanup job.`);
|
logger.info({ flyerId }, `Enqueued cleanup job.`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Centralized error handler for the `processJob` method. It logs the error,
|
||||||
|
* updates the job's progress with a user-friendly message, and re-throws the
|
||||||
|
* error for the worker to handle retries or final failure. It also identifies
|
||||||
|
* unrecoverable errors to prevent unnecessary retries.
|
||||||
|
* @param error The error caught during processing.
|
||||||
|
* @param job The BullMQ job instance.
|
||||||
|
* @param logger The job-specific logger.
|
||||||
|
*/
|
||||||
|
private async _reportErrorAndThrow(
|
||||||
|
error: unknown,
|
||||||
|
job: Job<FlyerJobData>,
|
||||||
|
logger: Logger,
|
||||||
|
): Promise<never> {
|
||||||
|
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||||
|
const errorMessage = wrappedError.message || 'An unknown error occurred.';
|
||||||
|
|
||||||
|
// First, check for unrecoverable quota-related errors.
|
||||||
|
if (
|
||||||
|
errorMessage.includes('quota') ||
|
||||||
|
errorMessage.includes('429') ||
|
||||||
|
errorMessage.toLowerCase().includes('resource_exhausted')
|
||||||
|
) {
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError, jobId: job.id },
|
||||||
|
'[FlyerProcessingService] Unrecoverable quota error detected. Failing job immediately.',
|
||||||
|
);
|
||||||
|
await job.updateProgress({
|
||||||
|
errorCode: 'QUOTA_EXCEEDED',
|
||||||
|
message: 'An AI quota has been exceeded. Please try again later.',
|
||||||
|
});
|
||||||
|
// This specific error type tells the BullMQ worker to fail the job without retries.
|
||||||
|
throw new UnrecoverableError(errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
let errorPayload: { errorCode: string; message: string; [key: string]: any };
|
||||||
|
|
||||||
|
// Handle our custom, structured processing errors.
|
||||||
|
if (wrappedError instanceof FlyerProcessingError) {
|
||||||
|
// Use the properties from the custom error itself.
|
||||||
|
errorPayload = wrappedError.toErrorPayload();
|
||||||
|
// Log with specific details based on the error type
|
||||||
|
if (wrappedError instanceof AiDataValidationError) {
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError, validationErrors: wrappedError.validationErrors, rawData: wrappedError.rawData },
|
||||||
|
`AI Data Validation failed.`,
|
||||||
|
);
|
||||||
|
} else if (wrappedError instanceof PdfConversionError) {
|
||||||
|
logger.error({ err: wrappedError, stderr: wrappedError.stderr }, `PDF Conversion failed.`);
|
||||||
|
} else {
|
||||||
|
// Generic log for other FlyerProcessingErrors like UnsupportedFileTypeError
|
||||||
|
logger.error({ err: wrappedError }, `${wrappedError.name} occurred during processing.`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Handle generic/unknown errors.
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError, attemptsMade: job.attemptsMade, totalAttempts: job.opts.attempts },
|
||||||
|
`A generic error occurred in job.`,
|
||||||
|
);
|
||||||
|
errorPayload = {
|
||||||
|
errorCode: 'UNKNOWN_ERROR',
|
||||||
|
message: errorMessage,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
await job.updateProgress(errorPayload);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Orchestrates the series of steps involved in processing a flyer.
|
||||||
|
* This "happy path" method is called by the main `processJob` method.
|
||||||
|
* @param job The BullMQ job instance.
|
||||||
|
* @param logger The job-specific logger.
|
||||||
|
* @returns A promise that resolves with the new flyer's ID.
|
||||||
|
*/
|
||||||
|
private async _runProcessingSteps(
|
||||||
|
job: Job<FlyerJobData>,
|
||||||
|
logger: Logger,
|
||||||
|
): Promise<{ flyerId: number }> {
|
||||||
|
const { filePath } = job.data;
|
||||||
|
|
||||||
|
// Step 1: Prepare image inputs (convert PDF, etc.)
|
||||||
|
await job.updateProgress({ message: 'Starting process...' });
|
||||||
|
const { imagePaths, createdImagePaths } = await this.fileHandler.prepareImageInputs(
|
||||||
|
filePath,
|
||||||
|
job,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
|
|
||||||
|
await job.updateProgress({ message: 'Extracting data...' });
|
||||||
|
const extractedData = await this.aiProcessor.extractAndValidateData(imagePaths, job.data, logger);
|
||||||
|
|
||||||
|
await job.updateProgress({ message: 'Transforming data...' });
|
||||||
|
const { flyerData, itemsForDb } = await this.transformer.transform(
|
||||||
|
extractedData,
|
||||||
|
imagePaths,
|
||||||
|
job.data.originalFileName,
|
||||||
|
job.data.checksum,
|
||||||
|
job.data.userId,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
|
|
||||||
|
await job.updateProgress({ message: 'Saving to database...' });
|
||||||
|
const newFlyer = await this._saveProcessedFlyerData(
|
||||||
|
flyerData,
|
||||||
|
itemsForDb,
|
||||||
|
job.data.userId,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
|
logger.info({ flyerId: newFlyer.flyer_id }, `Job processed successfully.`);
|
||||||
|
|
||||||
|
// Step 3: On success, enqueue a cleanup job for all temporary files.
|
||||||
|
const pathsToClean = [filePath, ...createdImagePaths];
|
||||||
|
await this._enqueueCleanup(newFlyer.flyer_id, pathsToClean, logger);
|
||||||
|
|
||||||
|
return { flyerId: newFlyer.flyer_id };
|
||||||
|
}
|
||||||
|
|
||||||
async processJob(job: Job<FlyerJobData>) {
|
async processJob(job: Job<FlyerJobData>) {
|
||||||
const { filePath, originalFileName } = job.data;
|
const { originalFileName } = job.data;
|
||||||
const createdImagePaths: string[] = [];
|
|
||||||
let newFlyerId: number | undefined;
|
|
||||||
|
|
||||||
// Create a job-specific logger instance with context, as per ADR-004
|
// Create a job-specific logger instance with context, as per ADR-004
|
||||||
const logger = globalLogger.child({
|
const logger = globalLogger.child({
|
||||||
@@ -330,80 +260,74 @@ export class FlyerProcessingService {
|
|||||||
logger.info(`Picked up job.`);
|
logger.info(`Picked up job.`);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await job.updateProgress({ message: 'Starting process...' });
|
return await this._runProcessingSteps(job, logger);
|
||||||
const { imagePaths, createdImagePaths: tempImagePaths } = await this._prepareImageInputs(
|
|
||||||
filePath,
|
|
||||||
job,
|
|
||||||
logger,
|
|
||||||
);
|
|
||||||
createdImagePaths.push(...tempImagePaths);
|
|
||||||
|
|
||||||
await job.updateProgress({ message: 'Extracting data...' });
|
|
||||||
const extractedData = await this._extractFlyerDataWithAI(imagePaths, job.data, logger);
|
|
||||||
|
|
||||||
await job.updateProgress({ message: 'Saving to database...' });
|
|
||||||
const newFlyer = await this._saveProcessedFlyerData(
|
|
||||||
extractedData,
|
|
||||||
imagePaths,
|
|
||||||
job.data,
|
|
||||||
logger,
|
|
||||||
); // Pass logger
|
|
||||||
|
|
||||||
newFlyerId = newFlyer.flyer_id;
|
|
||||||
logger.info({ flyerId: newFlyerId }, `Job processed successfully.`);
|
|
||||||
return { flyerId: newFlyer.flyer_id };
|
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
// Define a structured error payload for job progress updates.
|
// On failure, explicitly log that we are not cleaning up files to allow for manual inspection.
|
||||||
// This allows the frontend to provide more specific feedback.
|
logger.warn(
|
||||||
let errorPayload = {
|
`Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.`,
|
||||||
errorCode: 'UNKNOWN_ERROR',
|
);
|
||||||
message: 'An unexpected error occurred during processing.',
|
// Delegate all error handling to a separate, testable method.
|
||||||
};
|
await this._reportErrorAndThrow(error, job, logger);
|
||||||
|
|
||||||
if (error instanceof UnsupportedFileTypeError) {
|
|
||||||
logger.error({ err: error }, `Unsupported file type error.`);
|
|
||||||
errorPayload = {
|
|
||||||
errorCode: 'UNSUPPORTED_FILE_TYPE',
|
|
||||||
message: error.message, // The message is already user-friendly
|
|
||||||
};
|
|
||||||
} else if (error instanceof PdfConversionError) {
|
|
||||||
logger.error({ err: error, stderr: error.stderr }, `PDF Conversion failed.`);
|
|
||||||
errorPayload = {
|
|
||||||
errorCode: 'PDF_CONVERSION_FAILED',
|
|
||||||
message:
|
|
||||||
'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.',
|
|
||||||
};
|
|
||||||
} else if (error instanceof AiDataValidationError) {
|
|
||||||
logger.error(
|
|
||||||
{ err: error, validationErrors: error.validationErrors, rawData: error.rawData },
|
|
||||||
`AI Data Validation failed.`,
|
|
||||||
);
|
|
||||||
errorPayload = {
|
|
||||||
errorCode: 'AI_VALIDATION_FAILED',
|
|
||||||
message:
|
|
||||||
"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
|
|
||||||
};
|
|
||||||
} else if (error instanceof Error) {
|
|
||||||
logger.error(
|
|
||||||
{ err: error, attemptsMade: job.attemptsMade, totalAttempts: job.opts.attempts },
|
|
||||||
`A generic error occurred in job.`,
|
|
||||||
);
|
|
||||||
// For generic errors, we can pass the message along, but still use a code.
|
|
||||||
errorPayload.message = error.message;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the job's progress with the structured error payload.
|
|
||||||
await job.updateProgress(errorPayload);
|
|
||||||
throw error;
|
|
||||||
} finally {
|
|
||||||
if (newFlyerId) {
|
|
||||||
const pathsToClean = [filePath, ...createdImagePaths];
|
|
||||||
await this._enqueueCleanup(newFlyerId, pathsToClean, logger);
|
|
||||||
} else {
|
|
||||||
logger.warn(
|
|
||||||
`Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async processCleanupJob(job: Job<CleanupJobData>) {
|
||||||
|
const { flyerId, paths } = job.data;
|
||||||
|
const logger = globalLogger.child({
|
||||||
|
jobId: job.id,
|
||||||
|
jobName: job.name,
|
||||||
|
flyerId,
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info({ paths }, `Picked up file cleanup job.`);
|
||||||
|
|
||||||
|
if (!paths?.length) {
|
||||||
|
logger.warn(`Job received no paths to clean. Skipping.`);
|
||||||
|
return { status: 'skipped', reason: 'no paths' };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use Promise.allSettled to attempt deleting all files and collect results.
|
||||||
|
// This is more robust than a for-loop as it attempts to delete all files
|
||||||
|
// even if one of them fails, and then reports on the collective result.
|
||||||
|
const deletionPromises = paths.map((path) => this.fs.unlink(path));
|
||||||
|
const results = await Promise.allSettled(deletionPromises);
|
||||||
|
|
||||||
|
// Process results using reduce for a more functional approach, avoiding mutable variables.
|
||||||
|
const { deletedCount, failedDeletions } = results.reduce(
|
||||||
|
(acc, result, index) => {
|
||||||
|
const filePath = paths[index];
|
||||||
|
if (result.status === 'fulfilled') {
|
||||||
|
logger.info(`Deleted temporary file: ${filePath}`);
|
||||||
|
acc.deletedCount++;
|
||||||
|
} else {
|
||||||
|
const unlinkError = result.reason;
|
||||||
|
if (
|
||||||
|
unlinkError instanceof Error &&
|
||||||
|
'code' in unlinkError &&
|
||||||
|
(unlinkError as NodeJS.ErrnoException).code === 'ENOENT'
|
||||||
|
) {
|
||||||
|
logger.warn(`File not found during cleanup (already deleted?): ${filePath}`);
|
||||||
|
acc.deletedCount++; // Still counts as a success for the job's purpose.
|
||||||
|
} else {
|
||||||
|
logger.error({ err: unlinkError, path: filePath }, 'Failed to delete temporary file.');
|
||||||
|
acc.failedDeletions.push({ path: filePath, reason: unlinkError });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return acc;
|
||||||
|
},
|
||||||
|
{ deletedCount: 0, failedDeletions: [] as { path: string; reason: unknown }[] },
|
||||||
|
);
|
||||||
|
|
||||||
|
// If any deletions failed for reasons other than 'file not found', fail the job.
|
||||||
|
if (failedDeletions.length > 0) {
|
||||||
|
const failedPaths = failedDeletions.map(({ path }) => path).join(', ');
|
||||||
|
const errorMessage = `Failed to delete ${failedDeletions.length} file(s): ${failedPaths}`;
|
||||||
|
// Throw an error to make the job fail and be retried by BullMQ.
|
||||||
|
// The individual errors have already been logged.
|
||||||
|
throw new Error(errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Successfully cleaned up ${deletedCount} file(s).`);
|
||||||
|
return { status: 'success', deletedCount };
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,13 +3,23 @@
|
|||||||
/**
|
/**
|
||||||
* Base class for all flyer processing errors.
|
* Base class for all flyer processing errors.
|
||||||
* This allows for catching all processing-related errors with a single `catch` block.
|
* This allows for catching all processing-related errors with a single `catch` block.
|
||||||
|
* Each custom error should define its own `errorCode` and a user-friendly `message`.
|
||||||
*/
|
*/
|
||||||
export class FlyerProcessingError extends Error {
|
export class FlyerProcessingError extends Error {
|
||||||
constructor(message: string) {
|
public errorCode: string;
|
||||||
super(message);
|
public userMessage: string;
|
||||||
|
|
||||||
|
constructor(message: string, errorCode: string = 'UNKNOWN_ERROR', userMessage?: string) {
|
||||||
|
super(message); // The 'message' property of Error is for internal/developer use.
|
||||||
this.name = this.constructor.name;
|
this.name = this.constructor.name;
|
||||||
|
this.errorCode = errorCode;
|
||||||
|
this.userMessage = userMessage || message; // User-friendly message for UI
|
||||||
Object.setPrototypeOf(this, new.target.prototype);
|
Object.setPrototypeOf(this, new.target.prototype);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
toErrorPayload(): { errorCode: string; message: string; [key: string]: any } {
|
||||||
|
return { errorCode: this.errorCode, message: this.userMessage };
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -18,9 +28,17 @@ export class FlyerProcessingError extends Error {
|
|||||||
export class PdfConversionError extends FlyerProcessingError {
|
export class PdfConversionError extends FlyerProcessingError {
|
||||||
public stderr?: string;
|
public stderr?: string;
|
||||||
constructor(message: string, stderr?: string) {
|
constructor(message: string, stderr?: string) {
|
||||||
super(message);
|
super(
|
||||||
|
message,
|
||||||
|
'PDF_CONVERSION_FAILED',
|
||||||
|
'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.',
|
||||||
|
);
|
||||||
this.stderr = stderr;
|
this.stderr = stderr;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
toErrorPayload(): { errorCode: string; message: string; [key: string]: any } {
|
||||||
|
return { ...super.toErrorPayload(), stderr: this.stderr };
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -32,7 +50,15 @@ export class AiDataValidationError extends FlyerProcessingError {
|
|||||||
public validationErrors: object,
|
public validationErrors: object,
|
||||||
public rawData: unknown,
|
public rawData: unknown,
|
||||||
) {
|
) {
|
||||||
super(message);
|
super(
|
||||||
|
message,
|
||||||
|
'AI_VALIDATION_FAILED',
|
||||||
|
"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
toErrorPayload(): { errorCode: string; message: string; [key: string]: any } {
|
||||||
|
return { ...super.toErrorPayload(), validationErrors: this.validationErrors, rawData: this.rawData };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -41,7 +67,7 @@ export class AiDataValidationError extends FlyerProcessingError {
|
|||||||
*/
|
*/
|
||||||
export class GeocodingFailedError extends FlyerProcessingError {
|
export class GeocodingFailedError extends FlyerProcessingError {
|
||||||
constructor(message: string) {
|
constructor(message: string) {
|
||||||
super(message);
|
super(message, 'GEOCODING_FAILED', 'Failed to geocode the address.');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -50,6 +76,6 @@ export class GeocodingFailedError extends FlyerProcessingError {
|
|||||||
*/
|
*/
|
||||||
export class UnsupportedFileTypeError extends FlyerProcessingError {
|
export class UnsupportedFileTypeError extends FlyerProcessingError {
|
||||||
constructor(message: string) {
|
constructor(message: string) {
|
||||||
super(message);
|
super(message, 'UNSUPPORTED_FILE_TYPE', message); // The message is already user-friendly.
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,12 +8,17 @@ const mocks = vi.hoisted(() => {
|
|||||||
const capturedProcessors: Record<string, (job: Job) => Promise<unknown>> = {};
|
const capturedProcessors: Record<string, (job: Job) => Promise<unknown>> = {};
|
||||||
|
|
||||||
return {
|
return {
|
||||||
sendEmail: vi.fn(),
|
// Service method mocks
|
||||||
unlink: vi.fn(),
|
|
||||||
processFlyerJob: vi.fn(),
|
processFlyerJob: vi.fn(),
|
||||||
|
processCleanupJob: vi.fn(),
|
||||||
|
processEmailJob: vi.fn(),
|
||||||
|
processDailyReportJob: vi.fn(),
|
||||||
|
processWeeklyReportJob: vi.fn(),
|
||||||
|
processTokenCleanupJob: vi.fn(),
|
||||||
|
|
||||||
|
// Test utilities
|
||||||
capturedProcessors,
|
capturedProcessors,
|
||||||
deleteExpiredResetTokens: vi.fn(),
|
// Mock the Worker constructor to capture the processor function. It must be a`
|
||||||
// Mock the Worker constructor to capture the processor function. It must be a
|
|
||||||
// `function` and not an arrow function so it can be called with `new`.
|
// `function` and not an arrow function so it can be called with `new`.
|
||||||
MockWorker: vi.fn(function (name: string, processor: (job: Job) => Promise<unknown>) {
|
MockWorker: vi.fn(function (name: string, processor: (job: Job) => Promise<unknown>) {
|
||||||
if (processor) {
|
if (processor) {
|
||||||
@@ -26,23 +31,20 @@ const mocks = vi.hoisted(() => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// --- Mock Modules ---
|
// --- Mock Modules ---
|
||||||
vi.mock('./emailService.server', async (importOriginal) => {
|
vi.mock('./emailService.server', () => ({
|
||||||
const actual = await importOriginal<typeof import('./emailService.server')>();
|
processEmailJob: mocks.processEmailJob,
|
||||||
return {
|
}));
|
||||||
...actual,
|
|
||||||
// We only need to mock the specific function being called by the worker.
|
|
||||||
// The rest of the module can retain its original implementation if needed elsewhere.
|
|
||||||
sendEmail: mocks.sendEmail,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
// The workers use an `fsAdapter`. We can mock the underlying `fsPromises`
|
vi.mock('./analyticsService.server', () => ({
|
||||||
// that the adapter is built from in queueService.server.ts.
|
analyticsService: {
|
||||||
vi.mock('node:fs/promises', () => ({
|
processDailyReportJob: mocks.processDailyReportJob,
|
||||||
default: {
|
processWeeklyReportJob: mocks.processWeeklyReportJob,
|
||||||
unlink: mocks.unlink,
|
},
|
||||||
// Add other fs functions if needed by other tests
|
}));
|
||||||
readdir: vi.fn(),
|
|
||||||
|
vi.mock('./userService', () => ({
|
||||||
|
userService: {
|
||||||
|
processTokenCleanupJob: mocks.processTokenCleanupJob,
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -56,28 +58,29 @@ vi.mock('./logger.server', () => ({
|
|||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
vi.mock('./db/index.db', () => ({
|
|
||||||
userRepo: {
|
|
||||||
deleteExpiredResetTokens: mocks.deleteExpiredResetTokens,
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock bullmq to capture the processor functions passed to the Worker constructor
|
// Mock bullmq to capture the processor functions passed to the Worker constructor
|
||||||
import { logger as mockLogger } from './logger.server';
|
import { logger as mockLogger } from './logger.server';
|
||||||
vi.mock('bullmq', () => ({
|
vi.mock('bullmq', () => ({
|
||||||
Worker: mocks.MockWorker,
|
Worker: mocks.MockWorker,
|
||||||
// FIX: Use a standard function for the mock constructor to allow `new Queue(...)` to work.
|
|
||||||
Queue: vi.fn(function () {
|
Queue: vi.fn(function () {
|
||||||
return { add: vi.fn() };
|
return { add: vi.fn() };
|
||||||
}),
|
}),
|
||||||
|
// Add UnrecoverableError to the mock so it can be used in tests
|
||||||
|
UnrecoverableError: class UnrecoverableError extends Error {},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock flyerProcessingService.server as flyerWorker depends on it
|
// Mock flyerProcessingService.server as flyerWorker and cleanupWorker depend on it
|
||||||
vi.mock('./flyerProcessingService.server', () => ({
|
vi.mock('./flyerProcessingService.server', () => {
|
||||||
FlyerProcessingService: class {
|
// Mock the constructor to return an object with the mocked methods
|
||||||
processJob = mocks.processFlyerJob;
|
return {
|
||||||
},
|
FlyerProcessingService: vi.fn().mockImplementation(function () {
|
||||||
}));
|
return {
|
||||||
|
processJob: mocks.processFlyerJob,
|
||||||
|
processCleanupJob: mocks.processCleanupJob,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
|
// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
|
||||||
vi.mock('./flyerDataTransformer', () => ({
|
vi.mock('./flyerDataTransformer', () => ({
|
||||||
@@ -110,15 +113,16 @@ describe('Queue Workers', () => {
|
|||||||
let tokenCleanupProcessor: (job: Job) => Promise<unknown>;
|
let tokenCleanupProcessor: (job: Job) => Promise<unknown>;
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
|
// Reset default mock implementations for hoisted mocks
|
||||||
|
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 });
|
||||||
|
mocks.processCleanupJob.mockResolvedValue({ status: 'success' });
|
||||||
|
mocks.processEmailJob.mockResolvedValue(undefined);
|
||||||
|
mocks.processDailyReportJob.mockResolvedValue({ status: 'success' });
|
||||||
|
mocks.processWeeklyReportJob.mockResolvedValue({ status: 'success' });
|
||||||
|
mocks.processTokenCleanupJob.mockResolvedValue({ deletedCount: 5 });
|
||||||
|
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
vi.resetModules();
|
vi.resetModules();
|
||||||
|
|
||||||
// Reset default mock implementations for hoisted mocks
|
|
||||||
mocks.sendEmail.mockResolvedValue(undefined);
|
|
||||||
mocks.unlink.mockResolvedValue(undefined);
|
|
||||||
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 }); // Default success for flyer processing
|
|
||||||
mocks.deleteExpiredResetTokens.mockResolvedValue(5);
|
|
||||||
|
|
||||||
await import('./workers.server');
|
await import('./workers.server');
|
||||||
|
|
||||||
flyerProcessor = mocks.capturedProcessors['flyer-processing'];
|
flyerProcessor = mocks.capturedProcessors['flyer-processing'];
|
||||||
@@ -155,10 +159,24 @@ describe('Queue Workers', () => {
|
|||||||
|
|
||||||
await expect(flyerProcessor(job)).rejects.toThrow('Flyer processing failed');
|
await expect(flyerProcessor(job)).rejects.toThrow('Flyer processing failed');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should re-throw UnrecoverableError from the service layer', async () => {
|
||||||
|
const { UnrecoverableError } = await import('bullmq');
|
||||||
|
const job = createMockJob({
|
||||||
|
filePath: '/tmp/fail.pdf',
|
||||||
|
originalFileName: 'fail.pdf',
|
||||||
|
checksum: 'def',
|
||||||
|
});
|
||||||
|
const unrecoverableError = new UnrecoverableError('Quota exceeded');
|
||||||
|
mocks.processFlyerJob.mockRejectedValue(unrecoverableError);
|
||||||
|
|
||||||
|
// The worker should just let this specific error type pass through.
|
||||||
|
await expect(flyerProcessor(job)).rejects.toThrow(unrecoverableError);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('emailWorker', () => {
|
describe('emailWorker', () => {
|
||||||
it('should call emailService.sendEmail with the job data', async () => {
|
it('should call emailService.processEmailJob with the job', async () => {
|
||||||
const jobData = {
|
const jobData = {
|
||||||
to: 'test@example.com',
|
to: 'test@example.com',
|
||||||
subject: 'Test Email',
|
subject: 'Test Email',
|
||||||
@@ -166,173 +184,84 @@ describe('Queue Workers', () => {
|
|||||||
text: 'Hello',
|
text: 'Hello',
|
||||||
};
|
};
|
||||||
const job = createMockJob(jobData);
|
const job = createMockJob(jobData);
|
||||||
|
|
||||||
await emailProcessor(job);
|
await emailProcessor(job);
|
||||||
|
expect(mocks.processEmailJob).toHaveBeenCalledTimes(1);
|
||||||
expect(mocks.sendEmail).toHaveBeenCalledTimes(1);
|
expect(mocks.processEmailJob).toHaveBeenCalledWith(job);
|
||||||
// The implementation passes the logger as the second argument
|
|
||||||
expect(mocks.sendEmail).toHaveBeenCalledWith(jobData, expect.anything());
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should log and re-throw an error if sendEmail fails with a non-Error object', async () => {
|
it('should re-throw an error if processEmailJob fails', async () => {
|
||||||
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
|
||||||
const emailError = 'SMTP server is down'; // Reject with a string
|
|
||||||
mocks.sendEmail.mockRejectedValue(emailError);
|
|
||||||
|
|
||||||
await expect(emailProcessor(job)).rejects.toThrow(emailError);
|
|
||||||
|
|
||||||
// The worker should wrap the string in an Error object for logging
|
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
|
||||||
{ err: new Error(emailError), jobData: job.data },
|
|
||||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should re-throw an error if sendEmail fails', async () => {
|
|
||||||
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
||||||
const emailError = new Error('SMTP server is down');
|
const emailError = new Error('SMTP server is down');
|
||||||
mocks.sendEmail.mockRejectedValue(emailError);
|
mocks.processEmailJob.mockRejectedValue(emailError);
|
||||||
|
|
||||||
await expect(emailProcessor(job)).rejects.toThrow('SMTP server is down');
|
await expect(emailProcessor(job)).rejects.toThrow('SMTP server is down');
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
|
||||||
{ err: emailError, jobData: job.data },
|
|
||||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('analyticsWorker', () => {
|
describe('analyticsWorker', () => {
|
||||||
it('should complete successfully for a valid report date', async () => {
|
it('should call analyticsService.processDailyReportJob with the job', async () => {
|
||||||
vi.useFakeTimers();
|
|
||||||
const job = createMockJob({ reportDate: '2024-01-01' });
|
const job = createMockJob({ reportDate: '2024-01-01' });
|
||||||
|
await analyticsProcessor(job);
|
||||||
const promise = analyticsProcessor(job);
|
expect(mocks.processDailyReportJob).toHaveBeenCalledTimes(1);
|
||||||
// Advance timers to simulate the 10-second task completing
|
expect(mocks.processDailyReportJob).toHaveBeenCalledWith(job);
|
||||||
await vi.advanceTimersByTimeAsync(10000);
|
|
||||||
await promise; // Wait for the promise to resolve
|
|
||||||
|
|
||||||
// No error should be thrown
|
|
||||||
expect(true).toBe(true);
|
|
||||||
vi.useRealTimers();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error if reportDate is "FAIL"', async () => {
|
it('should re-throw an error if processDailyReportJob fails', async () => {
|
||||||
const job = createMockJob({ reportDate: 'FAIL' });
|
const job = createMockJob({ reportDate: 'FAIL' });
|
||||||
|
const analyticsError = new Error('Analytics processing failed');
|
||||||
await expect(analyticsProcessor(job)).rejects.toThrow(
|
mocks.processDailyReportJob.mockRejectedValue(analyticsError);
|
||||||
'This is a test failure for the analytics job.',
|
await expect(analyticsProcessor(job)).rejects.toThrow('Analytics processing failed');
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('cleanupWorker', () => {
|
describe('cleanupWorker', () => {
|
||||||
it('should call unlink for each path provided in the job data', async () => {
|
it('should call flyerProcessingService.processCleanupJob with the job', async () => {
|
||||||
const jobData = {
|
const jobData = {
|
||||||
flyerId: 123,
|
flyerId: 123,
|
||||||
paths: ['/tmp/file1.jpg', '/tmp/file2.pdf'],
|
paths: ['/tmp/file1.jpg', '/tmp/file2.pdf'],
|
||||||
};
|
};
|
||||||
const job = createMockJob(jobData);
|
const job = createMockJob(jobData);
|
||||||
mocks.unlink.mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
await cleanupProcessor(job);
|
await cleanupProcessor(job);
|
||||||
|
expect(mocks.processCleanupJob).toHaveBeenCalledTimes(1);
|
||||||
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
expect(mocks.processCleanupJob).toHaveBeenCalledWith(job);
|
||||||
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file1.jpg');
|
|
||||||
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file2.pdf');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not throw an error if a file is already deleted (ENOENT)', async () => {
|
it('should re-throw an error if processCleanupJob fails', async () => {
|
||||||
const jobData = {
|
const jobData = { flyerId: 123, paths: ['/tmp/protected-file.jpg'] };
|
||||||
flyerId: 123,
|
|
||||||
paths: ['/tmp/existing.jpg', '/tmp/already-deleted.jpg'],
|
|
||||||
};
|
|
||||||
const job = createMockJob(jobData);
|
const job = createMockJob(jobData);
|
||||||
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
const cleanupError = new Error('Permission denied');
|
||||||
const enoentError: NodeJS.ErrnoException = new Error('File not found');
|
mocks.processCleanupJob.mockRejectedValue(cleanupError);
|
||||||
enoentError.code = 'ENOENT';
|
|
||||||
|
|
||||||
// First call succeeds, second call fails with ENOENT
|
|
||||||
mocks.unlink.mockResolvedValueOnce(undefined).mockRejectedValueOnce(enoentError);
|
|
||||||
|
|
||||||
// The processor should complete without throwing
|
|
||||||
await expect(cleanupProcessor(job)).resolves.toBeUndefined();
|
|
||||||
|
|
||||||
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should re-throw an error for issues other than ENOENT (e.g., permissions)', async () => {
|
|
||||||
const jobData = {
|
|
||||||
flyerId: 123,
|
|
||||||
paths: ['/tmp/protected-file.jpg'],
|
|
||||||
};
|
|
||||||
const job = createMockJob(jobData);
|
|
||||||
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
|
||||||
const permissionError: NodeJS.ErrnoException = new Error('Permission denied');
|
|
||||||
permissionError.code = 'EACCES';
|
|
||||||
|
|
||||||
mocks.unlink.mockRejectedValue(permissionError);
|
|
||||||
|
|
||||||
await expect(cleanupProcessor(job)).rejects.toThrow('Permission denied');
|
await expect(cleanupProcessor(job)).rejects.toThrow('Permission denied');
|
||||||
|
|
||||||
// Verify the error was logged by the worker's catch block
|
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
|
||||||
{ err: permissionError },
|
|
||||||
expect.stringContaining(
|
|
||||||
`[CleanupWorker] Job ${job.id} for flyer ${job.data.flyerId} failed.`,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('weeklyAnalyticsWorker', () => {
|
describe('weeklyAnalyticsWorker', () => {
|
||||||
it('should complete successfully for a valid report date', async () => {
|
it('should call analyticsService.processWeeklyReportJob with the job', async () => {
|
||||||
vi.useFakeTimers();
|
|
||||||
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||||
|
await weeklyAnalyticsProcessor(job);
|
||||||
const promise = weeklyAnalyticsProcessor(job);
|
expect(mocks.processWeeklyReportJob).toHaveBeenCalledTimes(1);
|
||||||
// Advance timers to simulate the 30-second task completing
|
expect(mocks.processWeeklyReportJob).toHaveBeenCalledWith(job);
|
||||||
await vi.advanceTimersByTimeAsync(30000);
|
|
||||||
await promise; // Wait for the promise to resolve
|
|
||||||
|
|
||||||
// No error should be thrown
|
|
||||||
expect(true).toBe(true);
|
|
||||||
vi.useRealTimers();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should re-throw an error if the job fails', async () => {
|
it('should re-throw an error if processWeeklyReportJob fails', async () => {
|
||||||
vi.useFakeTimers();
|
|
||||||
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||||
// Mock the internal logic to throw an error
|
const weeklyError = new Error('Weekly analytics job failed');
|
||||||
const originalSetTimeout = setTimeout;
|
mocks.processWeeklyReportJob.mockRejectedValue(weeklyError);
|
||||||
vi.spyOn(global, 'setTimeout').mockImplementation((callback, ms) => {
|
|
||||||
if (ms === 30000) {
|
|
||||||
// Target the simulated delay
|
|
||||||
throw new Error('Weekly analytics job failed');
|
|
||||||
}
|
|
||||||
return originalSetTimeout(callback, ms);
|
|
||||||
});
|
|
||||||
|
|
||||||
await expect(weeklyAnalyticsProcessor(job)).rejects.toThrow('Weekly analytics job failed');
|
await expect(weeklyAnalyticsProcessor(job)).rejects.toThrow('Weekly analytics job failed');
|
||||||
vi.useRealTimers();
|
|
||||||
vi.restoreAllMocks(); // Restore setTimeout mock
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('tokenCleanupWorker', () => {
|
describe('tokenCleanupWorker', () => {
|
||||||
it('should call userRepo.deleteExpiredResetTokens and return the count', async () => {
|
it('should call userService.processTokenCleanupJob with the job', async () => {
|
||||||
const job = createMockJob({ timestamp: new Date().toISOString() });
|
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||||
mocks.deleteExpiredResetTokens.mockResolvedValue(10);
|
await tokenCleanupProcessor(job);
|
||||||
|
expect(mocks.processTokenCleanupJob).toHaveBeenCalledTimes(1);
|
||||||
const result = await tokenCleanupProcessor(job);
|
expect(mocks.processTokenCleanupJob).toHaveBeenCalledWith(job);
|
||||||
|
|
||||||
expect(mocks.deleteExpiredResetTokens).toHaveBeenCalledTimes(1);
|
|
||||||
expect(result).toEqual({ deletedCount: 10 });
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should re-throw an error if the database call fails', async () => {
|
it('should re-throw an error if processTokenCleanupJob fails', async () => {
|
||||||
const job = createMockJob({ timestamp: new Date().toISOString() });
|
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||||
const dbError = new Error('DB cleanup failed');
|
const dbError = new Error('DB cleanup failed');
|
||||||
mocks.deleteExpiredResetTokens.mockRejectedValue(dbError);
|
mocks.processTokenCleanupJob.mockRejectedValue(dbError);
|
||||||
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
|
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,9 +1,12 @@
|
|||||||
// src/services/userService.ts
|
// src/services/userService.ts
|
||||||
import * as db from './db/index.db';
|
import * as db from './db/index.db';
|
||||||
|
import type { Job } from 'bullmq';
|
||||||
import type { Logger } from 'pino';
|
import type { Logger } from 'pino';
|
||||||
import { AddressRepository } from './db/address.db';
|
import { AddressRepository } from './db/address.db';
|
||||||
import { UserRepository } from './db/user.db';
|
import { UserRepository } from './db/user.db';
|
||||||
import type { Address, UserProfile } from '../types';
|
import type { Address, UserProfile } from '../types';
|
||||||
|
import { logger as globalLogger } from './logger.server';
|
||||||
|
import type { TokenCleanupJobData } from './queues.server';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encapsulates user-related business logic that may involve multiple repository calls.
|
* Encapsulates user-related business logic that may involve multiple repository calls.
|
||||||
@@ -44,6 +47,35 @@ class UserService {
|
|||||||
return addressId;
|
return addressId;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Processes a job to clean up expired password reset tokens from the database.
|
||||||
|
* @param job The BullMQ job object.
|
||||||
|
* @returns An object containing the count of deleted tokens.
|
||||||
|
*/
|
||||||
|
async processTokenCleanupJob(
|
||||||
|
job: Job<TokenCleanupJobData>,
|
||||||
|
): Promise<{ deletedCount: number }> {
|
||||||
|
const logger = globalLogger.child({
|
||||||
|
jobId: job.id,
|
||||||
|
jobName: job.name,
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('Picked up expired token cleanup job.');
|
||||||
|
|
||||||
|
try {
|
||||||
|
const deletedCount = await db.userRepo.deleteExpiredResetTokens(logger);
|
||||||
|
logger.info(`Successfully deleted ${deletedCount} expired tokens.`);
|
||||||
|
return { deletedCount };
|
||||||
|
} catch (error) {
|
||||||
|
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError, attemptsMade: job.attemptsMade },
|
||||||
|
'Expired token cleanup job failed.',
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const userService = new UserService();
|
export const userService = new UserService();
|
||||||
|
|||||||
@@ -6,13 +6,17 @@ import type { Job } from 'bullmq';
|
|||||||
const mocks = vi.hoisted(() => {
|
const mocks = vi.hoisted(() => {
|
||||||
// This object will store the processor functions captured from the worker constructors.
|
// This object will store the processor functions captured from the worker constructors.
|
||||||
const capturedProcessors: Record<string, (job: Job) => Promise<unknown>> = {};
|
const capturedProcessors: Record<string, (job: Job) => Promise<unknown>> = {};
|
||||||
|
|
||||||
return {
|
return {
|
||||||
sendEmail: vi.fn(),
|
// Service method mocks
|
||||||
unlink: vi.fn(),
|
|
||||||
processFlyerJob: vi.fn(),
|
processFlyerJob: vi.fn(),
|
||||||
|
processCleanupJob: vi.fn(),
|
||||||
|
processEmailJob: vi.fn(),
|
||||||
|
processDailyReportJob: vi.fn(),
|
||||||
|
processWeeklyReportJob: vi.fn(),
|
||||||
|
processTokenCleanupJob: vi.fn(),
|
||||||
|
|
||||||
|
// Test utilities
|
||||||
capturedProcessors,
|
capturedProcessors,
|
||||||
deleteExpiredResetTokens: vi.fn(),
|
|
||||||
// Mock the Worker constructor to capture the processor function. It must be a
|
// Mock the Worker constructor to capture the processor function. It must be a
|
||||||
// `function` and not an arrow function so it can be called with `new`.
|
// `function` and not an arrow function so it can be called with `new`.
|
||||||
MockWorker: vi.fn(function (name: string, processor: (job: Job) => Promise<unknown>) {
|
MockWorker: vi.fn(function (name: string, processor: (job: Job) => Promise<unknown>) {
|
||||||
@@ -26,23 +30,28 @@ const mocks = vi.hoisted(() => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// --- Mock Modules ---
|
// --- Mock Modules ---
|
||||||
vi.mock('./emailService.server', async (importOriginal) => {
|
vi.mock('./emailService.server', () => ({
|
||||||
const actual = await importOriginal<typeof import('./emailService.server')>();
|
processEmailJob: mocks.processEmailJob,
|
||||||
return {
|
}));
|
||||||
...actual,
|
|
||||||
// We only need to mock the specific function being called by the worker.
|
vi.mock('./analyticsService.server', () => ({
|
||||||
// The rest of the module can retain its original implementation if needed elsewhere.
|
analyticsService: {
|
||||||
sendEmail: mocks.sendEmail,
|
processDailyReportJob: mocks.processDailyReportJob,
|
||||||
};
|
processWeeklyReportJob: mocks.processWeeklyReportJob,
|
||||||
});
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('./userService', () => ({
|
||||||
|
userService: {
|
||||||
|
processTokenCleanupJob: mocks.processTokenCleanupJob,
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
// The workers use an `fsAdapter`. We can mock the underlying `fsPromises`
|
// The workers use an `fsAdapter`. We can mock the underlying `fsPromises`
|
||||||
// that the adapter is built from in queueService.server.ts.
|
// that the adapter is built from in queueService.server.ts.
|
||||||
vi.mock('node:fs/promises', () => ({
|
vi.mock('node:fs/promises', () => ({
|
||||||
default: {
|
default: {
|
||||||
unlink: mocks.unlink,
|
// unlink is no longer directly called by the worker
|
||||||
// Add other fs functions if needed by other tests
|
|
||||||
readdir: vi.fn(),
|
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -56,28 +65,29 @@ vi.mock('./logger.server', () => ({
|
|||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
vi.mock('./db/index.db', () => ({
|
|
||||||
userRepo: {
|
|
||||||
deleteExpiredResetTokens: mocks.deleteExpiredResetTokens,
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock bullmq to capture the processor functions passed to the Worker constructor
|
// Mock bullmq to capture the processor functions passed to the Worker constructor
|
||||||
import { logger as mockLogger } from './logger.server';
|
|
||||||
vi.mock('bullmq', () => ({
|
vi.mock('bullmq', () => ({
|
||||||
Worker: mocks.MockWorker,
|
Worker: mocks.MockWorker,
|
||||||
// FIX: Use a standard function for the mock constructor to allow `new Queue(...)` to work.
|
// FIX: Use a standard function for the mock constructor to allow `new Queue(...)` to work.
|
||||||
Queue: vi.fn(function () {
|
Queue: vi.fn(function () {
|
||||||
return { add: vi.fn() };
|
return { add: vi.fn() };
|
||||||
}),
|
}),
|
||||||
|
// Add UnrecoverableError to the mock so it can be used in tests
|
||||||
|
UnrecoverableError: class UnrecoverableError extends Error {},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock flyerProcessingService.server as flyerWorker depends on it
|
// Mock flyerProcessingService.server as flyerWorker depends on it
|
||||||
vi.mock('./flyerProcessingService.server', () => ({
|
vi.mock('./flyerProcessingService.server', () => {
|
||||||
FlyerProcessingService: class {
|
// Mock the constructor to return an object with the mocked methods
|
||||||
processJob = mocks.processFlyerJob;
|
return {
|
||||||
},
|
FlyerProcessingService: vi.fn().mockImplementation(function () {
|
||||||
}));
|
return {
|
||||||
|
processJob: mocks.processFlyerJob,
|
||||||
|
processCleanupJob: mocks.processCleanupJob,
|
||||||
|
};
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
|
// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
|
||||||
vi.mock('./flyerDataTransformer', () => ({
|
vi.mock('./flyerDataTransformer', () => ({
|
||||||
@@ -112,12 +122,13 @@ describe('Queue Workers', () => {
|
|||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
|
|
||||||
// Reset default mock implementations for hoisted mocks
|
// Reset default mock implementations for hoisted mocks
|
||||||
mocks.sendEmail.mockResolvedValue(undefined);
|
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 });
|
||||||
mocks.unlink.mockResolvedValue(undefined);
|
mocks.processCleanupJob.mockResolvedValue({ status: 'success' });
|
||||||
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 }); // Default success for flyer processing
|
mocks.processEmailJob.mockResolvedValue(undefined);
|
||||||
mocks.deleteExpiredResetTokens.mockResolvedValue(5);
|
mocks.processDailyReportJob.mockResolvedValue({ status: 'success' });
|
||||||
|
mocks.processWeeklyReportJob.mockResolvedValue({ status: 'success' });
|
||||||
|
mocks.processTokenCleanupJob.mockResolvedValue({ deletedCount: 5 });
|
||||||
|
|
||||||
// Reset modules to re-evaluate the workers.server.ts file with fresh mocks.
|
// Reset modules to re-evaluate the workers.server.ts file with fresh mocks.
|
||||||
// This ensures that new worker instances are created and their processors are captured for each test.
|
// This ensures that new worker instances are created and their processors are captured for each test.
|
||||||
@@ -162,10 +173,24 @@ describe('Queue Workers', () => {
|
|||||||
|
|
||||||
await expect(flyerProcessor(job)).rejects.toThrow('Flyer processing failed');
|
await expect(flyerProcessor(job)).rejects.toThrow('Flyer processing failed');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should re-throw UnrecoverableError from the service layer', async () => {
|
||||||
|
const { UnrecoverableError } = await import('bullmq');
|
||||||
|
const job = createMockJob({
|
||||||
|
filePath: '/tmp/fail.pdf',
|
||||||
|
originalFileName: 'fail.pdf',
|
||||||
|
checksum: 'def',
|
||||||
|
});
|
||||||
|
const unrecoverableError = new UnrecoverableError('Quota exceeded');
|
||||||
|
mocks.processFlyerJob.mockRejectedValue(unrecoverableError);
|
||||||
|
|
||||||
|
// The worker should just let this specific error type pass through.
|
||||||
|
await expect(flyerProcessor(job)).rejects.toThrow(unrecoverableError);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('emailWorker', () => {
|
describe('emailWorker', () => {
|
||||||
it('should call emailService.sendEmail with the job data', async () => {
|
it('should call emailService.processEmailJob with the job', async () => {
|
||||||
const jobData = {
|
const jobData = {
|
||||||
to: 'test@example.com',
|
to: 'test@example.com',
|
||||||
subject: 'Test Email',
|
subject: 'Test Email',
|
||||||
@@ -173,173 +198,84 @@ describe('Queue Workers', () => {
|
|||||||
text: 'Hello',
|
text: 'Hello',
|
||||||
};
|
};
|
||||||
const job = createMockJob(jobData);
|
const job = createMockJob(jobData);
|
||||||
|
|
||||||
await emailProcessor(job);
|
await emailProcessor(job);
|
||||||
|
expect(mocks.processEmailJob).toHaveBeenCalledTimes(1);
|
||||||
expect(mocks.sendEmail).toHaveBeenCalledTimes(1);
|
expect(mocks.processEmailJob).toHaveBeenCalledWith(job);
|
||||||
// The implementation passes the logger as the second argument
|
|
||||||
expect(mocks.sendEmail).toHaveBeenCalledWith(jobData, expect.anything());
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should log and re-throw an error if sendEmail fails with a non-Error object', async () => {
|
it('should re-throw an error if processEmailJob fails', async () => {
|
||||||
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
|
||||||
const emailError = 'SMTP server is down'; // Reject with a string
|
|
||||||
mocks.sendEmail.mockRejectedValue(emailError);
|
|
||||||
|
|
||||||
await expect(emailProcessor(job)).rejects.toThrow(emailError);
|
|
||||||
|
|
||||||
// The worker should wrap the string in an Error object for logging
|
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
|
||||||
{ err: new Error(emailError), jobData: job.data },
|
|
||||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should re-throw an error if sendEmail fails', async () => {
|
|
||||||
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
||||||
const emailError = new Error('SMTP server is down');
|
const emailError = new Error('SMTP server is down');
|
||||||
mocks.sendEmail.mockRejectedValue(emailError);
|
mocks.processEmailJob.mockRejectedValue(emailError);
|
||||||
|
|
||||||
await expect(emailProcessor(job)).rejects.toThrow('SMTP server is down');
|
await expect(emailProcessor(job)).rejects.toThrow('SMTP server is down');
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
|
||||||
{ err: emailError, jobData: job.data },
|
|
||||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('analyticsWorker', () => {
|
describe('analyticsWorker', () => {
|
||||||
it('should complete successfully for a valid report date', async () => {
|
it('should call analyticsService.processDailyReportJob with the job', async () => {
|
||||||
vi.useFakeTimers();
|
|
||||||
const job = createMockJob({ reportDate: '2024-01-01' });
|
const job = createMockJob({ reportDate: '2024-01-01' });
|
||||||
|
await analyticsProcessor(job);
|
||||||
const promise = analyticsProcessor(job);
|
expect(mocks.processDailyReportJob).toHaveBeenCalledTimes(1);
|
||||||
// Advance timers to simulate the 10-second task completing
|
expect(mocks.processDailyReportJob).toHaveBeenCalledWith(job);
|
||||||
await vi.advanceTimersByTimeAsync(10000);
|
|
||||||
await promise; // Wait for the promise to resolve
|
|
||||||
|
|
||||||
// No error should be thrown
|
|
||||||
expect(true).toBe(true);
|
|
||||||
vi.useRealTimers();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw an error if reportDate is "FAIL"', async () => {
|
it('should re-throw an error if processDailyReportJob fails', async () => {
|
||||||
const job = createMockJob({ reportDate: 'FAIL' });
|
const job = createMockJob({ reportDate: 'FAIL' });
|
||||||
|
const analyticsError = new Error('Analytics processing failed');
|
||||||
await expect(analyticsProcessor(job)).rejects.toThrow(
|
mocks.processDailyReportJob.mockRejectedValue(analyticsError);
|
||||||
'This is a test failure for the analytics job.',
|
await expect(analyticsProcessor(job)).rejects.toThrow('Analytics processing failed');
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('cleanupWorker', () => {
|
describe('cleanupWorker', () => {
|
||||||
it('should call unlink for each path provided in the job data', async () => {
|
it('should call flyerProcessingService.processCleanupJob with the job', async () => {
|
||||||
const jobData = {
|
const jobData = {
|
||||||
flyerId: 123,
|
flyerId: 123,
|
||||||
paths: ['/tmp/file1.jpg', '/tmp/file2.pdf'],
|
paths: ['/tmp/file1.jpg', '/tmp/file2.pdf'],
|
||||||
};
|
};
|
||||||
const job = createMockJob(jobData);
|
const job = createMockJob(jobData);
|
||||||
mocks.unlink.mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
await cleanupProcessor(job);
|
await cleanupProcessor(job);
|
||||||
|
expect(mocks.processCleanupJob).toHaveBeenCalledTimes(1);
|
||||||
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
expect(mocks.processCleanupJob).toHaveBeenCalledWith(job);
|
||||||
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file1.jpg');
|
|
||||||
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file2.pdf');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not throw an error if a file is already deleted (ENOENT)', async () => {
|
it('should re-throw an error if processCleanupJob fails', async () => {
|
||||||
const jobData = {
|
const jobData = { flyerId: 123, paths: ['/tmp/protected-file.jpg'] };
|
||||||
flyerId: 123,
|
|
||||||
paths: ['/tmp/existing.jpg', '/tmp/already-deleted.jpg'],
|
|
||||||
};
|
|
||||||
const job = createMockJob(jobData);
|
const job = createMockJob(jobData);
|
||||||
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
const cleanupError = new Error('Permission denied');
|
||||||
const enoentError: NodeJS.ErrnoException = new Error('File not found');
|
mocks.processCleanupJob.mockRejectedValue(cleanupError);
|
||||||
enoentError.code = 'ENOENT';
|
|
||||||
|
|
||||||
// First call succeeds, second call fails with ENOENT
|
|
||||||
mocks.unlink.mockResolvedValueOnce(undefined).mockRejectedValueOnce(enoentError);
|
|
||||||
|
|
||||||
// The processor should complete without throwing
|
|
||||||
await expect(cleanupProcessor(job)).resolves.toBeUndefined();
|
|
||||||
|
|
||||||
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should re-throw an error for issues other than ENOENT (e.g., permissions)', async () => {
|
|
||||||
const jobData = {
|
|
||||||
flyerId: 123,
|
|
||||||
paths: ['/tmp/protected-file.jpg'],
|
|
||||||
};
|
|
||||||
const job = createMockJob(jobData);
|
|
||||||
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
|
||||||
const permissionError: NodeJS.ErrnoException = new Error('Permission denied');
|
|
||||||
permissionError.code = 'EACCES';
|
|
||||||
|
|
||||||
mocks.unlink.mockRejectedValue(permissionError);
|
|
||||||
|
|
||||||
await expect(cleanupProcessor(job)).rejects.toThrow('Permission denied');
|
await expect(cleanupProcessor(job)).rejects.toThrow('Permission denied');
|
||||||
|
|
||||||
// Verify the error was logged by the worker's catch block
|
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
|
||||||
{ err: permissionError },
|
|
||||||
expect.stringContaining(
|
|
||||||
`[CleanupWorker] Job ${job.id} for flyer ${job.data.flyerId} failed.`,
|
|
||||||
),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('weeklyAnalyticsWorker', () => {
|
describe('weeklyAnalyticsWorker', () => {
|
||||||
it('should complete successfully for a valid report date', async () => {
|
it('should call analyticsService.processWeeklyReportJob with the job', async () => {
|
||||||
vi.useFakeTimers();
|
|
||||||
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||||
|
await weeklyAnalyticsProcessor(job);
|
||||||
const promise = weeklyAnalyticsProcessor(job);
|
expect(mocks.processWeeklyReportJob).toHaveBeenCalledTimes(1);
|
||||||
// Advance timers to simulate the 30-second task completing
|
expect(mocks.processWeeklyReportJob).toHaveBeenCalledWith(job);
|
||||||
await vi.advanceTimersByTimeAsync(30000);
|
|
||||||
await promise; // Wait for the promise to resolve
|
|
||||||
|
|
||||||
// No error should be thrown
|
|
||||||
expect(true).toBe(true);
|
|
||||||
vi.useRealTimers();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should re-throw an error if the job fails', async () => {
|
it('should re-throw an error if processWeeklyReportJob fails', async () => {
|
||||||
vi.useFakeTimers();
|
|
||||||
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||||
// Mock the internal logic to throw an error
|
const weeklyError = new Error('Weekly analytics job failed');
|
||||||
const originalSetTimeout = setTimeout;
|
mocks.processWeeklyReportJob.mockRejectedValue(weeklyError);
|
||||||
vi.spyOn(global, 'setTimeout').mockImplementation((callback, ms) => {
|
|
||||||
if (ms === 30000) {
|
|
||||||
// Target the simulated delay
|
|
||||||
throw new Error('Weekly analytics job failed');
|
|
||||||
}
|
|
||||||
return originalSetTimeout(callback, ms);
|
|
||||||
});
|
|
||||||
|
|
||||||
await expect(weeklyAnalyticsProcessor(job)).rejects.toThrow('Weekly analytics job failed');
|
await expect(weeklyAnalyticsProcessor(job)).rejects.toThrow('Weekly analytics job failed');
|
||||||
vi.useRealTimers();
|
|
||||||
vi.restoreAllMocks(); // Restore setTimeout mock
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('tokenCleanupWorker', () => {
|
describe('tokenCleanupWorker', () => {
|
||||||
it('should call userRepo.deleteExpiredResetTokens and return the count', async () => {
|
it('should call userService.processTokenCleanupJob with the job', async () => {
|
||||||
const job = createMockJob({ timestamp: new Date().toISOString() });
|
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||||
mocks.deleteExpiredResetTokens.mockResolvedValue(10);
|
await tokenCleanupProcessor(job);
|
||||||
|
expect(mocks.processTokenCleanupJob).toHaveBeenCalledTimes(1);
|
||||||
const result = await tokenCleanupProcessor(job);
|
expect(mocks.processTokenCleanupJob).toHaveBeenCalledWith(job);
|
||||||
|
|
||||||
expect(mocks.deleteExpiredResetTokens).toHaveBeenCalledTimes(1);
|
|
||||||
expect(result).toEqual({ deletedCount: 10 });
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should re-throw an error if the database call fails', async () => {
|
it('should re-throw an error if processTokenCleanupJob fails', async () => {
|
||||||
const job = createMockJob({ timestamp: new Date().toISOString() });
|
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||||
const dbError = new Error('DB cleanup failed');
|
const dbError = new Error('DB cleanup failed');
|
||||||
mocks.deleteExpiredResetTokens.mockRejectedValue(dbError);
|
mocks.processTokenCleanupJob.mockRejectedValue(dbError);
|
||||||
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
|
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -6,13 +6,16 @@ import { promisify } from 'util';
|
|||||||
import { logger } from './logger.server';
|
import { logger } from './logger.server';
|
||||||
import { connection } from './redis.server';
|
import { connection } from './redis.server';
|
||||||
import { aiService } from './aiService.server';
|
import { aiService } from './aiService.server';
|
||||||
|
import { analyticsService } from './analyticsService.server';
|
||||||
|
import { userService } from './userService';
|
||||||
import * as emailService from './emailService.server';
|
import * as emailService from './emailService.server';
|
||||||
import * as db from './db/index.db';
|
import * as db from './db/index.db';
|
||||||
import {
|
import {
|
||||||
FlyerProcessingService,
|
FlyerProcessingService,
|
||||||
type FlyerJobData,
|
type FlyerJobData,
|
||||||
type IFileSystem,
|
|
||||||
} from './flyerProcessingService.server';
|
} from './flyerProcessingService.server';
|
||||||
|
import { FlyerFileHandler, type IFileSystem } from './flyerFileHandler.server';
|
||||||
|
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||||
import {
|
import {
|
||||||
flyerQueue,
|
flyerQueue,
|
||||||
@@ -39,6 +42,8 @@ const fsAdapter: IFileSystem = {
|
|||||||
|
|
||||||
const flyerProcessingService = new FlyerProcessingService(
|
const flyerProcessingService = new FlyerProcessingService(
|
||||||
aiService,
|
aiService,
|
||||||
|
new FlyerFileHandler(fsAdapter, execAsync),
|
||||||
|
new FlyerAiProcessor(aiService, db.personalizationRepo),
|
||||||
db,
|
db,
|
||||||
fsAdapter,
|
fsAdapter,
|
||||||
execAsync,
|
execAsync,
|
||||||
@@ -50,6 +55,25 @@ const normalizeError = (error: unknown): Error => {
|
|||||||
return error instanceof Error ? error : new Error(String(error));
|
return error instanceof Error ? error : new Error(String(error));
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a higher-order function to wrap worker processors with common logic.
|
||||||
|
* This includes error normalization to ensure that any thrown value is an Error instance,
|
||||||
|
* which is a best practice for BullMQ workers.
|
||||||
|
* @param processor The core logic for the worker.
|
||||||
|
* @returns An async function that takes a job and executes the processor.
|
||||||
|
*/
|
||||||
|
const createWorkerProcessor = <T>(processor: (job: Job<T>) => Promise<any>) => {
|
||||||
|
return async (job: Job<T>) => {
|
||||||
|
try {
|
||||||
|
return await processor(job);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
// The service layer now handles detailed logging. This block just ensures
|
||||||
|
// any unexpected errors are normalized before BullMQ handles them.
|
||||||
|
throw normalizeError(error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
const attachWorkerEventListeners = (worker: Worker) => {
|
const attachWorkerEventListeners = (worker: Worker) => {
|
||||||
worker.on('completed', (job: Job, returnValue: unknown) => {
|
worker.on('completed', (job: Job, returnValue: unknown) => {
|
||||||
logger.info({ returnValue }, `[${worker.name}] Job ${job.id} completed successfully.`);
|
logger.info({ returnValue }, `[${worker.name}] Job ${job.id} completed successfully.`);
|
||||||
@@ -65,26 +89,7 @@ const attachWorkerEventListeners = (worker: Worker) => {
|
|||||||
|
|
||||||
export const flyerWorker = new Worker<FlyerJobData>(
|
export const flyerWorker = new Worker<FlyerJobData>(
|
||||||
'flyer-processing',
|
'flyer-processing',
|
||||||
async (job) => {
|
createWorkerProcessor((job) => flyerProcessingService.processJob(job)),
|
||||||
try {
|
|
||||||
return await flyerProcessingService.processJob(job);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
const errorMessage = wrappedError.message || '';
|
|
||||||
if (
|
|
||||||
errorMessage.includes('quota') ||
|
|
||||||
errorMessage.includes('429') ||
|
|
||||||
errorMessage.includes('RESOURCE_EXHAUSTED')
|
|
||||||
) {
|
|
||||||
logger.error(
|
|
||||||
{ err: wrappedError, jobId: job.id },
|
|
||||||
'[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
|
|
||||||
);
|
|
||||||
throw new UnrecoverableError(errorMessage);
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
connection,
|
connection,
|
||||||
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
|
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
|
||||||
@@ -93,24 +98,7 @@ export const flyerWorker = new Worker<FlyerJobData>(
|
|||||||
|
|
||||||
export const emailWorker = new Worker<EmailJobData>(
|
export const emailWorker = new Worker<EmailJobData>(
|
||||||
'email-sending',
|
'email-sending',
|
||||||
async (job: Job<EmailJobData>) => {
|
createWorkerProcessor((job) => emailService.processEmailJob(job)),
|
||||||
const { to, subject } = job.data;
|
|
||||||
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
|
|
||||||
jobLogger.info({ to, subject }, `[EmailWorker] Sending email for job ${job.id}`);
|
|
||||||
try {
|
|
||||||
await emailService.sendEmail(job.data, jobLogger);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
logger.error(
|
|
||||||
{
|
|
||||||
err: wrappedError,
|
|
||||||
jobData: job.data,
|
|
||||||
},
|
|
||||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
throw wrappedError;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
connection,
|
connection,
|
||||||
concurrency: parseInt(process.env.EMAIL_WORKER_CONCURRENCY || '10', 10),
|
concurrency: parseInt(process.env.EMAIL_WORKER_CONCURRENCY || '10', 10),
|
||||||
@@ -119,23 +107,7 @@ export const emailWorker = new Worker<EmailJobData>(
|
|||||||
|
|
||||||
export const analyticsWorker = new Worker<AnalyticsJobData>(
|
export const analyticsWorker = new Worker<AnalyticsJobData>(
|
||||||
'analytics-reporting',
|
'analytics-reporting',
|
||||||
async (job: Job<AnalyticsJobData>) => {
|
createWorkerProcessor((job) => analyticsService.processDailyReportJob(job)),
|
||||||
const { reportDate } = job.data;
|
|
||||||
logger.info({ reportDate }, `[AnalyticsWorker] Starting report generation for job ${job.id}`);
|
|
||||||
try {
|
|
||||||
if (reportDate === 'FAIL') {
|
|
||||||
throw new Error('This is a test failure for the analytics job.');
|
|
||||||
}
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, 10000));
|
|
||||||
logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
logger.error({ err: wrappedError, jobData: job.data },
|
|
||||||
`[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
throw wrappedError;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
connection,
|
connection,
|
||||||
concurrency: parseInt(process.env.ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
concurrency: parseInt(process.env.ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
||||||
@@ -144,51 +116,7 @@ export const analyticsWorker = new Worker<AnalyticsJobData>(
|
|||||||
|
|
||||||
export const cleanupWorker = new Worker<CleanupJobData>(
|
export const cleanupWorker = new Worker<CleanupJobData>(
|
||||||
'file-cleanup',
|
'file-cleanup',
|
||||||
async (job: Job<CleanupJobData>) => {
|
createWorkerProcessor((job) => flyerProcessingService.processCleanupJob(job)),
|
||||||
const { flyerId, paths } = job.data;
|
|
||||||
logger.info(
|
|
||||||
{ paths },
|
|
||||||
`[CleanupWorker] Starting file cleanup for job ${job.id} (Flyer ID: ${flyerId})`,
|
|
||||||
);
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (!paths || paths.length === 0) {
|
|
||||||
logger.warn(
|
|
||||||
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} received no paths to clean. Skipping.`,
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const filePath of paths) {
|
|
||||||
try {
|
|
||||||
await fsAdapter.unlink(filePath);
|
|
||||||
logger.info(`[CleanupWorker] Deleted temporary file: ${filePath}`);
|
|
||||||
} catch (unlinkError: unknown) {
|
|
||||||
if (
|
|
||||||
unlinkError instanceof Error &&
|
|
||||||
'code' in unlinkError &&
|
|
||||||
(unlinkError as any).code === 'ENOENT'
|
|
||||||
) {
|
|
||||||
logger.warn(
|
|
||||||
`[CleanupWorker] File not found during cleanup (already deleted?): ${filePath}`,
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
throw unlinkError;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
logger.info(
|
|
||||||
`[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`,
|
|
||||||
);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
logger.error(
|
|
||||||
{ err: wrappedError },
|
|
||||||
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
throw wrappedError;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
connection,
|
connection,
|
||||||
concurrency: parseInt(process.env.CLEANUP_WORKER_CONCURRENCY || '10', 10),
|
concurrency: parseInt(process.env.CLEANUP_WORKER_CONCURRENCY || '10', 10),
|
||||||
@@ -197,26 +125,7 @@ export const cleanupWorker = new Worker<CleanupJobData>(
|
|||||||
|
|
||||||
export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
||||||
'weekly-analytics-reporting',
|
'weekly-analytics-reporting',
|
||||||
async (job: Job<WeeklyAnalyticsJobData>) => {
|
createWorkerProcessor((job) => analyticsService.processWeeklyReportJob(job)),
|
||||||
const { reportYear, reportWeek } = job.data;
|
|
||||||
logger.info(
|
|
||||||
{ reportYear, reportWeek },
|
|
||||||
`[WeeklyAnalyticsWorker] Starting weekly report generation for job ${job.id}`,
|
|
||||||
);
|
|
||||||
try {
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, 30000));
|
|
||||||
logger.info(
|
|
||||||
`[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`,
|
|
||||||
);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
logger.error(
|
|
||||||
{ err: wrappedError, jobData: job.data },
|
|
||||||
`[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
throw wrappedError;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
connection,
|
connection,
|
||||||
concurrency: parseInt(process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
concurrency: parseInt(process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
||||||
@@ -225,19 +134,7 @@ export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
|||||||
|
|
||||||
export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
|
export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
|
||||||
'token-cleanup',
|
'token-cleanup',
|
||||||
async (job: Job<TokenCleanupJobData>) => {
|
createWorkerProcessor((job) => userService.processTokenCleanupJob(job)),
|
||||||
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
|
|
||||||
jobLogger.info('[TokenCleanupWorker] Starting cleanup of expired password reset tokens.');
|
|
||||||
try {
|
|
||||||
const deletedCount = await db.userRepo.deleteExpiredResetTokens(jobLogger);
|
|
||||||
jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`);
|
|
||||||
return { deletedCount };
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
jobLogger.error({ err: wrappedError }, `[TokenCleanupWorker] Job ${job.id} failed.`);
|
|
||||||
throw wrappedError;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
connection,
|
connection,
|
||||||
concurrency: 1,
|
concurrency: 1,
|
||||||
|
|||||||
@@ -39,6 +39,7 @@ import {
|
|||||||
ShoppingTripItem,
|
ShoppingTripItem,
|
||||||
Receipt,
|
Receipt,
|
||||||
ReceiptItem,
|
ReceiptItem,
|
||||||
|
SearchQuery,
|
||||||
ProcessingStage,
|
ProcessingStage,
|
||||||
UserAlert,
|
UserAlert,
|
||||||
UserSubmittedPrice,
|
UserSubmittedPrice,
|
||||||
@@ -1451,3 +1452,66 @@ export const createMockAppliance = (overrides: Partial<Appliance> = {}): Applian
|
|||||||
...overrides,
|
...overrides,
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// src/tests/utils/mockFactories.ts
|
||||||
|
|
||||||
|
// ... existing factories
|
||||||
|
|
||||||
|
export const createMockShoppingListItemPayload = (overrides: Partial<{ masterItemId: number; customItemName: string }> = {}): { masterItemId?: number; customItemName?: string } => ({
|
||||||
|
customItemName: 'Mock Item',
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createMockRecipeCommentPayload = (overrides: Partial<{ content: string; parentCommentId: number }> = {}): { content: string; parentCommentId?: number } => ({
|
||||||
|
content: 'This is a mock comment.',
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createMockProfileUpdatePayload = (overrides: Partial<Profile> = {}): Partial<Profile> => ({
|
||||||
|
full_name: 'Mock User',
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createMockAddressPayload = (overrides: Partial<Address> = {}): Partial<Address> => ({
|
||||||
|
address_line_1: '123 Mock St',
|
||||||
|
city: 'Mockville',
|
||||||
|
province_state: 'MS',
|
||||||
|
postal_code: '12345',
|
||||||
|
country: 'Mockland',
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createMockSearchQueryPayload = (overrides: Partial<Omit<SearchQuery, 'search_query_id' | 'id' | 'created_at' | 'user_id'>> = {}): Omit<SearchQuery, 'search_query_id' | 'id' | 'created_at' | 'user_id'> => ({
|
||||||
|
query_text: 'mock search',
|
||||||
|
result_count: 5,
|
||||||
|
was_successful: true,
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createMockWatchedItemPayload = (overrides: Partial<{ itemName: string; category: string }> = {}): { itemName: string; category: string } => ({
|
||||||
|
itemName: 'Mock Watched Item',
|
||||||
|
category: 'Pantry',
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createMockRegisterUserPayload = (
|
||||||
|
overrides: Partial<{
|
||||||
|
email: string;
|
||||||
|
password: string;
|
||||||
|
full_name: string;
|
||||||
|
avatar_url: string | undefined;
|
||||||
|
}> = {},
|
||||||
|
) => ({
|
||||||
|
email: 'mock@example.com',
|
||||||
|
password: 'password123',
|
||||||
|
full_name: 'Mock User',
|
||||||
|
avatar_url: undefined,
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const createMockLoginPayload = (overrides: Partial<{ email: string; password: string; rememberMe: boolean }> = {}) => ({
|
||||||
|
email: 'mock@example.com',
|
||||||
|
password: 'password123',
|
||||||
|
rememberMe: false,
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
|||||||
Reference in New Issue
Block a user