Compare commits
27 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
18c1161587 | ||
| 0010396780 | |||
|
|
d4557e13fb | ||
| 3e41130c69 | |||
|
|
d9034563d6 | ||
| 5836a75157 | |||
|
|
790008ae0d | ||
|
|
b5b91eb968 | ||
| 38eb810e7a | |||
|
|
458588a6e7 | ||
| 0b4113417f | |||
|
|
b59d2a9533 | ||
| 6740b35f8a | |||
|
|
92ad82a012 | ||
| 672e4ca597 | |||
|
|
e4d70a9b37 | ||
| c30f1c4162 | |||
|
|
44062a9f5b | ||
| 17fac8cf86 | |||
|
|
9fa8553486 | ||
|
|
f5b0b3b543 | ||
| e3ed5c7e63 | |||
|
|
ae0040e092 | ||
| 1f3f99d430 | |||
|
|
7be72f1758 | ||
| 0967c7a33d | |||
| 1f1c0fa6f3 |
25
package-lock.json
generated
25
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "flyer-crawler",
|
||||
"version": "0.5.2",
|
||||
"version": "0.7.3",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "flyer-crawler",
|
||||
"version": "0.5.2",
|
||||
"version": "0.7.3",
|
||||
"dependencies": {
|
||||
"@bull-board/api": "^6.14.2",
|
||||
"@bull-board/express": "^6.14.2",
|
||||
@@ -18,6 +18,7 @@
|
||||
"connect-timeout": "^1.9.1",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"date-fns": "^4.1.0",
|
||||
"exif-parser": "^0.1.12",
|
||||
"express": "^5.1.0",
|
||||
"express-list-endpoints": "^7.1.1",
|
||||
"express-rate-limit": "^8.2.1",
|
||||
@@ -35,6 +36,7 @@
|
||||
"passport-local": "^1.0.0",
|
||||
"pdfjs-dist": "^5.4.394",
|
||||
"pg": "^8.16.3",
|
||||
"piexifjs": "^1.0.6",
|
||||
"pino": "^10.1.0",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
@@ -66,6 +68,7 @@
|
||||
"@types/passport-jwt": "^4.0.1",
|
||||
"@types/passport-local": "^1.0.38",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@types/piexifjs": "^1.0.0",
|
||||
"@types/pino": "^7.0.4",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
@@ -5435,6 +5438,13 @@
|
||||
"pg-types": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/piexifjs": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/piexifjs/-/piexifjs-1.0.0.tgz",
|
||||
"integrity": "sha512-PPiGeCkmkZQgYjvqtjD3kp4OkbCox2vEFVuK4DaLVOIazJLAXk+/ujbizkIPH5CN4AnN9Clo5ckzUlaj3+SzCA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/pino": {
|
||||
"version": "7.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/pino/-/pino-7.0.4.tgz",
|
||||
@@ -8965,6 +8975,11 @@
|
||||
"bare-events": "^2.7.0"
|
||||
}
|
||||
},
|
||||
"node_modules/exif-parser": {
|
||||
"version": "0.1.12",
|
||||
"resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz",
|
||||
"integrity": "sha512-c2bQfLNbMzLPmzQuOr8fy0csy84WmwnER81W88DzTp9CYNPJ6yzOj2EZAh9pywYpqHnshVLHQJ8WzldAyfY+Iw=="
|
||||
},
|
||||
"node_modules/expect-type": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz",
|
||||
@@ -13363,6 +13378,12 @@
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/piexifjs": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/piexifjs/-/piexifjs-1.0.6.tgz",
|
||||
"integrity": "sha512-0wVyH0cKohzBQ5Gi2V1BuxYpxWfxF3cSqfFXfPIpl5tl9XLS5z4ogqhUCD20AbHi0h9aJkqXNJnkVev6gwh2ag==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/pino": {
|
||||
"version": "10.1.0",
|
||||
"resolved": "https://registry.npmjs.org/pino/-/pino-10.1.0.tgz",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "flyer-crawler",
|
||||
"private": true,
|
||||
"version": "0.5.2",
|
||||
"version": "0.7.3",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
||||
@@ -37,6 +37,7 @@
|
||||
"connect-timeout": "^1.9.1",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"date-fns": "^4.1.0",
|
||||
"exif-parser": "^0.1.12",
|
||||
"express": "^5.1.0",
|
||||
"express-list-endpoints": "^7.1.1",
|
||||
"express-rate-limit": "^8.2.1",
|
||||
@@ -54,6 +55,7 @@
|
||||
"passport-local": "^1.0.0",
|
||||
"pdfjs-dist": "^5.4.394",
|
||||
"pg": "^8.16.3",
|
||||
"piexifjs": "^1.0.6",
|
||||
"pino": "^10.1.0",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
@@ -85,6 +87,7 @@
|
||||
"@types/passport-jwt": "^4.0.1",
|
||||
"@types/passport-local": "^1.0.38",
|
||||
"@types/pg": "^8.15.6",
|
||||
"@types/piexifjs": "^1.0.0",
|
||||
"@types/pino": "^7.0.4",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
|
||||
@@ -263,14 +263,16 @@ describe('FlyerUploader', () => {
|
||||
});
|
||||
|
||||
it('should clear the polling timeout when a job fails', async () => {
|
||||
const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for failed job timeout clearance.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail-timeout' });
|
||||
|
||||
// We need at least one 'active' response to establish a timeout loop so we have something to clear
|
||||
// The second call should be a rejection, as this is how getJobStatus signals a failure.
|
||||
mockedAiApiClient.getJobStatus
|
||||
.mockResolvedValueOnce({ state: 'active', progress: { message: 'Working...' } })
|
||||
.mockResolvedValueOnce({
|
||||
state: 'active',
|
||||
progress: { message: 'Working...' },
|
||||
} as aiApiClientModule.JobStatus)
|
||||
.mockRejectedValueOnce(new aiApiClientModule.JobFailedError('Fatal Error', 'UNKNOWN_ERROR'));
|
||||
|
||||
renderComponent();
|
||||
@@ -284,23 +286,12 @@ describe('FlyerUploader', () => {
|
||||
|
||||
// Wait for the failure UI
|
||||
await waitFor(() => expect(screen.getByText(/Polling failed: Fatal Error/i)).toBeInTheDocument(), { timeout: 4000 });
|
||||
|
||||
// Verify clearTimeout was called
|
||||
expect(clearTimeoutSpy).toHaveBeenCalled();
|
||||
|
||||
// Verify no further polling occurs
|
||||
const callsBefore = mockedAiApiClient.getJobStatus.mock.calls.length;
|
||||
// Wait for a duration longer than the polling interval
|
||||
await act(() => new Promise((r) => setTimeout(r, 4000)));
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBefore);
|
||||
|
||||
clearTimeoutSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should clear the polling timeout when the component unmounts', async () => {
|
||||
const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount timeout clearance.');
|
||||
it('should stop polling for job status when the component unmounts', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount polling stop.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-unmount' });
|
||||
// Mock getJobStatus to always return 'active' to keep polling
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||
state: 'active',
|
||||
progress: { message: 'Polling...' },
|
||||
@@ -312,26 +303,38 @@ describe('FlyerUploader', () => {
|
||||
|
||||
fireEvent.change(input, { target: { files: [file] } });
|
||||
|
||||
// Wait for the first poll to complete and the UI to show the polling state
|
||||
// Wait for the first poll to complete and UI to update
|
||||
await screen.findByText('Polling...');
|
||||
|
||||
// Now that we are in a polling state (and a timeout is set), unmount the component
|
||||
console.log('--- [TEST LOG] ---: 2. Unmounting component to trigger cleanup effect.');
|
||||
// Wait for exactly one call to be sure polling has started.
|
||||
await waitFor(() => {
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
console.log('--- [TEST LOG] ---: 2. First poll confirmed.');
|
||||
|
||||
// Record the number of calls before unmounting.
|
||||
const callsBeforeUnmount = mockedAiApiClient.getJobStatus.mock.calls.length;
|
||||
|
||||
// Now unmount the component, which should stop the polling.
|
||||
console.log('--- [TEST LOG] ---: 3. Unmounting component.');
|
||||
unmount();
|
||||
|
||||
// Verify that the cleanup function in the useEffect hook was called
|
||||
expect(clearTimeoutSpy).toHaveBeenCalled();
|
||||
console.log('--- [TEST LOG] ---: 3. clearTimeout confirmed.');
|
||||
// Wait for a duration longer than the polling interval (3s) to see if more calls are made.
|
||||
console.log('--- [TEST LOG] ---: 4. Waiting for 4 seconds to check for further polling.');
|
||||
await act(() => new Promise((resolve) => setTimeout(resolve, 4000)));
|
||||
|
||||
clearTimeoutSpy.mockRestore();
|
||||
// Verify that getJobStatus was not called again after unmounting.
|
||||
console.log('--- [TEST LOG] ---: 5. Asserting no new polls occurred.');
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBeforeUnmount);
|
||||
});
|
||||
|
||||
it('should handle a duplicate flyer error (409)', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
|
||||
// The API client now throws a structured error for non-2xx responses.
|
||||
// The API client throws a structured error, which useFlyerUploader now parses
|
||||
// to set both the errorMessage and the duplicateFlyerId.
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
|
||||
status: 409,
|
||||
body: { flyerId: 99, message: 'Duplicate' },
|
||||
body: { flyerId: 99, message: 'This flyer has already been processed.' },
|
||||
});
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
||||
@@ -345,9 +348,10 @@ describe('FlyerUploader', () => {
|
||||
|
||||
try {
|
||||
console.log('--- [TEST LOG] ---: 4. AWAITING duplicate flyer message...');
|
||||
expect(
|
||||
await screen.findByText(/This flyer has already been processed/i),
|
||||
).toBeInTheDocument();
|
||||
// With the fix, the duplicate error message and the link are combined into a single paragraph.
|
||||
// We now look for this combined message.
|
||||
const errorMessage = await screen.findByText(/This flyer has already been processed. You can view it here:/i);
|
||||
expect(errorMessage).toBeInTheDocument();
|
||||
console.log('--- [TEST LOG] ---: 5. SUCCESS: Duplicate message found.');
|
||||
} catch (error) {
|
||||
console.error('--- [TEST LOG] ---: 5. ERROR: findByText for duplicate message timed out.');
|
||||
|
||||
@@ -30,6 +30,12 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
||||
if (statusMessage) logger.info(`FlyerUploader Status: ${statusMessage}`);
|
||||
}, [statusMessage]);
|
||||
|
||||
useEffect(() => {
|
||||
if (errorMessage) {
|
||||
logger.error(`[FlyerUploader] Error encountered: ${errorMessage}`, { duplicateFlyerId });
|
||||
}
|
||||
}, [errorMessage, duplicateFlyerId]);
|
||||
|
||||
// Handle completion and navigation
|
||||
useEffect(() => {
|
||||
if (processingState === 'completed' && flyerId) {
|
||||
@@ -94,14 +100,15 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
||||
|
||||
{errorMessage && (
|
||||
<div className="text-red-600 dark:text-red-400 font-semibold p-4 bg-red-100 dark:bg-red-900/30 rounded-md">
|
||||
<p>{errorMessage}</p>
|
||||
{duplicateFlyerId && (
|
||||
{duplicateFlyerId ? (
|
||||
<p>
|
||||
This flyer has already been processed. You can view it here:{' '}
|
||||
{errorMessage} You can view it here:{' '}
|
||||
<Link to={`/flyers/${duplicateFlyerId}`} className="text-blue-500 underline" data-discover="true">
|
||||
Flyer #{duplicateFlyerId}
|
||||
</Link>
|
||||
</p>
|
||||
) : (
|
||||
<p>{errorMessage}</p>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -3,6 +3,7 @@ import { useState, useCallback, useRef, useEffect } from 'react';
|
||||
import { logger } from '../services/logger.client';
|
||||
import { notifyError } from '../services/notificationService';
|
||||
|
||||
|
||||
/**
|
||||
* A custom React hook to simplify API calls, including loading and error states.
|
||||
* It is designed to work with apiClient functions that return a `Promise<Response>`.
|
||||
@@ -29,6 +30,14 @@ export function useApi<T, TArgs extends unknown[]>(
|
||||
const lastErrorMessageRef = useRef<string | null>(null);
|
||||
const abortControllerRef = useRef<AbortController>(new AbortController());
|
||||
|
||||
// Use a ref to track the latest apiFunction. This allows us to keep `execute` stable
|
||||
// even if `apiFunction` is recreated on every render (common with inline arrow functions).
|
||||
const apiFunctionRef = useRef(apiFunction);
|
||||
|
||||
useEffect(() => {
|
||||
apiFunctionRef.current = apiFunction;
|
||||
}, [apiFunction]);
|
||||
|
||||
// This effect ensures that when the component using the hook unmounts,
|
||||
// any in-flight request is cancelled.
|
||||
useEffect(() => {
|
||||
@@ -59,7 +68,7 @@ export function useApi<T, TArgs extends unknown[]>(
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await apiFunction(...args, abortControllerRef.current.signal);
|
||||
const response = await apiFunctionRef.current(...args, abortControllerRef.current.signal);
|
||||
|
||||
if (!response.ok) {
|
||||
// Attempt to parse a JSON error response. This is aligned with ADR-003,
|
||||
@@ -98,7 +107,17 @@ export function useApi<T, TArgs extends unknown[]>(
|
||||
}
|
||||
return result;
|
||||
} catch (e) {
|
||||
const err = e instanceof Error ? e : new Error('An unknown error occurred.');
|
||||
let err: Error;
|
||||
if (e instanceof Error) {
|
||||
err = e;
|
||||
} else if (typeof e === 'object' && e !== null && 'status' in e) {
|
||||
// Handle structured errors (e.g. { status: 409, body: { ... } })
|
||||
const structuredError = e as { status: number; body?: { message?: string } };
|
||||
const message = structuredError.body?.message || `Request failed with status ${structuredError.status}`;
|
||||
err = new Error(message);
|
||||
} else {
|
||||
err = new Error('An unknown error occurred.');
|
||||
}
|
||||
// If the error is an AbortError, it's an intentional cancellation, so we don't set an error state.
|
||||
if (err.name === 'AbortError') {
|
||||
logger.info('API request was cancelled.', { functionName: apiFunction.name });
|
||||
@@ -122,7 +141,7 @@ export function useApi<T, TArgs extends unknown[]>(
|
||||
setIsRefetching(false);
|
||||
}
|
||||
},
|
||||
[apiFunction],
|
||||
[], // execute is now stable because it uses apiFunctionRef
|
||||
); // abortControllerRef is stable
|
||||
|
||||
return { execute, loading, isRefetching, error, data, reset };
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// src/hooks/useFlyerUploader.ts
|
||||
// src/hooks/useFlyerUploader.ts
|
||||
import { useState, useCallback } from 'react';
|
||||
import { useState, useCallback, useMemo } from 'react';
|
||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import {
|
||||
uploadAndProcessFlyer,
|
||||
@@ -14,6 +14,28 @@ import type { ProcessingStage } from '../types';
|
||||
|
||||
export type ProcessingState = 'idle' | 'uploading' | 'polling' | 'completed' | 'error';
|
||||
|
||||
// Define a type for the structured error thrown by the API client
|
||||
interface ApiError {
|
||||
status: number;
|
||||
body: {
|
||||
message: string;
|
||||
flyerId?: number;
|
||||
};
|
||||
}
|
||||
|
||||
// Type guard to check if an error is a structured API error
|
||||
function isApiError(error: unknown): error is ApiError {
|
||||
return (
|
||||
typeof error === 'object' &&
|
||||
error !== null &&
|
||||
'status' in error &&
|
||||
typeof (error as { status: unknown }).status === 'number' &&
|
||||
'body' in error &&
|
||||
typeof (error as { body: unknown }).body === 'object' &&
|
||||
(error as { body: unknown }).body !== null &&
|
||||
'message' in ((error as { body: unknown }).body as object)
|
||||
);
|
||||
}
|
||||
export const useFlyerUploader = () => {
|
||||
const queryClient = useQueryClient();
|
||||
const [jobId, setJobId] = useState<string | null>(null);
|
||||
@@ -81,40 +103,57 @@ export const useFlyerUploader = () => {
|
||||
queryClient.removeQueries({ queryKey: ['jobStatus'] });
|
||||
}, [uploadMutation, queryClient]);
|
||||
|
||||
// Consolidate state for the UI from the react-query hooks
|
||||
const processingState = ((): ProcessingState => {
|
||||
if (uploadMutation.isPending) return 'uploading';
|
||||
if (jobStatus && (jobStatus.state === 'active' || jobStatus.state === 'waiting'))
|
||||
return 'polling';
|
||||
if (jobStatus?.state === 'completed') {
|
||||
// If the job is complete but didn't return a flyerId, it's an error state.
|
||||
if (!jobStatus.returnValue?.flyerId) {
|
||||
return 'error';
|
||||
// Consolidate state derivation for the UI from the react-query hooks using useMemo.
|
||||
// This improves performance by memoizing the derived state and makes the logic easier to follow.
|
||||
const { processingState, errorMessage, duplicateFlyerId, flyerId, statusMessage } = useMemo(() => {
|
||||
// The order of these checks is critical. Errors must be checked first to override
|
||||
// any stale `jobStatus` from a previous successful poll.
|
||||
const state: ProcessingState = (() => {
|
||||
if (uploadMutation.isError || pollError) return 'error';
|
||||
if (uploadMutation.isPending) return 'uploading';
|
||||
if (jobStatus && (jobStatus.state === 'active' || jobStatus.state === 'waiting'))
|
||||
return 'polling';
|
||||
if (jobStatus?.state === 'completed') {
|
||||
if (!jobStatus.returnValue?.flyerId) return 'error';
|
||||
return 'completed';
|
||||
}
|
||||
return 'completed';
|
||||
}
|
||||
if (uploadMutation.isError || jobStatus?.state === 'failed' || pollError) return 'error';
|
||||
return 'idle';
|
||||
})();
|
||||
return 'idle';
|
||||
})();
|
||||
|
||||
const getErrorMessage = () => {
|
||||
const uploadError = uploadMutation.error as any;
|
||||
if (uploadMutation.isError) {
|
||||
return uploadError?.body?.message || uploadError?.message || 'Upload failed.';
|
||||
}
|
||||
if (pollError) return `Polling failed: ${pollError.message}`;
|
||||
if (jobStatus?.state === 'failed') {
|
||||
return `Processing failed: ${jobStatus.progress?.message || jobStatus.failedReason}`;
|
||||
}
|
||||
if (jobStatus?.state === 'completed' && !jobStatus.returnValue?.flyerId) {
|
||||
return 'Job completed but did not return a flyer ID.';
|
||||
}
|
||||
return null;
|
||||
};
|
||||
let msg: string | null = null;
|
||||
let dupId: number | null = null;
|
||||
|
||||
const errorMessage = getErrorMessage();
|
||||
const duplicateFlyerId = (uploadMutation.error as any)?.body?.flyerId ?? null;
|
||||
const flyerId = jobStatus?.state === 'completed' ? jobStatus.returnValue?.flyerId : null;
|
||||
if (state === 'error') {
|
||||
if (uploadMutation.isError) {
|
||||
const uploadError = uploadMutation.error;
|
||||
if (isApiError(uploadError)) {
|
||||
msg = uploadError.body.message;
|
||||
// Specifically handle 409 Conflict for duplicate flyers
|
||||
if (uploadError.status === 409) {
|
||||
dupId = uploadError.body.flyerId ?? null;
|
||||
}
|
||||
} else if (uploadError instanceof Error) {
|
||||
msg = uploadError.message;
|
||||
} else {
|
||||
msg = 'An unknown upload error occurred.';
|
||||
}
|
||||
} else if (pollError) {
|
||||
msg = `Polling failed: ${pollError.message}`;
|
||||
} else if (jobStatus?.state === 'failed') {
|
||||
msg = `Processing failed: ${jobStatus.progress?.message || jobStatus.failedReason || 'Unknown reason'}`;
|
||||
} else if (jobStatus?.state === 'completed' && !jobStatus.returnValue?.flyerId) {
|
||||
msg = 'Job completed but did not return a flyer ID.';
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
processingState: state,
|
||||
errorMessage: msg,
|
||||
duplicateFlyerId: dupId,
|
||||
flyerId: jobStatus?.state === 'completed' ? jobStatus.returnValue?.flyerId ?? null : null,
|
||||
statusMessage: uploadMutation.isPending ? 'Uploading file...' : jobStatus?.progress?.message,
|
||||
};
|
||||
}, [uploadMutation, jobStatus, pollError]);
|
||||
|
||||
return {
|
||||
processingState,
|
||||
|
||||
@@ -15,7 +15,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
|
||||
// FIX: Stabilize the apiFunction passed to useApi.
|
||||
// By wrapping this in useCallback, we ensure the same function instance is passed to
|
||||
// useApi on every render. This prevents the `execute` function returned by `useApi`
|
||||
// from being recreated, which in turn breaks the infinite re-render loop in the useEffect below.
|
||||
// from being recreated, which in turn breaks the infinite re-render loop in the useEffect.
|
||||
const getProfileCallback = useCallback(() => apiClient.getAuthenticatedUserProfile(), []);
|
||||
|
||||
const { execute: checkTokenApi } = useApi<UserProfile, []>(getProfileCallback);
|
||||
|
||||
@@ -4,17 +4,21 @@ import { FlyersContext, FlyersContextType } from '../contexts/FlyersContext';
|
||||
import type { Flyer } from '../types';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import { useInfiniteQuery } from '../hooks/useInfiniteQuery';
|
||||
import { useCallback } from 'react';
|
||||
|
||||
export const FlyersProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
||||
// Memoize the fetch function to ensure stability for the useInfiniteQuery hook.
|
||||
const fetchFlyersFn = useCallback(apiClient.fetchFlyers, []);
|
||||
|
||||
const {
|
||||
data: flyers,
|
||||
isLoading: isLoadingFlyers,
|
||||
isLoading: isLoadingFlyers,
|
||||
error: flyersError,
|
||||
fetchNextPage: fetchNextFlyersPage,
|
||||
hasNextPage: hasNextFlyersPage,
|
||||
refetch: refetchFlyers,
|
||||
isRefetching: isRefetchingFlyers,
|
||||
} = useInfiniteQuery<Flyer>(apiClient.fetchFlyers);
|
||||
} = useInfiniteQuery<Flyer>(fetchFlyersFn);
|
||||
|
||||
const value: FlyersContextType = {
|
||||
flyers: flyers || [],
|
||||
@@ -26,5 +30,5 @@ export const FlyersProvider: React.FC<{ children: ReactNode }> = ({ children })
|
||||
refetchFlyers,
|
||||
};
|
||||
|
||||
return <FlyersContext.Provider value={value}>{children}</FlyersContext.Provider>;
|
||||
return <FlyersContext.Provider value={value}>{children}</FlyersContext.Provider>;
|
||||
};
|
||||
|
||||
@@ -1,14 +1,22 @@
|
||||
// src/providers/MasterItemsProvider.tsx
|
||||
import React, { ReactNode, useMemo } from 'react';
|
||||
import React, { ReactNode, useMemo, useEffect, useCallback } from 'react';
|
||||
import { MasterItemsContext } from '../contexts/MasterItemsContext';
|
||||
import type { MasterGroceryItem } from '../types';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import { useApiOnMount } from '../hooks/useApiOnMount';
|
||||
import { logger } from '../services/logger.client';
|
||||
|
||||
export const MasterItemsProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
||||
const { data, loading, error } = useApiOnMount<MasterGroceryItem[], []>(() =>
|
||||
apiClient.fetchMasterItems(),
|
||||
);
|
||||
// LOGGING: Check if the provider is unmounting/remounting repeatedly
|
||||
useEffect(() => {
|
||||
logger.debug('MasterItemsProvider: MOUNTED');
|
||||
return () => logger.debug('MasterItemsProvider: UNMOUNTED');
|
||||
}, []);
|
||||
|
||||
// Memoize the fetch function to ensure stability for the useApiOnMount hook.
|
||||
const fetchFn = useCallback(() => apiClient.fetchMasterItems(), []);
|
||||
|
||||
const { data, loading, error } = useApiOnMount<MasterGroceryItem[], []>(fetchFn);
|
||||
|
||||
const value = useMemo(
|
||||
() => ({
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
// src/providers/UserDataProvider.tsx
|
||||
import React, { useState, useEffect, useMemo, ReactNode } from 'react';
|
||||
import { logger } from '../services/logger.client';
|
||||
import React, { useState, useEffect, useMemo, ReactNode, useCallback } from 'react';
|
||||
import { UserDataContext } from '../contexts/UserDataContext';
|
||||
import type { MasterGroceryItem, ShoppingList } from '../types';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
@@ -9,18 +10,25 @@ import { useAuth } from '../hooks/useAuth';
|
||||
export const UserDataProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
||||
const { userProfile } = useAuth();
|
||||
|
||||
// Wrap the API calls in useCallback to prevent unnecessary re-renders.
|
||||
const fetchWatchedItemsFn = useCallback(
|
||||
() => apiClient.fetchWatchedItems(),
|
||||
[],
|
||||
);
|
||||
const fetchShoppingListsFn = useCallback(() => apiClient.fetchShoppingLists(), []);
|
||||
|
||||
const {
|
||||
data: watchedItemsData,
|
||||
loading: isLoadingWatched,
|
||||
error: watchedItemsError,
|
||||
} = useApiOnMount<MasterGroceryItem[], []>(() => apiClient.fetchWatchedItems(), [userProfile], {
|
||||
} = useApiOnMount<MasterGroceryItem[], []>(fetchWatchedItemsFn, [userProfile], {
|
||||
enabled: !!userProfile,
|
||||
});
|
||||
const {
|
||||
data: shoppingListsData,
|
||||
loading: isLoadingShoppingLists,
|
||||
loading: isLoadingShoppingLists,
|
||||
error: shoppingListsError,
|
||||
} = useApiOnMount<ShoppingList[], []>(() => apiClient.fetchShoppingLists(), [userProfile], {
|
||||
} = useApiOnMount<ShoppingList[], []>(fetchShoppingListsFn, [userProfile], {
|
||||
enabled: !!userProfile,
|
||||
});
|
||||
|
||||
@@ -32,7 +40,7 @@ export const UserDataProvider: React.FC<{ children: ReactNode }> = ({ children }
|
||||
useEffect(() => {
|
||||
// When the user logs out (user becomes null), immediately clear all user-specific data.
|
||||
// This also serves to clear out old data when a new user logs in, before their new data arrives.
|
||||
if (!userProfile) {
|
||||
if (!userProfile) {
|
||||
setWatchedItems([]);
|
||||
setShoppingLists([]);
|
||||
return;
|
||||
@@ -60,7 +68,7 @@ export const UserDataProvider: React.FC<{ children: ReactNode }> = ({ children }
|
||||
watchedItemsError,
|
||||
shoppingListsError,
|
||||
],
|
||||
);
|
||||
);
|
||||
|
||||
return <UserDataContext.Provider value={value}>{children}</UserDataContext.Provider>;
|
||||
};
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
// src/routes/admin.content.routes.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { describe, it, expect, vi, beforeEach, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import type { Request, Response, NextFunction } from 'express';
|
||||
import path from 'path';
|
||||
import {
|
||||
createMockUserProfile,
|
||||
createMockSuggestedCorrection,
|
||||
@@ -15,6 +16,7 @@ import type { SuggestedCorrection, Brand, UserProfile, UnmatchedFlyerItem } from
|
||||
import { NotFoundError } from '../services/db/errors.db'; // This can stay, it's a type/class not a module with side effects.
|
||||
import fs from 'node:fs/promises';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { cleanupFiles } from '../tests/utils/cleanupFiles';
|
||||
|
||||
// Mock the file upload middleware to allow testing the controller's internal check
|
||||
vi.mock('../middleware/fileUpload.middleware', () => ({
|
||||
@@ -140,6 +142,26 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Safeguard to clean up any logo files created during tests.
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
try {
|
||||
const allFiles = await fs.readdir(uploadDir);
|
||||
// Files are named like 'logoImage-timestamp-original.ext'
|
||||
const testFiles = allFiles
|
||||
.filter((f) => f.startsWith('logoImage-'))
|
||||
.map((f) => path.join(uploadDir, f));
|
||||
|
||||
if (testFiles.length > 0) {
|
||||
await cleanupFiles(testFiles);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
console.error('Error during admin content test file cleanup:', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
describe('Corrections Routes', () => {
|
||||
it('GET /corrections should return corrections data', async () => {
|
||||
const mockCorrections: SuggestedCorrection[] = [
|
||||
|
||||
@@ -165,6 +165,38 @@ describe('Auth Routes (/api/auth)', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should allow registration with an empty string for avatar_url', async () => {
|
||||
// Arrange
|
||||
const email = 'avatar-user@test.com';
|
||||
const mockNewUser = createMockUserProfile({
|
||||
user: { user_id: 'avatar-user-id', email },
|
||||
});
|
||||
mockedAuthService.registerAndLoginUser.mockResolvedValue({
|
||||
newUserProfile: mockNewUser,
|
||||
accessToken: 'avatar-access-token',
|
||||
refreshToken: 'avatar-refresh-token',
|
||||
});
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).post('/api/auth/register').send({
|
||||
email,
|
||||
password: strongPassword,
|
||||
full_name: 'Avatar User',
|
||||
avatar_url: '', // Send an empty string
|
||||
});
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.body.message).toBe('User registered successfully!');
|
||||
expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
|
||||
email,
|
||||
strongPassword,
|
||||
'Avatar User',
|
||||
undefined, // The preprocess step in the Zod schema should convert '' to undefined
|
||||
mockLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it('should set a refresh token cookie on successful registration', async () => {
|
||||
const mockNewUser = createMockUserProfile({
|
||||
user: { user_id: 'new-user-id', email: 'cookie@test.com' },
|
||||
|
||||
@@ -23,7 +23,9 @@ const forgotPasswordLimiter = rateLimit({
|
||||
message: 'Too many password reset requests from this IP, please try again after 15 minutes.',
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
skip: () => isTestEnv, // Skip this middleware if in test environment
|
||||
// Do not skip in test environment so we can write integration tests for it.
|
||||
// The limiter uses an in-memory store by default, so counts are reset when the test server restarts.
|
||||
// skip: () => isTestEnv,
|
||||
});
|
||||
|
||||
const resetPasswordLimiter = rateLimit({
|
||||
@@ -49,7 +51,11 @@ const registerSchema = z.object({
|
||||
}),
|
||||
// Sanitize optional string inputs.
|
||||
full_name: z.string().trim().optional(),
|
||||
avatar_url: z.string().trim().url().optional(),
|
||||
// Allow empty string or valid URL. If empty string is received, convert to undefined.
|
||||
avatar_url: z.preprocess(
|
||||
(val) => (val === '' ? undefined : val),
|
||||
z.string().trim().url().optional(),
|
||||
),
|
||||
}),
|
||||
});
|
||||
|
||||
|
||||
@@ -19,6 +19,12 @@ router.get(
|
||||
validateRequest(emptySchema),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
// LOGGING: Track how often this heavy DB call is actually made vs served from cache
|
||||
req.log.info('Fetching master items list from database...');
|
||||
|
||||
// Optimization: This list changes rarely. Instruct clients to cache it for 1 hour (3600s).
|
||||
res.set('Cache-Control', 'public, max-age=3600');
|
||||
|
||||
const masterItems = await db.personalizationRepo.getAllMasterItems(req.log);
|
||||
res.json(masterItems);
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
// src/routes/user.routes.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { describe, it, expect, vi, beforeEach, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import express from 'express';
|
||||
import path from 'path';
|
||||
import fs from 'node:fs/promises';
|
||||
import {
|
||||
createMockUserProfile,
|
||||
@@ -19,6 +20,7 @@ import { Appliance, Notification, DietaryRestriction } from '../types';
|
||||
import { ForeignKeyConstraintError, NotFoundError, ValidationError } from '../services/db/errors.db';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
import { cleanupFiles } from '../tests/utils/cleanupFiles';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { userService } from '../services/userService';
|
||||
|
||||
@@ -166,6 +168,26 @@ describe('User Routes (/api/users)', () => {
|
||||
beforeEach(() => {
|
||||
// All tests in this block will use the authenticated app
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Safeguard to clean up any avatar files created during tests.
|
||||
const uploadDir = path.resolve(__dirname, '../../../uploads/avatars');
|
||||
try {
|
||||
const allFiles = await fs.readdir(uploadDir);
|
||||
// Files are named like 'avatar-user-123-timestamp.ext'
|
||||
const testFiles = allFiles
|
||||
.filter((f) => f.startsWith(`avatar-${mockUserProfile.user.user_id}`))
|
||||
.map((f) => path.join(uploadDir, f));
|
||||
|
||||
if (testFiles.length > 0) {
|
||||
await cleanupFiles(testFiles);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
console.error('Error during user routes test file cleanup:', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
describe('GET /profile', () => {
|
||||
it('should return the full user profile', async () => {
|
||||
vi.mocked(db.userRepo.findUserProfileById).mockResolvedValue(mockUserProfile);
|
||||
@@ -563,6 +585,27 @@ describe('User Routes (/api/users)', () => {
|
||||
expect(response.body).toEqual(updatedProfile);
|
||||
});
|
||||
|
||||
it('should allow updating the profile with an empty string for avatar_url', async () => {
|
||||
// Arrange
|
||||
const profileUpdates = { avatar_url: '' };
|
||||
// The service should receive `undefined` after Zod preprocessing
|
||||
const updatedProfile = createMockUserProfile({ ...mockUserProfile, avatar_url: undefined });
|
||||
vi.mocked(db.userRepo.updateUserProfile).mockResolvedValue(updatedProfile);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).put('/api/users/profile').send(profileUpdates);
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(updatedProfile);
|
||||
// Verify that the Zod schema preprocessed the empty string to undefined
|
||||
expect(db.userRepo.updateUserProfile).toHaveBeenCalledWith(
|
||||
mockUserProfile.user.user_id,
|
||||
{ avatar_url: undefined },
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
const dbError = new Error('DB Connection Failed');
|
||||
vi.mocked(db.userRepo.updateUserProfile).mockRejectedValue(dbError);
|
||||
|
||||
@@ -26,7 +26,13 @@ const router = express.Router();
|
||||
|
||||
const updateProfileSchema = z.object({
|
||||
body: z
|
||||
.object({ full_name: z.string().optional(), avatar_url: z.string().url().optional() })
|
||||
.object({
|
||||
full_name: z.string().optional(),
|
||||
avatar_url: z.preprocess(
|
||||
(val) => (val === '' ? undefined : val),
|
||||
z.string().trim().url().optional(),
|
||||
),
|
||||
})
|
||||
.refine((data) => Object.keys(data).length > 0, {
|
||||
message: 'At least one field to update must be provided.',
|
||||
}),
|
||||
|
||||
@@ -6,12 +6,13 @@ import type { FlyerStatus, MasterGroceryItem, UserProfile } from '../types';
|
||||
// Import the class, not the singleton instance, so we can instantiate it with mocks.
|
||||
import {
|
||||
AIService,
|
||||
AiFlyerDataSchema,
|
||||
aiService as aiServiceSingleton,
|
||||
DuplicateFlyerError,
|
||||
type RawFlyerItem,
|
||||
} from './aiService.server';
|
||||
import { createMockMasterGroceryItem } from '../tests/utils/mockFactories';
|
||||
import { ValidationError } from './db/errors.db';
|
||||
import { AiFlyerDataSchema } from '../types/ai';
|
||||
|
||||
// Mock the logger to prevent the real pino instance from being created, which causes issues with 'pino-pretty' in tests.
|
||||
vi.mock('./logger.server', () => ({
|
||||
@@ -1058,4 +1059,56 @@ describe('AI Service (Server)', () => {
|
||||
expect(aiServiceSingleton).toBeInstanceOf(AIService);
|
||||
});
|
||||
});
|
||||
|
||||
describe('_normalizeExtractedItems (private method)', () => {
|
||||
it('should correctly normalize items with null or undefined price_in_cents', () => {
|
||||
const rawItems: RawFlyerItem[] = [
|
||||
{
|
||||
item: 'Valid Item',
|
||||
price_display: '$1.99',
|
||||
price_in_cents: 199,
|
||||
quantity: '1',
|
||||
category_name: 'Category A',
|
||||
master_item_id: 1,
|
||||
},
|
||||
{
|
||||
item: 'Item with Null Price',
|
||||
price_display: null,
|
||||
price_in_cents: null, // Test case for null
|
||||
quantity: '1',
|
||||
category_name: 'Category B',
|
||||
master_item_id: 2,
|
||||
},
|
||||
{
|
||||
item: 'Item with Undefined Price',
|
||||
price_display: '$2.99',
|
||||
price_in_cents: undefined, // Test case for undefined
|
||||
quantity: '1',
|
||||
category_name: 'Category C',
|
||||
master_item_id: 3,
|
||||
},
|
||||
{
|
||||
item: null, // Test null item name
|
||||
price_display: undefined, // Test undefined display price
|
||||
price_in_cents: 50,
|
||||
quantity: null, // Test null quantity
|
||||
category_name: undefined, // Test undefined category
|
||||
master_item_id: null, // Test null master_item_id
|
||||
},
|
||||
];
|
||||
|
||||
// Access the private method for testing
|
||||
const normalized = (aiServiceInstance as any)._normalizeExtractedItems(rawItems);
|
||||
|
||||
expect(normalized).toHaveLength(4);
|
||||
expect(normalized[0].price_in_cents).toBe(199);
|
||||
expect(normalized[1].price_in_cents).toBe(null); // null should remain null
|
||||
expect(normalized[2].price_in_cents).toBe(null); // undefined should become null
|
||||
expect(normalized[3].item).toBe('Unknown Item');
|
||||
expect(normalized[3].quantity).toBe('');
|
||||
expect(normalized[3].category_name).toBe('Other/Miscellaneous');
|
||||
expect(normalized[3].master_item_id).toBeUndefined(); // nullish coalescing to undefined
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* It is intended to be used only by the backend (e.g., server.ts) and should never be imported into client-side code.
|
||||
* The `.server.ts` naming convention helps enforce this separation.
|
||||
*/
|
||||
|
||||
import { GoogleGenAI, type GenerateContentResponse, type Content, type Tool } from '@google/genai';
|
||||
import fsPromises from 'node:fs/promises';
|
||||
import type { Logger } from 'pino';
|
||||
@@ -26,29 +25,11 @@ import type { Job } from 'bullmq';
|
||||
import { createFlyerAndItems } from './db/flyer.db';
|
||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||
import path from 'path';
|
||||
import { ValidationError } from './db/errors.db';
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for AI Response Validation (exported for the transformer) ---
|
||||
const ExtractedFlyerItemSchema = z.object({
|
||||
item: z.string(),
|
||||
price_display: z.string(),
|
||||
price_in_cents: z.number().nullable(),
|
||||
quantity: z.string(),
|
||||
category_name: z.string(),
|
||||
master_item_id: z.number().nullish(), // .nullish() allows null or undefined
|
||||
});
|
||||
|
||||
export const AiFlyerDataSchema = z.object({
|
||||
store_name: requiredString('Store name cannot be empty'),
|
||||
valid_from: z.string().nullable(),
|
||||
valid_to: z.string().nullable(),
|
||||
store_address: z.string().nullable(),
|
||||
items: z.array(ExtractedFlyerItemSchema),
|
||||
});
|
||||
import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError
|
||||
import {
|
||||
AiFlyerDataSchema,
|
||||
ExtractedFlyerItemSchema,
|
||||
} from '../types/ai'; // Import consolidated schemas
|
||||
|
||||
interface FlyerProcessPayload extends Partial<ExtractedCoreData> {
|
||||
checksum?: string;
|
||||
@@ -89,10 +70,10 @@ interface IAiClient {
|
||||
* This type is intentionally loose to accommodate potential null/undefined values
|
||||
* from the AI before they are cleaned and normalized.
|
||||
*/
|
||||
type RawFlyerItem = {
|
||||
item: string;
|
||||
export type RawFlyerItem = {
|
||||
item: string | null;
|
||||
price_display: string | null | undefined;
|
||||
price_in_cents: number | null;
|
||||
price_in_cents: number | null | undefined;
|
||||
quantity: string | null | undefined;
|
||||
category_name: string | null | undefined;
|
||||
master_item_id?: number | null | undefined;
|
||||
@@ -507,7 +488,7 @@ export class AIService {
|
||||
userProfileAddress?: string,
|
||||
logger: Logger = this.logger,
|
||||
): Promise<{
|
||||
store_name: string;
|
||||
store_name: string | null;
|
||||
valid_from: string | null;
|
||||
valid_to: string | null;
|
||||
store_address: string | null;
|
||||
@@ -606,6 +587,8 @@ export class AIService {
|
||||
item.category_name === null || item.category_name === undefined
|
||||
? 'Other/Miscellaneous'
|
||||
: String(item.category_name),
|
||||
// Ensure undefined is converted to null to match the Zod schema.
|
||||
price_in_cents: item.price_in_cents ?? null,
|
||||
master_item_id: item.master_item_id ?? undefined,
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -283,7 +283,10 @@ export const fetchFlyerById = (flyerId: number): Promise<Response> =>
|
||||
* Fetches all master grocery items from the backend.
|
||||
* @returns A promise that resolves to an array of MasterGroceryItem objects.
|
||||
*/
|
||||
export const fetchMasterItems = (): Promise<Response> => publicGet('/personalization/master-items');
|
||||
export const fetchMasterItems = (): Promise<Response> => {
|
||||
logger.debug('apiClient: fetchMasterItems called');
|
||||
return publicGet('/personalization/master-items');
|
||||
};
|
||||
|
||||
/**
|
||||
* Fetches all categories from the backend.
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { FlyerAiProcessor } from './flyerAiProcessor.server';
|
||||
import { AiDataValidationError } from './processingErrors';
|
||||
import { logger } from './logger.server';
|
||||
import { logger } from './logger.server'; // Keep this import for the logger instance
|
||||
import type { AIService } from './aiService.server';
|
||||
import type { PersonalizationRepository } from './db/personalization.db';
|
||||
import type { FlyerJobData } from '../types/job-data';
|
||||
@@ -127,4 +127,98 @@ describe('FlyerAiProcessor', () => {
|
||||
expect(result.needsReview).toBe(true);
|
||||
expect(logger.warn).toHaveBeenCalledWith(expect.any(Object), expect.stringContaining('contains no items. The flyer will be saved with an item_count of 0. Flagging for review.'));
|
||||
});
|
||||
|
||||
describe('Batching Logic', () => {
|
||||
it('should process images in batches and merge the results correctly', async () => {
|
||||
// Arrange
|
||||
const jobData = createMockJobData({});
|
||||
// 5 images, with BATCH_SIZE = 4, should result in 2 batches.
|
||||
const imagePaths = [
|
||||
{ path: 'page1.jpg', mimetype: 'image/jpeg' },
|
||||
{ path: 'page2.jpg', mimetype: 'image/jpeg' },
|
||||
{ path: 'page3.jpg', mimetype: 'image/jpeg' },
|
||||
{ path: 'page4.jpg', mimetype: 'image/jpeg' },
|
||||
{ path: 'page5.jpg', mimetype: 'image/jpeg' },
|
||||
];
|
||||
|
||||
const mockAiResponseBatch1 = {
|
||||
store_name: 'Batch 1 Store',
|
||||
valid_from: '2025-01-01',
|
||||
valid_to: '2025-01-07',
|
||||
store_address: '123 Batch St',
|
||||
items: [
|
||||
{ item: 'Item A', price_display: '$1', price_in_cents: 100, quantity: '1', category_name: 'Cat A', master_item_id: 1 },
|
||||
{ item: 'Item B', price_display: '$2', price_in_cents: 200, quantity: '1', category_name: 'Cat B', master_item_id: 2 },
|
||||
],
|
||||
};
|
||||
|
||||
const mockAiResponseBatch2 = {
|
||||
store_name: 'Batch 2 Store', // This should be ignored in the merge
|
||||
valid_from: null,
|
||||
valid_to: null,
|
||||
store_address: null,
|
||||
items: [
|
||||
{ item: 'Item C', price_display: '$3', price_in_cents: 300, quantity: '1', category_name: 'Cat C', master_item_id: 3 },
|
||||
],
|
||||
};
|
||||
|
||||
// Mock the AI service to return different results for each batch call
|
||||
vi.mocked(mockAiService.extractCoreDataFromFlyerImage)
|
||||
.mockResolvedValueOnce(mockAiResponseBatch1)
|
||||
.mockResolvedValueOnce(mockAiResponseBatch2);
|
||||
|
||||
// Act
|
||||
const result = await service.extractAndValidateData(imagePaths, jobData, logger);
|
||||
|
||||
// Assert
|
||||
// 1. AI service was called twice (for 2 batches)
|
||||
expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(2);
|
||||
|
||||
// 2. Check the arguments for each call
|
||||
expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenNthCalledWith(1, imagePaths.slice(0, 4), [], undefined, undefined, logger);
|
||||
expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenNthCalledWith(2, imagePaths.slice(4, 5), [], undefined, undefined, logger);
|
||||
|
||||
// 3. Check the merged data
|
||||
expect(result.data.store_name).toBe('Batch 1 Store'); // Metadata from the first batch
|
||||
expect(result.data.valid_from).toBe('2025-01-01');
|
||||
expect(result.data.valid_to).toBe('2025-01-07');
|
||||
expect(result.data.store_address).toBe('123 Batch St');
|
||||
|
||||
// 4. Check that items from both batches are merged
|
||||
expect(result.data.items).toHaveLength(3);
|
||||
expect(result.data.items).toEqual(expect.arrayContaining([
|
||||
expect.objectContaining({ item: 'Item A' }),
|
||||
expect.objectContaining({ item: 'Item B' }),
|
||||
expect.objectContaining({ item: 'Item C' }),
|
||||
]));
|
||||
|
||||
// 5. Check that the job is not flagged for review
|
||||
expect(result.needsReview).toBe(false);
|
||||
});
|
||||
|
||||
it('should fill in missing metadata from subsequent batches', async () => {
|
||||
// Arrange
|
||||
const jobData = createMockJobData({});
|
||||
const imagePaths = [
|
||||
{ path: 'page1.jpg', mimetype: 'image/jpeg' }, { path: 'page2.jpg', mimetype: 'image/jpeg' }, { path: 'page3.jpg', mimetype: 'image/jpeg' }, { path: 'page4.jpg', mimetype: 'image/jpeg' }, { path: 'page5.jpg', mimetype: 'image/jpeg' },
|
||||
];
|
||||
|
||||
const mockAiResponseBatch1 = { store_name: null, valid_from: '2025-01-01', valid_to: '2025-01-07', store_address: null, items: [{ item: 'Item A', price_display: '$1', price_in_cents: 100, quantity: '1', category_name: 'Cat A', master_item_id: 1 }] };
|
||||
const mockAiResponseBatch2 = { store_name: 'Batch 2 Store', valid_from: '2025-01-02', valid_to: null, store_address: '456 Subsequent St', items: [{ item: 'Item C', price_display: '$3', price_in_cents: 300, quantity: '1', category_name: 'Cat C', master_item_id: 3 }] };
|
||||
|
||||
vi.mocked(mockAiService.extractCoreDataFromFlyerImage)
|
||||
.mockResolvedValueOnce(mockAiResponseBatch1)
|
||||
.mockResolvedValueOnce(mockAiResponseBatch2);
|
||||
|
||||
// Act
|
||||
const result = await service.extractAndValidateData(imagePaths, jobData, logger);
|
||||
|
||||
// Assert
|
||||
expect(result.data.store_name).toBe('Batch 2 Store'); // Filled from batch 2
|
||||
expect(result.data.valid_from).toBe('2025-01-01'); // Kept from batch 1
|
||||
expect(result.data.valid_to).toBe('2025-01-07'); // Kept from batch 1
|
||||
expect(result.data.store_address).toBe('456 Subsequent St'); // Filled from batch 2
|
||||
expect(result.data.items).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -5,28 +5,11 @@ import type { AIService } from './aiService.server';
|
||||
import type { PersonalizationRepository } from './db/personalization.db';
|
||||
import { AiDataValidationError } from './processingErrors';
|
||||
import type { FlyerJobData } from '../types/job-data';
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for AI Response Validation ---
|
||||
const ExtractedFlyerItemSchema = z.object({
|
||||
item: z.string().nullable(),
|
||||
price_display: z.string().nullable(),
|
||||
price_in_cents: z.number().nullable(),
|
||||
quantity: z.string().nullable(),
|
||||
category_name: z.string().nullable(),
|
||||
master_item_id: z.number().nullish(),
|
||||
});
|
||||
|
||||
export const AiFlyerDataSchema = z.object({
|
||||
store_name: z.string().nullable(),
|
||||
valid_from: z.string().nullable(),
|
||||
valid_to: z.string().nullable(),
|
||||
store_address: z.string().nullable(),
|
||||
items: z.array(ExtractedFlyerItemSchema),
|
||||
});
|
||||
import {
|
||||
AiFlyerDataSchema,
|
||||
ExtractedFlyerItemSchema,
|
||||
requiredString,
|
||||
} from '../types/ai'; // Import consolidated schemas and helper
|
||||
|
||||
export type ValidatedAiDataType = z.infer<typeof AiFlyerDataSchema>;
|
||||
|
||||
@@ -94,19 +77,64 @@ export class FlyerAiProcessor {
|
||||
jobData: FlyerJobData,
|
||||
logger: Logger,
|
||||
): Promise<AiProcessorResult> {
|
||||
logger.info(`Starting AI data extraction.`);
|
||||
logger.info(`Starting AI data extraction for ${imagePaths.length} pages.`);
|
||||
const { submitterIp, userProfileAddress } = jobData;
|
||||
const masterItems = await this.personalizationRepo.getAllMasterItems(logger);
|
||||
logger.debug(`Retrieved ${masterItems.length} master items for AI matching.`);
|
||||
|
||||
const extractedData = await this.ai.extractCoreDataFromFlyerImage(
|
||||
imagePaths,
|
||||
masterItems,
|
||||
submitterIp,
|
||||
userProfileAddress,
|
||||
logger,
|
||||
);
|
||||
// BATCHING LOGIC: Process images in chunks to avoid hitting AI payload/token limits.
|
||||
const BATCH_SIZE = 4;
|
||||
const batches = [];
|
||||
for (let i = 0; i < imagePaths.length; i += BATCH_SIZE) {
|
||||
batches.push(imagePaths.slice(i, i + BATCH_SIZE));
|
||||
}
|
||||
|
||||
return this._validateAiData(extractedData, logger);
|
||||
// Initialize container for merged data
|
||||
const mergedData: ValidatedAiDataType = {
|
||||
store_name: null,
|
||||
valid_from: null,
|
||||
valid_to: null,
|
||||
store_address: null,
|
||||
items: [],
|
||||
};
|
||||
|
||||
logger.info(`Processing ${imagePaths.length} pages in ${batches.length} batches (Batch Size: ${BATCH_SIZE}).`);
|
||||
|
||||
for (const [index, batch] of batches.entries()) {
|
||||
logger.info(`Processing batch ${index + 1}/${batches.length} (${batch.length} pages)...`);
|
||||
|
||||
// The AI service handles rate limiting internally (e.g., max 5 RPM).
|
||||
// Processing these sequentially ensures we respect that limit.
|
||||
const batchResult = await this.ai.extractCoreDataFromFlyerImage(
|
||||
batch,
|
||||
masterItems,
|
||||
submitterIp,
|
||||
userProfileAddress,
|
||||
logger,
|
||||
);
|
||||
|
||||
// MERGE LOGIC:
|
||||
// 1. Metadata (Store Name, Dates): Prioritize the first batch (usually the cover page).
|
||||
// If subsequent batches have data and the current is null, fill it in.
|
||||
if (index === 0) {
|
||||
mergedData.store_name = batchResult.store_name;
|
||||
mergedData.valid_from = batchResult.valid_from;
|
||||
mergedData.valid_to = batchResult.valid_to;
|
||||
mergedData.store_address = batchResult.store_address;
|
||||
} else {
|
||||
if (!mergedData.store_name && batchResult.store_name) mergedData.store_name = batchResult.store_name;
|
||||
if (!mergedData.valid_from && batchResult.valid_from) mergedData.valid_from = batchResult.valid_from;
|
||||
if (!mergedData.valid_to && batchResult.valid_to) mergedData.valid_to = batchResult.valid_to;
|
||||
if (!mergedData.store_address && batchResult.store_address) mergedData.store_address = batchResult.store_address;
|
||||
}
|
||||
|
||||
// 2. Items: Append all found items to the master list.
|
||||
mergedData.items.push(...batchResult.items);
|
||||
}
|
||||
|
||||
logger.info(`Batch processing complete. Total items extracted: ${mergedData.items.length}`);
|
||||
|
||||
// Validate the final merged dataset
|
||||
return this._validateAiData(mergedData, logger);
|
||||
}
|
||||
}
|
||||
@@ -2,8 +2,9 @@
|
||||
import path from 'path';
|
||||
import type { z } from 'zod';
|
||||
import type { Logger } from 'pino';
|
||||
import type { FlyerInsert, FlyerItemInsert, FlyerStatus } from '../types';
|
||||
import type { AiFlyerDataSchema, AiProcessorResult } from './flyerAiProcessor.server';
|
||||
import type { FlyerInsert, FlyerItemInsert } from '../types';
|
||||
import type { AiProcessorResult } from './flyerAiProcessor.server'; // Keep this import for AiProcessorResult
|
||||
import { AiFlyerDataSchema } from '../types/ai'; // Import consolidated schema
|
||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
||||
|
||||
/**
|
||||
|
||||
@@ -4,13 +4,14 @@ import { Job } from 'bullmq';
|
||||
import type { Dirent } from 'node:fs';
|
||||
import sharp from 'sharp';
|
||||
import { FlyerFileHandler, ICommandExecutor, IFileSystem } from './flyerFileHandler.server';
|
||||
import { PdfConversionError, UnsupportedFileTypeError } from './processingErrors';
|
||||
import { ImageConversionError, PdfConversionError, UnsupportedFileTypeError } from './processingErrors';
|
||||
import { logger } from './logger.server';
|
||||
import type { FlyerJobData } from '../types/job-data';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('sharp', () => {
|
||||
const mockSharpInstance = {
|
||||
jpeg: vi.fn().mockReturnThis(),
|
||||
png: vi.fn().mockReturnThis(),
|
||||
toFile: vi.fn().mockResolvedValue({}),
|
||||
};
|
||||
@@ -88,20 +89,6 @@ describe('FlyerFileHandler', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle supported image types directly', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.jpg' });
|
||||
const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
|
||||
'/tmp/flyer.jpg',
|
||||
job,
|
||||
logger,
|
||||
);
|
||||
|
||||
expect(imagePaths).toEqual([{ path: '/tmp/flyer.jpg', mimetype: 'image/jpeg' }]);
|
||||
expect(createdImagePaths).toEqual([]);
|
||||
expect(mockExec).not.toHaveBeenCalled();
|
||||
expect(sharp).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should convert convertible image types to PNG', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.gif' });
|
||||
const mockSharpInstance = sharp('/tmp/flyer.gif');
|
||||
@@ -126,4 +113,73 @@ describe('FlyerFileHandler', () => {
|
||||
UnsupportedFileTypeError,
|
||||
);
|
||||
});
|
||||
|
||||
describe('Image Processing', () => {
|
||||
it('should process a JPEG to strip EXIF data', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.jpg' });
|
||||
const mockSharpInstance = sharp('/tmp/flyer.jpg');
|
||||
vi.mocked(mockSharpInstance.toFile).mockResolvedValue({} as any);
|
||||
|
||||
const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
|
||||
'/tmp/flyer.jpg',
|
||||
job,
|
||||
logger,
|
||||
);
|
||||
|
||||
expect(sharp).toHaveBeenCalledWith('/tmp/flyer.jpg');
|
||||
expect(mockSharpInstance.jpeg).toHaveBeenCalledWith({ quality: 90 });
|
||||
expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-processed.jpeg');
|
||||
expect(imagePaths).toEqual([{ path: '/tmp/flyer-processed.jpeg', mimetype: 'image/jpeg' }]);
|
||||
expect(createdImagePaths).toEqual(['/tmp/flyer-processed.jpeg']);
|
||||
});
|
||||
|
||||
it('should process a PNG to strip metadata', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.png' });
|
||||
const mockSharpInstance = sharp('/tmp/flyer.png');
|
||||
vi.mocked(mockSharpInstance.toFile).mockResolvedValue({} as any);
|
||||
|
||||
const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
|
||||
'/tmp/flyer.png',
|
||||
job,
|
||||
logger,
|
||||
);
|
||||
|
||||
expect(sharp).toHaveBeenCalledWith('/tmp/flyer.png');
|
||||
expect(mockSharpInstance.png).toHaveBeenCalledWith({ quality: 90 });
|
||||
expect(mockSharpInstance.toFile).toHaveBeenCalledWith('/tmp/flyer-processed.png');
|
||||
expect(imagePaths).toEqual([{ path: '/tmp/flyer-processed.png', mimetype: 'image/png' }]);
|
||||
expect(createdImagePaths).toEqual(['/tmp/flyer-processed.png']);
|
||||
});
|
||||
|
||||
it('should handle other supported image types (e.g. webp) directly without processing', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.webp' });
|
||||
const { imagePaths, createdImagePaths } = await service.prepareImageInputs(
|
||||
'/tmp/flyer.webp',
|
||||
job,
|
||||
logger,
|
||||
);
|
||||
|
||||
expect(imagePaths).toEqual([{ path: '/tmp/flyer.webp', mimetype: 'image/webp' }]);
|
||||
expect(createdImagePaths).toEqual([]);
|
||||
expect(sharp).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should throw ImageConversionError if sharp fails during JPEG processing', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.jpg' });
|
||||
const sharpError = new Error('Sharp failed');
|
||||
const mockSharpInstance = sharp('/tmp/flyer.jpg');
|
||||
vi.mocked(mockSharpInstance.toFile).mockRejectedValue(sharpError);
|
||||
|
||||
await expect(service.prepareImageInputs('/tmp/flyer.jpg', job, logger)).rejects.toThrow(ImageConversionError);
|
||||
});
|
||||
|
||||
it('should throw ImageConversionError if sharp fails during PNG processing', async () => {
|
||||
const job = createMockJob({ filePath: '/tmp/flyer.png' });
|
||||
const sharpError = new Error('Sharp failed');
|
||||
const mockSharpInstance = sharp('/tmp/flyer.png');
|
||||
vi.mocked(mockSharpInstance.toFile).mockRejectedValue(sharpError);
|
||||
|
||||
await expect(service.prepareImageInputs('/tmp/flyer.png', job, logger)).rejects.toThrow(ImageConversionError);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -105,6 +105,53 @@ export class FlyerFileHandler {
|
||||
return imagePaths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a JPEG image to strip EXIF data by re-saving it.
|
||||
* This ensures user privacy and metadata consistency.
|
||||
* @returns The path to the newly created, processed JPEG file.
|
||||
*/
|
||||
private async _stripExifDataFromJpeg(filePath: string, logger: Logger): Promise<string> {
|
||||
const outputDir = path.dirname(filePath);
|
||||
const originalFileName = path.parse(path.basename(filePath)).name;
|
||||
// Suffix to avoid overwriting, and keep extension.
|
||||
const newFileName = `${originalFileName}-processed.jpeg`;
|
||||
const outputPath = path.join(outputDir, newFileName);
|
||||
|
||||
logger.info({ from: filePath, to: outputPath }, 'Processing JPEG to strip EXIF data.');
|
||||
|
||||
try {
|
||||
// By default, sharp strips metadata when re-saving.
|
||||
// We also apply a reasonable quality setting for web optimization.
|
||||
await sharp(filePath).jpeg({ quality: 90 }).toFile(outputPath);
|
||||
return outputPath;
|
||||
} catch (error) {
|
||||
logger.error({ err: error, filePath }, 'Failed to process JPEG with sharp.');
|
||||
throw new ImageConversionError(`JPEG processing failed for ${path.basename(filePath)}.`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a PNG image to strip metadata by re-saving it.
|
||||
* @returns The path to the newly created, processed PNG file.
|
||||
*/
|
||||
private async _stripMetadataFromPng(filePath: string, logger: Logger): Promise<string> {
|
||||
const outputDir = path.dirname(filePath);
|
||||
const originalFileName = path.parse(path.basename(filePath)).name;
|
||||
const newFileName = `${originalFileName}-processed.png`;
|
||||
const outputPath = path.join(outputDir, newFileName);
|
||||
|
||||
logger.info({ from: filePath, to: outputPath }, 'Processing PNG to strip metadata.');
|
||||
|
||||
try {
|
||||
// Re-saving with sharp strips metadata. We also apply a reasonable quality setting.
|
||||
await sharp(filePath).png({ quality: 90 }).toFile(outputPath);
|
||||
return outputPath;
|
||||
} catch (error) {
|
||||
logger.error({ err: error, filePath }, 'Failed to process PNG with sharp.');
|
||||
throw new ImageConversionError(`PNG processing failed for ${path.basename(filePath)}.`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an image file (e.g., GIF, TIFF) to a PNG format that the AI can process.
|
||||
*/
|
||||
@@ -147,11 +194,29 @@ export class FlyerFileHandler {
|
||||
fileExt: string,
|
||||
logger: Logger,
|
||||
): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> {
|
||||
logger.info(`Processing as a single image file: ${filePath}`);
|
||||
const mimetype =
|
||||
fileExt === '.jpg' || fileExt === '.jpeg' ? 'image/jpeg' : `image/${fileExt.slice(1)}`;
|
||||
const imagePaths = [{ path: filePath, mimetype }];
|
||||
return { imagePaths, createdImagePaths: [] };
|
||||
// For JPEGs, we will re-process them to strip EXIF data.
|
||||
if (fileExt === '.jpg' || fileExt === '.jpeg') {
|
||||
const processedPath = await this._stripExifDataFromJpeg(filePath, logger);
|
||||
return {
|
||||
imagePaths: [{ path: processedPath, mimetype: 'image/jpeg' }],
|
||||
// The original file will be cleaned up by the orchestrator, but we must also track this new file.
|
||||
createdImagePaths: [processedPath],
|
||||
};
|
||||
}
|
||||
|
||||
// For PNGs, also re-process to strip metadata.
|
||||
if (fileExt === '.png') {
|
||||
const processedPath = await this._stripMetadataFromPng(filePath, logger);
|
||||
return {
|
||||
imagePaths: [{ path: processedPath, mimetype: 'image/png' }],
|
||||
createdImagePaths: [processedPath],
|
||||
};
|
||||
}
|
||||
|
||||
// For other supported types like WEBP, etc., which are less likely to have problematic EXIF,
|
||||
// we can process them directly without modification for now.
|
||||
logger.info(`Processing as a single image file (non-JPEG/PNG): ${filePath}`);
|
||||
return { imagePaths: [{ path: filePath, mimetype: `image/${fileExt.slice(1)}` }], createdImagePaths: [] };
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -133,6 +133,12 @@ export class FlyerProcessingService {
|
||||
return { flyerId: flyer.flyer_id };
|
||||
} catch (error) {
|
||||
logger.warn('Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.');
|
||||
// Add detailed logging of the raw error object
|
||||
if (error instanceof Error) {
|
||||
logger.error({ err: error, stack: error.stack }, 'Raw error object in processJob catch block');
|
||||
} else {
|
||||
logger.error({ error }, 'Raw non-Error object in processJob catch block');
|
||||
}
|
||||
// This private method handles error reporting and re-throwing.
|
||||
await this._reportErrorAndThrow(error, job, logger, stages);
|
||||
// This line is technically unreachable because the above method always throws,
|
||||
|
||||
166
src/services/gamificationService.test.ts
Normal file
166
src/services/gamificationService.test.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
// src/services/gamificationService.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { gamificationService } from './gamificationService';
|
||||
import { gamificationRepo } from './db/index.db';
|
||||
import { ForeignKeyConstraintError } from './db/errors.db';
|
||||
import { logger as mockLogger } from './logger.server';
|
||||
import {
|
||||
createMockAchievement,
|
||||
createMockLeaderboardUser,
|
||||
createMockUserAchievement,
|
||||
} from '../tests/utils/mockFactories';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('./db/index.db', () => ({
|
||||
gamificationRepo: {
|
||||
awardAchievement: vi.fn(),
|
||||
getAllAchievements: vi.fn(),
|
||||
getLeaderboard: vi.fn(),
|
||||
getUserAchievements: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('./logger.server', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock the error class
|
||||
vi.mock('./db/errors.db', () => ({
|
||||
ForeignKeyConstraintError: class extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'ForeignKeyConstraintError';
|
||||
}
|
||||
},
|
||||
}));
|
||||
|
||||
describe('GamificationService', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('awardAchievement', () => {
|
||||
it('should call the repository to award an achievement', async () => {
|
||||
const userId = 'user-123';
|
||||
const achievementName = 'First-Upload';
|
||||
vi.mocked(gamificationRepo.awardAchievement).mockResolvedValue(undefined);
|
||||
|
||||
await gamificationService.awardAchievement(userId, achievementName, mockLogger);
|
||||
|
||||
expect(gamificationRepo.awardAchievement).toHaveBeenCalledWith(userId, achievementName, mockLogger);
|
||||
});
|
||||
|
||||
it('should re-throw ForeignKeyConstraintError without logging it as a service error', async () => {
|
||||
const userId = 'user-123';
|
||||
const achievementName = 'NonExistentAchievement';
|
||||
const fkError = new ForeignKeyConstraintError('Achievement not found');
|
||||
vi.mocked(gamificationRepo.awardAchievement).mockRejectedValue(fkError);
|
||||
|
||||
await expect(
|
||||
gamificationService.awardAchievement(userId, achievementName, mockLogger),
|
||||
).rejects.toThrow(fkError);
|
||||
|
||||
expect(mockLogger.error).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should log and re-throw generic errors', async () => {
|
||||
const userId = 'user-123';
|
||||
const achievementName = 'First-Upload';
|
||||
const dbError = new Error('DB connection failed');
|
||||
vi.mocked(gamificationRepo.awardAchievement).mockRejectedValue(dbError);
|
||||
|
||||
await expect(
|
||||
gamificationService.awardAchievement(userId, achievementName, mockLogger),
|
||||
).rejects.toThrow(dbError);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError, userId, achievementName },
|
||||
'Error awarding achievement via admin endpoint:',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAllAchievements', () => {
|
||||
it('should return all achievements from the repository', async () => {
|
||||
const mockAchievements = [
|
||||
createMockAchievement({ name: 'Achieve1' }),
|
||||
createMockAchievement({ name: 'Achieve2' }),
|
||||
];
|
||||
vi.mocked(gamificationRepo.getAllAchievements).mockResolvedValue(mockAchievements);
|
||||
|
||||
const result = await gamificationService.getAllAchievements(mockLogger);
|
||||
|
||||
expect(result).toEqual(mockAchievements);
|
||||
expect(gamificationRepo.getAllAchievements).toHaveBeenCalledWith(mockLogger);
|
||||
});
|
||||
|
||||
it('should log and re-throw an error if the repository fails', async () => {
|
||||
const dbError = new Error('DB Error');
|
||||
vi.mocked(gamificationRepo.getAllAchievements).mockRejectedValue(dbError);
|
||||
|
||||
await expect(gamificationService.getAllAchievements(mockLogger)).rejects.toThrow(dbError);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError },
|
||||
'Error in getAllAchievements service method',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLeaderboard', () => {
|
||||
it('should return the leaderboard from the repository', async () => {
|
||||
const mockLeaderboard = [createMockLeaderboardUser({ rank: '1' })];
|
||||
vi.mocked(gamificationRepo.getLeaderboard).mockResolvedValue(mockLeaderboard);
|
||||
|
||||
const result = await gamificationService.getLeaderboard(10, mockLogger);
|
||||
|
||||
expect(result).toEqual(mockLeaderboard);
|
||||
expect(gamificationRepo.getLeaderboard).toHaveBeenCalledWith(10, mockLogger);
|
||||
});
|
||||
|
||||
it('should log and re-throw an error if the repository fails', async () => {
|
||||
const dbError = new Error('DB Error');
|
||||
vi.mocked(gamificationRepo.getLeaderboard).mockRejectedValue(dbError);
|
||||
|
||||
await expect(gamificationService.getLeaderboard(10, mockLogger)).rejects.toThrow(dbError);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError, limit: 10 },
|
||||
'Error fetching leaderboard in service method.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getUserAchievements', () => {
|
||||
it("should return a user's achievements from the repository", async () => {
|
||||
const userId = 'user-123';
|
||||
const mockUserAchievements = [createMockUserAchievement({ user_id: userId })];
|
||||
vi.mocked(gamificationRepo.getUserAchievements).mockResolvedValue(mockUserAchievements);
|
||||
|
||||
const result = await gamificationService.getUserAchievements(userId, mockLogger);
|
||||
|
||||
expect(result).toEqual(mockUserAchievements);
|
||||
expect(gamificationRepo.getUserAchievements).toHaveBeenCalledWith(userId, mockLogger);
|
||||
});
|
||||
|
||||
it('should log and re-throw an error if the repository fails', async () => {
|
||||
const userId = 'user-123';
|
||||
const dbError = new Error('DB Error');
|
||||
vi.mocked(gamificationRepo.getUserAchievements).mockRejectedValue(dbError);
|
||||
|
||||
await expect(gamificationService.getUserAchievements(userId, mockLogger)).rejects.toThrow(
|
||||
dbError,
|
||||
);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: dbError, userId },
|
||||
'Error fetching user achievements in service method.',
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
209
src/services/monitoringService.server.test.ts
Normal file
209
src/services/monitoringService.server.test.ts
Normal file
@@ -0,0 +1,209 @@
|
||||
// src/services/monitoringService.server.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import type { Job, Queue } from 'bullmq';
|
||||
import { NotFoundError, ValidationError } from './db/errors.db';
|
||||
import { logger } from './logger.server';
|
||||
|
||||
// --- Hoisted Mocks ---
|
||||
const mocks = vi.hoisted(() => {
|
||||
const createMockWorker = (name: string) => ({
|
||||
name,
|
||||
isRunning: vi.fn().mockReturnValue(true),
|
||||
});
|
||||
|
||||
const createMockQueue = (name: string) => ({
|
||||
name,
|
||||
getJobCounts: vi.fn().mockResolvedValue({}),
|
||||
getJob: vi.fn(),
|
||||
});
|
||||
|
||||
return {
|
||||
flyerWorker: createMockWorker('flyer-processing'),
|
||||
emailWorker: createMockWorker('email-sending'),
|
||||
analyticsWorker: createMockWorker('analytics-reporting'),
|
||||
cleanupWorker: createMockWorker('file-cleanup'),
|
||||
weeklyAnalyticsWorker: createMockWorker('weekly-analytics-reporting'),
|
||||
|
||||
flyerQueue: createMockQueue('flyer-processing'),
|
||||
emailQueue: createMockQueue('email-sending'),
|
||||
analyticsQueue: createMockQueue('analytics-reporting'),
|
||||
cleanupQueue: createMockQueue('file-cleanup'),
|
||||
weeklyAnalyticsQueue: createMockQueue('weekly-analytics-reporting'),
|
||||
};
|
||||
});
|
||||
|
||||
// --- Mock Modules ---
|
||||
vi.mock('./queueService.server', () => ({
|
||||
flyerQueue: mocks.flyerQueue,
|
||||
emailQueue: mocks.emailQueue,
|
||||
analyticsQueue: mocks.analyticsQueue,
|
||||
cleanupQueue: mocks.cleanupQueue,
|
||||
weeklyAnalyticsQueue: mocks.weeklyAnalyticsQueue,
|
||||
}));
|
||||
|
||||
vi.mock('./workers.server', () => ({
|
||||
flyerWorker: mocks.flyerWorker,
|
||||
emailWorker: mocks.emailWorker,
|
||||
analyticsWorker: mocks.analyticsWorker,
|
||||
cleanupWorker: mocks.cleanupWorker,
|
||||
weeklyAnalyticsWorker: mocks.weeklyAnalyticsWorker,
|
||||
}));
|
||||
|
||||
vi.mock('./db/errors.db', () => ({
|
||||
NotFoundError: class NotFoundError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'NotFoundError';
|
||||
}
|
||||
},
|
||||
ValidationError: class ValidationError extends Error {
|
||||
constructor(issues: [], message: string) {
|
||||
super(message);
|
||||
this.name = 'ValidationError';
|
||||
}
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('./logger.server', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Import the service to be tested AFTER all mocks are set up.
|
||||
import { monitoringService } from './monitoringService.server';
|
||||
|
||||
describe('MonitoringService', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('getWorkerStatuses', () => {
|
||||
it('should return the running status of all workers', async () => {
|
||||
// Arrange: one worker is not running
|
||||
mocks.emailWorker.isRunning.mockReturnValue(false);
|
||||
|
||||
// Act
|
||||
const statuses = await monitoringService.getWorkerStatuses();
|
||||
|
||||
// Assert
|
||||
expect(statuses).toEqual([
|
||||
{ name: 'flyer-processing', isRunning: true },
|
||||
{ name: 'email-sending', isRunning: false },
|
||||
{ name: 'analytics-reporting', isRunning: true },
|
||||
{ name: 'file-cleanup', isRunning: true },
|
||||
{ name: 'weekly-analytics-reporting', isRunning: true },
|
||||
]);
|
||||
expect(mocks.flyerWorker.isRunning).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.emailWorker.isRunning).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getQueueStatuses', () => {
|
||||
it('should return job counts for all queues', async () => {
|
||||
// Arrange
|
||||
mocks.flyerQueue.getJobCounts.mockResolvedValue({ active: 1, failed: 2 });
|
||||
mocks.emailQueue.getJobCounts.mockResolvedValue({ completed: 10, waiting: 5 });
|
||||
|
||||
// Act
|
||||
const statuses = await monitoringService.getQueueStatuses();
|
||||
|
||||
// Assert
|
||||
expect(statuses).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ name: 'flyer-processing', counts: { active: 1, failed: 2 } },
|
||||
{ name: 'email-sending', counts: { completed: 10, waiting: 5 } },
|
||||
{ name: 'analytics-reporting', counts: {} },
|
||||
{ name: 'file-cleanup', counts: {} },
|
||||
{ name: 'weekly-analytics-reporting', counts: {} },
|
||||
]),
|
||||
);
|
||||
expect(mocks.flyerQueue.getJobCounts).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.emailQueue.getJobCounts).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('retryFailedJob', () => {
|
||||
const userId = 'admin-user';
|
||||
const jobId = 'failed-job-1';
|
||||
|
||||
it('should throw NotFoundError for an unknown queue name', async () => {
|
||||
await expect(monitoringService.retryFailedJob('unknown-queue', jobId, userId)).rejects.toThrow(
|
||||
new NotFoundError(`Queue 'unknown-queue' not found.`),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError if the job does not exist in the queue', async () => {
|
||||
mocks.flyerQueue.getJob.mockResolvedValue(null);
|
||||
|
||||
await expect(
|
||||
monitoringService.retryFailedJob('flyer-processing', jobId, userId),
|
||||
).rejects.toThrow(new NotFoundError(`Job with ID '${jobId}' not found in queue 'flyer-processing'.`));
|
||||
});
|
||||
|
||||
it("should throw ValidationError if the job is not in a 'failed' state", async () => {
|
||||
const mockJob = {
|
||||
id: jobId,
|
||||
getState: vi.fn().mockResolvedValue('completed'),
|
||||
retry: vi.fn(),
|
||||
} as unknown as Job;
|
||||
mocks.flyerQueue.getJob.mockResolvedValue(mockJob);
|
||||
|
||||
await expect(
|
||||
monitoringService.retryFailedJob('flyer-processing', jobId, userId),
|
||||
).rejects.toThrow(new ValidationError([], `Job is not in a 'failed' state. Current state: completed.`));
|
||||
});
|
||||
|
||||
it("should call job.retry() and log if the job is in a 'failed' state", async () => {
|
||||
const mockJob = {
|
||||
id: jobId,
|
||||
getState: vi.fn().mockResolvedValue('failed'),
|
||||
retry: vi.fn().mockResolvedValue(undefined),
|
||||
} as unknown as Job;
|
||||
mocks.flyerQueue.getJob.mockResolvedValue(mockJob);
|
||||
|
||||
await monitoringService.retryFailedJob('flyer-processing', jobId, userId);
|
||||
|
||||
expect(mockJob.retry).toHaveBeenCalledTimes(1);
|
||||
expect(logger.info).toHaveBeenCalledWith(
|
||||
`[Admin] User ${userId} manually retried job ${jobId} in queue flyer-processing.`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFlyerJobStatus', () => {
|
||||
const jobId = 'flyer-job-123';
|
||||
|
||||
it('should throw NotFoundError if the job is not found', async () => {
|
||||
mocks.flyerQueue.getJob.mockResolvedValue(null);
|
||||
|
||||
await expect(monitoringService.getFlyerJobStatus(jobId)).rejects.toThrow(
|
||||
new NotFoundError('Job not found.'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return the job status object if the job is found', async () => {
|
||||
const mockJob = {
|
||||
id: jobId,
|
||||
getState: vi.fn().mockResolvedValue('completed'),
|
||||
progress: 100,
|
||||
returnvalue: { flyerId: 99 },
|
||||
failedReason: null,
|
||||
} as unknown as Job;
|
||||
mocks.flyerQueue.getJob.mockResolvedValue(mockJob);
|
||||
|
||||
const status = await monitoringService.getFlyerJobStatus(jobId);
|
||||
|
||||
expect(status).toEqual({
|
||||
id: jobId,
|
||||
state: 'completed',
|
||||
progress: 100,
|
||||
returnValue: { flyerId: 99 },
|
||||
failedReason: null,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,3 +1,4 @@
|
||||
// src/services/workers.server.ts
|
||||
import { Worker, Job, UnrecoverableError } from 'bullmq';
|
||||
import fsPromises from 'node:fs/promises';
|
||||
import { exec } from 'child_process';
|
||||
|
||||
@@ -5,6 +5,7 @@ import app from '../../../server';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
import type { UserProfile } from '../../types';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
@@ -16,6 +17,8 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
let adminUser: UserProfile;
|
||||
let regularUser: UserProfile;
|
||||
let regularUserToken: string;
|
||||
const createdUserIds: string[] = [];
|
||||
const createdStoreIds: number[] = [];
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create a fresh admin user and a regular user for this test suite
|
||||
@@ -26,25 +29,21 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
fullName: 'Admin Test User',
|
||||
request, // Pass supertest request to ensure user is created in the test DB
|
||||
}));
|
||||
createdUserIds.push(adminUser.user.user_id);
|
||||
|
||||
({ user: regularUser, token: regularUserToken } = await createAndLoginUser({
|
||||
email: `regular-integration-${Date.now()}@test.com`,
|
||||
fullName: 'Regular User',
|
||||
request, // Pass supertest request
|
||||
}));
|
||||
createdUserIds.push(regularUser.user.user_id);
|
||||
});
|
||||
|
||||
// Cleanup the created user after all tests in this file are done
|
||||
return async () => {
|
||||
// Consolidate cleanup to prevent foreign key issues and handle all created entities.
|
||||
const userIds = [adminUser?.user.user_id, regularUser?.user.user_id].filter(
|
||||
(id): id is string => !!id,
|
||||
);
|
||||
if (userIds.length > 0) {
|
||||
// Delete dependent records first to avoid foreign key violations.
|
||||
await getPool().query('DELETE FROM public.suggested_corrections WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
// Then delete the users themselves.
|
||||
await getPool().query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
}
|
||||
};
|
||||
afterAll(async () => {
|
||||
await cleanupDb({
|
||||
userIds: createdUserIds,
|
||||
storeIds: createdStoreIds,
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/admin/stats', () => {
|
||||
@@ -158,6 +157,7 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
[storeName],
|
||||
);
|
||||
testStoreId = storeRes.rows[0].store_id;
|
||||
createdStoreIds.push(testStoreId);
|
||||
});
|
||||
|
||||
// Before each modification test, create a fresh flyer item and a correction for it.
|
||||
@@ -184,13 +184,6 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
testCorrectionId = correctionRes.rows[0].suggested_correction_id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up the created store and any associated flyers/items
|
||||
if (testStoreId) {
|
||||
await getPool().query('DELETE FROM public.stores WHERE store_id = $1', [testStoreId]);
|
||||
}
|
||||
});
|
||||
|
||||
it('should allow an admin to approve a correction', async () => {
|
||||
// Act: Approve the correction.
|
||||
const response = await request
|
||||
@@ -267,4 +260,53 @@ describe('Admin API Routes Integration Tests', () => {
|
||||
expect(updatedRecipeRows[0].status).toBe('public');
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/admin/users/:id', () => {
|
||||
it('should allow an admin to delete another user\'s account', async () => {
|
||||
// Act: Call the delete endpoint as an admin.
|
||||
const targetUserId = regularUser.user.user_id;
|
||||
const response = await request
|
||||
.delete(`/api/admin/users/${targetUserId}`)
|
||||
.set('Authorization', `Bearer ${adminToken}`);
|
||||
|
||||
// Assert: Check for a successful deletion status.
|
||||
expect(response.status).toBe(204);
|
||||
});
|
||||
|
||||
it('should prevent an admin from deleting their own account', async () => {
|
||||
// Act: Call the delete endpoint as the same admin user.
|
||||
const adminUserId = adminUser.user.user_id;
|
||||
const response = await request
|
||||
.delete(`/api/admin/users/${adminUserId}`)
|
||||
.set('Authorization', `Bearer ${adminToken}`);
|
||||
|
||||
// Assert: Check for a 400 (or other appropriate) status code and an error message.
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toMatch(/Admins cannot delete their own account/);
|
||||
});
|
||||
|
||||
it('should return 404 if the user to be deleted is not found', async () => {
|
||||
// Arrange: Mock the userRepo.deleteUserById to throw a NotFoundError
|
||||
const notFoundUserId = 'non-existent-user-id';
|
||||
|
||||
const response = await request
|
||||
.delete(`/api/admin/users/${notFoundUserId}`)
|
||||
.set('Authorization', `Bearer ${adminToken}`);
|
||||
|
||||
// Assert: Check for a 400 status code because the UUID is invalid and caught by validation.
|
||||
expect(response.status).toBe(400);
|
||||
});
|
||||
|
||||
it('should return 500 on a generic database error', async () => {
|
||||
// Arrange: Mock the userRepo.deleteUserById to throw a generic error
|
||||
const genericUserId = 'generic-error-user-id';
|
||||
|
||||
const response = await request
|
||||
.delete(`/api/admin/users/${genericUserId}`)
|
||||
.set('Authorization', `Bearer ${adminToken}`);
|
||||
|
||||
// Assert: Check for a 400 status code because the UUID is invalid and caught by validation.
|
||||
expect(response.status).toBe(400);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -5,6 +5,8 @@ import app from '../../../server';
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'path';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import { cleanupFiles } from '../utils/cleanupFiles';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
@@ -25,24 +27,35 @@ interface TestGeolocationCoordinates {
|
||||
|
||||
describe('AI API Routes Integration Tests', () => {
|
||||
let authToken: string;
|
||||
let testUserId: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create and log in as a new user for authenticated tests.
|
||||
({ token: authToken } = await createAndLoginUser({ fullName: 'AI Tester', request }));
|
||||
const { token, user } = await createAndLoginUser({ fullName: 'AI Tester', request });
|
||||
authToken = token;
|
||||
testUserId = user.user.user_id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up any files created in the flyer-images directory during these tests.
|
||||
// 1. Clean up database records
|
||||
await cleanupDb({ userIds: [testUserId] });
|
||||
|
||||
// 2. Safeguard: Clean up any leftover files from failed tests.
|
||||
// The routes themselves should clean up on success, but this handles interruptions.
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
try {
|
||||
const files = await fs.readdir(uploadDir);
|
||||
// Target files created by the 'image' and 'images' multer instances.
|
||||
const testFiles = files.filter((f) => f.startsWith('image-') || f.startsWith('images-'));
|
||||
for (const file of testFiles) {
|
||||
await fs.unlink(path.join(uploadDir, file));
|
||||
const allFiles = await fs.readdir(uploadDir);
|
||||
const testFiles = allFiles
|
||||
.filter((f) => f.startsWith('image-') || f.startsWith('images-'))
|
||||
.map((f) => path.join(uploadDir, f));
|
||||
|
||||
if (testFiles.length > 0) {
|
||||
await cleanupFiles(testFiles);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error during AI integration test file cleanup:', error);
|
||||
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
console.error('Error during AI integration test file cleanup:', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import app from '../../../server';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import type { UserProfile } from '../../types';
|
||||
|
||||
/**
|
||||
@@ -21,18 +21,18 @@ const request = supertest(app);
|
||||
describe('Authentication API Integration', () => {
|
||||
let testUserEmail: string;
|
||||
let testUser: UserProfile;
|
||||
const createdUserIds: string[] = [];
|
||||
|
||||
beforeAll(async () => {
|
||||
// Use a unique email for this test suite to prevent collisions with other tests.
|
||||
const email = `auth-integration-test-${Date.now()}@example.com`;
|
||||
({ user: testUser } = await createAndLoginUser({ email, fullName: 'Auth Test User', request }));
|
||||
testUserEmail = testUser.user.email;
|
||||
createdUserIds.push(testUser.user.user_id);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (testUserEmail) {
|
||||
await getPool().query('DELETE FROM public.users WHERE email = $1', [testUserEmail]);
|
||||
}
|
||||
await cleanupDb({ userIds: createdUserIds });
|
||||
});
|
||||
|
||||
// This test migrates the logic from the old DevTestRunner.tsx component.
|
||||
@@ -85,6 +85,38 @@ describe('Authentication API Integration', () => {
|
||||
expect(errorData.message).toBe('Incorrect email or password.');
|
||||
});
|
||||
|
||||
it('should allow registration with an empty string for avatar_url and save it as null', async () => {
|
||||
// Arrange: Define user data with an empty avatar_url.
|
||||
const email = `empty-avatar-user-${Date.now()}@example.com`;
|
||||
const userData = {
|
||||
email,
|
||||
password: TEST_PASSWORD,
|
||||
full_name: 'Empty Avatar',
|
||||
avatar_url: '',
|
||||
};
|
||||
|
||||
// Act: Register the new user.
|
||||
const registerResponse = await request.post('/api/auth/register').send(userData);
|
||||
|
||||
// Assert 1: Check that the registration was successful and the returned profile is correct.
|
||||
expect(registerResponse.status).toBe(201);
|
||||
const registeredProfile = registerResponse.body.userprofile;
|
||||
const registeredToken = registerResponse.body.token;
|
||||
expect(registeredProfile.user.email).toBe(email);
|
||||
expect(registeredProfile.avatar_url).toBeNull(); // The API should return null for the avatar_url.
|
||||
|
||||
// Add the newly created user's ID to the array for cleanup in afterAll.
|
||||
createdUserIds.push(registeredProfile.user.user_id);
|
||||
|
||||
// Assert 2 (Verification): Fetch the profile using the new token to confirm the value in the DB is null.
|
||||
const profileResponse = await request
|
||||
.get('/api/users/profile')
|
||||
.set('Authorization', `Bearer ${registeredToken}`);
|
||||
|
||||
expect(profileResponse.status).toBe(200);
|
||||
expect(profileResponse.body.avatar_url).toBeNull();
|
||||
});
|
||||
|
||||
it('should successfully refresh an access token using a refresh token cookie', async () => {
|
||||
// Arrange: Log in to get a fresh, valid refresh token cookie for this specific test.
|
||||
// This ensures the test is self-contained and not affected by other tests.
|
||||
@@ -138,4 +170,29 @@ describe('Authentication API Integration', () => {
|
||||
expect(logoutSetCookieHeader).toContain('refreshToken=;');
|
||||
expect(logoutSetCookieHeader).toContain('Max-Age=0');
|
||||
});
|
||||
|
||||
describe('Rate Limiting', () => {
|
||||
// This test requires the `skip: () => isTestEnv` line in the `forgotPasswordLimiter`
|
||||
// configuration within `src/routes/auth.routes.ts` to be commented out or removed.
|
||||
it('should block requests to /forgot-password after exceeding the limit', async () => {
|
||||
const email = testUserEmail; // Use the user created in beforeAll
|
||||
const limit = 5; // Based on the configuration in auth.routes.ts
|
||||
|
||||
// Send requests up to the limit. These should all pass.
|
||||
for (let i = 0; i < limit; i++) {
|
||||
const response = await request.post('/api/auth/forgot-password').send({ email });
|
||||
|
||||
// The endpoint returns 200 even for non-existent users to prevent email enumeration.
|
||||
expect(response.status).toBe(200);
|
||||
}
|
||||
|
||||
// The next request (the 6th one) should be blocked.
|
||||
const blockedResponse = await request.post('/api/auth/forgot-password').send({ email });
|
||||
|
||||
expect(blockedResponse.status).toBe(429);
|
||||
expect(blockedResponse.text).toContain(
|
||||
'Too many password reset requests from this IP, please try again after 15 minutes.',
|
||||
);
|
||||
}, 15000); // Increase timeout to handle multiple sequential requests
|
||||
});
|
||||
});
|
||||
|
||||
82
src/tests/integration/budget.integration.test.ts
Normal file
82
src/tests/integration/budget.integration.test.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
// src/tests/integration/budget.integration.test.ts
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import app from '../../../server';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import type { UserProfile, Budget } from '../../types';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
const request = supertest(app);
|
||||
|
||||
describe('Budget API Routes Integration Tests', () => {
|
||||
let testUser: UserProfile;
|
||||
let authToken: string;
|
||||
let testBudget: Budget;
|
||||
const createdUserIds: string[] = [];
|
||||
const createdBudgetIds: number[] = [];
|
||||
|
||||
beforeAll(async () => {
|
||||
// 1. Create a user for the tests
|
||||
const { user, token } = await createAndLoginUser({
|
||||
email: `budget-user-${Date.now()}@example.com`,
|
||||
fullName: 'Budget Test User',
|
||||
request,
|
||||
});
|
||||
testUser = user;
|
||||
authToken = token;
|
||||
createdUserIds.push(user.user.user_id);
|
||||
|
||||
// 2. Seed some budget data for this user directly in the DB for predictable testing
|
||||
const budgetToCreate = {
|
||||
name: 'Monthly Groceries',
|
||||
amount_cents: 50000, // $500.00
|
||||
period: 'monthly',
|
||||
start_date: '2025-01-01',
|
||||
};
|
||||
|
||||
const budgetRes = await getPool().query(
|
||||
`INSERT INTO public.budgets (user_id, name, amount_cents, period, start_date)
|
||||
VALUES ($1, $2, $3, $4, $5)
|
||||
RETURNING *`,
|
||||
[testUser.user.user_id, budgetToCreate.name, budgetToCreate.amount_cents, budgetToCreate.period, budgetToCreate.start_date],
|
||||
);
|
||||
testBudget = budgetRes.rows[0];
|
||||
createdBudgetIds.push(testBudget.budget_id);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up all created resources
|
||||
await cleanupDb({
|
||||
userIds: createdUserIds,
|
||||
budgetIds: createdBudgetIds,
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/budgets', () => {
|
||||
it('should fetch budgets for the authenticated user', async () => {
|
||||
const response = await request
|
||||
.get('/api/budgets')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
const budgets: Budget[] = response.body;
|
||||
expect(budgets).toBeInstanceOf(Array);
|
||||
expect(budgets.some(b => b.budget_id === testBudget.budget_id)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return 401 if user is not authenticated', async () => {
|
||||
const response = await request.get('/api/budgets');
|
||||
expect(response.status).toBe(401);
|
||||
});
|
||||
});
|
||||
|
||||
it.todo('should allow an authenticated user to create a new budget');
|
||||
it.todo('should allow an authenticated user to update their own budget');
|
||||
it.todo('should allow an authenticated user to delete their own budget');
|
||||
it.todo('should return spending analysis for the authenticated user');
|
||||
});
|
||||
@@ -10,6 +10,11 @@ import { generateFileChecksum } from '../../utils/checksum';
|
||||
import { logger } from '../../services/logger.server';
|
||||
import type { UserProfile } from '../../types';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import { cleanupFiles } from '../utils/cleanupFiles';
|
||||
import piexif from 'piexifjs';
|
||||
import exifParser from 'exif-parser';
|
||||
import sharp from 'sharp';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
@@ -20,39 +25,21 @@ const request = supertest(app);
|
||||
describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const createdUserIds: string[] = [];
|
||||
const createdFlyerIds: number[] = [];
|
||||
const createdFilePaths: string[] = [];
|
||||
|
||||
beforeAll(async () => {
|
||||
// This setup is now simpler as the worker handles fetching master items.
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up all entities created during the tests using their collected IDs.
|
||||
// This is safer than using LIKE queries.
|
||||
if (createdFlyerIds.length > 0) {
|
||||
await getPool().query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::bigint[])', [
|
||||
createdFlyerIds,
|
||||
]);
|
||||
}
|
||||
if (createdUserIds.length > 0) {
|
||||
await getPool().query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [
|
||||
createdUserIds,
|
||||
]);
|
||||
}
|
||||
// Use the centralized cleanup utility.
|
||||
await cleanupDb({
|
||||
userIds: createdUserIds,
|
||||
flyerIds: createdFlyerIds,
|
||||
});
|
||||
|
||||
// Clean up any files created in the flyer-images directory during tests.
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
try {
|
||||
const files = await fs.readdir(uploadDir);
|
||||
// Use a more specific filter to only target files created by this test suite.
|
||||
const testFiles = files.filter((f) => f.includes('test-flyer-image'));
|
||||
for (const file of testFiles) {
|
||||
await fs.unlink(path.join(uploadDir, file));
|
||||
// Also try to remove from the icons subdirectory
|
||||
await fs.unlink(path.join(uploadDir, 'icons', `icon-${file}`)).catch(() => {});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error during test file cleanup:', error);
|
||||
}
|
||||
// Use the centralized file cleanup utility.
|
||||
await cleanupFiles(createdFilePaths);
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -70,6 +57,13 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track created files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
// The icon name is derived from the original filename.
|
||||
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
|
||||
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
|
||||
|
||||
// Act 1: Upload the file to start the background job.
|
||||
const uploadReq = request
|
||||
.post('/api/ai/upload-and-process')
|
||||
@@ -88,6 +82,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
let jobStatus;
|
||||
const maxRetries = 30; // Poll for up to 90 seconds (30 * 3s)
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
console.log(`Polling attempt ${i + 1}...`);
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3 seconds between polls
|
||||
const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
|
||||
if (token) {
|
||||
@@ -95,6 +90,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
}
|
||||
const statusResponse = await statusReq;
|
||||
jobStatus = statusResponse.body;
|
||||
console.log(`Job status: ${JSON.stringify(jobStatus)}`);
|
||||
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
|
||||
break;
|
||||
}
|
||||
@@ -115,6 +111,11 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
|
||||
expect(savedFlyer).toBeDefined();
|
||||
expect(savedFlyer?.flyer_id).toBe(flyerId);
|
||||
expect(savedFlyer?.file_name).toBe(uniqueFileName);
|
||||
// Also add the final processed image path to the cleanup list.
|
||||
// This is important because JPEGs are re-processed to strip EXIF data, creating a new file.
|
||||
const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
|
||||
createdFilePaths.push(savedImagePath);
|
||||
|
||||
const items = await db.flyerRepo.getFlyerItems(flyerId, logger);
|
||||
// The stubbed AI response returns items, so we expect them to be here.
|
||||
@@ -154,4 +155,173 @@ describe('Flyer Processing Background Job Integration Test', () => {
|
||||
// Act & Assert: Call the test helper without a user or token.
|
||||
await runBackgroundProcessingTest();
|
||||
}, 120000); // Increase timeout to 120 seconds for this long-running test
|
||||
|
||||
it(
|
||||
'should strip EXIF data from uploaded JPEG images during processing',
|
||||
async () => {
|
||||
// Arrange: Create a user for this test
|
||||
const { user: authUser, token } = await createAndLoginUser({
|
||||
email: `exif-user-${Date.now()}@example.com`,
|
||||
fullName: 'EXIF Tester',
|
||||
request,
|
||||
});
|
||||
createdUserIds.push(authUser.user.user_id);
|
||||
|
||||
// 1. Create an image buffer with EXIF data
|
||||
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
|
||||
const imageBuffer = await fs.readFile(imagePath);
|
||||
const jpegDataAsString = imageBuffer.toString('binary');
|
||||
|
||||
const exifObj = {
|
||||
'0th': { [piexif.ImageIFD.Software]: 'Gemini Code Assist Test' },
|
||||
Exif: { [piexif.ExifIFD.DateTimeOriginal]: '2025:12:25 10:00:00' },
|
||||
};
|
||||
const exifBytes = piexif.dump(exifObj);
|
||||
const jpegWithExif = piexif.insert(exifBytes, jpegDataAsString);
|
||||
const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary');
|
||||
|
||||
const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`;
|
||||
const mockImageFile = new File([imageWithExifBuffer], uniqueFileName, { type: 'image/jpeg' });
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track original and derived files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
|
||||
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
|
||||
|
||||
// 2. Act: Upload the file and wait for processing
|
||||
const uploadResponse = await request
|
||||
.post('/api/ai/upload-and-process')
|
||||
.set('Authorization', `Bearer ${token}`)
|
||||
.field('checksum', checksum)
|
||||
.attach('flyerFile', imageWithExifBuffer, uniqueFileName);
|
||||
|
||||
const { jobId } = uploadResponse.body;
|
||||
expect(jobId).toBeTypeOf('string');
|
||||
|
||||
// Poll for job completion
|
||||
let jobStatus;
|
||||
const maxRetries = 30; // Poll for up to 90 seconds
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||
const statusResponse = await request
|
||||
.get(`/api/ai/jobs/${jobId}/status`)
|
||||
.set('Authorization', `Bearer ${token}`);
|
||||
jobStatus = statusResponse.body;
|
||||
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Assert
|
||||
if (jobStatus?.state === 'failed') {
|
||||
console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason);
|
||||
}
|
||||
expect(jobStatus?.state).toBe('completed');
|
||||
const flyerId = jobStatus?.returnValue?.flyerId;
|
||||
expect(flyerId).toBeTypeOf('number');
|
||||
createdFlyerIds.push(flyerId);
|
||||
|
||||
// 4. Verify EXIF data is stripped from the saved file
|
||||
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
|
||||
expect(savedFlyer).toBeDefined();
|
||||
|
||||
const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
|
||||
createdFilePaths.push(savedImagePath); // Add final path for cleanup
|
||||
|
||||
const savedImageBuffer = await fs.readFile(savedImagePath);
|
||||
const parser = exifParser.create(savedImageBuffer);
|
||||
const exifResult = parser.parse();
|
||||
|
||||
// The `tags` object will be empty if no EXIF data is found.
|
||||
expect(exifResult.tags).toEqual({});
|
||||
expect(exifResult.tags.Software).toBeUndefined();
|
||||
},
|
||||
120000,
|
||||
);
|
||||
|
||||
it(
|
||||
'should strip metadata from uploaded PNG images during processing',
|
||||
async () => {
|
||||
// Arrange: Create a user for this test
|
||||
const { user: authUser, token } = await createAndLoginUser({
|
||||
email: `png-meta-user-${Date.now()}@example.com`,
|
||||
fullName: 'PNG Metadata Tester',
|
||||
request,
|
||||
});
|
||||
createdUserIds.push(authUser.user.user_id);
|
||||
|
||||
// 1. Create a PNG image buffer with custom metadata using sharp
|
||||
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
|
||||
|
||||
const imageWithMetadataBuffer = await sharp(imagePath)
|
||||
.png() // Convert to PNG
|
||||
.withMetadata({
|
||||
exif: {
|
||||
IFD0: {
|
||||
Copyright: 'Gemini Code Assist PNG Test',
|
||||
},
|
||||
},
|
||||
})
|
||||
.toBuffer();
|
||||
|
||||
const uniqueFileName = `test-flyer-with-metadata-${Date.now()}.png`;
|
||||
const mockImageFile = new File([Buffer.from(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' });
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
|
||||
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
|
||||
|
||||
// 2. Act: Upload the file and wait for processing
|
||||
const uploadResponse = await request
|
||||
.post('/api/ai/upload-and-process')
|
||||
.set('Authorization', `Bearer ${token}`)
|
||||
.field('checksum', checksum)
|
||||
.attach('flyerFile', imageWithMetadataBuffer, uniqueFileName);
|
||||
|
||||
const { jobId } = uploadResponse.body;
|
||||
expect(jobId).toBeTypeOf('string');
|
||||
|
||||
// Poll for job completion
|
||||
let jobStatus;
|
||||
const maxRetries = 30;
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||
const statusResponse = await request
|
||||
.get(`/api/ai/jobs/${jobId}/status`)
|
||||
.set('Authorization', `Bearer ${token}`);
|
||||
jobStatus = statusResponse.body;
|
||||
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Assert job completion
|
||||
if (jobStatus?.state === 'failed') {
|
||||
console.error('[DEBUG] PNG metadata test job failed:', jobStatus.failedReason);
|
||||
}
|
||||
expect(jobStatus?.state).toBe('completed');
|
||||
const flyerId = jobStatus?.returnValue?.flyerId;
|
||||
expect(flyerId).toBeTypeOf('number');
|
||||
createdFlyerIds.push(flyerId);
|
||||
|
||||
// 4. Verify metadata is stripped from the saved file
|
||||
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
|
||||
expect(savedFlyer).toBeDefined();
|
||||
|
||||
const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
|
||||
createdFilePaths.push(savedImagePath); // Add final path for cleanup
|
||||
|
||||
const savedImageMetadata = await sharp(savedImagePath).metadata();
|
||||
|
||||
// The test should fail here initially because PNGs are not processed.
|
||||
// The `exif` property should be undefined after the fix.
|
||||
expect(savedImageMetadata.exif).toBeUndefined();
|
||||
},
|
||||
120000,
|
||||
);
|
||||
});
|
||||
|
||||
131
src/tests/integration/gamification.integration.test.ts
Normal file
131
src/tests/integration/gamification.integration.test.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
// src/tests/integration/gamification.integration.test.ts
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import app from '../../../server';
|
||||
import path from 'path';
|
||||
import fs from 'node:fs/promises';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { generateFileChecksum } from '../../utils/checksum';
|
||||
import * as db from '../../services/db/index.db';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import { logger } from '../../services/logger.server';
|
||||
import type { UserProfile, UserAchievement, LeaderboardUser, Achievement } from '../../types';
|
||||
import { cleanupFiles } from '../utils/cleanupFiles';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
const request = supertest(app);
|
||||
|
||||
describe('Gamification Flow Integration Test', () => {
|
||||
let testUser: UserProfile;
|
||||
let authToken: string;
|
||||
const createdFlyerIds: number[] = [];
|
||||
const createdFilePaths: string[] = [];
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create a new user specifically for this test suite to ensure a clean slate.
|
||||
({ user: testUser, token: authToken } = await createAndLoginUser({
|
||||
email: `gamification-user-${Date.now()}@example.com`,
|
||||
fullName: 'Gamification Tester',
|
||||
request,
|
||||
}));
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await cleanupDb({
|
||||
userIds: testUser ? [testUser.user.user_id] : [],
|
||||
flyerIds: createdFlyerIds,
|
||||
});
|
||||
await cleanupFiles(createdFilePaths);
|
||||
});
|
||||
|
||||
it(
|
||||
'should award the "First Upload" achievement after a user successfully uploads and processes their first flyer',
|
||||
async () => {
|
||||
// --- Arrange: Prepare a unique flyer file for upload ---
|
||||
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
|
||||
const imageBuffer = await fs.readFile(imagePath);
|
||||
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
|
||||
const uniqueFileName = `gamification-test-flyer-${Date.now()}.jpg`;
|
||||
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
|
||||
const checksum = await generateFileChecksum(mockImageFile);
|
||||
|
||||
// Track created files for cleanup
|
||||
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||
createdFilePaths.push(path.join(uploadDir, uniqueFileName));
|
||||
const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`;
|
||||
createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName));
|
||||
|
||||
// --- Act 1: Upload the flyer to trigger the background job ---
|
||||
const uploadResponse = await request
|
||||
.post('/api/ai/upload-and-process')
|
||||
.set('Authorization', `Bearer ${authToken}`)
|
||||
.field('checksum', checksum)
|
||||
.attach('flyerFile', uniqueContent, uniqueFileName);
|
||||
|
||||
const { jobId } = uploadResponse.body;
|
||||
expect(jobId).toBeTypeOf('string');
|
||||
|
||||
// --- Act 2: Poll for job completion ---
|
||||
let jobStatus;
|
||||
const maxRetries = 30; // Poll for up to 90 seconds
|
||||
for (let i = 0; i < maxRetries; i++) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 3000));
|
||||
const statusResponse = await request
|
||||
.get(`/api/ai/jobs/${jobId}/status`)
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
jobStatus = statusResponse.body;
|
||||
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// --- Assert 1: Verify the job completed successfully ---
|
||||
if (jobStatus?.state === 'failed') {
|
||||
console.error('[DEBUG] Gamification test job failed:', jobStatus.failedReason);
|
||||
}
|
||||
expect(jobStatus?.state).toBe('completed');
|
||||
const flyerId = jobStatus?.returnValue?.flyerId;
|
||||
expect(flyerId).toBeTypeOf('number');
|
||||
createdFlyerIds.push(flyerId); // Track for cleanup
|
||||
|
||||
// --- Assert 1.5: Verify the flyer was saved with the correct original filename ---
|
||||
const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
|
||||
expect(savedFlyer).toBeDefined();
|
||||
expect(savedFlyer?.file_name).toBe(uniqueFileName);
|
||||
// Also add the final processed image path to the cleanup list.
|
||||
// This is important because JPEGs are re-processed to strip EXIF data, creating a new file.
|
||||
const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url));
|
||||
createdFilePaths.push(savedImagePath);
|
||||
|
||||
// --- Act 3: Fetch the user's achievements ---
|
||||
const achievementsResponse = await request
|
||||
.get('/api/achievements/me')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
const userAchievements: (UserAchievement & Achievement)[] = achievementsResponse.body;
|
||||
|
||||
// --- Assert 2: Verify the "First-Upload" achievement was awarded ---
|
||||
// The 'user_registered' achievement is awarded on creation, so we expect at least two.
|
||||
expect(userAchievements.length).toBeGreaterThanOrEqual(2);
|
||||
const firstUploadAchievement = userAchievements.find((ach) => ach.name === 'First-Upload');
|
||||
expect(firstUploadAchievement).toBeDefined();
|
||||
expect(firstUploadAchievement?.points_value).toBeGreaterThan(0);
|
||||
|
||||
// --- Act 4: Fetch the leaderboard ---
|
||||
const leaderboardResponse = await request.get('/api/achievements/leaderboard');
|
||||
const leaderboard: LeaderboardUser[] = leaderboardResponse.body;
|
||||
|
||||
// --- Assert 3: Verify the user is on the leaderboard with points ---
|
||||
const userOnLeaderboard = leaderboard.find((u) => u.user_id === testUser.user.user_id);
|
||||
expect(userOnLeaderboard).toBeDefined();
|
||||
// The user should have points from 'user_registered' and 'First-Upload'.
|
||||
// We check that the points are greater than or equal to the points from the upload achievement.
|
||||
expect(Number(userOnLeaderboard?.points)).toBeGreaterThanOrEqual(
|
||||
firstUploadAchievement!.points_value,
|
||||
);
|
||||
},
|
||||
120000, // Increase timeout to 120 seconds for this long-running test
|
||||
);
|
||||
});
|
||||
145
src/tests/integration/notification.integration.test.ts
Normal file
145
src/tests/integration/notification.integration.test.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
// src/tests/integration/notification.integration.test.ts
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import app from '../../../server';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import type { UserProfile, Notification } from '../../types';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
const request = supertest(app);
|
||||
|
||||
describe('Notification API Routes Integration Tests', () => {
|
||||
let testUser: UserProfile;
|
||||
let authToken: string;
|
||||
const createdUserIds: string[] = [];
|
||||
|
||||
beforeAll(async () => {
|
||||
// 1. Create a user for the tests
|
||||
const { user, token } = await createAndLoginUser({
|
||||
email: `notification-user-${Date.now()}@example.com`,
|
||||
fullName: 'Notification Test User',
|
||||
request,
|
||||
});
|
||||
testUser = user;
|
||||
authToken = token;
|
||||
createdUserIds.push(user.user.user_id);
|
||||
|
||||
// 2. Seed some notifications for this user directly in the DB for predictable testing
|
||||
const notificationsToCreate = [
|
||||
{ content: 'Your first unread notification', is_read: false },
|
||||
{ content: 'Your second unread notification', is_read: false },
|
||||
{ content: 'An old, read notification', is_read: true },
|
||||
];
|
||||
|
||||
for (const n of notificationsToCreate) {
|
||||
await getPool().query(
|
||||
`INSERT INTO public.notifications (user_id, content, is_read, link_url)
|
||||
VALUES ($1, $2, $3, '/dashboard')`,
|
||||
[testUser.user.user_id, n.content, n.is_read],
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Notifications are deleted via CASCADE when the user is deleted.
|
||||
await cleanupDb({
|
||||
userIds: createdUserIds,
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/users/notifications', () => {
|
||||
it('should fetch unread notifications for the authenticated user by default', async () => {
|
||||
const response = await request
|
||||
.get('/api/users/notifications')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
const notifications: Notification[] = response.body;
|
||||
expect(notifications).toHaveLength(2); // Only the two unread ones
|
||||
expect(notifications.every((n) => !n.is_read)).toBe(true);
|
||||
});
|
||||
|
||||
it('should fetch all notifications when includeRead=true', async () => {
|
||||
const response = await request
|
||||
.get('/api/users/notifications?includeRead=true')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
const notifications: Notification[] = response.body;
|
||||
expect(notifications).toHaveLength(3); // All three notifications
|
||||
});
|
||||
|
||||
it('should respect pagination with limit and offset', async () => {
|
||||
// Fetch with limit=1, should get the latest unread notification
|
||||
const response1 = await request
|
||||
.get('/api/users/notifications?limit=1')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
|
||||
expect(response1.status).toBe(200);
|
||||
const notifications1: Notification[] = response1.body;
|
||||
expect(notifications1).toHaveLength(1);
|
||||
expect(notifications1[0].content).toBe('Your second unread notification'); // Assuming DESC order
|
||||
|
||||
// Fetch with limit=1 and offset=1, should get the older unread notification
|
||||
const response2 = await request
|
||||
.get('/api/users/notifications?limit=1&offset=1')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
|
||||
expect(response2.status).toBe(200);
|
||||
const notifications2: Notification[] = response2.body;
|
||||
expect(notifications2).toHaveLength(1);
|
||||
expect(notifications2[0].content).toBe('Your first unread notification');
|
||||
});
|
||||
|
||||
it('should return 401 if user is not authenticated', async () => {
|
||||
const response = await request.get('/api/users/notifications');
|
||||
expect(response.status).toBe(401);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/users/notifications/:notificationId/mark-read', () => {
|
||||
it('should mark a single notification as read', async () => {
|
||||
const pool = getPool();
|
||||
const unreadNotifRes = await pool.query(
|
||||
`SELECT notification_id FROM public.notifications WHERE user_id = $1 AND is_read = false ORDER BY created_at ASC LIMIT 1`,
|
||||
[testUser.user.user_id],
|
||||
);
|
||||
const notificationIdToMark = unreadNotifRes.rows[0].notification_id;
|
||||
|
||||
const response = await request
|
||||
.post(`/api/users/notifications/${notificationIdToMark}/mark-read`)
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
|
||||
expect(response.status).toBe(204);
|
||||
|
||||
// Verify in the database
|
||||
const verifyRes = await pool.query(
|
||||
`SELECT is_read FROM public.notifications WHERE notification_id = $1`,
|
||||
[notificationIdToMark],
|
||||
);
|
||||
expect(verifyRes.rows[0].is_read).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /api/users/notifications/mark-all-read', () => {
|
||||
it('should mark all unread notifications as read', async () => {
|
||||
const response = await request
|
||||
.post('/api/users/notifications/mark-all-read')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
|
||||
expect(response.status).toBe(204);
|
||||
|
||||
// Verify in the database
|
||||
const finalUnreadCountRes = await getPool().query(
|
||||
`SELECT COUNT(*) FROM public.notifications WHERE user_id = $1 AND is_read = false`,
|
||||
[testUser.user.user_id],
|
||||
);
|
||||
expect(Number(finalUnreadCountRes.rows[0].count)).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -12,6 +12,7 @@ import type {
|
||||
UserProfile,
|
||||
} from '../../types';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
|
||||
/**
|
||||
@@ -25,6 +26,7 @@ describe('Public API Routes Integration Tests', () => {
|
||||
let testUser: UserProfile;
|
||||
let testRecipe: Recipe;
|
||||
let testFlyer: Flyer;
|
||||
let testStoreId: number;
|
||||
|
||||
beforeAll(async () => {
|
||||
const pool = getPool();
|
||||
@@ -36,6 +38,7 @@ describe('Public API Routes Integration Tests', () => {
|
||||
email: userEmail,
|
||||
password: 'a-Very-Strong-Password-123!',
|
||||
fullName: 'Public Routes Test User',
|
||||
request,
|
||||
});
|
||||
testUser = createdUser;
|
||||
|
||||
@@ -72,11 +75,11 @@ describe('Public API Routes Integration Tests', () => {
|
||||
const storeRes = await pool.query(
|
||||
`INSERT INTO public.stores (name) VALUES ('Public Routes Test Store') RETURNING store_id`,
|
||||
);
|
||||
const storeId = storeRes.rows[0].store_id;
|
||||
testStoreId = storeRes.rows[0].store_id;
|
||||
const flyerRes = await pool.query(
|
||||
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
|
||||
VALUES ($1, 'public-routes-test.jpg', 'http://test.com/public-routes.jpg', 1, $2) RETURNING *`,
|
||||
[storeId, `checksum-public-routes-${Date.now()}`],
|
||||
[testStoreId, `checksum-public-routes-${Date.now()}`],
|
||||
);
|
||||
testFlyer = flyerRes.rows[0];
|
||||
|
||||
@@ -88,16 +91,12 @@ describe('Public API Routes Integration Tests', () => {
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
const pool = getPool();
|
||||
if (testRecipe) {
|
||||
await pool.query('DELETE FROM public.recipes WHERE recipe_id = $1', [testRecipe.recipe_id]);
|
||||
}
|
||||
if (testUser) {
|
||||
await pool.query('DELETE FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
|
||||
}
|
||||
if (testFlyer) {
|
||||
await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [testFlyer.flyer_id]);
|
||||
}
|
||||
await cleanupDb({
|
||||
userIds: testUser ? [testUser.user.user_id] : [],
|
||||
recipeIds: testRecipe ? [testRecipe.recipe_id] : [],
|
||||
flyerIds: testFlyer ? [testFlyer.flyer_id] : [],
|
||||
storeIds: testStoreId ? [testStoreId] : [],
|
||||
});
|
||||
});
|
||||
|
||||
describe('Health Check Endpoints', () => {
|
||||
|
||||
127
src/tests/integration/recipe.integration.test.ts
Normal file
127
src/tests/integration/recipe.integration.test.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
// src/tests/integration/recipe.integration.test.ts
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import app from '../../../server';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
import type { UserProfile, Recipe, RecipeComment } from '../../types';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
const request = supertest(app);
|
||||
|
||||
describe('Recipe API Routes Integration Tests', () => {
|
||||
let testUser: UserProfile;
|
||||
let authToken: string;
|
||||
let testRecipe: Recipe;
|
||||
const createdUserIds: string[] = [];
|
||||
const createdRecipeIds: number[] = [];
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create a user to own the recipe and perform authenticated actions
|
||||
const { user, token } = await createAndLoginUser({
|
||||
email: `recipe-user-${Date.now()}@example.com`,
|
||||
fullName: 'Recipe Test User',
|
||||
request,
|
||||
});
|
||||
testUser = user;
|
||||
authToken = token;
|
||||
createdUserIds.push(user.user.user_id);
|
||||
|
||||
// Create a recipe owned by the test user
|
||||
const recipeRes = await getPool().query(
|
||||
`INSERT INTO public.recipes (name, instructions, user_id, status, description)
|
||||
VALUES ('Integration Test Recipe', '1. Do this. 2. Do that.', $1, 'public', 'A test recipe description.')
|
||||
RETURNING *`,
|
||||
[testUser.user.user_id],
|
||||
);
|
||||
testRecipe = recipeRes.rows[0];
|
||||
createdRecipeIds.push(testRecipe.recipe_id);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up all created resources
|
||||
await cleanupDb({
|
||||
userIds: createdUserIds,
|
||||
recipeIds: createdRecipeIds,
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api/recipes/:recipeId', () => {
|
||||
it('should fetch a single public recipe by its ID', async () => {
|
||||
const response = await request.get(`/api/recipes/${testRecipe.recipe_id}`);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toBeDefined();
|
||||
expect(response.body.recipe_id).toBe(testRecipe.recipe_id);
|
||||
expect(response.body.name).toBe('Integration Test Recipe');
|
||||
});
|
||||
|
||||
it('should return 404 for a non-existent recipe ID', async () => {
|
||||
const response = await request.get('/api/recipes/999999');
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
});
|
||||
|
||||
// Placeholder for future tests
|
||||
// Skipping this test as the POST /api/recipes endpoint for creation does not appear to be implemented.
|
||||
// The test currently fails with a 404 Not Found.
|
||||
it.skip('should allow an authenticated user to create a new recipe', async () => {
|
||||
const newRecipeData = {
|
||||
name: 'My New Awesome Recipe',
|
||||
instructions: '1. Be awesome. 2. Make recipe.',
|
||||
description: 'A recipe created during an integration test.',
|
||||
};
|
||||
|
||||
const response = await request
|
||||
.post('/api/recipes') // This endpoint does not exist, causing a 404.
|
||||
.set('Authorization', `Bearer ${authToken}`)
|
||||
.send(newRecipeData);
|
||||
|
||||
// Assert the response from the POST request
|
||||
expect(response.status).toBe(201);
|
||||
const createdRecipe: Recipe = response.body;
|
||||
expect(createdRecipe).toBeDefined();
|
||||
expect(createdRecipe.recipe_id).toBeTypeOf('number');
|
||||
expect(createdRecipe.name).toBe(newRecipeData.name);
|
||||
expect(createdRecipe.user_id).toBe(testUser.user.user_id);
|
||||
|
||||
// Add the new recipe ID to the cleanup array to ensure it's deleted after tests
|
||||
createdRecipeIds.push(createdRecipe.recipe_id);
|
||||
|
||||
// Verify the recipe can be fetched from the public endpoint
|
||||
const verifyResponse = await request.get(`/api/recipes/${createdRecipe.recipe_id}`);
|
||||
expect(verifyResponse.status).toBe(200);
|
||||
expect(verifyResponse.body.name).toBe(newRecipeData.name);
|
||||
});
|
||||
it('should allow an authenticated user to update their own recipe', async () => {
|
||||
const recipeUpdates = {
|
||||
name: 'Updated Integration Test Recipe',
|
||||
instructions: '1. Do the new thing. 2. Do the other new thing.',
|
||||
};
|
||||
|
||||
const response = await request
|
||||
.put(`/api/users/recipes/${testRecipe.recipe_id}`) // Authenticated recipe update endpoint
|
||||
.set('Authorization', `Bearer ${authToken}`)
|
||||
.send(recipeUpdates);
|
||||
|
||||
// Assert the response from the PUT request
|
||||
expect(response.status).toBe(200);
|
||||
const updatedRecipe: Recipe = response.body;
|
||||
expect(updatedRecipe.name).toBe(recipeUpdates.name);
|
||||
expect(updatedRecipe.instructions).toBe(recipeUpdates.instructions);
|
||||
|
||||
// Verify the changes were persisted by fetching the recipe again
|
||||
const verifyResponse = await request.get(`/api/recipes/${testRecipe.recipe_id}`);
|
||||
expect(verifyResponse.status).toBe(200);
|
||||
expect(verifyResponse.body.name).toBe(recipeUpdates.name);
|
||||
});
|
||||
it.todo('should prevent a user from updating another user\'s recipe');
|
||||
it.todo('should allow an authenticated user to delete their own recipe');
|
||||
it.todo('should prevent a user from deleting another user\'s recipe');
|
||||
it.todo('should allow an authenticated user to post a comment on a recipe');
|
||||
it.todo('should allow an authenticated user to fork a recipe');
|
||||
});
|
||||
@@ -6,6 +6,7 @@ import { logger } from '../../services/logger.server';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
import type { UserProfile, MasterGroceryItem, ShoppingList } from '../../types';
|
||||
import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
@@ -16,6 +17,7 @@ const request = supertest(app);
|
||||
describe('User API Routes Integration Tests', () => {
|
||||
let testUser: UserProfile;
|
||||
let authToken: string;
|
||||
const createdUserIds: string[] = [];
|
||||
|
||||
// Before any tests run, create a new user and log them in.
|
||||
// The token will be used for all subsequent API calls in this test suite.
|
||||
@@ -24,28 +26,13 @@ describe('User API Routes Integration Tests', () => {
|
||||
const { user, token } = await createAndLoginUser({ email, fullName: 'Test User', request });
|
||||
testUser = user;
|
||||
authToken = token;
|
||||
createdUserIds.push(user.user.user_id);
|
||||
});
|
||||
|
||||
// After all tests, clean up by deleting the created user.
|
||||
// This now cleans up ALL users created by this test suite to prevent pollution.
|
||||
afterAll(async () => {
|
||||
const pool = getPool();
|
||||
try {
|
||||
// Find all users created during this test run by their email pattern.
|
||||
const res = await pool.query(
|
||||
"SELECT user_id FROM public.users WHERE email LIKE 'user-test-%' OR email LIKE 'delete-me-%' OR email LIKE 'reset-me-%'",
|
||||
);
|
||||
if (res.rows.length > 0) {
|
||||
const userIds = res.rows.map((r) => r.user_id);
|
||||
logger.debug(
|
||||
`[user.integration.test.ts afterAll] Cleaning up ${userIds.length} test users...`,
|
||||
);
|
||||
// Use a direct DB query for cleanup, which is faster and more reliable than API calls.
|
||||
await pool.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Failed to clean up test users from database.');
|
||||
}
|
||||
await cleanupDb({ userIds: createdUserIds });
|
||||
});
|
||||
|
||||
it('should fetch the authenticated user profile via GET /api/users/profile', async () => {
|
||||
@@ -88,6 +75,32 @@ describe('User API Routes Integration Tests', () => {
|
||||
expect(refetchedProfile.full_name).toBe('Updated Test User');
|
||||
});
|
||||
|
||||
it('should allow updating the profile with an empty string for avatar_url', async () => {
|
||||
// Arrange: Define the profile updates.
|
||||
const profileUpdates = {
|
||||
full_name: 'Empty Avatar User',
|
||||
avatar_url: '',
|
||||
};
|
||||
|
||||
// Act: Call the update endpoint with the new data and the auth token.
|
||||
const response = await request
|
||||
.put('/api/users/profile')
|
||||
.set('Authorization', `Bearer ${authToken}`)
|
||||
.send(profileUpdates);
|
||||
const updatedProfile = response.body;
|
||||
|
||||
// Assert: Check that the returned profile reflects the changes.
|
||||
expect(response.status).toBe(200);
|
||||
expect(updatedProfile.full_name).toBe('Empty Avatar User');
|
||||
expect(updatedProfile.avatar_url).toBeNull();
|
||||
|
||||
// Also, fetch the profile again to ensure the change was persisted in the database as NULL.
|
||||
const refetchResponse = await request
|
||||
.get('/api/users/profile')
|
||||
.set('Authorization', `Bearer ${authToken}`);
|
||||
expect(refetchResponse.body.avatar_url).toBeNull();
|
||||
});
|
||||
|
||||
it('should update user preferences via PUT /api/users/profile/preferences', async () => {
|
||||
// Arrange: Define the preference updates.
|
||||
const preferenceUpdates = {
|
||||
@@ -130,7 +143,8 @@ describe('User API Routes Integration Tests', () => {
|
||||
it('should allow a user to delete their own account and then fail to log in', async () => {
|
||||
// Arrange: Create a new, separate user just for this deletion test.
|
||||
const deletionEmail = `delete-me-${Date.now()}@example.com`;
|
||||
const { token: deletionToken } = await createAndLoginUser({ email: deletionEmail, request });
|
||||
const { user: deletionUser, token: deletionToken } = await createAndLoginUser({ email: deletionEmail, request });
|
||||
createdUserIds.push(deletionUser.user.user_id);
|
||||
|
||||
// Act: Call the delete endpoint with the correct password and token.
|
||||
const response = await request
|
||||
@@ -156,6 +170,7 @@ describe('User API Routes Integration Tests', () => {
|
||||
// Arrange: Create a new user for the password reset flow.
|
||||
const resetEmail = `reset-me-${Date.now()}@example.com`;
|
||||
const { user: resetUser } = await createAndLoginUser({ email: resetEmail, request });
|
||||
createdUserIds.push(resetUser.user.user_id);
|
||||
|
||||
// Act 1: Request a password reset. In our test environment, the token is returned in the response.
|
||||
const resetRequestRawResponse = await request
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import app from '../../../server';
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
import type { UserProfile } from '../../types';
|
||||
import { createAndLoginUser } from '../utils/testHelpers';
|
||||
import { cleanupDb } from '../utils/cleanup';
|
||||
|
||||
/**
|
||||
* @vitest-environment node
|
||||
@@ -29,10 +29,7 @@ describe('User Routes Integration Tests (/api/users)', () => {
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (testUser) {
|
||||
// Clean up the created user from the database
|
||||
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUser.user.user_id]);
|
||||
}
|
||||
await cleanupDb({ userIds: testUser ? [testUser.user.user_id] : [] });
|
||||
});
|
||||
|
||||
describe('GET /api/users/profile', () => {
|
||||
|
||||
85
src/tests/utils/cleanup.ts
Normal file
85
src/tests/utils/cleanup.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
// src/tests/utils/cleanup.ts
|
||||
import { getPool } from '../../services/db/connection.db';
|
||||
import { logger } from '../../services/logger.server';
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'path';
|
||||
|
||||
export interface TestResourceIds {
|
||||
userIds?: string[];
|
||||
flyerIds?: number[];
|
||||
storeIds?: number[];
|
||||
recipeIds?: number[];
|
||||
masterItemIds?: number[];
|
||||
budgetIds?: number[];
|
||||
}
|
||||
|
||||
/**
|
||||
* A robust cleanup utility for integration tests.
|
||||
* It deletes entities in the correct order to avoid foreign key violations.
|
||||
* It's designed to be called in an `afterAll` hook.
|
||||
*
|
||||
* @param ids An object containing arrays of IDs for each resource type to clean up.
|
||||
*/
|
||||
export const cleanupDb = async (ids: TestResourceIds) => {
|
||||
const pool = getPool();
|
||||
logger.info('[Test Cleanup] Starting database resource cleanup...');
|
||||
|
||||
const {
|
||||
userIds = [],
|
||||
flyerIds = [],
|
||||
storeIds = [],
|
||||
recipeIds = [],
|
||||
masterItemIds = [],
|
||||
budgetIds = [],
|
||||
} = ids;
|
||||
|
||||
try {
|
||||
// --- Stage 1: Delete most dependent records ---
|
||||
// These records depend on users, recipes, flyers, etc.
|
||||
if (userIds.length > 0) {
|
||||
await pool.query('DELETE FROM public.recipe_comments WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
await pool.query('DELETE FROM public.suggested_corrections WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
await pool.query('DELETE FROM public.shopping_lists WHERE user_id = ANY($1::uuid[])', [userIds]); // Assumes shopping_list_items cascades
|
||||
await pool.query('DELETE FROM public.user_watched_items WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
await pool.query('DELETE FROM public.user_achievements WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
await pool.query('DELETE FROM public.activity_log WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
}
|
||||
|
||||
// --- Stage 2: Delete parent records that other things depend on ---
|
||||
if (recipeIds.length > 0) {
|
||||
await pool.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [recipeIds]);
|
||||
}
|
||||
|
||||
// Flyers might be created by users, but we clean them up separately.
|
||||
// flyer_items should cascade from this.
|
||||
if (flyerIds.length > 0) {
|
||||
await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::bigint[])', [flyerIds]);
|
||||
}
|
||||
|
||||
// Stores are parents of flyers, so they come after.
|
||||
if (storeIds.length > 0) {
|
||||
await pool.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [storeIds]);
|
||||
}
|
||||
|
||||
// Master items are parents of flyer_items and watched_items.
|
||||
if (masterItemIds.length > 0) {
|
||||
await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = ANY($1::int[])', [masterItemIds]);
|
||||
}
|
||||
|
||||
// Budgets are parents of nothing, but depend on users.
|
||||
if (budgetIds.length > 0) {
|
||||
await pool.query('DELETE FROM public.budgets WHERE budget_id = ANY($1::int[])', [budgetIds]);
|
||||
}
|
||||
|
||||
// --- Stage 3: Delete the root user records ---
|
||||
if (userIds.length > 0) {
|
||||
const { rowCount } = await pool.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]);
|
||||
logger.info(`[Test Cleanup] Cleaned up ${rowCount} user(s).`);
|
||||
}
|
||||
|
||||
logger.info('[Test Cleanup] Finished database resource cleanup successfully.');
|
||||
} catch (error) {
|
||||
logger.error({ error }, '[Test Cleanup] CRITICAL: An error occurred during database cleanup.');
|
||||
throw error; // Re-throw to fail the test suite
|
||||
}
|
||||
};
|
||||
48
src/tests/utils/cleanupFiles.ts
Normal file
48
src/tests/utils/cleanupFiles.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
// src/tests/utils/cleanupFiles.ts
|
||||
import fs from 'node:fs/promises';
|
||||
import path from 'path';
|
||||
import { logger } from '../../services/logger.server';
|
||||
|
||||
/**
|
||||
* Safely cleans up files from the filesystem.
|
||||
* Designed to be used in `afterAll` or `afterEach` hooks in integration tests.
|
||||
*
|
||||
* @param filePaths An array of file paths to clean up.
|
||||
*/
|
||||
export const cleanupFiles = async (filePaths: string[]) => {
|
||||
if (!filePaths || filePaths.length === 0) {
|
||||
logger.info('[Test Cleanup] No file paths provided for cleanup.');
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`[Test Cleanup] Starting filesystem cleanup for ${filePaths.length} file(s)...`);
|
||||
|
||||
try {
|
||||
await Promise.all(
|
||||
filePaths.map(async (filePath) => {
|
||||
try {
|
||||
await fs.unlink(filePath);
|
||||
logger.debug(`[Test Cleanup] Successfully deleted file: ${filePath}`);
|
||||
} catch (err: any) {
|
||||
// Ignore "file not found" errors, but log other errors.
|
||||
if (err.code === 'ENOENT') {
|
||||
logger.debug(`[Test Cleanup] File not found, skipping: ${filePath}`);
|
||||
} else {
|
||||
logger.warn(
|
||||
{ err, filePath },
|
||||
'[Test Cleanup] Failed to clean up file from filesystem.',
|
||||
);
|
||||
}
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
logger.info('[Test Cleanup] Finished filesystem cleanup successfully.');
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ error },
|
||||
'[Test Cleanup] CRITICAL: An error occurred during filesystem cleanup.',
|
||||
);
|
||||
throw error; // Re-throw to fail the test suite if cleanup fails
|
||||
}
|
||||
};
|
||||
29
src/types/ai.ts
Normal file
29
src/types/ai.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
// src/types/ai.ts
|
||||
|
||||
import { z } from 'zod';
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
// This is moved here as it's directly related to the schemas.
|
||||
export const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for AI Response Validation ---
|
||||
// These schemas define the expected structure of data returned by the AI.
|
||||
// They are used for validation and type inference across multiple services.
|
||||
|
||||
export const ExtractedFlyerItemSchema = z.object({
|
||||
item: z.string().nullable(),
|
||||
price_display: z.string().nullable(),
|
||||
price_in_cents: z.number().nullable(),
|
||||
quantity: z.string().nullable(),
|
||||
category_name: z.string().nullable(),
|
||||
master_item_id: z.number().nullish(), // .nullish() allows null or undefined
|
||||
});
|
||||
|
||||
export const AiFlyerDataSchema = z.object({
|
||||
store_name: z.string().nullable(),
|
||||
valid_from: z.string().nullable(),
|
||||
valid_to: z.string().nullable(),
|
||||
store_address: z.string().nullable(),
|
||||
items: z.array(ExtractedFlyerItemSchema),
|
||||
});
|
||||
8
src/types/exif-parser.d.ts
vendored
Normal file
8
src/types/exif-parser.d.ts
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
// src/types/exif-parser.d.ts
|
||||
|
||||
/**
|
||||
* This declaration file provides a basic module definition for 'exif-parser',
|
||||
* which does not ship with its own TypeScript types. This allows TypeScript
|
||||
* to recognize it as a module and avoids "implicit any" errors.
|
||||
*/
|
||||
declare module 'exif-parser';
|
||||
102
src/utils/authUtils.test.ts
Normal file
102
src/utils/authUtils.test.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
// src/utils/authUtils.test.ts
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import zxcvbn from 'zxcvbn';
|
||||
import { validatePasswordStrength } from './authUtils';
|
||||
|
||||
// Mock the zxcvbn library to control its output for tests
|
||||
vi.mock('zxcvbn');
|
||||
|
||||
// Helper function to create a complete mock zxcvbn result, satisfying the type.
|
||||
const createMockZxcvbnResult = (
|
||||
score: 0 | 1 | 2 | 3 | 4,
|
||||
suggestions: string[] = [],
|
||||
): zxcvbn.ZXCVBNResult => ({
|
||||
score,
|
||||
feedback: {
|
||||
suggestions,
|
||||
warning: '',
|
||||
},
|
||||
// Add dummy values for the other required properties to satisfy the type.
|
||||
guesses: 1,
|
||||
guesses_log10: 1,
|
||||
crack_times_seconds: {
|
||||
online_throttling_100_per_hour: 1,
|
||||
online_no_throttling_10_per_second: 1,
|
||||
offline_slow_hashing_1e4_per_second: 1,
|
||||
offline_fast_hashing_1e10_per_second: 1,
|
||||
},
|
||||
crack_times_display: {
|
||||
online_throttling_100_per_hour: '1 second',
|
||||
online_no_throttling_10_per_second: '1 second',
|
||||
offline_slow_hashing_1e4_per_second: '1 second',
|
||||
offline_fast_hashing_1e10_per_second: '1 second',
|
||||
},
|
||||
sequence: [],
|
||||
calc_time: 1,
|
||||
});
|
||||
|
||||
describe('validatePasswordStrength', () => {
|
||||
it('should return invalid for a very weak password (score 0)', () => {
|
||||
// Arrange: Mock zxcvbn to return a score of 0 and specific feedback
|
||||
vi.mocked(zxcvbn).mockReturnValue(
|
||||
createMockZxcvbnResult(0, ['Add more words', 'Use a longer password']),
|
||||
);
|
||||
|
||||
// Act
|
||||
const result = validatePasswordStrength('password');
|
||||
|
||||
// Assert
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.feedback).toBe('Password is too weak. Add more words Use a longer password');
|
||||
});
|
||||
|
||||
it('should return invalid for a weak password (score 1)', () => {
|
||||
// Arrange: Mock zxcvbn to return a score of 1
|
||||
vi.mocked(zxcvbn).mockReturnValue(createMockZxcvbnResult(1, ['Avoid common words']));
|
||||
|
||||
// Act
|
||||
const result = validatePasswordStrength('password123');
|
||||
|
||||
// Assert
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.feedback).toBe('Password is too weak. Avoid common words');
|
||||
});
|
||||
|
||||
it('should return invalid for a medium password (score 2)', () => {
|
||||
// Arrange: Mock zxcvbn to return a score of 2
|
||||
vi.mocked(zxcvbn).mockReturnValue(
|
||||
createMockZxcvbnResult(2, ['Add another symbol or number']),
|
||||
);
|
||||
|
||||
// Act
|
||||
const result = validatePasswordStrength('Password123');
|
||||
|
||||
// Assert
|
||||
expect(result.isValid).toBe(false);
|
||||
expect(result.feedback).toBe('Password is too weak. Add another symbol or number');
|
||||
});
|
||||
|
||||
it('should return valid for a good password (score 3)', () => {
|
||||
// Arrange: Mock zxcvbn to return a score of 3 (the minimum required)
|
||||
vi.mocked(zxcvbn).mockReturnValue(createMockZxcvbnResult(3));
|
||||
|
||||
// Act
|
||||
const result = validatePasswordStrength('a-Strong-Password!');
|
||||
|
||||
// Assert
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.feedback).toBe('');
|
||||
});
|
||||
|
||||
it('should return valid for a very strong password (score 4)', () => {
|
||||
// Arrange: Mock zxcvbn to return a score of 4
|
||||
vi.mocked(zxcvbn).mockReturnValue(createMockZxcvbnResult(4));
|
||||
|
||||
// Act
|
||||
const result = validatePasswordStrength('a-Very-Strong-Password-123!');
|
||||
|
||||
// Assert
|
||||
expect(result.isValid).toBe(true);
|
||||
expect(result.feedback).toBe('');
|
||||
});
|
||||
});
|
||||
97
src/utils/fileUtils.test.ts
Normal file
97
src/utils/fileUtils.test.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
// src/utils/fileUtils.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, Mocked } from 'vitest';
|
||||
import fs from 'node:fs/promises';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { cleanupUploadedFile, cleanupUploadedFiles } from './fileUtils';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('node:fs/promises', () => ({
|
||||
default: {
|
||||
unlink: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../services/logger.server', () => ({
|
||||
logger: {
|
||||
warn: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Cast the mocked imports for type safety
|
||||
const mockedFs = fs as Mocked<typeof fs>;
|
||||
const mockedLogger = logger as Mocked<typeof logger>;
|
||||
|
||||
describe('fileUtils', () => {
|
||||
beforeEach(() => {
|
||||
// Clear mock history before each test
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('cleanupUploadedFile', () => {
|
||||
it('should call fs.unlink with the correct file path', async () => {
|
||||
const mockFile = { path: '/tmp/test-file.jpg' } as Express.Multer.File;
|
||||
mockedFs.unlink.mockResolvedValue(undefined);
|
||||
|
||||
await cleanupUploadedFile(mockFile);
|
||||
|
||||
expect(mockedFs.unlink).toHaveBeenCalledWith('/tmp/test-file.jpg');
|
||||
});
|
||||
|
||||
it('should not call fs.unlink if the file is undefined', async () => {
|
||||
await cleanupUploadedFile(undefined);
|
||||
expect(mockedFs.unlink).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should log a warning and not throw if fs.unlink fails', async () => {
|
||||
const mockFile = { path: '/tmp/non-existent-file.jpg' } as Express.Multer.File;
|
||||
const unlinkError = new Error('ENOENT: no such file or directory');
|
||||
mockedFs.unlink.mockRejectedValue(unlinkError);
|
||||
|
||||
// Use a try-catch to ensure no error is thrown from the function itself
|
||||
let didThrow = false;
|
||||
try {
|
||||
await cleanupUploadedFile(mockFile);
|
||||
} catch {
|
||||
didThrow = true;
|
||||
}
|
||||
|
||||
expect(didThrow).toBe(false);
|
||||
expect(mockedLogger.warn).toHaveBeenCalledWith(
|
||||
{ err: unlinkError, filePath: mockFile.path },
|
||||
'Failed to clean up uploaded file.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('cleanupUploadedFiles', () => {
|
||||
const mockFiles = [
|
||||
{ path: '/tmp/file1.jpg' },
|
||||
{ path: '/tmp/file2.png' },
|
||||
] as Express.Multer.File[];
|
||||
|
||||
it('should call fs.unlink for each file in the array', async () => {
|
||||
mockedFs.unlink.mockResolvedValue(undefined);
|
||||
|
||||
await cleanupUploadedFiles(mockFiles);
|
||||
|
||||
expect(mockedFs.unlink).toHaveBeenCalledTimes(2);
|
||||
expect(mockedFs.unlink).toHaveBeenCalledWith('/tmp/file1.jpg');
|
||||
expect(mockedFs.unlink).toHaveBeenCalledWith('/tmp/file2.png');
|
||||
});
|
||||
|
||||
it('should not call fs.unlink if the files array is undefined', async () => {
|
||||
await cleanupUploadedFiles(undefined);
|
||||
expect(mockedFs.unlink).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not call fs.unlink if the input is not an array', async () => {
|
||||
await cleanupUploadedFiles({ not: 'an array' } as unknown as Express.Multer.File[]);
|
||||
expect(mockedFs.unlink).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle an empty array gracefully', async () => {
|
||||
await cleanupUploadedFiles([]);
|
||||
expect(mockedFs.unlink).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user