huge linting fixes
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 58s
@@ -43,9 +43,23 @@ export default tseslint.config(
 ],
 },
 },
-// Relaxed rules for test files - see ADR-021 for rationale
+// Relaxed rules for test files and test setup - see ADR-021 for rationale
 {
-files: ['**/*.test.ts', '**/*.test.tsx', '**/*.spec.ts', '**/*.spec.tsx'],
+files: [
+  '**/*.test.ts',
+  '**/*.test.tsx',
+  '**/*.spec.ts',
+  '**/*.spec.tsx',
+  '**/tests/setup/**/*.ts',
+],
+rules: {
+  '@typescript-eslint/no-explicit-any': 'off',
+  '@typescript-eslint/no-unsafe-function-type': 'off',
+},
+},
+// Relaxed rules for type definition files - 'any' is often necessary for third-party library types
+{
+files: ['**/*.d.ts'],
 rules: {
 '@typescript-eslint/no-explicit-any': 'off',
 },
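For reference, a flat-config override like the one added above is just another entry in the exported array; ESLint applies its rules block only to files matching the globs. A minimal standalone sketch, assuming the typescript-eslint plugin is registered by an earlier entry exactly as in the real config:

// eslint.config.js (sketch) - scope relaxed rules to test files only.
// Assumes '@typescript-eslint' rules are made available by a preceding config entry.
export default [
  {
    files: ['**/*.test.ts', '**/*.test.tsx', '**/tests/setup/**/*.ts'],
    rules: {
      // Test code frequently stubs third-party types, so `any` is tolerated here.
      '@typescript-eslint/no-explicit-any': 'off',
    },
  },
];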
@@ -125,7 +125,11 @@ vi.mock('pdfjs-dist', () => ({
 // Mock the new config module
 vi.mock('./config', () => ({
 default: {
-app: { version: '20250101-1200:abc1234:1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
+app: {
+  version: '20250101-1200:abc1234:1.0.0',
+  commitMessage: 'Initial commit',
+  commitUrl: '#',
+},
 google: { mapsEmbedApiKey: 'mock-key' },
 },
 }));
@@ -450,7 +454,9 @@ describe('App Component', () => {
 fireEvent.click(screen.getByText('Open Voice Assistant'));

 console.log('[TEST DEBUG] Waiting for voice-assistant-mock');
-expect(await screen.findByTestId('voice-assistant-mock', {}, { timeout: 3000 })).toBeInTheDocument();
+expect(
+  await screen.findByTestId('voice-assistant-mock', {}, { timeout: 3000 }),
+).toBeInTheDocument();

 // Close modal
 fireEvent.click(screen.getByText('Close Voice Assistant'));
@@ -598,11 +604,15 @@ describe('App Component', () => {
 updateProfile: vi.fn(),
 });
 // Mock the login function to simulate a successful login. Signature: (token, profile)
-const mockLoginSuccess = vi.fn(async (_token: string, _profile?: UserProfile) => {
+const _mockLoginSuccess = vi.fn(async (_token: string, _profile?: UserProfile) => {
 // Simulate fetching profile after login
 const profileResponse = await mockedApiClient.getAuthenticatedUserProfile();
 const userProfileData: UserProfile = await profileResponse.json();
-mockUseAuth.mockReturnValue({ ...mockUseAuth(), userProfile: userProfileData, authStatus: 'AUTHENTICATED' });
+mockUseAuth.mockReturnValue({
+  ...mockUseAuth(),
+  userProfile: userProfileData,
+  authStatus: 'AUTHENTICATED',
+});
 });

 console.log('[TEST DEBUG] Rendering App');
@@ -22,7 +22,7 @@ vi.mock('../config', () => ({
 },
 }));

-const mockedApiClient = vi.mocked(apiClient);
+const _mockedApiClient = vi.mocked(apiClient);
 const mockedUseAppInitialization = vi.mocked(useAppInitialization);
 const mockedUseModal = vi.mocked(useModal);

@@ -70,7 +70,7 @@ describe('FlyerUploader', () => {

 beforeEach(() => {
 // Disable react-query's online manager to prevent it from interfering with fake timers
-onlineManager.setEventListener((setOnline) => {
+onlineManager.setEventListener((_setOnline) => {
 return () => {};
 });
 console.log(`\n--- [TEST LOG] ---: Starting test: "${expect.getState().currentTestName}"`);
@@ -130,11 +130,14 @@ describe('FlyerUploader', () => {

 try {
 // The polling interval is 3s, so we wait for a bit longer.
-await waitFor(() => {
+await waitFor(
+  () => {
 const calls = mockedAiApiClient.getJobStatus.mock.calls.length;
 console.log(`--- [TEST LOG] ---: 10. waitFor check: getJobStatus calls = ${calls}`);
 expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(2);
-}, { timeout: 4000 });
+  },
+  { timeout: 4000 },
+);
 console.log('--- [TEST LOG] ---: 11. SUCCESS: Second poll confirmed.');
 } catch (error) {
 console.error('--- [TEST LOG] ---: 11. ERROR: waitFor for second poll timed out.');
@@ -202,7 +205,8 @@ describe('FlyerUploader', () => {
 '--- [TEST LOG] ---: 8a. waitFor check: Waiting for completion text and job status count.',
 );
 // Wait for the second poll to occur and the UI to update.
-await waitFor(() => {
+await waitFor(
+  () => {
 console.log(
 `--- [TEST LOG] ---: 8b. waitFor interval: calls=${
 mockedAiApiClient.getJobStatus.mock.calls.length
@@ -211,7 +215,9 @@ describe('FlyerUploader', () => {
 expect(
 screen.getByText('Processing complete! Redirecting to flyer 42...'),
 ).toBeInTheDocument();
-}, { timeout: 4000 });
+  },
+  { timeout: 4000 },
+);
 console.log('--- [TEST LOG] ---: 9. SUCCESS: Completion message found.');
 } catch (error) {
 console.error('--- [TEST LOG] ---: 9. ERROR: waitFor for completion message timed out.');
@@ -234,7 +240,10 @@ describe('FlyerUploader', () => {
 mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail' });
 // The getJobStatus function throws a specific error when the job fails,
 // which is then caught by react-query and placed in the `error` state.
-const jobFailedError = new aiApiClientModule.JobFailedError('AI model exploded', 'UNKNOWN_ERROR');
+const jobFailedError = new aiApiClientModule.JobFailedError(
+  'AI model exploded',
+  'UNKNOWN_ERROR',
+);
 mockedAiApiClient.getJobStatus.mockRejectedValue(jobFailedError);

 console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
@@ -285,7 +294,10 @@ describe('FlyerUploader', () => {
 await screen.findByText('Working...');

 // Wait for the failure UI
-await waitFor(() => expect(screen.getByText(/Polling failed: Fatal Error/i)).toBeInTheDocument(), { timeout: 4000 });
+await waitFor(
+  () => expect(screen.getByText(/Polling failed: Fatal Error/i)).toBeInTheDocument(),
+  { timeout: 4000 },
+);
 });

 it('should stop polling for job status when the component unmounts', async () => {
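The reformatted calls above all use Testing Library's two-argument waitFor(callback, options) form, where the options object raises the polling timeout above its default of roughly one second. A minimal hedged sketch of that call shape, with a placeholder assertion and assuming jest-dom matchers are registered as in this test suite:

import { waitFor, screen } from '@testing-library/react';
import { expect } from 'vitest';

// Polls the callback until it stops throwing, or fails after the custom timeout.
export async function waitForCompletionMessage(): Promise<void> {
  await waitFor(
    () => {
      expect(screen.getByText(/processing complete/i)).toBeInTheDocument();
    },
    { timeout: 4000 }, // the polling tests above need longer than the default
  );
}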
@@ -350,7 +362,9 @@ describe('FlyerUploader', () => {
 console.log('--- [TEST LOG] ---: 4. AWAITING duplicate flyer message...');
 // With the fix, the duplicate error message and the link are combined into a single paragraph.
 // We now look for this combined message.
-const errorMessage = await screen.findByText(/This flyer has already been processed. You can view it here:/i);
+const errorMessage = await screen.findByText(
+  /This flyer has already been processed. You can view it here:/i,
+);
 expect(errorMessage).toBeInTheDocument();
 console.log('--- [TEST LOG] ---: 5. SUCCESS: Duplicate message found.');
 } catch (error) {
@@ -27,10 +27,9 @@ export const VoiceAssistant: React.FC<VoiceAssistantProps> = ({ isOpen, onClose
 const [modelTranscript, setModelTranscript] = useState('');
 const [history, setHistory] = useState<{ speaker: 'user' | 'model'; text: string }[]>([]);

-// Use `any` for the session promise ref to avoid type conflicts with the underlying Google AI SDK,
-// which may have a more complex session object type. The `LiveSession` interface is used
-// conceptually in callbacks, but `any` provides flexibility for the initial assignment.
-const sessionPromiseRef = useRef<any | null>(null);
+// The session promise ref holds the promise returned by startVoiceSession.
+// We type it as Promise<LiveSession> to allow calling .then() with proper typing.
+const sessionPromiseRef = useRef<Promise<LiveSession> | null>(null);
 const mediaStreamRef = useRef<MediaStream | null>(null);
 const audioContextRef = useRef<AudioContext | null>(null);
 const scriptProcessorRef = useRef<ScriptProcessorNode | null>(null);
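The change above replaces an any-typed ref with useRef<Promise<LiveSession> | null>, so later .then() calls stay typed. A small sketch of the pattern under an assumed LiveSession shape and an assumed startVoiceSession signature (both are illustrations, not the project's actual definitions):

import { useRef } from 'react';

// Assumed minimal shape; the real LiveSession interface in the codebase may differ.
interface LiveSession {
  sendRealtimeInput(chunk: { data: string; mimeType: string }): void;
  close(): void;
}

declare function startVoiceSession(callbacks: Record<string, unknown>): Promise<LiveSession>;

export function useSessionPromise() {
  // Holding the promise (not the resolved session) lets callbacks queue work
  // before the connection finishes opening.
  const sessionPromiseRef = useRef<Promise<LiveSession> | null>(null);

  const start = (callbacks: Record<string, unknown>) => {
    sessionPromiseRef.current = startVoiceSession(callbacks);
  };

  const send = (data: string) => {
    // .then() is fully typed because the ref is Promise<LiveSession>, not any.
    sessionPromiseRef.current?.then((session) =>
      session.sendRealtimeInput({ data, mimeType: 'audio/pcm' }),
    );
  };

  return { start, send };
}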
@@ -151,7 +150,7 @@ export const VoiceAssistant: React.FC<VoiceAssistantProps> = ({ isOpen, onClose
 },
 };

-sessionPromiseRef.current = startVoiceSession(callbacks);
+sessionPromiseRef.current = startVoiceSession(callbacks) as Promise<LiveSession>;
 } catch (e) {
 // We check if the caught object is an instance of Error to safely access its message property.
 // This avoids using 'any' and handles different types of thrown values.
@@ -70,7 +70,7 @@ describe('useAppInitialization Hook', () => {
 });
 // Mock matchMedia
 Object.defineProperty(window, 'matchMedia', {
-value: vi.fn().mockImplementation((query) => ({
+value: vi.fn().mockImplementation((_query) => ({
 matches: false, // default to light mode
 })),
 writable: true,
@@ -98,7 +98,8 @@ describe('useAppInitialization Hook', () => {

 it('should call navigate to clean the URL after processing a token', async () => {
 renderHook(() => useAppInitialization(), {
-wrapper: (props) => wrapper({ ...props, initialEntries: ['/some/path?googleAuthToken=test-token'] }),
+wrapper: (props) =>
+  wrapper({ ...props, initialEntries: ['/some/path?googleAuthToken=test-token'] }),
 });
 await waitFor(() => {
 expect(mockLogin).toHaveBeenCalledWith('test-token');
@@ -106,14 +107,14 @@ describe('useAppInitialization Hook', () => {
 expect(mockNavigate).toHaveBeenCalledWith('/some/path', { replace: true });
 });

-it("should open \"What's New\" modal if version is new", () => {
+it('should open "What\'s New" modal if version is new', () => {
 vi.spyOn(window.localStorage, 'getItem').mockReturnValue('1.0.0');
 renderHook(() => useAppInitialization(), { wrapper });
 expect(mockOpenModal).toHaveBeenCalledWith('whatsNew');
 expect(window.localStorage.setItem).toHaveBeenCalledWith('lastSeenVersion', '1.0.1');
 });

-it("should not open \"What's New\" modal if version is the same", () => {
+it('should not open "What\'s New" modal if version is the same', () => {
 vi.spyOn(window.localStorage, 'getItem').mockReturnValue('1.0.1');
 renderHook(() => useAppInitialization(), { wrapper });
 expect(mockOpenModal).not.toHaveBeenCalled();
@@ -153,11 +153,12 @@ describe('Passport Configuration', () => {
 logger,
 );
 // The strategy now just strips auth fields.
+// SECURITY: password_hash and refresh_token are intentionally discarded.
 const {
-password_hash,
-failed_login_attempts,
-last_failed_login,
-refresh_token,
+password_hash: _password_hash,
+failed_login_attempts: _failed_login_attempts,
+last_failed_login: _last_failed_login,
+refresh_token: _refresh_token,
 ...expectedUserProfile
 } = mockAuthableProfile;
 expect(done).toHaveBeenCalledWith(null, expectedUserProfile);
@@ -141,13 +141,20 @@ passport.use(
 // sensitive fields before passing the profile to the session.
 // The `...userProfile` rest parameter will contain the clean UserProfile object,
 // which no longer has a top-level email property.
+// SECURITY: password_hash and refresh_token are intentionally discarded - never send to client.
 const {
-password_hash,
+password_hash: _password_hash,
 failed_login_attempts,
 last_failed_login,
-refresh_token,
+refresh_token: _refresh_token,
 ...cleanUserProfile
 } = userprofile;

+// Log login metadata for audit purposes (non-sensitive fields only)
+req.log.debug(
+  { failed_login_attempts, last_failed_login },
+  'User login metadata stripped from session',
+);
 return done(null, cleanUserProfile);
 } catch (err: unknown) {
 req.log.error({ error: err }, 'Error during local authentication strategy:');
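Several hunks in this commit rename destructured-but-unused fields to an underscore-prefixed alias. With the no-unused-vars rule configured to ignore names matching ^_, the rest spread still omits the sensitive keys while the linter stays quiet. A hedged sketch of the pattern (field names mirror the hunk; the profile type here is an assumption for illustration):

interface AuthableProfile {
  user_id: string;
  email: string;
  password_hash: string;
  failed_login_attempts: number;
  last_failed_login: string | null;
  refresh_token: string | null;
}

// Strip auth-only fields before the profile ever reaches a session or response.
// The `_`-prefixed aliases mark the bindings as intentionally unused.
export function toClientProfile(profile: AuthableProfile) {
  const {
    password_hash: _password_hash,
    refresh_token: _refresh_token,
    ...clientProfile
  } = profile;
  return clientProfile; // keeps user_id, email, failed_login_attempts, last_failed_login
}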
@@ -269,7 +276,9 @@ const jwtOptions = {

 // --- DEBUG LOGGING FOR JWT SECRET ---
 if (!JWT_SECRET) {
-logger.fatal('[Passport] CRITICAL: JWT_SECRET is missing or empty in environment variables! JwtStrategy will fail.');
+logger.fatal(
+  '[Passport] CRITICAL: JWT_SECRET is missing or empty in environment variables! JwtStrategy will fail.',
+);
 } else {
 logger.info(`[Passport] JWT_SECRET loaded successfully (length: ${JWT_SECRET.length}).`);
 }
@@ -146,7 +146,7 @@ describe('flyerDbInsertSchema', () => {
 });

 it('should fail if store_id is missing', () => {
-const { store_id, ...invalid } = validDbFlyer;
+const { store_id: _store_id, ...invalid } = validDbFlyer;
 const result = flyerDbInsertSchema.safeParse(invalid);
 expect(result.success).toBe(false);
 });
@@ -24,7 +24,7 @@ vi.mock('./logger.client', () => ({
 }));

 // 2. Mock ./apiClient to simply pass calls through to the global fetch.
-vi.mock('./apiClient', async (importOriginal) => {
+vi.mock('./apiClient', async () => {
 // This is the core logic we want to preserve: it calls the global fetch
 // which is then intercepted by MSW.
 const apiFetch = (
@@ -75,11 +75,19 @@ vi.mock('./apiClient', async (importOriginal) => {
 authedPost: <T>(endpoint: string, body: T, options: import('./apiClient').ApiOptions = {}) => {
 return apiFetch(
 endpoint,
-{ method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(body) },
+{
+  method: 'POST',
+  headers: { 'Content-Type': 'application/json' },
+  body: JSON.stringify(body),
+},
 options,
 );
 },
-authedPostForm: (endpoint: string, formData: FormData, options: import('./apiClient').ApiOptions = {}) => {
+authedPostForm: (
+  endpoint: string,
+  formData: FormData,
+  options: import('./apiClient').ApiOptions = {},
+) => {
 return apiFetch(endpoint, { method: 'POST', body: formData }, options);
 },
 // Add a mock for ApiOptions to satisfy the compiler
@@ -322,7 +330,10 @@ describe('AI API Client (Network Mocking with MSW)', () => {
 it('should throw a generic error with status text if the non-ok API response is not valid JSON', async () => {
 server.use(
 http.get(`http://localhost/api/ai/jobs/${jobId}/status`, () => {
-return HttpResponse.text('Gateway Timeout', { status: 504, statusText: 'Gateway Timeout' });
+return HttpResponse.text('Gateway Timeout', {
+  status: 504,
+  statusText: 'Gateway Timeout',
+});
 }),
 );
 await expect(aiApiClient.getJobStatus(jobId)).rejects.toThrow('Gateway Timeout');
@@ -1,5 +1,5 @@
 // src/services/aiService.server.test.ts
-import { describe, it, expect, vi, beforeEach, afterEach, type Mock } from 'vitest';
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
 import type { Job } from 'bullmq';
 import { createMockLogger } from '../tests/utils/mockLogger';
 import type { Logger } from 'pino';
@@ -9,7 +9,6 @@ import {
 AIService,
 aiService as aiServiceSingleton,
 DuplicateFlyerError,
-type RawFlyerItem,
 } from './aiService.server';
 import {
 createMockMasterGroceryItem,
@@ -30,14 +29,15 @@ import { logger as mockLoggerInstance } from './logger.server';
 // Explicitly unmock the service under test to ensure we import the real implementation.
 vi.unmock('./aiService.server');

-const { mockGenerateContent, mockToBuffer, mockExtract, mockSharp, mockAdminLogActivity } = vi.hoisted(() => {
+const { mockGenerateContent, mockToBuffer, mockExtract, mockSharp, mockAdminLogActivity } =
+  vi.hoisted(() => {
 const mockGenerateContent = vi.fn();
 const mockToBuffer = vi.fn();
 const mockExtract = vi.fn(() => ({ toBuffer: mockToBuffer }));
 const mockSharp = vi.fn(() => ({ extract: mockExtract }));
 const mockAdminLogActivity = vi.fn();
 return { mockGenerateContent, mockToBuffer, mockExtract, mockSharp, mockAdminLogActivity };
 });

 // Mock sharp, as it's a direct dependency of the service.
 vi.mock('sharp', () => ({
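The vi.hoisted() wrapper above exists because vi.mock factories are hoisted to the top of the file; vi.hoisted runs its callback at that same early stage, so the mock functions it returns can be referenced safely inside those factories. A minimal hedged sketch (the module path is a placeholder, not a real file in this repository):

import { vi, it, expect } from 'vitest';

// Runs before the hoisted vi.mock factory below, so mockGenerateContent exists in time.
const { mockGenerateContent } = vi.hoisted(() => ({
  mockGenerateContent: vi.fn(),
}));

// Hypothetical module path for illustration only.
vi.mock('./aiClient', () => ({
  generateContent: mockGenerateContent,
}));

it('uses the hoisted mock', async () => {
  mockGenerateContent.mockResolvedValue({ text: 'ok' });
  const { generateContent } = await import('./aiClient');
  await expect(generateContent()).resolves.toEqual({ text: 'ok' });
});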
@@ -151,6 +151,7 @@ describe('AI Service (Server)', () => {
 const resultEmpty = AiFlyerDataSchema.safeParse(dataWithEmpty);

 expect(resultNull.success).toBe(false);
+expect(resultEmpty.success).toBe(false);
 // Null checks fail with a generic type error, which is acceptable.
 });
 });
@@ -275,7 +276,7 @@ describe('AI Service (Server)', () => {
 };
 // The adapter strips `useLiteModels` before calling the underlying client,
 // so we prepare the expected request shape for our assertions.
-const { useLiteModels, ...apiReq } = request;
+const { useLiteModels: _useLiteModels, ...apiReq } = request;

 // Act
 const result = await (serviceWithFallback as any).aiClient.generateContent(request);
@@ -291,6 +292,68 @@ describe('AI Service (Server)', () => {
 });
 });

+it('should use full models when useLiteModels is explicitly false', async () => {
+  // Arrange
+  const { AIService } = await import('./aiService.server');
+  const { logger } = await import('./logger.server');
+  const serviceWithFallback = new AIService(logger);
+  const models = (serviceWithFallback as any).models;
+  const models_lite = (serviceWithFallback as any).models_lite;
+  const successResponse = { text: 'Success from full model', candidates: [] };
+
+  mockGenerateContent.mockResolvedValue(successResponse);
+
+  const request = {
+    contents: [{ parts: [{ text: 'test prompt' }] }],
+    useLiteModels: false,
+  };
+  const { useLiteModels: _useLiteModels, ...apiReq } = request;
+
+  // Act
+  const result = await (serviceWithFallback as any).aiClient.generateContent(request);
+
+  // Assert
+  expect(result).toEqual(successResponse);
+  expect(mockGenerateContent).toHaveBeenCalledTimes(1);
+
+  // Check that the first model from the FULL list was used, not lite
+  expect(mockGenerateContent).toHaveBeenCalledWith({
+    model: models[0],
+    ...apiReq,
+  });
+  // Verify it's actually different from the lite list
+  expect(models[0]).not.toBe(models_lite[0]);
+});
+
+it('should use full models when useLiteModels is omitted (default behavior)', async () => {
+  // Arrange
+  const { AIService } = await import('./aiService.server');
+  const { logger } = await import('./logger.server');
+  const serviceWithFallback = new AIService(logger);
+  const models = (serviceWithFallback as any).models;
+  const successResponse = { text: 'Success from full model', candidates: [] };
+
+  mockGenerateContent.mockResolvedValue(successResponse);
+
+  // Note: useLiteModels is NOT included in the request
+  const request = {
+    contents: [{ parts: [{ text: 'test prompt' }] }],
+  };
+
+  // Act
+  const result = await (serviceWithFallback as any).aiClient.generateContent(request);
+
+  // Assert
+  expect(result).toEqual(successResponse);
+  expect(mockGenerateContent).toHaveBeenCalledTimes(1);
+
+  // Check that the first model from the full list was used
+  expect(mockGenerateContent).toHaveBeenCalledWith({
+    model: models[0],
+    ...request,
+  });
+});
+
 it('should try the next model if the first one fails with a quota error', async () => {
 // Arrange
 const { AIService } = await import('./aiService.server');
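The two new tests above pin down the adapter's model-selection behavior: an explicit useLiteModels: false or an omitted flag must route to the full model list, and the flag itself must be stripped before the underlying client is called. A hedged sketch of that selection-plus-fallback loop, where the names and the client interface are assumptions based on the tests rather than the actual implementation:

interface GenerateRequest {
  contents: unknown[];
  useLiteModels?: boolean;
}

interface UnderlyingClient {
  generateContent(req: { model: string } & Omit<GenerateRequest, 'useLiteModels'>): Promise<unknown>;
}

export async function generateWithFallback(
  client: UnderlyingClient,
  models: string[],
  modelsLite: string[],
  request: GenerateRequest,
): Promise<unknown> {
  // Choose the list first, then strip the routing flag from the outgoing request.
  const candidates = request.useLiteModels ? modelsLite : models;
  const { useLiteModels: _useLiteModels, ...apiReq } = request;

  let lastError: unknown;
  for (const model of candidates) {
    try {
      return await client.generateContent({ model, ...apiReq });
    } catch (error) {
      lastError = error; // try the next model (quota/overload classification omitted here)
    }
  }
  throw lastError;
}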
@@ -314,13 +377,15 @@ describe('AI Service (Server)', () => {
 expect(mockGenerateContent).toHaveBeenCalledTimes(2);

 // Check first call
-expect(mockGenerateContent).toHaveBeenNthCalledWith(1, { // The first model in the list
+expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
+  // The first model in the list
 model: models[0],
 ...request,
 });

 // Check second call
-expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { // The second model in the list
+expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
+  // The second model in the list
 model: models[1],
 ...request,
 });
@@ -340,6 +405,7 @@ describe('AI Service (Server)', () => {
 const { logger } = await import('./logger.server');
 const serviceWithFallback = new AIService(logger);
 const models = (serviceWithFallback as any).models;
+const firstModel = models[0];

 const nonRetriableError = new Error('Invalid API Key');
 mockGenerateContent.mockRejectedValueOnce(nonRetriableError);
@@ -353,8 +419,8 @@ describe('AI Service (Server)', () => {

 expect(mockGenerateContent).toHaveBeenCalledTimes(1);
 expect(logger.error).toHaveBeenCalledWith(
-{ error: nonRetriableError }, // The first model in the list is now 'gemini-2.5-flash'
-`[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
+{ error: nonRetriableError },
+`[AIService Adapter] Model '${firstModel}' failed with a non-retriable error.`,
 );
 });

@@ -407,7 +473,9 @@ describe('AI Service (Server)', () => {
 // Access private property for testing purposes
 const modelsLite = (serviceWithFallback as any).models_lite as string[];
 // Use a quota error to trigger the fallback logic for each model
-const errors = modelsLite.map((model, i) => new Error(`Quota error for lite model ${model} (${i})`));
+const errors = modelsLite.map(
+  (model, i) => new Error(`Quota error for lite model ${model} (${i})`),
+);
 const lastError = errors[errors.length - 1];

 // Dynamically setup mocks
@@ -421,7 +489,7 @@ describe('AI Service (Server)', () => {
 };
 // The adapter strips `useLiteModels` before calling the underlying client,
 // so we prepare the expected request shape for our assertions.
-const { useLiteModels, ...apiReq } = request;
+const { useLiteModels: _useLiteModels, ...apiReq } = request;

 // Act & Assert
 // Expect the entire operation to reject with the error from the very last model attempt.
@@ -454,9 +522,7 @@ describe('AI Service (Server)', () => {
 const error1 = new Error('Quota exceeded for model 1');
 const successResponse = { text: 'Success', candidates: [] };

-mockGenerateContent
-.mockRejectedValueOnce(error1)
-.mockResolvedValueOnce(successResponse);
+mockGenerateContent.mockRejectedValueOnce(error1).mockResolvedValueOnce(successResponse);

 const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };

@@ -505,7 +571,9 @@ describe('AI Service (Server)', () => {
 expect(mockGenerateContent).toHaveBeenCalledTimes(2);
 expect(mockGenerateContent).toHaveBeenNthCalledWith(1, { model: models[0], ...request });
 expect(mockGenerateContent).toHaveBeenNthCalledWith(2, { model: models[1], ...request });
-expect(logger.warn).toHaveBeenCalledWith(expect.stringContaining(`Model '${models[0]}' failed due to quota/rate limit/overload.`));
+expect(logger.warn).toHaveBeenCalledWith(
+  expect.stringContaining(`Model '${models[0]}' failed due to quota/rate limit/overload.`),
+);
 });

 it('should fail immediately on a 400 Bad Request error without retrying', async () => {
@@ -521,7 +589,9 @@ describe('AI Service (Server)', () => {
 const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };

 // Act & Assert
-await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(nonRetriableError);
+await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
+  nonRetriableError,
+);

 expect(mockGenerateContent).toHaveBeenCalledTimes(1);
 expect(mockGenerateContent).toHaveBeenCalledWith({ model: models[0], ...request });
@@ -1054,8 +1124,9 @@ describe('AI Service (Server)', () => {
 filename: 'upload.jpg',
 originalname: 'orig.jpg',
 } as Express.Multer.File; // This was a duplicate, fixed.
-const mockProfile = createMockUserProfile({ user: { user_id: 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11' } });
+const mockProfile = createMockUserProfile({
+  user: { user_id: 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11' },
+});

 beforeEach(() => {
 // Default success mocks. Use createMockFlyer for a more complete mock.
@@ -1086,26 +1157,18 @@ describe('AI Service (Server)', () => {
 it('should throw ValidationError if checksum is missing', async () => {
 const body = { data: JSON.stringify({}) }; // No checksum
 await expect(
-aiServiceInstance.processLegacyFlyerUpload(
-mockFile,
-body,
-mockProfile,
-mockLoggerInstance,
-),
+aiServiceInstance.processLegacyFlyerUpload(mockFile, body, mockProfile, mockLoggerInstance),
 ).rejects.toThrow(ValidationError);
 });

 it('should throw DuplicateFlyerError if checksum exists', async () => {
-vi.mocked(dbModule.flyerRepo.findFlyerByChecksum).mockResolvedValue(createMockFlyer({ flyer_id: 55 }));
+vi.mocked(dbModule.flyerRepo.findFlyerByChecksum).mockResolvedValue(
+  createMockFlyer({ flyer_id: 55 }),
+);
 const body = { checksum: 'dup-sum' };

 await expect(
-aiServiceInstance.processLegacyFlyerUpload(
-mockFile,
-body,
-mockProfile,
-mockLoggerInstance,
-),
+aiServiceInstance.processLegacyFlyerUpload(mockFile, body, mockProfile, mockLoggerInstance),
 ).rejects.toThrow(DuplicateFlyerError);
 });

@@ -1225,12 +1288,7 @@ describe('AI Service (Server)', () => {

 // This will eventually throw ValidationError because checksum won't be found
 await expect(
-aiServiceInstance.processLegacyFlyerUpload(
-mockFile,
-body,
-mockProfile,
-mockLoggerInstance,
-),
+aiServiceInstance.processLegacyFlyerUpload(mockFile, body, mockProfile, mockLoggerInstance),
 ).rejects.toThrow(ValidationError);

 // Verify that the error was caught and logged using errMsg logic
@@ -1241,19 +1299,17 @@ describe('AI Service (Server)', () => {
 });

 it('should log and re-throw the original error if the database transaction fails', async () => {
-const body = { checksum: 'legacy-fail-checksum', extractedData: { store_name: 'Fail Store' } };
+const body = {
+  checksum: 'legacy-fail-checksum',
+  extractedData: { store_name: 'Fail Store' },
+};
 const dbError = new Error('DB transaction failed');

 // Mock withTransaction to fail
 vi.mocked(withTransaction).mockRejectedValue(dbError);

 await expect(
-aiServiceInstance.processLegacyFlyerUpload(
-mockFile,
-body,
-mockProfile,
-mockLoggerInstance,
-),
+aiServiceInstance.processLegacyFlyerUpload(mockFile, body, mockProfile, mockLoggerInstance),
 ).rejects.toThrow(dbError);

 // Verify the service-level error logging
@@ -18,7 +18,7 @@ import type {
 FlyerInsert,
 Flyer,
 } from '../types';
-import { DatabaseError, FlyerProcessingError } from './processingErrors';
+import { FlyerProcessingError } from './processingErrors';
 import * as db from './db/index.db';
 import { flyerQueue } from './queueService.server';
 import type { Job } from 'bullmq';
@@ -28,10 +28,7 @@ import { generateFlyerIcon, processAndSaveImage } from '../utils/imageProcessor'
 import { AdminRepository } from './db/admin.db';
 import path from 'path';
 import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError
-import {
-AiFlyerDataSchema,
-ExtractedFlyerItemSchema,
-} from '../types/ai'; // Import consolidated schemas
+import { AiFlyerDataSchema, ExtractedFlyerItemSchema } from '../types/ai'; // Import consolidated schemas

 interface FlyerProcessPayload extends Partial<ExtractedCoreData> {
 checksum?: string;
@@ -76,7 +73,10 @@ interface IAiClient {
 export type RawFlyerItem = z.infer<typeof ExtractedFlyerItemSchema>;

 export class DuplicateFlyerError extends FlyerProcessingError {
-constructor(message: string, public flyerId: number) {
+constructor(
+  message: string,
+  public flyerId: number,
+) {
 super(message, 'DUPLICATE_FLYER', message);
 }
 }
@@ -87,7 +87,7 @@ export class AIService {
 private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
 private logger: Logger;

 // OPTIMIZED: Flyer Image Processing (Vision + Long Output)
 // PRIORITIES:
 // 1. Output Limit: Must be 65k+ (Gemini 2.5/3.0) to avoid cutting off data.
 // 2. Intelligence: 'Pro' models handle messy layouts better.
@@ -109,7 +109,7 @@ export class AIService {

 // --- TIER E: Last Resorts (Lower Capacity/Local) ---
 'gemma-3-27b-it', // Open model fallback.
-'gemini-2.0-flash-exp' // Exp fallback. WARNING: 8k output limit. Good for small flyers only.
+'gemini-2.0-flash-exp', // Exp fallback. WARNING: 8k output limit. Good for small flyers only.
 ];

 // OPTIMIZED: Simple Text Tasks (Recipes, Shopping Lists, Summaries)
@@ -118,22 +118,22 @@ export class AIService {
 // 2. Output Limit: The 8k limit of Gemini 2.0 is perfectly fine here.
 private readonly models_lite = [
 // --- Best Value (Smart + Cheap) ---
-"gemini-2.5-flash-lite", // Current generation efficiency king.
+'gemini-2.5-flash-lite', // Current generation efficiency king.

 // --- The "Recycled" Gemini 2.0 Models (Perfect for Text) ---
-"gemini-2.0-flash-lite-001", // Extremely cheap, very capable for text.
-"gemini-2.0-flash-001", // Smarter than Lite, good for complex recipes.
+'gemini-2.0-flash-lite-001', // Extremely cheap, very capable for text.
+'gemini-2.0-flash-001', // Smarter than Lite, good for complex recipes.

 // --- Open Models (Good for simple categorization) ---
-"gemma-3-12b-it", // Solid reasoning for an open model.
-"gemma-3-4b-it", // Very fast.
+'gemma-3-12b-it', // Solid reasoning for an open model.
+'gemma-3-4b-it', // Very fast.

 // --- Quota Fallbacks (Experimental/Preview) ---
-"gemini-2.0-flash-exp", // Use this separate quota bucket if others are exhausted.
+'gemini-2.0-flash-exp', // Use this separate quota bucket if others are exhausted.

 // --- Edge/Nano Models (Simple string manipulation only) ---
-"gemma-3n-e4b-it", // Corrected name from JSON
-"gemma-3n-e2b-it" // Corrected name from JSON
+'gemma-3n-e4b-it', // Corrected name from JSON
+'gemma-3n-e2b-it', // Corrected name from JSON
 ];

 // Helper to return valid mock data for tests
@@ -258,7 +258,7 @@ export class AIService {
 } else {
 try {
 if (typeof error === 'object' && error !== null && 'message' in error) {
-errorMsg = String((error as any).message);
+errorMsg = String((error as { message: unknown }).message);
 } else {
 errorMsg = JSON.stringify(error);
 }
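The cast above narrows the caught value to { message: unknown } instead of any before stringifying it. The same idea as a small standalone helper, offered as a sketch rather than the project's actual errMsg implementation:

// Extract a printable message from an unknown error without resorting to `any`.
export function errorMessage(error: unknown): string {
  if (error instanceof Error) return error.message;
  if (typeof error === 'object' && error !== null && 'message' in error) {
    return String((error as { message: unknown }).message);
  }
  try {
    return JSON.stringify(error);
  } catch {
    return String(error);
  }
}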
@@ -391,7 +391,9 @@ export class AIService {
 );

 if (!responseText) {
-logger.warn('[_parseJsonFromAiResponse] Response text is empty or undefined. Aborting parsing.');
+logger.warn(
+  '[_parseJsonFromAiResponse] Response text is empty or undefined. Aborting parsing.',
+);
 return null;
 }

@@ -407,7 +409,9 @@ export class AIService {
 );
 jsonString = markdownMatch[2].trim();
 } else {
-logger.debug('[_parseJsonFromAiResponse] No markdown code block found. Using raw response text.');
+logger.debug(
+  '[_parseJsonFromAiResponse] No markdown code block found. Using raw response text.',
+);
 jsonString = responseText;
 }

@@ -537,9 +541,15 @@ export class AIService {
 submitterIp?: string,
 userProfileAddress?: string,
 logger: Logger = this.logger,
-): Promise<{
-store_name: string | null; valid_from: string | null; valid_to: string | null; store_address: string | null; items: z.infer<typeof ExtractedFlyerItemSchema>[];
-} & z.infer<typeof AiFlyerDataSchema>> {
+): Promise<
+  {
+    store_name: string | null;
+    valid_from: string | null;
+    valid_to: string | null;
+    store_address: string | null;
+    items: z.infer<typeof ExtractedFlyerItemSchema>[];
+  } & z.infer<typeof AiFlyerDataSchema>
+> {
 logger.info(
 `[extractCoreDataFromFlyerImage] Entering method with ${imagePaths.length} image(s).`,
 );
@@ -761,8 +771,7 @@ export class AIService {
 */
 }

-async enqueueFlyerProcessing(
+async enqueueFlyerProcessing(
 file: Express.Multer.File,
 checksum: string,
 userProfile: UserProfile | undefined,
@@ -821,15 +830,13 @@ async enqueueFlyerProcessing(
 baseUrl: baseUrl,
 });

-logger.info(
-`Enqueued flyer for processing. File: ${file.originalname}, Job ID: ${job.id}`,
-);
+logger.info(`Enqueued flyer for processing. File: ${file.originalname}, Job ID: ${job.id}`);

 return job;
 }

 private _parseLegacyPayload(
-body: any,
+body: unknown,
 logger: Logger,
 ): { parsed: FlyerProcessPayload; extractedData: Partial<ExtractedCoreData> | null | undefined } {
 logger.debug({ body, type: typeof body }, '[AIService] Starting _parseLegacyPayload');
@@ -838,7 +845,10 @@ async enqueueFlyerProcessing(
 try {
 parsed = typeof body === 'string' ? JSON.parse(body) : body || {};
 } catch (e) {
-logger.warn({ error: errMsg(e) }, '[AIService] Failed to parse top-level request body string.');
+logger.warn(
+  { error: errMsg(e) },
+  '[AIService] Failed to parse top-level request body string.',
+);
 return { parsed: {}, extractedData: {} };
 }
 logger.debug({ parsed }, '[AIService] Parsed top-level body');
@@ -851,13 +861,19 @@ async enqueueFlyerProcessing(
 try {
 potentialPayload = JSON.parse(parsed.data);
 } catch (e) {
-logger.warn({ error: errMsg(e) }, '[AIService] Failed to parse nested "data" property string.');
+logger.warn(
+  { error: errMsg(e) },
+  '[AIService] Failed to parse nested "data" property string.',
+);
 }
 } else if (typeof parsed.data === 'object') {
 potentialPayload = parsed.data;
 }
 }
-logger.debug({ potentialPayload }, '[AIService] Potential payload after checking "data" property');
+logger.debug(
+  { potentialPayload },
+  '[AIService] Potential payload after checking "data" property',
+);

 // The extracted data is either in an `extractedData` key or is the payload itself.
 const extractedData = potentialPayload.extractedData ?? potentialPayload;
@@ -873,7 +889,7 @@ async enqueueFlyerProcessing(

 async processLegacyFlyerUpload(
 file: Express.Multer.File,
-body: any,
+body: unknown,
 userProfile: UserProfile | undefined,
 logger: Logger,
 ): Promise<Flyer> {
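Typing body as unknown instead of any forces an explicit narrowing step before any property access. A hedged sketch of how a legacy payload like the one parsed here can be narrowed; the shape and field names are assumptions for illustration, not the service's real types:

interface LegacyPayload {
  checksum?: string;
  data?: string | Record<string, unknown>;
  extractedData?: Record<string, unknown>;
}

// `unknown` cannot be dereferenced until it is proven to be an object,
// which is exactly the guard the service performs before reading fields.
export function parseLegacyBody(body: unknown): LegacyPayload {
  if (typeof body === 'string') {
    try {
      return JSON.parse(body) as LegacyPayload;
    } catch {
      return {};
    }
  }
  if (typeof body === 'object' && body !== null) {
    return body as LegacyPayload;
  }
  return {};
}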
@@ -889,10 +905,14 @@ async enqueueFlyerProcessing(

 const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger);
 if (existingFlyer) {
-throw new DuplicateFlyerError('This flyer has already been processed.', existingFlyer.flyer_id);
+throw new DuplicateFlyerError(
+  'This flyer has already been processed.',
+  existingFlyer.flyer_id,
+);
 }

-const originalFileName = parsed.originalFileName ?? parsed?.data?.originalFileName ?? file.originalname;
+const originalFileName =
+  parsed.originalFileName ?? parsed?.data?.originalFileName ?? file.originalname;

 if (!extractedData || typeof extractedData !== 'object') {
 logger.warn({ bodyData: parsed }, 'Missing extractedData in legacy payload.');
@@ -900,7 +920,11 @@ async enqueueFlyerProcessing(
 }

 const rawItems = extractedData.items ?? [];
-const itemsArray = Array.isArray(rawItems) ? rawItems : typeof rawItems === 'string' ? JSON.parse(rawItems) : [];
+const itemsArray = Array.isArray(rawItems)
+  ? rawItems
+  : typeof rawItems === 'string'
+    ? JSON.parse(rawItems)
+    : [];
 const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
 ...item,
 // Ensure empty or nullish price_display is stored as NULL to satisfy database constraints.
@@ -912,7 +936,10 @@ async enqueueFlyerProcessing(
 updated_at: new Date().toISOString(),
 }));

-const storeName = extractedData.store_name && String(extractedData.store_name).trim().length > 0 ? String(extractedData.store_name) : 'Unknown Store (auto)';
+const storeName =
+  extractedData.store_name && String(extractedData.store_name).trim().length > 0
+    ? String(extractedData.store_name)
+    : 'Unknown Store (auto)';
 if (storeName.startsWith('Unknown')) {
 logger.warn('extractedData.store_name missing; using fallback store name.');
 }
@@ -950,7 +977,8 @@ async enqueueFlyerProcessing(
 uploaded_by: userProfile?.user.user_id,
 };

-return db.withTransaction(async (client) => {
+return db
+  .withTransaction(async (client) => {
 const { flyer, items } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);

 logger.info(
@@ -968,7 +996,8 @@ async enqueueFlyerProcessing(
 logger,
 );
 return flyer;
-}).catch((error) => {
+})
+.catch((error) => {
 logger.error({ err: error, checksum }, 'Legacy flyer upload database transaction failed.');
 throw error;
 });
|||||||
@@ -18,10 +18,14 @@ const { transactionalUserRepoMocks, transactionalAdminRepoMocks } = vi.hoisted((
|
|||||||
});
|
});
|
||||||
|
|
||||||
vi.mock('./db/user.db', () => ({
|
vi.mock('./db/user.db', () => ({
|
||||||
UserRepository: vi.fn().mockImplementation(function () { return transactionalUserRepoMocks }),
|
UserRepository: vi.fn().mockImplementation(function () {
|
||||||
|
return transactionalUserRepoMocks;
|
||||||
|
}),
|
||||||
}));
|
}));
|
||||||
vi.mock('./db/admin.db', () => ({
|
vi.mock('./db/admin.db', () => ({
|
||||||
AdminRepository: vi.fn().mockImplementation(function () { return transactionalAdminRepoMocks }),
|
AdminRepository: vi.fn().mockImplementation(function () {
|
||||||
|
return transactionalAdminRepoMocks;
|
||||||
|
}),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
describe('AuthService', () => {
|
describe('AuthService', () => {
|
||||||
@@ -29,7 +33,7 @@ describe('AuthService', () => {
|
|||||||
let bcrypt: typeof import('bcrypt');
|
let bcrypt: typeof import('bcrypt');
|
||||||
let jwt: typeof jsonwebtoken & { default: typeof jsonwebtoken };
|
let jwt: typeof jsonwebtoken & { default: typeof jsonwebtoken };
|
||||||
let userRepo: typeof import('./db/index.db').userRepo;
|
let userRepo: typeof import('./db/index.db').userRepo;
|
||||||
let adminRepo: typeof import('./db/index.db').adminRepo;
|
let _adminRepo: typeof import('./db/index.db').adminRepo;
|
||||||
let logger: typeof import('./logger.server').logger;
|
let logger: typeof import('./logger.server').logger;
|
||||||
let sendPasswordResetEmail: typeof import('./emailService.server').sendPasswordResetEmail;
|
let sendPasswordResetEmail: typeof import('./emailService.server').sendPasswordResetEmail;
|
||||||
let DatabaseError: typeof import('./processingErrors').DatabaseError;
|
let DatabaseError: typeof import('./processingErrors').DatabaseError;
|
||||||
@@ -98,7 +102,7 @@ describe('AuthService', () => {
|
|||||||
jwt = (await import('jsonwebtoken')) as typeof jwt;
|
jwt = (await import('jsonwebtoken')) as typeof jwt;
|
||||||
const dbModule = await import('./db/index.db');
|
const dbModule = await import('./db/index.db');
|
||||||
userRepo = dbModule.userRepo;
|
userRepo = dbModule.userRepo;
|
||||||
adminRepo = dbModule.adminRepo;
|
_adminRepo = dbModule.adminRepo;
|
||||||
logger = (await import('./logger.server')).logger;
|
logger = (await import('./logger.server')).logger;
|
||||||
withTransaction = (await import('./db/index.db')).withTransaction;
|
withTransaction = (await import('./db/index.db')).withTransaction;
|
||||||
vi.mocked(withTransaction).mockImplementation(async (callback: any) => {
|
vi.mocked(withTransaction).mockImplementation(async (callback: any) => {
|
||||||
@@ -168,12 +172,18 @@ describe('AuthService', () => {
|
|||||||
authService.registerUser('test@example.com', 'password123', undefined, undefined, reqLog),
|
authService.registerUser('test@example.com', 'password123', undefined, undefined, reqLog),
|
||||||
).rejects.toThrow(DatabaseError);
|
).rejects.toThrow(DatabaseError);
|
||||||
|
|
||||||
expect(logger.error).toHaveBeenCalledWith({ error, email: 'test@example.com' }, `User registration failed with an unexpected error.`);
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
|
{ error, email: 'test@example.com' },
|
||||||
|
`User registration failed with an unexpected error.`,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw ValidationError if password is weak', async () => {
|
it('should throw ValidationError if password is weak', async () => {
|
||||||
const { validatePasswordStrength } = await import('../utils/authUtils');
|
const { validatePasswordStrength } = await import('../utils/authUtils');
|
||||||
vi.mocked(validatePasswordStrength).mockReturnValue({ isValid: false, feedback: 'Password too weak' });
|
vi.mocked(validatePasswordStrength).mockReturnValue({
|
||||||
|
isValid: false,
|
||||||
|
feedback: 'Password too weak',
|
||||||
|
});
|
||||||
|
|
||||||
await expect(
|
await expect(
|
||||||
authService.registerUser('test@example.com', 'weak', 'Test User', undefined, reqLog),
|
authService.registerUser('test@example.com', 'weak', 'Test User', undefined, reqLog),
|
||||||
@@ -248,7 +258,9 @@ describe('AuthService', () => {
|
|||||||
vi.mocked(userRepo.saveRefreshToken).mockRejectedValue(error);
|
vi.mocked(userRepo.saveRefreshToken).mockRejectedValue(error);
|
||||||
|
|
||||||
// The service method now directly propagates the error from the repo.
|
// The service method now directly propagates the error from the repo.
|
||||||
await expect(authService.saveRefreshToken('user-123', 'token', reqLog)).rejects.toThrow(error);
|
await expect(authService.saveRefreshToken('user-123', 'token', reqLog)).rejects.toThrow(
|
||||||
|
error,
|
||||||
|
);
|
||||||
expect(logger.error).not.toHaveBeenCalled();
|
expect(logger.error).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -305,7 +317,10 @@ describe('AuthService', () => {
|
|||||||
|
|
||||||
const result = await authService.resetPassword('test@example.com', reqLog);
|
const result = await authService.resetPassword('test@example.com', reqLog);
|
||||||
|
|
||||||
expect(logger.error).toHaveBeenCalledWith({ emailError }, `Email send failure during password reset for user`);
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
|
{ emailError },
|
||||||
|
`Email send failure during password reset for user`,
|
||||||
|
);
|
||||||
expect(result).toBe('mocked_random_id');
|
expect(result).toBe('mocked_random_id');
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -313,7 +328,9 @@ describe('AuthService', () => {
|
|||||||
const repoError = new RepositoryError('Repo error', 500);
|
const repoError = new RepositoryError('Repo error', 500);
|
||||||
vi.mocked(userRepo.findUserByEmail).mockRejectedValue(repoError);
|
vi.mocked(userRepo.findUserByEmail).mockRejectedValue(repoError);
|
||||||
|
|
||||||
await expect(authService.resetPassword('test@example.com', reqLog)).rejects.toThrow(repoError);
|
await expect(authService.resetPassword('test@example.com', reqLog)).rejects.toThrow(
|
||||||
|
repoError,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -336,7 +353,10 @@ describe('AuthService', () => {
|
|||||||
'new-hashed-password',
|
'new-hashed-password',
|
||||||
reqLog,
|
reqLog,
|
||||||
);
|
);
|
||||||
expect(transactionalUserRepoMocks.deleteResetToken).toHaveBeenCalledWith('hashed-token', reqLog);
|
expect(transactionalUserRepoMocks.deleteResetToken).toHaveBeenCalledWith(
|
||||||
|
'hashed-token',
|
||||||
|
reqLog,
|
||||||
|
);
|
||||||
expect(transactionalAdminRepoMocks.logActivity).toHaveBeenCalledWith(
|
expect(transactionalAdminRepoMocks.logActivity).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({ action: 'password_reset' }),
|
expect.objectContaining({ action: 'password_reset' }),
|
||||||
reqLog,
|
reqLog,
|
||||||
@@ -351,9 +371,14 @@ describe('AuthService', () => {
|
|||||||
const dbError = new Error('Transaction failed');
|
const dbError = new Error('Transaction failed');
|
||||||
vi.mocked(withTransaction).mockRejectedValue(dbError);
|
vi.mocked(withTransaction).mockRejectedValue(dbError);
|
||||||
|
|
||||||
await expect(authService.updatePassword('valid-token', 'newPassword', reqLog)).rejects.toThrow(DatabaseError);
|
await expect(
|
||||||
|
authService.updatePassword('valid-token', 'newPassword', reqLog),
|
||||||
|
).rejects.toThrow(DatabaseError);
|
||||||
|
|
||||||
expect(logger.error).toHaveBeenCalledWith({ error: dbError }, `An unexpected error occurred during password update.`);
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
|
{ error: dbError },
|
||||||
|
`An unexpected error occurred during password update.`,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return null if token is invalid or not found', async () => {
|
it('should return null if token is invalid or not found', async () => {
|
||||||
@@ -367,24 +392,34 @@ describe('AuthService', () => {
|
|||||||
|
|
||||||
it('should throw ValidationError if new password is weak', async () => {
|
it('should throw ValidationError if new password is weak', async () => {
|
||||||
const { validatePasswordStrength } = await import('../utils/authUtils');
|
const { validatePasswordStrength } = await import('../utils/authUtils');
|
||||||
vi.mocked(validatePasswordStrength).mockReturnValue({ isValid: false, feedback: 'Password too weak' });
|
vi.mocked(validatePasswordStrength).mockReturnValue({
|
||||||
|
isValid: false,
|
||||||
|
feedback: 'Password too weak',
|
||||||
|
});
|
||||||
|
|
||||||
await expect(
|
await expect(authService.updatePassword('token', 'weak', reqLog)).rejects.toThrow(
|
||||||
authService.updatePassword('token', 'weak', reqLog),
|
ValidationError,
|
||||||
).rejects.toThrow(ValidationError);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should re-throw RepositoryError from transaction', async () => {
|
it('should re-throw RepositoryError from transaction', async () => {
|
||||||
const repoError = new RepositoryError('Repo error', 500);
|
const repoError = new RepositoryError('Repo error', 500);
|
||||||
vi.mocked(withTransaction).mockRejectedValue(repoError);
|
vi.mocked(withTransaction).mockRejectedValue(repoError);
|
||||||
|
|
||||||
await expect(authService.updatePassword('token', 'newPass', reqLog)).rejects.toThrow(repoError);
|
await expect(authService.updatePassword('token', 'newPass', reqLog)).rejects.toThrow(
|
||||||
|
repoError,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('getUserByRefreshToken', () => {
|
describe('getUserByRefreshToken', () => {
|
||||||
it('should return user profile if token exists', async () => {
|
it('should return user profile if token exists', async () => {
|
||||||
vi.mocked(userRepo.findUserByRefreshToken).mockResolvedValue({ user_id: 'user-123', email: 'test@example.com', created_at: new Date().toISOString(), updated_at: new Date().toISOString() });
|
vi.mocked(userRepo.findUserByRefreshToken).mockResolvedValue({
|
||||||
|
user_id: 'user-123',
|
||||||
|
email: 'test@example.com',
|
||||||
|
created_at: new Date().toISOString(),
|
||||||
|
updated_at: new Date().toISOString(),
|
||||||
|
});
|
||||||
vi.mocked(userRepo.findUserProfileById).mockResolvedValue(mockUserProfile);
|
vi.mocked(userRepo.findUserProfileById).mockResolvedValue(mockUserProfile);
|
||||||
|
|
||||||
const result = await authService.getUserByRefreshToken('valid-token', reqLog);
|
const result = await authService.getUserByRefreshToken('valid-token', reqLog);
|
||||||
@@ -423,7 +458,9 @@ describe('AuthService', () => {
|
|||||||
const repoError = new RepositoryError('Some repo error', 500);
|
const repoError = new RepositoryError('Some repo error', 500);
|
||||||
vi.mocked(userRepo.findUserByRefreshToken).mockRejectedValue(repoError);
|
vi.mocked(userRepo.findUserByRefreshToken).mockRejectedValue(repoError);
|
||||||
|
|
||||||
await expect(authService.getUserByRefreshToken('any-token', reqLog)).rejects.toThrow(repoError);
|
await expect(authService.getUserByRefreshToken('any-token', reqLog)).rejects.toThrow(
|
||||||
|
repoError,
|
||||||
|
);
|
||||||
// The original error is re-thrown, so the generic wrapper log should not be called.
|
// The original error is re-thrown, so the generic wrapper log should not be called.
|
||||||
expect(logger.error).not.toHaveBeenCalledWith(
|
expect(logger.error).not.toHaveBeenCalledWith(
|
||||||
expect.any(Object),
|
expect.any(Object),
|
||||||
@@ -449,7 +486,12 @@ describe('AuthService', () => {
|
|||||||
|
|
||||||
describe('refreshAccessToken', () => {
|
describe('refreshAccessToken', () => {
|
||||||
it('should return new access token if user found', async () => {
|
it('should return new access token if user found', async () => {
|
||||||
vi.mocked(userRepo.findUserByRefreshToken).mockResolvedValue({ user_id: 'user-123', email: 'test@example.com', created_at: new Date().toISOString(), updated_at: new Date().toISOString() });
|
vi.mocked(userRepo.findUserByRefreshToken).mockResolvedValue({
|
||||||
|
user_id: 'user-123',
|
||||||
|
email: 'test@example.com',
|
||||||
|
created_at: new Date().toISOString(),
|
||||||
|
updated_at: new Date().toISOString(),
|
||||||
|
});
|
||||||
vi.mocked(userRepo.findUserProfileById).mockResolvedValue(mockUserProfile);
|
vi.mocked(userRepo.findUserProfileById).mockResolvedValue(mockUserProfile);
|
||||||
// FIX: The global mock for jsonwebtoken provides a `default` export.
|
// FIX: The global mock for jsonwebtoken provides a `default` export.
|
||||||
// The code under test (`authService`) uses `import jwt from 'jsonwebtoken'`, so it gets the default export.
|
// The code under test (`authService`) uses `import jwt from 'jsonwebtoken'`, so it gets the default export.
|
||||||
|
|||||||
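The test hunks above rely on Vitest's hoisting rules: `vi.mock` factories run before imports, so the shared repository mocks must come from `vi.hoisted`. A sketch of that pattern, with mock method names taken from the surrounding diff (the factory contents are otherwise illustrative):

import { vi } from 'vitest';

// The hoisted factory runs before the module mocks are installed (assumed mock contents).
const { transactionalUserRepoMocks } = vi.hoisted(() => ({
  transactionalUserRepoMocks: {
    createPasswordResetToken: vi.fn(),
    updateUserPassword: vi.fn(),
    deleteResetToken: vi.fn(),
  },
}));

vi.mock('./db/user.db', () => ({
  // Every `new UserRepository(client)` in the code under test receives the same shared mock object.
  UserRepository: vi.fn().mockImplementation(function () {
    return transactionalUserRepoMocks;
  }),
}));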
@@ -2,7 +2,8 @@
 import * as bcrypt from 'bcrypt';
 import jwt from 'jsonwebtoken';
 import crypto from 'crypto';
-import { DatabaseError, FlyerProcessingError } from './processingErrors';
+import { DatabaseError } from './processingErrors';
+import type { Logger } from 'pino';
 import { withTransaction, userRepo } from './db/index.db';
 import { RepositoryError, ValidationError } from './db/errors.db';
 import { logger } from './logger.server';
@@ -18,7 +19,7 @@ class AuthService {
 password: string,
 fullName: string | undefined,
 avatarUrl: string | undefined,
-reqLog: any,
+reqLog: Logger,
 ) {
 const strength = validatePasswordStrength(password);
 if (!strength.isValid) {
@@ -42,10 +43,17 @@ class AuthService {
 reqLog,
 );

-logger.info(`Successfully created new user in DB: ${newUser.user.email} (ID: ${newUser.user.user_id})`);
+logger.info(
+  `Successfully created new user in DB: ${newUser.user.email} (ID: ${newUser.user.user_id})`,
+);

 await adminRepo.logActivity(
-{ userId: newUser.user.user_id, action: 'user_registered', displayText: `${email} has registered.`, icon: 'user-plus' },
+{
+  userId: newUser.user.user_id,
+  action: 'user_registered',
+  displayText: `${email} has registered.`,
+  icon: 'user-plus',
+},
 reqLog,
 );

@@ -57,7 +65,8 @@ class AuthService {
 }
 // For unknown errors, log them and wrap them in a generic DatabaseError
 // to standardize the error contract of the service layer.
-const message = error instanceof Error ? error.message : 'An unknown error occurred during registration.';
+const message =
+  error instanceof Error ? error.message : 'An unknown error occurred during registration.';
 logger.error({ error, email }, `User registration failed with an unexpected error.`);
 throw new DatabaseError(message);
 });
@@ -68,15 +77,9 @@ class AuthService {
 password: string,
 fullName: string | undefined,
 avatarUrl: string | undefined,
-reqLog: any,
+reqLog: Logger,
 ): Promise<{ newUserProfile: UserProfile; accessToken: string; refreshToken: string }> {
-const newUserProfile = await this.registerUser(
-email,
-password,
-fullName,
-avatarUrl,
-reqLog,
-);
+const newUserProfile = await this.registerUser(email, password, fullName, avatarUrl, reqLog);
 const { accessToken, refreshToken } = await this.handleSuccessfulLogin(newUserProfile, reqLog);
 return { newUserProfile, accessToken, refreshToken };
 }
@@ -93,19 +96,19 @@ class AuthService {
 return { accessToken, refreshToken };
 }

-async saveRefreshToken(userId: string, refreshToken: string, reqLog: any) {
+async saveRefreshToken(userId: string, refreshToken: string, reqLog: Logger) {
 // The repository method `saveRefreshToken` already includes robust error handling
 // and logging via `handleDbError`. No need for a redundant try/catch block here.
 await userRepo.saveRefreshToken(userId, refreshToken, reqLog);
 }

-async handleSuccessfulLogin(userProfile: UserProfile, reqLog: any) {
+async handleSuccessfulLogin(userProfile: UserProfile, reqLog: Logger) {
 const { accessToken, refreshToken } = this.generateAuthTokens(userProfile);
 await this.saveRefreshToken(userProfile.user.user_id, refreshToken, reqLog);
 return { accessToken, refreshToken };
 }

-async resetPassword(email: string, reqLog: any) {
+async resetPassword(email: string, reqLog: Logger) {
 try {
 logger.debug(`[API /forgot-password] Received request for email: ${email}`);
 const user = await userRepo.findUserByEmail(email, reqLog);
@@ -124,7 +127,13 @@ class AuthService {
 // Wrap the token creation in a transaction to ensure atomicity of the DELETE and INSERT operations.
 await withTransaction(async (client) => {
 const transactionalUserRepo = new (await import('./db/user.db')).UserRepository(client);
-await transactionalUserRepo.createPasswordResetToken(user.user_id, tokenHash, expiresAt, reqLog, client);
+await transactionalUserRepo.createPasswordResetToken(
+  user.user_id,
+  tokenHash,
+  expiresAt,
+  reqLog,
+  client,
+);
 });

 const resetLink = `${process.env.FRONTEND_URL}/reset-password/${token}`;
@@ -146,12 +155,15 @@ class AuthService {
 }
 // For unknown errors, log them and wrap them in a generic DatabaseError.
 const message = error instanceof Error ? error.message : 'An unknown error occurred.';
-logger.error({ error, email }, `An unexpected error occurred during password reset for email: ${email}`);
+logger.error(
+  { error, email },
+  `An unexpected error occurred during password reset for email: ${email}`,
+);
 throw new DatabaseError(message);
 }
 }

-async updatePassword(token: string, newPassword: string, reqLog: any) {
+async updatePassword(token: string, newPassword: string, reqLog: Logger) {
 const strength = validatePasswordStrength(newPassword);
 if (!strength.isValid) {
 throw new ValidationError([], strength.feedback);
@@ -184,7 +196,12 @@ class AuthService {
 await transactionalUserRepo.updateUserPassword(tokenRecord.user_id, hashedPassword, reqLog);
 await transactionalUserRepo.deleteResetToken(tokenRecord.token_hash, reqLog);
 await adminRepo.logActivity(
-{ userId: tokenRecord.user_id, action: 'password_reset', displayText: `User ID ${tokenRecord.user_id} has reset their password.`, icon: 'key' },
+{
+  userId: tokenRecord.user_id,
+  action: 'password_reset',
+  displayText: `User ID ${tokenRecord.user_id} has reset their password.`,
+  icon: 'key',
+},
 reqLog,
 );

@@ -201,7 +218,7 @@ class AuthService {
 });
 }

-async getUserByRefreshToken(refreshToken: string, reqLog: any) {
+async getUserByRefreshToken(refreshToken: string, reqLog: Logger) {
 try {
 const basicUser = await userRepo.findUserByRefreshToken(refreshToken, reqLog);
 if (!basicUser) {
@@ -216,19 +233,25 @@ class AuthService {
 }
 // For unknown errors, log them and wrap them in a generic DatabaseError.
 const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred.';
-logger.error({ error, refreshToken }, 'An unexpected error occurred while fetching user by refresh token.');
+logger.error(
+  { error, refreshToken },
+  'An unexpected error occurred while fetching user by refresh token.',
+);
 throw new DatabaseError(errorMessage);
 }
 }

-async logout(refreshToken: string, reqLog: any) {
+async logout(refreshToken: string, reqLog: Logger) {
 // The repository method `deleteRefreshToken` now includes robust error handling
 // and logging via `handleDbError`. No need for a redundant try/catch block here.
 // The original implementation also swallowed errors, which is now fixed.
 await userRepo.deleteRefreshToken(refreshToken, reqLog);
 }

-async refreshAccessToken(refreshToken: string, reqLog: any): Promise<{ accessToken: string } | null> {
+async refreshAccessToken(
+  refreshToken: string,
+  reqLog: Logger,
+): Promise<{ accessToken: string } | null> {
 const user = await this.getUserByRefreshToken(refreshToken, reqLog);
 if (!user) {
 return null;
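The service hunks above replace `reqLog: any` with pino's `Logger` type. A minimal sketch of the same signature change in isolation; the method body is illustrative only:

import type { Logger } from 'pino';

// Sketch: `Logger` is a type-only import, so it vanishes at compile time
// while satisfying @typescript-eslint/no-explicit-any.
async function logout(refreshToken: string, reqLog: Logger): Promise<void> {
  reqLog.info({ tokenLength: refreshToken.length }, 'Deleting refresh token');
  // await userRepo.deleteRefreshToken(refreshToken, reqLog); // the actual call in the service
}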
@@ -2,7 +2,7 @@
 import type { Pool, PoolClient } from 'pg';
 import { getPool } from './connection.db';
 import type { Logger } from 'pino';
-import { UniqueConstraintError, NotFoundError, handleDbError } from './errors.db';
+import { NotFoundError, handleDbError } from './errors.db';
 import { Address } from '../../types';

 export class AddressRepository {
@@ -30,9 +30,15 @@ export class AddressRepository {
 }
 return res.rows[0];
 } catch (error) {
-handleDbError(error, logger, 'Database error in getAddressById', { addressId }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getAddressById',
+  { addressId },
+  {
 defaultMessage: 'Failed to retrieve address.',
-});
+  },
+);
 }
 }

@@ -76,10 +82,16 @@ export class AddressRepository {
 const res = await this.db.query<{ address_id: number }>(query, values);
 return res.rows[0].address_id;
 } catch (error) {
-handleDbError(error, logger, 'Database error in upsertAddress', { address }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in upsertAddress',
+  { address },
+  {
 uniqueMessage: 'An identical address already exists.',
 defaultMessage: 'Failed to upsert address.',
-});
+  },
+);
 }
 }
 }
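The repository hunks in this commit repeatedly call `handleDbError(error, logger, message, context, { ...messages })`. The helper's real definition is not part of this diff; the following declaration is only an inferred shape consistent with those call sites:

import type { Logger } from 'pino';

// Inferred shape only; the real helper lives in errors.db and may differ.
interface DbErrorMessages {
  uniqueMessage?: string;
  fkMessage?: string;
  checkMessage?: string;
  defaultMessage: string;
}

declare function handleDbError(
  error: unknown,
  logger: Logger,
  message: string,
  context: Record<string, unknown>,
  messages: DbErrorMessages,
): never; // assumed to throw, since the catch blocks above never return after calling it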
@@ -1,6 +1,6 @@
 // src/services/db/admin.db.test.ts
 import { describe, it, expect, vi, beforeEach, Mock } from 'vitest';
-import type { Pool, PoolClient } from 'pg';
+import type { PoolClient } from 'pg';
 import { ForeignKeyConstraintError, NotFoundError } from './errors.db';
 import { AdminRepository } from './admin.db';
 import type { SuggestedCorrection, AdminUserView, Profile, Flyer } from '../../types';
@@ -84,10 +84,7 @@ describe('Admin DB Service', () => {
 mockDb.query.mockResolvedValue({ rows: [] }); // Mock the function call
 await adminRepo.approveCorrection(123, mockLogger);

-expect(mockDb.query).toHaveBeenCalledWith(
-'SELECT public.approve_correction($1)',
-[123],
-);
+expect(mockDb.query).toHaveBeenCalledWith('SELECT public.approve_correction($1)', [123]);
 });

 it('should throw an error if the database function fails', async () => {
@@ -223,9 +220,7 @@ describe('Admin DB Service', () => {

 const result = await adminRepo.getDailyStatsForLast30Days(mockLogger);

-expect(mockDb.query).toHaveBeenCalledWith(
-expect.stringContaining('WITH date_series AS'),
-);
+expect(mockDb.query).toHaveBeenCalledWith(expect.stringContaining('WITH date_series AS'));
 expect(result).toEqual(mockStats);
 });

@@ -347,10 +342,10 @@ describe('Admin DB Service', () => {
 const mockRecipe = { recipe_id: 1, status: 'public' };
 mockDb.query.mockResolvedValue({ rows: [mockRecipe], rowCount: 1 });
 const result = await adminRepo.updateRecipeStatus(1, 'public', mockLogger);
-expect(mockDb.query).toHaveBeenCalledWith(
-expect.stringContaining('UPDATE public.recipes'),
-['public', 1],
-);
+expect(mockDb.query).toHaveBeenCalledWith(expect.stringContaining('UPDATE public.recipes'), [
+  'public',
+  1,
+]);
 expect(result).toEqual(mockRecipe);
 });

@@ -592,10 +587,10 @@ describe('Admin DB Service', () => {
 const mockReceipt = { receipt_id: 1, status: 'completed' };
 mockDb.query.mockResolvedValue({ rows: [mockReceipt], rowCount: 1 });
 const result = await adminRepo.updateReceiptStatus(1, 'completed', mockLogger);
-expect(mockDb.query).toHaveBeenCalledWith(
-expect.stringContaining('UPDATE public.receipts'),
-['completed', 1],
-);
+expect(mockDb.query).toHaveBeenCalledWith(expect.stringContaining('UPDATE public.receipts'), [
+  'completed',
+  1,
+]);
 expect(result).toEqual(mockReceipt);
 });

@@ -748,7 +743,10 @@ describe('Admin DB Service', () => {
 await expect(adminRepo.getFlyersForReview(mockLogger)).rejects.toThrow(
 'Failed to retrieve flyers for review.',
 );
-expect(mockLogger.error).toHaveBeenCalledWith({ err: dbError }, 'Database error in getFlyersForReview');
+expect(mockLogger.error).toHaveBeenCalledWith(
+  { err: dbError },
+  'Database error in getFlyersForReview',
+);
 });
 });
 });
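The test changes above are Prettier reflows of `toHaveBeenCalledWith` assertions: a call stays on one line when it fits the configured print width and is otherwise broken per argument. A small self-contained illustration (not taken from the test file):

import { expect, vi } from 'vitest';

const query = vi.fn();
query('SELECT public.approve_correction($1)', [123]);

// Short enough for one line, so Prettier keeps it inline:
expect(query).toHaveBeenCalledWith('SELECT public.approve_correction($1)', [123]);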
@@ -1,7 +1,7 @@
 // src/services/db/admin.db.ts
 import type { Pool, PoolClient } from 'pg';
 import { getPool, withTransaction } from './connection.db';
-import { ForeignKeyConstraintError, NotFoundError, CheckConstraintError, handleDbError } from './errors.db';
+import { NotFoundError, handleDbError } from './errors.db';
 import type { Logger } from 'pino';
 import {
 SuggestedCorrection,
@@ -262,9 +262,15 @@ export class AdminRepository {
 const res = await this.db.query<MostFrequentSaleItem>(query, [days, limit]);
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getMostFrequentSaleItems', { days, limit }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getMostFrequentSaleItems',
+  { days, limit },
+  {
 defaultMessage: 'Failed to get most frequent sale items.',
-});
+  },
+);
 }
 }

@@ -292,10 +298,16 @@ export class AdminRepository {
 if (error instanceof NotFoundError) {
 throw error;
 }
-handleDbError(error, logger, 'Database error in updateRecipeCommentStatus', { commentId, status }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in updateRecipeCommentStatus',
+  { commentId, status },
+  {
 checkMessage: 'Invalid status provided for recipe comment.',
 defaultMessage: 'Failed to update recipe comment status.',
-});
+  },
+);
 }
 }

@@ -326,9 +338,15 @@ export class AdminRepository {
 const res = await this.db.query<UnmatchedFlyerItem>(query);
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getUnmatchedFlyerItems', {}, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getUnmatchedFlyerItems',
+  {},
+  {
 defaultMessage: 'Failed to retrieve unmatched flyer items.',
-});
+  },
+);
 }
 }

@@ -354,10 +372,16 @@ export class AdminRepository {
 if (error instanceof NotFoundError) {
 throw error;
 }
-handleDbError(error, logger, 'Database error in updateRecipeStatus', { recipeId, status }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in updateRecipeStatus',
+  { recipeId, status },
+  {
 checkMessage: 'Invalid status provided for recipe.',
 defaultMessage: 'Failed to update recipe status.',
-});
+  },
+);
 }
 }

@@ -414,7 +438,10 @@ export class AdminRepository {
 logger,
 'Database transaction error in resolveUnmatchedFlyerItem',
 { unmatchedFlyerItemId, masterItemId },
-{ fkMessage: 'The specified master item ID does not exist.', defaultMessage: 'Failed to resolve unmatched flyer item.' },
+{
+  fkMessage: 'The specified master item ID does not exist.',
+  defaultMessage: 'Failed to resolve unmatched flyer item.',
+},
 );
 }
 }
@@ -587,10 +614,16 @@ export class AdminRepository {
 return res.rows[0];
 } catch (error) {
 if (error instanceof NotFoundError) throw error;
-handleDbError(error, logger, 'Database error in updateReceiptStatus', { receiptId, status }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in updateReceiptStatus',
+  { receiptId, status },
+  {
 checkMessage: 'Invalid status provided for receipt.',
 defaultMessage: 'Failed to update receipt status.',
-});
+  },
+);
 }
 }

@@ -603,9 +636,15 @@ export class AdminRepository {
 const res = await this.db.query<AdminUserView>(query);
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getAllUsers', {}, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getAllUsers',
+  {},
+  {
 defaultMessage: 'Failed to retrieve all users.',
-});
+  },
+);
 }
 }

@@ -629,11 +668,17 @@ export class AdminRepository {
 if (error instanceof NotFoundError) {
 throw error;
 }
-handleDbError(error, logger, 'Database error in updateUserRole', { userId, role }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in updateUserRole',
+  { userId, role },
+  {
 fkMessage: 'The specified user does not exist.',
 checkMessage: 'Invalid role provided for user.',
 defaultMessage: 'Failed to update user role.',
-});
+  },
+);
 }
 }

@@ -660,9 +705,15 @@ export class AdminRepository {
 const res = await this.db.query<Flyer>(query);
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getFlyersForReview', {}, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getFlyersForReview',
+  {},
+  {
 defaultMessage: 'Failed to retrieve flyers for review.',
-});
+  },
+);
 }
 }
 }
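The AdminRepository hunks above follow one convention: domain errors such as NotFoundError are re-thrown untouched and everything else goes through handleDbError. A hedged sketch of that pattern as a generic helper; runDbCall is hypothetical, while NotFoundError and handleDbError are the names used in the diff:

import type { Logger } from 'pino';
import { NotFoundError, handleDbError } from './errors.db';

// Hypothetical wrapper showing the convention; not a helper that exists in the repository.
export async function runDbCall<T>(run: () => Promise<T>, logger: Logger): Promise<T> {
  try {
    return await run();
  } catch (error) {
    if (error instanceof NotFoundError) {
      throw error; // keep the specific error for the caller
    }
    handleDbError(
      error,
      logger,
      'Database error in runDbCall',
      {},
      { defaultMessage: 'Database operation failed.' },
    );
    throw error; // unreachable if handleDbError throws; kept so the return type checks out
  }
}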
@@ -6,7 +6,7 @@ import { ForeignKeyConstraintError } from './errors.db';
 vi.unmock('./budget.db');

 import { BudgetRepository } from './budget.db';
-import type { Pool, PoolClient } from 'pg';
+import type { PoolClient } from 'pg';
 import type { Budget, SpendingByCategory } from '../../types';

 // Mock the logger to prevent console output during tests
@@ -260,7 +260,6 @@ describe('Budget DB Service', () => {
 ).rejects.toThrow('Budget not found or user does not have permission to update.');
 });

-
 it('should throw an error if no rows are updated', async () => {
 // Arrange: Mock the query to return 0 rows affected
 mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });
@@ -56,11 +56,11 @@ describe('DB Connection Service', () => {
 // Reset specific method behaviors
 mocks.mockPoolInstance.query.mockReset();

-// Mock pool.on to capture the error handler
-let capturedErrorHandler: ((err: Error, client: PoolClient) => void) | undefined;
+// Mock pool.on to capture the error handler (kept for potential future use in error handling tests)
+let _capturedErrorHandler: ((err: Error, client: PoolClient) => void) | undefined;
 vi.mocked(mocks.mockPoolInstance.on).mockImplementation((event, handler) => {
 if (event === 'error') {
-capturedErrorHandler = handler as (err: Error, client: PoolClient) => void;
+_capturedErrorHandler = handler as (err: Error, client: PoolClient) => void;
 }
 return mocks.mockPoolInstance; // Return the mock instance for chaining
 });
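The `_capturedErrorHandler` and `_adminRepo` renames above only silence unused-variable reports if the lint rule ignores underscore-prefixed names. The project's actual rule options are not shown here; a commonly used configuration that would have this effect looks like:

// Assumed rule options; the repository's real ESLint config is not shown in this hunk.
export const unusedVarsOverride = {
  rules: {
    '@typescript-eslint/no-unused-vars': [
      'error',
      { argsIgnorePattern: '^_', varsIgnorePattern: '^_' },
    ],
  },
};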
@@ -15,7 +15,7 @@ export const conversionRepo = {
 const { masterItemId } = filters;
 try {
 let query = 'SELECT * FROM public.unit_conversions';
-const params: any[] = [];
+const params: (string | number)[] = [];

 if (masterItemId) {
 query += ' WHERE master_item_id = $1';
@@ -27,9 +27,15 @@ export const conversionRepo = {
 const result = await getPool().query<UnitConversion>(query, params);
 return result.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getConversions', { filters }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getConversions',
+  { filters },
+  {
 defaultMessage: 'Failed to retrieve unit conversions.',
-});
+  },
+);
 }
 },

@@ -48,12 +54,19 @@ export const conversionRepo = {
 );
 return res.rows[0];
 } catch (error) {
-handleDbError(error, logger, 'Database error in createConversion', { conversionData }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in createConversion',
+  { conversionData },
+  {
 fkMessage: 'The specified master item does not exist.',
 uniqueMessage: 'This conversion rule already exists for this item.',
-checkMessage: 'Invalid unit conversion data provided (e.g., factor must be > 0, units cannot be the same).',
+checkMessage:
+  'Invalid unit conversion data provided (e.g., factor must be > 0, units cannot be the same).',
 defaultMessage: 'Failed to create unit conversion.',
-});
+  },
+);
 }
 },

@@ -70,9 +83,15 @@ export const conversionRepo = {
 throw new NotFoundError(`Unit conversion with ID ${conversionId} not found.`);
 }
 } catch (error) {
-handleDbError(error, logger, 'Database error in deleteConversion', { conversionId }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in deleteConversion',
+  { conversionId },
+  {
 defaultMessage: 'Failed to delete unit conversion.',
-});
+  },
+);
 }
 },
 };
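The conversion-repo hunk above narrows the query parameter array from `any[]` to `(string | number)[]`, a type-only change. A small sketch of the same pattern (the literal `masterItemId` value is purely illustrative):

// Type-only change illustrated in isolation; runtime behaviour is identical to `any[]`.
const params: (string | number)[] = [];
let query = 'SELECT * FROM public.unit_conversions';

const masterItemId: number | undefined = 42; // hypothetical filter value
if (masterItemId) {
  query += ' WHERE master_item_id = $1';
  params.push(masterItemId);
}
// getPool().query<UnitConversion>(query, params); // as called in the repository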
@@ -1,5 +1,5 @@
 // src/services/db/flyer.db.test.ts
-import { describe, it, expect, vi, beforeEach, Mock } from 'vitest';
+import { describe, it, expect, vi, beforeEach } from 'vitest';
 import { mockPoolInstance } from '../../tests/setup/tests-setup-unit';
 import type { Pool, PoolClient } from 'pg';
 import {
@@ -408,7 +408,7 @@ describe('Flyer DB Service', () => {
 expect(queryValues).toEqual([
 1, // flyerId for item 1
 'Free Item',
-"N/A", // Sanitized price_display for item 1
+'N/A', // Sanitized price_display for item 1
 0,
 '1',
 'Promo',
@@ -416,7 +416,7 @@ describe('Flyer DB Service', () => {
 0,
 1, // flyerId for item 2
 'Whitespace Item',
-"N/A", // Sanitized price_display for item 2
+'N/A', // Sanitized price_display for item 2
 null,
 '1',
 'Promo',
@@ -428,7 +428,8 @@ describe('Flyer DB Service', () => {

 describe('createFlyerAndItems', () => {
 it('should execute find/create store, insert flyer, and insert items using the provided client', async () => {
-const flyerData: FlyerInsert = { // This was a duplicate, fixed.
+const flyerData: FlyerInsert = {
+  // This was a duplicate, fixed.
 file_name: 'transact.jpg',
 store_name: 'Transaction Store',
 } as FlyerInsert;
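The quote and line-wrapping changes in this commit are consistent with a Prettier setup using single quotes, trailing commas, and a print width of roughly 100 characters; the repository's actual Prettier config is not part of this diff, so the following is only an assumption:

// Assumed settings only; not taken from the repository.
const prettierConfig = {
  singleQuote: true,
  trailingComma: 'all',
  printWidth: 100,
};
export default prettierConfig;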
@@ -2,7 +2,7 @@
 import type { Pool, PoolClient } from 'pg';
 import { getPool, withTransaction } from './connection.db';
 import type { Logger } from 'pino';
-import { UniqueConstraintError, NotFoundError, handleDbError } from './errors.db';
+import { NotFoundError, handleDbError } from './errors.db';
 import { cacheService, CACHE_TTL, CACHE_PREFIX } from '../cacheService.server';
 import type {
 Flyer,
@@ -51,10 +51,16 @@ export class FlyerRepository {
 return result.rows[0].store_id;
 } catch (error) {
 // Use the centralized error handler for any unexpected database errors.
-handleDbError(error, logger, 'Database error in findOrCreateStore', { storeName }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in findOrCreateStore',
+  { storeName },
+  {
 // Any error caught here is unexpected, so we use a generic message.
 defaultMessage: 'Failed to find or create store in database.',
-});
+  },
+);
 }
 }

@@ -64,9 +70,13 @@ export class FlyerRepository {
 * @returns The newly created flyer record with its ID.
 */
 async insertFlyer(flyerData: FlyerDbInsert, logger: Logger): Promise<Flyer> {
-console.error('[DB DEBUG] FlyerRepository.insertFlyer called with:', JSON.stringify(flyerData, null, 2));
+console.error(
+  '[DB DEBUG] FlyerRepository.insertFlyer called with:',
+  JSON.stringify(flyerData, null, 2),
+);
 // Sanitize icon_url: Ensure empty strings become NULL to avoid regex constraint violations
-let iconUrl = flyerData.icon_url && flyerData.icon_url.trim() !== '' ? flyerData.icon_url : null;
+let iconUrl =
+  flyerData.icon_url && flyerData.icon_url.trim() !== '' ? flyerData.icon_url : null;
 let imageUrl = flyerData.image_url || 'placeholder.jpg';

 try {
@@ -136,10 +146,11 @@ export class FlyerRepository {
 offendingData: {
 image_url: flyerData.image_url,
 icon_url: flyerData.icon_url, // Log raw input
-sanitized_icon_url: flyerData.icon_url && flyerData.icon_url.trim() !== '' ? flyerData.icon_url : null
-}
+sanitized_icon_url:
+  flyerData.icon_url && flyerData.icon_url.trim() !== '' ? flyerData.icon_url : null,
+},
 },
-'[DB ERROR] URL Check Constraint Failed. Inspecting URLs.'
+'[DB ERROR] URL Check Constraint Failed. Inspecting URLs.',
 );
 }

@@ -152,12 +163,18 @@ export class FlyerRepository {
 checkMsg = `[URL_CHECK_FAIL] Invalid URL format. Image: '${imageUrl}', Icon: '${iconUrl}'`;
 }

-handleDbError(error, logger, 'Database error in insertFlyer', { flyerData }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in insertFlyer',
+  { flyerData },
+  {
 uniqueMessage: 'A flyer with this checksum already exists.',
 fkMessage: 'The specified user or store for this flyer does not exist.',
 checkMessage: checkMsg,
 defaultMessage: 'Failed to insert flyer into database.',
-});
+  },
+);
 }
 }

@@ -189,9 +206,7 @@ export class FlyerRepository {
 // Sanitize price_display. The database requires a non-empty string.
 // We provide a default value if the input is null, undefined, or an empty string.
 const priceDisplay =
-item.price_display && item.price_display.trim() !== ''
-? item.price_display
-: 'N/A';
+item.price_display && item.price_display.trim() !== '' ? item.price_display : 'N/A';

 values.push(
 flyerId,
@@ -221,10 +236,16 @@ export class FlyerRepository {
 const result = await this.db.query<FlyerItem>(query, values);
 return result.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in insertFlyerItems', { flyerId }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in insertFlyerItems',
+  { flyerId },
+  {
 fkMessage: 'The specified flyer, category, master item, or product does not exist.',
 defaultMessage: 'An unknown error occurred while inserting flyer items.',
-});
+  },
+);
 }
 }

@@ -248,9 +269,15 @@ export class FlyerRepository {
 const res = await this.db.query<Brand>(query);
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getAllBrands', {}, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getAllBrands',
+  {},
+  {
 defaultMessage: 'Failed to retrieve brands from database.',
-});
+  },
+);
 }
 },
 { ttl: CACHE_TTL.BRANDS, logger },
@@ -298,9 +325,15 @@ export class FlyerRepository {
 const res = await this.db.query<Flyer>(query, [limit, offset]);
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getFlyers', { limit, offset }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getFlyers',
+  { limit, offset },
+  {
 defaultMessage: 'Failed to retrieve flyers from database.',
-});
+  },
+);
 }
 },
 { ttl: CACHE_TTL.FLYERS, logger },
@@ -326,9 +359,15 @@ export class FlyerRepository {
 );
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getFlyerItems', { flyerId }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getFlyerItems',
+  { flyerId },
+  {
 defaultMessage: 'Failed to retrieve flyer items from database.',
-});
+  },
+);
 }
 },
 { ttl: CACHE_TTL.FLYER_ITEMS, logger },
@@ -348,9 +387,15 @@ export class FlyerRepository {
 );
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getFlyerItemsForFlyers', { flyerIds }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in getFlyerItemsForFlyers',
+  { flyerIds },
+  {
 defaultMessage: 'Failed to retrieve flyer items in batch from database.',
-});
+  },
+);
 }
 }

@@ -370,9 +415,15 @@ export class FlyerRepository {
 );
 return parseInt(res.rows[0].count, 10);
 } catch (error) {
-handleDbError(error, logger, 'Database error in countFlyerItemsForFlyers', { flyerIds }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in countFlyerItemsForFlyers',
+  { flyerIds },
+  {
 defaultMessage: 'Failed to count flyer items in batch from database.',
-});
+  },
+);
 }
 }

@@ -388,9 +439,15 @@ export class FlyerRepository {
 ]);
 return res.rows[0];
 } catch (error) {
-handleDbError(error, logger, 'Database error in findFlyerByChecksum', { checksum }, {
+handleDbError(
+  error,
+  logger,
+  'Database error in findFlyerByChecksum',
+  { checksum },
+  {
 defaultMessage: 'Failed to find flyer by checksum in database.',
-});
+  },
+);
 }
 }

@@ -446,9 +503,15 @@ export class FlyerRepository {
 // Invalidate cache after successful deletion
 await cacheService.invalidateFlyer(flyerId, logger);
 } catch (error) {
-handleDbError(error, logger, 'Database transaction error in deleteFlyer', { flyerId }, {
+handleDbError(
+  error,
+  logger,
+  'Database transaction error in deleteFlyer',
+  { flyerId },
+  {
 defaultMessage: 'Failed to delete flyer.',
-});
+  },
+);
 }
 }
 }
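The FlyerRepository hunks above preserve two sanitization rules while reformatting them: an empty `price_display` becomes 'N/A' because the column requires a non-empty string, and an empty `icon_url` becomes null so the URL check constraint is not hit. A standalone sketch of those rules (the function names are illustrative; the repository inlines these expressions instead):

function sanitizePriceDisplay(priceDisplay: string | null | undefined): string {
  return priceDisplay && priceDisplay.trim() !== '' ? priceDisplay : 'N/A';
}

function sanitizeIconUrl(iconUrl: string | null | undefined): string | null {
  return iconUrl && iconUrl.trim() !== '' ? iconUrl : null;
}

// sanitizePriceDisplay('  ') -> 'N/A'
// sanitizeIconUrl('')        -> null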
@@ -1,7 +1,5 @@
 // src/services/db/gamification.db.test.ts
 import { describe, it, expect, vi, beforeEach } from 'vitest';
-import type { Pool } from 'pg';
-import { mockPoolInstance } from '../../tests/setup/tests-setup-unit';

 // FIX 2: Un-mock the module we are testing.
 vi.unmock('./gamification.db');
@@ -113,10 +111,10 @@ describe('Gamification DB Service', () => {
 mockDb.query.mockResolvedValue({ rows: [] }); // The function returns void
 await gamificationRepo.awardAchievement('user-123', 'Test Achievement', mockLogger);

-expect(mockDb.query).toHaveBeenCalledWith(
-'SELECT public.award_achievement($1, $2)',
-['user-123', 'Test Achievement'],
-);
+expect(mockDb.query).toHaveBeenCalledWith('SELECT public.award_achievement($1, $2)', [
+  'user-123',
+  'Test Achievement',
+]);
 });

 it('should throw ForeignKeyConstraintError if user or achievement does not exist', async () => {
@@ -160,7 +158,7 @@ describe('Gamification DB Service', () => {
 it('should execute the correct SELECT query with a LIMIT and return leaderboard users', async () => {
 const mockLeaderboard: LeaderboardUser[] = [
 { user_id: 'user-1', full_name: 'User One', avatar_url: null, points: 500, rank: '1' },
-{ user_id: 'user-2', full_name: 'User Two', avatar_url: null, points: 450, rank: '2' }
+{ user_id: 'user-2', full_name: 'User Two', avatar_url: null, points: 450, rank: '2' },
 ];
 mockDb.query.mockResolvedValue({ rows: mockLeaderboard });

@@ -10,7 +10,6 @@ import { ForeignKeyConstraintError, NotFoundError } from './errors.db';
 import type { Notification } from '../../types';
 import { createMockNotification } from '../../tests/utils/mockFactories';

-
 // Mock the logger to prevent console output during tests
 vi.mock('../logger.server', () => ({
 logger: {
@@ -24,9 +23,6 @@ import { logger as mockLogger } from '../logger.server';

 describe('Notification DB Service', () => {
 let notificationRepo: NotificationRepository;
-const mockDb = {
-query: vi.fn(),
-};

 beforeEach(() => {
 vi.clearAllMocks();
@@ -283,9 +279,9 @@ describe('Notification DB Service', () => {
 it('should not mark a notification as read if the user does not own it', async () => {
 mockPoolInstance.query.mockResolvedValue({ rowCount: 0 });

-await expect(notificationRepo.markNotificationAsRead(1, 'wrong-user', mockLogger)).rejects.toThrow(
-'Notification not found or user does not have permission.',
-);
+await expect(
+notificationRepo.markNotificationAsRead(1, 'wrong-user', mockLogger),
+).rejects.toThrow('Notification not found or user does not have permission.');
 });
 });

@@ -1,6 +1,6 @@
 // src/services/db/reaction.db.test.ts
 import { describe, it, expect, vi, beforeEach, Mock } from 'vitest';
-import type { Pool, PoolClient } from 'pg';
+import type { PoolClient } from 'pg';
 import { ReactionRepository } from './reaction.db';
 import { mockPoolInstance } from '../../tests/setup/tests-setup-unit';
 import { withTransaction } from './connection.db';
@@ -214,9 +214,9 @@ describe('Reaction DB Service', () => {
 it('should throw an error if the database query fails', async () => {
 const dbError = new Error('DB Error');
 mockPoolInstance.query.mockRejectedValue(dbError);
-await expect(
-reactionRepo.getReactionSummary('recipe', '123', mockLogger),
-).rejects.toThrow('Failed to retrieve reaction summary.');
+await expect(reactionRepo.getReactionSummary('recipe', '123', mockLogger)).rejects.toThrow(
+'Failed to retrieve reaction summary.',
+);
 expect(mockLogger.error).toHaveBeenCalledWith(
 { err: dbError, entityType: 'recipe', entityId: '123' },
 'Database error in getReactionSummary',
@@ -27,7 +27,7 @@ export class ReactionRepository {
 const { userId, entityType, entityId } = filters;
 try {
 let query = 'SELECT * FROM public.user_reactions WHERE 1=1';
-const params: any[] = [];
+const params: (string | number)[] = [];
 let paramCount = 1;

 if (userId) {
@@ -50,9 +50,15 @@ export class ReactionRepository {
 const result = await this.db.query<UserReaction>(query, params);
 return result.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getReactions', { filters }, {
+handleDbError(
+error,
+logger,
+'Database error in getReactions',
+{ filters },
+{
 defaultMessage: 'Failed to retrieve user reactions.',
-});
+},
+);
 }
 }

@@ -88,10 +94,16 @@ export class ReactionRepository {
 return insertRes.rows[0];
 });
 } catch (error) {
-handleDbError(error, logger, 'Database error in toggleReaction', { reactionData }, {
+handleDbError(
+error,
+logger,
+'Database error in toggleReaction',
+{ reactionData },
+{
 fkMessage: 'The specified user or entity does not exist.',
 defaultMessage: 'Failed to toggle user reaction.',
-});
+},
+);
 }
 }

@@ -118,12 +130,21 @@ export class ReactionRepository {
 GROUP BY reaction_type
 ORDER BY count DESC;
 `;
-const result = await getPool().query<{ reaction_type: string; count: number }>(query, [entityType, entityId]);
+const result = await getPool().query<{ reaction_type: string; count: number }>(query, [
+entityType,
+entityId,
+]);
 return result.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getReactionSummary', { entityType, entityId }, {
+handleDbError(
+error,
+logger,
+'Database error in getReactionSummary',
+{ entityType, entityId },
+{
 defaultMessage: 'Failed to retrieve reaction summary.',
-});
+},
+);
 }
 }
 }
@@ -27,7 +27,7 @@ import { UserRepository, exportUserData } from './user.db';
 import { mockPoolInstance } from '../../tests/setup/tests-setup-unit';
 import { createMockUserProfile, createMockUser } from '../../tests/utils/mockFactories';
 import { UniqueConstraintError, ForeignKeyConstraintError, NotFoundError } from './errors.db';
-import type { Profile, ActivityLogItem, SearchQuery, UserProfile, User } from '../../types';
+import type { ActivityLogItem, SearchQuery, UserProfile } from '../../types';
 import { ShoppingRepository } from './shopping.db';
 import { PersonalizationRepository } from './personalization.db';

@@ -793,7 +793,13 @@ describe('User DB Service', () => {
 it('should execute DELETE and INSERT queries', async () => {
 const mockClient = { query: vi.fn().mockResolvedValue({ rows: [] }) };
 const expires = new Date();
-await userRepo.createPasswordResetToken('123', 'token-hash', expires, mockLogger, mockClient as unknown as PoolClient);
+await userRepo.createPasswordResetToken(
+'123',
+'token-hash',
+expires,
+mockLogger,
+mockClient as unknown as PoolClient,
+);
 expect(mockClient.query).toHaveBeenCalledWith(
 'DELETE FROM public.password_reset_tokens WHERE user_id = $1',
 ['123'],
@@ -809,7 +815,13 @@ describe('User DB Service', () => {
 (dbError as Error & { code: string }).code = '23503';
 const mockClient = { query: vi.fn().mockRejectedValue(dbError) };
 await expect(
-userRepo.createPasswordResetToken('non-existent-user', 'hash', new Date(), mockLogger, mockClient as unknown as PoolClient),
+userRepo.createPasswordResetToken(
+'non-existent-user',
+'hash',
+new Date(),
+mockLogger,
+mockClient as unknown as PoolClient,
+),
 ).rejects.toThrow(ForeignKeyConstraintError);
 });

@@ -818,7 +830,13 @@ describe('User DB Service', () => {
 const mockClient = { query: vi.fn().mockRejectedValue(dbError) };
 const expires = new Date();
 await expect(
-userRepo.createPasswordResetToken('123', 'token-hash', expires, mockLogger, mockClient as unknown as PoolClient),
+userRepo.createPasswordResetToken(
+'123',
+'token-hash',
+expires,
+mockLogger,
+mockClient as unknown as PoolClient,
+),
 ).rejects.toThrow('Failed to create password reset token.');
 expect(mockLogger.error).toHaveBeenCalledWith(
 { err: dbError, userId: '123' },
@@ -901,7 +919,9 @@ describe('User DB Service', () => {
 it('should call profile, watched items, and shopping list functions', async () => {
 const findProfileSpy = vi.spyOn(UserRepository.prototype, 'findUserProfileById');
 findProfileSpy.mockResolvedValue(
-createMockUserProfile({ user: createMockUser({ user_id: '123', email: '123@example.com' }) }),
+createMockUserProfile({
+user: createMockUser({ user_id: '123', email: '123@example.com' }),
+}),
 );
 const getWatchedItemsSpy = vi.spyOn(PersonalizationRepository.prototype, 'getWatchedItems');
 getWatchedItemsSpy.mockResolvedValue([]);
@@ -2,7 +2,7 @@
 import { Pool, PoolClient } from 'pg';
 import { getPool } from './connection.db';
 import type { Logger } from 'pino';
-import { NotFoundError, handleDbError, UniqueConstraintError } from './errors.db';
+import { NotFoundError, handleDbError } from './errors.db';
 import {
 Profile,
 MasterGroceryItem,
@@ -55,9 +55,15 @@ export class UserRepository {
 );
 return res.rows[0];
 } catch (error) {
-handleDbError(error, logger, 'Database error in findUserByEmail', { email }, {
+handleDbError(
+error,
+logger,
+'Database error in findUserByEmail',
+{ email },
+{
 defaultMessage: 'Failed to retrieve user from database.',
-});
+},
+);
 }
 }

@@ -142,17 +148,28 @@ export class UserRepository {
 });
 } else {
 // If this.db is already a PoolClient, we're inside a transaction. Use it directly.
-return await this._createUser(this.db as PoolClient, email, passwordHash, profileData, logger);
+return await this._createUser(
+this.db as PoolClient,
+email,
+passwordHash,
+profileData,
+logger,
+);
 }
 } catch (error) {
-handleDbError(error, logger, 'Error during createUser', { email }, {
+handleDbError(
+error,
+logger,
+'Error during createUser',
+{ email },
+{
 uniqueMessage: 'A user with this email address already exists.',
 defaultMessage: 'Failed to create user in database.',
-});
+},
+);
 }
 }

-
 /**
 * Finds a user by their email and joins their profile data.
 * This is used by the LocalStrategy to get all necessary data for authentication and session creation in one query.
@@ -207,9 +224,15 @@ export class UserRepository {

 return authableProfile;
 } catch (error) {
-handleDbError(error, logger, 'Database error in findUserWithProfileByEmail', { email }, {
+handleDbError(
+error,
+logger,
+'Database error in findUserWithProfileByEmail',
+{ email },
+{
 defaultMessage: 'Failed to retrieve user with profile from database.',
-});
+},
+);
 }
 }

@@ -451,10 +474,7 @@ export class UserRepository {
 * @param refreshToken The refresh token to look up.
 * @returns A promise that resolves to the user object (id, email) or undefined if not found.
 */
-async findUserByRefreshToken(
-refreshToken: string,
-logger: Logger,
-): Promise<User | undefined> {
+async findUserByRefreshToken(refreshToken: string, logger: Logger): Promise<User | undefined> {
 try {
 const res = await this.db.query<User>(
 'SELECT user_id, email, created_at, updated_at FROM public.users WHERE refresh_token = $1',
@@ -465,9 +485,15 @@ export class UserRepository {
 }
 return res.rows[0];
 } catch (error) {
-handleDbError(error, logger, 'Database error in findUserByRefreshToken', {}, {
+handleDbError(
+error,
+logger,
+'Database error in findUserByRefreshToken',
+{},
+{
 defaultMessage: 'Failed to find user by refresh token.',
-});
+},
+);
 }
 }

@@ -559,9 +585,15 @@ export class UserRepository {
 );
 return res.rowCount ?? 0;
 } catch (error) {
-handleDbError(error, logger, 'Database error in deleteExpiredResetTokens', {}, {
+handleDbError(
+error,
+logger,
+'Database error in deleteExpiredResetTokens',
+{},
+{
 defaultMessage: 'Failed to delete expired password reset tokens.',
-});
+},
+);
 }
 }
 /**
@@ -576,11 +608,17 @@ export class UserRepository {
 [followerId, followingId],
 );
 } catch (error) {
-handleDbError(error, logger, 'Database error in followUser', { followerId, followingId }, {
+handleDbError(
+error,
+logger,
+'Database error in followUser',
+{ followerId, followingId },
+{
 fkMessage: 'One or both users do not exist.',
 checkMessage: 'A user cannot follow themselves.',
 defaultMessage: 'Failed to follow user.',
-});
+},
+);
 }
 }

@@ -596,9 +634,15 @@ export class UserRepository {
 [followerId, followingId],
 );
 } catch (error) {
-handleDbError(error, logger, 'Database error in unfollowUser', { followerId, followingId }, {
+handleDbError(
+error,
+logger,
+'Database error in unfollowUser',
+{ followerId, followingId },
+{
 defaultMessage: 'Failed to unfollow user.',
-});
+},
+);
 }
 }

@@ -628,9 +672,15 @@ export class UserRepository {
 const res = await this.db.query<ActivityLogItem>(query, [userId, limit, offset]);
 return res.rows;
 } catch (error) {
-handleDbError(error, logger, 'Database error in getUserFeed', { userId, limit, offset }, {
+handleDbError(
+error,
+logger,
+'Database error in getUserFeed',
+{ userId, limit, offset },
+{
 defaultMessage: 'Failed to retrieve user feed.',
-});
+},
+);
 }
 }

@@ -651,10 +701,16 @@ export class UserRepository {
 );
 return res.rows[0];
 } catch (error) {
-handleDbError(error, logger, 'Database error in logSearchQuery', { queryData }, {
+handleDbError(
+error,
+logger,
+'Database error in logSearchQuery',
+{ queryData },
+{
 fkMessage: 'The specified user does not exist.',
 defaultMessage: 'Failed to log search query.',
-});
+},
+);
 }
 }
 }
@@ -5,7 +5,7 @@
 * This is particularly useful for broadcasting application-wide events, such as session expiry.
 */

-type EventCallback = (data?: any) => void;
+type EventCallback = (data?: unknown) => void;

 export class EventBus {
 private listeners: { [key: string]: EventCallback[] } = {};
@@ -22,7 +22,7 @@ export class EventBus {
 this.listeners[event] = this.listeners[event].filter((l) => l !== callback);
 }

-dispatch(event: string, data?: any): void {
+dispatch(event: string, data?: unknown): void {
 if (!this.listeners[event]) return;
 this.listeners[event].forEach((callback) => callback(data));
 }
@@ -5,11 +5,7 @@ import type { AIService } from './aiService.server';
 import type { PersonalizationRepository } from './db/personalization.db';
 import { AiDataValidationError } from './processingErrors';
 import type { FlyerJobData } from '../types/job-data';
-import {
-AiFlyerDataSchema,
-ExtractedFlyerItemSchema,
-requiredString,
-} from '../types/ai'; // Import consolidated schemas and helper
+import { AiFlyerDataSchema } from '../types/ai'; // Import consolidated schemas and helper

 export type ValidatedAiDataType = z.infer<typeof AiFlyerDataSchema>;

@@ -31,10 +27,7 @@ export class FlyerAiProcessor {
 /**
 * Validates the raw data from the AI against the Zod schema.
 */
-private _validateAiData(
-extractedData: unknown,
-logger: Logger,
-): AiProcessorResult {
+private _validateAiData(extractedData: unknown, logger: Logger): AiProcessorResult {
 const validationResult = AiFlyerDataSchema.safeParse(extractedData);
 if (!validationResult.success) {
 const errors = validationResult.error.flatten();
@@ -91,7 +84,9 @@ export class FlyerAiProcessor {
 );
 }

-logger.info(`AI extracted ${validationResult.data.items.length} items. Needs Review: ${needsReview}`);
+logger.info(
+`AI extracted ${validationResult.data.items.length} items. Needs Review: ${needsReview}`,
+);
 return { data: validationResult.data, needsReview };
 }

@@ -103,7 +98,9 @@ export class FlyerAiProcessor {
 jobData: FlyerJobData,
 logger: Logger,
 ): Promise<AiProcessorResult> {
-console.error(`[WORKER DEBUG] FlyerAiProcessor: extractAndValidateData called with ${imagePaths.length} images`);
+console.error(
+`[WORKER DEBUG] FlyerAiProcessor: extractAndValidateData called with ${imagePaths.length} images`,
+);
 logger.info(`Starting AI data extraction for ${imagePaths.length} pages.`);
 const { submitterIp, userProfileAddress } = jobData;
 const masterItems = await this.personalizationRepo.getAllMasterItems(logger);
@@ -125,7 +122,9 @@ export class FlyerAiProcessor {
 items: [],
 };

-logger.info(`Processing ${imagePaths.length} pages in ${batches.length} batches (Batch Size: ${BATCH_SIZE}).`);
+logger.info(
+`Processing ${imagePaths.length} pages in ${batches.length} batches (Batch Size: ${BATCH_SIZE}).`,
+);

 for (const [index, batch] of batches.entries()) {
 logger.info(`Processing batch ${index + 1}/${batches.length} (${batch.length} pages)...`);
@@ -149,10 +148,14 @@ export class FlyerAiProcessor {
 mergedData.valid_to = batchResult.valid_to;
 mergedData.store_address = batchResult.store_address;
 } else {
-if (!mergedData.store_name && batchResult.store_name) mergedData.store_name = batchResult.store_name;
-if (!mergedData.valid_from && batchResult.valid_from) mergedData.valid_from = batchResult.valid_from;
-if (!mergedData.valid_to && batchResult.valid_to) mergedData.valid_to = batchResult.valid_to;
-if (!mergedData.store_address && batchResult.store_address) mergedData.store_address = batchResult.store_address;
+if (!mergedData.store_name && batchResult.store_name)
+mergedData.store_name = batchResult.store_name;
+if (!mergedData.valid_from && batchResult.valid_from)
+mergedData.valid_from = batchResult.valid_from;
+if (!mergedData.valid_to && batchResult.valid_to)
+mergedData.valid_to = batchResult.valid_to;
+if (!mergedData.store_address && batchResult.store_address)
+mergedData.store_address = batchResult.store_address;
 }

 // 2. Items: Append all found items to the master list.
@@ -160,7 +163,10 @@ export class FlyerAiProcessor {
 }

 logger.info(`Batch processing complete. Total items extracted: ${mergedData.items.length}`);
-console.error(`[WORKER DEBUG] FlyerAiProcessor: Merged AI Data:`, JSON.stringify(mergedData, null, 2));
+console.error(
+`[WORKER DEBUG] FlyerAiProcessor: Merged AI Data:`,
+JSON.stringify(mergedData, null, 2),
+);

 // Validate the final merged dataset
 return this._validateAiData(mergedData, logger);
@@ -4,7 +4,6 @@ import { FlyerDataTransformer } from './flyerDataTransformer';
 import { logger as mockLogger } from './logger.server';
 import { generateFlyerIcon } from '../utils/imageProcessor';
 import type { AiProcessorResult } from './flyerAiProcessor.server';
-import type { FlyerItemInsert } from '../types';
 import { getBaseUrl } from '../utils/serverUtils';

 // Mock the dependencies
@@ -126,7 +125,6 @@ describe('FlyerDataTransformer', () => {
 click_count: 0,
 }),
 );
-
 });

 it('should handle missing optional data gracefully', async () => {
@@ -238,14 +236,22 @@ describe('FlyerDataTransformer', () => {
 // Check Case 1 (null/undefined values)
 expect(itemsForDb[0]).toEqual(
 expect.objectContaining({
-item: 'Unknown Item', price_display: '', quantity: '', category_name: 'Other/Miscellaneous', master_item_id: undefined,
+item: 'Unknown Item',
+price_display: '',
+quantity: '',
+category_name: 'Other/Miscellaneous',
+master_item_id: undefined,
 }),
 );

 // Check Case 2 (empty string values)
 expect(itemsForDb[1]).toEqual(
 expect.objectContaining({
-item: 'Unknown Item', price_display: '', quantity: '', category_name: 'Other/Miscellaneous', master_item_id: 20,
+item: 'Unknown Item',
+price_display: '',
+quantity: '',
+category_name: 'Other/Miscellaneous',
+master_item_id: 20,
 }),
 );
 });
@@ -1,5 +1,4 @@
 // src/services/flyerDataTransformer.ts
-import path from 'path';
 import type { z } from 'zod';
 import type { Logger } from 'pino';
 import type { FlyerInsert, FlyerItemInsert } from '../types';
@@ -33,12 +32,12 @@ export class FlyerDataTransformer {
 ...item,
 // Use nullish coalescing and trim for robustness.
 // An empty or whitespace-only name falls back to 'Unknown Item'.
-item: (String(item.item ?? '')).trim() || 'Unknown Item',
+item: String(item.item ?? '').trim() || 'Unknown Item',
 // Default null/undefined to an empty string and trim.
-price_display: (String(item.price_display ?? '')).trim(),
-quantity: (String(item.quantity ?? '')).trim(),
+price_display: String(item.price_display ?? '').trim(),
+quantity: String(item.quantity ?? '').trim(),
 // An empty or whitespace-only category falls back to 'Other/Miscellaneous'.
-category_name: (String(item.category_name ?? '')).trim() || 'Other/Miscellaneous',
+category_name: String(item.category_name ?? '').trim() || 'Other/Miscellaneous',
 // Overwrite price_in_cents with our calculated value.
 price_in_cents: finalPriceInCents,
 // Use nullish coalescing to convert null to undefined for the database.
@@ -62,10 +61,17 @@ export class FlyerDataTransformer {
 baseUrl: string,
 logger: Logger,
 ): { imageUrl: string; iconUrl: string } {
-console.error('[DEBUG] FlyerDataTransformer._buildUrls inputs:', { imageFileName, iconFileName, baseUrl });
+console.error('[DEBUG] FlyerDataTransformer._buildUrls inputs:', {
+imageFileName,
+iconFileName,
+baseUrl,
+});
 logger.debug({ imageFileName, iconFileName, baseUrl }, 'Building URLs');
 const finalBaseUrl = baseUrl || getBaseUrl(logger);
-console.error('[DEBUG] FlyerDataTransformer._buildUrls finalBaseUrl resolved to:', finalBaseUrl);
+console.error(
+'[DEBUG] FlyerDataTransformer._buildUrls finalBaseUrl resolved to:',
+finalBaseUrl,
+);
 const imageUrl = `${finalBaseUrl}/flyer-images/${imageFileName}`;
 const iconUrl = `${finalBaseUrl}/flyer-images/icons/${iconFileName}`;
 console.error('[DEBUG] FlyerDataTransformer._buildUrls constructed:', { imageUrl, iconUrl });
@@ -101,7 +107,9 @@ export class FlyerDataTransformer {

 const { imageUrl, iconUrl } = this._buildUrls(imageFileName, iconFileName, baseUrl, logger);

-const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => this._normalizeItem(item));
+const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) =>
+this._normalizeItem(item),
+);

 const storeName = extractedData.store_name || 'Unknown Store (auto)';
 if (!extractedData.store_name) {
@@ -1,7 +1,6 @@
 // src/services/flyerProcessingService.server.test.ts
 import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
 import { Job, UnrecoverableError } from 'bullmq';
-import { AiFlyerDataSchema } from '../types/ai';
 import type { FlyerInsert } from '../types';
 import type { CleanupJobData, FlyerJobData } from '../types/job-data';

@@ -36,13 +35,12 @@ import {
 AiDataValidationError,
 PdfConversionError,
 UnsupportedFileTypeError,
-TransformationError,
 DatabaseError,
 } from './processingErrors';
 import { NotFoundError } from './db/errors.db';
 import { FlyerFileHandler } from './flyerFileHandler.server';
 import { FlyerAiProcessor } from './flyerAiProcessor.server';
-import type { IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
+import type { IFileSystem } from './flyerFileHandler.server';
 import { generateFlyerIcon } from '../utils/imageProcessor';
 import type { AIService } from './aiService.server';
 import { FlyerPersistenceService } from './flyerPersistenceService.server';
@@ -169,12 +167,14 @@ describe('FlyerProcessingService', () => {
 createdImagePaths: [],
 });

-mockPersistenceService.saveFlyer.mockResolvedValue(createMockFlyer({
+mockPersistenceService.saveFlyer.mockResolvedValue(
+createMockFlyer({
 flyer_id: 1,
 file_name: 'test.jpg',
 image_url: 'https://example.com/test.jpg',
 item_count: 1,
-}));
+}),
+);
 vi.mocked(mockedDb.adminRepo.logActivity).mockResolvedValue();
 // FIX: Provide a default mock for getAllMasterItems to prevent a TypeError on `.length`.
 vi.mocked(mockedDb.personalizationRepo.getAllMasterItems).mockResolvedValue([]);
@@ -225,16 +225,27 @@ describe('FlyerProcessingService', () => {
 expect(result).toEqual({ flyerId: 1 });

 // 1. File handler was called
-expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(job.data.filePath, job, expect.any(Object));
+expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(
+job.data.filePath,
+job,
+expect.any(Object),
+);

 // 2. Optimization was called
-expect(mockFileHandler.optimizeImages).toHaveBeenCalledWith(expect.any(Array), expect.any(Object));
+expect(mockFileHandler.optimizeImages).toHaveBeenCalledWith(
+expect.any(Array),
+expect.any(Object),
+);

 // 3. AI processor was called
 expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);

 // 4. Icon was generated from the processed image
-expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-processed.jpeg', '/tmp/icons', expect.any(Object));
+expect(generateFlyerIcon).toHaveBeenCalledWith(
+'/tmp/flyer-processed.jpeg',
+'/tmp/icons',
+expect.any(Object),
+);

 // 5. Transformer was called with the correct filenames
 expect(FlyerDataTransformer.prototype.transform).toHaveBeenCalledWith(
@@ -288,10 +299,18 @@ describe('FlyerProcessingService', () => {
 await service.processJob(job);

 // Verify transaction and inner calls
-expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.pdf', job, expect.any(Object));
+expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(
+'/tmp/flyer.pdf',
+job,
+expect.any(Object),
+);
 expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
 // Verify icon generation was called for the first page
-expect(generateFlyerIcon).toHaveBeenCalledWith('/tmp/flyer-1.jpg', '/tmp/icons', expect.any(Object));
+expect(generateFlyerIcon).toHaveBeenCalledWith(
+'/tmp/flyer-1.jpg',
+'/tmp/icons',
+expect.any(Object),
+);
 // Verify cleanup job includes original PDF and all generated/processed images
 expect(mockCleanupQueue.add).toHaveBeenCalledWith(
 'cleanup-flyer-files',
@@ -320,9 +339,24 @@ describe('FlyerProcessingService', () => {
 errorCode: 'UNKNOWN_ERROR',
 message: 'AI model exploded',
 stages: [
-{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
-{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
-{ name: 'Extracting Data with AI', status: 'failed', critical: true, detail: 'AI model exploded' },
+{
+name: 'Preparing Inputs',
+status: 'completed',
+critical: true,
+detail: '1 page(s) ready for AI.',
+},
+{
+name: 'Image Optimization',
+status: 'completed',
+critical: true,
+detail: 'Compressing and resizing images...',
+},
+{
+name: 'Extracting Data with AI',
+status: 'failed',
+critical: true,
+detail: 'AI model exploded',
+},
 { name: 'Transforming AI Data', status: 'skipped', critical: true },
 { name: 'Saving to Database', status: 'skipped', critical: true },
 ],
@@ -346,9 +380,24 @@ describe('FlyerProcessingService', () => {
 errorCode: 'QUOTA_EXCEEDED',
 message: 'An AI quota has been exceeded. Please try again later.',
 stages: [
-{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
-{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
-{ name: 'Extracting Data with AI', status: 'failed', critical: true, detail: 'AI model quota exceeded' },
+{
+name: 'Preparing Inputs',
+status: 'completed',
+critical: true,
+detail: '1 page(s) ready for AI.',
+},
+{
+name: 'Image Optimization',
+status: 'completed',
+critical: true,
+detail: 'Compressing and resizing images...',
+},
+{
+name: 'Extracting Data with AI',
+status: 'failed',
+critical: true,
+detail: 'AI model quota exceeded',
+},
 { name: 'Transforming AI Data', status: 'skipped', critical: true },
 { name: 'Saving to Database', status: 'skipped', critical: true },
 ],
@@ -374,7 +423,13 @@ describe('FlyerProcessingService', () => {
 'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.', // This was a duplicate, fixed.
 stderr: 'pdftocairo error',
 stages: [
-{ name: 'Preparing Inputs', status: 'failed', critical: true, detail: 'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.' },
+{
+name: 'Preparing Inputs',
+status: 'failed',
+critical: true,
+detail:
+'The uploaded PDF could not be processed. It might be blank, corrupt, or password-protected.',
+},
 { name: 'Image Optimization', status: 'skipped', critical: true },
 { name: 'Extracting Data with AI', status: 'skipped', critical: true },
 { name: 'Transforming AI Data', status: 'skipped', critical: true },
@@ -400,7 +455,8 @@ describe('FlyerProcessingService', () => {
 {
 err: validationError,
 errorCode: 'AI_VALIDATION_FAILED',
-message: "The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
+message:
+"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
 validationErrors: {},
 rawData: {},
 stages: expect.any(Array), // Stages will be dynamically generated
@@ -416,9 +472,25 @@ describe('FlyerProcessingService', () => {
 validationErrors: {},
 rawData: {},
 stages: [
-{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
-{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
-{ name: 'Extracting Data with AI', status: 'failed', critical: true, detail: "The AI couldn't read the flyer's format. Please try a clearer image or a different flyer." },
+{
+name: 'Preparing Inputs',
+status: 'completed',
+critical: true,
+detail: '1 page(s) ready for AI.',
+},
+{
+name: 'Image Optimization',
+status: 'completed',
+critical: true,
+detail: 'Compressing and resizing images...',
+},
+{
+name: 'Extracting Data with AI',
+status: 'failed',
+critical: true,
+detail:
+"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
+},
 { name: 'Transforming AI Data', status: 'skipped', critical: true },
 { name: 'Saving to Database', status: 'skipped', critical: true },
 ],
@@ -443,10 +515,18 @@ describe('FlyerProcessingService', () => {
 await service.processJob(job);

 // Verify transaction and inner calls
-expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith('/tmp/flyer.gif', job, expect.any(Object));
+expect(mockFileHandler.prepareImageInputs).toHaveBeenCalledWith(
+'/tmp/flyer.gif',
+job,
+expect.any(Object),
+);
 expect(mockAiProcessor.extractAndValidateData).toHaveBeenCalledTimes(1);
 // Verify icon generation was called for the converted image
-expect(generateFlyerIcon).toHaveBeenCalledWith(convertedPath, '/tmp/icons', expect.any(Object));
+expect(generateFlyerIcon).toHaveBeenCalledWith(
+convertedPath,
+'/tmp/icons',
+expect.any(Object),
+);
 expect(mockCleanupQueue.add).toHaveBeenCalledWith(
 'cleanup-flyer-files',
 {
@@ -464,9 +544,9 @@ describe('FlyerProcessingService', () => {
 it('should throw an error and not enqueue cleanup if the database service fails', async () => {
 const job = createMockJob({});
 const { logger } = await import('./logger.server');
-const dbError = new Error('Database transaction failed');
+const dbError = new DatabaseError('Database transaction failed');

-mockPersistenceService.saveFlyer.mockRejectedValue(new DatabaseError('Database transaction failed'));
+mockPersistenceService.saveFlyer.mockRejectedValue(dbError);

 // The service wraps the generic DB error in a DatabaseError.
 await expect(service.processJob(job)).rejects.toThrow(DatabaseError);
@@ -476,11 +556,31 @@ describe('FlyerProcessingService', () => {
 errorCode: 'DATABASE_ERROR',
 message: 'A database operation failed. Please try again later.',
 stages: [
-{ name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
-{ name: 'Image Optimization', status: 'completed', critical: true, detail: 'Compressing and resizing images...' },
-{ name: 'Extracting Data with AI', status: 'completed', critical: true, detail: 'Communicating with AI model...' },
+{
+name: 'Preparing Inputs',
+status: 'completed',
+critical: true,
+detail: '1 page(s) ready for AI.',
+},
+{
+name: 'Image Optimization',
+status: 'completed',
+critical: true,
+detail: 'Compressing and resizing images...',
+},
+{
+name: 'Extracting Data with AI',
+status: 'completed',
+critical: true,
+detail: 'Communicating with AI model...',
+},
 { name: 'Transforming AI Data', status: 'completed', critical: true },
-{ name: 'Saving to Database', status: 'failed', critical: true, detail: 'A database operation failed. Please try again later.' },
+{
+name: 'Saving to Database',
+status: 'failed',
+critical: true,
+detail: 'A database operation failed. Please try again later.',
+},
 ],
 });
 expect(mockCleanupQueue.add).not.toHaveBeenCalled();
@@ -494,7 +594,9 @@ describe('FlyerProcessingService', () => {
 filePath: '/tmp/document.txt',
 originalFileName: 'document.txt',
 });
-const fileTypeError = new UnsupportedFileTypeError('Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.');
+const fileTypeError = new UnsupportedFileTypeError(
+'Unsupported file type: .txt. Supported types are PDF, JPG, PNG, WEBP, HEIC, HEIF, GIF, TIFF, SVG, BMP.',
+);
 mockFileHandler.prepareImageInputs.mockRejectedValue(fileTypeError);
 const { logger } = await import('./logger.server');

@@ -502,7 +604,12 @@ describe('FlyerProcessingService', () => {

 await expect(service.processJob(job)).rejects.toThrow(UnsupportedFileTypeError);

-expect(reportErrorSpy).toHaveBeenCalledWith(fileTypeError, job, expect.any(Object), expect.any(Array));
+expect(reportErrorSpy).toHaveBeenCalledWith(
+fileTypeError,
+job,
+expect.any(Object),
+expect.any(Array),
+);
 expect(mockCleanupQueue.add).not.toHaveBeenCalled();
 expect(logger.warn).toHaveBeenCalledWith(
 'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
@@ -519,7 +626,12 @@ describe('FlyerProcessingService', () => {

 await expect(service.processJob(job)).rejects.toThrow('Icon generation failed.');

-expect(reportErrorSpy).toHaveBeenCalledWith(iconGenError, job, expect.any(Object), expect.any(Array));
+expect(reportErrorSpy).toHaveBeenCalledWith(
+iconGenError,
+job,
+expect.any(Object),
+expect.any(Array),
+);
 expect(mockCleanupQueue.add).not.toHaveBeenCalled();
 expect(logger.warn).toHaveBeenCalledWith(
 'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
@@ -539,7 +651,9 @@ describe('FlyerProcessingService', () => {
 ];
 const privateMethod = (service as any)._reportErrorAndThrow;

-await expect(privateMethod(genericError, job, logger, initialStages)).rejects.toThrow(genericError);
+await expect(privateMethod(genericError, job, logger, initialStages)).rejects.toThrow(
+genericError,
+);

 expect(job.updateProgress).toHaveBeenCalledWith({
 errorCode: 'UNKNOWN_ERROR',
@@ -565,15 +679,24 @@ describe('FlyerProcessingService', () => {
 ];
 const privateMethod = (service as any)._reportErrorAndThrow;

-await expect(privateMethod(validationError, job, logger, initialStages)).rejects.toThrow(validationError);
+await expect(privateMethod(validationError, job, logger, initialStages)).rejects.toThrow(
+validationError,
+);

 expect(job.updateProgress).toHaveBeenCalledWith({
 errorCode: 'AI_VALIDATION_FAILED',
-message: "The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
+message:
+"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
 validationErrors: { foo: 'bar' },
 rawData: { raw: 'data' },
 stages: [
-{ name: 'Extracting Data with AI', status: 'failed', critical: true, detail: "The AI couldn't read the flyer's format. Please try a clearer image or a different flyer." },
+{
+name: 'Extracting Data with AI',
+status: 'failed',
+critical: true,
+detail:
+"The AI couldn't read the flyer's format. Please try a clearer image or a different flyer.",
+},
 ],
 });
 });
@@ -584,9 +707,7 @@ describe('FlyerProcessingService', () => {
 const quotaError = new Error('RESOURCE_EXHAUSTED');
 const privateMethod = (service as any)._reportErrorAndThrow;

-await expect(privateMethod(quotaError, job, logger, [])).rejects.toThrow(
-UnrecoverableError,
-);
+await expect(privateMethod(quotaError, job, logger, [])).rejects.toThrow(UnrecoverableError);

 expect(job.updateProgress).toHaveBeenCalledWith({
 errorCode: 'QUOTA_EXCEEDED',
@@ -601,9 +722,7 @@ describe('FlyerProcessingService', () => {
 const nonError = 'just a string error';
 const privateMethod = (service as any)._reportErrorAndThrow;

-await expect(privateMethod(nonError, job, logger, [])).rejects.toThrow(
-'just a string error',
-);
+await expect(privateMethod(nonError, job, logger, [])).rejects.toThrow('just a string error');
 });

 it('should correctly identify the failed stage based on error code', async () => {
@@ -618,12 +737,19 @@ describe('FlyerProcessingService', () => {

 await expect(privateMethod(pdfError, job, logger, initialStages)).rejects.toThrow(pdfError);

-expect(job.updateProgress).toHaveBeenCalledWith(expect.objectContaining({
+expect(job.updateProgress).toHaveBeenCalledWith(
+expect.objectContaining({
 stages: [
-{ name: 'Preparing Inputs', status: 'failed', critical: true, detail: expect.any(String) },
+{
+name: 'Preparing Inputs',
+status: 'failed',
+critical: true,
+detail: expect.any(String),
+},
 { name: 'Extracting Data with AI', status: 'skipped', critical: true },
 ],
-}));
+}),
+);
 });
 });

@@ -717,7 +843,9 @@ describe('FlyerProcessingService', () => {
 expect(result).toEqual({ status: 'success', deletedCount: 2 });
 expect(mocks.unlink).toHaveBeenCalledTimes(2);
 expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/flyer-abc.jpg');
|
||||||
expect(mocks.unlink).toHaveBeenCalledWith('/var/www/app/flyer-images/icons/icon-flyer-abc.webp');
|
expect(mocks.unlink).toHaveBeenCalledWith(
|
||||||
|
'/var/www/app/flyer-images/icons/icon-flyer-abc.webp',
|
||||||
|
);
|
||||||
const { logger } = await import('./logger.server');
|
const { logger } = await import('./logger.server');
|
||||||
expect(logger.warn).toHaveBeenCalledWith(
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
'Cleanup job for flyer 1 received no paths. Attempting to derive paths from DB.',
|
'Cleanup job for flyer 1 received no paths. Attempting to derive paths from DB.',
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
 // src/services/monitoringService.server.test.ts
 import { describe, it, expect, vi, beforeEach } from 'vitest';
-import type { Job, Queue } from 'bullmq';
+import type { Job } from 'bullmq';
 import { NotFoundError, ValidationError } from './db/errors.db';
 import { logger } from './logger.server';

@@ -131,9 +131,9 @@ describe('MonitoringService', () => {
 const jobId = 'failed-job-1';

 it('should throw NotFoundError for an unknown queue name', async () => {
-await expect(monitoringService.retryFailedJob('unknown-queue', jobId, userId)).rejects.toThrow(
-new NotFoundError(`Queue 'unknown-queue' not found.`),
-);
+await expect(
+monitoringService.retryFailedJob('unknown-queue', jobId, userId),
+).rejects.toThrow(new NotFoundError(`Queue 'unknown-queue' not found.`));
 });

 it('should throw NotFoundError if the job does not exist in the queue', async () => {
@@ -141,7 +141,9 @@ describe('MonitoringService', () => {

 await expect(
 monitoringService.retryFailedJob('flyer-processing', jobId, userId),
-).rejects.toThrow(new NotFoundError(`Job with ID '${jobId}' not found in queue 'flyer-processing'.`));
+).rejects.toThrow(
+new NotFoundError(`Job with ID '${jobId}' not found in queue 'flyer-processing'.`),
+);
 });

 it("should throw ValidationError if the job is not in a 'failed' state", async () => {
@@ -154,7 +156,9 @@ describe('MonitoringService', () => {

 await expect(
 monitoringService.retryFailedJob('flyer-processing', jobId, userId),
-).rejects.toThrow(new ValidationError([], `Job is not in a 'failed' state. Current state: completed.`));
+).rejects.toThrow(
+new ValidationError([], `Job is not in a 'failed' state. Current state: completed.`),
+);
 });

 it("should call job.retry() and log if the job is in a 'failed' state", async () => {

@@ -13,7 +13,7 @@ import {
 flyerWorker,
 weeklyAnalyticsWorker,
 } from './workers.server';
-import type { Job, Queue } from 'bullmq';
+import type { Queue } from 'bullmq';
 import { NotFoundError, ValidationError } from './db/errors.db';
 import { logger } from './logger.server';

@@ -23,7 +23,13 @@ class MonitoringService {
 * @returns A promise that resolves to an array of worker statuses.
 */
 async getWorkerStatuses() {
-const workers = [flyerWorker, emailWorker, analyticsWorker, cleanupWorker, weeklyAnalyticsWorker];
+const workers = [
+flyerWorker,
+emailWorker,
+analyticsWorker,
+cleanupWorker,
+weeklyAnalyticsWorker,
+];
 return Promise.all(
 workers.map(async (worker) => ({
 name: worker.name,
@@ -80,10 +86,7 @@ class MonitoringService {

 const jobState = await job.getState();
 if (jobState !== 'failed') {
-throw new ValidationError(
-[],
-`Job is not in a 'failed' state. Current state: ${jobState}.`,
-);
+throw new ValidationError([], `Job is not in a 'failed' state. Current state: ${jobState}.`);
 }

 await job.retry();
@@ -95,7 +98,15 @@ class MonitoringService {
 * @param jobId The ID of the job to retrieve.
 * @returns A promise that resolves to a simplified job status object.
 */
-async getFlyerJobStatus(jobId: string): Promise<{ id: string; state: string; progress: number | object | string | boolean; returnValue: any; failedReason: string | null; }> {
+async getFlyerJobStatus(
+jobId: string,
+): Promise<{
+id: string;
+state: string;
+progress: number | object | string | boolean;
+returnValue: unknown;
+failedReason: string | null;
+}> {
 const job = await flyerQueue.getJob(jobId);
 if (!job) {
 throw new NotFoundError('Job not found.');
@@ -17,7 +17,7 @@ export class FlyerProcessingError extends Error {
 Object.setPrototypeOf(this, new.target.prototype);
 }

-toErrorPayload(): { errorCode: string; message: string; [key: string]: any } {
+toErrorPayload(): { errorCode: string; message: string; [key: string]: unknown } {
 return { errorCode: this.errorCode, message: this.userMessage };
 }
 }
@@ -36,7 +36,7 @@ export class PdfConversionError extends FlyerProcessingError {
 this.stderr = stderr;
 }

-toErrorPayload(): { errorCode: string; message: string; [key: string]: any } {
+toErrorPayload(): { errorCode: string; message: string; [key: string]: unknown } {
 return { ...super.toErrorPayload(), stderr: this.stderr };
 }
 }
@@ -57,8 +57,12 @@ export class AiDataValidationError extends FlyerProcessingError {
 );
 }

-toErrorPayload(): { errorCode: string; message: string; [key: string]: any } {
-return { ...super.toErrorPayload(), validationErrors: this.validationErrors, rawData: this.rawData };
+toErrorPayload(): { errorCode: string; message: string; [key: string]: unknown } {
+return {
+...super.toErrorPayload(),
+validationErrors: this.validationErrors,
+rawData: this.rawData,
+};
 }
 }

@@ -80,11 +84,7 @@ export class TransformationError extends FlyerProcessingError {
 */
 export class DatabaseError extends FlyerProcessingError {
 constructor(message: string) {
-super(
-message,
-'DATABASE_ERROR',
-'A database operation failed. Please try again later.',
-);
+super(message, 'DATABASE_ERROR', 'A database operation failed. Please try again later.');
 }
 }
 /**

@@ -59,7 +59,6 @@ vi.mock('./logger.server', () => ({
 }));

 // Mock bullmq to capture the processor functions passed to the Worker constructor
-import { logger as mockLogger } from './logger.server';
 vi.mock('bullmq', () => ({
 Worker: mocks.MockWorker,
 Queue: vi.fn(function () {

@@ -1,5 +1,5 @@
 // src/services/queues.server.test.ts
-import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
+import { describe, it, expect, vi, beforeEach } from 'vitest';

 // --- Hoisted Mocks ---
 const mocks = vi.hoisted(() => {
@@ -1,13 +1,11 @@
 // src/services/systemService.ts
-import { exec as nodeExec, type ExecException } from 'child_process';
+import { exec as nodeExec } from 'child_process';
 import { promisify } from 'util';
 import { logger } from './logger.server';

 // Define a type for the exec function for better type safety and testability.
 // It matches the signature of a promisified child_process.exec.
-export type ExecAsync = (
-command: string,
-) => Promise<{ stdout: string; stderr: string }>;
+export type ExecAsync = (command: string) => Promise<{ stdout: string; stderr: string }>;

 export class SystemService {
 private execAsync: ExecAsync;
@@ -31,11 +29,12 @@ export class SystemService {
 ? 'Application is online and running under PM2.'
 : 'Application process exists but is not online.';
 return { success: isOnline, message };
-} catch (error: ExecException | any) {
+} catch (error: unknown) {
 // If the command fails (non-zero exit code), check if it's because the process doesn't exist.
 // This is a normal "not found" case, not a system error.
 // The error message can be in stdout or stderr depending on the pm2 version.
-const output = error.stdout || error.stderr || '';
+const execError = error as { stdout?: string; stderr?: string; message?: string };
+const output = execError.stdout || execError.stderr || '';
 if (output.includes("doesn't exist")) {
 logger.warn('[SystemService] PM2 process "flyer-crawler-api" not found.');
 return {
@@ -44,7 +43,10 @@ export class SystemService {
 };
 }
 // For any other error, log it and re-throw to be handled as a 500.
-logger.error({ error: error.stderr || error.message }, '[SystemService] Error executing pm2 describe:');
+logger.error(
+{ error: execError.stderr || execError.message },
+'[SystemService] Error executing pm2 describe:',
+);
 throw error;
 }
 }

@@ -33,7 +33,11 @@ describe('E2E Flyer Upload and Processing Workflow', () => {

 it('should allow a user to upload a flyer and wait for processing to complete', async () => {
 // 1. Register a new user
-const registerResponse = await apiClient.registerUser(userEmail, userPassword, 'E2E Flyer Uploader');
+const registerResponse = await apiClient.registerUser(
+userEmail,
+userPassword,
+'E2E Flyer Uploader',
+);
 expect(registerResponse.status).toBe(201);

 // 2. Login to get the access token
@@ -49,7 +53,7 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
 // Note: In a real E2E scenario against a live AI service, a valid image is required.
 // If the AI service is mocked or stubbed in this environment, a dummy buffer might suffice.
 let fileBuffer: Buffer;
-let fileName = `e2e-test-flyer-${uniqueId}.jpg`;
+const fileName = `e2e-test-flyer-${uniqueId}.jpg`;

 const assetPath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
 if (fs.existsSync(assetPath)) {
@@ -61,7 +65,7 @@ describe('E2E Flyer Upload and Processing Workflow', () => {
 // (This might fail if the backend does strict image validation/processing)
 fileBuffer = Buffer.concat([
 Buffer.from([0xff, 0xd8, 0xff, 0xe0]), // JPEG Start of Image
-Buffer.from(uniqueId.toString())
+Buffer.from(uniqueId.toString()),
 ]);
 }

@@ -104,7 +108,10 @@ describe('E2E Flyer Upload and Processing Workflow', () => {

 // Fetch the store_id associated with the created flyer for robust cleanup
 if (flyerId) {
-const flyerRes = await getPool().query('SELECT store_id FROM public.flyers WHERE flyer_id = $1', [flyerId]);
+const flyerRes = await getPool().query(
+'SELECT store_id FROM public.flyers WHERE flyer_id = $1',
+[flyerId],
+);
 if (flyerRes.rows.length > 0) {
 storeId = flyerRes.rows[0].store_id;
 }

@@ -2,7 +2,6 @@
 import { describe, it, expect, beforeEach, afterEach } from 'vitest';
 import * as db from '../../services/db/index.db';
 import * as bcrypt from 'bcrypt';
-import { getPool } from '../../services/db/connection.db';
 import { logger } from '../../services/logger.server';
 import type { UserProfile } from '../../types';
 import { cleanupDb } from '../utils/cleanup';

@@ -3,7 +3,7 @@ import { describe, it, expect, beforeAll, afterAll, vi, afterEach } from 'vitest
 import supertest from 'supertest';
 import { createAndLoginUser } from '../utils/testHelpers';
 import { cleanupDb } from '../utils/cleanup';
-import type { UserProfile, Recipe, RecipeComment } from '../../types';
+import type { UserProfile, Recipe } from '../../types';
 import { getPool } from '../../services/db/connection.db';

 import { aiService } from '../../services/aiService.server';
@@ -130,9 +130,9 @@ describe('Recipe API Routes Integration Tests', () => {
 expect(verifyResponse.status).toBe(200);
 expect(verifyResponse.body.name).toBe(recipeUpdates.name);
 });
-it.todo('should prevent a user from updating another user\'s recipe');
+it.todo("should prevent a user from updating another user's recipe");
 it.todo('should allow an authenticated user to delete their own recipe');
-it.todo('should prevent a user from deleting another user\'s recipe');
+it.todo("should prevent a user from deleting another user's recipe");
 it.todo('should allow an authenticated user to post a comment on a recipe');
 it.todo('should allow an authenticated user to fork a recipe');

@@ -3,8 +3,6 @@ import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
 import supertest from 'supertest';
 import path from 'path';
 import fs from 'node:fs/promises';
-import { logger } from '../../services/logger.server';
-import { getPool } from '../../services/db/connection.db';
 import type { UserProfile, MasterGroceryItem, ShoppingList } from '../../types';
 import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
 import { cleanupDb } from '../utils/cleanup';
@@ -41,7 +39,7 @@ describe('User API Routes Integration Tests', () => {
 vi.unstubAllEnvs();
 await cleanupDb({
 userIds: createdUserIds,
-masterItemIds: createdMasterItemIds
+masterItemIds: createdMasterItemIds,
 });

 // Safeguard to clean up any avatar files created during tests.
@@ -172,7 +170,10 @@ describe('User API Routes Integration Tests', () => {
 it('should allow a user to delete their own account and then fail to log in', async () => {
 // Arrange: Create a new, separate user just for this deletion test.
 const deletionEmail = `delete-me-${Date.now()}@example.com`;
-const { user: deletionUser, token: deletionToken } = await createAndLoginUser({ email: deletionEmail, request });
+const { user: deletionUser, token: deletionToken } = await createAndLoginUser({
+email: deletionEmail,
+request,
+});
 createdUserIds.push(deletionUser.user.user_id);

 // Act: Call the delete endpoint with the correct password and token.
@@ -248,7 +249,8 @@ describe('User API Routes Integration Tests', () => {
 .send({ itemName: 'Integration Test Item', category: 'Other/Miscellaneous' });
 const newItem = addResponse.body;

-if (newItem?.master_grocery_item_id) createdMasterItemIds.push(newItem.master_grocery_item_id);
+if (newItem?.master_grocery_item_id)
+createdMasterItemIds.push(newItem.master_grocery_item_id);
 // Assert 1: Check that the item was created correctly.
 expect(addResponse.status).toBe(201);
 expect(newItem.name).toBe('Integration Test Item');