add ai agent fallbacks
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 1m18s
@@ -243,9 +243,7 @@ describe('App Component', () => {
     mockedApiClient.fetchShoppingLists.mockImplementation(() =>
       Promise.resolve(new Response(JSON.stringify([]))),
     );
-    mockedAiApiClient.rescanImageArea.mockResolvedValue(
-      new Response(JSON.stringify({ text: 'mocked text' })),
-    ); // Mock for FlyerCorrectionTool
+    mockedAiApiClient.rescanImageArea.mockResolvedValue({ text: 'mocked text' }); // Mock for FlyerCorrectionTool
     console.log('[TEST DEBUG] beforeEach: Setup complete');
   });
 
@@ -73,12 +73,11 @@ describe('FlyerUploader', () => {
 
   it('should handle file upload and start polling', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Checking...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Checking...' },
+    });
 
     console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.');
     renderComponent();
@@ -131,12 +130,11 @@ describe('FlyerUploader', () => {
 
   it('should handle file upload via drag and drop', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-dnd' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Dropped...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-dnd' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Dropped...' },
+    });
 
     console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.');
     renderComponent();
@@ -159,16 +157,10 @@ describe('FlyerUploader', () => {
   it('should poll for status, complete successfully, and redirect', async () => {
     const onProcessingComplete = vi.fn();
     console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
     mockedAiApiClient.getJobStatus
-      .mockResolvedValueOnce(
-        new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
-      )
-      .mockResolvedValueOnce(
-        new Response(JSON.stringify({ state: 'completed', returnValue: { flyerId: 42 } })),
-      );
+      .mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
+      .mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });
 
     console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
     renderComponent(onProcessingComplete);
@@ -229,12 +221,11 @@ describe('FlyerUploader', () => {
 
   it('should handle a failed job', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-fail' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'failed', failedReason: 'AI model exploded' })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'failed',
+      failedReason: 'AI model exploded',
+    });
 
     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -260,11 +251,82 @@ describe('FlyerUploader', () => {
     console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.');
   });
 
+  it('should clear the polling timeout when a job fails', async () => {
+    const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
+    console.log('--- [TEST LOG] ---: 1. Setting up mocks for failed job timeout clearance.');
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail-timeout' });
+
+    // We need at least one 'active' response to establish a timeout loop so we have something to clear
+    mockedAiApiClient.getJobStatus
+      .mockResolvedValueOnce({ state: 'active', progress: { message: 'Working...' } })
+      .mockResolvedValueOnce({ state: 'failed', failedReason: 'Fatal Error' });
+
+    renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+
+    fireEvent.change(input, { target: { files: [file] } });
+
+    // Wait for the first poll to complete and UI to update to "Working..."
+    await screen.findByText('Working...');
+
+    // Advance time to trigger the second poll
+    await act(async () => {
+      vi.advanceTimersByTime(3000);
+    });
+
+    // Wait for the failure UI
+    await screen.findByText(/Processing failed: Fatal Error/i);
+
+    // Verify clearTimeout was called
+    expect(clearTimeoutSpy).toHaveBeenCalled();
+
+    // Verify no further polling occurs
+    const callsBefore = mockedAiApiClient.getJobStatus.mock.calls.length;
+    await act(async () => {
+      vi.advanceTimersByTime(10000);
+    });
+    expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBefore);
+
+    clearTimeoutSpy.mockRestore();
+  });
+
+  it('should clear the polling timeout when the component unmounts', async () => {
+    const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
+    console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount timeout clearance.');
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-unmount' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Polling...' },
+    });
+
+    const { unmount } = renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+
+    fireEvent.change(input, { target: { files: [file] } });
+
+    // Wait for the first poll to complete and the UI to show the polling state
+    await screen.findByText('Polling...');
+
+    // Now that we are in a polling state (and a timeout is set), unmount the component
+    console.log('--- [TEST LOG] ---: 2. Unmounting component to trigger cleanup effect.');
+    unmount();
+
+    // Verify that the cleanup function in the useEffect hook was called
+    expect(clearTimeoutSpy).toHaveBeenCalled();
+    console.log('--- [TEST LOG] ---: 3. clearTimeout confirmed.');
+
+    clearTimeoutSpy.mockRestore();
+  });
+
   it('should handle a duplicate flyer error (409)', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ flyerId: 99, message: 'Duplicate' }), { status: 409 }),
-    );
+    // The API client now throws a structured error for non-2xx responses.
+    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
+      status: 409,
+      body: { flyerId: 99, message: 'Duplicate' },
+    });
 
     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -295,12 +357,11 @@ describe('FlyerUploader', () => {
 
   it('should allow the user to stop watching progress', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-stop' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-stop' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Analyzing...' },
+    } as any);
 
     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -362,9 +423,11 @@ describe('FlyerUploader', () => {
 
   it('should handle a generic network error during upload', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue(
-      new Error('Network Error During Upload'),
-    );
+    // Simulate a structured error from the API client
+    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
+      status: 500,
+      body: { message: 'Network Error During Upload' },
+    });
     renderComponent();
     const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
     const input = screen.getByLabelText(/click to select a file/i);
@@ -379,9 +442,7 @@ describe('FlyerUploader', () => {
 
   it('should handle a generic network error during polling', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-poll-fail' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-poll-fail' });
     mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error'));
 
     renderComponent();
@@ -398,11 +459,9 @@ describe('FlyerUploader', () => {
 
   it('should handle a completed job with a missing flyerId', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-no-flyerid' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-no-flyerid' });
     mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'completed', returnValue: {} })), // No flyerId
+      { state: 'completed', returnValue: {} }, // No flyerId
     );
 
     renderComponent();
@@ -419,6 +478,27 @@ describe('FlyerUploader', () => {
     console.log('--- [TEST LOG] ---: 4. Assertions passed.');
   });
 
+  it('should handle a non-JSON response during polling', async () => {
+    console.log('--- [TEST LOG] ---: 1. Setting up mock for non-JSON response.');
+    // The actual function would throw, so we mock the rejection.
+    // The new getJobStatus would throw an error like "Failed to parse JSON..."
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-bad-json' });
+    mockedAiApiClient.getJobStatus.mockRejectedValue(
+      new Error('Failed to parse JSON response from server. Body: <html>502 Bad Gateway</html>'),
+    );
+
+    renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+
+    console.log('--- [TEST LOG] ---: 2. Firing file change event.');
+    fireEvent.change(input, { target: { files: [file] } });
+
+    console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
+    expect(await screen.findByText(/Failed to parse JSON response from server/i)).toBeInTheDocument();
+    console.log('--- [TEST LOG] ---: 4. Assertions passed.');
+  });
+
   it('should do nothing if the file input is cancelled', () => {
     renderComponent();
     const input = screen.getByLabelText(/click to select a file/i);
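The two new timeout-clearance tests above drive the polling loop with `vi.advanceTimersByTime`, which only has an effect while Vitest's fake timers are installed. A minimal setup sketch, assuming the suite does not already enable them in a shared setup file (that setup is not shown in this diff):

```ts
// Assumed test setup; adjust if the suite already configures timers elsewhere.
import { vi, beforeEach, afterEach } from 'vitest';

beforeEach(() => {
  vi.useFakeTimers();
});

afterEach(() => {
  vi.useRealTimers();
});
```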
@@ -60,14 +60,8 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
     const pollStatus = async () => {
       console.debug(`[DEBUG] pollStatus(): Polling for jobId: ${jobId}`);
       try {
-        const statusResponse = await getJobStatus(jobId);
-        console.debug(`[DEBUG] pollStatus(): API response status: ${statusResponse.status}`);
-        if (!statusResponse.ok) {
-          throw new Error(`Failed to get job status (HTTP ${statusResponse.status})`);
-        }
-
-        const job = await statusResponse.json();
-        console.debug('[DEBUG] pollStatus(): Job status received:', job);
+        const job = await getJobStatus(jobId); // Now returns parsed JSON directly
+        console.debug('[DEBUG] pollStatus(): Job status received:', job); // The rest of the logic remains the same
 
         if (job.progress) {
           setProcessingStages(job.progress.stages || []);
@@ -97,7 +91,13 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
             console.debug(
               `[DEBUG] pollStatus(): Job state is "failed". Reason: ${job.failedReason}`,
             );
+            // Explicitly clear any pending timeout to stop the polling loop immediately.
+            if (pollingTimeoutRef.current) {
+              clearTimeout(pollingTimeoutRef.current);
+            }
             setErrorMessage(`Processing failed: ${job.failedReason || 'Unknown error'}`);
+            // Clear any stale "in-progress" messages to avoid user confusion.
+            setStatusMessage(null);
             setProcessingState('error');
             break;
 
@@ -150,29 +150,24 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
         `[DEBUG] processFile(): Checksum generated: ${checksum}. Calling uploadAndProcessFlyer.`,
       );
 
-      const startResponse = await uploadAndProcessFlyer(file, checksum);
-      console.debug(`[DEBUG] processFile(): Upload response status: ${startResponse.status}`);
-
-      if (!startResponse.ok) {
-        const errorData = await startResponse.json();
-        console.debug('[DEBUG] processFile(): Upload failed. Error data:', errorData);
-        if (startResponse.status === 409 && errorData.flyerId) {
-          setErrorMessage(`This flyer has already been processed. You can view it here:`);
-          setDuplicateFlyerId(errorData.flyerId);
-        } else {
-          setErrorMessage(errorData.message || `Upload failed with status ${startResponse.status}`);
-        }
-        setProcessingState('error');
-        return;
-      }
-
-      const { jobId: newJobId } = await startResponse.json();
+      // The API client now returns parsed JSON on success or throws a structured error on failure.
+      const { jobId: newJobId } = await uploadAndProcessFlyer(file, checksum);
       console.debug(`[DEBUG] processFile(): Upload successful. Received jobId: ${newJobId}`);
       setJobId(newJobId);
       setProcessingState('polling');
-    } catch (error) {
-      logger.error('An unexpected error occurred during file upload:', { error });
-      setErrorMessage(error instanceof Error ? error.message : 'An unexpected error occurred.');
+    } catch (error: any) {
+      // Handle the structured error thrown by the API client.
+      logger.error('An error occurred during file upload:', { error });
+      // Handle 409 Conflict for duplicate flyers
+      if (error?.status === 409 && error.body?.flyerId) {
+        setErrorMessage(`This flyer has already been processed. You can view it here:`);
+        setDuplicateFlyerId(error.body.flyerId);
+      } else {
+        // Handle other errors (e.g., validation, server errors)
+        const message =
+          error?.body?.message || error?.message || 'An unexpected error occurred during upload.';
+        setErrorMessage(message);
+      }
      setProcessingState('error');
     }
   }, []);
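The rewritten catch block duck-types the structured error thrown by the API client (`{ status, body }`, introduced in the aiApiClient changes further down). A minimal sketch of that assumed shape plus a type guard; the `ApiClientError` and `isApiClientError` names are illustrative and not part of this commit, but declaring something like them would let the `catch (error: any)` be narrowed without `any`:

```ts
// Illustrative only; the client in this commit throws a plain object literal.
interface ApiClientError {
  status: number;
  body: { message?: string; flyerId?: number };
}

function isApiClientError(error: unknown): error is ApiClientError {
  return typeof error === 'object' && error !== null && 'status' in error && 'body' in error;
}
```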
@@ -1,15 +1,8 @@
 // src/services/aiAnalysisService.ts
-import { Flyer, FlyerItem, MasterGroceryItem, GroundedResponse, Source } from '../types';
+import { Flyer, FlyerItem, MasterGroceryItem, GroundedResponse } from '../types';
 import * as aiApiClient from './aiApiClient';
 import { logger } from './logger.client';
 
-interface RawSource {
-  web?: {
-    uri?: string;
-    title?: string;
-  };
-}
-
 /**
  * A service class to encapsulate all AI analysis API calls and related business logic.
  * This decouples the React components and hooks from the data fetching implementation.
@@ -22,7 +15,8 @@ export class AiAnalysisService {
    */
   async getQuickInsights(items: FlyerItem[]): Promise<string> {
     logger.info('[AiAnalysisService] getQuickInsights called.');
-    return aiApiClient.getQuickInsights(items).then((res) => res.json());
+    const result = await aiApiClient.getQuickInsights(items);
+    return result.text;
   }
 
   /**
@@ -32,7 +26,8 @@ export class AiAnalysisService {
    */
   async getDeepDiveAnalysis(items: FlyerItem[]): Promise<string> {
     logger.info('[AiAnalysisService] getDeepDiveAnalysis called.');
-    return aiApiClient.getDeepDiveAnalysis(items).then((res) => res.json());
+    const result = await aiApiClient.getDeepDiveAnalysis(items);
+    return result.text;
   }
 
   /**
@@ -44,18 +39,7 @@ export class AiAnalysisService {
     logger.info('[AiAnalysisService] searchWeb called.');
     // Construct a query string from the item names.
     const query = items.map((item) => item.item).join(', ');
-    // The API client returns a specific shape that we need to await the JSON from
-    const response: { text: string; sources: RawSource[] } = await aiApiClient
-      .searchWeb(query)
-      .then((res) => res.json());
-    // Normalize sources to a consistent format.
-    const mappedSources = (response.sources || []).map(
-      (s: RawSource) =>
-        (s.web
-          ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
-          : { uri: '', title: 'Untitled' }) as Source,
-    );
-    return { ...response, sources: mappedSources };
+    return aiApiClient.searchWeb(query);
   }
 
   /**
@@ -68,7 +52,7 @@ export class AiAnalysisService {
     logger.info('[AiAnalysisService] planTripWithMaps called.');
     // Encapsulate geolocation logic within the service.
     const userLocation = await this.getCurrentLocation();
-    return aiApiClient.planTripWithMaps(items, store, userLocation).then((res) => res.json());
+    return aiApiClient.planTripWithMaps(items, store, userLocation);
   }
 
   /**
@@ -78,17 +62,7 @@ export class AiAnalysisService {
    */
   async compareWatchedItemPrices(watchedItems: MasterGroceryItem[]): Promise<GroundedResponse> {
     logger.info('[AiAnalysisService] compareWatchedItemPrices called.');
-    const response: { text: string; sources: RawSource[] } = await aiApiClient
-      .compareWatchedItemPrices(watchedItems)
-      .then((res) => res.json());
-    // Normalize sources to a consistent format.
-    const mappedSources = (response.sources || []).map(
-      (s: RawSource) =>
-        (s.web
-          ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
-          : { uri: '', title: 'Untitled' }) as Source,
-    );
-    return { ...response, sources: mappedSources };
+    return aiApiClient.compareWatchedItemPrices(watchedItems);
   }
 
   /**
@@ -98,7 +72,8 @@ export class AiAnalysisService {
    */
   async generateImageFromText(prompt: string): Promise<string> {
     logger.info('[AiAnalysisService] generateImageFromText called.');
-    return aiApiClient.generateImageFromText(prompt).then((res) => res.json());
+    const result = await aiApiClient.generateImageFromText(prompt);
+    return result.imageUrl;
   }
 
   /**
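Dropping the `RawSource` normalization assumes the backend already returns sources in the `{ uri, title }` shape that `GroundedResponse` declares; nothing in this diff confirms that, so it is worth verifying against the server responses. For reference, a sketch of the shape the simplified `searchWeb` and `compareWatchedItemPrices` now pass through unchanged (field names inferred from the removed mapping; the real type lives in `../types` and may differ):

```ts
// Inferred sketch, not the authoritative definition from ../types.
interface Source {
  uri: string;
  title: string;
}

interface GroundedResponse {
  text: string;
  sources: Source[];
}
```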
@@ -4,7 +4,13 @@
  * It communicates with the application's own backend endpoints, which then securely
  * call the Google AI services. This ensures no API keys are exposed on the client.
  */
-import type { FlyerItem, Store, MasterGroceryItem } from '../types';
+import type {
+  FlyerItem,
+  Store,
+  MasterGroceryItem,
+  ProcessingStage,
+  GroundedResponse,
+} from '../types';
 import { logger } from './logger.client';
 import { apiFetch } from './apiClient';
 
@@ -20,14 +26,14 @@ export const uploadAndProcessFlyer = async (
   file: File,
   checksum: string,
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<{ jobId: string }> => {
   const formData = new FormData();
   formData.append('flyerFile', file);
   formData.append('checksum', checksum);
 
   logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
 
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/upload-and-process',
     {
       method: 'POST',
@@ -35,26 +41,79 @@ export const uploadAndProcessFlyer = async (
     },
     { tokenOverride },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    // Throw a structured error so the component can inspect the status and body
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
+// Define the expected shape of the job status response
+export interface JobStatus {
+  id: string;
+  state: 'completed' | 'failed' | 'active' | 'waiting' | 'delayed' | 'paused';
+  progress: {
+    stages?: ProcessingStage[];
+    estimatedTimeRemaining?: number;
+    message?: string;
+  } | null;
+  returnValue: {
+    flyerId?: number;
+  } | null;
+  failedReason: string | null;
+}
+
 /**
  * Fetches the status of a background processing job.
  * This is the second step in the new background processing flow.
  * @param jobId The ID of the job to check.
  * @param tokenOverride Optional token for testing.
- * @returns A promise that resolves to the API response with the job's status.
+ * @returns A promise that resolves to the parsed job status object.
+ * @throws An error if the network request fails or if the response is not valid JSON.
  */
-export const getJobStatus = async (jobId: string, tokenOverride?: string): Promise<Response> => {
-  return apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
+export const getJobStatus = async (
+  jobId: string,
+  tokenOverride?: string,
+): Promise<JobStatus> => {
+  const response = await apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
+
+  if (!response.ok) {
+    let errorText = `API Error: ${response.status} ${response.statusText}`;
+    try {
+      const errorBody = await response.text();
+      if (errorBody) errorText = `API Error ${response.status}: ${errorBody}`;
+    } catch (e) {
+      // ignore if reading body fails
+    }
+    throw new Error(errorText);
+  }
+
+  try {
+    return await response.json();
+  } catch (error) {
+    const rawText = await response.text();
+    throw new Error(`Failed to parse JSON response from server. Body: ${rawText}`);
+  }
 };
 
-export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Promise<Response> => {
+export const isImageAFlyer = async (
+  imageFile: File,
+  tokenOverride?: string,
+): Promise<{ is_flyer: boolean }> => {
   const formData = new FormData();
   formData.append('image', imageFile);
 
   // Use apiFetchWithAuth for FormData to let the browser set the correct Content-Type.
   // The URL must be relative, as the helper constructs the full path.
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/check-flyer',
     {
       method: 'POST',
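One caveat in the new `getJobStatus`: a fetch `Response` body can only be consumed once, so when `response.json()` fails on a non-JSON body, the `response.text()` call in the catch block will itself reject with a "body already used" error and the intended message is never built (the FlyerUploader test for a non-JSON response still passes because `getJobStatus` is mocked there). A sketch of an alternative tail that keeps the same error message by reading the body a single time, assuming the surrounding signature stays as committed:

```ts
// Sketch only: replaces the try/catch around response.json() in getJobStatus.
const rawText = await response.text();
try {
  return JSON.parse(rawText) as JobStatus;
} catch {
  throw new Error(`Failed to parse JSON response from server. Body: ${rawText}`);
}
```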
@@ -62,16 +121,28 @@ export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Pr
     },
     { tokenOverride },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 export const extractAddressFromImage = async (
   imageFile: File,
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<{ address: string }> => {
   const formData = new FormData();
   formData.append('image', imageFile);
 
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/extract-address',
     {
       method: 'POST',
@@ -79,18 +150,30 @@ export const extractAddressFromImage = async (
     },
     { tokenOverride },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 export const extractLogoFromImage = async (
   imageFiles: File[],
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<{ store_logo_base_64: string | null }> => {
   const formData = new FormData();
   imageFiles.forEach((file) => {
     formData.append('images', file);
   });
 
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/extract-logo',
     {
       method: 'POST',
@@ -98,14 +181,26 @@ export const extractLogoFromImage = async (
     },
     { tokenOverride },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 export const getQuickInsights = async (
   items: Partial<FlyerItem>[],
   signal?: AbortSignal,
   tokenOverride?: string,
-): Promise<Response> => {
-  return apiFetch(
+): Promise<{ text: string }> => {
+  const response = await apiFetch(
     '/ai/quick-insights',
     {
       method: 'POST',
@@ -115,14 +210,26 @@ export const getQuickInsights = async (
     },
     { tokenOverride, signal },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 export const getDeepDiveAnalysis = async (
   items: Partial<FlyerItem>[],
   signal?: AbortSignal,
   tokenOverride?: string,
-): Promise<Response> => {
-  return apiFetch(
+): Promise<{ text: string }> => {
+  const response = await apiFetch(
     '/ai/deep-dive',
     {
       method: 'POST',
@@ -132,14 +239,26 @@ export const getDeepDiveAnalysis = async (
     },
     { tokenOverride, signal },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 export const searchWeb = async (
   query: string,
   signal?: AbortSignal,
   tokenOverride?: string,
-): Promise<Response> => {
-  return apiFetch(
+): Promise<GroundedResponse> => {
+  const response = await apiFetch(
     '/ai/search-web',
     {
       method: 'POST',
@@ -149,6 +268,18 @@ export const searchWeb = async (
     },
     { tokenOverride, signal },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 // ============================================================================
@@ -161,9 +292,9 @@ export const planTripWithMaps = async (
   userLocation: GeolocationCoordinates,
   signal?: AbortSignal,
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<GroundedResponse> => {
   logger.debug('Stub: planTripWithMaps called with location:', { userLocation });
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/plan-trip',
     {
       method: 'POST',
@@ -172,6 +303,18 @@ export const planTripWithMaps = async (
     },
     { signal, tokenOverride },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 /**
@@ -183,9 +326,9 @@ export const generateImageFromText = async (
   prompt: string,
   signal?: AbortSignal,
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<{ imageUrl: string }> => {
   logger.debug('Stub: generateImageFromText called with prompt:', { prompt });
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/generate-image',
     {
       method: 'POST',
@@ -195,6 +338,18 @@ export const generateImageFromText = async (
     },
     { tokenOverride, signal },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 /**
@@ -206,9 +361,9 @@ export const generateSpeechFromText = async (
   text: string,
   signal?: AbortSignal,
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<{ audioUrl: string }> => {
   logger.debug('Stub: generateSpeechFromText called with text:', { text });
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/generate-speech',
     {
       method: 'POST',
@@ -218,6 +373,18 @@ export const generateSpeechFromText = async (
     },
     { tokenOverride, signal },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 /**
@@ -264,13 +431,29 @@ export const rescanImageArea = async (
   cropArea: { x: number; y: number; width: number; height: number },
   extractionType: 'store_name' | 'dates' | 'item_details',
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<{ text: string | undefined }> => {
   const formData = new FormData();
   formData.append('image', imageFile);
   formData.append('cropArea', JSON.stringify(cropArea));
   formData.append('extractionType', extractionType);
 
-  return apiFetch('/ai/rescan-area', { method: 'POST', body: formData }, { tokenOverride });
+  const response = await apiFetch(
+    '/ai/rescan-area',
+    { method: 'POST', body: formData },
+    { tokenOverride },
+  );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
 
 /**
@@ -281,10 +464,10 @@ export const rescanImageArea = async (
 export const compareWatchedItemPrices = async (
   watchedItems: MasterGroceryItem[],
   signal?: AbortSignal,
-): Promise<Response> => {
+): Promise<GroundedResponse> => {
   // Use the apiFetch wrapper for consistency with other API calls in this file.
   // This centralizes token handling and base URL logic.
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/compare-prices',
     {
       method: 'POST',
@@ -293,4 +476,16 @@ export const compareWatchedItemPrices = async (
     },
     { signal },
   );
+
+  if (!response.ok) {
+    let errorBody;
+    try {
+      errorBody = await response.json();
+    } catch (e) {
+      errorBody = { message: await response.text() };
+    }
+    throw { status: response.status, body: errorBody };
+  }
+
+  return response.json();
 };
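The same `if (!response.ok) { ... throw { status, body }; } return response.json();` block is now repeated in every exported function of this client. A possible follow-up, not part of this commit, is to factor it into one helper; the sketch below (helper name is illustrative) also reads the body once, which sidesteps the double-read caveat noted above for `getJobStatus`:

```ts
// Hypothetical shared helper for this client; not part of the commit.
async function parseJsonOrThrow<T>(response: Response): Promise<T> {
  const raw = await response.text();
  if (!response.ok) {
    let body: unknown;
    try {
      body = JSON.parse(raw);
    } catch {
      body = { message: raw };
    }
    // Same structured shape the callers already inspect: { status, body }.
    throw { status: response.status, body };
  }
  try {
    return JSON.parse(raw) as T;
  } catch {
    throw new Error(`Failed to parse JSON response from server. Body: ${raw}`);
  }
}

// Example usage in one of the wrappers above:
//   const response = await apiFetch('/ai/quick-insights', { ... }, { tokenOverride, signal });
//   return parseJsonOrThrow<{ text: string }>(response);
```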
@@ -166,6 +166,127 @@ describe('AI Service (Server)', () => {
     });
   });
 
+  describe('Model Fallback Logic', () => {
+    const originalEnv = process.env;
+
+    beforeEach(() => {
+      vi.unstubAllEnvs();
+      process.env = { ...originalEnv, GEMINI_API_KEY: 'test-key' };
+      vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
+    });
+
+    afterEach(() => {
+      process.env = originalEnv;
+      vi.unstubAllEnvs();
+    });
+
+    it('should try the next model if the first one fails with a quota error', async () => {
+      // Arrange
+      const { AIService } = await import('./aiService.server');
+      const { logger } = await import('./logger.server');
+      const serviceWithFallback = new AIService(logger);
+
+      const quotaError = new Error('User rate limit exceeded due to quota');
+      const successResponse = { text: 'Success from fallback model', candidates: [] };
+
+      // Mock the generateContent function to fail on the first call and succeed on the second
+      mockGenerateContent.mockRejectedValueOnce(quotaError).mockResolvedValueOnce(successResponse);
+
+      const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
+
+      // Act
+      const result = await (serviceWithFallback as any).aiClient.generateContent(request);
+
+      // Assert
+      expect(result).toEqual(successResponse);
+      expect(mockGenerateContent).toHaveBeenCalledTimes(2);
+
+      // Check first call
+      expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
+        model: 'gemini-2.5-flash',
+        ...request,
+      });
+
+      // Check second call
+      expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
+        model: 'gemini-3-flash',
+        ...request,
+      });
+
+      // Check that a warning was logged
+      expect(logger.warn).toHaveBeenCalledWith(
+        expect.stringContaining(
+          "Model 'gemini-2.5-flash' failed due to quota/rate limit. Trying next model.",
+        ),
+      );
+    });
+
+    it('should throw immediately for non-retriable errors', async () => {
+      // Arrange
+      const { AIService } = await import('./aiService.server');
+      const { logger } = await import('./logger.server');
+      const serviceWithFallback = new AIService(logger);
+
+      const nonRetriableError = new Error('Invalid API Key');
+      mockGenerateContent.mockRejectedValueOnce(nonRetriableError);
+
+      const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
+
+      // Act & Assert
+      await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
+        'Invalid API Key',
+      );
+
+      expect(mockGenerateContent).toHaveBeenCalledTimes(1);
+      expect(logger.error).toHaveBeenCalledWith(
+        `[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
+        { error: nonRetriableError },
+      );
+    });
+
+    it('should throw the last error if all models fail', async () => {
+      // Arrange
+      const { AIService } = await import('./aiService.server');
+      const { logger } = await import('./logger.server');
+      const serviceWithFallback = new AIService(logger);
+
+      const quotaError1 = new Error('Quota exhausted for model 1');
+      const quotaError2 = new Error('429 Too Many Requests for model 2');
+      const quotaError3 = new Error('RESOURCE_EXHAUSTED for model 3');
+
+      mockGenerateContent
+        .mockRejectedValueOnce(quotaError1)
+        .mockRejectedValueOnce(quotaError2)
+        .mockRejectedValueOnce(quotaError3);
+
+      const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
+
+      // Act & Assert
+      await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
+        quotaError3,
+      );
+
+      expect(mockGenerateContent).toHaveBeenCalledTimes(3);
+      expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
+        model: 'gemini-2.5-flash',
+        ...request,
+      });
+      expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
+        model: 'gemini-3-flash',
+        ...request,
+      });
+      expect(mockGenerateContent).toHaveBeenNthCalledWith(3, {
+        model: 'gemini-2.5-flash-lite',
+        ...request,
+      });
+
+      expect(logger.error).toHaveBeenCalledWith(
+        '[AIService Adapter] All AI models failed. Throwing last known error.',
+        { lastError: quotaError3 },
+      );
+    });
+  });
+
   describe('extractItemsFromReceiptImage', () => {
     it('should extract items from a valid AI response', async () => {
       const mockAiResponseText = `[
@@ -72,6 +72,7 @@ export class AIService {
   private fs: IFileSystem;
   private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
   private logger: Logger;
+  private readonly models = ['gemini-2.5-flash', 'gemini-3-flash', 'gemini-2.5-flash-lite'];
 
   constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
     this.logger = logger;
@@ -121,17 +122,11 @@ export class AIService {
       );
     }
 
-    // do not change "gemini-2.5-flash" - this is correct
-    const modelName = 'gemini-2.5-flash';
-
     // We create a shim/adapter that matches the old structure but uses the new SDK call pattern.
     // This preserves the dependency injection pattern used throughout the class.
     this.aiClient = genAI
       ? {
           generateContent: async (request) => {
-            // The model name is now injected here, into every call, as the new SDK requires.
-            // Architectural guard clause: All requests from this service must have content.
-            // This prevents sending invalid requests to the API and satisfies TypeScript's strictness.
             if (!request.contents || request.contents.length === 0) {
               this.logger.error(
                 { request },
@@ -140,14 +135,7 @@ export class AIService {
               throw new Error('AIService.generateContent requires at least one content element.');
             }
 
-            // Architectural Fix: After the guard clause, assign the guaranteed-to-exist element
-            // to a new constant. This provides a definitive type-safe variable for the compiler.
-            const firstContent = request.contents[0];
-            this.logger.debug(
-              { modelName, requestParts: firstContent.parts?.length ?? 0 },
-              '[AIService] Calling actual generateContent via adapter.',
-            );
-            return genAI.models.generateContent({ model: modelName, ...request });
+            return this._generateWithFallback(genAI, request);
           },
         }
       : {
@@ -182,6 +170,54 @@ export class AIService {
     this.logger.info('---------------- [AIService] Constructor End ----------------');
   }
 
+  private async _generateWithFallback(
+    genAI: GoogleGenAI,
+    request: { contents: Content[]; tools?: Tool[] },
+  ): Promise<GenerateContentResponse> {
+    let lastError: Error | null = null;
+
+    for (const modelName of this.models) {
+      try {
+        this.logger.info(
+          `[AIService Adapter] Attempting to generate content with model: ${modelName}`,
+        );
+        const result = await genAI.models.generateContent({ model: modelName, ...request });
+        // If the call succeeds, return the result immediately.
+        return result;
+      } catch (error: unknown) {
+        lastError = error instanceof Error ? error : new Error(String(error));
+        const errorMessage = lastError.message || '';
+
+        // Check for specific error messages indicating quota issues or model unavailability.
+        if (
+          errorMessage.includes('quota') ||
+          errorMessage.includes('429') || // HTTP 429 Too Many Requests
+          errorMessage.includes('RESOURCE_EXHAUSTED') ||
+          errorMessage.includes('model is overloaded')
+        ) {
+          this.logger.warn(
+            `[AIService Adapter] Model '${modelName}' failed due to quota/rate limit. Trying next model. Error: ${errorMessage}`,
+          );
+          continue; // Try the next model in the list.
+        } else {
+          // For other errors (e.g., invalid input, safety settings), fail immediately.
+          this.logger.error(
+            { error: lastError },
+            `[AIService Adapter] Model '${modelName}' failed with a non-retriable error.`,
+          );
+          throw lastError;
+        }
+      }
+    }
+
+    // If all models in the list have failed, throw the last error encountered.
+    this.logger.error(
+      { lastError },
+      '[AIService Adapter] All AI models failed. Throwing last known error.',
+    );
+    throw lastError || new Error('All AI models failed to generate content.');
+  }
+
   private async serverFileToGenerativePart(path: string, mimeType: string) {
     const fileData = await this.fs.readFile(path);
     return {
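Two observations on the fallback adapter. First, the quota detection here and the one in the queue worker below duplicate the same substring checks; a shared predicate would keep them from drifting apart. A sketch, with the helper name and its location being assumptions:

```ts
// Hypothetical shared helper, e.g. in a common errors module; not part of this commit.
export function isQuotaOrRateLimitError(error: unknown): boolean {
  const message = error instanceof Error ? error.message : String(error ?? '');
  return (
    message.includes('quota') ||
    message.includes('429') ||
    message.includes('RESOURCE_EXHAUSTED') ||
    message.includes('model is overloaded')
  );
}
```

Second, the adapter logs pino-style, `logger.error({ error }, message)`, while the new fallback tests assert `toHaveBeenCalledWith(message, { error })`; one of the two argument orders presumably needs to change, and this may be related to the failing check on this commit.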
@@ -1,5 +1,5 @@
 // src/services/queueService.server.ts
-import { Queue, Worker, Job } from 'bullmq';
+import { Queue, Worker, Job, UnrecoverableError } from 'bullmq';
 import IORedis from 'ioredis'; // Correctly imported
 import fsPromises from 'node:fs/promises';
 import { exec } from 'child_process';
@@ -185,9 +185,26 @@ const attachWorkerEventListeners = (worker: Worker) => {
 
 export const flyerWorker = new Worker<FlyerJobData>(
   'flyer-processing', // Must match the queue name
-  (job) => {
-    // The processJob method creates its own job-specific logger internally.
-    return flyerProcessingService.processJob(job);
+  async (job) => {
+    try {
+      // The processJob method creates its own job-specific logger internally.
+      return await flyerProcessingService.processJob(job);
+    } catch (error: any) {
+      // Check for quota errors or other unrecoverable errors from the AI service
+      const errorMessage = error?.message || '';
+      if (
+        errorMessage.includes('quota') ||
+        errorMessage.includes('429') ||
+        errorMessage.includes('RESOURCE_EXHAUSTED')
+      ) {
+        logger.error(
+          { err: error, jobId: job.id },
+          '[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
+        );
+        throw new UnrecoverableError(errorMessage);
+      }
+      throw error;
+    }
   },
   {
     connection,
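`UnrecoverableError` makes BullMQ fail the job immediately and skip any remaining attempts, while other errors re-thrown here still go through the normal retry policy, which only exists if the job was enqueued with retry options. A sketch of the enqueue side for reference; the queue variable, job name, and numbers are assumptions, since that code is not part of this diff:

```ts
// Assumed enqueue call; adjust names to the actual queue setup.
await flyerQueue.add('process-flyer', jobData, {
  attempts: 3,
  backoff: { type: 'exponential', delay: 5_000 },
});
```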