Compare commits

11 Commits

| Author | SHA1 | Date |
|---|---|---|
| | c4bbf5c251 | |
| | 32a9e6732b | |
| | e7c076e2ed | |
| | dbe8e72efe | |
| | 38bd193042 | |
| | 57215e2778 | |
| | 2c1de24e9a | |
| | c8baff7aac | |
| | de3f21a7ec | |
| | c6adbf79e7 | |
| | 7399a27600 | |
4	package-lock.json	(generated)
@@ -1,12 +1,12 @@
{
  "name": "flyer-crawler",
  "version": "0.1.2",
  "version": "0.1.6",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "flyer-crawler",
      "version": "0.1.2",
      "version": "0.1.6",
      "dependencies": {
        "@bull-board/api": "^6.14.2",
        "@bull-board/express": "^6.14.2",
@@ -1,7 +1,7 @@
{
  "name": "flyer-crawler",
  "private": true,
  "version": "0.1.2",
  "version": "0.1.6",
  "type": "module",
  "scripts": {
    "dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -243,7 +243,9 @@ describe('App Component', () => {
    mockedApiClient.fetchShoppingLists.mockImplementation(() =>
      Promise.resolve(new Response(JSON.stringify([]))),
    );
    mockedAiApiClient.rescanImageArea.mockResolvedValue({ text: 'mocked text' }); // Mock for FlyerCorrectionTool
    mockedAiApiClient.rescanImageArea.mockResolvedValue(
      new Response(JSON.stringify({ text: 'mocked text' })),
    ); // Mock for FlyerCorrectionTool
    console.log('[TEST DEBUG] beforeEach: Setup complete');
  });
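Because the AI client functions now resolve to raw `Response` objects instead of parsed JSON, the test mocks above wrap their payloads in `new Response(...)`. A minimal sketch of that mocking pattern on its own, with the module path assumed from the surrounding diffs:

```ts
import { vi, it, expect } from 'vitest';
import * as aiApiClient from '../services/aiApiClient';

vi.mock('../services/aiApiClient');
const mockedAiApiClient = vi.mocked(aiApiClient);

it('unwraps a Response-based mock via res.json()', async () => {
  // The mock mirrors the new client contract: a real Response carrying JSON.
  mockedAiApiClient.rescanImageArea.mockResolvedValue(
    new Response(JSON.stringify({ text: 'mocked text' })),
  );

  const res = await aiApiClient.rescanImageArea(
    new File(['fake'], 'flyer.png'),
    { x: 0, y: 0, width: 10, height: 10 },
    'store_name',
  );
  expect((await res.json()).text).toBe('mocked text');
});
```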
@@ -44,7 +44,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
      })
      .catch((err) => {
        console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
        logger.error('Failed to fetch image for correction tool', { error: err });
        logger.error({ error: err }, 'Failed to fetch image for correction tool');
        notifyError('Could not load the image for correction.');
      });
  }

@@ -164,7 +164,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
      const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
      console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
      notifyError(msg);
      logger.error('Error during rescan:', { error: err });
      logger.error({ error: err }, 'Error during rescan:');
    } finally {
      console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
      setIsProcessing(false);
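The recurring change in these component hunks is the logger call signature: the context object moves from the second argument to the first, matching pino's `(mergingObject, message)` convention so that the error is emitted as structured fields rather than being folded into the message string. A small sketch of the difference, assuming a plain pino logger:

```ts
import pino from 'pino';

const logger = pino();

try {
  throw new Error('boom');
} catch (err) {
  // Old shape: pino treats the trailing object as a printf-style argument,
  // so it is not attached to the log line as queryable fields.
  logger.error('Error during rescan:', { error: err });

  // New shape: the merging object comes first and `error` becomes a
  // structured property of the JSON log entry.
  logger.error({ error: err }, 'Error during rescan:');
}
```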
@@ -112,7 +112,7 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
          break;
        }
      } catch (error) {
        logger.error('Error during polling:', { error });
        logger.error({ error }, 'Error during polling:');
        setErrorMessage(
          error instanceof Error ? error.message : 'An unexpected error occurred during polling.',
        );

@@ -157,7 +157,7 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
      setProcessingState('polling');
    } catch (error: any) {
      // Handle the structured error thrown by the API client.
      logger.error('An error occurred during file upload:', { error });
      logger.error({ error }, 'An error occurred during file upload:');
      // Handle 409 Conflict for duplicate flyers
      if (error?.status === 409 && error.body?.flyerId) {
        setErrorMessage(`This flyer has already been processed. You can view it here:`);
@@ -1,94 +1,68 @@
// src/middleware/errorHandler.ts
import { Request, Response, NextFunction } from 'express';
import { ZodError } from 'zod';
import {
  DatabaseError,
  UniqueConstraintError,
  ForeignKeyConstraintError,
  NotFoundError,
  UniqueConstraintError,
  ValidationError,
  ValidationIssue,
} from '../services/db/errors.db';
import crypto from 'crypto';
import { logger } from '../services/logger.server';

interface HttpError extends Error {
  status?: number;
}

export const errorHandler = (err: HttpError, req: Request, res: Response, next: NextFunction) => {
  // If the response headers have already been sent, we must delegate to the default Express error handler.
/**
 * A centralized error handling middleware for the Express application.
 * This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
 *
 * It standardizes error responses and ensures consistent logging.
 */
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
  // If headers have already been sent, delegate to the default Express error handler.
  if (res.headersSent) {
    return next(err);
  }

  // The pino-http middleware guarantees that `req.log` will be available.
  const log = req.log;
  // Use the request-scoped logger if available, otherwise fall back to the global logger.
  const log = req.log || logger;

  // --- 1. Determine Final Status Code and Message ---
  let statusCode = err.status ?? 500;
  const message = err.message;
  let validationIssues: ValidationIssue[] | undefined;
  let errorId: string | undefined;
  // --- Handle Zod Validation Errors ---
  if (err instanceof ZodError) {
    log.warn({ err: err.flatten() }, 'Request validation failed');
    return res.status(400).json({
      message: 'The request data is invalid.',
      errors: err.issues.map((e) => ({ path: e.path, message: e.message })),
    });
  }

  // --- Handle Custom Operational Errors ---
  if (err instanceof NotFoundError) {
    log.info({ err }, 'Resource not found');
    return res.status(404).json({ message: err.message });
  }

  if (err instanceof ValidationError) {
    log.warn({ err }, 'Validation error occurred');
    return res.status(400).json({ message: err.message, errors: err.validationErrors });
  }

  // Refine the status code for known error types. Check for most specific types first.
  if (err instanceof UniqueConstraintError) {
    statusCode = 409; // Conflict
  } else if (err instanceof NotFoundError) {
    statusCode = 404;
  } else if (err instanceof ForeignKeyConstraintError) {
    statusCode = 400;
  } else if (err instanceof ValidationError) {
    statusCode = 400;
    validationIssues = err.validationErrors;
  } else if (err instanceof DatabaseError) {
    // This is a generic fallback for other database errors that are not the specific subclasses above.
    statusCode = err.status;
  } else if (err.name === 'UnauthorizedError') {
    statusCode = err.status || 401;
    log.warn({ err }, 'Constraint error occurred');
    return res.status(409).json({ message: err.message }); // Use 409 Conflict for unique constraints
  }

  // --- 2. Log Based on Final Status Code ---
  // Log the full error details for debugging, especially for server errors.
  if (statusCode >= 500) {
    errorId = crypto.randomBytes(4).toString('hex');
    // The request-scoped logger already contains user, IP, and request_id.
    // We add the full error and the request object itself.
    // Pino's `redact` config will automatically sanitize sensitive fields in `req`.
    log.error(
      {
        err,
        errorId,
        req: { method: req.method, url: req.originalUrl, headers: req.headers, body: req.body },
      },
      `Unhandled API Error (ID: ${errorId})`,
    );
  } else {
    // For 4xx errors, log at a lower level (e.g., 'warn') to avoid flooding error trackers.
    // We include the validation errors in the log context if they exist.
    log.warn(
      {
        err,
        validationErrors: validationIssues, // Add validation issues to the log object
        statusCode,
      },
      `Client Error on ${req.method} ${req.path}: ${message}`,
    );
  if (err instanceof ForeignKeyConstraintError) {
    log.warn({ err }, 'Foreign key constraint violation');
    return res.status(400).json({ message: err.message });
  }

  // --- TEST ENVIRONMENT DEBUGGING ---
  if (process.env.NODE_ENV === 'test') {
    console.error('--- [TEST] UNHANDLED ERROR ---', err);
  // --- Handle Generic Errors ---
  // Log the full error object for debugging. The pino logger will handle redaction.
  log.error({ err }, 'An unhandled error occurred in an Express route');

  // In production, send a generic message to avoid leaking implementation details.
  if (process.env.NODE_ENV === 'production') {
    return res.status(500).json({ message: 'An internal server error occurred.' });
  }

  // --- 3. Send Response ---
  // In production, send a generic message for 5xx errors.
  // In dev/test, send the actual error message for easier debugging.
  const responseMessage =
    statusCode >= 500 && process.env.NODE_ENV === 'production'
      ? `An unexpected server error occurred. Please reference error ID: ${errorId}`
      : message;

  res.status(statusCode).json({
    message: responseMessage,
    ...(validationIssues && { errors: validationIssues }), // Conditionally add the 'errors' array if it exists
  });
};
  // In development, send more details for easier debugging.
  return res.status(500).json({ message: err.message, stack: err.stack });
};
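The rewritten middleware only takes effect if it is registered after every route, as its JSDoc states. A minimal wiring sketch, with the app entry point and route mounting assumed rather than taken from this changeset:

```ts
import express from 'express';
import { errorHandler } from './middleware/errorHandler';

const app = express();
app.use(express.json());

// ...routes are mounted here, e.g. app.use('/api/admin', adminRouter);

// Register the centralized handler LAST so that thrown errors and
// next(err) calls from the routes above are funnelled through it.
app.use(errorHandler);

app.listen(3000);
```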
@@ -135,6 +135,7 @@ router.get('/corrections', async (req, res, next: NextFunction) => {
    const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
    res.json(corrections);
  } catch (error) {
    logger.error({ error }, 'Error fetching suggested corrections');
    next(error);
  }
});

@@ -144,6 +145,7 @@ router.get('/brands', async (req, res, next: NextFunction) => {
    const brands = await db.flyerRepo.getAllBrands(req.log);
    res.json(brands);
  } catch (error) {
    logger.error({ error }, 'Error fetching brands');
    next(error);
  }
});

@@ -153,6 +155,7 @@ router.get('/stats', async (req, res, next: NextFunction) => {
    const stats = await db.adminRepo.getApplicationStats(req.log);
    res.json(stats);
  } catch (error) {
    logger.error({ error }, 'Error fetching application stats');
    next(error);
  }
});

@@ -162,6 +165,7 @@ router.get('/stats/daily', async (req, res, next: NextFunction) => {
    const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
    res.json(dailyStats);
  } catch (error) {
    logger.error({ error }, 'Error fetching daily stats');
    next(error);
  }
});

@@ -176,6 +180,7 @@ router.post(
      await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
      res.status(200).json({ message: 'Correction approved successfully.' });
    } catch (error) {
      logger.error({ error }, 'Error approving correction');
      next(error);
    }
  },

@@ -191,6 +196,7 @@ router.post(
      await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
      res.status(200).json({ message: 'Correction rejected successfully.' });
    } catch (error) {
      logger.error({ error }, 'Error rejecting correction');
      next(error);
    }
  },

@@ -210,6 +216,7 @@ router.put(
      );
      res.status(200).json(updatedCorrection);
    } catch (error) {
      logger.error({ error }, 'Error updating suggested correction');
      next(error);
    }
  },

@@ -225,6 +232,7 @@ router.put(
      const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
      res.status(200).json(updatedRecipe);
    } catch (error) {
      logger.error({ error }, 'Error updating recipe status');
      next(error); // Pass all errors to the central error handler
    }
  },

@@ -250,6 +258,7 @@ router.post(
      logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
      res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
    } catch (error) {
      logger.error({ error }, 'Error updating brand logo');
      next(error);
    }
  },

@@ -260,6 +269,7 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
    const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
    res.json(items);
  } catch (error) {
    logger.error({ error }, 'Error fetching unmatched items');
    next(error);
  }
});

@@ -279,6 +289,7 @@ router.delete(
      await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
      res.status(204).send();
    } catch (error: unknown) {
      logger.error({ error }, 'Error deleting recipe');
      next(error);
    }
  },

@@ -297,6 +308,7 @@ router.delete(
      await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
      res.status(204).send();
    } catch (error: unknown) {
      logger.error({ error }, 'Error deleting flyer');
      next(error);
    }
  },

@@ -316,6 +328,7 @@ router.put(
      ); // This is still a standalone function in admin.db.ts
      res.status(200).json(updatedComment);
    } catch (error: unknown) {
      logger.error({ error }, 'Error updating comment status');
      next(error);
    }
  },

@@ -326,6 +339,7 @@ router.get('/users', async (req, res, next: NextFunction) => {
    const users = await db.adminRepo.getAllUsers(req.log);
    res.json(users);
  } catch (error) {
    logger.error({ error }, 'Error fetching users');
    next(error);
  }
});

@@ -345,6 +359,7 @@ router.get(
      const logs = await db.adminRepo.getActivityLog(limit, offset, req.log);
      res.json(logs);
    } catch (error) {
      logger.error({ error }, 'Error fetching activity log');
      next(error);
    }
  },

@@ -360,6 +375,7 @@ router.get(
      const user = await db.userRepo.findUserProfileById(params.id, req.log);
      res.json(user);
    } catch (error) {
      logger.error({ error }, 'Error fetching user profile');
      next(error);
    }
  },

@@ -395,6 +411,7 @@ router.delete(
      await db.userRepo.deleteUserById(params.id, req.log);
      res.status(204).send();
    } catch (error) {
      logger.error({ error }, 'Error deleting user');
      next(error);
    }
  },

@@ -478,6 +495,7 @@ router.post(
        .status(202)
        .json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
    } catch (error) {
      logger.error({ error }, 'Error enqueuing cleanup job');
      next(error);
    }
  },

@@ -500,6 +518,7 @@ router.post('/trigger/failing-job', async (req: Request, res: Response, next: Ne
      .status(202)
      .json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
  } catch (error) {
    logger.error({ error }, 'Error enqueuing failing job');
    next(error);
  }
});

@@ -572,6 +591,7 @@ router.get('/queues/status', async (req: Request, res: Response, next: NextFunct
    );
    res.json(queueStatuses);
  } catch (error) {
    logger.error({ error }, 'Error fetching queue statuses');
    next(error);
  }
});

@@ -620,6 +640,7 @@ router.post(
      );
      res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
    } catch (error) {
      logger.error({ error }, 'Error retrying job');
      next(error);
    }
  },

@@ -651,6 +672,7 @@ router.post(
        .status(202)
        .json({ message: 'Successfully enqueued weekly analytics job.', jobId: job.id });
    } catch (error) {
      logger.error({ error }, 'Error enqueuing weekly analytics job');
      next(error);
    }
  },
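Each admin route in this changeset gains an explicit `logger.error({ error }, '...')` call before delegating to `next(error)`, so the failure is recorded even though the centralized error handler produces the HTTP response. A sketch of that route shape in isolation; the import paths are assumptions based on the surrounding diffs:

```ts
import { Router, type NextFunction, type Request, type Response } from 'express';
import { logger } from '../services/logger.server';
import { db } from '../services/db'; // assumed module layout

const router = Router();

router.get('/corrections', async (req: Request, res: Response, next: NextFunction) => {
  try {
    const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
    res.json(corrections);
  } catch (error) {
    // Log locally for operator visibility, then let the centralized
    // errorHandler middleware translate the error into an HTTP response.
    logger.error({ error }, 'Error fetching suggested corrections');
    next(error);
  }
});

export default router;
```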
@@ -134,8 +134,8 @@ router.post(
        // If the email is a duplicate, return a 409 Conflict status.
        return res.status(409).json({ message: error.message });
      }
      // The createUser method now handles its own transaction logging, so we just log the route failure.
      logger.error({ error }, `User registration route failed for email: ${email}.`);
      // Pass the error to the centralized handler
      return next(error);
    }
  },
@@ -108,6 +108,7 @@ router.post(
      const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
      res.json(items);
    } catch (error) {
      req.log.error({ error }, 'Error fetching batch flyer items');
      next(error);
    }
  },

@@ -127,6 +128,7 @@ router.post(
      const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log);
      res.json({ count });
    } catch (error) {
      req.log.error({ error }, 'Error counting batch flyer items');
      next(error);
    }
  },
@@ -39,10 +39,7 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
    }
    return res.status(200).json({ success: true, message: 'All required database tables exist.' });
  } catch (error: unknown) {
    logger.error(
      { error: error instanceof Error ? error.message : error },
      'Error during DB schema check:',
    );
    logger.error({ error }, 'Error during DB schema check:');
    next(error);
  }
});

@@ -133,6 +130,7 @@ router.get(
      }
      throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
    } catch (error: unknown) {
      logger.error({ error }, 'Error checking Redis health');
      next(error);
    }
  },
@@ -46,7 +46,6 @@ router.get(
      }

      // Check if there was output to stderr, even if the exit code was 0 (success).
      // This handles warnings or non-fatal errors that should arguably be treated as failures in this context.
      if (stderr && stderr.trim().length > 0) {
        logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr:');
        return next(new Error(`PM2 command produced an error: ${stderr}`));

@@ -86,6 +85,7 @@ router.post(

      res.json(coordinates);
    } catch (error) {
      logger.error({ error }, 'Error geocoding address');
      next(error);
    }
  },
@@ -77,7 +77,7 @@ router.use(passport.authenticate('jwt', { session: false }));
// Ensure the directory for avatar uploads exists.
const avatarUploadDir = path.join(process.cwd(), 'public', 'uploads', 'avatars');
fs.mkdir(avatarUploadDir, { recursive: true }).catch((err) => {
  logger.error('Failed to create avatar upload directory:', err);
  logger.error({ err }, 'Failed to create avatar upload directory');
});

// Define multer storage configuration. The `req.user` object will be available

@@ -122,6 +122,7 @@ router.post(
      );
      res.json(updatedProfile);
    } catch (error) {
      logger.error({ error }, 'Error uploading avatar');
      next(error);
    }
  },

@@ -151,6 +152,7 @@ router.get(
      );
      res.json(notifications);
    } catch (error) {
      logger.error({ error }, 'Error fetching notifications');
      next(error);
    }
  },

@@ -168,6 +170,7 @@ router.post(
      await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log);
      res.status(204).send(); // No Content
    } catch (error) {
      logger.error({ error }, 'Error marking all notifications as read');
      next(error);
    }
  },

@@ -193,6 +196,7 @@ router.post(
      );
      res.status(204).send(); // Success, no content to return
    } catch (error) {
      logger.error({ error }, 'Error marking notification as read');
      next(error);
    }
  },
@@ -345,11 +349,7 @@ router.post(
      if (error instanceof ForeignKeyConstraintError) {
        return res.status(400).json({ message: error.message });
      }
      const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
      logger.error({
        errorMessage,
        body: req.body,
      });
      logger.error({ error, body: req.body }, 'Failed to add watched item');
      next(error);
    }
  },

@@ -453,11 +453,7 @@ router.post(
      if (error instanceof ForeignKeyConstraintError) {
        return res.status(400).json({ message: error.message });
      }
      const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
      logger.error({
        errorMessage,
        body: req.body,
      });
      logger.error({ error, body: req.body }, 'Failed to create shopping list');
      next(error);
    }
  },

@@ -516,12 +512,7 @@ router.post(
      if (error instanceof ForeignKeyConstraintError) {
        return res.status(400).json({ message: error.message });
      }
      const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
      logger.error({
        errorMessage,
        params: req.params,
        body: req.body,
      });
      logger.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
      next(error);
    }
  },

@@ -661,11 +652,7 @@ router.put(
      if (error instanceof ForeignKeyConstraintError) {
        return res.status(400).json({ message: error.message });
      }
      const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
      logger.error({
        errorMessage,
        body: req.body,
      });
      logger.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
      next(error);
    }
  },

@@ -709,11 +696,7 @@ router.put(
      if (error instanceof ForeignKeyConstraintError) {
        return res.status(400).json({ message: error.message });
      }
      const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
      logger.error({
        errorMessage,
        body: req.body,
      });
      logger.error({ error, body: req.body }, 'Failed to set user appliances');
      next(error);
    }
  },

@@ -743,6 +726,7 @@ router.get(
      const address = await db.addressRepo.getAddressById(addressId, req.log); // This will throw NotFoundError if not found
      res.json(address);
    } catch (error) {
      logger.error({ error }, 'Error fetching user address');
      next(error);
    }
  },

@@ -781,6 +765,7 @@ router.put(
      const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed.
      res.status(200).json({ message: 'Address updated successfully', address_id: addressId });
    } catch (error) {
      logger.error({ error }, 'Error updating user address');
      next(error);
    }
  },
@@ -1,8 +1,15 @@
// src/services/aiAnalysisService.ts
import { Flyer, FlyerItem, MasterGroceryItem, GroundedResponse } from '../types';
import { Flyer, FlyerItem, MasterGroceryItem, GroundedResponse, Source } from '../types';
import * as aiApiClient from './aiApiClient';
import { logger } from './logger.client';

interface RawSource {
  web?: {
    uri?: string;
    title?: string;
  };
}

/**
 * A service class to encapsulate all AI analysis API calls and related business logic.
 * This decouples the React components and hooks from the data fetching implementation.

@@ -15,8 +22,7 @@ export class AiAnalysisService {
   */
  async getQuickInsights(items: FlyerItem[]): Promise<string> {
    logger.info('[AiAnalysisService] getQuickInsights called.');
    const result = await aiApiClient.getQuickInsights(items);
    return result.text;
    return aiApiClient.getQuickInsights(items).then((res) => res.json());
  }

  /**

@@ -26,8 +32,7 @@ export class AiAnalysisService {
   */
  async getDeepDiveAnalysis(items: FlyerItem[]): Promise<string> {
    logger.info('[AiAnalysisService] getDeepDiveAnalysis called.');
    const result = await aiApiClient.getDeepDiveAnalysis(items);
    return result.text;
    return aiApiClient.getDeepDiveAnalysis(items).then((res) => res.json());
  }

  /**

@@ -39,7 +44,16 @@ export class AiAnalysisService {
    logger.info('[AiAnalysisService] searchWeb called.');
    // Construct a query string from the item names.
    const query = items.map((item) => item.item).join(', ');
    return aiApiClient.searchWeb(query);
    // The API client returns a specific shape that we need to await the JSON from
    const response: { text: string; sources: RawSource[] } = await aiApiClient
      .searchWeb(query)
      .then((res) => res.json());
    // Normalize sources to a consistent format.
    const mappedSources = (response.sources || []).map(
      (s: RawSource) =>
        (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
    );
    return { ...response, sources: mappedSources };
  }

  /**

@@ -52,7 +66,7 @@ export class AiAnalysisService {
    logger.info('[AiAnalysisService] planTripWithMaps called.');
    // Encapsulate geolocation logic within the service.
    const userLocation = await this.getCurrentLocation();
    return aiApiClient.planTripWithMaps(items, store, userLocation);
    return aiApiClient.planTripWithMaps(items, store, userLocation).then((res) => res.json());
  }

  /**

@@ -62,7 +76,15 @@ export class AiAnalysisService {
   */
  async compareWatchedItemPrices(watchedItems: MasterGroceryItem[]): Promise<GroundedResponse> {
    logger.info('[AiAnalysisService] compareWatchedItemPrices called.');
    return aiApiClient.compareWatchedItemPrices(watchedItems);
    const response: { text: string; sources: RawSource[] } = await aiApiClient
      .compareWatchedItemPrices(watchedItems)
      .then((res) => res.json());
    // Normalize sources to a consistent format.
    const mappedSources = (response.sources || []).map(
      (s: RawSource) =>
        (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
    );
    return { ...response, sources: mappedSources };
  }

  /**

@@ -72,8 +94,7 @@ export class AiAnalysisService {
   */
  async generateImageFromText(prompt: string): Promise<string> {
    logger.info('[AiAnalysisService] generateImageFromText called.');
    const result = await aiApiClient.generateImageFromText(prompt);
    return result.imageUrl;
    return aiApiClient.generateImageFromText(prompt).then((res) => res.json());
  }

  /**
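The two grounded-response methods repeat the same source-normalization logic. A hedged sketch of how that mapping could be pulled into a helper; `normalizeSources` is an illustrative name and the flat `Source` shape is assumed from the types the diff imports, the changeset itself keeps the mapping inline:

```ts
interface RawSource {
  web?: { uri?: string; title?: string };
}

interface Source {
  uri: string;
  title: string;
}

// Collapse the provider's optional `web` wrapper into the app's flat Source
// shape, falling back to placeholders when fields are missing.
function normalizeSources(raw: RawSource[] | undefined): Source[] {
  return (raw ?? []).map((s) =>
    s.web
      ? { uri: s.web.uri ?? '', title: s.web.title ?? 'Untitled' }
      : { uri: '', title: 'Untitled' },
  );
}

// Example: a grounded payload with a partially populated source.
const payload = { text: 'answer', sources: [{ web: { uri: 'https://example.com' } }] };
console.log(normalizeSources(payload.sources));
// [{ uri: 'https://example.com', title: 'Untitled' }]
```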
@@ -104,16 +104,16 @@ export const getJobStatus = async (
  }
};

export const isImageAFlyer = async (
export const isImageAFlyer = (
  imageFile: File,
  tokenOverride?: string,
): Promise<{ is_flyer: boolean }> => {
): Promise<Response> => {
  const formData = new FormData();
  formData.append('image', imageFile);

  // Use apiFetchWithAuth for FormData to let the browser set the correct Content-Type.
  // The URL must be relative, as the helper constructs the full path.
  const response = await apiFetch(
  return apiFetch(
    '/ai/check-flyer',
    {
      method: 'POST',

@@ -121,28 +121,16 @@ export const isImageAFlyer = async (
    },
    { tokenOverride },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

export const extractAddressFromImage = async (
export const extractAddressFromImage = (
  imageFile: File,
  tokenOverride?: string,
): Promise<{ address: string }> => {
): Promise<Response> => {
  const formData = new FormData();
  formData.append('image', imageFile);

  const response = await apiFetch(
  return apiFetch(
    '/ai/extract-address',
    {
      method: 'POST',

@@ -150,30 +138,18 @@ export const extractAddressFromImage = async (
    },
    { tokenOverride },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

export const extractLogoFromImage = async (
export const extractLogoFromImage = (
  imageFiles: File[],
  tokenOverride?: string,
): Promise<{ store_logo_base_64: string | null }> => {
): Promise<Response> => {
  const formData = new FormData();
  imageFiles.forEach((file) => {
    formData.append('images', file);
  });

  const response = await apiFetch(
  return apiFetch(
    '/ai/extract-logo',
    {
      method: 'POST',

@@ -181,26 +157,14 @@ export const extractLogoFromImage = async (
    },
    { tokenOverride },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

export const getQuickInsights = async (
export const getQuickInsights = (
  items: Partial<FlyerItem>[],
  signal?: AbortSignal,
  tokenOverride?: string,
): Promise<{ text: string }> => {
  const response = await apiFetch(
): Promise<Response> => {
  return apiFetch(
    '/ai/quick-insights',
    {
      method: 'POST',

@@ -210,26 +174,14 @@ export const getQuickInsights = async (
    },
    { tokenOverride, signal },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

export const getDeepDiveAnalysis = async (
export const getDeepDiveAnalysis = (
  items: Partial<FlyerItem>[],
  signal?: AbortSignal,
  tokenOverride?: string,
): Promise<{ text: string }> => {
  const response = await apiFetch(
): Promise<Response> => {
  return apiFetch(
    '/ai/deep-dive',
    {
      method: 'POST',

@@ -239,26 +191,14 @@ export const getDeepDiveAnalysis = async (
    },
    { tokenOverride, signal },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

export const searchWeb = async (
export const searchWeb = (
  query: string,
  signal?: AbortSignal,
  tokenOverride?: string,
): Promise<GroundedResponse> => {
  const response = await apiFetch(
): Promise<Response> => {
  return apiFetch(
    '/ai/search-web',
    {
      method: 'POST',

@@ -268,18 +208,6 @@ export const searchWeb = async (
    },
    { tokenOverride, signal },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

// ============================================================================

@@ -292,9 +220,9 @@ export const planTripWithMaps = async (
  userLocation: GeolocationCoordinates,
  signal?: AbortSignal,
  tokenOverride?: string,
): Promise<GroundedResponse> => {
): Promise<Response> => {
  logger.debug('Stub: planTripWithMaps called with location:', { userLocation });
  const response = await apiFetch(
  return apiFetch(
    '/ai/plan-trip',
    {
      method: 'POST',

@@ -303,18 +231,6 @@ export const planTripWithMaps = async (
    },
    { signal, tokenOverride },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

/**

@@ -322,13 +238,13 @@ export const planTripWithMaps = async (
 * @param prompt A description of the image to generate (e.g., a meal plan).
 * @returns A base64-encoded string of the generated PNG image.
 */
export const generateImageFromText = async (
export const generateImageFromText = (
  prompt: string,
  signal?: AbortSignal,
  tokenOverride?: string,
): Promise<{ imageUrl: string }> => {
): Promise<Response> => {
  logger.debug('Stub: generateImageFromText called with prompt:', { prompt });
  const response = await apiFetch(
  return apiFetch(
    '/ai/generate-image',
    {
      method: 'POST',

@@ -338,18 +254,6 @@ export const generateImageFromText = async (
    },
    { tokenOverride, signal },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

/**

@@ -357,13 +261,13 @@ export const generateImageFromText = async (
 * @param text The text to be spoken.
 * @returns A base64-encoded string of the raw audio data.
 */
export const generateSpeechFromText = async (
export const generateSpeechFromText = (
  text: string,
  signal?: AbortSignal,
  tokenOverride?: string,
): Promise<{ audioUrl: string }> => {
): Promise<Response> => {
  logger.debug('Stub: generateSpeechFromText called with text:', { text });
  const response = await apiFetch(
  return apiFetch(
    '/ai/generate-speech',
    {
      method: 'POST',

@@ -373,18 +277,6 @@ export const generateSpeechFromText = async (
    },
    { tokenOverride, signal },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

/**

@@ -426,34 +318,22 @@ export const startVoiceSession = (callbacks: {
 * @param tokenOverride Optional token for testing.
 * @returns A promise that resolves to the API response containing the extracted text.
 */
export const rescanImageArea = async (
export const rescanImageArea = (
  imageFile: File,
  cropArea: { x: number; y: number; width: number; height: number },
  extractionType: 'store_name' | 'dates' | 'item_details',
  tokenOverride?: string,
): Promise<{ text: string | undefined }> => {
): Promise<Response> => {
  const formData = new FormData();
  formData.append('image', imageFile);
  formData.append('cropArea', JSON.stringify(cropArea));
  formData.append('extractionType', extractionType);

  const response = await apiFetch(
  return apiFetch(
    '/ai/rescan-area',
    { method: 'POST', body: formData },
    { tokenOverride },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};

/**

@@ -461,13 +341,13 @@ export const rescanImageArea = async (
 * @param watchedItems An array of the user's watched master grocery items.
 * @returns A promise that resolves to the raw `Response` object from the API.
 */
export const compareWatchedItemPrices = async (
export const compareWatchedItemPrices = (
  watchedItems: MasterGroceryItem[],
  signal?: AbortSignal,
): Promise<GroundedResponse> => {
): Promise<Response> => {
  // Use the apiFetch wrapper for consistency with other API calls in this file.
  // This centralizes token handling and base URL logic.
  const response = await apiFetch(
  return apiFetch(
    '/ai/compare-prices',
    {
      method: 'POST',

@@ -475,17 +355,4 @@ export const compareWatchedItemPrices = async (
      body: JSON.stringify({ items: watchedItems }),
    },
    { signal },
  );

  if (!response.ok) {
    let errorBody;
    try {
      errorBody = await response.json();
    } catch (e) {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }

  return response.json();
};
)};
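With the per-endpoint `!response.ok` blocks deleted, every caller of this client now receives a raw `Response` and is responsible for parsing it and for surfacing structured errors. One way to keep that logic in a single place is a helper like the following; `parseJsonOrThrow` is a hypothetical name, not something this changeset adds:

```ts
// Hypothetical helper: unwrap a fetch Response, throwing the same
// { status, body } shape that the removed per-endpoint blocks produced.
export async function parseJsonOrThrow<T>(response: Response): Promise<T> {
  if (!response.ok) {
    let errorBody: unknown;
    try {
      errorBody = await response.json();
    } catch {
      errorBody = { message: await response.text() };
    }
    throw { status: response.status, body: errorBody };
  }
  return response.json() as Promise<T>;
}

// Usage sketch against the updated client, which now returns Promise<Response>:
// const res = await aiApiClient.getQuickInsights(items);
// const { text } = await parseJsonOrThrow<{ text: string }>(res);
```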
@@ -239,8 +239,8 @@ describe('AI Service (Server)', () => {

    expect(mockGenerateContent).toHaveBeenCalledTimes(1);
    expect(logger.error).toHaveBeenCalledWith(
      `[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
      { error: nonRetriableError },
      `[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
    );
  });

@@ -281,8 +281,8 @@ describe('AI Service (Server)', () => {
    });

    expect(logger.error).toHaveBeenCalledWith(
      '[AIService Adapter] All AI models failed. Throwing last known error.',
      { lastError: quotaError3 },
      '[AIService Adapter] All AI models failed. Throwing last known error.',
    );
  });
});
@@ -186,13 +186,13 @@ export class AIService {
        return result;
      } catch (error: unknown) {
        lastError = error instanceof Error ? error : new Error(String(error));
        const errorMessage = lastError.message || '';
        const errorMessage = (lastError.message || '').toLowerCase(); // Make case-insensitive

        // Check for specific error messages indicating quota issues or model unavailability.
        if (
          errorMessage.includes('quota') ||
          errorMessage.includes('429') || // HTTP 429 Too Many Requests
          errorMessage.includes('RESOURCE_EXHAUSTED') ||
          errorMessage.includes('resource_exhausted') || // Make case-insensitive
          errorMessage.includes('model is overloaded')
        ) {
          this.logger.warn(
@@ -176,15 +176,13 @@ describe('API Client', () => {
    // We expect the promise to still resolve with the bad response, but log an error.
    await apiClient.apiFetch('/some/failing/endpoint');

    // FIX: Use stringContaining to be resilient to port numbers (e.g., localhost:3001)
    // This checks for the essential parts of the log message without being brittle.
    expect(logger.error).toHaveBeenCalledWith(
      expect.stringContaining('apiFetch: Request to http://'),
      'Internal Server Error',
    );
    expect(logger.error).toHaveBeenCalledWith(
      expect.stringContaining('/api/some/failing/endpoint failed with status 500'),
      'Internal Server Error',
      expect.objectContaining({
        status: 500,
        body: 'Internal Server Error',
        url: expect.stringContaining('/some/failing/endpoint'),
      }),
      'apiFetch: Request failed',
    );
  });

@@ -242,10 +240,6 @@ describe('API Client', () => {
    expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
      error: apiError,
    });

    expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
      error: apiError,
    });
  });

  it('logSearchQuery should log a warning on failure', async () => {

@@ -259,8 +253,6 @@ describe('API Client', () => {
      was_successful: false,
    });
    expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });

    expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
  });
});
@@ -1,6 +1,7 @@
// src/services/apiClient.ts
import { Profile, ShoppingListItem, SearchQuery, Budget, Address } from '../types';
import { logger } from './logger.client';
import { eventBus } from './eventBus';

// This constant should point to your backend API.
// It's often a good practice to store this in an environment variable.

@@ -62,12 +63,12 @@ const refreshToken = async (): Promise<string> => {
    logger.info('Successfully refreshed access token.');
    return data.token;
  } catch (error) {
    logger.error('Failed to refresh token. User will be logged out.', { error });
    logger.error({ error }, 'Failed to refresh token. User session has expired.');
    // Only perform browser-specific actions if in the browser environment.
    if (typeof window !== 'undefined') {
      localStorage.removeItem('authToken');
      // A hard redirect is a simple way to reset the app state to logged-out.
      // window.location.href = '/'; // Removed to allow the caller to handle session expiry.
      // Dispatch a global event that the UI layer can listen for to handle session expiry.
      eventBus.dispatch('sessionExpired');
    }
    throw error;
  }

@@ -144,9 +145,8 @@ export const apiFetch = async (
  // --- DEBUG LOGGING for failed requests ---
  if (!response.ok) {
    const responseText = await response.clone().text();
    logger.error(
      `apiFetch: Request to ${fullUrl} failed with status ${response.status}. Response body:`,
      responseText,
    logger.error({ url: fullUrl, status: response.status, body: responseText },
      'apiFetch: Request failed',
    );
  }
  // --- END DEBUG LOGGING ---
31	src/services/eventBus.ts	(Normal file)

@@ -0,0 +1,31 @@
// src/services/eventBus.ts

/**
 * A simple, generic event bus for cross-component communication without direct coupling.
 * This is particularly useful for broadcasting application-wide events, such as session expiry.
 */

type EventCallback = (data?: any) => void;

class EventBus {
  private listeners: { [key: string]: EventCallback[] } = {};

  on(event: string, callback: EventCallback): void {
    if (!this.listeners[event]) {
      this.listeners[event] = [];
    }
    this.listeners[event].push(callback);
  }

  off(event: string, callback: EventCallback): void {
    if (!this.listeners[event]) return;
    this.listeners[event] = this.listeners[event].filter((l) => l !== callback);
  }

  dispatch(event: string, data?: any): void {
    if (!this.listeners[event]) return;
    this.listeners[event].forEach((callback) => callback(data));
  }
}

export const eventBus = new EventBus();
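A short usage sketch for the new bus, matching the `sessionExpired` event that `refreshToken` now dispatches; the handler body and redirect target are illustrative:

```ts
import { eventBus } from './services/eventBus';

// Subscribe once at application startup.
const onSessionExpired = () => {
  // e.g. clear client-side auth state and send the user to the login screen.
  console.warn('Session expired, redirecting to login.');
  window.location.assign('/login');
};

eventBus.on('sessionExpired', onSessionExpired);

// The API client triggers this when a token refresh fails:
eventBus.dispatch('sessionExpired');

// Unsubscribe when the listening component unmounts.
eventBus.off('sessionExpired', onSessionExpired);
```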
@@ -87,7 +87,7 @@ describe('Geocoding Service', () => {
    // Assert
    expect(result).toEqual(coordinates);
    expect(logger.error).toHaveBeenCalledWith(
      { err: 'Redis down', cacheKey: expect.any(String) },
      { err: expect.any(Error), cacheKey: expect.any(String) },
      'Redis GET or JSON.parse command failed. Proceeding without cache.',
    );
    expect(mockGoogleService.geocode).toHaveBeenCalled(); // Should still proceed to fetch

@@ -107,7 +107,7 @@ describe('Geocoding Service', () => {
    expect(mocks.mockRedis.get).toHaveBeenCalledWith(cacheKey);
    // The service should log the JSON parsing error and continue
    expect(logger.error).toHaveBeenCalledWith(
      { err: expect.any(String), cacheKey: expect.any(String) },
      { err: expect.any(SyntaxError), cacheKey: expect.any(String) },
      'Redis GET or JSON.parse command failed. Proceeding without cache.',
    );
    expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1);

@@ -185,7 +185,7 @@ describe('Geocoding Service', () => {
    // Assert
    expect(result).toEqual(coordinates);
    expect(logger.error).toHaveBeenCalledWith(
      { err: 'Network Error' },
      { err: expect.any(Error) },
      expect.stringContaining('An error occurred while calling the Google Maps Geocoding API'),
    );
    expect(mockNominatimService.geocode).toHaveBeenCalledWith(address, logger);

@@ -223,7 +223,7 @@ describe('Geocoding Service', () => {
    expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1);
    expect(mocks.mockRedis.set).toHaveBeenCalledTimes(1);
    expect(logger.error).toHaveBeenCalledWith(
      { err: 'Redis SET failed', cacheKey: expect.any(String) },
      { err: expect.any(Error), cacheKey: expect.any(String) },
      'Redis SET command failed. Result will not be cached.',
    );
  });

@@ -271,7 +271,7 @@ describe('Geocoding Service', () => {
    // Act & Assert
    await expect(geocodingService.clearGeocodeCache(logger)).rejects.toThrow(redisError);
    expect(logger.error).toHaveBeenCalledWith(
      { err: redisError.message },
      { err: expect.any(Error) },
      'Failed to clear geocode cache from Redis.',
    );
    expect(mocks.mockRedis.del).not.toHaveBeenCalled();
@@ -25,10 +25,7 @@ export class GeocodingService {
        return JSON.parse(cached);
      }
    } catch (error) {
      logger.error(
        { err: error instanceof Error ? error.message : error, cacheKey },
        'Redis GET or JSON.parse command failed. Proceeding without cache.',
      );
      logger.error({ err: error, cacheKey }, 'Redis GET or JSON.parse command failed. Proceeding without cache.');
    }

    if (process.env.GOOGLE_MAPS_API_KEY) {

@@ -44,8 +41,8 @@ export class GeocodingService {
        );
      } catch (error) {
        logger.error(
          { err: error instanceof Error ? error.message : error },
          'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.',
          { err: error },
          'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.'
        );
      }
    } else {

@@ -72,10 +69,7 @@ export class GeocodingService {
      try {
        await redis.set(cacheKey, JSON.stringify(result), 'EX', 60 * 60 * 24 * 30); // Cache for 30 days
      } catch (error) {
        logger.error(
          { err: error instanceof Error ? error.message : error, cacheKey },
          'Redis SET command failed. Result will not be cached.',
        );
        logger.error({ err: error, cacheKey }, 'Redis SET command failed. Result will not be cached.');
      }
    }

@@ -98,10 +92,7 @@ export class GeocodingService {
      logger.info(`Successfully deleted ${totalDeleted} geocode cache entries.`);
      return totalDeleted;
    } catch (error) {
      logger.error(
        { err: error instanceof Error ? error.message : error },
        'Failed to clear geocode cache from Redis.',
      );
      logger.error({ err: error }, 'Failed to clear geocode cache from Redis.');
      throw error;
    }
  }
@@ -164,6 +164,14 @@ const flyerProcessingService = new FlyerProcessingService(
  new FlyerDataTransformer(), // Inject the new transformer
);

/**
 * Helper to ensure that an unknown error is normalized to an Error object.
 * This ensures consistent logging structure and stack traces.
 */
const normalizeError = (error: unknown): Error => {
  return error instanceof Error ? error : new Error(String(error));
};

/**
 * A generic function to attach logging event listeners to any worker.
 * This centralizes logging for job completion and final failure.

@@ -189,16 +197,17 @@ export const flyerWorker = new Worker<FlyerJobData>(
    try {
      // The processJob method creates its own job-specific logger internally.
      return await flyerProcessingService.processJob(job);
    } catch (error: any) {
    } catch (error: unknown) {
      const wrappedError = normalizeError(error);
      // Check for quota errors or other unrecoverable errors from the AI service
      const errorMessage = error?.message || '';
      const errorMessage = wrappedError.message || '';
      if (
        errorMessage.includes('quota') ||
        errorMessage.includes('429') ||
        errorMessage.includes('RESOURCE_EXHAUSTED')
      ) {
        logger.error(
          { err: error, jobId: job.id },
          { err: wrappedError, jobId: job.id },
          '[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
        );
        throw new UnrecoverableError(errorMessage);

@@ -224,19 +233,16 @@ export const emailWorker = new Worker<EmailJobData>(
    try {
      await emailService.sendEmail(job.data, jobLogger);
    } catch (error: unknown) {
      // Standardize error logging to capture the full error object, including the stack trace.
      // This provides more context for debugging than just logging the message.
      const wrappedError = normalizeError(error);
      logger.error(
        {
          // Log the full error object for better diagnostics. // The patch requested this specific error handling.
          err: error instanceof Error ? error : new Error(String(error)),
          // Also include the job data for context.
          err: wrappedError,
          jobData: job.data,
        },
        `[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
      );
      // Re-throw to let BullMQ handle the failure and retry.
      throw error;
      throw wrappedError;
    }
  },
  {

@@ -265,15 +271,12 @@ export const analyticsWorker = new Worker<AnalyticsJobData>(
      await new Promise((resolve) => setTimeout(resolve, 10000)); // Simulate a 10-second task
      logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
    } catch (error: unknown) {
      const wrappedError = normalizeError(error);
      // Standardize error logging.
      logger.error(
        {
          err: error instanceof Error ? error : new Error(String(error)),
          jobData: job.data,
        },
      logger.error({ err: wrappedError, jobData: job.data },
        `[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
      );
      throw error; // Re-throw to let BullMQ handle the failure and retry.
      throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
    }
  },
  {

@@ -330,14 +333,13 @@ export const cleanupWorker = new Worker<CleanupJobData>(
        `[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`,
      );
    } catch (error: unknown) {
      const wrappedError = normalizeError(error);
      // Standardize error logging.
      logger.error(
        {
          err: error instanceof Error ? error : new Error(String(error)),
        },
        { err: wrappedError },
        `[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
      );
      throw error; // Re-throw to let BullMQ handle the failure and retry.
      throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
    }
  },
  {

@@ -365,15 +367,13 @@ export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
        `[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`,
      );
    } catch (error: unknown) {
      const wrappedError = normalizeError(error);
      // Standardize error logging.
      logger.error(
        {
          err: error instanceof Error ? error : new Error(String(error)),
          jobData: job.data,
        },
        { err: wrappedError, jobData: job.data },
        `[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
      );
      throw error; // Re-throw to let BullMQ handle the failure and retry.
      throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
    }
  },
  {

@@ -395,8 +395,9 @@ export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
      jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`);
      return { deletedCount };
    } catch (error: unknown) {
      jobLogger.error({ err: error }, `[TokenCleanupWorker] Job ${job.id} failed.`);
      throw error;
      const wrappedError = normalizeError(error);
      jobLogger.error({ err: wrappedError }, `[TokenCleanupWorker] Job ${job.id} failed.`);
      throw wrappedError;
    }
  },
  {
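Every worker now funnels the caught value through `normalizeError` before logging and re-throwing, so BullMQ retries always see a real `Error` and pino's `err` field carries a stack trace. A compact, self-contained illustration of the behaviour:

```ts
const normalizeError = (error: unknown): Error =>
  error instanceof Error ? error : new Error(String(error));

// A rejected promise can carry any value, e.g. the bare string used in the worker test.
const caught: unknown = 'SMTP server is down';

const wrapped = normalizeError(caught);
console.log(wrapped instanceof Error); // true
console.log(wrapped.message);          // "SMTP server is down"
console.log(typeof wrapped.stack);     // "string", which structured logging can use
```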
@@ -175,7 +175,7 @@ describe('Queue Workers', () => {
    const emailError = 'SMTP server is down'; // Reject with a string
    mocks.sendEmail.mockRejectedValue(emailError);

    await expect(emailProcessor(job)).rejects.toBe(emailError);
    await expect(emailProcessor(job)).rejects.toThrow(emailError);

    // The worker should wrap the string in an Error object for logging
    expect(mockLogger.error).toHaveBeenCalledWith(