Compare commits
31 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
582035b60e | ||
| 44e7670a89 | |||
| 2abfb3ed6e | |||
|
|
219de4a25c | ||
| 1540d5051f | |||
| 9c978c26fa | |||
|
|
adb109d8e9 | ||
| c668c8785f | |||
|
|
695bbb61b9 | ||
| 877c971833 | |||
| ed3af07aab | |||
|
|
dd4b34edfa | ||
| 91fa2f0516 | |||
|
|
aefd57e57b | ||
| 2ca4eb47ac | |||
| a4fe30da22 | |||
|
|
abab7fd25e | ||
| 53dd26d2d9 | |||
| ab3da0336c | |||
|
|
ed6d6349a2 | ||
| d4db2a709a | |||
| 508583809b | |||
|
|
6b1f7e7590 | ||
| 07bb31f4fb | |||
| a42fb76da8 | |||
|
|
08c320423c | ||
| d2498065ed | |||
| 56dc96f418 | |||
|
|
4e9aa0efc3 | ||
| e5e4b1316c | |||
| e8d511b4de |
@@ -283,7 +283,7 @@ jobs:
|
|||||||
echo "WARNING: No schema hash found in the test database."
|
echo "WARNING: No schema hash found in the test database."
|
||||||
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
|
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
|
||||||
echo "--- Debug: Dumping schema_info table ---"
|
echo "--- Debug: Dumping schema_info table ---"
|
||||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT * FROM public.schema_info;" || true
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -P pager=off -c "SELECT * FROM public.schema_info;" || true
|
||||||
echo "----------------------------------------"
|
echo "----------------------------------------"
|
||||||
# We allow the deployment to continue, but a manual schema update is required.
|
# We allow the deployment to continue, but a manual schema update is required.
|
||||||
# You could choose to fail here by adding `exit 1`.
|
# You could choose to fail here by adding `exit 1`.
|
||||||
|
|||||||
@@ -88,7 +88,7 @@ module.exports = {
|
|||||||
// --- General Worker ---
|
// --- General Worker ---
|
||||||
name: 'flyer-crawler-worker',
|
name: 'flyer-crawler-worker',
|
||||||
script: './node_modules/.bin/tsx',
|
script: './node_modules/.bin/tsx',
|
||||||
args: 'src/services/queueService.server.ts', // tsx will execute this file
|
args: 'src/services/worker.ts', // tsx will execute this file
|
||||||
// Production Environment Settings
|
// Production Environment Settings
|
||||||
env_production: {
|
env_production: {
|
||||||
NODE_ENV: 'production',
|
NODE_ENV: 'production',
|
||||||
@@ -164,7 +164,7 @@ module.exports = {
|
|||||||
// --- Analytics Worker ---
|
// --- Analytics Worker ---
|
||||||
name: 'flyer-crawler-analytics-worker',
|
name: 'flyer-crawler-analytics-worker',
|
||||||
script: './node_modules/.bin/tsx',
|
script: './node_modules/.bin/tsx',
|
||||||
args: 'src/services/queueService.server.ts', // tsx will execute this file
|
args: 'src/services/worker.ts', // tsx will execute this file
|
||||||
// Production Environment Settings
|
// Production Environment Settings
|
||||||
env_production: {
|
env_production: {
|
||||||
NODE_ENV: 'production',
|
NODE_ENV: 'production',
|
||||||
|
|||||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "flyer-crawler",
|
"name": "flyer-crawler",
|
||||||
"version": "0.1.6",
|
"version": "0.1.17",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "flyer-crawler",
|
"name": "flyer-crawler",
|
||||||
"version": "0.1.6",
|
"version": "0.1.17",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@bull-board/api": "^6.14.2",
|
"@bull-board/api": "^6.14.2",
|
||||||
"@bull-board/express": "^6.14.2",
|
"@bull-board/express": "^6.14.2",
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "flyer-crawler",
|
"name": "flyer-crawler",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "0.1.6",
|
"version": "0.1.17",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
||||||
|
|||||||
@@ -15,16 +15,19 @@ import type { Logger } from 'pino';
|
|||||||
// Create a mock logger that we can inject into requests and assert against.
|
// Create a mock logger that we can inject into requests and assert against.
|
||||||
// We only mock the methods we intend to spy on. The rest of the complex Pino
|
// We only mock the methods we intend to spy on. The rest of the complex Pino
|
||||||
// Logger type is satisfied by casting, which is a common and clean testing practice.
|
// Logger type is satisfied by casting, which is a common and clean testing practice.
|
||||||
const mockLogger = {
|
const { mockLogger } = vi.hoisted(() => {
|
||||||
error: vi.fn(),
|
const mockLogger = {
|
||||||
warn: vi.fn(),
|
error: vi.fn(),
|
||||||
info: vi.fn(),
|
warn: vi.fn(),
|
||||||
debug: vi.fn(),
|
info: vi.fn(),
|
||||||
fatal: vi.fn(),
|
debug: vi.fn(),
|
||||||
trace: vi.fn(),
|
fatal: vi.fn(),
|
||||||
silent: vi.fn(),
|
trace: vi.fn(),
|
||||||
child: vi.fn().mockReturnThis(),
|
silent: vi.fn(),
|
||||||
} as unknown as Logger;
|
child: vi.fn().mockReturnThis(),
|
||||||
|
};
|
||||||
|
return { mockLogger };
|
||||||
|
});
|
||||||
|
|
||||||
// Mock the global logger as a fallback, though our tests will focus on req.log
|
// Mock the global logger as a fallback, though our tests will focus on req.log
|
||||||
vi.mock('../services/logger.server', () => ({ logger: mockLogger }));
|
vi.mock('../services/logger.server', () => ({ logger: mockLogger }));
|
||||||
@@ -37,7 +40,7 @@ const app = express();
|
|||||||
app.use(express.json());
|
app.use(express.json());
|
||||||
// Add a middleware to inject our mock logger into each request as `req.log`
|
// Add a middleware to inject our mock logger into each request as `req.log`
|
||||||
app.use((req: Request, res: Response, next: NextFunction) => {
|
app.use((req: Request, res: Response, next: NextFunction) => {
|
||||||
req.log = mockLogger;
|
req.log = mockLogger as unknown as Logger;
|
||||||
next();
|
next();
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -106,7 +109,10 @@ describe('errorHandler Middleware', () => {
|
|||||||
it('should return a generic 500 error for a standard Error object', async () => {
|
it('should return a generic 500 error for a standard Error object', async () => {
|
||||||
const response = await supertest(app).get('/generic-error');
|
const response = await supertest(app).get('/generic-error');
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body).toEqual({ message: 'A generic server error occurred.' });
|
// In test/dev, we now expect a stack trace for 5xx errors.
|
||||||
|
expect(response.body.message).toBe('A generic server error occurred.');
|
||||||
|
expect(response.body.stack).toBeDefined();
|
||||||
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
err: expect.any(Error),
|
err: expect.any(Error),
|
||||||
@@ -116,7 +122,7 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
expect.stringMatching(/--- \[TEST\] UNHANDLED ERROR \(ID: \w+\) ---/),
|
||||||
expect.any(Error),
|
expect.any(Error),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -130,15 +136,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(Error),
|
err: expect.any(Error),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 404,
|
statusCode: 404,
|
||||||
},
|
},
|
||||||
'Client Error on GET /http-error-404: Resource not found',
|
'Client Error on GET /http-error-404: Resource not found',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(Error),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a NotFoundError with a 404 status', async () => {
|
it('should handle a NotFoundError with a 404 status', async () => {
|
||||||
@@ -150,15 +152,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(NotFoundError),
|
err: expect.any(NotFoundError),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 404,
|
statusCode: 404,
|
||||||
},
|
},
|
||||||
'Client Error on GET /not-found-error: Specific resource missing',
|
'Client Error on GET /not-found-error: Specific resource missing',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(NotFoundError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a ForeignKeyConstraintError with a 400 status and the specific error message', async () => {
|
it('should handle a ForeignKeyConstraintError with a 400 status and the specific error message', async () => {
|
||||||
@@ -170,15 +168,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(ForeignKeyConstraintError),
|
err: expect.any(ForeignKeyConstraintError),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 400,
|
statusCode: 400,
|
||||||
},
|
},
|
||||||
'Client Error on GET /fk-error: The referenced item does not exist.',
|
'Client Error on GET /fk-error: The referenced item does not exist.',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(ForeignKeyConstraintError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a UniqueConstraintError with a 409 status and the specific error message', async () => {
|
it('should handle a UniqueConstraintError with a 409 status and the specific error message', async () => {
|
||||||
@@ -190,15 +184,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(UniqueConstraintError),
|
err: expect.any(UniqueConstraintError),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 409,
|
statusCode: 409,
|
||||||
},
|
},
|
||||||
'Client Error on GET /unique-error: This item already exists.',
|
'Client Error on GET /unique-error: This item already exists.',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(UniqueConstraintError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a ValidationError with a 400 status and include the validation errors array', async () => {
|
it('should handle a ValidationError with a 400 status and include the validation errors array', async () => {
|
||||||
@@ -219,17 +209,17 @@ describe('errorHandler Middleware', () => {
|
|||||||
},
|
},
|
||||||
'Client Error on GET /validation-error: Input validation failed',
|
'Client Error on GET /validation-error: Input validation failed',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(ValidationError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a DatabaseError with a 500 status and a generic message', async () => {
|
it('should handle a DatabaseError with a 500 status and a generic message', async () => {
|
||||||
const response = await supertest(app).get('/db-error-500');
|
const response = await supertest(app).get('/db-error-500');
|
||||||
|
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body).toEqual({ message: 'A database connection issue occurred.' });
|
// In test/dev, we now expect a stack trace for 5xx errors.
|
||||||
|
expect(response.body.message).toBe('A database connection issue occurred.');
|
||||||
|
expect(response.body.stack).toBeDefined();
|
||||||
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
err: expect.any(DatabaseError),
|
err: expect.any(DatabaseError),
|
||||||
@@ -239,7 +229,7 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
expect.stringMatching(/--- \[TEST\] UNHANDLED ERROR \(ID: \w+\) ---/),
|
||||||
expect.any(DatabaseError),
|
expect.any(DatabaseError),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -249,8 +239,14 @@ describe('errorHandler Middleware', () => {
|
|||||||
|
|
||||||
expect(response.status).toBe(401);
|
expect(response.status).toBe(401);
|
||||||
expect(response.body).toEqual({ message: 'Invalid Token' });
|
expect(response.body).toEqual({ message: 'Invalid Token' });
|
||||||
// 4xx errors log as warn
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
expect(mockLogger.warn).toHaveBeenCalled();
|
{
|
||||||
|
err: expect.any(Error),
|
||||||
|
statusCode: 401,
|
||||||
|
},
|
||||||
|
'Client Error on GET /unauthorized-error-no-status: Invalid Token',
|
||||||
|
);
|
||||||
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle an UnauthorizedError with explicit status', async () => {
|
it('should handle an UnauthorizedError with explicit status', async () => {
|
||||||
@@ -258,6 +254,14 @@ describe('errorHandler Middleware', () => {
|
|||||||
|
|
||||||
expect(response.status).toBe(401);
|
expect(response.status).toBe(401);
|
||||||
expect(response.body).toEqual({ message: 'Invalid Token' });
|
expect(response.body).toEqual({ message: 'Invalid Token' });
|
||||||
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
|
{
|
||||||
|
err: expect.any(Error),
|
||||||
|
statusCode: 401,
|
||||||
|
},
|
||||||
|
'Client Error on GET /unauthorized-error-with-status: Invalid Token',
|
||||||
|
);
|
||||||
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should call next(err) if headers have already been sent', () => {
|
it('should call next(err) if headers have already been sent', () => {
|
||||||
@@ -302,6 +306,7 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(response.body.message).toMatch(
|
expect(response.body.message).toMatch(
|
||||||
/An unexpected server error occurred. Please reference error ID: \w+/,
|
/An unexpected server error occurred. Please reference error ID: \w+/,
|
||||||
);
|
);
|
||||||
|
expect(response.body.stack).toBeUndefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return the actual error message for client errors (4xx) in production', async () => {
|
it('should return the actual error message for client errors (4xx) in production', async () => {
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
// src/middleware/errorHandler.ts
|
// src/middleware/errorHandler.ts
|
||||||
import { Request, Response, NextFunction } from 'express';
|
import { Request, Response, NextFunction } from 'express';
|
||||||
|
import crypto from 'crypto';
|
||||||
import { ZodError } from 'zod';
|
import { ZodError } from 'zod';
|
||||||
import {
|
import {
|
||||||
ForeignKeyConstraintError,
|
ForeignKeyConstraintError,
|
||||||
@@ -24,45 +25,77 @@ export const errorHandler = (err: Error, req: Request, res: Response, next: Next
|
|||||||
// Use the request-scoped logger if available, otherwise fall back to the global logger.
|
// Use the request-scoped logger if available, otherwise fall back to the global logger.
|
||||||
const log = req.log || logger;
|
const log = req.log || logger;
|
||||||
|
|
||||||
// --- Handle Zod Validation Errors ---
|
// --- Handle Zod Validation Errors (from validateRequest middleware) ---
|
||||||
if (err instanceof ZodError) {
|
if (err instanceof ZodError) {
|
||||||
log.warn({ err: err.flatten() }, 'Request validation failed');
|
const statusCode = 400;
|
||||||
return res.status(400).json({
|
const message = 'The request data is invalid.';
|
||||||
message: 'The request data is invalid.',
|
const errors = err.issues.map((e) => ({ path: e.path, message: e.message }));
|
||||||
errors: err.issues.map((e) => ({ path: e.path, message: e.message })),
|
log.warn({ err, validationErrors: errors, statusCode }, `Client Error on ${req.method} ${req.path}: ${message}`);
|
||||||
});
|
return res.status(statusCode).json({ message, errors });
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Handle Custom Operational Errors ---
|
// --- Handle Custom Operational Errors ---
|
||||||
if (err instanceof NotFoundError) {
|
if (err instanceof NotFoundError) {
|
||||||
log.info({ err }, 'Resource not found');
|
const statusCode = 404;
|
||||||
return res.status(404).json({ message: err.message });
|
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
|
return res.status(statusCode).json({ message: err.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (err instanceof ValidationError) {
|
if (err instanceof ValidationError) {
|
||||||
log.warn({ err }, 'Validation error occurred');
|
const statusCode = 400;
|
||||||
return res.status(400).json({ message: err.message, errors: err.validationErrors });
|
log.warn(
|
||||||
|
{ err, validationErrors: err.validationErrors, statusCode },
|
||||||
|
`Client Error on ${req.method} ${req.path}: ${err.message}`,
|
||||||
|
);
|
||||||
|
return res.status(statusCode).json({ message: err.message, errors: err.validationErrors });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (err instanceof UniqueConstraintError) {
|
if (err instanceof UniqueConstraintError) {
|
||||||
log.warn({ err }, 'Constraint error occurred');
|
const statusCode = 409;
|
||||||
return res.status(409).json({ message: err.message }); // Use 409 Conflict for unique constraints
|
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
|
return res.status(statusCode).json({ message: err.message }); // Use 409 Conflict for unique constraints
|
||||||
}
|
}
|
||||||
|
|
||||||
if (err instanceof ForeignKeyConstraintError) {
|
if (err instanceof ForeignKeyConstraintError) {
|
||||||
log.warn({ err }, 'Foreign key constraint violation');
|
const statusCode = 400;
|
||||||
return res.status(400).json({ message: err.message });
|
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
|
return res.status(statusCode).json({ message: err.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- Handle Generic Errors ---
|
// --- Handle Generic Client Errors (e.g., from express-jwt, or manual status setting) ---
|
||||||
// Log the full error object for debugging. The pino logger will handle redaction.
|
let status = (err as any).status || (err as any).statusCode;
|
||||||
log.error({ err }, 'An unhandled error occurred in an Express route');
|
// Default UnauthorizedError to 401 if no status is present, a common case for express-jwt.
|
||||||
|
if (err.name === 'UnauthorizedError' && !status) {
|
||||||
|
status = 401;
|
||||||
|
}
|
||||||
|
if (status && status >= 400 && status < 500) {
|
||||||
|
log.warn({ err, statusCode: status }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
|
return res.status(status).json({ message: err.message });
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Handle All Other (500-level) Errors ---
|
||||||
|
const errorId = crypto.randomBytes(4).toString('hex');
|
||||||
|
log.error(
|
||||||
|
{
|
||||||
|
err,
|
||||||
|
errorId,
|
||||||
|
req: { method: req.method, url: req.url, headers: req.headers, body: req.body },
|
||||||
|
},
|
||||||
|
`Unhandled API Error (ID: ${errorId})`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Also log to console in test environment for visibility in test runners
|
||||||
|
if (process.env.NODE_ENV === 'test') {
|
||||||
|
console.error(`--- [TEST] UNHANDLED ERROR (ID: ${errorId}) ---`, err);
|
||||||
|
}
|
||||||
|
|
||||||
// In production, send a generic message to avoid leaking implementation details.
|
// In production, send a generic message to avoid leaking implementation details.
|
||||||
if (process.env.NODE_ENV === 'production') {
|
if (process.env.NODE_ENV === 'production') {
|
||||||
return res.status(500).json({ message: 'An internal server error occurred.' });
|
return res.status(500).json({
|
||||||
|
message: `An unexpected server error occurred. Please reference error ID: ${errorId}`,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// In development, send more details for easier debugging.
|
// In non-production environments (dev, test, etc.), send more details for easier debugging.
|
||||||
return res.status(500).json({ message: err.message, stack: err.stack });
|
return res.status(500).json({ message: err.message, stack: err.stack, errorId });
|
||||||
};
|
};
|
||||||
55
src/providers/ApiProvider.test.tsx
Normal file
55
src/providers/ApiProvider.test.tsx
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
// src/providers/ApiProvider.test.tsx
|
||||||
|
import React, { useContext } from 'react';
|
||||||
|
import { render, screen } from '@testing-library/react';
|
||||||
|
import { describe, it, expect, vi } from 'vitest';
|
||||||
|
import { ApiProvider } from './ApiProvider';
|
||||||
|
import { ApiContext } from '../contexts/ApiContext';
|
||||||
|
import * as apiClient from '../services/apiClient';
|
||||||
|
|
||||||
|
// Mock the apiClient module.
|
||||||
|
// Since ApiProvider and ApiContext import * as apiClient, mocking it ensures
|
||||||
|
// we control the reference identity and can verify it's being passed correctly.
|
||||||
|
vi.mock('../services/apiClient', () => ({
|
||||||
|
fetchFlyers: vi.fn(),
|
||||||
|
fetchMasterItems: vi.fn(),
|
||||||
|
// Add other mocked methods as needed for the shape to be valid-ish
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe('ApiProvider & ApiContext', () => {
|
||||||
|
const TestConsumer = () => {
|
||||||
|
const contextValue = useContext(ApiContext);
|
||||||
|
// We check if the context value is strictly equal to the imported module
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<span data-testid="value-check">
|
||||||
|
{contextValue === apiClient ? 'Matches apiClient' : 'Does not match'}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
it('renders children correctly', () => {
|
||||||
|
render(
|
||||||
|
<ApiProvider>
|
||||||
|
<div data-testid="child">Child Content</div>
|
||||||
|
</ApiProvider>
|
||||||
|
);
|
||||||
|
expect(screen.getByTestId('child')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('Child Content')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('provides the apiClient module via context', () => {
|
||||||
|
render(
|
||||||
|
<ApiProvider>
|
||||||
|
<TestConsumer />
|
||||||
|
</ApiProvider>
|
||||||
|
);
|
||||||
|
expect(screen.getByTestId('value-check')).toHaveTextContent('Matches apiClient');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ApiContext has apiClient as the default value (when no provider is present)', () => {
|
||||||
|
// This verifies the logic in ApiContext.tsx: createContext(apiClient)
|
||||||
|
render(<TestConsumer />);
|
||||||
|
expect(screen.getByTestId('value-check')).toHaveTextContent('Matches apiClient');
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -13,7 +13,6 @@ import {
|
|||||||
import type { SuggestedCorrection, Brand, UserProfile, UnmatchedFlyerItem } from '../types';
|
import type { SuggestedCorrection, Brand, UserProfile, UnmatchedFlyerItem } from '../types';
|
||||||
import { NotFoundError } from '../services/db/errors.db'; // This can stay, it's a type/class not a module with side effects.
|
import { NotFoundError } from '../services/db/errors.db'; // This can stay, it's a type/class not a module with side effects.
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// Mock the file upload middleware to allow testing the controller's internal check
|
// Mock the file upload middleware to allow testing the controller's internal check
|
||||||
vi.mock('../middleware/fileUpload.middleware', () => ({
|
vi.mock('../middleware/fileUpload.middleware', () => ({
|
||||||
@@ -96,8 +95,9 @@ vi.mock('@bull-board/express', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ import { createMockUserProfile } from '../tests/utils/mockFactories';
|
|||||||
import type { Job } from 'bullmq';
|
import type { Job } from 'bullmq';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// Mock the background job service to control its methods.
|
// Mock the background job service to control its methods.
|
||||||
vi.mock('../services/backgroundJobService', () => ({
|
vi.mock('../services/backgroundJobService', () => ({
|
||||||
@@ -66,8 +65,9 @@ import {
|
|||||||
} from '../services/queueService.server';
|
} from '../services/queueService.server';
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
|
|||||||
@@ -5,7 +5,16 @@ import type { Request, Response, NextFunction } from 'express';
|
|||||||
import { createMockUserProfile, createMockActivityLogItem } from '../tests/utils/mockFactories';
|
import { createMockUserProfile, createMockActivityLogItem } from '../tests/utils/mockFactories';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
const { mockLogger } = vi.hoisted(() => ({
|
||||||
|
mockLogger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
child: vi.fn().mockReturnThis(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
vi.mock('../lib/queue', () => ({
|
vi.mock('../lib/queue', () => ({
|
||||||
serverAdapter: {
|
serverAdapter: {
|
||||||
@@ -27,19 +36,22 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
notificationRepo: {},
|
notificationRepo: {},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the queue service to control worker statuses
|
// Mock the queue service for queue status checks
|
||||||
vi.mock('../services/queueService.server', () => ({
|
vi.mock('../services/queueService.server', () => ({
|
||||||
|
flyerQueue: { name: 'flyer-processing', getJobCounts: vi.fn() },
|
||||||
|
emailQueue: { name: 'email-sending', getJobCounts: vi.fn() },
|
||||||
|
analyticsQueue: { name: 'analytics-reporting', getJobCounts: vi.fn() },
|
||||||
|
cleanupQueue: { name: 'file-cleanup', getJobCounts: vi.fn() },
|
||||||
|
weeklyAnalyticsQueue: { name: 'weekly-analytics-reporting', getJobCounts: vi.fn() },
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock the worker service for worker status checks
|
||||||
|
vi.mock('../services/workers.server', () => ({
|
||||||
flyerWorker: { name: 'flyer-processing', isRunning: vi.fn() },
|
flyerWorker: { name: 'flyer-processing', isRunning: vi.fn() },
|
||||||
emailWorker: { name: 'email-sending', isRunning: vi.fn() },
|
emailWorker: { name: 'email-sending', isRunning: vi.fn() },
|
||||||
analyticsWorker: { name: 'analytics-reporting', isRunning: vi.fn() },
|
analyticsWorker: { name: 'analytics-reporting', isRunning: vi.fn() },
|
||||||
cleanupWorker: { name: 'file-cleanup', isRunning: vi.fn() },
|
cleanupWorker: { name: 'file-cleanup', isRunning: vi.fn() },
|
||||||
weeklyAnalyticsWorker: { name: 'weekly-analytics-reporting', isRunning: vi.fn() },
|
weeklyAnalyticsWorker: { name: 'weekly-analytics-reporting', isRunning: vi.fn() },
|
||||||
flyerQueue: { name: 'flyer-processing', getJobCounts: vi.fn() },
|
|
||||||
emailQueue: { name: 'email-sending', getJobCounts: vi.fn() },
|
|
||||||
analyticsQueue: { name: 'analytics-reporting', getJobCounts: vi.fn() },
|
|
||||||
cleanupQueue: { name: 'file-cleanup', getJobCounts: vi.fn() },
|
|
||||||
// FIX: Add the missing weeklyAnalyticsQueue to prevent import errors in admin.routes.ts
|
|
||||||
weeklyAnalyticsQueue: { name: 'weekly-analytics-reporting', getJobCounts: vi.fn() },
|
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock other dependencies that are part of the adminRouter setup but not directly tested here
|
// Mock other dependencies that are part of the adminRouter setup but not directly tested here
|
||||||
@@ -67,8 +79,10 @@ import adminRouter from './admin.routes';
|
|||||||
|
|
||||||
// Import the mocked modules to control them
|
// Import the mocked modules to control them
|
||||||
import * as queueService from '../services/queueService.server';
|
import * as queueService from '../services/queueService.server';
|
||||||
|
import * as workerService from '../services/workers.server';
|
||||||
import { adminRepo } from '../services/db/index.db';
|
import { adminRepo } from '../services/db/index.db';
|
||||||
const mockedQueueService = queueService as Mocked<typeof queueService>;
|
const mockedQueueService = queueService as Mocked<typeof queueService>;
|
||||||
|
const mockedWorkerService = workerService as Mocked<typeof workerService>;
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', () => ({
|
||||||
@@ -137,11 +151,11 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
|||||||
describe('GET /workers/status', () => {
|
describe('GET /workers/status', () => {
|
||||||
it('should return the status of all registered workers', async () => {
|
it('should return the status of all registered workers', async () => {
|
||||||
// Arrange: Set the mock status for each worker
|
// Arrange: Set the mock status for each worker
|
||||||
vi.mocked(mockedQueueService.flyerWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.flyerWorker.isRunning).mockReturnValue(true);
|
||||||
vi.mocked(mockedQueueService.emailWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.emailWorker.isRunning).mockReturnValue(true);
|
||||||
vi.mocked(mockedQueueService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
|
vi.mocked(mockedWorkerService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
|
||||||
vi.mocked(mockedQueueService.cleanupWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.cleanupWorker.isRunning).mockReturnValue(true);
|
||||||
vi.mocked(mockedQueueService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app).get('/api/admin/workers/status');
|
const response = await supertest(app).get('/api/admin/workers/status');
|
||||||
|
|||||||
@@ -25,12 +25,14 @@ import {
|
|||||||
analyticsQueue,
|
analyticsQueue,
|
||||||
cleanupQueue,
|
cleanupQueue,
|
||||||
weeklyAnalyticsQueue,
|
weeklyAnalyticsQueue,
|
||||||
flyerWorker,
|
} from '../services/queueService.server'; // Import your queues
|
||||||
emailWorker,
|
import {
|
||||||
analyticsWorker,
|
analyticsWorker,
|
||||||
cleanupWorker,
|
cleanupWorker,
|
||||||
|
emailWorker,
|
||||||
|
flyerWorker,
|
||||||
weeklyAnalyticsWorker,
|
weeklyAnalyticsWorker,
|
||||||
} from '../services/queueService.server'; // Import your queues
|
} from '../services/workers.server';
|
||||||
import { getSimpleWeekAndYear } from '../utils/dateUtils';
|
import { getSimpleWeekAndYear } from '../utils/dateUtils';
|
||||||
import {
|
import {
|
||||||
requiredString,
|
requiredString,
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import type { Request, Response, NextFunction } from 'express';
|
|||||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
vi.mock('../services/db/index.db', () => ({
|
vi.mock('../services/db/index.db', () => ({
|
||||||
adminRepo: {
|
adminRepo: {
|
||||||
@@ -45,8 +44,9 @@ import adminRouter from './admin.routes';
|
|||||||
import { adminRepo } from '../services/db/index.db';
|
import { adminRepo } from '../services/db/index.db';
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import supertest from 'supertest';
|
|||||||
import type { Request, Response, NextFunction } from 'express';
|
import type { Request, Response, NextFunction } from 'express';
|
||||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// Mock dependencies
|
// Mock dependencies
|
||||||
vi.mock('../services/geocodingService.server', () => ({
|
vi.mock('../services/geocodingService.server', () => ({
|
||||||
@@ -50,8 +49,9 @@ import adminRouter from './admin.routes';
|
|||||||
import { geocodingService } from '../services/geocodingService.server';
|
import { geocodingService } from '../services/geocodingService.server';
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ import { createMockUserProfile, createMockAdminUserView } from '../tests/utils/m
|
|||||||
import type { UserProfile, Profile } from '../types';
|
import type { UserProfile, Profile } from '../types';
|
||||||
import { NotFoundError } from '../services/db/errors.db';
|
import { NotFoundError } from '../services/db/errors.db';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
vi.mock('../services/db/index.db', () => ({
|
vi.mock('../services/db/index.db', () => ({
|
||||||
adminRepo: {
|
adminRepo: {
|
||||||
@@ -44,8 +43,9 @@ vi.mock('@bull-board/express', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the router AFTER all mocks are defined.
|
// Import the router AFTER all mocks are defined.
|
||||||
|
|||||||
@@ -55,8 +55,9 @@ import aiRouter from './ai.routes';
|
|||||||
import { flyerQueue } from '../services/queueService.server';
|
import { flyerQueue } from '../services/queueService.server';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport module to control authentication for different tests.
|
// Mock the passport module to control authentication for different tests.
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ import {
|
|||||||
createMockUserProfile,
|
createMockUserProfile,
|
||||||
createMockUserWithPasswordHash,
|
createMockUserWithPasswordHash,
|
||||||
} from '../tests/utils/mockFactories';
|
} from '../tests/utils/mockFactories';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// --- FIX: Hoist passport mocks to be available for vi.mock ---
|
// --- FIX: Hoist passport mocks to be available for vi.mock ---
|
||||||
const passportMocks = vi.hoisted(() => {
|
const passportMocks = vi.hoisted(() => {
|
||||||
@@ -111,8 +110,9 @@ vi.mock('../services/db/connection.db', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the email service
|
// Mock the email service
|
||||||
@@ -144,6 +144,8 @@ import { UniqueConstraintError } from '../services/db/errors.db'; // Import actu
|
|||||||
import express from 'express';
|
import express from 'express';
|
||||||
import { errorHandler } from '../middleware/errorHandler'; // Assuming this exists
|
import { errorHandler } from '../middleware/errorHandler'; // Assuming this exists
|
||||||
|
|
||||||
|
const { mockLogger } = await import('../tests/utils/mockLogger');
|
||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
app.use(express.json());
|
app.use(express.json());
|
||||||
app.use(cookieParser()); // Mount BEFORE router
|
app.use(cookieParser()); // Mount BEFORE router
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ import {
|
|||||||
createMockBudget,
|
createMockBudget,
|
||||||
createMockSpendingByCategory,
|
createMockSpendingByCategory,
|
||||||
} from '../tests/utils/mockFactories';
|
} from '../tests/utils/mockFactories';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { ForeignKeyConstraintError, NotFoundError } from '../services/db/errors.db';
|
import { ForeignKeyConstraintError, NotFoundError } from '../services/db/errors.db';
|
||||||
// 1. Mock the Service Layer directly.
|
// 1. Mock the Service Layer directly.
|
||||||
@@ -26,8 +25,9 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the router and mocked DB AFTER all mocks are defined.
|
// Import the router and mocked DB AFTER all mocks are defined.
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import supertest from 'supertest';
|
|||||||
import type { Request, Response, NextFunction } from 'express';
|
import type { Request, Response, NextFunction } from 'express';
|
||||||
import { createMockUserProfile, createMockWatchedItemDeal } from '../tests/utils/mockFactories';
|
import { createMockUserProfile, createMockWatchedItemDeal } from '../tests/utils/mockFactories';
|
||||||
import type { WatchedItemDeal } from '../types';
|
import type { WatchedItemDeal } from '../types';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
|
|
||||||
// 1. Mock the Service Layer directly.
|
// 1. Mock the Service Layer directly.
|
||||||
@@ -17,10 +16,12 @@ vi.mock('../services/db/deals.db', () => ({
|
|||||||
// Import the router and mocked repo AFTER all mocks are defined.
|
// Import the router and mocked repo AFTER all mocks are defined.
|
||||||
import dealsRouter from './deals.routes';
|
import dealsRouter from './deals.routes';
|
||||||
import { dealsRepo } from '../services/db/deals.db';
|
import { dealsRepo } from '../services/db/deals.db';
|
||||||
|
import { mockLogger } from '../tests/utils/mockLogger';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
|
|||||||
@@ -23,8 +23,9 @@ import * as db from '../services/db/index.db';
|
|||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
import { mockLogger } from '../tests/utils/mockLogger';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Define a reusable matcher for the logger object.
|
// Define a reusable matcher for the logger object.
|
||||||
|
|||||||
@@ -27,8 +27,9 @@ import gamificationRouter from './gamification.routes';
|
|||||||
import * as db from '../services/db/index.db';
|
import * as db from '../services/db/index.db';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Use vi.hoisted to create mutable mock function references.
|
// Use vi.hoisted to create mutable mock function references.
|
||||||
|
|||||||
@@ -32,8 +32,9 @@ import healthRouter from './health.routes';
|
|||||||
import * as dbConnection from '../services/db/connection.db';
|
import * as dbConnection from '../services/db/connection.db';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean.
|
// Mock the logger to keep test output clean.
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Cast the mocked import to a Mocked type for type-safe access to mock functions.
|
// Cast the mocked import to a Mocked type for type-safe access to mock functions.
|
||||||
@@ -160,10 +161,14 @@ describe('Health Routes (/api/health)', () => {
|
|||||||
const response = await supertest(app).get('/api/health/db-schema');
|
const response = await supertest(app).get('/api/health/db-schema');
|
||||||
|
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body.message).toBe('DB connection failed');
|
expect(response.body.message).toBe('DB connection failed'); // This is the message from the original error
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(response.body.stack).toBeDefined();
|
||||||
{ error: 'DB connection failed' },
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
'Error during DB schema check:',
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
err: expect.any(Error),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -175,10 +180,13 @@ describe('Health Routes (/api/health)', () => {
|
|||||||
const response = await supertest(app).get('/api/health/db-schema');
|
const response = await supertest(app).get('/api/health/db-schema');
|
||||||
|
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body.message).toBe('DB connection failed');
|
expect(response.body.message).toBe('DB connection failed'); // This is the message from the original error
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
{ error: dbError },
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
'Error during DB schema check:',
|
expect.objectContaining({
|
||||||
|
err: expect.objectContaining({ message: 'DB connection failed' }),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -208,9 +216,11 @@ describe('Health Routes (/api/health)', () => {
|
|||||||
// Assert
|
// Assert
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body.message).toContain('Storage check failed.');
|
expect(response.body.message).toContain('Storage check failed.');
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
{ error: 'EACCES: permission denied' },
|
expect.objectContaining({
|
||||||
expect.stringContaining('Storage check failed for path:'),
|
err: expect.any(Error),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -225,9 +235,11 @@ describe('Health Routes (/api/health)', () => {
|
|||||||
// Assert
|
// Assert
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body.message).toContain('Storage check failed.');
|
expect(response.body.message).toContain('Storage check failed.');
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
{ error: accessError },
|
expect.objectContaining({
|
||||||
expect.stringContaining('Storage check failed for path:'),
|
err: expect.any(Error),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -282,10 +294,13 @@ describe('Health Routes (/api/health)', () => {
|
|||||||
const response = await supertest(app).get('/api/health/db-pool');
|
const response = await supertest(app).get('/api/health/db-pool');
|
||||||
|
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body.message).toBe('Pool is not initialized');
|
expect(response.body.message).toBe('Pool is not initialized'); // This is the message from the original error
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
{ error: 'Pool is not initialized' },
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
'Error during DB pool health check:',
|
expect.objectContaining({
|
||||||
|
err: expect.any(Error),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -299,10 +314,51 @@ describe('Health Routes (/api/health)', () => {
|
|||||||
const response = await supertest(app).get('/api/health/db-pool');
|
const response = await supertest(app).get('/api/health/db-pool');
|
||||||
|
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body.message).toBe('Pool is not initialized');
|
expect(response.body.message).toBe('Pool is not initialized'); // This is the message from the original error
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(response.body.stack).toBeDefined();
|
||||||
{ error: poolError },
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
'Error during DB pool health check:',
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
err: expect.objectContaining({ message: 'Pool is not initialized' }),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('GET /redis', () => {
|
||||||
|
it('should return 500 if Redis ping fails', async () => {
|
||||||
|
const redisError = new Error('Connection timed out');
|
||||||
|
mockedRedisConnection.ping.mockRejectedValue(redisError);
|
||||||
|
|
||||||
|
const response = await supertest(app).get('/api/health/redis');
|
||||||
|
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
expect(response.body.message).toBe('Connection timed out');
|
||||||
|
expect(response.body.stack).toBeDefined();
|
||||||
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
err: expect.any(Error),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return 500 if Redis ping returns an unexpected response', async () => {
|
||||||
|
mockedRedisConnection.ping.mockResolvedValue('OK'); // Not 'PONG'
|
||||||
|
|
||||||
|
const response = await supertest(app).get('/api/health/redis');
|
||||||
|
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
expect(response.body.message).toContain('Unexpected Redis ping response: OK');
|
||||||
|
expect(response.body.stack).toBeDefined();
|
||||||
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
err: expect.any(Error),
|
||||||
|
}),
|
||||||
|
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -39,7 +39,6 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
|
|||||||
}
|
}
|
||||||
return res.status(200).json({ success: true, message: 'All required database tables exist.' });
|
return res.status(200).json({ success: true, message: 'All required database tables exist.' });
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
logger.error({ error }, 'Error during DB schema check:');
|
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -59,10 +58,6 @@ router.get('/storage', validateRequest(emptySchema), async (req, res, next: Next
|
|||||||
message: `Storage directory '${storagePath}' is accessible and writable.`,
|
message: `Storage directory '${storagePath}' is accessible and writable.`,
|
||||||
});
|
});
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
logger.error(
|
|
||||||
{ error: error instanceof Error ? error.message : error },
|
|
||||||
`Storage check failed for path: ${storagePath}`,
|
|
||||||
);
|
|
||||||
next(
|
next(
|
||||||
new Error(
|
new Error(
|
||||||
`Storage check failed. Ensure the directory '${storagePath}' exists and is writable by the application.`,
|
`Storage check failed. Ensure the directory '${storagePath}' exists and is writable by the application.`,
|
||||||
@@ -93,10 +88,6 @@ router.get(
|
|||||||
.json({ success: false, message: `Pool may be under stress. ${message}` });
|
.json({ success: false, message: `Pool may be under stress. ${message}` });
|
||||||
}
|
}
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
logger.error(
|
|
||||||
{ error: error instanceof Error ? error.message : error },
|
|
||||||
'Error during DB pool health check:',
|
|
||||||
);
|
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -130,7 +121,6 @@ router.get(
|
|||||||
}
|
}
|
||||||
throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
|
throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
logger.error({ error }, 'Error checking Redis health');
|
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -56,7 +56,6 @@ import {
|
|||||||
createMockUserProfile,
|
createMockUserProfile,
|
||||||
createMockUserWithPasswordHash,
|
createMockUserWithPasswordHash,
|
||||||
} from '../tests/utils/mockFactories';
|
} from '../tests/utils/mockFactories';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// Mock dependencies before importing the passport configuration
|
// Mock dependencies before importing the passport configuration
|
||||||
vi.mock('../services/db/index.db', () => ({
|
vi.mock('../services/db/index.db', () => ({
|
||||||
@@ -74,9 +73,10 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
|
|
||||||
const mockedDb = db as Mocked<typeof db>;
|
const mockedDb = db as Mocked<typeof db>;
|
||||||
|
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
// This mock is used by the module under test and can be imported in the test file.
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
logger: mockLogger,
|
// Note: We need to await the import inside the factory
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock bcrypt for password comparisons
|
// Mock bcrypt for password comparisons
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ import {
|
|||||||
createMockDietaryRestriction,
|
createMockDietaryRestriction,
|
||||||
createMockAppliance,
|
createMockAppliance,
|
||||||
} from '../tests/utils/mockFactories';
|
} from '../tests/utils/mockFactories';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
|
|
||||||
// 1. Mock the Service Layer directly.
|
// 1. Mock the Service Layer directly.
|
||||||
@@ -21,10 +20,12 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
// Import the router and mocked DB AFTER all mocks are defined.
|
// Import the router and mocked DB AFTER all mocks are defined.
|
||||||
import personalizationRouter from './personalization.routes';
|
import personalizationRouter from './personalization.routes';
|
||||||
import * as db from '../services/db/index.db';
|
import * as db from '../services/db/index.db';
|
||||||
|
import { mockLogger } from '../tests/utils/mockLogger';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
describe('Personalization Routes (/api/personalization)', () => {
|
describe('Personalization Routes (/api/personalization)', () => {
|
||||||
|
|||||||
@@ -12,8 +12,9 @@ vi.mock('../services/db/price.db', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the router AFTER other setup.
|
// Import the router AFTER other setup.
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
// src/routes/recipe.routes.test.ts
|
// src/routes/recipe.routes.test.ts
|
||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
import supertest from 'supertest';
|
import supertest from 'supertest';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
import { createMockRecipe, createMockRecipeComment } from '../tests/utils/mockFactories';
|
import { createMockRecipe, createMockRecipeComment } from '../tests/utils/mockFactories';
|
||||||
import { NotFoundError } from '../services/db/errors.db';
|
import { NotFoundError } from '../services/db/errors.db';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
@@ -20,10 +19,12 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
// Import the router and mocked DB AFTER all mocks are defined.
|
// Import the router and mocked DB AFTER all mocks are defined.
|
||||||
import recipeRouter from './recipe.routes';
|
import recipeRouter from './recipe.routes';
|
||||||
import * as db from '../services/db/index.db';
|
import * as db from '../services/db/index.db';
|
||||||
|
import { mockLogger } from '../tests/utils/mockLogger';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the mocked db module to control its functions in tests
|
// Import the mocked db module to control its functions in tests
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
// src/routes/stats.routes.test.ts
|
// src/routes/stats.routes.test.ts
|
||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
import supertest from 'supertest';
|
import supertest from 'supertest';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
|
|
||||||
// 1. Mock the Service Layer directly.
|
// 1. Mock the Service Layer directly.
|
||||||
@@ -14,10 +13,12 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
// Import the router and mocked DB AFTER all mocks are defined.
|
// Import the router and mocked DB AFTER all mocks are defined.
|
||||||
import statsRouter from './stats.routes';
|
import statsRouter from './stats.routes';
|
||||||
import * as db from '../services/db/index.db';
|
import * as db from '../services/db/index.db';
|
||||||
|
import { mockLogger } from '../tests/utils/mockLogger';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
const expectLogger = expect.objectContaining({
|
const expectLogger = expect.objectContaining({
|
||||||
|
|||||||
@@ -86,8 +86,9 @@ vi.mock('bcrypt', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the router and other modules AFTER mocks are established
|
// Import the router and other modules AFTER mocks are established
|
||||||
@@ -147,8 +148,8 @@ describe('User Routes (/api/users)', () => {
|
|||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
'Failed to create avatar upload directory:',
|
{ err: mkdirError },
|
||||||
mkdirError,
|
'Failed to create avatar upload directory',
|
||||||
);
|
);
|
||||||
vi.doUnmock('node:fs/promises'); // Clean up
|
vi.doUnmock('node:fs/promises'); // Clean up
|
||||||
});
|
});
|
||||||
|
|||||||
96
src/services/db/price.db.test.ts
Normal file
96
src/services/db/price.db.test.ts
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
// src/services/db/price.db.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { mockPoolInstance } from '../../tests/setup/tests-setup-unit';
|
||||||
|
import { getPool } from './connection.db';
|
||||||
|
import { priceRepo } from './price.db';
|
||||||
|
import type { PriceHistoryData } from '../../types';
|
||||||
|
|
||||||
|
// Un-mock the module we are testing to ensure we use the real implementation.
|
||||||
|
vi.unmock('./price.db');
|
||||||
|
|
||||||
|
// Mock dependencies
|
||||||
|
vi.mock('./connection.db', () => ({
|
||||||
|
getPool: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('../logger.server', () => ({
|
||||||
|
logger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
import { logger as mockLogger } from '../logger.server';
|
||||||
|
|
||||||
|
describe('Price DB Service', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
// Make getPool return our mock instance for each test
|
||||||
|
vi.mocked(getPool).mockReturnValue(mockPoolInstance as any);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getPriceHistory', () => {
|
||||||
|
it('should return an empty array if masterItemIds is empty and not query the db', async () => {
|
||||||
|
const result = await priceRepo.getPriceHistory([], mockLogger);
|
||||||
|
expect(result).toEqual([]);
|
||||||
|
expect(mockPoolInstance.query).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should execute the correct query with default limit and offset', async () => {
|
||||||
|
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||||
|
await priceRepo.getPriceHistory([1, 2], mockLogger);
|
||||||
|
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('LIMIT $2 OFFSET $3'),
|
||||||
|
[[1, 2], 1000, 0],
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should execute the correct query with provided limit and offset', async () => {
|
||||||
|
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||||
|
await priceRepo.getPriceHistory([1, 2], mockLogger, 50, 10);
|
||||||
|
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('LIMIT $2 OFFSET $3'),
|
||||||
|
[[1, 2], 50, 10],
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return price history data on success', async () => {
|
||||||
|
const mockHistory: PriceHistoryData[] = [
|
||||||
|
{ master_item_id: 1, price_in_cents: 199, date: '2024-01-01' },
|
||||||
|
{ master_item_id: 1, price_in_cents: 209, date: '2024-01-08' },
|
||||||
|
];
|
||||||
|
mockPoolInstance.query.mockResolvedValue({ rows: mockHistory });
|
||||||
|
|
||||||
|
const result = await priceRepo.getPriceHistory([1], mockLogger);
|
||||||
|
expect(result).toEqual(mockHistory);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log the result count on success', async () => {
|
||||||
|
const mockHistory: PriceHistoryData[] = [
|
||||||
|
{ master_item_id: 1, price_in_cents: 199, date: '2024-01-01' },
|
||||||
|
];
|
||||||
|
mockPoolInstance.query.mockResolvedValue({ rows: mockHistory });
|
||||||
|
|
||||||
|
await priceRepo.getPriceHistory([1], mockLogger, 50, 10);
|
||||||
|
expect(mockLogger.debug).toHaveBeenCalledWith(
|
||||||
|
{ count: 1, itemIds: 1, limit: 50, offset: 10 },
|
||||||
|
'Fetched price history from database.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw a generic error if the database query fails', async () => {
|
||||||
|
const dbError = new Error('DB Connection Error');
|
||||||
|
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||||
|
|
||||||
|
await expect(priceRepo.getPriceHistory([1], mockLogger, 50, 10)).rejects.toThrow(
|
||||||
|
'Failed to retrieve price history.',
|
||||||
|
);
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: dbError, masterItemIds: [1], limit: 50, offset: 10 },
|
||||||
|
'Database error in getPriceHistory',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -43,11 +43,19 @@ export const priceRepo = {
|
|||||||
LIMIT $2 OFFSET $3;
|
LIMIT $2 OFFSET $3;
|
||||||
`;
|
`;
|
||||||
|
|
||||||
const result = await getPool().query(query, [masterItemIds, limit, offset]);
|
try {
|
||||||
logger.debug(
|
const result = await getPool().query(query, [masterItemIds, limit, offset]);
|
||||||
{ count: result.rows.length, itemIds: masterItemIds.length, limit, offset },
|
logger.debug(
|
||||||
'Fetched price history from database.',
|
{ count: result.rows.length, itemIds: masterItemIds.length, limit, offset },
|
||||||
);
|
'Fetched price history from database.',
|
||||||
return result.rows;
|
);
|
||||||
|
return result.rows;
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(
|
||||||
|
{ err: error, masterItemIds, limit, offset },
|
||||||
|
'Database error in getPriceHistory',
|
||||||
|
);
|
||||||
|
throw new Error('Failed to retrieve price history.');
|
||||||
|
}
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
84
src/services/eventBus.test.ts
Normal file
84
src/services/eventBus.test.ts
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
// src/services/eventBus.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { EventBus } from './eventBus';
|
||||||
|
|
||||||
|
describe('EventBus', () => {
|
||||||
|
let eventBus: EventBus;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
// Create a new instance for each test to ensure isolation
|
||||||
|
eventBus = new EventBus();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call a listener when an event is dispatched', () => {
|
||||||
|
const callback = vi.fn();
|
||||||
|
eventBus.on('test-event', callback);
|
||||||
|
eventBus.dispatch('test-event');
|
||||||
|
expect(callback).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should pass data to the listener when dispatched', () => {
|
||||||
|
const callback = vi.fn();
|
||||||
|
const data = { message: 'hello' };
|
||||||
|
eventBus.on('data-event', callback);
|
||||||
|
eventBus.dispatch('data-event', data);
|
||||||
|
expect(callback).toHaveBeenCalledWith(data);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call multiple listeners for the same event', () => {
|
||||||
|
const callback1 = vi.fn();
|
||||||
|
const callback2 = vi.fn();
|
||||||
|
eventBus.on('multi-event', callback1);
|
||||||
|
eventBus.on('multi-event', callback2);
|
||||||
|
eventBus.dispatch('multi-event');
|
||||||
|
expect(callback1).toHaveBeenCalledTimes(1);
|
||||||
|
expect(callback2).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should stop calling a listener after it has been removed', () => {
|
||||||
|
const callback = vi.fn();
|
||||||
|
eventBus.on('remove-event', callback);
|
||||||
|
eventBus.dispatch('remove-event');
|
||||||
|
expect(callback).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
eventBus.off('remove-event', callback);
|
||||||
|
eventBus.dispatch('remove-event');
|
||||||
|
// The callback should still have been called only once from the first dispatch
|
||||||
|
expect(callback).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not throw an error when dispatching an event with no listeners', () => {
|
||||||
|
expect(() => eventBus.dispatch('no-listener-event')).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not throw an error when removing a listener that does not exist for an event', () => {
|
||||||
|
const existentCallback = vi.fn();
|
||||||
|
const nonExistentCallback = () => {};
|
||||||
|
eventBus.on('some-event', existentCallback);
|
||||||
|
expect(() => eventBus.off('some-event', nonExistentCallback)).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not throw an error when removing a listener from an event that has no listeners', () => {
|
||||||
|
const callback = vi.fn();
|
||||||
|
expect(() => eventBus.off('non-existent-event', callback)).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle removing one of multiple listeners correctly', () => {
|
||||||
|
const callback1 = vi.fn();
|
||||||
|
const callback2 = vi.fn();
|
||||||
|
eventBus.on('multi-remove-event', callback1);
|
||||||
|
eventBus.on('multi-remove-event', callback2);
|
||||||
|
|
||||||
|
eventBus.dispatch('multi-remove-event');
|
||||||
|
expect(callback1).toHaveBeenCalledTimes(1);
|
||||||
|
expect(callback2).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
eventBus.off('multi-remove-event', callback1);
|
||||||
|
eventBus.dispatch('multi-remove-event');
|
||||||
|
|
||||||
|
// callback1 should not be called again
|
||||||
|
expect(callback1).toHaveBeenCalledTimes(1);
|
||||||
|
// callback2 should be called again
|
||||||
|
expect(callback2).toHaveBeenCalledTimes(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -7,7 +7,7 @@
|
|||||||
|
|
||||||
type EventCallback = (data?: any) => void;
|
type EventCallback = (data?: any) => void;
|
||||||
|
|
||||||
class EventBus {
|
export class EventBus {
|
||||||
private listeners: { [key: string]: EventCallback[] } = {};
|
private listeners: { [key: string]: EventCallback[] } = {};
|
||||||
|
|
||||||
on(event: string, callback: EventCallback): void {
|
on(event: string, callback: EventCallback): void {
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
// src/services/queueService.server.test.ts
|
// src/services/queueService.server.test.ts
|
||||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||||
import { logger as mockLogger } from './logger.server';
|
import { logger as mockLogger } from './logger.server';
|
||||||
import { EventEmitter } from 'node:events';
|
import { EventEmitter } from 'node:events'; // This was a duplicate, fixed.
|
||||||
import type { Job, Worker } from 'bullmq';
|
import type { Job, Worker } from 'bullmq';
|
||||||
import type { Mock } from 'vitest';
|
import type { Mock } from 'vitest';
|
||||||
|
|
||||||
@@ -31,6 +31,7 @@ mockRedisConnection.quit = vi.fn().mockResolvedValue('OK');
|
|||||||
// We make it a mock function that returns our shared `mockRedisConnection` instance.
|
// We make it a mock function that returns our shared `mockRedisConnection` instance.
|
||||||
vi.mock('ioredis', () => ({
|
vi.mock('ioredis', () => ({
|
||||||
default: vi.fn(function () {
|
default: vi.fn(function () {
|
||||||
|
// This was a duplicate, fixed.
|
||||||
return mockRedisConnection;
|
return mockRedisConnection;
|
||||||
}),
|
}),
|
||||||
}));
|
}));
|
||||||
@@ -51,26 +52,35 @@ vi.mock('bullmq', () => ({
|
|||||||
this.add = vi.fn();
|
this.add = vi.fn();
|
||||||
this.close = vi.fn().mockResolvedValue(undefined);
|
this.close = vi.fn().mockResolvedValue(undefined);
|
||||||
return this;
|
return this;
|
||||||
}),
|
}), // This was a duplicate, fixed.
|
||||||
|
UnrecoverableError: class UnrecoverableError extends Error {},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
vi.mock('./logger.server', () => ({
|
vi.mock('./logger.server', () => ({
|
||||||
logger: {
|
logger: {
|
||||||
info: vi.fn(),
|
info: vi.fn(),
|
||||||
error: vi.fn(),
|
error: vi.fn(),
|
||||||
warn: vi.fn(),
|
warn: vi.fn(), // This was a duplicate, fixed.
|
||||||
debug: vi.fn(),
|
debug: vi.fn(),
|
||||||
|
child: vi.fn().mockReturnThis(),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock other dependencies that are not the focus of this test file.
|
// Mock other dependencies that are not the focus of this test file.
|
||||||
vi.mock('./aiService.server');
|
vi.mock('./aiService.server');
|
||||||
vi.mock('./emailService.server');
|
vi.mock('./emailService.server');
|
||||||
vi.mock('./db/index.db');
|
vi.mock('./db/index.db'); // This was a duplicate, fixed.
|
||||||
|
vi.mock('./flyerProcessingService.server');
|
||||||
|
vi.mock('./flyerDataTransformer');
|
||||||
|
|
||||||
describe('Queue Service Setup and Lifecycle', () => {
|
describe('Worker Service Lifecycle', () => {
|
||||||
let gracefulShutdown: (signal: string) => Promise<void>;
|
let gracefulShutdown: (signal: string) => Promise<void>; // This was a duplicate, fixed.
|
||||||
let flyerWorker: Worker, emailWorker: Worker, analyticsWorker: Worker, cleanupWorker: Worker;
|
let flyerWorker: Worker,
|
||||||
|
emailWorker: Worker,
|
||||||
|
analyticsWorker: Worker,
|
||||||
|
cleanupWorker: Worker,
|
||||||
|
weeklyAnalyticsWorker: Worker,
|
||||||
|
tokenCleanupWorker: Worker;
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
@@ -79,22 +89,27 @@ describe('Queue Service Setup and Lifecycle', () => {
|
|||||||
vi.resetModules();
|
vi.resetModules();
|
||||||
|
|
||||||
// Dynamically import the modules after mocks are set up
|
// Dynamically import the modules after mocks are set up
|
||||||
const queueService = await import('./queueService.server');
|
const workerService = await import('./workers.server');
|
||||||
|
|
||||||
// Capture the imported instances for use in tests
|
// Capture the imported instances for use in tests
|
||||||
gracefulShutdown = queueService.gracefulShutdown;
|
gracefulShutdown = workerService.gracefulShutdown;
|
||||||
flyerWorker = queueService.flyerWorker;
|
flyerWorker = workerService.flyerWorker;
|
||||||
emailWorker = queueService.emailWorker;
|
emailWorker = workerService.emailWorker;
|
||||||
analyticsWorker = queueService.analyticsWorker;
|
analyticsWorker = workerService.analyticsWorker;
|
||||||
cleanupWorker = queueService.cleanupWorker;
|
cleanupWorker = workerService.cleanupWorker;
|
||||||
|
weeklyAnalyticsWorker = workerService.weeklyAnalyticsWorker;
|
||||||
|
tokenCleanupWorker = workerService.tokenCleanupWorker;
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
// Clean up all event listeners on the mock connection to prevent open handles.
|
// Clean up all event listeners on the mock connection to prevent open handles.
|
||||||
mockRedisConnection.removeAllListeners();
|
mockRedisConnection.removeAllListeners();
|
||||||
|
vi.useRealTimers();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should log a success message when Redis connects', () => {
|
it('should log a success message when Redis connects', () => {
|
||||||
|
// Re-import redis.server to trigger its event listeners with the mock
|
||||||
|
import('./redis.server');
|
||||||
// Act: Simulate the 'connect' event on the mock Redis connection
|
// Act: Simulate the 'connect' event on the mock Redis connection
|
||||||
mockRedisConnection.emit('connect');
|
mockRedisConnection.emit('connect');
|
||||||
|
|
||||||
@@ -103,6 +118,7 @@ describe('Queue Service Setup and Lifecycle', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should log an error message when Redis connection fails', () => {
|
it('should log an error message when Redis connection fails', () => {
|
||||||
|
import('./redis.server');
|
||||||
const redisError = new Error('Connection refused');
|
const redisError = new Error('Connection refused');
|
||||||
mockRedisConnection.emit('error', redisError);
|
mockRedisConnection.emit('error', redisError);
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith({ err: redisError }, '[Redis] Connection error.');
|
expect(mockLogger.error).toHaveBeenCalledWith({ err: redisError }, '[Redis] Connection error.');
|
||||||
@@ -111,7 +127,14 @@ describe('Queue Service Setup and Lifecycle', () => {
|
|||||||
it('should attach completion and failure listeners to all workers', () => {
|
it('should attach completion and failure listeners to all workers', () => {
|
||||||
// The workers are instantiated when the module is imported in beforeEach.
|
// The workers are instantiated when the module is imported in beforeEach.
|
||||||
// We just need to check that the 'on' method was called for each event.
|
// We just need to check that the 'on' method was called for each event.
|
||||||
const workers = [flyerWorker, emailWorker, analyticsWorker, cleanupWorker];
|
const workers = [
|
||||||
|
flyerWorker,
|
||||||
|
emailWorker,
|
||||||
|
analyticsWorker,
|
||||||
|
cleanupWorker,
|
||||||
|
weeklyAnalyticsWorker,
|
||||||
|
tokenCleanupWorker,
|
||||||
|
];
|
||||||
for (const worker of workers) {
|
for (const worker of workers) {
|
||||||
expect(worker.on).toHaveBeenCalledWith('completed', expect.any(Function));
|
expect(worker.on).toHaveBeenCalledWith('completed', expect.any(Function));
|
||||||
expect(worker.on).toHaveBeenCalledWith('failed', expect.any(Function));
|
expect(worker.on).toHaveBeenCalledWith('failed', expect.any(Function));
|
||||||
@@ -171,15 +194,40 @@ describe('Queue Service Setup and Lifecycle', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should close all workers, queues, the redis connection, and exit the process', async () => {
|
it('should close all workers, queues, the redis connection, and exit the process', async () => {
|
||||||
|
// We need to import the queues to check if their close methods are called.
|
||||||
|
const {
|
||||||
|
flyerQueue,
|
||||||
|
emailQueue,
|
||||||
|
analyticsQueue,
|
||||||
|
cleanupQueue,
|
||||||
|
weeklyAnalyticsQueue,
|
||||||
|
tokenCleanupQueue,
|
||||||
|
} = await import('./queues.server');
|
||||||
|
|
||||||
await gracefulShutdown('SIGINT');
|
await gracefulShutdown('SIGINT');
|
||||||
expect((flyerWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
|
||||||
expect((emailWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
// Verify workers are closed
|
||||||
expect((analyticsWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
expect((flyerWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
|
||||||
expect((cleanupWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
expect((emailWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
|
||||||
|
expect((analyticsWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
|
||||||
|
expect((cleanupWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
|
||||||
|
expect((weeklyAnalyticsWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
|
||||||
|
expect((tokenCleanupWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Verify queues are closed
|
||||||
|
expect((flyerQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
||||||
|
expect((emailQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
||||||
|
expect((analyticsQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
||||||
|
expect((cleanupQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
||||||
|
expect((weeklyAnalyticsQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
||||||
|
expect((tokenCleanupQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
||||||
|
|
||||||
// Verify the redis connection is also closed
|
// Verify the redis connection is also closed
|
||||||
expect(mockRedisConnection.quit).toHaveBeenCalledTimes(1);
|
expect(mockRedisConnection.quit).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
// Check for the correct success log message from workers.server.ts
|
||||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||||
'[Shutdown] All workers, queues, and connections closed successfully.',
|
'[Shutdown] All resources closed successfully.',
|
||||||
);
|
);
|
||||||
expect(processExitSpy).toHaveBeenCalledWith(0);
|
expect(processExitSpy).toHaveBeenCalledWith(0);
|
||||||
});
|
});
|
||||||
@@ -192,12 +240,34 @@ describe('Queue Service Setup and Lifecycle', () => {
|
|||||||
await gracefulShutdown('SIGTERM');
|
await gracefulShutdown('SIGTERM');
|
||||||
|
|
||||||
// It should still attempt to close all workers
|
// It should still attempt to close all workers
|
||||||
expect((emailWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
|
expect((emailWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
{ err: closeError, resource: 'flyerWorker' },
|
{ err: closeError, resource: 'flyerWorker' },
|
||||||
'[Shutdown] Error closing resource.',
|
`[Shutdown] Error closing flyerWorker.`,
|
||||||
);
|
);
|
||||||
expect(processExitSpy).toHaveBeenCalledWith(1);
|
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should timeout if shutdown takes too long', async () => {
|
||||||
|
vi.useFakeTimers();
|
||||||
|
// Make one of the close calls hang indefinitely
|
||||||
|
(flyerWorker.close as Mock).mockReturnValue(new Promise(() => {}));
|
||||||
|
|
||||||
|
// Run shutdown but don't await it fully, as it will hang
|
||||||
|
const shutdownPromise = gracefulShutdown('SIGTERM');
|
||||||
|
|
||||||
|
// Advance timers past the timeout threshold
|
||||||
|
await vi.advanceTimersByTimeAsync(31000);
|
||||||
|
|
||||||
|
// Now await the promise to see the timeout result
|
||||||
|
await shutdownPromise;
|
||||||
|
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
`[Shutdown] Graceful shutdown timed out after 30 seconds. Forcing exit.`,
|
||||||
|
);
|
||||||
|
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||||
|
|
||||||
|
vi.useRealTimers();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,438 +1,32 @@
|
|||||||
// src/services/queueService.server.ts
|
// src/services/queueService.server.ts
|
||||||
import { Queue, Worker, Job, UnrecoverableError } from 'bullmq';
|
|
||||||
import IORedis from 'ioredis'; // Correctly imported
|
|
||||||
import fsPromises from 'node:fs/promises';
|
|
||||||
import { exec } from 'child_process';
|
|
||||||
import { promisify } from 'util';
|
|
||||||
|
|
||||||
import { logger } from './logger.server';
|
import { logger } from './logger.server';
|
||||||
import { aiService } from './aiService.server';
|
import { connection } from './redis.server';
|
||||||
import * as emailService from './emailService.server';
|
|
||||||
import * as db from './db/index.db';
|
|
||||||
import {
|
import {
|
||||||
FlyerProcessingService,
|
flyerQueue,
|
||||||
type FlyerJobData,
|
emailQueue,
|
||||||
type IFileSystem,
|
analyticsQueue,
|
||||||
} from './flyerProcessingService.server';
|
weeklyAnalyticsQueue,
|
||||||
import { FlyerDataTransformer } from './flyerDataTransformer';
|
cleanupQueue,
|
||||||
|
tokenCleanupQueue,
|
||||||
|
} from './queues.server';
|
||||||
|
|
||||||
export const connection = new IORedis(process.env.REDIS_URL!, {
|
// Re-export everything for backward compatibility where possible
|
||||||
maxRetriesPerRequest: null, // Important for BullMQ
|
export { connection } from './redis.server';
|
||||||
password: process.env.REDIS_PASSWORD, // Add the password from environment variables
|
export * from './queues.server';
|
||||||
});
|
|
||||||
|
|
||||||
// --- Redis Connection Event Listeners ---
|
// We do NOT export workers here anymore to prevent side effects.
|
||||||
connection.on('connect', () => {
|
// Consumers needing workers must import from './workers.server'.
|
||||||
logger.info('[Redis] Connection established successfully.');
|
|
||||||
});
|
|
||||||
|
|
||||||
connection.on('error', (err) => {
|
|
||||||
// This is crucial for diagnosing Redis connection issues. // The patch requested this specific error handling.
|
|
||||||
logger.error({ err }, '[Redis] Connection error.');
|
|
||||||
});
|
|
||||||
|
|
||||||
const execAsync = promisify(exec);
|
|
||||||
// --- Queues ---
|
|
||||||
export const flyerQueue = new Queue<FlyerJobData>('flyer-processing', {
|
|
||||||
connection,
|
|
||||||
defaultJobOptions: {
|
|
||||||
attempts: 3, // Attempt a job 3 times before marking it as failed.
|
|
||||||
backoff: {
|
|
||||||
type: 'exponential',
|
|
||||||
delay: 5000, // Start with a 5-second delay for the first retry
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
export const emailQueue = new Queue<EmailJobData>('email-sending', {
|
|
||||||
connection,
|
|
||||||
defaultJobOptions: {
|
|
||||||
attempts: 5, // Emails can be retried more aggressively
|
|
||||||
backoff: {
|
|
||||||
type: 'exponential',
|
|
||||||
delay: 10000, // Start with a 10-second delay
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
export const analyticsQueue = new Queue<AnalyticsJobData>('analytics-reporting', {
|
|
||||||
connection,
|
|
||||||
defaultJobOptions: {
|
|
||||||
attempts: 2, // Analytics can be intensive, so fewer retries might be desired.
|
|
||||||
backoff: {
|
|
||||||
type: 'exponential',
|
|
||||||
delay: 60000, // Wait a minute before retrying.
|
|
||||||
},
|
|
||||||
// Remove job from queue on completion to save space, as results are in the DB.
|
|
||||||
removeOnComplete: true,
|
|
||||||
removeOnFail: 50, // Keep the last 50 failed jobs for inspection.
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
export const weeklyAnalyticsQueue = new Queue<WeeklyAnalyticsJobData>(
|
|
||||||
'weekly-analytics-reporting',
|
|
||||||
{
|
|
||||||
connection,
|
|
||||||
defaultJobOptions: {
|
|
||||||
attempts: 2,
|
|
||||||
backoff: {
|
|
||||||
type: 'exponential',
|
|
||||||
delay: 3600000, // 1 hour delay for retries
|
|
||||||
},
|
|
||||||
removeOnComplete: true,
|
|
||||||
removeOnFail: 50,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
export const cleanupQueue = new Queue<CleanupJobData>('file-cleanup', {
|
|
||||||
connection,
|
|
||||||
defaultJobOptions: {
|
|
||||||
attempts: 3,
|
|
||||||
backoff: {
|
|
||||||
type: 'exponential',
|
|
||||||
delay: 30000, // Retry cleanup after 30 seconds
|
|
||||||
},
|
|
||||||
removeOnComplete: true, // No need to keep successful cleanup jobs
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
export const tokenCleanupQueue = new Queue<TokenCleanupJobData>('token-cleanup', {
|
|
||||||
connection,
|
|
||||||
defaultJobOptions: {
|
|
||||||
attempts: 2,
|
|
||||||
backoff: {
|
|
||||||
type: 'exponential',
|
|
||||||
delay: 3600000, // 1 hour delay
|
|
||||||
},
|
|
||||||
removeOnComplete: true,
|
|
||||||
removeOnFail: 10,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
// --- Job Data Interfaces ---
|
|
||||||
|
|
||||||
interface EmailJobData {
|
|
||||||
to: string;
|
|
||||||
subject: string;
|
|
||||||
text: string;
|
|
||||||
html: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Defines the data for an analytics job.
|
* A function to gracefully shut down all queues and connections.
|
||||||
*/
|
* This is for the API process which only uses queues.
|
||||||
interface AnalyticsJobData {
|
* For worker processes, use the gracefulShutdown from workers.server.ts
|
||||||
reportDate: string; // e.g., '2024-10-26'
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Defines the data for a weekly analytics job.
|
|
||||||
*/
|
|
||||||
interface WeeklyAnalyticsJobData {
|
|
||||||
reportYear: number;
|
|
||||||
reportWeek: number; // ISO week number (1-53)
|
|
||||||
}
|
|
||||||
|
|
||||||
interface CleanupJobData {
|
|
||||||
flyerId: number;
|
|
||||||
// An array of absolute file paths to be deleted. Made optional for manual cleanup triggers.
|
|
||||||
paths?: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Defines the data for a token cleanup job.
|
|
||||||
*/
|
|
||||||
interface TokenCleanupJobData {
|
|
||||||
timestamp: string; // ISO string to ensure the job is unique per run
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Worker Instantiation ---
|
|
||||||
|
|
||||||
// Create an adapter for fsPromises to match the IFileSystem interface.
|
|
||||||
const fsAdapter: IFileSystem = {
|
|
||||||
readdir: (path: string, options: { withFileTypes: true }) => fsPromises.readdir(path, options),
|
|
||||||
unlink: (path: string) => fsPromises.unlink(path),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Instantiate the service with its real dependencies
|
|
||||||
const flyerProcessingService = new FlyerProcessingService(
|
|
||||||
aiService,
|
|
||||||
db,
|
|
||||||
fsAdapter,
|
|
||||||
execAsync,
|
|
||||||
cleanupQueue, // Inject the cleanup queue to break the circular dependency
|
|
||||||
new FlyerDataTransformer(), // Inject the new transformer
|
|
||||||
);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Helper to ensure that an unknown error is normalized to an Error object.
|
|
||||||
* This ensures consistent logging structure and stack traces.
|
|
||||||
*/
|
|
||||||
const normalizeError = (error: unknown): Error => {
|
|
||||||
return error instanceof Error ? error : new Error(String(error));
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A generic function to attach logging event listeners to any worker.
|
|
||||||
* This centralizes logging for job completion and final failure.
|
|
||||||
* @param worker The BullMQ worker instance.
|
|
||||||
*/
|
|
||||||
const attachWorkerEventListeners = (worker: Worker) => {
|
|
||||||
worker.on('completed', (job: Job, returnValue: unknown) => {
|
|
||||||
logger.info({ returnValue }, `[${worker.name}] Job ${job.id} completed successfully.`);
|
|
||||||
});
|
|
||||||
|
|
||||||
worker.on('failed', (job: Job | undefined, error: Error) => {
|
|
||||||
// This event fires after all retries have failed.
|
|
||||||
logger.error(
|
|
||||||
{ err: error, jobData: job?.data },
|
|
||||||
`[${worker.name}] Job ${job?.id} has ultimately failed after all attempts.`,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const flyerWorker = new Worker<FlyerJobData>(
|
|
||||||
'flyer-processing', // Must match the queue name
|
|
||||||
async (job) => {
|
|
||||||
try {
|
|
||||||
// The processJob method creates its own job-specific logger internally.
|
|
||||||
return await flyerProcessingService.processJob(job);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
// Check for quota errors or other unrecoverable errors from the AI service
|
|
||||||
const errorMessage = wrappedError.message || '';
|
|
||||||
if (
|
|
||||||
errorMessage.includes('quota') ||
|
|
||||||
errorMessage.includes('429') ||
|
|
||||||
errorMessage.includes('RESOURCE_EXHAUSTED')
|
|
||||||
) {
|
|
||||||
logger.error(
|
|
||||||
{ err: wrappedError, jobId: job.id },
|
|
||||||
'[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
|
|
||||||
);
|
|
||||||
throw new UnrecoverableError(errorMessage);
|
|
||||||
}
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
connection,
|
|
||||||
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
/**
|
|
||||||
* A dedicated worker process for sending emails.
|
|
||||||
*/
|
|
||||||
export const emailWorker = new Worker<EmailJobData>(
|
|
||||||
'email-sending',
|
|
||||||
async (job: Job<EmailJobData>) => {
|
|
||||||
const { to, subject } = job.data;
|
|
||||||
// Create a job-specific logger instance
|
|
||||||
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
|
|
||||||
jobLogger.info({ to, subject }, `[EmailWorker] Sending email for job ${job.id}`);
|
|
||||||
try {
|
|
||||||
await emailService.sendEmail(job.data, jobLogger);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
logger.error(
|
|
||||||
{
|
|
||||||
err: wrappedError,
|
|
||||||
jobData: job.data,
|
|
||||||
},
|
|
||||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
// Re-throw to let BullMQ handle the failure and retry.
|
|
||||||
throw wrappedError;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
connection,
|
|
||||||
concurrency: parseInt(process.env.EMAIL_WORKER_CONCURRENCY || '10', 10),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A dedicated worker for generating daily analytics reports.
|
|
||||||
* This is a placeholder for the actual report generation logic.
|
|
||||||
*/
|
|
||||||
export const analyticsWorker = new Worker<AnalyticsJobData>(
|
|
||||||
'analytics-reporting',
|
|
||||||
async (job: Job<AnalyticsJobData>) => {
|
|
||||||
const { reportDate } = job.data;
|
|
||||||
logger.info({ reportDate }, `[AnalyticsWorker] Starting report generation for job ${job.id}`);
|
|
||||||
try {
|
|
||||||
// Special case for testing the retry mechanism
|
|
||||||
if (reportDate === 'FAIL') {
|
|
||||||
throw new Error('This is a test failure for the analytics job.');
|
|
||||||
}
|
|
||||||
|
|
||||||
// In a real implementation, you would call a database function here.
|
|
||||||
// For example: await db.generateDailyAnalyticsReport(reportDate);
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, 10000)); // Simulate a 10-second task
|
|
||||||
logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
// Standardize error logging.
|
|
||||||
logger.error({ err: wrappedError, jobData: job.data },
|
|
||||||
`[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
connection,
|
|
||||||
concurrency: parseInt(process.env.ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A dedicated worker for cleaning up flyer-related files from the filesystem.
|
|
||||||
* This is triggered manually by an admin after a flyer has been reviewed.
|
|
||||||
*/
|
|
||||||
export const cleanupWorker = new Worker<CleanupJobData>(
|
|
||||||
// This worker now handles two types of cleanup jobs.
|
|
||||||
'file-cleanup', // The queue name
|
|
||||||
async (job: Job<CleanupJobData>) => {
|
|
||||||
// Destructure the data from the job payload.
|
|
||||||
const { flyerId, paths } = job.data;
|
|
||||||
logger.info(
|
|
||||||
{ paths },
|
|
||||||
`[CleanupWorker] Starting file cleanup for job ${job.id} (Flyer ID: ${flyerId})`,
|
|
||||||
);
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (!paths || paths.length === 0) {
|
|
||||||
logger.warn(
|
|
||||||
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} received no paths to clean. Skipping.`,
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Iterate over the file paths provided in the job data and delete each one.
|
|
||||||
for (const filePath of paths) {
|
|
||||||
try {
|
|
||||||
await fsAdapter.unlink(filePath);
|
|
||||||
logger.info(`[CleanupWorker] Deleted temporary file: ${filePath}`);
|
|
||||||
} catch (unlinkError: unknown) {
|
|
||||||
// If the file doesn't exist, it's a success from our perspective.
|
|
||||||
// We can log it as a warning and continue without failing the job.
|
|
||||||
if (
|
|
||||||
unlinkError instanceof Error &&
|
|
||||||
'code' in unlinkError &&
|
|
||||||
unlinkError.code === 'ENOENT'
|
|
||||||
) {
|
|
||||||
logger.warn(
|
|
||||||
`[CleanupWorker] File not found during cleanup (already deleted?): ${filePath}`,
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
throw unlinkError; // For any other error (e.g., permissions), re-throw to fail the job.
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
logger.info(
|
|
||||||
`[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`,
|
|
||||||
);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
// Standardize error logging.
|
|
||||||
logger.error(
|
|
||||||
{ err: wrappedError },
|
|
||||||
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
connection,
|
|
||||||
concurrency: parseInt(process.env.CLEANUP_WORKER_CONCURRENCY || '10', 10),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A dedicated worker for generating weekly analytics reports.
|
|
||||||
* This is a placeholder for the actual report generation logic.
|
|
||||||
*/
|
|
||||||
export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
|
||||||
'weekly-analytics-reporting',
|
|
||||||
async (job: Job<WeeklyAnalyticsJobData>) => {
|
|
||||||
const { reportYear, reportWeek } = job.data;
|
|
||||||
logger.info(
|
|
||||||
{ reportYear, reportWeek },
|
|
||||||
`[WeeklyAnalyticsWorker] Starting weekly report generation for job ${job.id}`,
|
|
||||||
);
|
|
||||||
try {
|
|
||||||
// Simulate a longer-running task for weekly reports
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, 30000)); // Simulate 30-second task
|
|
||||||
logger.info(
|
|
||||||
`[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`,
|
|
||||||
);
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
// Standardize error logging.
|
|
||||||
logger.error(
|
|
||||||
{ err: wrappedError, jobData: job.data },
|
|
||||||
`[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
|
||||||
);
|
|
||||||
throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
connection,
|
|
||||||
concurrency: parseInt(process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A dedicated worker for cleaning up expired password reset tokens.
|
|
||||||
*/
|
|
||||||
export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
|
|
||||||
'token-cleanup',
|
|
||||||
async (job: Job<TokenCleanupJobData>) => {
|
|
||||||
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
|
|
||||||
jobLogger.info('[TokenCleanupWorker] Starting cleanup of expired password reset tokens.');
|
|
||||||
try {
|
|
||||||
const deletedCount = await db.userRepo.deleteExpiredResetTokens(jobLogger);
|
|
||||||
jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`);
|
|
||||||
return { deletedCount };
|
|
||||||
} catch (error: unknown) {
|
|
||||||
const wrappedError = normalizeError(error);
|
|
||||||
jobLogger.error({ err: wrappedError }, `[TokenCleanupWorker] Job ${job.id} failed.`);
|
|
||||||
throw wrappedError;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
connection,
|
|
||||||
concurrency: 1, // This is a low-priority, non-intensive task.
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
// --- Attach Event Listeners to All Workers ---
|
|
||||||
attachWorkerEventListeners(flyerWorker);
|
|
||||||
attachWorkerEventListeners(emailWorker);
|
|
||||||
attachWorkerEventListeners(analyticsWorker);
|
|
||||||
attachWorkerEventListeners(cleanupWorker);
|
|
||||||
attachWorkerEventListeners(weeklyAnalyticsWorker);
|
|
||||||
attachWorkerEventListeners(tokenCleanupWorker);
|
|
||||||
|
|
||||||
logger.info('All workers started and listening for jobs.');
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A function to gracefully shut down all queue workers and connections.
|
|
||||||
* This is essential for preventing jobs from getting stuck in an 'active' state
|
|
||||||
* when the application process is terminated.
|
|
||||||
* @param signal The signal that triggered the shutdown (e.g., 'SIGINT').
|
|
||||||
*/
|
*/
|
||||||
export const gracefulShutdown = async (signal: string) => {
|
export const gracefulShutdown = async (signal: string) => {
|
||||||
logger.info(`[Shutdown] Received ${signal}. Closing all workers and queues...`);
|
logger.info(`[Shutdown] Received ${signal}. Closing all queues...`);
|
||||||
let exitCode = 0; // Default to success
|
let exitCode = 0; // Default to success
|
||||||
|
|
||||||
const resources = [
|
const resources = [
|
||||||
{ name: 'flyerWorker', close: () => flyerWorker.close() },
|
|
||||||
{ name: 'emailWorker', close: () => emailWorker.close() },
|
|
||||||
{ name: 'analyticsWorker', close: () => analyticsWorker.close() },
|
|
||||||
{ name: 'cleanupWorker', close: () => cleanupWorker.close() },
|
|
||||||
{ name: 'weeklyAnalyticsWorker', close: () => weeklyAnalyticsWorker.close() },
|
|
||||||
{ name: 'tokenCleanupWorker', close: () => tokenCleanupWorker.close() },
|
|
||||||
{ name: 'flyerQueue', close: () => flyerQueue.close() },
|
{ name: 'flyerQueue', close: () => flyerQueue.close() },
|
||||||
{ name: 'emailQueue', close: () => emailQueue.close() },
|
{ name: 'emailQueue', close: () => emailQueue.close() },
|
||||||
{ name: 'analyticsQueue', close: () => analyticsQueue.close() },
|
{ name: 'analyticsQueue', close: () => analyticsQueue.close() },
|
||||||
@@ -455,7 +49,7 @@ export const gracefulShutdown = async (signal: string) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (exitCode === 0) {
|
if (exitCode === 0) {
|
||||||
logger.info('[Shutdown] All workers, queues, and connections closed successfully.');
|
logger.info('[Shutdown] All queues and connections closed successfully.');
|
||||||
} else {
|
} else {
|
||||||
logger.warn('[Shutdown] Graceful shutdown completed with errors.');
|
logger.warn('[Shutdown] Graceful shutdown completed with errors.');
|
||||||
}
|
}
|
||||||
|
|||||||
118
src/services/queueService.test.ts
Normal file
118
src/services/queueService.test.ts
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
// src/services/queueService.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach, afterEach, type Mock } from 'vitest';
|
||||||
|
|
||||||
|
// --- Hoisted Mocks ---
|
||||||
|
const mocks = vi.hoisted(() => {
|
||||||
|
const createMockQueue = (name: string) => ({
|
||||||
|
name,
|
||||||
|
close: vi.fn().mockResolvedValue(undefined),
|
||||||
|
add: vi.fn(),
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
flyerQueue: createMockQueue('flyer-processing'),
|
||||||
|
emailQueue: createMockQueue('email-sending'),
|
||||||
|
analyticsQueue: createMockQueue('analytics-reporting'),
|
||||||
|
weeklyAnalyticsQueue: createMockQueue('weekly-analytics-reporting'),
|
||||||
|
cleanupQueue: createMockQueue('file-cleanup'),
|
||||||
|
tokenCleanupQueue: createMockQueue('token-cleanup'),
|
||||||
|
redisConnection: {
|
||||||
|
quit: vi.fn().mockResolvedValue('OK'),
|
||||||
|
},
|
||||||
|
logger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- Mock Modules ---
|
||||||
|
vi.mock('./queues.server', () => ({
|
||||||
|
flyerQueue: mocks.flyerQueue,
|
||||||
|
emailQueue: mocks.emailQueue,
|
||||||
|
analyticsQueue: mocks.analyticsQueue,
|
||||||
|
weeklyAnalyticsQueue: mocks.weeklyAnalyticsQueue,
|
||||||
|
cleanupQueue: mocks.cleanupQueue,
|
||||||
|
tokenCleanupQueue: mocks.tokenCleanupQueue,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('./redis.server', () => ({
|
||||||
|
connection: mocks.redisConnection,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('./logger.server', () => ({
|
||||||
|
logger: mocks.logger,
|
||||||
|
}));
|
||||||
|
|
||||||
|
// --- Test ---
|
||||||
|
describe('Queue Service (API Shutdown)', () => {
|
||||||
|
let gracefulShutdown: (signal: string) => Promise<void>;
|
||||||
|
let processExitSpy: Mock;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
vi.resetModules();
|
||||||
|
|
||||||
|
// Spy on process.exit and prevent it from actually exiting
|
||||||
|
processExitSpy = vi.spyOn(process, 'exit').mockImplementation(() => undefined as never);
|
||||||
|
|
||||||
|
// Dynamically import the module under test
|
||||||
|
const queueService = await import('./queueService.server');
|
||||||
|
gracefulShutdown = queueService.gracefulShutdown;
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
processExitSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should attempt to close all queues and the redis connection on shutdown', async () => {
|
||||||
|
await gracefulShutdown('SIGINT');
|
||||||
|
|
||||||
|
expect(mocks.flyerQueue.close).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.emailQueue.close).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.analyticsQueue.close).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.cleanupQueue.close).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.weeklyAnalyticsQueue.close).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.tokenCleanupQueue.close).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.redisConnection.quit).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log success and exit with code 0 if all resources close successfully', async () => {
|
||||||
|
await gracefulShutdown('SIGINT');
|
||||||
|
|
||||||
|
expect(mocks.logger.info).toHaveBeenCalledWith(
|
||||||
|
'[Shutdown] All queues and connections closed successfully.',
|
||||||
|
);
|
||||||
|
expect(processExitSpy).toHaveBeenCalledWith(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log a warning and exit with code 1 if a queue fails to close', async () => {
|
||||||
|
const closeError = new Error('Queue failed to close');
|
||||||
|
mocks.emailQueue.close.mockRejectedValue(closeError);
|
||||||
|
|
||||||
|
await gracefulShutdown('SIGTERM');
|
||||||
|
|
||||||
|
expect(mocks.logger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: closeError, resource: 'emailQueue' },
|
||||||
|
'[Shutdown] Error closing resource.',
|
||||||
|
);
|
||||||
|
expect(mocks.logger.warn).toHaveBeenCalledWith('[Shutdown] Graceful shutdown completed with errors.');
|
||||||
|
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log a warning and exit with code 1 if the redis connection fails to close', async () => {
|
||||||
|
const redisError = new Error('Redis quit failed');
|
||||||
|
mocks.redisConnection.quit.mockRejectedValue(redisError);
|
||||||
|
|
||||||
|
await gracefulShutdown('SIGTERM');
|
||||||
|
|
||||||
|
expect(mocks.logger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: redisError, resource: 'redisConnection' },
|
||||||
|
'[Shutdown] Error closing resource.',
|
||||||
|
);
|
||||||
|
expect(mocks.logger.warn).toHaveBeenCalledWith('[Shutdown] Graceful shutdown completed with errors.');
|
||||||
|
expect(processExitSpy).toHaveBeenCalledWith(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -86,20 +86,6 @@ vi.mock('./flyerDataTransformer', () => ({
|
|||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the module under test AFTER the mocks are set up.
|
|
||||||
// This will trigger the instantiation of the workers.
|
|
||||||
import './queueService.server';
|
|
||||||
|
|
||||||
// Destructure the captured processors for easier use in tests.
|
|
||||||
const {
|
|
||||||
'flyer-processing': flyerProcessor,
|
|
||||||
'email-sending': emailProcessor,
|
|
||||||
'analytics-reporting': analyticsProcessor,
|
|
||||||
'file-cleanup': cleanupProcessor,
|
|
||||||
'weekly-analytics-reporting': weeklyAnalyticsProcessor,
|
|
||||||
'token-cleanup': tokenCleanupProcessor,
|
|
||||||
} = mocks.capturedProcessors;
|
|
||||||
|
|
||||||
// Helper to create a mock BullMQ Job object
|
// Helper to create a mock BullMQ Job object
|
||||||
const createMockJob = <T>(data: T): Job<T> => {
|
const createMockJob = <T>(data: T): Job<T> => {
|
||||||
return {
|
return {
|
||||||
@@ -116,14 +102,32 @@ const createMockJob = <T>(data: T): Job<T> => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
describe('Queue Workers', () => {
|
describe('Queue Workers', () => {
|
||||||
beforeEach(() => {
|
let flyerProcessor: (job: Job) => Promise<unknown>;
|
||||||
|
let emailProcessor: (job: Job) => Promise<unknown>;
|
||||||
|
let analyticsProcessor: (job: Job) => Promise<unknown>;
|
||||||
|
let cleanupProcessor: (job: Job) => Promise<unknown>;
|
||||||
|
let weeklyAnalyticsProcessor: (job: Job) => Promise<unknown>;
|
||||||
|
let tokenCleanupProcessor: (job: Job) => Promise<unknown>;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
|
vi.resetModules();
|
||||||
|
|
||||||
// Reset default mock implementations for hoisted mocks
|
// Reset default mock implementations for hoisted mocks
|
||||||
mocks.sendEmail.mockResolvedValue(undefined);
|
mocks.sendEmail.mockResolvedValue(undefined);
|
||||||
mocks.unlink.mockResolvedValue(undefined);
|
mocks.unlink.mockResolvedValue(undefined);
|
||||||
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 }); // Default success for flyer processing
|
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 }); // Default success for flyer processing
|
||||||
|
mocks.deleteExpiredResetTokens.mockResolvedValue(5);
|
||||||
|
|
||||||
|
await import('./workers.server');
|
||||||
|
|
||||||
|
flyerProcessor = mocks.capturedProcessors['flyer-processing'];
|
||||||
|
emailProcessor = mocks.capturedProcessors['email-sending'];
|
||||||
|
analyticsProcessor = mocks.capturedProcessors['analytics-reporting'];
|
||||||
|
cleanupProcessor = mocks.capturedProcessors['file-cleanup'];
|
||||||
|
weeklyAnalyticsProcessor = mocks.capturedProcessors['weekly-analytics-reporting'];
|
||||||
|
tokenCleanupProcessor = mocks.capturedProcessors['token-cleanup'];
|
||||||
});
|
});
|
||||||
mocks.deleteExpiredResetTokens.mockResolvedValue(5);
|
|
||||||
|
|
||||||
describe('flyerWorker', () => {
|
describe('flyerWorker', () => {
|
||||||
it('should call flyerProcessingService.processJob with the job data', async () => {
|
it('should call flyerProcessingService.processJob with the job data', async () => {
|
||||||
|
|||||||
119
src/services/queues.server.test.ts
Normal file
119
src/services/queues.server.test.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
// src/services/queues.server.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
|
||||||
|
|
||||||
|
// --- Hoisted Mocks ---
|
||||||
|
const mocks = vi.hoisted(() => {
|
||||||
|
return {
|
||||||
|
// This will be our mock for the BullMQ Queue constructor
|
||||||
|
MockQueue: vi.fn(),
|
||||||
|
// This is a mock for the Redis connection object
|
||||||
|
mockConnection: { id: 'mock-redis-connection' },
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- Mock Modules ---
|
||||||
|
|
||||||
|
// Mock the 'bullmq' library to replace the real Queue constructor with our mock.
|
||||||
|
vi.mock('bullmq', () => ({
|
||||||
|
Queue: mocks.MockQueue,
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock our internal redis connection module to export our mock connection object.
|
||||||
|
vi.mock('./redis.server', () => ({
|
||||||
|
connection: mocks.mockConnection,
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe('Queue Definitions', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
// Clear any previous mock calls and reset module cache before each test.
|
||||||
|
// This is crucial because the queues are instantiated at the module level.
|
||||||
|
// Resetting modules ensures the `queues.server.ts` file is re-executed.
|
||||||
|
vi.clearAllMocks();
|
||||||
|
vi.resetModules();
|
||||||
|
|
||||||
|
// Dynamically import the module under test. This will trigger the
|
||||||
|
// `new Queue(...)` calls, which will be captured by our mock constructor.
|
||||||
|
await import('./queues.server');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create flyerQueue with the correct name and options', () => {
|
||||||
|
expect(mocks.MockQueue).toHaveBeenCalledWith('flyer-processing', {
|
||||||
|
connection: mocks.mockConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: 'exponential',
|
||||||
|
delay: 5000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create emailQueue with the correct name and options', () => {
|
||||||
|
expect(mocks.MockQueue).toHaveBeenCalledWith('email-sending', {
|
||||||
|
connection: mocks.mockConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 5,
|
||||||
|
backoff: {
|
||||||
|
type: 'exponential',
|
||||||
|
delay: 10000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create analyticsQueue with the correct name and options', () => {
|
||||||
|
expect(mocks.MockQueue).toHaveBeenCalledWith('analytics-reporting', {
|
||||||
|
connection: mocks.mockConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 2,
|
||||||
|
backoff: {
|
||||||
|
type: 'exponential',
|
||||||
|
delay: 60000,
|
||||||
|
},
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: 50,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create weeklyAnalyticsQueue with the correct name and options', () => {
|
||||||
|
expect(mocks.MockQueue).toHaveBeenCalledWith('weekly-analytics-reporting', {
|
||||||
|
connection: mocks.mockConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 2,
|
||||||
|
backoff: { type: 'exponential', delay: 3600000 },
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: 50,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create cleanupQueue with the correct name and options', () => {
|
||||||
|
expect(mocks.MockQueue).toHaveBeenCalledWith('file-cleanup', {
|
||||||
|
connection: mocks.mockConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: 'exponential', delay: 30000 },
|
||||||
|
removeOnComplete: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create tokenCleanupQueue with the correct name and options', () => {
|
||||||
|
expect(mocks.MockQueue).toHaveBeenCalledWith('token-cleanup', {
|
||||||
|
connection: mocks.mockConnection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 2,
|
||||||
|
backoff: { type: 'exponential', delay: 3600000 },
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: 10,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create exactly 6 queues', () => {
|
||||||
|
// This is a good sanity check to ensure no new queues were added without tests.
|
||||||
|
expect(mocks.MockQueue).toHaveBeenCalledTimes(6);
|
||||||
|
});
|
||||||
|
});
|
||||||
96
src/services/queues.server.ts
Normal file
96
src/services/queues.server.ts
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
import { Queue } from 'bullmq';
|
||||||
|
import { connection } from './redis.server';
|
||||||
|
import type { FlyerJobData } from './flyerProcessingService.server';
|
||||||
|
|
||||||
|
// --- Job Data Interfaces ---
|
||||||
|
|
||||||
|
export interface EmailJobData {
|
||||||
|
to: string;
|
||||||
|
subject: string;
|
||||||
|
text: string;
|
||||||
|
html: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AnalyticsJobData {
|
||||||
|
reportDate: string; // e.g., '2024-10-26'
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WeeklyAnalyticsJobData {
|
||||||
|
reportYear: number;
|
||||||
|
reportWeek: number; // ISO week number (1-53)
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CleanupJobData {
|
||||||
|
flyerId: number;
|
||||||
|
paths?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TokenCleanupJobData {
|
||||||
|
timestamp: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Queues ---
|
||||||
|
|
||||||
|
export const flyerQueue = new Queue<FlyerJobData>('flyer-processing', {
|
||||||
|
connection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: {
|
||||||
|
type: 'exponential',
|
||||||
|
delay: 5000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export const emailQueue = new Queue<EmailJobData>('email-sending', {
|
||||||
|
connection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 5,
|
||||||
|
backoff: {
|
||||||
|
type: 'exponential',
|
||||||
|
delay: 10000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export const analyticsQueue = new Queue<AnalyticsJobData>('analytics-reporting', {
|
||||||
|
connection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 2,
|
||||||
|
backoff: {
|
||||||
|
type: 'exponential',
|
||||||
|
delay: 60000,
|
||||||
|
},
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: 50,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export const weeklyAnalyticsQueue = new Queue<WeeklyAnalyticsJobData>('weekly-analytics-reporting', {
|
||||||
|
connection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 2,
|
||||||
|
backoff: { type: 'exponential', delay: 3600000 },
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: 50,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export const cleanupQueue = new Queue<CleanupJobData>('file-cleanup', {
|
||||||
|
connection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 3,
|
||||||
|
backoff: { type: 'exponential', delay: 30000 },
|
||||||
|
removeOnComplete: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export const tokenCleanupQueue = new Queue<TokenCleanupJobData>('token-cleanup', {
|
||||||
|
connection,
|
||||||
|
defaultJobOptions: {
|
||||||
|
attempts: 2,
|
||||||
|
backoff: { type: 'exponential', delay: 3600000 },
|
||||||
|
removeOnComplete: true,
|
||||||
|
removeOnFail: 10,
|
||||||
|
},
|
||||||
|
});
|
||||||
16
src/services/redis.server.ts
Normal file
16
src/services/redis.server.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import IORedis from 'ioredis';
|
||||||
|
import { logger } from './logger.server';
|
||||||
|
|
||||||
|
export const connection = new IORedis(process.env.REDIS_URL!, {
|
||||||
|
maxRetriesPerRequest: null, // Important for BullMQ
|
||||||
|
password: process.env.REDIS_PASSWORD,
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- Redis Connection Event Listeners ---
|
||||||
|
connection.on('connect', () => {
|
||||||
|
logger.info('[Redis] Connection established successfully.');
|
||||||
|
});
|
||||||
|
|
||||||
|
connection.on('error', (err) => {
|
||||||
|
logger.error({ err }, '[Redis] Connection error.');
|
||||||
|
});
|
||||||
172
src/services/worker.test.ts
Normal file
172
src/services/worker.test.ts
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
// src/services/worker.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||||
|
|
||||||
|
// --- Hoisted Mocks ---
|
||||||
|
const mocks = vi.hoisted(() => {
|
||||||
|
return {
|
||||||
|
gracefulShutdown: vi.fn(),
|
||||||
|
logger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
},
|
||||||
|
// Mock process events
|
||||||
|
processOn: vi.fn(),
|
||||||
|
processExit: vi.fn(),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- Mock Modules ---
|
||||||
|
vi.mock('./workers.server', () => ({
|
||||||
|
gracefulShutdown: mocks.gracefulShutdown,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock('./logger.server', () => ({
|
||||||
|
logger: mocks.logger,
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe('Worker Entry Point', () => {
|
||||||
|
let originalProcessOn: typeof process.on;
|
||||||
|
let originalProcessExit: typeof process.exit;
|
||||||
|
let eventHandlers: Record<string, (...args: any[]) => void> = {};
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
vi.resetModules(); // This is key to re-run the top-level code in worker.ts
|
||||||
|
|
||||||
|
// Reset default mock implementations
|
||||||
|
mocks.gracefulShutdown.mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
// Spy on and mock process methods
|
||||||
|
originalProcessOn = process.on;
|
||||||
|
originalProcessExit = process.exit;
|
||||||
|
|
||||||
|
// Capture event handlers registered with process.on
|
||||||
|
eventHandlers = {};
|
||||||
|
process.on = vi.fn((event, listener) => {
|
||||||
|
eventHandlers[event] = listener;
|
||||||
|
return process;
|
||||||
|
}) as any;
|
||||||
|
|
||||||
|
process.exit = mocks.processExit as any;
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
// Restore original process methods
|
||||||
|
process.on = originalProcessOn;
|
||||||
|
process.exit = originalProcessExit;
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log initialization messages on import', async () => {
|
||||||
|
// Act: Import the module to trigger top-level code
|
||||||
|
await import('./worker');
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mocks.logger.info).toHaveBeenCalledWith('[Worker] Initializing worker process...');
|
||||||
|
expect(mocks.logger.info).toHaveBeenCalledWith(
|
||||||
|
'[Worker] Worker process is running and listening for jobs.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should register handlers for SIGINT, SIGTERM, uncaughtException, and unhandledRejection', async () => {
|
||||||
|
// Act
|
||||||
|
await import('./worker');
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(process.on).toHaveBeenCalledWith('SIGINT', expect.any(Function));
|
||||||
|
expect(process.on).toHaveBeenCalledWith('SIGTERM', expect.any(Function));
|
||||||
|
expect(process.on).toHaveBeenCalledWith('uncaughtException', expect.any(Function));
|
||||||
|
expect(process.on).toHaveBeenCalledWith('unhandledRejection', expect.any(Function));
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Shutdown Handling', () => {
|
||||||
|
it('should call gracefulShutdown on SIGINT', async () => {
|
||||||
|
// Arrange
|
||||||
|
await import('./worker');
|
||||||
|
const sigintHandler = eventHandlers['SIGINT'];
|
||||||
|
expect(sigintHandler).toBeDefined();
|
||||||
|
|
||||||
|
// Act
|
||||||
|
sigintHandler();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mocks.logger.info).toHaveBeenCalledWith(
|
||||||
|
'[Worker] Received SIGINT. Initiating graceful shutdown...',
|
||||||
|
);
|
||||||
|
expect(mocks.gracefulShutdown).toHaveBeenCalledWith('SIGINT');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call gracefulShutdown on SIGTERM', async () => {
|
||||||
|
// Arrange
|
||||||
|
await import('./worker');
|
||||||
|
const sigtermHandler = eventHandlers['SIGTERM'];
|
||||||
|
expect(sigtermHandler).toBeDefined();
|
||||||
|
|
||||||
|
// Act
|
||||||
|
sigtermHandler();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mocks.logger.info).toHaveBeenCalledWith(
|
||||||
|
'[Worker] Received SIGTERM. Initiating graceful shutdown...',
|
||||||
|
);
|
||||||
|
expect(mocks.gracefulShutdown).toHaveBeenCalledWith('SIGTERM');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log an error and exit if gracefulShutdown rejects', async () => {
|
||||||
|
// Arrange
|
||||||
|
const shutdownError = new Error('Shutdown failed');
|
||||||
|
mocks.gracefulShutdown.mockRejectedValue(shutdownError);
|
||||||
|
await import('./worker');
|
||||||
|
const sigintHandler = eventHandlers['SIGINT'];
|
||||||
|
|
||||||
|
// Act
|
||||||
|
// The handler catches the rejection, so we don't need to wrap this in expect().rejects
|
||||||
|
await sigintHandler();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mocks.logger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: shutdownError },
|
||||||
|
'[Worker] Error during shutdown.',
|
||||||
|
);
|
||||||
|
expect(mocks.processExit).toHaveBeenCalledWith(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Error Handling', () => {
|
||||||
|
it('should log uncaught exceptions', async () => {
|
||||||
|
// Arrange
|
||||||
|
await import('./worker');
|
||||||
|
const exceptionHandler = eventHandlers['uncaughtException'];
|
||||||
|
expect(exceptionHandler).toBeDefined();
|
||||||
|
const testError = new Error('Test uncaught exception');
|
||||||
|
|
||||||
|
// Act
|
||||||
|
exceptionHandler(testError);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mocks.logger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: testError },
|
||||||
|
'[Worker] Uncaught exception',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log unhandled promise rejections', async () => {
|
||||||
|
// Arrange
|
||||||
|
await import('./worker');
|
||||||
|
const rejectionHandler = eventHandlers['unhandledRejection'];
|
||||||
|
expect(rejectionHandler).toBeDefined();
|
||||||
|
const testReason = 'Promise rejected';
|
||||||
|
const testPromise = Promise.reject(testReason);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
rejectionHandler(testReason, testPromise);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mocks.logger.error).toHaveBeenCalledWith(
|
||||||
|
{ reason: testReason, promise: testPromise },
|
||||||
|
'[Worker] Unhandled Rejection',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
31
src/services/worker.ts
Normal file
31
src/services/worker.ts
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
// src/services/worker.ts
|
||||||
|
import { gracefulShutdown } from './workers.server';
|
||||||
|
import { logger } from './logger.server';
|
||||||
|
|
||||||
|
logger.info('[Worker] Initializing worker process...');
|
||||||
|
|
||||||
|
// The workers are instantiated as side effects of importing workers.server.ts.
|
||||||
|
// This pattern ensures they start immediately upon import.
|
||||||
|
|
||||||
|
// Handle graceful shutdown
|
||||||
|
const handleShutdown = (signal: string) => {
|
||||||
|
logger.info(`[Worker] Received ${signal}. Initiating graceful shutdown...`);
|
||||||
|
gracefulShutdown(signal).catch((error: unknown) => {
|
||||||
|
logger.error({ err: error }, '[Worker] Error during shutdown.');
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
process.on('SIGINT', () => handleShutdown('SIGINT'));
|
||||||
|
process.on('SIGTERM', () => handleShutdown('SIGTERM'));
|
||||||
|
|
||||||
|
// Catch unhandled errors to log them before crashing
|
||||||
|
process.on('uncaughtException', (err) => {
|
||||||
|
logger.error({ err }, '[Worker] Uncaught exception');
|
||||||
|
});
|
||||||
|
|
||||||
|
process.on('unhandledRejection', (reason, promise) => {
|
||||||
|
logger.error({ reason, promise }, '[Worker] Unhandled Rejection');
|
||||||
|
});
|
||||||
|
|
||||||
|
logger.info('[Worker] Worker process is running and listening for jobs.');
|
||||||
346
src/services/workers.server.test.ts
Normal file
346
src/services/workers.server.test.ts
Normal file
@@ -0,0 +1,346 @@
|
|||||||
|
// src/services/workers.server.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import type { Job } from 'bullmq';

// --- Hoisted Mocks ---
// vi.hoisted runs before the (hoisted) vi.mock factories below, so these mock
// fns can be safely referenced inside those factories.
const mocks = vi.hoisted(() => {
  // This object will store the processor functions captured from the worker constructors.
  const capturedProcessors: Record<string, (job: Job) => Promise<unknown>> = {};

  return {
    sendEmail: vi.fn(),
    unlink: vi.fn(),
    processFlyerJob: vi.fn(),
    capturedProcessors,
    deleteExpiredResetTokens: vi.fn(),
    // Mock the Worker constructor to capture the processor function. It must be a
    // `function` and not an arrow function so it can be called with `new`.
    MockWorker: vi.fn(function (name: string, processor: (job: Job) => Promise<unknown>) {
      if (processor) {
        capturedProcessors[name] = processor;
      }
      // Return a mock worker instance, though it's not used in this test file.
      return { on: vi.fn(), close: vi.fn() };
    }),
  };
});

// --- Mock Modules ---
vi.mock('./emailService.server', async (importOriginal) => {
  const actual = await importOriginal<typeof import('./emailService.server')>();
  return {
    ...actual,
    // We only need to mock the specific function being called by the worker.
    // The rest of the module can retain its original implementation if needed elsewhere.
    sendEmail: mocks.sendEmail,
  };
});

// The workers use an `fsAdapter`. We can mock the underlying `fsPromises`
// that the adapter is built from in queueService.server.ts.
vi.mock('node:fs/promises', () => ({
  default: {
    unlink: mocks.unlink,
    // Add other fs functions if needed by other tests
    readdir: vi.fn(),
  },
}));

vi.mock('./logger.server', () => ({
  logger: {
    info: vi.fn(),
    error: vi.fn(),
    warn: vi.fn(),
    debug: vi.fn(),
    // child() returns the same mock logger so per-job loggers share call history.
    child: vi.fn().mockReturnThis(),
  },
}));

vi.mock('./db/index.db', () => ({
  userRepo: {
    deleteExpiredResetTokens: mocks.deleteExpiredResetTokens,
  },
}));

// Mock bullmq to capture the processor functions passed to the Worker constructor.
// NOTE: this import is mid-file, but vi.mock calls are hoisted above ALL
// imports, so `mockLogger` still resolves to the mocked logger above.
import { logger as mockLogger } from './logger.server';
vi.mock('bullmq', () => ({
  Worker: mocks.MockWorker,
  // FIX: Use a standard function for the mock constructor to allow `new Queue(...)` to work.
  Queue: vi.fn(function () {
    return { add: vi.fn() };
  }),
}));

// Mock flyerProcessingService.server as flyerWorker depends on it
vi.mock('./flyerProcessingService.server', () => ({
  FlyerProcessingService: class {
    processJob = mocks.processFlyerJob;
  },
}));

// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
vi.mock('./flyerDataTransformer', () => ({
  FlyerDataTransformer: class {
    transform = vi.fn(); // Mock transform method
  },
}));
|
||||||
|
|
||||||
|
// Helper to create a mock BullMQ Job object
|
||||||
|
const createMockJob = <T>(data: T): Job<T> => {
|
||||||
|
return {
|
||||||
|
id: 'job-1',
|
||||||
|
data,
|
||||||
|
updateProgress: vi.fn().mockResolvedValue(undefined),
|
||||||
|
log: vi.fn().mockResolvedValue(undefined),
|
||||||
|
opts: { attempts: 3 },
|
||||||
|
attemptsMade: 1,
|
||||||
|
trace: vi.fn().mockResolvedValue(undefined),
|
||||||
|
moveToCompleted: vi.fn().mockResolvedValue(undefined),
|
||||||
|
moveToFailed: vi.fn().mockResolvedValue(undefined),
|
||||||
|
} as unknown as Job<T>;
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('Queue Workers', () => {
  // These will hold the captured processor functions for each test.
  let flyerProcessor: (job: Job) => Promise<unknown>;
  let emailProcessor: (job: Job) => Promise<unknown>;
  let analyticsProcessor: (job: Job) => Promise<unknown>;
  let cleanupProcessor: (job: Job) => Promise<unknown>;
  let weeklyAnalyticsProcessor: (job: Job) => Promise<unknown>;
  let tokenCleanupProcessor: (job: Job) => Promise<unknown>;

  beforeEach(async () => {
    vi.clearAllMocks();

    // Reset default mock implementations for hoisted mocks
    mocks.sendEmail.mockResolvedValue(undefined);
    mocks.unlink.mockResolvedValue(undefined);
    mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 }); // Default success for flyer processing
    mocks.deleteExpiredResetTokens.mockResolvedValue(5);

    // Reset modules to re-evaluate the workers.server.ts file with fresh mocks.
    // This ensures that new worker instances are created and their processors are captured for each test.
    vi.resetModules();

    // Dynamically import the module under test AFTER mocks are reset.
    // This will trigger the instantiation of the workers, and our mocked Worker constructor will capture the processors.
    await import('./workers.server');

    // Re-capture the processors for each test to ensure isolation.
    // NOTE(review): these keys must match the queue names passed to each
    // `new Worker(...)` call in workers.server.ts — a mismatch silently yields
    // an undefined processor here.
    flyerProcessor = mocks.capturedProcessors['flyer-processing'];
    emailProcessor = mocks.capturedProcessors['email-sending'];
    analyticsProcessor = mocks.capturedProcessors['analytics-reporting'];
    cleanupProcessor = mocks.capturedProcessors['file-cleanup'];
    weeklyAnalyticsProcessor = mocks.capturedProcessors['weekly-analytics-reporting'];
    tokenCleanupProcessor = mocks.capturedProcessors['token-cleanup'];
  });
|
||||||
|
|
||||||
|
describe('flyerWorker', () => {
|
||||||
|
it('should call flyerProcessingService.processJob with the job data', async () => {
|
||||||
|
const jobData = {
|
||||||
|
filePath: '/tmp/flyer.pdf',
|
||||||
|
originalFileName: 'flyer.pdf',
|
||||||
|
checksum: 'abc',
|
||||||
|
};
|
||||||
|
const job = createMockJob(jobData);
|
||||||
|
|
||||||
|
await flyerProcessor(job);
|
||||||
|
|
||||||
|
expect(mocks.processFlyerJob).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mocks.processFlyerJob).toHaveBeenCalledWith(job);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should re-throw an error if flyerProcessingService.processJob fails', async () => {
|
||||||
|
const job = createMockJob({
|
||||||
|
filePath: '/tmp/fail.pdf',
|
||||||
|
originalFileName: 'fail.pdf',
|
||||||
|
checksum: 'def',
|
||||||
|
});
|
||||||
|
const processingError = new Error('Flyer processing failed');
|
||||||
|
mocks.processFlyerJob.mockRejectedValue(processingError);
|
||||||
|
|
||||||
|
await expect(flyerProcessor(job)).rejects.toThrow('Flyer processing failed');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('emailWorker', () => {
|
||||||
|
it('should call emailService.sendEmail with the job data', async () => {
|
||||||
|
const jobData = {
|
||||||
|
to: 'test@example.com',
|
||||||
|
subject: 'Test Email',
|
||||||
|
html: '<p>Hello</p>',
|
||||||
|
text: 'Hello',
|
||||||
|
};
|
||||||
|
const job = createMockJob(jobData);
|
||||||
|
|
||||||
|
await emailProcessor(job);
|
||||||
|
|
||||||
|
expect(mocks.sendEmail).toHaveBeenCalledTimes(1);
|
||||||
|
// The implementation passes the logger as the second argument
|
||||||
|
expect(mocks.sendEmail).toHaveBeenCalledWith(jobData, expect.anything());
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log and re-throw an error if sendEmail fails with a non-Error object', async () => {
|
||||||
|
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
||||||
|
const emailError = 'SMTP server is down'; // Reject with a string
|
||||||
|
mocks.sendEmail.mockRejectedValue(emailError);
|
||||||
|
|
||||||
|
await expect(emailProcessor(job)).rejects.toThrow(emailError);
|
||||||
|
|
||||||
|
// The worker should wrap the string in an Error object for logging
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: new Error(emailError), jobData: job.data },
|
||||||
|
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should re-throw an error if sendEmail fails', async () => {
|
||||||
|
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
|
||||||
|
const emailError = new Error('SMTP server is down');
|
||||||
|
mocks.sendEmail.mockRejectedValue(emailError);
|
||||||
|
|
||||||
|
await expect(emailProcessor(job)).rejects.toThrow('SMTP server is down');
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: emailError, jobData: job.data },
|
||||||
|
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('analyticsWorker', () => {
  it('should complete successfully for a valid report date', async () => {
    // Fake timers let us skip the processor's simulated 10s delay instantly.
    vi.useFakeTimers();
    const job = createMockJob({ reportDate: '2024-01-01' });

    // Start the processor BEFORE advancing timers so its setTimeout is pending.
    const promise = analyticsProcessor(job);
    // Advance timers to simulate the 10-second task completing
    await vi.advanceTimersByTimeAsync(10000);
    await promise; // Wait for the promise to resolve

    // No error should be thrown
    expect(true).toBe(true);
    vi.useRealTimers();
  });

  it('should throw an error if reportDate is "FAIL"', async () => {
    // 'FAIL' is the sentinel the worker treats as a forced test failure.
    const job = createMockJob({ reportDate: 'FAIL' });

    await expect(analyticsProcessor(job)).rejects.toThrow(
      'This is a test failure for the analytics job.',
    );
  });
});
|
||||||
|
|
||||||
|
describe('cleanupWorker', () => {
|
||||||
|
it('should call unlink for each path provided in the job data', async () => {
|
||||||
|
const jobData = {
|
||||||
|
flyerId: 123,
|
||||||
|
paths: ['/tmp/file1.jpg', '/tmp/file2.pdf'],
|
||||||
|
};
|
||||||
|
const job = createMockJob(jobData);
|
||||||
|
mocks.unlink.mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
await cleanupProcessor(job);
|
||||||
|
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file1.jpg');
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file2.pdf');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not throw an error if a file is already deleted (ENOENT)', async () => {
|
||||||
|
const jobData = {
|
||||||
|
flyerId: 123,
|
||||||
|
paths: ['/tmp/existing.jpg', '/tmp/already-deleted.jpg'],
|
||||||
|
};
|
||||||
|
const job = createMockJob(jobData);
|
||||||
|
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
||||||
|
const enoentError: NodeJS.ErrnoException = new Error('File not found');
|
||||||
|
enoentError.code = 'ENOENT';
|
||||||
|
|
||||||
|
// First call succeeds, second call fails with ENOENT
|
||||||
|
mocks.unlink.mockResolvedValueOnce(undefined).mockRejectedValueOnce(enoentError);
|
||||||
|
|
||||||
|
// The processor should complete without throwing
|
||||||
|
await expect(cleanupProcessor(job)).resolves.toBeUndefined();
|
||||||
|
|
||||||
|
expect(mocks.unlink).toHaveBeenCalledTimes(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should re-throw an error for issues other than ENOENT (e.g., permissions)', async () => {
|
||||||
|
const jobData = {
|
||||||
|
flyerId: 123,
|
||||||
|
paths: ['/tmp/protected-file.jpg'],
|
||||||
|
};
|
||||||
|
const job = createMockJob(jobData);
|
||||||
|
// Use the built-in NodeJS.ErrnoException type for mock system errors.
|
||||||
|
const permissionError: NodeJS.ErrnoException = new Error('Permission denied');
|
||||||
|
permissionError.code = 'EACCES';
|
||||||
|
|
||||||
|
mocks.unlink.mockRejectedValue(permissionError);
|
||||||
|
|
||||||
|
await expect(cleanupProcessor(job)).rejects.toThrow('Permission denied');
|
||||||
|
|
||||||
|
// Verify the error was logged by the worker's catch block
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
{ err: permissionError },
|
||||||
|
expect.stringContaining(
|
||||||
|
`[CleanupWorker] Job ${job.id} for flyer ${job.data.flyerId} failed.`,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('weeklyAnalyticsWorker', () => {
|
||||||
|
it('should complete successfully for a valid report date', async () => {
|
||||||
|
vi.useFakeTimers();
|
||||||
|
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||||
|
|
||||||
|
const promise = weeklyAnalyticsProcessor(job);
|
||||||
|
// Advance timers to simulate the 30-second task completing
|
||||||
|
await vi.advanceTimersByTimeAsync(30000);
|
||||||
|
await promise; // Wait for the promise to resolve
|
||||||
|
|
||||||
|
// No error should be thrown
|
||||||
|
expect(true).toBe(true);
|
||||||
|
vi.useRealTimers();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should re-throw an error if the job fails', async () => {
|
||||||
|
vi.useFakeTimers();
|
||||||
|
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
|
||||||
|
// Mock the internal logic to throw an error
|
||||||
|
const originalSetTimeout = setTimeout;
|
||||||
|
vi.spyOn(global, 'setTimeout').mockImplementation((callback, ms) => {
|
||||||
|
if (ms === 30000) {
|
||||||
|
// Target the simulated delay
|
||||||
|
throw new Error('Weekly analytics job failed');
|
||||||
|
}
|
||||||
|
return originalSetTimeout(callback, ms);
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(weeklyAnalyticsProcessor(job)).rejects.toThrow('Weekly analytics job failed');
|
||||||
|
vi.useRealTimers();
|
||||||
|
vi.restoreAllMocks(); // Restore setTimeout mock
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('tokenCleanupWorker', () => {
|
||||||
|
it('should call userRepo.deleteExpiredResetTokens and return the count', async () => {
|
||||||
|
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||||
|
mocks.deleteExpiredResetTokens.mockResolvedValue(10);
|
||||||
|
|
||||||
|
const result = await tokenCleanupProcessor(job);
|
||||||
|
|
||||||
|
expect(mocks.deleteExpiredResetTokens).toHaveBeenCalledTimes(1);
|
||||||
|
expect(result).toEqual({ deletedCount: 10 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should re-throw an error if the database call fails', async () => {
|
||||||
|
const job = createMockJob({ timestamp: new Date().toISOString() });
|
||||||
|
const dbError = new Error('DB cleanup failed');
|
||||||
|
mocks.deleteExpiredResetTokens.mockRejectedValue(dbError);
|
||||||
|
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
344
src/services/workers.server.ts
Normal file
344
src/services/workers.server.ts
Normal file
@@ -0,0 +1,344 @@
|
|||||||
|
import { Worker, Job, UnrecoverableError } from 'bullmq';
|
||||||
|
import fsPromises from 'node:fs/promises';
|
||||||
|
import { exec } from 'child_process';
|
||||||
|
import { promisify } from 'util';
|
||||||
|
|
||||||
|
import { logger } from './logger.server';
|
||||||
|
import { connection } from './redis.server';
|
||||||
|
import { aiService } from './aiService.server';
|
||||||
|
import * as emailService from './emailService.server';
|
||||||
|
import * as db from './db/index.db';
|
||||||
|
import {
|
||||||
|
FlyerProcessingService,
|
||||||
|
type FlyerJobData,
|
||||||
|
type IFileSystem,
|
||||||
|
} from './flyerProcessingService.server';
|
||||||
|
import { FlyerDataTransformer } from './flyerDataTransformer';
|
||||||
|
import {
|
||||||
|
flyerQueue,
|
||||||
|
emailQueue,
|
||||||
|
analyticsQueue,
|
||||||
|
weeklyAnalyticsQueue,
|
||||||
|
cleanupQueue,
|
||||||
|
tokenCleanupQueue,
|
||||||
|
type EmailJobData,
|
||||||
|
type AnalyticsJobData,
|
||||||
|
type CleanupJobData,
|
||||||
|
type WeeklyAnalyticsJobData,
|
||||||
|
type TokenCleanupJobData,
|
||||||
|
} from './queues.server';
|
||||||
|
|
||||||
|
// Promisified child_process.exec, injected into FlyerProcessingService below
// so the service never touches child_process directly (and tests can stub it).
const execAsync = promisify(exec);

// --- Worker Instantiation ---

// Thin adapter over node:fs/promises matching the IFileSystem port declared by
// flyerProcessingService.server — keeps the service decoupled from the fs module.
const fsAdapter: IFileSystem = {
  readdir: (path: string, options: { withFileTypes: true }) => fsPromises.readdir(path, options),
  unlink: (path: string) => fsPromises.unlink(path),
};

// Single shared service instance used by the flyer worker; every dependency is
// injected explicitly for testability.
const flyerProcessingService = new FlyerProcessingService(
  aiService,
  db,
  fsAdapter,
  execAsync,
  cleanupQueue,
  new FlyerDataTransformer(),
);
|
||||||
|
|
||||||
|
const normalizeError = (error: unknown): Error => {
|
||||||
|
return error instanceof Error ? error : new Error(String(error));
|
||||||
|
};
|
||||||
|
|
||||||
|
const attachWorkerEventListeners = (worker: Worker) => {
|
||||||
|
worker.on('completed', (job: Job, returnValue: unknown) => {
|
||||||
|
logger.info({ returnValue }, `[${worker.name}] Job ${job.id} completed successfully.`);
|
||||||
|
});
|
||||||
|
|
||||||
|
worker.on('failed', (job: Job | undefined, error: Error) => {
|
||||||
|
logger.error(
|
||||||
|
{ err: error, jobData: job?.data },
|
||||||
|
`[${worker.name}] Job ${job?.id} has ultimately failed after all attempts.`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
export const flyerWorker = new Worker<FlyerJobData>(
|
||||||
|
'flyer-processing',
|
||||||
|
async (job) => {
|
||||||
|
try {
|
||||||
|
return await flyerProcessingService.processJob(job);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const wrappedError = normalizeError(error);
|
||||||
|
const errorMessage = wrappedError.message || '';
|
||||||
|
if (
|
||||||
|
errorMessage.includes('quota') ||
|
||||||
|
errorMessage.includes('429') ||
|
||||||
|
errorMessage.includes('RESOURCE_EXHAUSTED')
|
||||||
|
) {
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError, jobId: job.id },
|
||||||
|
'[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
|
||||||
|
);
|
||||||
|
throw new UnrecoverableError(errorMessage);
|
||||||
|
}
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection,
|
||||||
|
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const emailWorker = new Worker<EmailJobData>(
|
||||||
|
'email-sending',
|
||||||
|
async (job: Job<EmailJobData>) => {
|
||||||
|
const { to, subject } = job.data;
|
||||||
|
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
|
||||||
|
jobLogger.info({ to, subject }, `[EmailWorker] Sending email for job ${job.id}`);
|
||||||
|
try {
|
||||||
|
await emailService.sendEmail(job.data, jobLogger);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const wrappedError = normalizeError(error);
|
||||||
|
logger.error(
|
||||||
|
{
|
||||||
|
err: wrappedError,
|
||||||
|
jobData: job.data,
|
||||||
|
},
|
||||||
|
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection,
|
||||||
|
concurrency: parseInt(process.env.EMAIL_WORKER_CONCURRENCY || '10', 10),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const analyticsWorker = new Worker<AnalyticsJobData>(
|
||||||
|
'analytics-reporting',
|
||||||
|
async (job: Job<AnalyticsJobData>) => {
|
||||||
|
const { reportDate } = job.data;
|
||||||
|
logger.info({ reportDate }, `[AnalyticsWorker] Starting report generation for job ${job.id}`);
|
||||||
|
try {
|
||||||
|
if (reportDate === 'FAIL') {
|
||||||
|
throw new Error('This is a test failure for the analytics job.');
|
||||||
|
}
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 10000));
|
||||||
|
logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const wrappedError = normalizeError(error);
|
||||||
|
logger.error({ err: wrappedError, jobData: job.data },
|
||||||
|
`[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection,
|
||||||
|
concurrency: parseInt(process.env.ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const cleanupWorker = new Worker<CleanupJobData>(
|
||||||
|
'file-cleanup',
|
||||||
|
async (job: Job<CleanupJobData>) => {
|
||||||
|
const { flyerId, paths } = job.data;
|
||||||
|
logger.info(
|
||||||
|
{ paths },
|
||||||
|
`[CleanupWorker] Starting file cleanup for job ${job.id} (Flyer ID: ${flyerId})`,
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (!paths || paths.length === 0) {
|
||||||
|
logger.warn(
|
||||||
|
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} received no paths to clean. Skipping.`,
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const filePath of paths) {
|
||||||
|
try {
|
||||||
|
await fsAdapter.unlink(filePath);
|
||||||
|
logger.info(`[CleanupWorker] Deleted temporary file: ${filePath}`);
|
||||||
|
} catch (unlinkError: unknown) {
|
||||||
|
if (
|
||||||
|
unlinkError instanceof Error &&
|
||||||
|
'code' in unlinkError &&
|
||||||
|
(unlinkError as any).code === 'ENOENT'
|
||||||
|
) {
|
||||||
|
logger.warn(
|
||||||
|
`[CleanupWorker] File not found during cleanup (already deleted?): ${filePath}`,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
throw unlinkError;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
logger.info(
|
||||||
|
`[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`,
|
||||||
|
);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const wrappedError = normalizeError(error);
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError },
|
||||||
|
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection,
|
||||||
|
concurrency: parseInt(process.env.CLEANUP_WORKER_CONCURRENCY || '10', 10),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
||||||
|
'weekly-analytics-reporting',
|
||||||
|
async (job: Job<WeeklyAnalyticsJobData>) => {
|
||||||
|
const { reportYear, reportWeek } = job.data;
|
||||||
|
logger.info(
|
||||||
|
{ reportYear, reportWeek },
|
||||||
|
`[WeeklyAnalyticsWorker] Starting weekly report generation for job ${job.id}`,
|
||||||
|
);
|
||||||
|
try {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 30000));
|
||||||
|
logger.info(
|
||||||
|
`[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`,
|
||||||
|
);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const wrappedError = normalizeError(error);
|
||||||
|
logger.error(
|
||||||
|
{ err: wrappedError, jobData: job.data },
|
||||||
|
`[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||||
|
);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection,
|
||||||
|
concurrency: parseInt(process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY || '1', 10),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
|
||||||
|
'token-cleanup',
|
||||||
|
async (job: Job<TokenCleanupJobData>) => {
|
||||||
|
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
|
||||||
|
jobLogger.info('[TokenCleanupWorker] Starting cleanup of expired password reset tokens.');
|
||||||
|
try {
|
||||||
|
const deletedCount = await db.userRepo.deleteExpiredResetTokens(jobLogger);
|
||||||
|
jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`);
|
||||||
|
return { deletedCount };
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const wrappedError = normalizeError(error);
|
||||||
|
jobLogger.error({ err: wrappedError }, `[TokenCleanupWorker] Job ${job.id} failed.`);
|
||||||
|
throw wrappedError;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
connection,
|
||||||
|
concurrency: 1,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
// Wire the shared completed/failed logging onto every worker instance.
attachWorkerEventListeners(flyerWorker);
attachWorkerEventListeners(emailWorker);
attachWorkerEventListeners(analyticsWorker);
attachWorkerEventListeners(cleanupWorker);
attachWorkerEventListeners(weeklyAnalyticsWorker);
attachWorkerEventListeners(tokenCleanupWorker);

logger.info('All workers started and listening for jobs.');
|
||||||
|
|
||||||
|
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds
|
||||||
|
|
||||||
|
export const gracefulShutdown = async (signal: string) => {
|
||||||
|
logger.info(
|
||||||
|
`[Shutdown] Received ${signal}. Initiating graceful shutdown (timeout: ${SHUTDOWN_TIMEOUT / 1000}s)...`,
|
||||||
|
);
|
||||||
|
|
||||||
|
const shutdownPromise = (async () => {
|
||||||
|
let hasErrors = false;
|
||||||
|
|
||||||
|
// Helper function to close a group of resources and log results
|
||||||
|
const closeResources = async (resources: { name: string; close: () => Promise<any> }[], type: string) => {
|
||||||
|
logger.info(`[Shutdown] Closing all ${type}...`);
|
||||||
|
const results = await Promise.allSettled(resources.map((r) => r.close()));
|
||||||
|
let groupHasErrors = false;
|
||||||
|
|
||||||
|
results.forEach((result, index) => {
|
||||||
|
if (result.status === 'rejected') {
|
||||||
|
groupHasErrors = true;
|
||||||
|
logger.error(
|
||||||
|
{ err: result.reason, resource: resources[index].name },
|
||||||
|
`[Shutdown] Error closing ${resources[index].name}.`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!groupHasErrors) logger.info(`[Shutdown] All ${type} closed successfully.`);
|
||||||
|
return groupHasErrors;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Define resource groups for sequential shutdown
|
||||||
|
const workerResources = [
|
||||||
|
{ name: 'flyerWorker', close: () => flyerWorker.close() },
|
||||||
|
{ name: 'emailWorker', close: () => emailWorker.close() },
|
||||||
|
{ name: 'analyticsWorker', close: () => analyticsWorker.close() },
|
||||||
|
{ name: 'cleanupWorker', close: () => cleanupWorker.close() },
|
||||||
|
{ name: 'weeklyAnalyticsWorker', close: () => weeklyAnalyticsWorker.close() },
|
||||||
|
{ name: 'tokenCleanupWorker', close: () => tokenCleanupWorker.close() },
|
||||||
|
];
|
||||||
|
|
||||||
|
const queueResources = [
|
||||||
|
{ name: 'flyerQueue', close: () => flyerQueue.close() },
|
||||||
|
{ name: 'emailQueue', close: () => emailQueue.close() },
|
||||||
|
{ name: 'analyticsQueue', close: () => analyticsQueue.close() },
|
||||||
|
{ name: 'cleanupQueue', close: () => cleanupQueue.close() },
|
||||||
|
{ name: 'weeklyAnalyticsQueue', close: () => weeklyAnalyticsQueue.close() },
|
||||||
|
{ name: 'tokenCleanupQueue', close: () => tokenCleanupQueue.close() },
|
||||||
|
];
|
||||||
|
|
||||||
|
// 1. Close workers first
|
||||||
|
if (await closeResources(workerResources, 'workers')) hasErrors = true;
|
||||||
|
|
||||||
|
// 2. Then close queues
|
||||||
|
if (await closeResources(queueResources, 'queues')) hasErrors = true;
|
||||||
|
|
||||||
|
// 3. Finally, close the Redis connection
|
||||||
|
logger.info('[Shutdown] Closing Redis connection...');
|
||||||
|
try {
|
||||||
|
await connection.quit();
|
||||||
|
logger.info('[Shutdown] Redis connection closed successfully.');
|
||||||
|
} catch (err) {
|
||||||
|
hasErrors = true;
|
||||||
|
logger.error({ err, resource: 'redisConnection' }, `[Shutdown] Error closing Redis connection.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return hasErrors;
|
||||||
|
})();
|
||||||
|
|
||||||
|
const timeoutPromise = new Promise<string>((resolve) =>
|
||||||
|
setTimeout(() => resolve('timeout'), SHUTDOWN_TIMEOUT),
|
||||||
|
);
|
||||||
|
|
||||||
|
const result = await Promise.race([shutdownPromise, timeoutPromise]);
|
||||||
|
|
||||||
|
if (result === 'timeout') {
|
||||||
|
logger.error(
|
||||||
|
`[Shutdown] Graceful shutdown timed out after ${SHUTDOWN_TIMEOUT / 1000} seconds. Forcing exit.`,
|
||||||
|
);
|
||||||
|
process.exit(1);
|
||||||
|
} else {
|
||||||
|
const hasErrors = result as boolean;
|
||||||
|
if (!hasErrors) {
|
||||||
|
logger.info('[Shutdown] All resources closed successfully.');
|
||||||
|
} else {
|
||||||
|
logger.warn('[Shutdown] Graceful shutdown completed with errors.');
|
||||||
|
}
|
||||||
|
process.exit(hasErrors ? 1 : 0);
|
||||||
|
}
|
||||||
|
};
|
||||||
Reference in New Issue
Block a user