Compare commits

2 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 6e1298b825 | |
| | fc8e43437a | |

package-lock.json (4 changes, generated)

```diff
@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.17",
+  "version": "0.9.18",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.17",
+      "version": "0.9.18",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
```

```diff
@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.17",
+  "version": "0.9.18",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
```

```diff
@@ -65,6 +65,7 @@ vi.mock('./db/index.db', () => ({
   adminRepo: {
     logActivity: vi.fn(),
   },
+  withTransaction: vi.fn(),
 }));
 
 vi.mock('./queueService.server', () => ({
@@ -85,6 +86,7 @@ vi.mock('../utils/imageProcessor', () => ({
 import * as dbModule from './db/index.db';
 import { flyerQueue } from './queueService.server';
 import { createFlyerAndItems } from './db/flyer.db';
+import { withTransaction } from './db/index.db';
 import { generateFlyerIcon } from '../utils/imageProcessor';
 
 // Define a mock interface that closely resembles the actual Flyer type for testing purposes.
@@ -127,6 +129,9 @@ describe('AI Service (Server)', () => {
       text: '[]',
       candidates: [],
     });
+    vi.mocked(withTransaction).mockImplementation(async (callback: any) => {
+      return callback({}); // Mock client
+    });
   });
 
   describe('AiFlyerDataSchema', () => {
@@ -616,11 +621,8 @@ describe('AI Service (Server)', () => {
       );
 
       expect(mockAiClient.generateContent).toHaveBeenCalledTimes(1);
-      expect(result.store_name).toBe('Test Store');
-      expect(result.items).toHaveLength(2);
-      expect(result.items[1].price_display).toBe('');
-      expect(result.items[1].quantity).toBe('');
-      expect(result.items[1].category_name).toBe('Other/Miscellaneous');
+      // With normalization removed from this service, the result should match the raw AI response.
+      expect(result).toEqual(mockAiResponse);
     });
 
     it('should throw an error if the AI response is not a valid JSON object', async () => {
@@ -1220,6 +1222,29 @@ describe('AI Service (Server)', () => {
       );
     });
 
+    it('should log and re-throw the original error if the database transaction fails', async () => {
+      const body = { checksum: 'legacy-fail-checksum', extractedData: { store_name: 'Fail Store' } };
+      const dbError = new Error('DB transaction failed');
+
+      // Mock withTransaction to fail
+      vi.mocked(withTransaction).mockRejectedValue(dbError);
+
+      await expect(
+        aiServiceInstance.processLegacyFlyerUpload(
+          mockFile,
+          body,
+          mockProfile,
+          mockLoggerInstance,
+        ),
+      ).rejects.toThrow(dbError);
+
+      // Verify the service-level error logging
+      expect(mockLoggerInstance.error).toHaveBeenCalledWith(
+        { err: dbError, checksum: 'legacy-fail-checksum' },
+        'Legacy flyer upload database transaction failed.',
+      );
+    });
+
     it('should handle body as a string', async () => {
       const payload = { checksum: 'str-body', extractedData: { store_name: 'String Body' } };
       const body = JSON.stringify(payload);
@@ -1244,56 +1269,4 @@ describe('AI Service (Server)', () => {
     expect(aiServiceSingleton).toBeInstanceOf(AIService);
   });
 });
-
-describe('_normalizeExtractedItems (private method)', () => {
-  it('should correctly normalize items with null or undefined price_in_cents', () => {
-    const rawItems: RawFlyerItem[] = [
-      {
-        item: 'Valid Item',
-        price_display: '$1.99',
-        price_in_cents: 199,
-        quantity: '1',
-        category_name: 'Category A',
-        master_item_id: 1,
-      },
-      {
-        item: 'Item with Null Price',
-        price_display: null,
-        price_in_cents: null, // Test case for null
-        quantity: '1',
-        category_name: 'Category B',
-        master_item_id: 2,
-      },
-      {
-        item: 'Item with Undefined Price',
-        price_display: '$2.99',
-        price_in_cents: undefined, // Test case for undefined
-        quantity: '1',
-        category_name: 'Category C',
-        master_item_id: 3,
-      },
-      {
-        item: null, // Test null item name
-        price_display: undefined, // Test undefined display price
-        price_in_cents: 50,
-        quantity: null, // Test null quantity
-        category_name: undefined, // Test undefined category
-        master_item_id: null, // Test null master_item_id
-      },
-    ];
-
-    // Access the private method for testing
-    const normalized = (aiServiceInstance as any)._normalizeExtractedItems(rawItems);
-
-    expect(normalized).toHaveLength(4);
-    expect(normalized[0].price_in_cents).toBe(199);
-    expect(normalized[1].price_in_cents).toBe(null); // null should remain null
-    expect(normalized[2].price_in_cents).toBe(null); // undefined should become null
-    expect(normalized[3].item).toBe('Unknown Item');
-    expect(normalized[3].quantity).toBe('');
-    expect(normalized[3].category_name).toBe('Other/Miscellaneous');
-    expect(normalized[3].master_item_id).toBeUndefined(); // nullish coalescing to undefined
-  });
-});
-
 });
```

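For reviewers, the mocking pattern these test changes depend on, pulled out into a minimal self-contained sketch (Vitest, same module paths as in the diff; the actual service call in the test body is elided):

```ts
import { it, vi, beforeEach } from 'vitest';
import { withTransaction } from './db/index.db';

// vi.mock calls are hoisted above the imports, so the import above resolves to the mock.
vi.mock('./db/index.db', () => ({
  adminRepo: { logActivity: vi.fn() },
  withTransaction: vi.fn(),
}));

beforeEach(() => {
  // Happy path: execute the transactional callback immediately, passing a plain
  // object where the real helper would pass a transaction-scoped database client.
  vi.mocked(withTransaction).mockImplementation(async (callback: any) => callback({}));
});

it('surfaces transaction failures', async () => {
  // Failure path: a single test can override the stub to simulate a rolled-back transaction.
  vi.mocked(withTransaction).mockRejectedValue(new Error('DB transaction failed'));
  // ...call the service under test here and assert that the error is logged and re-thrown.
});
```
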
```diff
@@ -18,13 +18,14 @@ import type {
   FlyerInsert,
   Flyer,
 } from '../types';
-import { FlyerProcessingError } from './processingErrors';
+import { DatabaseError, FlyerProcessingError } from './processingErrors';
 import * as db from './db/index.db';
 import { flyerQueue } from './queueService.server';
 import type { Job } from 'bullmq';
 import { createFlyerAndItems } from './db/flyer.db';
 import { getBaseUrl } from '../utils/serverUtils';
 import { generateFlyerIcon } from '../utils/imageProcessor';
+import { AdminRepository } from './db/admin.db';
 import path from 'path';
 import { ValidationError } from './db/errors.db'; // Keep this import for ValidationError
 import {
@@ -538,12 +539,8 @@ export class AIService {
     userProfileAddress?: string,
     logger: Logger = this.logger,
   ): Promise<{
-    store_name: string | null;
-    valid_from: string | null;
-    valid_to: string | null;
-    store_address: string | null;
-    items: ExtractedFlyerItem[];
-  }> {
+    store_name: string | null; valid_from: string | null; valid_to: string | null; store_address: string | null; items: z.infer<typeof ExtractedFlyerItemSchema>[];
+  } & z.infer<typeof AiFlyerDataSchema>> {
     logger.info(
       `[extractCoreDataFromFlyerImage] Entering method with ${imagePaths.length} image(s).`,
     );
@@ -599,50 +596,22 @@ export class AIService {
         throw new Error('AI response did not contain a valid JSON object.');
       }
 
-      // Normalize the items to create a clean data structure.
-      logger.debug('[extractCoreDataFromFlyerImage] Normalizing extracted items.');
-      const normalizedItems = Array.isArray(extractedData.items)
-        ? this._normalizeExtractedItems(extractedData.items)
-        : [];
+      // The FlyerDataTransformer is now responsible for all normalization.
+      // We return the raw items as parsed from the AI response.
+      if (!Array.isArray(extractedData.items)) {
+        extractedData.items = [];
+      }
 
       logger.info(
         `[extractCoreDataFromFlyerImage] Successfully processed flyer data for store: ${extractedData.store_name}. Exiting method.`,
       );
-      return { ...extractedData, items: normalizedItems };
+      return extractedData;
     } catch (apiError) {
       logger.error({ err: apiError }, '[extractCoreDataFromFlyerImage] The entire process failed.');
       throw apiError;
     }
   }
 
-  /**
-   * Normalizes the raw items returned by the AI, ensuring fields are in the correct format.
-   * @param items An array of raw flyer items from the AI.
-   * @returns A normalized array of flyer items.
-   */
-  private _normalizeExtractedItems(items: RawFlyerItem[]): ExtractedFlyerItem[] {
-    return items.map((item: RawFlyerItem) => ({
-      ...item,
-      // Ensure 'item' is always a string, defaulting to 'Unknown Item' if null/undefined.
-      item:
-        item.item === null || item.item === undefined || String(item.item).trim() === ''
-          ? 'Unknown Item'
-          : String(item.item),
-      price_display:
-        item.price_display === null || item.price_display === undefined
-          ? ''
-          : String(item.price_display),
-      quantity: item.quantity === null || item.quantity === undefined ? '' : String(item.quantity),
-      category_name:
-        item.category_name === null || item.category_name === undefined
-          ? 'Other/Miscellaneous'
-          : String(item.category_name),
-      // Ensure undefined is converted to null to match the Zod schema.
-      price_in_cents: item.price_in_cents ?? null,
-      master_item_id: item.master_item_id ?? undefined,
-    }));
-  }
-
   /**
    * SERVER-SIDE FUNCTION
    * Extracts a specific piece of text from a cropped area of an image.
@@ -948,18 +917,28 @@ async enqueueFlyerProcessing(
       uploaded_by: userProfile?.user.user_id,
     };
 
-    const { flyer: newFlyer, items: newItems } = await createFlyerAndItems(flyerData, itemsForDb, logger);
-
-    logger.info(`Successfully processed legacy flyer: ${newFlyer.file_name} (ID: ${newFlyer.flyer_id}) with ${newItems.length} items.`);
-
-    await db.adminRepo.logActivity({
-      userId: userProfile?.user.user_id,
-      action: 'flyer_processed',
-      displayText: `Processed a new flyer for ${flyerData.store_name}.`,
-      details: { flyerId: newFlyer.flyer_id, storeName: flyerData.store_name },
-    }, logger);
-
-    return newFlyer;
+    return db.withTransaction(async (client) => {
+      const { flyer, items } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);
+
+      logger.info(
+        `Successfully processed legacy flyer: ${flyer.file_name} (ID: ${flyer.flyer_id}) with ${items.length} items.`,
+      );
+
+      const transactionalAdminRepo = new AdminRepository(client);
+      await transactionalAdminRepo.logActivity(
+        {
+          userId: userProfile?.user.user_id,
+          action: 'flyer_processed',
+          displayText: `Processed a new flyer for ${flyerData.store_name}.`,
+          details: { flyerId: flyer.flyer_id, storeName: flyerData.store_name },
+        },
+        logger,
+      );
+
+      return flyer;
+    }).catch((error) => {
+      logger.error({ err: error, checksum }, 'Legacy flyer upload database transaction failed.');
+      throw error;
+    });
   }
 }
```

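The service-side changes above route all writes through a `withTransaction` helper from `./db/index.db`, whose implementation is not part of this compare. As a point of reference, here is a minimal sketch of what such a helper typically looks like with node-postgres; the `pg` types and the `getPool()` accessor (the old `./db/connection.db` import) are assumptions, not code shown in the diff:

```ts
import type { PoolClient } from 'pg';
import { getPool } from './db/connection.db'; // assumed accessor for the shared pg Pool

export async function withTransaction<T>(
  callback: (client: PoolClient) => Promise<T>,
): Promise<T> {
  const client = await getPool().connect();
  try {
    await client.query('BEGIN');
    const result = await callback(client); // repo calls sharing this client join the transaction
    await client.query('COMMIT');
    return result;
  } catch (err) {
    await client.query('ROLLBACK'); // any error thrown by the callback aborts the whole unit of work
    throw err;
  } finally {
    client.release();
  }
}
```

With this shape, the `}).catch(...)` blocks added in the diff see the original error after the rollback, which is why the tests assert that the specific error instance is logged and re-thrown.
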
```diff
@@ -2,6 +2,7 @@
 import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
 import type { UserProfile } from '../types';
 import type * as jsonwebtoken from 'jsonwebtoken';
+import { DatabaseError } from './processingErrors';
 
 describe('AuthService', () => {
   let authService: typeof import('./authService').authService;
@@ -12,6 +13,10 @@ describe('AuthService', () => {
   let logger: typeof import('./logger.server').logger;
   let sendPasswordResetEmail: typeof import('./emailService.server').sendPasswordResetEmail;
   let UniqueConstraintError: typeof import('./db/errors.db').UniqueConstraintError;
+  let RepositoryError: typeof import('./db/errors.db').RepositoryError;
+  let withTransaction: typeof import('./db/index.db').withTransaction;
+  let transactionalUserRepoMocks: any;
+  let transactionalAdminRepoMocks: any;
 
   const reqLog = {}; // Mock request logger object
   const mockUser = {
@@ -37,10 +42,24 @@ describe('AuthService', () => {
   vi.stubEnv('JWT_SECRET', 'test-secret');
   vi.stubEnv('FRONTEND_URL', 'http://localhost:3000');
 
+  transactionalUserRepoMocks = {
+    updateUserPassword: vi.fn(),
+    deleteResetToken: vi.fn(),
+    createPasswordResetToken: vi.fn(),
+    createUser: vi.fn(),
+  };
+  transactionalAdminRepoMocks = {
+    logActivity: vi.fn(),
+  };
+
+  const MockTransactionalUserRepository = vi.fn(() => transactionalUserRepoMocks);
+  const MockTransactionalAdminRepository = vi.fn(() => transactionalAdminRepoMocks);
+
   // Mock all dependencies before dynamically importing the service
   // Core modules like bcrypt, jsonwebtoken, and crypto are now mocked globally in tests-setup-unit.ts
   vi.mock('bcrypt');
   vi.mock('./db/index.db', () => ({
+    withTransaction: vi.fn(),
     userRepo: {
       createUser: vi.fn(),
       saveRefreshToken: vi.fn(),
@@ -60,6 +79,12 @@ describe('AuthService', () => {
   vi.mock('./logger.server', () => ({
     logger: { info: vi.fn(), error: vi.fn(), warn: vi.fn(), debug: vi.fn() },
   }));
+  vi.mock('./db/user.db', () => ({
+    UserRepository: MockTransactionalUserRepository,
+  }));
+  vi.mock('./db/admin.db', () => ({
+    AdminRepository: MockTransactionalAdminRepository,
+  }));
   vi.mock('./emailService.server', () => ({
     sendPasswordResetEmail: vi.fn(),
   }));
@@ -74,8 +99,13 @@ describe('AuthService', () => {
     userRepo = dbModule.userRepo;
     adminRepo = dbModule.adminRepo;
     logger = (await import('./logger.server')).logger;
+    withTransaction = (await import('./db/index.db')).withTransaction;
+    vi.mocked(withTransaction).mockImplementation(async (callback: any) => {
+      return callback({}); // Mock client
+    });
     sendPasswordResetEmail = (await import('./emailService.server')).sendPasswordResetEmail;
     UniqueConstraintError = (await import('./db/errors.db')).UniqueConstraintError;
+    RepositoryError = (await import('./db/errors.db')).RepositoryError;
   });
 
   afterEach(() => {
@@ -85,7 +115,7 @@ describe('AuthService', () => {
   describe('registerUser', () => {
     it('should successfully register a new user', async () => {
       vi.mocked(bcrypt.hash).mockImplementation(async () => 'hashed-password');
-      vi.mocked(userRepo.createUser).mockResolvedValue(mockUserProfile);
+      vi.mocked(transactionalUserRepoMocks.createUser).mockResolvedValue(mockUserProfile);
 
       const result = await authService.registerUser(
         'test@example.com',
@@ -96,13 +126,14 @@ describe('AuthService', () => {
       );
 
       expect(bcrypt.hash).toHaveBeenCalledWith('password123', 10);
-      expect(userRepo.createUser).toHaveBeenCalledWith(
+      expect(transactionalUserRepoMocks.createUser).toHaveBeenCalledWith(
         'test@example.com',
         'hashed-password',
         { full_name: 'Test User', avatar_url: undefined },
         reqLog,
+        {},
       );
-      expect(adminRepo.logActivity).toHaveBeenCalledWith(
+      expect(transactionalAdminRepoMocks.logActivity).toHaveBeenCalledWith(
         expect.objectContaining({
           action: 'user_registered',
           userId: 'user-123',
@@ -115,25 +146,25 @@ describe('AuthService', () => {
     it('should throw UniqueConstraintError if email already exists', async () => {
       vi.mocked(bcrypt.hash).mockImplementation(async () => 'hashed-password');
       const error = new UniqueConstraintError('Email exists');
-      vi.mocked(userRepo.createUser).mockRejectedValue(error);
+      vi.mocked(withTransaction).mockRejectedValue(error);
 
       await expect(
         authService.registerUser('test@example.com', 'password123', undefined, undefined, reqLog),
       ).rejects.toThrow(UniqueConstraintError);
 
-      expect(logger.error).not.toHaveBeenCalled(); // Should not log expected unique constraint errors as system errors
+      expect(logger.error).toHaveBeenCalled();
     });
 
-    it('should log and throw other errors', async () => {
+    it('should log and re-throw generic errors on registration failure', async () => {
       vi.mocked(bcrypt.hash).mockImplementation(async () => 'hashed-password');
       const error = new Error('Database failed');
-      vi.mocked(userRepo.createUser).mockRejectedValue(error);
+      vi.mocked(withTransaction).mockRejectedValue(error);
 
       await expect(
         authService.registerUser('test@example.com', 'password123', undefined, undefined, reqLog),
-      ).rejects.toThrow('Database failed');
+      ).rejects.toThrow(error);
 
-      expect(logger.error).toHaveBeenCalled();
+      expect(logger.error).toHaveBeenCalledWith({ error, email: 'test@example.com' }, `User registration failed.`);
     });
   });
 
@@ -141,7 +172,7 @@ describe('AuthService', () => {
     it('should register user and return tokens', async () => {
       // Mock registerUser logic (since we can't easily spy on the same class instance method without prototype spying, we rely on the underlying calls)
       vi.mocked(bcrypt.hash).mockImplementation(async () => 'hashed-password');
-      vi.mocked(userRepo.createUser).mockResolvedValue(mockUserProfile);
+      vi.mocked(transactionalUserRepoMocks.createUser).mockResolvedValue(mockUserProfile);
       // FIX: The global mock for jsonwebtoken provides a `default` export.
       // The code under test (`authService`) uses `import jwt from 'jsonwebtoken'`, so it gets the default export.
       // We must mock `jwt.default.sign` to affect the code under test.
@@ -199,17 +230,13 @@ describe('AuthService', () => {
       expect(userRepo.saveRefreshToken).toHaveBeenCalledWith('user-123', 'token', reqLog);
     });
 
-    it('should log and throw error on failure', async () => {
+    it('should propagate the error from the repository on failure', async () => {
       const error = new Error('DB Error');
       vi.mocked(userRepo.saveRefreshToken).mockRejectedValue(error);
 
-      await expect(authService.saveRefreshToken('user-123', 'token', reqLog)).rejects.toThrow(
-        'DB Error',
-      );
-      expect(logger.error).toHaveBeenCalledWith(
-        expect.objectContaining({ error }),
-        expect.stringContaining('Failed to save refresh token'),
-      );
+      // The service method now directly propagates the error from the repo.
+      await expect(authService.saveRefreshToken('user-123', 'token', reqLog)).rejects.toThrow(error);
+      expect(logger.error).not.toHaveBeenCalled();
     });
   });
 
@@ -220,11 +247,12 @@ describe('AuthService', () => {
 
       const result = await authService.resetPassword('test@example.com', reqLog);
 
-      expect(userRepo.createPasswordResetToken).toHaveBeenCalledWith(
+      expect(transactionalUserRepoMocks.createPasswordResetToken).toHaveBeenCalledWith(
        'user-123',
        'hashed-token',
        expect.any(Date),
        reqLog,
+       {},
      );
      expect(sendPasswordResetEmail).toHaveBeenCalledWith(
        'test@example.com',
@@ -258,36 +286,50 @@ describe('AuthService', () => {
    });
 
   describe('updatePassword', () => {
-    it('should update password if token is valid', async () => {
+    it('should update password if token is valid and wrap operations in a transaction', async () => {
       const mockTokenRecord = {
         user_id: 'user-123',
         token_hash: 'hashed-token',
       };
       vi.mocked(userRepo.getValidResetTokens).mockResolvedValue([mockTokenRecord] as any);
-      vi.mocked(bcrypt.compare).mockImplementation(async () => true); // Match found
+      vi.mocked(bcrypt.compare).mockImplementation(async () => true);
       vi.mocked(bcrypt.hash).mockImplementation(async () => 'new-hashed-password');
 
       const result = await authService.updatePassword('valid-token', 'newPassword', reqLog);
 
-      expect(userRepo.updateUserPassword).toHaveBeenCalledWith(
+      expect(withTransaction).toHaveBeenCalledTimes(1);
+
+      expect(transactionalUserRepoMocks.updateUserPassword).toHaveBeenCalledWith(
         'user-123',
         'new-hashed-password',
         reqLog,
       );
-      expect(userRepo.deleteResetToken).toHaveBeenCalledWith('hashed-token', reqLog);
-      expect(adminRepo.logActivity).toHaveBeenCalledWith(
+      expect(transactionalUserRepoMocks.deleteResetToken).toHaveBeenCalledWith('hashed-token', reqLog);
+      expect(transactionalAdminRepoMocks.logActivity).toHaveBeenCalledWith(
         expect.objectContaining({ action: 'password_reset' }),
         reqLog,
       );
       expect(result).toBe(true);
     });
 
+    it('should log and re-throw an error if the transaction fails', async () => {
+      const mockTokenRecord = { user_id: 'user-123', token_hash: 'hashed-token' };
+      vi.mocked(userRepo.getValidResetTokens).mockResolvedValue([mockTokenRecord] as any);
+      vi.mocked(bcrypt.compare).mockImplementation(async () => true);
+      const dbError = new Error('Transaction failed');
+      vi.mocked(withTransaction).mockRejectedValue(dbError);
+
+      await expect(authService.updatePassword('valid-token', 'newPassword', reqLog)).rejects.toThrow(dbError);
+
+      expect(logger.error).toHaveBeenCalledWith({ error: dbError }, `An error occurred during password update.`);
+    });
+
     it('should return null if token is invalid or not found', async () => {
       vi.mocked(userRepo.getValidResetTokens).mockResolvedValue([]);
 
       const result = await authService.updatePassword('invalid-token', 'newPassword', reqLog);
 
-      expect(userRepo.updateUserPassword).not.toHaveBeenCalled();
+      expect(transactionalUserRepoMocks.updateUserPassword).not.toHaveBeenCalled();
       expect(result).toBeNull();
     });
   });
@@ -309,6 +351,29 @@ describe('AuthService', () => {
 
       expect(result).toBeNull();
     });
 
+    it('should throw a DatabaseError if finding the user fails with a generic error', async () => {
+      const dbError = new Error('DB connection failed');
+      vi.mocked(userRepo.findUserByRefreshToken).mockRejectedValue(dbError);
+
+      await expect(authService.getUserByRefreshToken('any-token', reqLog)).rejects.toThrow(DatabaseError);
+      expect(logger.error).toHaveBeenCalledWith(
+        { error: dbError, refreshToken: 'any-token' },
+        'An unexpected error occurred while fetching user by refresh token.',
+      );
+    });
+
+    it('should re-throw a RepositoryError if finding the user fails with a known error', async () => {
+      const repoError = new RepositoryError('Some repo error', 500);
+      vi.mocked(userRepo.findUserByRefreshToken).mockRejectedValue(repoError);
+
+      await expect(authService.getUserByRefreshToken('any-token', reqLog)).rejects.toThrow(repoError);
+      // The original error is re-thrown, so the generic wrapper log should not be called.
+      expect(logger.error).not.toHaveBeenCalledWith(
+        expect.any(Object),
+        'An unexpected error occurred while fetching user by refresh token.',
+      );
+    });
   });
 
   describe('logout', () => {
@@ -317,12 +382,12 @@ describe('AuthService', () => {
       expect(userRepo.deleteRefreshToken).toHaveBeenCalledWith('token', reqLog);
     });
 
-    it('should log and throw on error', async () => {
+    it('should propagate the error from the repository on failure', async () => {
       const error = new Error('DB Error');
       vi.mocked(userRepo.deleteRefreshToken).mockRejectedValue(error);
 
-      await expect(authService.logout('token', reqLog)).rejects.toThrow('DB Error');
-      expect(logger.error).toHaveBeenCalled();
+      await expect(authService.logout('token', reqLog)).rejects.toThrow(error);
+      expect(logger.error).not.toHaveBeenCalled();
     });
   });
 
@@ -345,5 +410,13 @@ describe('AuthService', () => {
       const result = await authService.refreshAccessToken('invalid-token', reqLog);
       expect(result).toBeNull();
     });
+
+    it('should propagate errors from getUserByRefreshToken', async () => {
+      const dbError = new DatabaseError('Underlying DB call failed');
+      // We mock the service's own method since refreshAccessToken calls it directly.
+      vi.spyOn(authService, 'getUserByRefreshToken').mockRejectedValue(dbError);
+
+      await expect(authService.refreshAccessToken('any-token', reqLog)).rejects.toThrow(dbError);
+    });
   });
 });
```

```diff
@@ -2,9 +2,9 @@
 import * as bcrypt from 'bcrypt';
 import jwt from 'jsonwebtoken';
 import crypto from 'crypto';
-import { userRepo, adminRepo } from './db/index.db';
-import { UniqueConstraintError } from './db/errors.db';
-import { getPool } from './db/connection.db';
+import { DatabaseError, FlyerProcessingError } from './processingErrors';
+import { withTransaction, userRepo } from './db/index.db';
+import { RepositoryError, ValidationError } from './db/errors.db';
 import { logger } from './logger.server';
 import { sendPasswordResetEmail } from './emailService.server';
 import type { UserProfile } from '../types';
@@ -20,44 +20,45 @@ class AuthService {
     avatarUrl: string | undefined,
     reqLog: any,
   ) {
-    try {
+    const strength = validatePasswordStrength(password);
+    if (!strength.isValid) {
+      throw new ValidationError([], strength.feedback);
+    }
+
+    // Wrap user creation and activity logging in a transaction for atomicity.
+    // The `createUser` method is now designed to be composed within other transactions.
+    return withTransaction(async (client) => {
+      const transactionalUserRepo = new (await import('./db/user.db')).UserRepository(client);
+      const adminRepo = new (await import('./db/admin.db')).AdminRepository(client);
+
       const saltRounds = 10;
       const hashedPassword = await bcrypt.hash(password, saltRounds);
       logger.info(`Hashing password for new user: ${email}`);
 
-      // The createUser method in UserRepository now handles its own transaction.
-      const newUser = await userRepo.createUser(
+      const newUser = await transactionalUserRepo.createUser(
         email,
         hashedPassword,
         { full_name: fullName, avatar_url: avatarUrl },
         reqLog,
+        client, // Pass the transactional client
       );
 
-      const userEmail = newUser.user.email;
-      const userId = newUser.user.user_id;
-      logger.info(`Successfully created new user in DB: ${userEmail} (ID: ${userId})`);
-
-      // Use the new standardized logging function
+      logger.info(`Successfully created new user in DB: ${newUser.user.email} (ID: ${newUser.user.user_id})`);
+
       await adminRepo.logActivity(
-        {
-          userId: newUser.user.user_id,
-          action: 'user_registered',
-          displayText: `${userEmail} has registered.`,
-          icon: 'user-plus',
-        },
+        { userId: newUser.user.user_id, action: 'user_registered', displayText: `${email} has registered.`, icon: 'user-plus' },
         reqLog,
       );
 
       return newUser;
-    } catch (error: unknown) {
-      if (error instanceof UniqueConstraintError) {
-        // If the email is a duplicate, return a 409 Conflict status.
-        throw error;
-      }
-      logger.error({ error }, `User registration route failed for email: ${email}.`);
-      // Pass the error to the centralized handler
+    }).catch((error: unknown) => {
+      // The repository layer already logs and throws specific, typed errors.
+      // We only need to catch, log the high-level operation failure, and re-throw.
+      logger.error({ error, email }, `User registration failed.`);
+      // Re-throw the original, specific error (e.g., UniqueConstraintError)
+      // so the route handler can generate a precise HTTP response (e.g., 409 Conflict).
       throw error;
-    }
+    });
   }
 
   async registerAndLoginUser(
@@ -91,15 +92,9 @@ class AuthService {
   }
 
   async saveRefreshToken(userId: string, refreshToken: string, reqLog: any) {
-    try {
-      await userRepo.saveRefreshToken(userId, refreshToken, reqLog);
-    } catch (tokenErr) {
-      logger.error(
-        { error: tokenErr },
-        `Failed to save refresh token during login for user: ${userId}`,
-      );
-      throw tokenErr;
-    }
+    // The repository method `saveRefreshToken` already includes robust error handling
+    // and logging via `handleDbError`. No need for a redundant try/catch block here.
+    await userRepo.saveRefreshToken(userId, refreshToken, reqLog);
   }
 
   async handleSuccessfulLogin(userProfile: UserProfile, reqLog: any) {
@@ -124,7 +119,11 @@ class AuthService {
       const tokenHash = await bcrypt.hash(token, saltRounds);
       const expiresAt = new Date(Date.now() + 3600000); // 1 hour
 
-      await userRepo.createPasswordResetToken(user.user_id, tokenHash, expiresAt, reqLog);
+      // Wrap the token creation in a transaction to ensure atomicity of the DELETE and INSERT operations.
+      await withTransaction(async (client) => {
+        const transactionalUserRepo = new (await import('./db/user.db')).UserRepository(client);
+        await transactionalUserRepo.createPasswordResetToken(user.user_id, tokenHash, expiresAt, reqLog, client);
+      });
 
       const resetLink = `${process.env.FRONTEND_URL}/reset-password/${token}`;
 
@@ -139,13 +138,25 @@ class AuthService {
 
       return token;
     } catch (error) {
-      logger.error({ error }, `An error occurred during /forgot-password for email: ${email}`);
+      logger.error({ error, email }, `An error occurred during /forgot-password for email: ${email}`);
+      // Re-throw the original error, which might be a specific RepositoryError
+      // or a generic DatabaseError from the underlying layers.
       throw error;
     }
   }
 
   async updatePassword(token: string, newPassword: string, reqLog: any) {
-    try {
+    const strength = validatePasswordStrength(newPassword);
+    if (!strength.isValid) {
+      throw new ValidationError([], strength.feedback);
+    }
+
+    // Wrap all database operations in a transaction to ensure atomicity.
+    return withTransaction(async (client) => {
+      const transactionalUserRepo = new (await import('./db/user.db')).UserRepository(client);
+      const adminRepo = new (await import('./db/admin.db')).AdminRepository(client);
+
+      // This read can happen outside the transaction if we use the non-transactional repo.
       const validTokens = await userRepo.getValidResetTokens(reqLog);
       let tokenRecord;
       for (const record of validTokens) {
@@ -157,32 +168,25 @@ class AuthService {
       }
 
       if (!tokenRecord) {
-        return null;
+        return null; // Token is invalid or expired, not an error.
       }
 
       const saltRounds = 10;
       const hashedPassword = await bcrypt.hash(newPassword, saltRounds);
 
-      await userRepo.updateUserPassword(tokenRecord.user_id, hashedPassword, reqLog);
-      await userRepo.deleteResetToken(tokenRecord.token_hash, reqLog);
-      // Log this security event after a successful password reset.
+      // These three writes are now atomic.
+      await transactionalUserRepo.updateUserPassword(tokenRecord.user_id, hashedPassword, reqLog);
+      await transactionalUserRepo.deleteResetToken(tokenRecord.token_hash, reqLog);
       await adminRepo.logActivity(
-        {
-          userId: tokenRecord.user_id,
-          action: 'password_reset',
-          displayText: `User ID ${tokenRecord.user_id} has reset their password.`,
-          icon: 'key',
-          details: { source_ip: null },
-        },
+        { userId: tokenRecord.user_id, action: 'password_reset', displayText: `User ID ${tokenRecord.user_id} has reset their password.`, icon: 'key' },
         reqLog,
       );
 
       return true;
-    } catch (error) {
-      logger.error({ error }, `An error occurred during password reset.`);
+    }).catch((error) => {
+      logger.error({ error }, `An error occurred during password update.`);
       throw error;
-    }
+    });
   }
 
   async getUserByRefreshToken(refreshToken: string, reqLog: any) {
@@ -194,18 +198,22 @@ class AuthService {
       const userProfile = await userRepo.findUserProfileById(basicUser.user_id, reqLog);
       return userProfile;
     } catch (error) {
-      logger.error({ error }, 'An error occurred during /refresh-token.');
-      throw error;
+      // Re-throw known repository errors to allow for specific handling upstream.
+      if (error instanceof RepositoryError) {
+        throw error;
+      }
+      // For unknown errors, log them and wrap them in a generic DatabaseError.
+      const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred.';
+      logger.error({ error, refreshToken }, 'An unexpected error occurred while fetching user by refresh token.');
+      throw new DatabaseError(errorMessage);
     }
   }
 
   async logout(refreshToken: string, reqLog: any) {
-    try {
-      await userRepo.deleteRefreshToken(refreshToken, reqLog);
-    } catch (err: any) {
-      logger.error({ error: err }, 'Failed to delete refresh token from DB during logout.');
-      throw err;
-    }
+    // The repository method `deleteRefreshToken` now includes robust error handling
+    // and logging via `handleDbError`. No need for a redundant try/catch block here.
+    // The original implementation also swallowed errors, which is now fixed.
+    await userRepo.deleteRefreshToken(refreshToken, reqLog);
   }
 
   async refreshAccessToken(refreshToken: string, reqLog: any): Promise<{ accessToken: string } | null> {
```

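The authService refactor above constructs repositories with the transactional client (`new (await import('./db/user.db')).UserRepository(client)`). Those constructors are not shown in this compare; a minimal sketch of the repository shape this pattern assumes follows, with the query text, table, and column names being purely illustrative:

```ts
import type { Pool, PoolClient } from 'pg';

export class AdminRepository {
  // Accept either the shared pool or a transaction-scoped client so the same
  // repository code can run standalone or inside withTransaction.
  constructor(private readonly db: Pool | PoolClient) {}

  async logActivity(
    entry: { userId?: string; action: string; displayText: string; icon?: string; details?: unknown },
    _reqLog: unknown,
  ): Promise<void> {
    // When a PoolClient from withTransaction is passed in, this INSERT commits or
    // rolls back together with the caller's other writes.
    await this.db.query(
      'INSERT INTO activity_log (user_id, action, display_text, details) VALUES ($1, $2, $3, $4)', // illustrative schema
      [entry.userId ?? null, entry.action, entry.displayText, JSON.stringify(entry.details ?? {})],
    );
  }
}
```
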
```diff
@@ -2,7 +2,7 @@
 import { describe, it, expect, vi, beforeEach } from 'vitest';
 import type { Logger } from 'pino';
 import {
-  DatabaseError,
+  RepositoryError,
   UniqueConstraintError,
   ForeignKeyConstraintError,
   NotFoundError,
@@ -18,17 +18,17 @@ import {
 vi.mock('./logger.server');
 
 describe('Custom Database and Application Errors', () => {
-  describe('DatabaseError', () => {
+  describe('RepositoryError', () => {
     it('should create a generic database error with a message and status', () => {
       const message = 'Generic DB Error';
       const status = 500;
-      const error = new DatabaseError(message, status);
+      const error = new RepositoryError(message, status);
 
       expect(error).toBeInstanceOf(Error);
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error.message).toBe(message);
       expect(error.status).toBe(status);
-      expect(error.name).toBe('DatabaseError');
+      expect(error.name).toBe('RepositoryError');
     });
   });
 
@@ -37,7 +37,7 @@ describe('Custom Database and Application Errors', () => {
       const error = new UniqueConstraintError();
 
       expect(error).toBeInstanceOf(Error);
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error).toBeInstanceOf(UniqueConstraintError);
       expect(error.message).toBe('The record already exists.');
       expect(error.status).toBe(409);
@@ -56,7 +56,7 @@ describe('Custom Database and Application Errors', () => {
       const error = new ForeignKeyConstraintError();
 
       expect(error).toBeInstanceOf(Error);
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error).toBeInstanceOf(ForeignKeyConstraintError);
       expect(error.message).toBe('The referenced record does not exist.');
       expect(error.status).toBe(400);
@@ -75,7 +75,7 @@ describe('Custom Database and Application Errors', () => {
       const error = new NotFoundError();
 
       expect(error).toBeInstanceOf(Error);
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error).toBeInstanceOf(NotFoundError);
       expect(error.message).toBe('The requested resource was not found.');
       expect(error.status).toBe(404);
@@ -95,7 +95,7 @@ describe('Custom Database and Application Errors', () => {
       const error = new ValidationError(validationIssues);
 
       expect(error).toBeInstanceOf(Error);
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error).toBeInstanceOf(ValidationError);
       expect(error.message).toBe('The request data is invalid.');
       expect(error.status).toBe(400);
@@ -126,7 +126,7 @@ describe('Custom Database and Application Errors', () => {
   describe('NotNullConstraintError', () => {
     it('should create an error with a default message and status 400', () => {
       const error = new NotNullConstraintError();
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error.message).toBe('A required field was left null.');
       expect(error.status).toBe(400);
       expect(error.name).toBe('NotNullConstraintError');
@@ -142,7 +142,7 @@ describe('Custom Database and Application Errors', () => {
   describe('CheckConstraintError', () => {
     it('should create an error with a default message and status 400', () => {
       const error = new CheckConstraintError();
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error.message).toBe('A check constraint was violated.');
       expect(error.status).toBe(400);
       expect(error.name).toBe('CheckConstraintError');
@@ -158,7 +158,7 @@ describe('Custom Database and Application Errors', () => {
   describe('InvalidTextRepresentationError', () => {
     it('should create an error with a default message and status 400', () => {
       const error = new InvalidTextRepresentationError();
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error.message).toBe('A value has an invalid format for its data type.');
       expect(error.status).toBe(400);
       expect(error.name).toBe('InvalidTextRepresentationError');
@@ -174,7 +174,7 @@ describe('Custom Database and Application Errors', () => {
   describe('NumericValueOutOfRangeError', () => {
     it('should create an error with a default message and status 400', () => {
       const error = new NumericValueOutOfRangeError();
-      expect(error).toBeInstanceOf(DatabaseError);
+      expect(error).toBeInstanceOf(RepositoryError);
       expect(error.message).toBe('A numeric value is out of the allowed range.');
       expect(error.status).toBe(400);
       expect(error.name).toBe('NumericValueOutOfRangeError');
@@ -196,7 +196,7 @@ describe('Custom Database and Application Errors', () => {
     vi.clearAllMocks();
   });
 
-  it('should re-throw existing DatabaseError instances without logging', () => {
+  it('should re-throw existing RepositoryError instances without logging', () => {
     const notFound = new NotFoundError('Test not found');
     expect(() => handleDbError(notFound, mockLogger, 'msg', {})).toThrow(notFound);
     expect(mockLogger.error).not.toHaveBeenCalled();
```

@@ -1,10 +1,11 @@
 // src/services/db/errors.db.ts
 import type { Logger } from 'pino';
+import { DatabaseError as ProcessingDatabaseError } from '../processingErrors';

 /**
- * Base class for custom database errors to ensure they have a status property.
+ * Base class for custom repository-level errors to ensure they have a status property.
  */
-export class DatabaseError extends Error {
+export class RepositoryError extends Error {
 public status: number;

 constructor(message: string, status: number) {
@@ -20,7 +21,7 @@ export class DatabaseError extends Error {
 * Thrown when a unique constraint is violated (e.g., trying to register an existing email).
 * Corresponds to PostgreSQL error code '23505'.
 */
-export class UniqueConstraintError extends DatabaseError {
+export class UniqueConstraintError extends RepositoryError {
 constructor(message = 'The record already exists.') {
 super(message, 409); // 409 Conflict
 }
@@ -30,7 +31,7 @@ export class UniqueConstraintError extends DatabaseError {
 * Thrown when a foreign key constraint is violated (e.g., trying to reference a non-existent record).
 * Corresponds to PostgreSQL error code '23503'.
 */
-export class ForeignKeyConstraintError extends DatabaseError {
+export class ForeignKeyConstraintError extends RepositoryError {
 constructor(message = 'The referenced record does not exist.') {
 super(message, 400); // 400 Bad Request
 }
@@ -40,7 +41,7 @@ export class ForeignKeyConstraintError extends DatabaseError {
 * Thrown when a 'not null' constraint is violated.
 * Corresponds to PostgreSQL error code '23502'.
 */
-export class NotNullConstraintError extends DatabaseError {
+export class NotNullConstraintError extends RepositoryError {
 constructor(message = 'A required field was left null.') {
 super(message, 400); // 400 Bad Request
 }
@@ -50,7 +51,7 @@ export class NotNullConstraintError extends DatabaseError {
 * Thrown when a 'check' constraint is violated.
 * Corresponds to PostgreSQL error code '23514'.
 */
-export class CheckConstraintError extends DatabaseError {
+export class CheckConstraintError extends RepositoryError {
 constructor(message = 'A check constraint was violated.') {
 super(message, 400); // 400 Bad Request
 }
@@ -60,7 +61,7 @@ export class CheckConstraintError extends DatabaseError {
 * Thrown when a value has an invalid text representation for its data type (e.g., 'abc' for an integer).
 * Corresponds to PostgreSQL error code '22P02'.
 */
-export class InvalidTextRepresentationError extends DatabaseError {
+export class InvalidTextRepresentationError extends RepositoryError {
 constructor(message = 'A value has an invalid format for its data type.') {
 super(message, 400); // 400 Bad Request
 }
@@ -70,7 +71,7 @@ export class InvalidTextRepresentationError extends DatabaseError {
 * Thrown when a numeric value is out of range for its data type (e.g., too large for an integer).
 * Corresponds to PostgreSQL error code '22003'.
 */
-export class NumericValueOutOfRangeError extends DatabaseError {
+export class NumericValueOutOfRangeError extends RepositoryError {
 constructor(message = 'A numeric value is out of the allowed range.') {
 super(message, 400); // 400 Bad Request
 }
@@ -79,7 +80,7 @@ export class NumericValueOutOfRangeError extends DatabaseError {
 /**
 * Thrown when a specific record is not found in the database.
 */
-export class NotFoundError extends DatabaseError {
+export class NotFoundError extends RepositoryError {
 constructor(message = 'The requested resource was not found.') {
 super(message, 404); // 404 Not Found
 }
@@ -97,7 +98,7 @@ export interface ValidationIssue {
 /**
 * Thrown when request validation fails (e.g., missing body fields or invalid params).
 */
-export class ValidationError extends DatabaseError {
+export class ValidationError extends RepositoryError {
 public validationErrors: ValidationIssue[];

 constructor(errors: ValidationIssue[], message = 'The request data is invalid.') {
@@ -138,7 +139,7 @@ export function handleDbError(
 options: HandleDbErrorOptions = {},
 ): never {
 // If it's already a known domain error (like NotFoundError thrown manually), rethrow it.
-if (error instanceof DatabaseError) {
+if (error instanceof RepositoryError) {
 throw error;
 }

@@ -157,7 +158,7 @@ export function handleDbError(
 }

 // Fallback generic error
-throw new Error(
-options.defaultMessage || `Failed to perform operation on ${options.entityName || 'database'}.`,
-);
+// Use the consistent DatabaseError from the processing errors module for the fallback.
+const errorMessage = options.defaultMessage || `Failed to perform operation on ${options.entityName || 'database'}.`;
+throw new ProcessingDatabaseError(errorMessage);
 }
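
For context, a minimal sketch of how the updated helper is intended to be used in a repository method — assuming the signatures shown in this diff; the method and its query are illustrative, not code from this project:

```ts
// Sketch only: `findStoreName` is a hypothetical repository method.
import type { Logger } from 'pino';
import type { Pool } from 'pg';
import { NotFoundError, handleDbError } from './errors.db';

export async function findStoreName(db: Pool, storeId: number, logger: Logger): Promise<string> {
  try {
    const result = await db.query('SELECT name FROM public.stores WHERE store_id = $1', [storeId]);
    if (result.rows.length === 0) {
      // Domain errors extend RepositoryError, so handleDbError rethrows them untouched.
      throw new NotFoundError(`Store ${storeId} was not found.`);
    }
    return result.rows[0].name;
  } catch (error) {
    // Known PostgreSQL error codes map to RepositoryError subclasses (409/400/404);
    // anything unrecognized now surfaces as the processing-level DatabaseError fallback.
    handleDbError(error, logger, 'Error fetching store name', { storeId }, {
      defaultMessage: 'Failed to fetch store from database.',
    });
  }
}
```
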
@@ -350,8 +350,8 @@ describe('Flyer DB Service', () => {
 });

 describe('createFlyerAndItems', () => {
-it('should use withTransaction to create a flyer and items', async () => {
-const flyerData: FlyerInsert = {
+it('should execute find/create store, insert flyer, and insert items using the provided client', async () => {
+const flyerData: FlyerInsert = { // This was a duplicate, fixed.
 file_name: 'transact.jpg',
 store_name: 'Transaction Store',
 } as FlyerInsert;
@@ -374,41 +374,31 @@ describe('Flyer DB Service', () => {
 }),
 ];

-// Mock the withTransaction to execute the callback with a mock client
-vi.mocked(withTransaction).mockImplementation(async (callback) => {
-const mockClient = { query: vi.fn() };
-// Mock the sequence of 4 calls within the transaction
-mockClient.query
-// 1. findOrCreateStore: INSERT ... ON CONFLICT
-.mockResolvedValueOnce({ rows: [], rowCount: 0 })
-// 2. findOrCreateStore: SELECT store_id
-.mockResolvedValueOnce({ rows: [{ store_id: 1 }] })
-// 3. insertFlyer
-.mockResolvedValueOnce({ rows: [mockFlyer] })
-// 4. insertFlyerItems
-.mockResolvedValueOnce({ rows: mockItems });
-return callback(mockClient as unknown as PoolClient);
-});
+// Mock the sequence of 4 calls on the client
+const mockClient = { query: vi.fn() };
+mockClient.query
+// 1. findOrCreateStore: INSERT ... ON CONFLICT
+.mockResolvedValueOnce({ rows: [], rowCount: 0 })
+// 2. findOrCreateStore: SELECT store_id
+.mockResolvedValueOnce({ rows: [{ store_id: 1 }] })
+// 3. insertFlyer
+.mockResolvedValueOnce({ rows: [mockFlyer] })
+// 4. insertFlyerItems
+.mockResolvedValueOnce({ rows: mockItems });

-const result = await createFlyerAndItems(flyerData, itemsData, mockLogger);
+const result = await createFlyerAndItems(
+flyerData,
+itemsData,
+mockLogger,
+mockClient as unknown as PoolClient,
+);

 expect(result).toEqual({
 flyer: mockFlyer,
 items: mockItems,
 });
-expect(withTransaction).toHaveBeenCalledTimes(1);

 // Verify the individual functions were called with the client
-const callback = (vi.mocked(withTransaction) as Mock).mock.calls[0][0];
-const mockClient = { query: vi.fn() };
-// Set up the same mock sequence for verification
-mockClient.query
-.mockResolvedValueOnce({ rows: [], rowCount: 0 }) // findOrCreateStore 1
-.mockResolvedValueOnce({ rows: [{ store_id: 1 }] }) // findOrCreateStore 2
-.mockResolvedValueOnce({ rows: [mockFlyer] }) // insertFlyer
-.mockResolvedValueOnce({ rows: mockItems });
-await callback(mockClient as unknown as PoolClient);

 // findOrCreateStore assertions
 expect(mockClient.query).toHaveBeenCalledWith(
 'INSERT INTO public.stores (name) VALUES ($1) ON CONFLICT (name) DO NOTHING',
@@ -430,28 +420,26 @@ describe('Flyer DB Service', () => {
 );
 });

-it('should log and re-throw an error if the transaction fails', async () => {
+it('should propagate an error if any step fails', async () => {
 const flyerData: FlyerInsert = {
 file_name: 'fail.jpg',
 store_name: 'Fail Store',
 } as FlyerInsert;
 const itemsData: FlyerItemInsert[] = [{ item: 'Failing Item' } as FlyerItemInsert];
-const transactionError = new Error('Underlying transaction failed');
+const dbError = new Error('Underlying DB call failed');

-// Mock withTransaction to reject directly
-vi.mocked(withTransaction).mockRejectedValue(transactionError);
+// Mock the client to fail on the insertFlyer step
+const mockClient = { query: vi.fn() };
+mockClient.query
+.mockResolvedValueOnce({ rows: [], rowCount: 0 })
+.mockResolvedValueOnce({ rows: [{ store_id: 1 }] })
+.mockRejectedValueOnce(dbError); // insertFlyer fails

-// Expect the createFlyerAndItems function to reject with the same error
-await expect(createFlyerAndItems(flyerData, itemsData, mockLogger)).rejects.toThrow(
-transactionError,
-);
-// Verify that the error was logged before being re-thrown
-expect(mockLogger.error).toHaveBeenCalledWith(
-{ err: transactionError },
-'Database transaction error in createFlyerAndItems',
-);
-expect(withTransaction).toHaveBeenCalledTimes(1);
+// The calling service's withTransaction would catch this.
+// Here, we just expect it to be thrown.
+await expect(
+createFlyerAndItems(flyerData, itemsData, mockLogger, mockClient as unknown as PoolClient),
+).rejects.toThrow(dbError);
 });
 });

@@ -379,27 +379,23 @@ export async function createFlyerAndItems(
 flyerData: FlyerInsert,
 itemsForDb: FlyerItemInsert[],
 logger: Logger,
+client: PoolClient,
 ) {
-try {
-return await withTransaction(async (client) => {
+// The calling service is now responsible for managing the transaction.
+// This function assumes it is being run within a transaction via the provided client.
 const flyerRepo = new FlyerRepository(client);

 // 1. Find or create the store to get the store_id
 const storeId = await flyerRepo.findOrCreateStore(flyerData.store_name, logger);

 // 2. Prepare the data for the flyer table, replacing store_name with store_id
 const flyerDbData: FlyerDbInsert = { ...flyerData, store_id: storeId };

 // 3. Insert the flyer record
 const newFlyer = await flyerRepo.insertFlyer(flyerDbData, logger);

 // 4. Insert the associated flyer items
 const newItems = await flyerRepo.insertFlyerItems(newFlyer.flyer_id, itemsForDb, logger);

 return { flyer: newFlyer, items: newItems };
-});
-} catch (error) {
-logger.error({ err: error }, 'Database transaction error in createFlyerAndItems');
-throw error; // Re-throw the error to be handled by the calling service.
-}
 }
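
The transaction boundary now lives with the caller. A sketch of the calling side this change implies — hypothetical service code, assuming `withTransaction` from `./index.db` hands its callback a `PoolClient`; the type import path is illustrative:

```ts
import type { Logger } from 'pino';
import { withTransaction } from './index.db';
import { createFlyerAndItems } from './flyer.db';
import type { FlyerInsert, FlyerItemInsert } from '../../types/flyer'; // illustrative path

export async function saveFlyerWithItems(
  flyerData: FlyerInsert,
  items: FlyerItemInsert[],
  logger: Logger,
) {
  // The service owns BEGIN/COMMIT/ROLLBACK; the helper only runs its queries
  // against the client it is handed, so a failure in any step rolls back all of them.
  return withTransaction(async (client) => createFlyerAndItems(flyerData, items, logger, client));
}
```
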
@@ -74,9 +74,11 @@ export class UserRepository {
 passwordHash: string | null,
 profileData: { full_name?: string; avatar_url?: string },
 logger: Logger,
+// Allow passing a transactional client
+client: Pool | PoolClient = this.db,
 ): Promise<UserProfile> {
-return withTransaction(async (client: PoolClient) => {
-logger.debug(`[DB createUser] Starting transaction for email: ${email}`);
+try {
+logger.debug(`[DB createUser] Starting user creation for email: ${email}`);

 // Use 'set_config' to safely pass parameters to a configuration variable.
 await client.query("SELECT set_config('my_app.user_metadata', $1, true)", [
@@ -126,18 +128,18 @@ export class UserRepository {

 logger.debug({ user: fullUserProfile }, `[DB createUser] Fetched full profile for new user:`);
 return fullUserProfile;
-}).catch((error) => {
+} catch (error) {
 // Specific handling for unique constraint violation on user creation
 if (error instanceof Error && 'code' in error && (error as any).code === '23505') {
 logger.warn(`Attempted to create a user with an existing email: ${email}`);
 throw new UniqueConstraintError('A user with this email address already exists.');
 }
 // Fallback to generic handler for all other errors
-handleDbError(error, logger, 'Error during createUser transaction', { email }, {
+handleDbError(error, logger, 'Error during createUser', { email }, {
 uniqueMessage: 'A user with this email address already exists.',
 defaultMessage: 'Failed to create user in database.',
 });
-});
+}
 }

 /**
@@ -464,7 +466,9 @@ export class UserRepository {
 refreshToken,
 ]);
 } catch (error) {
-logger.error({ err: error }, 'Database error in deleteRefreshToken');
+handleDbError(error, logger, 'Database error in deleteRefreshToken', {}, {
+defaultMessage: 'Failed to delete refresh token.',
+});
 }
 }

@@ -475,10 +479,11 @@ export class UserRepository {
 * @param expiresAt The timestamp when the token expires.
 */
 // prettier-ignore
-async createPasswordResetToken(userId: string, tokenHash: string, expiresAt: Date, logger: Logger): Promise<void> {
-const client = this.db as PoolClient;
+async createPasswordResetToken(userId: string, tokenHash: string, expiresAt: Date, logger: Logger, client: PoolClient): Promise<void> {
 try {
+// First, remove any existing tokens for the user to ensure they can only have one active reset request.
 await client.query('DELETE FROM public.password_reset_tokens WHERE user_id = $1', [userId]);
+// Then, insert the new token.
 await client.query(
 'INSERT INTO public.password_reset_tokens (user_id, token_hash, expires_at) VALUES ($1, $2, $3)',
 [userId, tokenHash, expiresAt]
@@ -519,10 +524,9 @@ export class UserRepository {
 try {
 await this.db.query('DELETE FROM public.password_reset_tokens WHERE token_hash = $1', [tokenHash]);
 } catch (error) {
-logger.error(
-{ err: error, tokenHash },
-'Database error in deleteResetToken',
-);
+handleDbError(error, logger, 'Database error in deleteResetToken', { tokenHash }, {
+defaultMessage: 'Failed to delete password reset token.',
+});
 }
 }
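
Because `createPasswordResetToken` now requires a transactional client, the caller is expected to open the transaction. A hedged sketch of such a caller — the `userRepo` import and the token-hashing details are assumptions, not code from this repository:

```ts
import crypto from 'node:crypto';
import type { Logger } from 'pino';
import { withTransaction, userRepo } from './db/index.db'; // assumed exports

export async function issuePasswordResetToken(userId: string, logger: Logger): Promise<string> {
  const token = crypto.randomBytes(32).toString('hex');
  const tokenHash = crypto.createHash('sha256').update(token).digest('hex');
  const expiresAt = new Date(Date.now() + 60 * 60 * 1000); // 1 hour

  // The DELETE of old tokens and the INSERT of the new one run on the same client,
  // so either both take effect or neither does.
  await withTransaction(async (client) => {
    await userRepo.createPasswordResetToken(userId, tokenHash, expiresAt, logger, client);
  });

  return token;
}
```
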
0 src/services/flyer.db.ts Normal file
@@ -1,5 +1,5 @@
 // src/services/flyerAiProcessor.server.test.ts
-import { describe, it, expect, vi, beforeEach } from 'vitest';
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
 import { FlyerAiProcessor } from './flyerAiProcessor.server';
 import { AiDataValidationError } from './processingErrors';
 import { logger } from './logger.server'; // Keep this import for the logger instance
@@ -43,6 +43,11 @@ describe('FlyerAiProcessor', () => {
 service = new FlyerAiProcessor(mockAiService, mockPersonalizationRepo);
 });

+afterEach(() => {
+// Ensure env stubs are cleaned up after each test
+vi.unstubAllEnvs();
+});
+
 it('should call AI service and return validated data on success', async () => {
 const jobData = createMockJobData({});
 const mockAiResponse = {
@@ -73,64 +78,232 @@ describe('FlyerAiProcessor', () => {
 expect(result.needsReview).toBe(false);
 });

-it('should throw AiDataValidationError if AI response has incorrect data structure', async () => {
+it('should throw an error if getAllMasterItems fails', async () => {
+// Arrange
 const jobData = createMockJobData({});
-// Mock AI to return a structurally invalid response (e.g., items is not an array)
-const invalidResponse = {
-store_name: 'Invalid Store',
-items: 'not-an-array',
-valid_from: null,
-valid_to: null,
-store_address: null,
-};
-vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(invalidResponse as any);
+const dbError = new Error('Database connection failed');
+vi.mocked(mockPersonalizationRepo.getAllMasterItems).mockRejectedValue(dbError);

 const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
-await expect(service.extractAndValidateData(imagePaths, jobData, logger)).rejects.toThrow(
-AiDataValidationError,
-);
+// Act & Assert
+await expect(
+service.extractAndValidateData(imagePaths, jobData, logger),
+).rejects.toThrow(dbError);
+
+// Verify that the process stops before calling the AI service
+expect(mockAiService.extractCoreDataFromFlyerImage).not.toHaveBeenCalled();
 });

-it('should pass validation even if store_name is missing', async () => {
-const jobData = createMockJobData({});
-const mockAiResponse = {
-store_name: null, // Missing store name
-items: [{ item: 'Test Item', price_display: '$1.99', price_in_cents: 199, quantity: 'each', category_name: 'Grocery' }],
-// ADDED to satisfy AiFlyerDataSchema
-valid_from: null,
-valid_to: null,
-store_address: null,
-};
-vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
-const { logger } = await import('./logger.server');
+describe('Validation and Quality Checks', () => {
+it('should pass validation and not flag for review with good quality data', async () => {
+const jobData = createMockJobData({});
+const mockAiResponse = {
+store_name: 'Good Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Good St',
+items: [
+{ item: 'Priced Item 1', price_in_cents: 199, price_display: '$1.99', quantity: '1', category_name: 'A' },
+{ item: 'Priced Item 2', price_in_cents: 299, price_display: '$2.99', quantity: '1', category_name: 'B' },
+],
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
+const { logger } = await import('./logger.server');

 const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
 const result = await service.extractAndValidateData(imagePaths, jobData, logger);

-// It should not throw, but return the data and log a warning.
-expect(result.data).toEqual(mockAiResponse);
-expect(result.needsReview).toBe(true);
-expect(logger.warn).toHaveBeenCalledWith(expect.any(Object), expect.stringContaining('missing a store name. The transformer will use a fallback. Flagging for review.'));
+// With all data present and correct, it should not need a review.
+expect(result.needsReview).toBe(false);
+expect(logger.warn).not.toHaveBeenCalled();
+});
+
+it('should throw AiDataValidationError if AI response has incorrect data structure', async () => {
+const jobData = createMockJobData({});
+// Mock AI to return a structurally invalid response (e.g., items is not an array)
+const invalidResponse = {
+store_name: 'Invalid Store',
+items: 'not-an-array',
+valid_from: null,
+valid_to: null,
+store_address: null,
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(invalidResponse as any);
+
+const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
+await expect(service.extractAndValidateData(imagePaths, jobData, logger)).rejects.toThrow(
+AiDataValidationError,
+);
+});
+
+it('should flag for review if store_name is missing', async () => {
+const jobData = createMockJobData({});
+const mockAiResponse = {
+store_name: null, // Missing store name
+items: [{ item: 'Test Item', price_display: '$1.99', price_in_cents: 199, quantity: 'each', category_name: 'Grocery' }],
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: null,
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
+const { logger } = await import('./logger.server');
+
+const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+
+expect(result.needsReview).toBe(true);
+expect(logger.warn).toHaveBeenCalledWith(
+expect.objectContaining({ qualityIssues: ['Missing store name'] }),
+expect.stringContaining('AI response has quality issues.'),
+);
+});
+
+it('should flag for review if items array is empty', async () => {
+const jobData = createMockJobData({});
+const mockAiResponse = {
+store_name: 'Test Store',
+items: [], // Empty items array
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: null,
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
+const { logger } = await import('./logger.server');
+
+const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+expect(result.needsReview).toBe(true);
+expect(logger.warn).toHaveBeenCalledWith(
+expect.objectContaining({ qualityIssues: ['No items were extracted'] }),
+expect.stringContaining('AI response has quality issues.'),
+);
+});
+
+it('should flag for review if item price quality is low', async () => {
+const jobData = createMockJobData({});
+const mockAiResponse = {
+store_name: 'Test Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [
+{ item: 'Priced Item', price_in_cents: 199, price_display: '$1.99', quantity: '1', category_name: 'A' },
+{ item: 'Unpriced Item 1', price_in_cents: null, price_display: 'See store', quantity: '1', category_name: 'B' },
+{ item: 'Unpriced Item 2', price_in_cents: null, price_display: 'FREE', quantity: '1', category_name: 'C' },
+], // 1/3 = 33% have price, which is < 50%
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
+const { logger } = await import('./logger.server');
+
+const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+
+expect(result.needsReview).toBe(true);
+expect(logger.warn).toHaveBeenCalledWith(
+expect.objectContaining({ qualityIssues: ['Low price quality (33% of items have a price)'] }),
+expect.stringContaining('AI response has quality issues.'),
+);
+});
+
+it('should use a custom price quality threshold from an environment variable', async () => {
+// Arrange
+vi.stubEnv('AI_PRICE_QUALITY_THRESHOLD', '0.8'); // Set a stricter threshold (80%)
+
+const jobData = createMockJobData({});
+const mockAiResponse = {
+store_name: 'Test Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [
+{ item: 'Priced Item 1', price_in_cents: 199, price_display: '$1.99', quantity: '1', category_name: 'A' },
+{ item: 'Priced Item 2', price_in_cents: 299, price_display: '$2.99', quantity: '1', category_name: 'B' },
+{ item: 'Priced Item 3', price_in_cents: 399, price_display: '$3.99', quantity: '1', category_name: 'C' },
+{ item: 'Unpriced Item 1', price_in_cents: null, price_display: 'See store', quantity: '1', category_name: 'D' },
+], // 3/4 = 75% have price. This is > 50% (default) but < 80% (custom).
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
+const { logger } = await import('./logger.server');
+
+// Act
+const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+
+// Assert
+// Because 75% < 80%, it should be flagged for review.
+expect(result.needsReview).toBe(true);
+expect(logger.warn).toHaveBeenCalledWith(
+expect.objectContaining({ qualityIssues: ['Low price quality (75% of items have a price)'] }),
+expect.stringContaining('AI response has quality issues.'),
+);
+});
+
+it('should flag for review if validity dates are missing', async () => {
+const jobData = createMockJobData({});
+const mockAiResponse = {
+store_name: 'Test Store',
+valid_from: null, // Missing date
+valid_to: null, // Missing date
+store_address: '123 Test St',
+items: [{ item: 'Test Item', price_in_cents: 199, price_display: '$1.99', quantity: '1', category_name: 'A' }],
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
+const { logger } = await import('./logger.server');
+
+const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+
+expect(result.needsReview).toBe(true);
+expect(logger.warn).toHaveBeenCalledWith(
+expect.objectContaining({ qualityIssues: ['Missing both valid_from and valid_to dates'] }),
+expect.stringContaining('AI response has quality issues.'),
+);
+});
+
+it('should combine multiple quality issues in the log', async () => {
+const jobData = createMockJobData({});
+const mockAiResponse = {
+store_name: null, // Issue 1
+items: [], // Issue 2
+valid_from: null, // Issue 3
+valid_to: null,
+store_address: null,
+};
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
+const { logger } = await import('./logger.server');
+
+const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+
+expect(result.needsReview).toBe(true);
+expect(logger.warn).toHaveBeenCalledWith(
+expect.objectContaining({
+qualityIssues: ['Missing store name', 'No items were extracted', 'Missing both valid_from and valid_to dates'],
+}),
+'AI response has quality issues. Issues: Missing store name, No items were extracted, Missing both valid_from and valid_to dates',
+);
+});
 });

-it('should pass validation even if items array is empty', async () => {
-const jobData = createMockJobData({});
+it('should pass the userProfileAddress from jobData to the AI service', async () => {
+// Arrange
+const jobData = createMockJobData({ userProfileAddress: '456 Fallback Ave' });
 const mockAiResponse = {
 store_name: 'Test Store',
-items: [], // Empty items array
-// ADDED to satisfy AiFlyerDataSchema
-valid_from: null,
-valid_to: null,
-store_address: null,
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [{ item: 'Test Item', price_in_cents: 199, price_display: '$1.99', quantity: '1', category_name: 'A' }],
 };
 vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValue(mockAiResponse);
-const { logger } = await import('./logger.server');

 const imagePaths = [{ path: 'page1.jpg', mimetype: 'image/jpeg' }];
-const result = await service.extractAndValidateData(imagePaths, jobData, logger);
-expect(result.data).toEqual(mockAiResponse);
-expect(result.needsReview).toBe(true);
-expect(logger.warn).toHaveBeenCalledWith(expect.any(Object), expect.stringContaining('contains no items. The flyer will be saved with an item_count of 0. Flagging for review.'));
+await service.extractAndValidateData(imagePaths, jobData, logger);
+
+// Assert
+expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(
+imagePaths, [], undefined, '456 Fallback Ave', logger
+);
 });

 describe('Batching Logic', () => {
@@ -201,6 +374,46 @@ describe('FlyerAiProcessor', () => {
 expect(result.needsReview).toBe(false);
 });

+it('should handle an empty object response from a batch without crashing', async () => {
+// Arrange
+const jobData = createMockJobData({});
+const imagePaths = [
+{ path: 'page1.jpg', mimetype: 'image/jpeg' }, { path: 'page2.jpg', mimetype: 'image/jpeg' }, { path: 'page3.jpg', mimetype: 'image/jpeg' }, { path: 'page4.jpg', mimetype: 'image/jpeg' }, { path: 'page5.jpg', mimetype: 'image/jpeg' },
+];
+
+const mockAiResponseBatch1 = {
+store_name: 'Good Store',
+valid_from: '2025-01-01',
+valid_to: '2025-01-07',
+store_address: '123 Good St',
+items: [
+{ item: 'Item A', price_display: '$1', price_in_cents: 100, quantity: '1', category_name: 'Cat A', master_item_id: 1 },
+],
+};
+
+// The AI returns an empty object for the second batch.
+const mockAiResponseBatch2 = {};
+
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage)
+.mockResolvedValueOnce(mockAiResponseBatch1)
+.mockResolvedValueOnce(mockAiResponseBatch2 as any); // Use `as any` to bypass strict type check for the test mock
+
+// Act
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+
+// Assert
+// 1. AI service was called twice.
+expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(2);
+
+// 2. The final data should only contain data from the first batch.
+expect(result.data.store_name).toBe('Good Store');
+expect(result.data.items).toHaveLength(1);
+expect(result.data.items[0].item).toBe('Item A');
+
+// 3. The process should complete without errors and not be flagged for review if the first batch was good.
+expect(result.needsReview).toBe(false);
+});
+
 it('should fill in missing metadata from subsequent batches', async () => {
 // Arrange
 const jobData = createMockJobData({});
@@ -226,4 +439,40 @@ describe('FlyerAiProcessor', () => {
 expect(result.data.items).toHaveLength(2);
 });
 });
+
+it('should handle a single batch correctly when image count is less than BATCH_SIZE', async () => {
+// Arrange
+const jobData = createMockJobData({});
+// 2 images, which is less than the BATCH_SIZE of 4.
+const imagePaths = [
+{ path: 'page1.jpg', mimetype: 'image/jpeg' },
+{ path: 'page2.jpg', mimetype: 'image/jpeg' },
+];
+
+const mockAiResponse = {
+store_name: 'Single Batch Store',
+valid_from: '2025-02-01',
+valid_to: '2025-02-07',
+store_address: '789 Single St',
+items: [
+{ item: 'Item X', price_display: '$10', price_in_cents: 1000, quantity: '1', category_name: 'Cat X', master_item_id: 10 },
+],
+};
+
+// Mock the AI service to be called only once.
+vi.mocked(mockAiService.extractCoreDataFromFlyerImage).mockResolvedValueOnce(mockAiResponse);
+
+// Act
+const result = await service.extractAndValidateData(imagePaths, jobData, logger);
+
+// Assert
+// 1. AI service was called only once.
+expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenCalledTimes(1);
+
+// 2. Check the arguments for the single call.
+expect(mockAiService.extractCoreDataFromFlyerImage).toHaveBeenCalledWith(imagePaths, [], undefined, undefined, logger);
+
+// 3. Check that the final data matches the single batch's data.
+expect(result.data).toEqual(mockAiResponse);
+});
 });
@@ -46,26 +46,52 @@ export class FlyerAiProcessor {
 );
 }

-// --- NEW QUALITY CHECK ---
-// After structural validation, perform semantic quality checks.
-const { store_name, items } = validationResult.data;
-let needsReview = false;
+// --- Data Quality Checks ---
+// After structural validation, perform semantic quality checks to flag low-quality
+// extractions for manual review.
+const { store_name, items, valid_from, valid_to } = validationResult.data;
+const qualityIssues: string[] = [];

-// 1. Check for a valid store name, but don't fail the job.
-// The data transformer will handle this by assigning a fallback name.
+// 1. Check for a store name.
 if (!store_name || store_name.trim() === '') {
-logger.warn({ rawData: extractedData }, 'AI response is missing a store name. The transformer will use a fallback. Flagging for review.');
-needsReview = true;
+qualityIssues.push('Missing store name');
 }

-// 2. Check that at least one item was extracted, but don't fail the job.
-// An admin can review a flyer with 0 items.
+// 2. Check that items were extracted.
 if (!items || items.length === 0) {
-logger.warn({ rawData: extractedData }, 'AI response contains no items. The flyer will be saved with an item_count of 0. Flagging for review.');
-needsReview = true;
+qualityIssues.push('No items were extracted');
+} else {
+// 3. If items exist, check their quality (e.g., missing prices).
+// The threshold is configurable via an environment variable, defaulting to 0.5 (50%).
+const priceQualityThreshold = parseFloat(process.env.AI_PRICE_QUALITY_THRESHOLD || '0.5');
+
+const itemsWithPrice = items.filter(
+(item) => item.price_in_cents != null && item.price_in_cents > 0,
+).length;
+const priceQualityRatio = itemsWithPrice / items.length;
+
+if (priceQualityRatio < priceQualityThreshold) {
+// If the ratio of items with a valid price is below the threshold, flag for review.
+qualityIssues.push(
+`Low price quality (${(priceQualityRatio * 100).toFixed(0)}% of items have a price)`,
+);
+}
 }

-logger.info(`AI extracted ${validationResult.data.items.length} items.`);
+// 4. Check for flyer validity dates.
+if (!valid_from && !valid_to) {
+qualityIssues.push('Missing both valid_from and valid_to dates');
+}
+
+const needsReview = qualityIssues.length > 0;
+if (needsReview) {
+logger.warn(
+{ rawData: extractedData, qualityIssues },
+`AI response has quality issues. Flagging for review. Issues: ${qualityIssues.join(', ')}`,
+);
+}
+
+logger.info(`AI extracted ${validationResult.data.items.length} items. Needs Review: ${needsReview}`);
 return { data: validationResult.data, needsReview };
 }
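
The price-quality heuristic above can also be read in isolation as a small pure function; this restatement is illustrative, but the environment variable name and the 0.5 default come from the change itself:

```ts
// Mirrors the check added above; ItemLike is a narrowed, illustrative type.
interface ItemLike {
  price_in_cents: number | null;
}

export function lowPriceQualityIssue(items: ItemLike[]): string | null {
  const threshold = parseFloat(process.env.AI_PRICE_QUALITY_THRESHOLD || '0.5');
  const withPrice = items.filter((i) => i.price_in_cents != null && i.price_in_cents > 0).length;
  const ratio = withPrice / items.length;
  return ratio < threshold
    ? `Low price quality (${(ratio * 100).toFixed(0)}% of items have a price)`
    : null;
}

// Example: with AI_PRICE_QUALITY_THRESHOLD=0.8, a flyer where only 3 of 4 items
// carry a price (75%) is flagged, while the 0.5 default would let it pass.
```
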
@@ -292,4 +292,227 @@ describe('FlyerDataTransformer', () => {
 `${expectedFallbackUrl}/flyer-images/icons/icon-flyer-page-1.webp`,
 );
 });
+
+describe('_normalizeItem price parsing', () => {
+it('should use price_in_cents from AI if it is valid, ignoring price_display', async () => {
+// Arrange
+const aiResult: AiProcessorResult = {
+data: {
+store_name: 'Test Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [
+{
+item: 'Milk',
+price_display: '$4.99', // Parsable, but should be ignored
+price_in_cents: 399, // AI provides a specific (maybe wrong) value
+quantity: '1L',
+category_name: 'Dairy',
+master_item_id: 10,
+},
+],
+},
+needsReview: false,
+};
+const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+
+// Act
+const { itemsForDb } = await transformer.transform(
+aiResult,
+imagePaths,
+'file.pdf',
+'checksum',
+'user-1',
+mockLogger,
+'http://test.host',
+);
+
+// Assert
+expect(itemsForDb[0].price_in_cents).toBe(399); // AI's value should be prioritized
+});
+
+it('should use parsePriceToCents as a fallback if AI price_in_cents is null', async () => {
+// Arrange
+const aiResult: AiProcessorResult = {
+data: {
+store_name: 'Test Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [
+{
+item: 'Milk',
+price_display: '$4.99', // Parsable value
+price_in_cents: null, // AI fails to provide a value
+quantity: '1L',
+category_name: 'Dairy',
+master_item_id: 10,
+},
+],
+},
+needsReview: false,
+};
+const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+
+// Act
+const { itemsForDb } = await transformer.transform(
+aiResult,
+imagePaths,
+'file.pdf',
+'checksum',
+'user-1',
+mockLogger,
+'http://test.host',
+);
+
+// Assert
+expect(itemsForDb[0].price_in_cents).toBe(499); // Should be parsed from price_display
+});
+
+it('should result in null if both AI price and display price are unparsable', async () => {
+// Arrange
+const aiResult: AiProcessorResult = {
+data: {
+store_name: 'Test Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [
+{
+item: 'Milk',
+price_display: 'FREE', // Unparsable
+price_in_cents: null, // AI provides null
+quantity: '1L',
+category_name: 'Dairy',
+master_item_id: 10,
+},
+],
+},
+needsReview: false,
+};
+const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+
+// Act
+const { itemsForDb } = await transformer.transform(
+aiResult,
+imagePaths,
+'file.pdf',
+'checksum',
+'user-1',
+mockLogger,
+'http://test.host',
+);
+
+// Assert
+expect(itemsForDb[0].price_in_cents).toBeNull();
+});
+});
+
+it('should handle non-string values for string fields gracefully by converting them', async () => {
+// This test verifies that if data with incorrect types bypasses earlier validation,
+// the transformer is robust enough to convert them to strings instead of crashing.
+// Arrange
+const aiResult: AiProcessorResult = {
+data: {
+store_name: 'Type-Unsafe Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [
+{
+item: 12345 as any, // Simulate AI returning a number instead of a string
+price_display: 3.99 as any, // Simulate a number for a string field
+price_in_cents: 399,
+quantity: 5 as any, // Simulate a number
+category_name: 'Dairy',
+master_item_id: 10,
+},
+],
+},
+needsReview: false,
+};
+const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+
+// Act
+const { itemsForDb } = await transformer.transform(
+aiResult,
+imagePaths,
+'file.pdf',
+'checksum',
+'user-1',
+mockLogger,
+'http://robust.host',
+);
+
+// Assert
+expect(itemsForDb).toHaveLength(1);
+expect(itemsForDb[0]).toEqual(
+expect.objectContaining({
+item: '12345', // Should be converted to string
+price_display: '3.99', // Should be converted to string
+quantity: '5', // Should be converted to string
+}),
+);
+});
+
+describe('needsReview flag handling', () => {
+it('should set status to "processed" when needsReview is false', async () => {
+// Arrange
+const aiResult: AiProcessorResult = {
+data: {
+store_name: 'Test Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [],
+},
+needsReview: false, // Key part of this test
+};
+const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+
+// Act
+const { flyerData } = await transformer.transform(
+aiResult,
+imagePaths,
+'file.pdf',
+'checksum',
+'user-1',
+mockLogger,
+'http://test.host',
+);
+
+// Assert
+expect(flyerData.status).toBe('processed');
+});
+
+it('should set status to "needs_review" when needsReview is true', async () => {
+// Arrange
+const aiResult: AiProcessorResult = {
+data: {
+store_name: 'Test Store',
+valid_from: '2024-01-01',
+valid_to: '2024-01-07',
+store_address: '123 Test St',
+items: [],
+},
+needsReview: true, // Key part of this test
+};
+const imagePaths = [{ path: '/uploads/flyer-page-1.jpg', mimetype: 'image/jpeg' }];
+
+// Act
+const { flyerData } = await transformer.transform(
+aiResult,
+imagePaths,
+'file.pdf',
+'checksum',
+'user-1',
+mockLogger,
+'http://test.host',
+);
+
+// Assert
+expect(flyerData.status).toBe('needs_review');
+});
+});
 });

@@ -7,6 +7,7 @@ import type { AiProcessorResult } from './flyerAiProcessor.server'; // Keep this
 import { AiFlyerDataSchema } from '../types/ai'; // Import consolidated schema
 import { generateFlyerIcon } from '../utils/imageProcessor';
 import { TransformationError } from './processingErrors';
+import { parsePriceToCents } from '../utils/priceParser';

 /**
 * This class is responsible for transforming the validated data from the AI service
@@ -21,16 +22,25 @@ export class FlyerDataTransformer {
 private _normalizeItem(
 item: z.infer<typeof AiFlyerDataSchema>['items'][number],
 ): FlyerItemInsert {
+// If the AI fails to provide `price_in_cents` but provides a parsable `price_display`,
+// we can use our own parser as a fallback to improve data quality.
+const priceFromDisplay = parsePriceToCents(item.price_display ?? '');
+
+// Prioritize the AI's direct `price_in_cents` value, but use the parsed value if the former is null.
+const finalPriceInCents = item.price_in_cents ?? priceFromDisplay;
+
 return {
 ...item,
 // Use nullish coalescing and trim for robustness.
 // An empty or whitespace-only name falls back to 'Unknown Item'.
-item: (item.item ?? '').trim() || 'Unknown Item',
+item: (String(item.item ?? '')).trim() || 'Unknown Item',
 // Default null/undefined to an empty string and trim.
-price_display: (item.price_display ?? '').trim(),
-quantity: (item.quantity ?? '').trim(),
+price_display: (String(item.price_display ?? '')).trim(),
+quantity: (String(item.quantity ?? '')).trim(),
 // An empty or whitespace-only category falls back to 'Other/Miscellaneous'.
-category_name: (item.category_name ?? '').trim() || 'Other/Miscellaneous',
+category_name: (String(item.category_name ?? '')).trim() || 'Other/Miscellaneous',
+// Overwrite price_in_cents with our calculated value.
+price_in_cents: finalPriceInCents,
 // Use nullish coalescing to convert null to undefined for the database.
 master_item_id: item.master_item_id ?? undefined,
 view_count: 0,
@@ -38,6 +48,47 @@ export class FlyerDataTransformer {
 };
 }

+/**
+ * Generates a 64x64 icon for the flyer's first page.
+ * @param firstImage The path to the first image of the flyer.
+ * @param logger The logger instance.
+ * @returns The filename of the generated icon.
+ */
+private async _generateIcon(firstImage: string, logger: Logger): Promise<string> {
+const iconFileName = await generateFlyerIcon(
+firstImage,
+path.join(path.dirname(firstImage), 'icons'),
+logger,
+);
+return iconFileName;
+}
+
+/**
+ * Constructs the full public URLs for the flyer image and its icon.
+ * @param firstImage The path to the first image of the flyer.
+ * @param iconFileName The filename of the generated icon.
+ * @param baseUrl The base URL from the job payload.
+ * @param logger The logger instance.
+ * @returns An object containing the full image_url and icon_url.
+ */
+private _buildUrls(
+firstImage: string,
+iconFileName: string,
+baseUrl: string | undefined,
+logger: Logger,
+): { imageUrl: string; iconUrl: string } {
+let finalBaseUrl = baseUrl;
+if (!finalBaseUrl) {
+const port = process.env.PORT || 3000;
+finalBaseUrl = `http://localhost:${port}`;
+logger.warn(`Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`);
+}
+finalBaseUrl = finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;
+const imageUrl = `${finalBaseUrl}/flyer-images/${path.basename(firstImage)}`;
+const iconUrl = `${finalBaseUrl}/flyer-images/icons/${iconFileName}`;
+return { imageUrl, iconUrl };
+}
+
 /**
 * Transforms AI-extracted data into database-ready flyer and item records.
 * @param extractedData The validated data from the AI.
@@ -63,11 +114,8 @@ export class FlyerDataTransformer {
 const { data: extractedData, needsReview } = aiResult;

 const firstImage = imagePaths[0].path;
-const iconFileName = await generateFlyerIcon(
-firstImage,
-path.join(path.dirname(firstImage), 'icons'),
-logger,
-);
+const iconFileName = await this._generateIcon(firstImage, logger);
+const { imageUrl, iconUrl } = this._buildUrls(firstImage, iconFileName, baseUrl, logger);

 const itemsForDb: FlyerItemInsert[] = extractedData.items.map((item) => this._normalizeItem(item));

@@ -76,23 +124,10 @@ export class FlyerDataTransformer {
 logger.warn('AI did not return a store name. Using fallback "Unknown Store (auto)".');
 }

-// The baseUrl is passed from the job payload to ensure the worker has the correct environment context.
-// If it's missing for any reason, we fall back to a sensible default for local development.
-let finalBaseUrl = baseUrl;
-if (!finalBaseUrl) {
-const port = process.env.PORT || 3000;
-finalBaseUrl = `http://localhost:${port}`;
-logger.warn(
-`Base URL not provided in job data. Falling back to default local URL: ${finalBaseUrl}`,
-);
-}
-
-finalBaseUrl = finalBaseUrl.endsWith('/') ? finalBaseUrl.slice(0, -1) : finalBaseUrl;
-
 const flyerData: FlyerInsert = {
 file_name: originalFileName,
-image_url: `${finalBaseUrl}/flyer-images/${path.basename(firstImage)}`,
-icon_url: `${finalBaseUrl}/flyer-images/icons/${iconFileName}`,
+image_url: imageUrl,
+icon_url: iconUrl,
 checksum,
 store_name: storeName,
 valid_from: extractedData.valid_from,
@@ -34,6 +34,7 @@ import {
  AiDataValidationError,
  PdfConversionError,
  UnsupportedFileTypeError,
  TransformationError,
} from './processingErrors';
import { FlyerFileHandler } from './flyerFileHandler.server';
import { FlyerAiProcessor } from './flyerAiProcessor.server';
@@ -376,19 +377,21 @@ describe('FlyerProcessingService', () => {
      const { logger } = await import('./logger.server');
      const dbError = new Error('Database transaction failed');
      vi.mocked(createFlyerAndItems).mockRejectedValue(dbError);

      await expect(service.processJob(job)).rejects.toThrow('Database transaction failed');

      expect(job.updateProgress).toHaveBeenCalledWith({
        errorCode: 'UNKNOWN_ERROR',
        message: 'Database transaction failed',
      // The service wraps the generic DB error in a DatabaseError, but _reportErrorAndThrow re-throws the original.
      await expect(service.processJob(job)).rejects.toThrow(dbError);

      // The final progress update should reflect the structured DatabaseError.
      expect(job.updateProgress).toHaveBeenLastCalledWith({
        errorCode: 'DATABASE_ERROR',
        message: 'A database operation failed. Please try again later.',
        stages: [
          { name: 'Preparing Inputs', status: 'completed', critical: true, detail: '1 page(s) ready for AI.' },
          { name: 'Extracting Data with AI', status: 'completed', critical: true, detail: 'Communicating with AI model...' },
          { name: 'Transforming AI Data', status: 'completed', critical: true },
          { name: 'Saving to Database', status: 'failed', critical: true, detail: 'Database transaction failed' },
          { name: 'Saving to Database', status: 'failed', critical: true, detail: 'A database operation failed. Please try again later.' },
        ],
      }); // This was a duplicate, fixed.
      });
      expect(mockCleanupQueue.add).not.toHaveBeenCalled();
      expect(logger.warn).toHaveBeenCalledWith(
        'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',
@@ -418,17 +421,17 @@ describe('FlyerProcessingService', () => {
    it('should delegate to _reportErrorAndThrow if icon generation fails', async () => {
      const job = createMockJob({});
      const { logger } = await import('./logger.server');
      const iconError = new Error('Icon generation failed.');
      const transformationError = new TransformationError('Icon generation failed.');
      // The `transform` method calls `generateFlyerIcon`. In `beforeEach`, `transform` is mocked
      // to always succeed. For this test, we override that mock to simulate a failure
      // bubbling up from the icon generation step.
      vi.spyOn(FlyerDataTransformer.prototype, 'transform').mockRejectedValue(iconError);
      vi.spyOn(FlyerDataTransformer.prototype, 'transform').mockRejectedValue(transformationError);

      const reportErrorSpy = vi.spyOn(service as any, '_reportErrorAndThrow');

      await expect(service.processJob(job)).rejects.toThrow('Icon generation failed.');

      expect(reportErrorSpy).toHaveBeenCalledWith(iconError, job, expect.any(Object), expect.any(Array));
      expect(reportErrorSpy).toHaveBeenCalledWith(transformationError, job, expect.any(Object), expect.any(Array));
      expect(mockCleanupQueue.add).not.toHaveBeenCalled();
      expect(logger.warn).toHaveBeenCalledWith(
        'Job failed. Temporary files will NOT be cleaned up to allow for manual inspection.',

@@ -5,7 +5,7 @@ import type { Logger } from 'pino';
import type { FlyerFileHandler, IFileSystem, ICommandExecutor } from './flyerFileHandler.server';
import type { FlyerAiProcessor } from './flyerAiProcessor.server';
import type * as Db from './db/index.db';
import type { AdminRepository } from './db/admin.db';
import { AdminRepository } from './db/admin.db';
import { FlyerDataTransformer } from './flyerDataTransformer';
import type { FlyerJobData, CleanupJobData } from '../types/job-data';
import {
@@ -13,9 +13,11 @@ import {
  PdfConversionError,
  AiDataValidationError,
  UnsupportedFileTypeError,
  DatabaseError,
} from './processingErrors';
import { createFlyerAndItems } from './db/flyer.db';
import { logger as globalLogger } from './logger.server';
import { withTransaction } from './db/index.db';

// Define ProcessingStage locally as it's not exported from the types file.
export type ProcessingStage = {
@@ -108,21 +110,29 @@ export class FlyerProcessingService {
    stages[3].status = 'in-progress';
    await job.updateProgress({ stages });

    const { flyer } = await createFlyerAndItems(flyerData, itemsForDb, logger);
    const { flyer } = await withTransaction(async (client) => {
      // This assumes createFlyerAndItems is refactored to accept a transactional client.
      const { flyer: newFlyer } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);

      // Instantiate a new AdminRepository with the transactional client to ensure
      // the activity log is part of the same transaction.
      const transactionalAdminRepo = new AdminRepository(client);
      await transactionalAdminRepo.logActivity(
        {
          action: 'flyer_processed',
          displayText: `Processed flyer for ${flyerData.store_name}`,
          details: { flyer_id: newFlyer.flyer_id, store_name: flyerData.store_name },
          userId: job.data.userId,
        },
        logger,
      );

      return { flyer: newFlyer };
    });

    stages[3].status = 'completed';
    await job.updateProgress({ stages });

    // Stage 5: Log Activity
    await this.db.adminRepo.logActivity(
      {
        action: 'flyer_processed',
        displayText: `Processed flyer for ${flyerData.store_name}`,
        details: { flyer_id: flyer.flyer_id, store_name: flyerData.store_name },
        userId: job.data.userId,
      },
      logger,
    );

    // Enqueue a job to clean up the original and any generated files.
    await this.cleanupQueue.add(
      'cleanup-flyer-files',
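
The hunk above leans on a `withTransaction` helper from `./db/index.db` whose callback receives a transactional client; its implementation is not shown in this diff. A minimal sketch of such a helper with node-postgres, offered purely as an assumption about its shape:

import { Pool, type PoolClient } from 'pg';

const pool = new Pool(); // assumes connection settings come from the environment

// Sketch only - the real helper in ./db/index.db may differ.
export async function withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await callback(client); // all repository calls share this client
    await client.query('COMMIT');
    return result;
  } catch (err) {
    await client.query('ROLLBACK'); // undo partial writes if any step throws
    throw err;
  } finally {
    client.release();
  }
}

Running the flyer insert and the activity log through the same client is what makes the two writes atomic.
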
@@ -210,7 +220,8 @@ export class FlyerProcessingService {
      ['PDF_CONVERSION_FAILED', 'Preparing Inputs'],
      ['UNSUPPORTED_FILE_TYPE', 'Preparing Inputs'],
      ['AI_VALIDATION_FAILED', 'Extracting Data with AI'],
      ['TRANSFORMATION_FAILED', 'Transforming AI Data'], // Add new mapping
      ['TRANSFORMATION_FAILED', 'Transforming AI Data'],
      ['DATABASE_ERROR', 'Saving to Database'],
    ]);
    const normalizedError = error instanceof Error ? error : new Error(String(error));
    let errorPayload: { errorCode: string; message: string; [key: string]: any };
@@ -227,15 +238,6 @@ export class FlyerProcessingService {
    const failedStageName = errorCodeToStageMap.get(errorPayload.errorCode);
    let errorStageIndex = failedStageName ? stagesToReport.findIndex(s => s.name === failedStageName) : -1;

    // Fallback for generic errors not in the map. This is less robust and relies on string matching.
    // A future improvement would be to wrap these in specific FlyerProcessingError subclasses.
    if (errorStageIndex === -1 && errorPayload.message.includes('Icon generation failed')) {
      errorStageIndex = stagesToReport.findIndex(s => s.name === 'Transforming AI Data');
    }
    if (errorStageIndex === -1 && errorPayload.message.includes('Database transaction failed')) {
      errorStageIndex = stagesToReport.findIndex(s => s.name === 'Saving to Database');
    }

    // 2. If not mapped, find the currently running stage
    if (errorStageIndex === -1) {
      errorStageIndex = stagesToReport.findIndex(s => s.status === 'in-progress');

@@ -1,7 +1,6 @@
// src/services/gamificationService.ts

import { gamificationRepo } from './db/index.db';
import { ForeignKeyConstraintError } from './db/errors.db';
import type { Logger } from 'pino';

class GamificationService {
@@ -12,18 +11,9 @@ class GamificationService {
   * @param log The logger instance.
   */
  async awardAchievement(userId: string, achievementName: string, log: Logger): Promise<void> {
    try {
      await gamificationRepo.awardAchievement(userId, achievementName, log);
    } catch (error) {
      if (error instanceof ForeignKeyConstraintError) {
        throw error;
      }
      log.error(
        { error, userId, achievementName },
        'Error awarding achievement via admin endpoint:',
      );
      throw error;
    }
    // The repository layer handles database errors, including logging and throwing specific error types.
    // This service method simply orchestrates the call.
    return gamificationRepo.awardAchievement(userId, achievementName, log);
  }

  /**
@@ -31,12 +21,7 @@ class GamificationService {
   * @param log The logger instance.
   */
  async getAllAchievements(log: Logger) {
    try {
      return await gamificationRepo.getAllAchievements(log);
    } catch (error) {
      log.error({ error }, 'Error in getAllAchievements service method');
      throw error;
    }
    return gamificationRepo.getAllAchievements(log);
  }

  /**
@@ -45,16 +30,7 @@ class GamificationService {
   * @param log The logger instance.
   */
  async getLeaderboard(limit: number, log: Logger) {
    // The test failures point to an issue in the underlying repository method,
    // where the database query is not being executed. This service method is a simple
    // pass-through, so the root cause is likely in `gamification.db.ts`.
    // Adding robust error handling here is a good practice regardless.
    try {
      return await gamificationRepo.getLeaderboard(limit, log);
    } catch (error) {
      log.error({ error, limit }, 'Error fetching leaderboard in service method.');
      throw error;
    }
    return gamificationRepo.getLeaderboard(limit, log);
  }

  /**
@@ -63,16 +39,7 @@ class GamificationService {
   * @param log The logger instance.
   */
  async getUserAchievements(userId: string, log: Logger) {
    // The test failures point to an issue in the underlying repository method,
    // where the database query is not being executed. This service method is a simple
    // pass-through, so the root cause is likely in `gamification.db.ts`.
    // Adding robust error handling here is a good practice regardless.
    try {
      return await gamificationRepo.getUserAchievements(userId, log);
    } catch (error) {
      log.error({ error, userId }, 'Error fetching user achievements in service method.');
      throw error;
    }
    return gamificationRepo.getUserAchievements(userId, log);
  }
}

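The slimmed-down service above assumes the repository logs and classifies database failures itself. The repository is not part of this diff; the following is only a guess at the contract it is expected to honour, reusing the `ForeignKeyConstraintError` the service no longer imports (the table, columns, and error constructor signature are assumptions):

import type { Logger } from 'pino';
import { Pool } from 'pg';
import { ForeignKeyConstraintError } from './db/errors.db';

const pool = new Pool();

// Assumed shape of the repository method the service delegates to; not taken from this diff.
export async function awardAchievement(userId: string, achievementName: string, log: Logger): Promise<void> {
  try {
    await pool.query(
      'INSERT INTO user_achievements (user_id, achievement_name) VALUES ($1, $2)',
      [userId, achievementName],
    );
  } catch (error) {
    log.error({ error, userId, achievementName }, 'Failed to award achievement.');
    // 23503 is Postgres's foreign_key_violation code; constructor signature assumed.
    if ((error as { code?: string }).code === '23503') {
      throw new ForeignKeyConstraintError('User or achievement does not exist.');
    }
    throw error;
  }
}
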
@@ -74,6 +74,19 @@ export class TransformationError extends FlyerProcessingError {
    );
  }
}

/**
 * Error thrown when a database operation fails during processing.
 */
export class DatabaseError extends FlyerProcessingError {
  constructor(message: string) {
    super(
      message,
      'DATABASE_ERROR',
      'A database operation failed. Please try again later.',
    );
  }
}
/**
 * Error thrown when an image conversion fails (e.g., using sharp).
 */

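A small sketch of how a raw driver failure can be converted into the new error class so the job progress report carries the stable code and friendly text; the helper below is illustrative and not part of the diff:

import { DatabaseError } from './processingErrors';

// Keep the low-level message for logs while exposing the DATABASE_ERROR code
// and the generic user-facing text to whatever builds the progress payload.
function toDatabaseError(err: unknown): DatabaseError {
  const message = err instanceof Error ? err.message : String(err);
  return new DatabaseError(message);
}

// toDatabaseError(new Error('Database transaction failed')) then resolves to the
// 'Saving to Database' stage via the ['DATABASE_ERROR', 'Saving to Database'] entry added earlier in this diff.
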
@@ -4,6 +4,7 @@ import type { Address, UserProfile } from '../types';
import { createMockUserProfile } from '../tests/utils/mockFactories';
import * as bcrypt from 'bcrypt';
import { ValidationError, NotFoundError } from './db/errors.db';
import { DatabaseError } from './processingErrors';
import type { Job } from 'bullmq';
import type { TokenCleanupJobData } from '../types/job-data';

@@ -176,6 +177,30 @@ describe('UserService', () => {
      // 3. Since the address ID did not change, the user profile should NOT be updated.
      expect(mocks.mockUpdateUserProfile).not.toHaveBeenCalled();
    });

    it('should throw a DatabaseError if the transaction fails', async () => {
      const { logger } = await import('./logger.server');
      const user = createMockUserProfile({
        user: { user_id: 'user-123' },
        address_id: null,
      });
      const addressData: Partial<Address> = { address_line_1: '123 Fail St' };
      const dbError = new Error('DB connection lost');

      // Simulate a failure within the transaction (e.g., upsertAddress fails)
      mocks.mockUpsertAddress.mockRejectedValue(dbError);

      // Act & Assert
      await expect(userService.upsertUserAddress(user, addressData, logger)).rejects.toThrow(
        DatabaseError,
      );

      // Assert that the error was logged correctly
      expect(logger.error).toHaveBeenCalledWith(
        { err: dbError },
        `Transaction to upsert user address failed: ${dbError.message}`,
      );
    });
  });

  describe('processTokenCleanupJob', () => {

@@ -7,8 +7,10 @@ import { AddressRepository } from './db/address.db';
import { UserRepository } from './db/user.db';
import type { Address, Profile, UserProfile } from '../types';
import { ValidationError, NotFoundError } from './db/errors.db';
import { DatabaseError } from './processingErrors';
import { logger as globalLogger } from './logger.server';
import type { TokenCleanupJobData } from '../types/job-data';
import { getBaseUrl } from '../utils/serverUtils';

/**
 * Encapsulates user-related business logic that may involve multiple repository calls.
@@ -27,27 +29,23 @@ class UserService {
    addressData: Partial<Address>,
    logger: Logger,
  ): Promise<number> {
    return db.withTransaction(async (client) => {
      // Instantiate repositories with the transactional client
      const addressRepo = new AddressRepository(client);
      const userRepo = new UserRepository(client);
      const addressId = await addressRepo.upsertAddress(
        { ...addressData, address_id: userprofile.address_id ?? undefined },
        logger,
      );

      // If the user didn't have an address_id before, update their profile to link it.
      if (!userprofile.address_id) {
        await userRepo.updateUserProfile(
          userprofile.user.user_id,
          { address_id: addressId },
          logger,
        );
      }

      return addressId;
    });
    return db
      .withTransaction(async (client) => {
        const addressRepo = new AddressRepository(client);
        const userRepo = new UserRepository(client);
        const addressId = await addressRepo.upsertAddress(
          { ...addressData, address_id: userprofile.address_id ?? undefined },
          logger,
        );
        if (!userprofile.address_id) {
          await userRepo.updateUserProfile(userprofile.user.user_id, { address_id: addressId }, logger);
        }
        return addressId;
      })
      .catch((error) => {
        logger.error({ err: error, userId: userprofile.user.user_id }, `Transaction to upsert user address failed.`);
        throw error;
      });
  }

  /**
@@ -55,27 +53,19 @@ class UserService {
   * @param job The BullMQ job object.
   * @returns An object containing the count of deleted tokens.
   */
  async processTokenCleanupJob(
    job: Job<TokenCleanupJobData>,
  ): Promise<{ deletedCount: number }> {
  async processTokenCleanupJob(job: Job<TokenCleanupJobData>): Promise<{ deletedCount: number }> {
    const logger = globalLogger.child({
      jobId: job.id,
      jobName: job.name,
    });

    logger.info('Picked up expired token cleanup job.');

    try {
      const deletedCount = await db.userRepo.deleteExpiredResetTokens(logger);
      logger.info(`Successfully deleted ${deletedCount} expired tokens.`);
      return { deletedCount };
    } catch (error) {
      const wrappedError = error instanceof Error ? error : new Error(String(error));
      logger.error(
        { err: wrappedError, attemptsMade: job.attemptsMade },
        'Expired token cleanup job failed.',
      );
      throw wrappedError;
      logger.error({ err: error, attemptsMade: job.attemptsMade }, `Expired token cleanup job failed.`);
      throw error;
    }
  }

@@ -87,26 +77,18 @@ class UserService {
   * @returns The updated user profile.
   */
  async updateUserAvatar(userId: string, file: Express.Multer.File, logger: Logger): Promise<Profile> {
    // Construct proper URLs including protocol and host to satisfy DB constraints.
    let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
    if (!baseUrl || !baseUrl.startsWith('http')) {
      const port = process.env.PORT || 3000;
      const fallbackUrl = `http://localhost:${port}`;
      if (baseUrl) {
        logger.warn(
          `FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to default local URL: ${fallbackUrl}`,
        );
      }
      baseUrl = fallbackUrl;
    }
    baseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;

    const avatarUrl = `${baseUrl}/uploads/avatars/${file.filename}`;
    return db.userRepo.updateUserProfile(
      userId,
      { avatar_url: avatarUrl },
      logger,
    );
    try {
      const baseUrl = getBaseUrl(logger);
      const avatarUrl = `${baseUrl}/uploads/avatars/${file.filename}`;
      return await db.userRepo.updateUserProfile(userId, { avatar_url: avatarUrl }, logger);
    } catch (error) {
      // Re-throw known application errors without logging them as system errors.
      if (error instanceof NotFoundError) {
        throw error;
      }
      logger.error({ err: error, userId }, `Failed to update user avatar.`);
      throw error;
    }
  }
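
`getBaseUrl` from `../utils/serverUtils` takes over the inline URL fallback deleted above; its body is not included in this diff. A sketch assuming it mirrors the logic it replaces:

import type { Logger } from 'pino';

// Assumed helper - mirrors the inline fallback logic removed from updateUserAvatar.
export function getBaseUrl(logger: Logger): string {
  let baseUrl = (process.env.FRONTEND_URL || process.env.BASE_URL || '').trim();
  if (!baseUrl || !baseUrl.startsWith('http')) {
    const port = process.env.PORT || 3000;
    const fallbackUrl = `http://localhost:${port}`;
    if (baseUrl) {
      logger.warn(`FRONTEND_URL/BASE_URL is invalid or incomplete ('${baseUrl}'). Falling back to ${fallbackUrl}`);
    }
    baseUrl = fallbackUrl;
  }
  // Strip a trailing slash so callers can safely append paths.
  return baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
}
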
  /**
   * Updates a user's password after hashing it.
@@ -115,9 +97,14 @@ class UserService {
   * @param logger The logger instance.
   */
  async updateUserPassword(userId: string, newPassword: string, logger: Logger): Promise<void> {
    const saltRounds = 10;
    const hashedPassword = await bcrypt.hash(newPassword, saltRounds);
    await db.userRepo.updateUserPassword(userId, hashedPassword, logger);
    try {
      const saltRounds = 10;
      const hashedPassword = await bcrypt.hash(newPassword, saltRounds);
      await db.userRepo.updateUserPassword(userId, hashedPassword, logger);
    } catch (error) {
      logger.error({ err: error, userId }, `Failed to update user password.`);
      throw error;
    }
  }

  /**
@@ -127,19 +114,23 @@ class UserService {
   * @param logger The logger instance.
   */
  async deleteUserAccount(userId: string, password: string, logger: Logger): Promise<void> {
    const userWithHash = await db.userRepo.findUserWithPasswordHashById(userId, logger);
    if (!userWithHash || !userWithHash.password_hash) {
      // This case should be rare for a logged-in user but is a good safeguard.
      throw new NotFoundError('User not found or password not set.');
    }

    const isMatch = await bcrypt.compare(password, userWithHash.password_hash);
    if (!isMatch) {
      // Use ValidationError for a 400-level response in the route
      throw new ValidationError([], 'Incorrect password.');
    }

    await db.userRepo.deleteUserById(userId, logger);
    try {
      const userWithHash = await db.userRepo.findUserWithPasswordHashById(userId, logger);
      if (!userWithHash || !userWithHash.password_hash) {
        throw new NotFoundError('User not found or password not set.');
      }
      const isMatch = await bcrypt.compare(password, userWithHash.password_hash);
      if (!isMatch) {
        throw new ValidationError([], 'Incorrect password.');
      }
      await db.userRepo.deleteUserById(userId, logger);
    } catch (error) {
      if (error instanceof NotFoundError || error instanceof ValidationError) {
        throw error;
      }
      logger.error({ err: error, userId }, `Failed to delete user account.`);
      throw error;
    }
  }

  /**
@@ -149,18 +140,19 @@ class UserService {
   * @param logger The logger instance.
   * @returns The address object.
   */
  async getUserAddress(
    userProfile: UserProfile,
    addressId: number,
    logger: Logger,
  ): Promise<Address> {
    // Security check: Ensure the requested addressId matches the one on the user's profile.
  async getUserAddress(userProfile: UserProfile, addressId: number, logger: Logger): Promise<Address> {
    if (userProfile.address_id !== addressId) {
      // Use ValidationError to trigger a 403 Forbidden response in the route handler.
      throw new ValidationError([], 'Forbidden: You can only access your own address.');
    }
    // The repo method will throw a NotFoundError if the address doesn't exist.
    return db.addressRepo.getAddressById(addressId, logger);
    try {
      return await db.addressRepo.getAddressById(addressId, logger);
    } catch (error) {
      if (error instanceof NotFoundError) {
        throw error;
      }
      logger.error({ err: error, userId: userProfile.user.user_id, addressId }, `Failed to get user address.`);
      throw error;
    }
  }

  /**
@@ -174,7 +166,15 @@ class UserService {
    if (deleterId === userToDeleteId) {
      throw new ValidationError([], 'Admins cannot delete their own account.');
    }
    await db.userRepo.deleteUserById(userToDeleteId, log);
    try {
      await db.userRepo.deleteUserById(userToDeleteId, log);
    } catch (error) {
      if (error instanceof ValidationError) {
        throw error;
      }
      log.error({ err: error, deleterId, userToDeleteId }, `Admin failed to delete user account.`);
      throw error;
    }
  }
}

@@ -12,11 +12,11 @@ export const requiredString = (message: string) =>
// They are used for validation and type inference across multiple services.

export const ExtractedFlyerItemSchema = z.object({
  item: z.string().nullable(),
  item: z.string().nullish(),
  price_display: z.string().nullable(),
  price_display: z.string().nullish(),
  price_in_cents: z.number().nullable(),
  price_in_cents: z.number().nullish(),
  quantity: z.string().nullable(),
  quantity: z.string().nullish(),
  category_name: z.string().nullable(),
  category_name: z.string().nullish(),
  master_item_id: z.number().nullish(), // .nullish() allows null or undefined
});

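The move from `.nullable()` to `.nullish()` means each field now tolerates a missing key (undefined) as well as an explicit null, which is the usual failure mode when the AI simply omits a property. A quick self-contained illustration with Zod:

import { z } from 'zod';

const Nullable = z.object({ quantity: z.string().nullable() });
const Nullish = z.object({ quantity: z.string().nullish() });

// An omitted key parses only with .nullish():
console.log(Nullable.safeParse({}).success);               // false - undefined is rejected
console.log(Nullish.safeParse({}).success);                // true  - null and undefined are both allowed
console.log(Nullish.safeParse({ quantity: null }).success); // true
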
@@ -20,4 +20,11 @@ export interface FlyerJobData {
export interface CleanupJobData {
  flyerId: number;
  paths: string[];
}

/**
 * Defines the shape of the data payload for a token cleanup job.
 */
export interface TokenCleanupJobData {
  timestamp: string;
}
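
The new interface pairs with `processTokenCleanupJob(job: Job<TokenCleanupJobData>)` in the user service. One way such a job could be enqueued with BullMQ; the queue name and cron schedule below are assumptions, not taken from this change:

import { Queue } from 'bullmq';
import type { TokenCleanupJobData } from '../types/job-data';

// Hypothetical queue name; the project may use a different one.
const maintenanceQueue = new Queue<TokenCleanupJobData>('maintenance');

// Enqueue a repeating cleanup job whose payload satisfies TokenCleanupJobData.
await maintenanceQueue.add(
  'token-cleanup',
  { timestamp: new Date().toISOString() },
  { repeat: { pattern: '0 3 * * *' } }, // assumed daily schedule at 03:00
);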