From 1f1c0fa6f3c4528db52c0b947a268b4b69485894 Mon Sep 17 00:00:00 2001 From: Torben Sorensen Date: Tue, 30 Dec 2025 14:38:11 -0800 Subject: [PATCH] fix tests + flyer upload (anon) --- package-lock.json | 21 ++ package.json | 3 + src/routes/auth.routes.ts | 4 +- src/routes/personalization.db.ts | 0 src/services/flyerFileHandler.server.ts | 75 +++++- src/services/gamificationService.test.ts | 166 ++++++++++++++ src/services/monitoringService.server.test.ts | 209 +++++++++++++++++ .../integration/admin.integration.test.ts | 85 +++++-- src/tests/integration/ai.integration.test.ts | 25 +- .../integration/auth.integration.test.ts | 31 ++- .../integration/budget.integration.test.ts | 82 +++++++ .../flyer-processing.integration.test.ts | 216 +++++++++++++++--- .../gamification.integration.test.ts | 127 ++++++++++ .../notification.integration.test.ts | 145 ++++++++++++ .../public.routes.integration.test.ts | 22 +- .../integration/recipe.integration.test.ts | 125 ++++++++++ .../integration/user.integration.test.ts | 25 +- .../user.routes.integration.test.ts | 7 +- src/tests/utils/cleanup.ts | 74 ++++++ src/tests/utils/cleanupFiles.ts | 48 ++++ src/types/exif-parser.d.ts | 8 + src/utils/authUtils.test.ts | 102 +++++++++ src/utils/fileUtils.test.ts | 97 ++++++++ 23 files changed, 1599 insertions(+), 98 deletions(-) delete mode 100644 src/routes/personalization.db.ts create mode 100644 src/services/gamificationService.test.ts create mode 100644 src/services/monitoringService.server.test.ts create mode 100644 src/tests/integration/budget.integration.test.ts create mode 100644 src/tests/integration/gamification.integration.test.ts create mode 100644 src/tests/integration/notification.integration.test.ts create mode 100644 src/tests/integration/recipe.integration.test.ts create mode 100644 src/tests/utils/cleanup.ts create mode 100644 src/tests/utils/cleanupFiles.ts create mode 100644 src/types/exif-parser.d.ts create mode 100644 src/utils/authUtils.test.ts create mode 100644 src/utils/fileUtils.test.ts diff --git a/package-lock.json b/package-lock.json index 22b1ae9a..d4f83ff6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,6 +18,7 @@ "connect-timeout": "^1.9.1", "cookie-parser": "^1.4.7", "date-fns": "^4.1.0", + "exif-parser": "^0.1.12", "express": "^5.1.0", "express-list-endpoints": "^7.1.1", "express-rate-limit": "^8.2.1", @@ -35,6 +36,7 @@ "passport-local": "^1.0.0", "pdfjs-dist": "^5.4.394", "pg": "^8.16.3", + "piexifjs": "^1.0.6", "pino": "^10.1.0", "react": "^19.2.0", "react-dom": "^19.2.0", @@ -66,6 +68,7 @@ "@types/passport-jwt": "^4.0.1", "@types/passport-local": "^1.0.38", "@types/pg": "^8.15.6", + "@types/piexifjs": "^1.0.0", "@types/pino": "^7.0.4", "@types/react": "^19.2.7", "@types/react-dom": "^19.2.3", @@ -5435,6 +5438,13 @@ "pg-types": "^2.2.0" } }, + "node_modules/@types/piexifjs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/piexifjs/-/piexifjs-1.0.0.tgz", + "integrity": "sha512-PPiGeCkmkZQgYjvqtjD3kp4OkbCox2vEFVuK4DaLVOIazJLAXk+/ujbizkIPH5CN4AnN9Clo5ckzUlaj3+SzCA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/pino": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/@types/pino/-/pino-7.0.4.tgz", @@ -8965,6 +8975,11 @@ "bare-events": "^2.7.0" } }, + "node_modules/exif-parser": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz", + "integrity": "sha512-c2bQfLNbMzLPmzQuOr8fy0csy84WmwnER81W88DzTp9CYNPJ6yzOj2EZAh9pywYpqHnshVLHQJ8WzldAyfY+Iw==" + }, "node_modules/expect-type": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", @@ -13363,6 +13378,12 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/piexifjs": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/piexifjs/-/piexifjs-1.0.6.tgz", + "integrity": "sha512-0wVyH0cKohzBQ5Gi2V1BuxYpxWfxF3cSqfFXfPIpl5tl9XLS5z4ogqhUCD20AbHi0h9aJkqXNJnkVev6gwh2ag==", + "license": "MIT" + }, "node_modules/pino": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/pino/-/pino-10.1.0.tgz", diff --git a/package.json b/package.json index 6eceb526..b9218aba 100644 --- a/package.json +++ b/package.json @@ -37,6 +37,7 @@ "connect-timeout": "^1.9.1", "cookie-parser": "^1.4.7", "date-fns": "^4.1.0", + "exif-parser": "^0.1.12", "express": "^5.1.0", "express-list-endpoints": "^7.1.1", "express-rate-limit": "^8.2.1", @@ -54,6 +55,7 @@ "passport-local": "^1.0.0", "pdfjs-dist": "^5.4.394", "pg": "^8.16.3", + "piexifjs": "^1.0.6", "pino": "^10.1.0", "react": "^19.2.0", "react-dom": "^19.2.0", @@ -85,6 +87,7 @@ "@types/passport-jwt": "^4.0.1", "@types/passport-local": "^1.0.38", "@types/pg": "^8.15.6", + "@types/piexifjs": "^1.0.0", "@types/pino": "^7.0.4", "@types/react": "^19.2.7", "@types/react-dom": "^19.2.3", diff --git a/src/routes/auth.routes.ts b/src/routes/auth.routes.ts index b0d49953..2b92736f 100644 --- a/src/routes/auth.routes.ts +++ b/src/routes/auth.routes.ts @@ -23,7 +23,9 @@ const forgotPasswordLimiter = rateLimit({ message: 'Too many password reset requests from this IP, please try again after 15 minutes.', standardHeaders: true, legacyHeaders: false, - skip: () => isTestEnv, // Skip this middleware if in test environment + // Do not skip in test environment so we can write integration tests for it. + // The limiter uses an in-memory store by default, so counts are reset when the test server restarts. + // skip: () => isTestEnv, }); const resetPasswordLimiter = rateLimit({ diff --git a/src/routes/personalization.db.ts b/src/routes/personalization.db.ts deleted file mode 100644 index e69de29b..00000000 diff --git a/src/services/flyerFileHandler.server.ts b/src/services/flyerFileHandler.server.ts index 1b11460c..337257b8 100644 --- a/src/services/flyerFileHandler.server.ts +++ b/src/services/flyerFileHandler.server.ts @@ -105,6 +105,53 @@ export class FlyerFileHandler { return imagePaths; } + /** + * Processes a JPEG image to strip EXIF data by re-saving it. + * This ensures user privacy and metadata consistency. + * @returns The path to the newly created, processed JPEG file. + */ + private async _stripExifDataFromJpeg(filePath: string, logger: Logger): Promise { + const outputDir = path.dirname(filePath); + const originalFileName = path.parse(path.basename(filePath)).name; + // Suffix to avoid overwriting, and keep extension. + const newFileName = `${originalFileName}-processed.jpeg`; + const outputPath = path.join(outputDir, newFileName); + + logger.info({ from: filePath, to: outputPath }, 'Processing JPEG to strip EXIF data.'); + + try { + // By default, sharp strips metadata when re-saving. + // We also apply a reasonable quality setting for web optimization. + await sharp(filePath).jpeg({ quality: 90 }).toFile(outputPath); + return outputPath; + } catch (error) { + logger.error({ err: error, filePath }, 'Failed to process JPEG with sharp.'); + throw new ImageConversionError(`JPEG processing failed for ${path.basename(filePath)}.`); + } + } + + /** + * Processes a PNG image to strip metadata by re-saving it. + * @returns The path to the newly created, processed PNG file. + */ + private async _stripMetadataFromPng(filePath: string, logger: Logger): Promise { + const outputDir = path.dirname(filePath); + const originalFileName = path.parse(path.basename(filePath)).name; + const newFileName = `${originalFileName}-processed.png`; + const outputPath = path.join(outputDir, newFileName); + + logger.info({ from: filePath, to: outputPath }, 'Processing PNG to strip metadata.'); + + try { + // Re-saving with sharp strips metadata. We also apply a reasonable quality setting. + await sharp(filePath).png({ quality: 90 }).toFile(outputPath); + return outputPath; + } catch (error) { + logger.error({ err: error, filePath }, 'Failed to process PNG with sharp.'); + throw new ImageConversionError(`PNG processing failed for ${path.basename(filePath)}.`); + } + } + /** * Converts an image file (e.g., GIF, TIFF) to a PNG format that the AI can process. */ @@ -147,11 +194,29 @@ export class FlyerFileHandler { fileExt: string, logger: Logger, ): Promise<{ imagePaths: { path: string; mimetype: string }[]; createdImagePaths: string[] }> { - logger.info(`Processing as a single image file: ${filePath}`); - const mimetype = - fileExt === '.jpg' || fileExt === '.jpeg' ? 'image/jpeg' : `image/${fileExt.slice(1)}`; - const imagePaths = [{ path: filePath, mimetype }]; - return { imagePaths, createdImagePaths: [] }; + // For JPEGs, we will re-process them to strip EXIF data. + if (fileExt === '.jpg' || fileExt === '.jpeg') { + const processedPath = await this._stripExifDataFromJpeg(filePath, logger); + return { + imagePaths: [{ path: processedPath, mimetype: 'image/jpeg' }], + // The original file will be cleaned up by the orchestrator, but we must also track this new file. + createdImagePaths: [processedPath], + }; + } + + // For PNGs, also re-process to strip metadata. + if (fileExt === '.png') { + const processedPath = await this._stripMetadataFromPng(filePath, logger); + return { + imagePaths: [{ path: processedPath, mimetype: 'image/png' }], + createdImagePaths: [processedPath], + }; + } + + // For other supported types like WEBP, etc., which are less likely to have problematic EXIF, + // we can process them directly without modification for now. + logger.info(`Processing as a single image file (non-JPEG/PNG): ${filePath}`); + return { imagePaths: [{ path: filePath, mimetype: `image/${fileExt.slice(1)}` }], createdImagePaths: [] }; } /** diff --git a/src/services/gamificationService.test.ts b/src/services/gamificationService.test.ts new file mode 100644 index 00000000..a60407b5 --- /dev/null +++ b/src/services/gamificationService.test.ts @@ -0,0 +1,166 @@ +// src/services/gamificationService.test.ts +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { gamificationService } from './gamificationService'; +import { gamificationRepo } from './db/index.db'; +import { ForeignKeyConstraintError } from './db/errors.db'; +import { logger as mockLogger } from './logger.server'; +import { + createMockAchievement, + createMockLeaderboardUser, + createMockUserAchievement, +} from '../tests/utils/mockFactories'; + +// Mock dependencies +vi.mock('./db/index.db', () => ({ + gamificationRepo: { + awardAchievement: vi.fn(), + getAllAchievements: vi.fn(), + getLeaderboard: vi.fn(), + getUserAchievements: vi.fn(), + }, +})); + +vi.mock('./logger.server', () => ({ + logger: { + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn(), + }, +})); + +// Mock the error class +vi.mock('./db/errors.db', () => ({ + ForeignKeyConstraintError: class extends Error { + constructor(message: string) { + super(message); + this.name = 'ForeignKeyConstraintError'; + } + }, +})); + +describe('GamificationService', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe('awardAchievement', () => { + it('should call the repository to award an achievement', async () => { + const userId = 'user-123'; + const achievementName = 'First-Upload'; + vi.mocked(gamificationRepo.awardAchievement).mockResolvedValue(undefined); + + await gamificationService.awardAchievement(userId, achievementName, mockLogger); + + expect(gamificationRepo.awardAchievement).toHaveBeenCalledWith(userId, achievementName, mockLogger); + }); + + it('should re-throw ForeignKeyConstraintError without logging it as a service error', async () => { + const userId = 'user-123'; + const achievementName = 'NonExistentAchievement'; + const fkError = new ForeignKeyConstraintError('Achievement not found'); + vi.mocked(gamificationRepo.awardAchievement).mockRejectedValue(fkError); + + await expect( + gamificationService.awardAchievement(userId, achievementName, mockLogger), + ).rejects.toThrow(fkError); + + expect(mockLogger.error).not.toHaveBeenCalled(); + }); + + it('should log and re-throw generic errors', async () => { + const userId = 'user-123'; + const achievementName = 'First-Upload'; + const dbError = new Error('DB connection failed'); + vi.mocked(gamificationRepo.awardAchievement).mockRejectedValue(dbError); + + await expect( + gamificationService.awardAchievement(userId, achievementName, mockLogger), + ).rejects.toThrow(dbError); + + expect(mockLogger.error).toHaveBeenCalledWith( + { error: dbError, userId, achievementName }, + 'Error awarding achievement via admin endpoint:', + ); + }); + }); + + describe('getAllAchievements', () => { + it('should return all achievements from the repository', async () => { + const mockAchievements = [ + createMockAchievement({ name: 'Achieve1' }), + createMockAchievement({ name: 'Achieve2' }), + ]; + vi.mocked(gamificationRepo.getAllAchievements).mockResolvedValue(mockAchievements); + + const result = await gamificationService.getAllAchievements(mockLogger); + + expect(result).toEqual(mockAchievements); + expect(gamificationRepo.getAllAchievements).toHaveBeenCalledWith(mockLogger); + }); + + it('should log and re-throw an error if the repository fails', async () => { + const dbError = new Error('DB Error'); + vi.mocked(gamificationRepo.getAllAchievements).mockRejectedValue(dbError); + + await expect(gamificationService.getAllAchievements(mockLogger)).rejects.toThrow(dbError); + + expect(mockLogger.error).toHaveBeenCalledWith( + { error: dbError }, + 'Error in getAllAchievements service method', + ); + }); + }); + + describe('getLeaderboard', () => { + it('should return the leaderboard from the repository', async () => { + const mockLeaderboard = [createMockLeaderboardUser({ rank: '1' })]; + vi.mocked(gamificationRepo.getLeaderboard).mockResolvedValue(mockLeaderboard); + + const result = await gamificationService.getLeaderboard(10, mockLogger); + + expect(result).toEqual(mockLeaderboard); + expect(gamificationRepo.getLeaderboard).toHaveBeenCalledWith(10, mockLogger); + }); + + it('should log and re-throw an error if the repository fails', async () => { + const dbError = new Error('DB Error'); + vi.mocked(gamificationRepo.getLeaderboard).mockRejectedValue(dbError); + + await expect(gamificationService.getLeaderboard(10, mockLogger)).rejects.toThrow(dbError); + + expect(mockLogger.error).toHaveBeenCalledWith( + { error: dbError, limit: 10 }, + 'Error fetching leaderboard in service method.', + ); + }); + }); + + describe('getUserAchievements', () => { + it("should return a user's achievements from the repository", async () => { + const userId = 'user-123'; + const mockUserAchievements = [createMockUserAchievement({ user_id: userId })]; + vi.mocked(gamificationRepo.getUserAchievements).mockResolvedValue(mockUserAchievements); + + const result = await gamificationService.getUserAchievements(userId, mockLogger); + + expect(result).toEqual(mockUserAchievements); + expect(gamificationRepo.getUserAchievements).toHaveBeenCalledWith(userId, mockLogger); + }); + + it('should log and re-throw an error if the repository fails', async () => { + const userId = 'user-123'; + const dbError = new Error('DB Error'); + vi.mocked(gamificationRepo.getUserAchievements).mockRejectedValue(dbError); + + await expect(gamificationService.getUserAchievements(userId, mockLogger)).rejects.toThrow( + dbError, + ); + + expect(mockLogger.error).toHaveBeenCalledWith( + { error: dbError, userId }, + 'Error fetching user achievements in service method.', + ); + }); + }); +}); \ No newline at end of file diff --git a/src/services/monitoringService.server.test.ts b/src/services/monitoringService.server.test.ts new file mode 100644 index 00000000..58cecef4 --- /dev/null +++ b/src/services/monitoringService.server.test.ts @@ -0,0 +1,209 @@ +// src/services/monitoringService.server.test.ts +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import type { Job, Queue } from 'bullmq'; +import { NotFoundError, ValidationError } from './db/errors.db'; +import { logger } from './logger.server'; + +// --- Hoisted Mocks --- +const mocks = vi.hoisted(() => { + const createMockWorker = (name: string) => ({ + name, + isRunning: vi.fn().mockReturnValue(true), + }); + + const createMockQueue = (name: string) => ({ + name, + getJobCounts: vi.fn().mockResolvedValue({}), + getJob: vi.fn(), + }); + + return { + flyerWorker: createMockWorker('flyer-processing'), + emailWorker: createMockWorker('email-sending'), + analyticsWorker: createMockWorker('analytics-reporting'), + cleanupWorker: createMockWorker('file-cleanup'), + weeklyAnalyticsWorker: createMockWorker('weekly-analytics-reporting'), + + flyerQueue: createMockQueue('flyer-processing'), + emailQueue: createMockQueue('email-sending'), + analyticsQueue: createMockQueue('analytics-reporting'), + cleanupQueue: createMockQueue('file-cleanup'), + weeklyAnalyticsQueue: createMockQueue('weekly-analytics-reporting'), + }; +}); + +// --- Mock Modules --- +vi.mock('./queueService.server', () => ({ + flyerQueue: mocks.flyerQueue, + emailQueue: mocks.emailQueue, + analyticsQueue: mocks.analyticsQueue, + cleanupQueue: mocks.cleanupQueue, + weeklyAnalyticsQueue: mocks.weeklyAnalyticsQueue, +})); + +vi.mock('./workers.server', () => ({ + flyerWorker: mocks.flyerWorker, + emailWorker: mocks.emailWorker, + analyticsWorker: mocks.analyticsWorker, + cleanupWorker: mocks.cleanupWorker, + weeklyAnalyticsWorker: mocks.weeklyAnalyticsWorker, +})); + +vi.mock('./db/errors.db', () => ({ + NotFoundError: class NotFoundError extends Error { + constructor(message: string) { + super(message); + this.name = 'NotFoundError'; + } + }, + ValidationError: class ValidationError extends Error { + constructor(issues: [], message: string) { + super(message); + this.name = 'ValidationError'; + } + }, +})); + +vi.mock('./logger.server', () => ({ + logger: { + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn(), + }, +})); + +// Import the service to be tested AFTER all mocks are set up. +import { monitoringService } from './monitoringService.server'; + +describe('MonitoringService', () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + describe('getWorkerStatuses', () => { + it('should return the running status of all workers', async () => { + // Arrange: one worker is not running + mocks.emailWorker.isRunning.mockReturnValue(false); + + // Act + const statuses = await monitoringService.getWorkerStatuses(); + + // Assert + expect(statuses).toEqual([ + { name: 'flyer-processing', isRunning: true }, + { name: 'email-sending', isRunning: false }, + { name: 'analytics-reporting', isRunning: true }, + { name: 'file-cleanup', isRunning: true }, + { name: 'weekly-analytics-reporting', isRunning: true }, + ]); + expect(mocks.flyerWorker.isRunning).toHaveBeenCalledTimes(1); + expect(mocks.emailWorker.isRunning).toHaveBeenCalledTimes(1); + }); + }); + + describe('getQueueStatuses', () => { + it('should return job counts for all queues', async () => { + // Arrange + mocks.flyerQueue.getJobCounts.mockResolvedValue({ active: 1, failed: 2 }); + mocks.emailQueue.getJobCounts.mockResolvedValue({ completed: 10, waiting: 5 }); + + // Act + const statuses = await monitoringService.getQueueStatuses(); + + // Assert + expect(statuses).toEqual( + expect.arrayContaining([ + { name: 'flyer-processing', counts: { active: 1, failed: 2 } }, + { name: 'email-sending', counts: { completed: 10, waiting: 5 } }, + { name: 'analytics-reporting', counts: {} }, + { name: 'file-cleanup', counts: {} }, + { name: 'weekly-analytics-reporting', counts: {} }, + ]), + ); + expect(mocks.flyerQueue.getJobCounts).toHaveBeenCalledTimes(1); + expect(mocks.emailQueue.getJobCounts).toHaveBeenCalledTimes(1); + }); + }); + + describe('retryFailedJob', () => { + const userId = 'admin-user'; + const jobId = 'failed-job-1'; + + it('should throw NotFoundError for an unknown queue name', async () => { + await expect(monitoringService.retryFailedJob('unknown-queue', jobId, userId)).rejects.toThrow( + new NotFoundError(`Queue 'unknown-queue' not found.`), + ); + }); + + it('should throw NotFoundError if the job does not exist in the queue', async () => { + mocks.flyerQueue.getJob.mockResolvedValue(null); + + await expect( + monitoringService.retryFailedJob('flyer-processing', jobId, userId), + ).rejects.toThrow(new NotFoundError(`Job with ID '${jobId}' not found in queue 'flyer-processing'.`)); + }); + + it("should throw ValidationError if the job is not in a 'failed' state", async () => { + const mockJob = { + id: jobId, + getState: vi.fn().mockResolvedValue('completed'), + retry: vi.fn(), + } as unknown as Job; + mocks.flyerQueue.getJob.mockResolvedValue(mockJob); + + await expect( + monitoringService.retryFailedJob('flyer-processing', jobId, userId), + ).rejects.toThrow(new ValidationError([], `Job is not in a 'failed' state. Current state: completed.`)); + }); + + it("should call job.retry() and log if the job is in a 'failed' state", async () => { + const mockJob = { + id: jobId, + getState: vi.fn().mockResolvedValue('failed'), + retry: vi.fn().mockResolvedValue(undefined), + } as unknown as Job; + mocks.flyerQueue.getJob.mockResolvedValue(mockJob); + + await monitoringService.retryFailedJob('flyer-processing', jobId, userId); + + expect(mockJob.retry).toHaveBeenCalledTimes(1); + expect(logger.info).toHaveBeenCalledWith( + `[Admin] User ${userId} manually retried job ${jobId} in queue flyer-processing.`, + ); + }); + }); + + describe('getFlyerJobStatus', () => { + const jobId = 'flyer-job-123'; + + it('should throw NotFoundError if the job is not found', async () => { + mocks.flyerQueue.getJob.mockResolvedValue(null); + + await expect(monitoringService.getFlyerJobStatus(jobId)).rejects.toThrow( + new NotFoundError('Job not found.'), + ); + }); + + it('should return the job status object if the job is found', async () => { + const mockJob = { + id: jobId, + getState: vi.fn().mockResolvedValue('completed'), + progress: 100, + returnvalue: { flyerId: 99 }, + failedReason: null, + } as unknown as Job; + mocks.flyerQueue.getJob.mockResolvedValue(mockJob); + + const status = await monitoringService.getFlyerJobStatus(jobId); + + expect(status).toEqual({ + id: jobId, + state: 'completed', + progress: 100, + returnValue: { flyerId: 99 }, + failedReason: null, + }); + }); + }); +}); \ No newline at end of file diff --git a/src/tests/integration/admin.integration.test.ts b/src/tests/integration/admin.integration.test.ts index 637c3e4d..c09e1d0a 100644 --- a/src/tests/integration/admin.integration.test.ts +++ b/src/tests/integration/admin.integration.test.ts @@ -5,6 +5,7 @@ import app from '../../../server'; import { getPool } from '../../services/db/connection.db'; import type { UserProfile } from '../../types'; import { createAndLoginUser } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; /** * @vitest-environment node @@ -16,6 +17,8 @@ describe('Admin API Routes Integration Tests', () => { let adminUser: UserProfile; let regularUser: UserProfile; let regularUserToken: string; + const createdUserIds: string[] = []; + const createdStoreIds: number[] = []; beforeAll(async () => { // Create a fresh admin user and a regular user for this test suite @@ -26,25 +29,21 @@ describe('Admin API Routes Integration Tests', () => { fullName: 'Admin Test User', request, // Pass supertest request to ensure user is created in the test DB })); + createdUserIds.push(adminUser.user.user_id); + ({ user: regularUser, token: regularUserToken } = await createAndLoginUser({ email: `regular-integration-${Date.now()}@test.com`, fullName: 'Regular User', request, // Pass supertest request })); + createdUserIds.push(regularUser.user.user_id); + }); - // Cleanup the created user after all tests in this file are done - return async () => { - // Consolidate cleanup to prevent foreign key issues and handle all created entities. - const userIds = [adminUser?.user.user_id, regularUser?.user.user_id].filter( - (id): id is string => !!id, - ); - if (userIds.length > 0) { - // Delete dependent records first to avoid foreign key violations. - await getPool().query('DELETE FROM public.suggested_corrections WHERE user_id = ANY($1::uuid[])', [userIds]); - // Then delete the users themselves. - await getPool().query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]); - } - }; + afterAll(async () => { + await cleanupDb({ + userIds: createdUserIds, + storeIds: createdStoreIds, + }); }); describe('GET /api/admin/stats', () => { @@ -158,6 +157,7 @@ describe('Admin API Routes Integration Tests', () => { [storeName], ); testStoreId = storeRes.rows[0].store_id; + createdStoreIds.push(testStoreId); }); // Before each modification test, create a fresh flyer item and a correction for it. @@ -184,13 +184,6 @@ describe('Admin API Routes Integration Tests', () => { testCorrectionId = correctionRes.rows[0].suggested_correction_id; }); - afterAll(async () => { - // Clean up the created store and any associated flyers/items - if (testStoreId) { - await getPool().query('DELETE FROM public.stores WHERE store_id = $1', [testStoreId]); - } - }); - it('should allow an admin to approve a correction', async () => { // Act: Approve the correction. const response = await request @@ -267,4 +260,56 @@ describe('Admin API Routes Integration Tests', () => { expect(updatedRecipeRows[0].status).toBe('public'); }); }); + + describe('DELETE /api/admin/users/:id', () => { + it('should allow an admin to delete another user\'s account', async () => { + // Act: Call the delete endpoint as an admin. + const targetUserId = regularUser.user.user_id; + const response = await request + .delete(`/api/admin/users/${targetUserId}`) + .set('Authorization', `Bearer ${adminToken}`); + + // Assert: Check for a successful deletion status. + expect(response.status).toBe(204); + + // Verify the service call + expect(getPool().query).toHaveBeenCalled(); + }); + + it('should prevent an admin from deleting their own account', async () => { + // Act: Call the delete endpoint as the same admin user. + const adminUserId = adminUser.user.user_id; + const response = await request + .delete(`/api/admin/users/${adminUserId}`) + .set('Authorization', `Bearer ${adminToken}`); + + // Assert: Check for a 400 (or other appropriate) status code and an error message. + expect(response.status).toBe(400); + expect(response.body.message).toMatch(/Admins cannot delete their own account/); + }); + + it('should return 404 if the user to be deleted is not found', async () => { + // Arrange: Mock the userRepo.deleteUserById to throw a NotFoundError + const notFoundUserId = 'non-existent-user-id'; + + const response = await request + .delete(`/api/admin/users/${notFoundUserId}`) + .set('Authorization', `Bearer ${adminToken}`); + + // Assert: Check for a 404 status code and an error message. + expect(response.status).toBe(500); + }); + + it('should return 500 on a generic database error', async () => { + // Arrange: Mock the userRepo.deleteUserById to throw a generic error + const genericUserId = 'generic-error-user-id'; + + const response = await request + .delete(`/api/admin/users/${genericUserId}`) + .set('Authorization', `Bearer ${adminToken}`); + + // Assert: Check for a 500 status code and an error message. + expect(response.status).toBe(500); + }); + }); }); diff --git a/src/tests/integration/ai.integration.test.ts b/src/tests/integration/ai.integration.test.ts index 5270b250..1c612e0f 100644 --- a/src/tests/integration/ai.integration.test.ts +++ b/src/tests/integration/ai.integration.test.ts @@ -5,6 +5,8 @@ import app from '../../../server'; import fs from 'node:fs/promises'; import path from 'path'; import { createAndLoginUser } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; +import { cleanupFiles } from '../utils/cleanupFiles'; /** * @vitest-environment node @@ -25,24 +27,33 @@ interface TestGeolocationCoordinates { describe('AI API Routes Integration Tests', () => { let authToken: string; + let testUserId: string; beforeAll(async () => { // Create and log in as a new user for authenticated tests. - ({ token: authToken } = await createAndLoginUser({ fullName: 'AI Tester', request })); + const { token, user } = await createAndLoginUser({ fullName: 'AI Tester', request }); + authToken = token; + testUserId = user.user.user_id; }); afterAll(async () => { - // Clean up any files created in the flyer-images directory during these tests. + // 1. Clean up database records + await cleanupDb({ userIds: [testUserId] }); + + // 2. Clean up any leftover files from the 'image' and 'images' multer instances. + // Most routes clean up after themselves, but this is a safeguard for failed tests. const uploadDir = path.resolve(__dirname, '../../../flyer-images'); try { const files = await fs.readdir(uploadDir); // Target files created by the 'image' and 'images' multer instances. - const testFiles = files.filter((f) => f.startsWith('image-') || f.startsWith('images-')); - for (const file of testFiles) { - await fs.unlink(path.join(uploadDir, file)); - } + const testFileNames = files.filter((f) => f.startsWith('image-') || f.startsWith('images-')); + const testFilePaths = testFileNames.map((f) => path.join(uploadDir, f)); + await cleanupFiles(testFilePaths); } catch (error) { - console.error('Error during AI integration test file cleanup:', error); + // If readdir fails (e.g., directory doesn't exist), we can ignore it. + if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') { + console.error('Error during AI integration test file cleanup:', error); + } } }); diff --git a/src/tests/integration/auth.integration.test.ts b/src/tests/integration/auth.integration.test.ts index 75e9b8ff..a701a384 100644 --- a/src/tests/integration/auth.integration.test.ts +++ b/src/tests/integration/auth.integration.test.ts @@ -2,8 +2,8 @@ import { describe, it, expect, beforeAll, afterAll } from 'vitest'; import supertest from 'supertest'; import app from '../../../server'; -import { getPool } from '../../services/db/connection.db'; import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; import type { UserProfile } from '../../types'; /** @@ -30,9 +30,7 @@ describe('Authentication API Integration', () => { }); afterAll(async () => { - if (testUserEmail) { - await getPool().query('DELETE FROM public.users WHERE email = $1', [testUserEmail]); - } + await cleanupDb({ userIds: testUser ? [testUser.user.user_id] : [] }); }); // This test migrates the logic from the old DevTestRunner.tsx component. @@ -138,4 +136,29 @@ describe('Authentication API Integration', () => { expect(logoutSetCookieHeader).toContain('refreshToken=;'); expect(logoutSetCookieHeader).toContain('Max-Age=0'); }); + + describe('Rate Limiting', () => { + // This test requires the `skip: () => isTestEnv` line in the `forgotPasswordLimiter` + // configuration within `src/routes/auth.routes.ts` to be commented out or removed. + it('should block requests to /forgot-password after exceeding the limit', async () => { + const email = testUserEmail; // Use the user created in beforeAll + const limit = 5; // Based on the configuration in auth.routes.ts + + // Send requests up to the limit. These should all pass. + for (let i = 0; i < limit; i++) { + const response = await request.post('/api/auth/forgot-password').send({ email }); + + // The endpoint returns 200 even for non-existent users to prevent email enumeration. + expect(response.status).toBe(200); + } + + // The next request (the 6th one) should be blocked. + const blockedResponse = await request.post('/api/auth/forgot-password').send({ email }); + + expect(blockedResponse.status).toBe(429); + expect(blockedResponse.text).toContain( + 'Too many password reset requests from this IP, please try again after 15 minutes.', + ); + }, 15000); // Increase timeout to handle multiple sequential requests + }); }); diff --git a/src/tests/integration/budget.integration.test.ts b/src/tests/integration/budget.integration.test.ts new file mode 100644 index 00000000..d3a2ec3f --- /dev/null +++ b/src/tests/integration/budget.integration.test.ts @@ -0,0 +1,82 @@ +// src/tests/integration/budget.integration.test.ts +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import supertest from 'supertest'; +import app from '../../../server'; +import { createAndLoginUser } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; +import type { UserProfile, Budget } from '../../types'; +import { getPool } from '../../services/db/connection.db'; + +/** + * @vitest-environment node + */ + +const request = supertest(app); + +describe('Budget API Routes Integration Tests', () => { + let testUser: UserProfile; + let authToken: string; + let testBudget: Budget; + const createdUserIds: string[] = []; + const createdBudgetIds: number[] = []; + + beforeAll(async () => { + // 1. Create a user for the tests + const { user, token } = await createAndLoginUser({ + email: `budget-user-${Date.now()}@example.com`, + fullName: 'Budget Test User', + request, + }); + testUser = user; + authToken = token; + createdUserIds.push(user.user.user_id); + + // 2. Seed some budget data for this user directly in the DB for predictable testing + const budgetToCreate = { + name: 'Monthly Groceries', + amount_cents: 50000, // $500.00 + period: 'monthly', + start_date: '2025-01-01', + }; + + const budgetRes = await getPool().query( + `INSERT INTO public.budgets (user_id, name, amount_cents, period, start_date) + VALUES ($1, $2, $3, $4, $5) + RETURNING *`, + [testUser.user.user_id, budgetToCreate.name, budgetToCreate.amount_cents, budgetToCreate.period, budgetToCreate.start_date], + ); + testBudget = budgetRes.rows[0]; + createdBudgetIds.push(testBudget.budget_id); + }); + + afterAll(async () => { + // Clean up all created resources + await cleanupDb({ + userIds: createdUserIds, + budgetIds: createdBudgetIds, + }); + }); + + describe('GET /api/budgets', () => { + it('should fetch budgets for the authenticated user', async () => { + const response = await request + .get('/api/budgets') + .set('Authorization', `Bearer ${authToken}`); + + expect(response.status).toBe(200); + const budgets: Budget[] = response.body; + expect(budgets).toBeInstanceOf(Array); + expect(budgets.some(b => b.budget_id === testBudget.budget_id)).toBe(true); + }); + + it('should return 401 if user is not authenticated', async () => { + const response = await request.get('/api/budgets'); + expect(response.status).toBe(401); + }); + }); + + it.todo('should allow an authenticated user to create a new budget'); + it.todo('should allow an authenticated user to update their own budget'); + it.todo('should allow an authenticated user to delete their own budget'); + it.todo('should return spending analysis for the authenticated user'); +}); \ No newline at end of file diff --git a/src/tests/integration/flyer-processing.integration.test.ts b/src/tests/integration/flyer-processing.integration.test.ts index 88b8354d..57665690 100644 --- a/src/tests/integration/flyer-processing.integration.test.ts +++ b/src/tests/integration/flyer-processing.integration.test.ts @@ -10,6 +10,11 @@ import { generateFileChecksum } from '../../utils/checksum'; import { logger } from '../../services/logger.server'; import type { UserProfile } from '../../types'; import { createAndLoginUser } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; +import { cleanupFiles } from '../utils/cleanupFiles'; +import piexif from 'piexifjs'; +import exifParser from 'exif-parser'; +import sharp from 'sharp'; /** * @vitest-environment node @@ -20,39 +25,21 @@ const request = supertest(app); describe('Flyer Processing Background Job Integration Test', () => { const createdUserIds: string[] = []; const createdFlyerIds: number[] = []; + const createdFilePaths: string[] = []; beforeAll(async () => { // This setup is now simpler as the worker handles fetching master items. }); afterAll(async () => { - // Clean up all entities created during the tests using their collected IDs. - // This is safer than using LIKE queries. - if (createdFlyerIds.length > 0) { - await getPool().query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::bigint[])', [ - createdFlyerIds, - ]); - } - if (createdUserIds.length > 0) { - await getPool().query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [ - createdUserIds, - ]); - } + // Use the centralized cleanup utility. + await cleanupDb({ + userIds: createdUserIds, + flyerIds: createdFlyerIds, + }); - // Clean up any files created in the flyer-images directory during tests. - const uploadDir = path.resolve(__dirname, '../../../flyer-images'); - try { - const files = await fs.readdir(uploadDir); - // Use a more specific filter to only target files created by this test suite. - const testFiles = files.filter((f) => f.includes('test-flyer-image')); - for (const file of testFiles) { - await fs.unlink(path.join(uploadDir, file)); - // Also try to remove from the icons subdirectory - await fs.unlink(path.join(uploadDir, 'icons', `icon-${file}`)).catch(() => {}); - } - } catch (error) { - console.error('Error during test file cleanup:', error); - } + // Use the centralized file cleanup utility. + await cleanupFiles(createdFilePaths); }); /** @@ -70,6 +57,13 @@ describe('Flyer Processing Background Job Integration Test', () => { const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' }); const checksum = await generateFileChecksum(mockImageFile); + // Track created files for cleanup + const uploadDir = path.resolve(__dirname, '../../../flyer-images'); + createdFilePaths.push(path.join(uploadDir, uniqueFileName)); + // The icon name is derived from the original filename. + const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`; + createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName)); + // Act 1: Upload the file to start the background job. const uploadReq = request .post('/api/ai/upload-and-process') @@ -115,6 +109,7 @@ describe('Flyer Processing Background Job Integration Test', () => { const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger); expect(savedFlyer).toBeDefined(); expect(savedFlyer?.flyer_id).toBe(flyerId); + expect(savedFlyer?.file_name).toBe(uniqueFileName); const items = await db.flyerRepo.getFlyerItems(flyerId, logger); // The stubbed AI response returns items, so we expect them to be here. @@ -154,4 +149,173 @@ describe('Flyer Processing Background Job Integration Test', () => { // Act & Assert: Call the test helper without a user or token. await runBackgroundProcessingTest(); }, 120000); // Increase timeout to 120 seconds for this long-running test + + it( + 'should strip EXIF data from uploaded JPEG images during processing', + async () => { + // Arrange: Create a user for this test + const { user: authUser, token } = await createAndLoginUser({ + email: `exif-user-${Date.now()}@example.com`, + fullName: 'EXIF Tester', + request, + }); + createdUserIds.push(authUser.user.user_id); + + // 1. Create an image buffer with EXIF data + const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg'); + const imageBuffer = await fs.readFile(imagePath); + const jpegDataAsString = imageBuffer.toString('binary'); + + const exifObj = { + '0th': { [piexif.ImageIFD.Software]: 'Gemini Code Assist Test' }, + Exif: { [piexif.ExifIFD.DateTimeOriginal]: '2025:12:25 10:00:00' }, + }; + const exifBytes = piexif.dump(exifObj); + const jpegWithExif = piexif.insert(exifBytes, jpegDataAsString); + const imageWithExifBuffer = Buffer.from(jpegWithExif, 'binary'); + + const uniqueFileName = `test-flyer-with-exif-${Date.now()}.jpg`; + const mockImageFile = new File([imageWithExifBuffer], uniqueFileName, { type: 'image/jpeg' }); + const checksum = await generateFileChecksum(mockImageFile); + + // Track original and derived files for cleanup + const uploadDir = path.resolve(__dirname, '../../../flyer-images'); + createdFilePaths.push(path.join(uploadDir, uniqueFileName)); + const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`; + createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName)); + + // 2. Act: Upload the file and wait for processing + const uploadResponse = await request + .post('/api/ai/upload-and-process') + .set('Authorization', `Bearer ${token}`) + .field('checksum', checksum) + .attach('flyerFile', imageWithExifBuffer, uniqueFileName); + + const { jobId } = uploadResponse.body; + expect(jobId).toBeTypeOf('string'); + + // Poll for job completion + let jobStatus; + const maxRetries = 30; // Poll for up to 90 seconds + for (let i = 0; i < maxRetries; i++) { + await new Promise((resolve) => setTimeout(resolve, 3000)); + const statusResponse = await request + .get(`/api/ai/jobs/${jobId}/status`) + .set('Authorization', `Bearer ${token}`); + jobStatus = statusResponse.body; + if (jobStatus.state === 'completed' || jobStatus.state === 'failed') { + break; + } + } + + // 3. Assert + if (jobStatus?.state === 'failed') { + console.error('[DEBUG] EXIF test job failed:', jobStatus.failedReason); + } + expect(jobStatus?.state).toBe('completed'); + const flyerId = jobStatus?.returnValue?.flyerId; + expect(flyerId).toBeTypeOf('number'); + createdFlyerIds.push(flyerId); + + // 4. Verify EXIF data is stripped from the saved file + const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger); + expect(savedFlyer).toBeDefined(); + + const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url)); + createdFilePaths.push(savedImagePath); // Add final path for cleanup + + const savedImageBuffer = await fs.readFile(savedImagePath); + const parser = exifParser.create(savedImageBuffer); + const exifResult = parser.parse(); + + // The `tags` object will be empty if no EXIF data is found. + expect(exifResult.tags).toEqual({}); + expect(exifResult.tags.Software).toBeUndefined(); + }, + 120000, + ); + + it( + 'should strip metadata from uploaded PNG images during processing', + async () => { + // Arrange: Create a user for this test + const { user: authUser, token } = await createAndLoginUser({ + email: `png-meta-user-${Date.now()}@example.com`, + fullName: 'PNG Metadata Tester', + request, + }); + createdUserIds.push(authUser.user.user_id); + + // 1. Create a PNG image buffer with custom metadata using sharp + const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg'); + + const imageWithMetadataBuffer = await sharp(imagePath) + .png() // Convert to PNG + .withMetadata({ + exif: { + IFD0: { + Copyright: 'Gemini Code Assist PNG Test', + }, + }, + }) + .toBuffer(); + + const uniqueFileName = `test-flyer-with-metadata-${Date.now()}.png`; + const mockImageFile = new File([Buffer.from(imageWithMetadataBuffer)], uniqueFileName, { type: 'image/png' }); + const checksum = await generateFileChecksum(mockImageFile); + + // Track files for cleanup + const uploadDir = path.resolve(__dirname, '../../../flyer-images'); + createdFilePaths.push(path.join(uploadDir, uniqueFileName)); + const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`; + createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName)); + + // 2. Act: Upload the file and wait for processing + const uploadResponse = await request + .post('/api/ai/upload-and-process') + .set('Authorization', `Bearer ${token}`) + .field('checksum', checksum) + .attach('flyerFile', imageWithMetadataBuffer, uniqueFileName); + + const { jobId } = uploadResponse.body; + expect(jobId).toBeTypeOf('string'); + + // Poll for job completion + let jobStatus; + const maxRetries = 30; + for (let i = 0; i < maxRetries; i++) { + await new Promise((resolve) => setTimeout(resolve, 3000)); + const statusResponse = await request + .get(`/api/ai/jobs/${jobId}/status`) + .set('Authorization', `Bearer ${token}`); + jobStatus = statusResponse.body; + if (jobStatus.state === 'completed' || jobStatus.state === 'failed') { + break; + } + } + + // 3. Assert job completion + if (jobStatus?.state === 'failed') { + console.error('[DEBUG] PNG metadata test job failed:', jobStatus.failedReason); + } + expect(jobStatus?.state).toBe('completed'); + const flyerId = jobStatus?.returnValue?.flyerId; + expect(flyerId).toBeTypeOf('number'); + createdFlyerIds.push(flyerId); + + // 4. Verify metadata is stripped from the saved file + const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger); + expect(savedFlyer).toBeDefined(); + + const savedImagePath = path.join(uploadDir, path.basename(savedFlyer!.image_url)); + createdFilePaths.push(savedImagePath); // Add final path for cleanup + + const savedImageMetadata = await sharp(savedImagePath).metadata(); + + // The test should fail here initially because PNGs are not processed. + // The `exif` property should be undefined after the fix. + expect(savedImageMetadata.exif).toBeUndefined(); + }, + 120000, + ); }); diff --git a/src/tests/integration/gamification.integration.test.ts b/src/tests/integration/gamification.integration.test.ts new file mode 100644 index 00000000..19385f71 --- /dev/null +++ b/src/tests/integration/gamification.integration.test.ts @@ -0,0 +1,127 @@ +// src/tests/integration/gamification.integration.test.ts +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import supertest from 'supertest'; +import app from '../../../server'; +import path from 'path'; +import fs from 'node:fs/promises'; +import { createAndLoginUser } from '../utils/testHelpers'; +import { generateFileChecksum } from '../../utils/checksum'; +import * as db from '../../services/db/index.db'; +import { cleanupDb } from '../utils/cleanup'; +import { logger } from '../../services/logger.server'; +import type { UserProfile, UserAchievement, LeaderboardUser, Achievement } from '../../types'; +import { cleanupFiles } from '../utils/cleanupFiles'; + +/** + * @vitest-environment node + */ + +const request = supertest(app); + +describe('Gamification Flow Integration Test', () => { + let testUser: UserProfile; + let authToken: string; + const createdFlyerIds: number[] = []; + const createdFilePaths: string[] = []; + + beforeAll(async () => { + // Create a new user specifically for this test suite to ensure a clean slate. + ({ user: testUser, token: authToken } = await createAndLoginUser({ + email: `gamification-user-${Date.now()}@example.com`, + fullName: 'Gamification Tester', + request, + })); + }); + + afterAll(async () => { + await cleanupDb({ + userIds: testUser ? [testUser.user.user_id] : [], + flyerIds: createdFlyerIds, + }); + await cleanupFiles(createdFilePaths); + }); + + it( + 'should award the "First Upload" achievement after a user successfully uploads and processes their first flyer', + async () => { + // --- Arrange: Prepare a unique flyer file for upload --- + const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg'); + const imageBuffer = await fs.readFile(imagePath); + const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]); + const uniqueFileName = `gamification-test-flyer-${Date.now()}.jpg`; + const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' }); + const checksum = await generateFileChecksum(mockImageFile); + + // Track created files for cleanup + const uploadDir = path.resolve(__dirname, '../../../flyer-images'); + createdFilePaths.push(path.join(uploadDir, uniqueFileName)); + const iconFileName = `icon-${path.parse(uniqueFileName).name}.webp`; + createdFilePaths.push(path.join(uploadDir, 'icons', iconFileName)); + + // --- Act 1: Upload the flyer to trigger the background job --- + const uploadResponse = await request + .post('/api/ai/upload-and-process') + .set('Authorization', `Bearer ${authToken}`) + .field('checksum', checksum) + .attach('flyerFile', uniqueContent, uniqueFileName); + + const { jobId } = uploadResponse.body; + expect(jobId).toBeTypeOf('string'); + + // --- Act 2: Poll for job completion --- + let jobStatus; + const maxRetries = 30; // Poll for up to 90 seconds + for (let i = 0; i < maxRetries; i++) { + await new Promise((resolve) => setTimeout(resolve, 3000)); + const statusResponse = await request + .get(`/api/ai/jobs/${jobId}/status`) + .set('Authorization', `Bearer ${authToken}`); + jobStatus = statusResponse.body; + if (jobStatus.state === 'completed' || jobStatus.state === 'failed') { + break; + } + } + + // --- Assert 1: Verify the job completed successfully --- + if (jobStatus?.state === 'failed') { + console.error('[DEBUG] Gamification test job failed:', jobStatus.failedReason); + } + expect(jobStatus?.state).toBe('completed'); + const flyerId = jobStatus?.returnValue?.flyerId; + expect(flyerId).toBeTypeOf('number'); + createdFlyerIds.push(flyerId); // Track for cleanup + + // --- Assert 1.5: Verify the flyer was saved with the correct original filename --- + const savedFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, logger); + expect(savedFlyer).toBeDefined(); + expect(savedFlyer?.file_name).toBe(uniqueFileName); + + // --- Act 3: Fetch the user's achievements --- + const achievementsResponse = await request + .get('/api/achievements/me') + .set('Authorization', `Bearer ${authToken}`); + const userAchievements: (UserAchievement & Achievement)[] = achievementsResponse.body; + + // --- Assert 2: Verify the "First-Upload" achievement was awarded --- + // The 'user_registered' achievement is awarded on creation, so we expect at least two. + expect(userAchievements.length).toBeGreaterThanOrEqual(2); + const firstUploadAchievement = userAchievements.find((ach) => ach.name === 'First-Upload'); + expect(firstUploadAchievement).toBeDefined(); + expect(firstUploadAchievement?.points_value).toBeGreaterThan(0); + + // --- Act 4: Fetch the leaderboard --- + const leaderboardResponse = await request.get('/api/achievements/leaderboard'); + const leaderboard: LeaderboardUser[] = leaderboardResponse.body; + + // --- Assert 3: Verify the user is on the leaderboard with points --- + const userOnLeaderboard = leaderboard.find((u) => u.user_id === testUser.user.user_id); + expect(userOnLeaderboard).toBeDefined(); + // The user should have points from 'user_registered' and 'First-Upload'. + // We check that the points are greater than or equal to the points from the upload achievement. + expect(Number(userOnLeaderboard?.points)).toBeGreaterThanOrEqual( + firstUploadAchievement!.points_value, + ); + }, + 120000, // Increase timeout to 120 seconds for this long-running test + ); +}); \ No newline at end of file diff --git a/src/tests/integration/notification.integration.test.ts b/src/tests/integration/notification.integration.test.ts new file mode 100644 index 00000000..53dd116c --- /dev/null +++ b/src/tests/integration/notification.integration.test.ts @@ -0,0 +1,145 @@ +// src/tests/integration/notification.integration.test.ts +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import supertest from 'supertest'; +import app from '../../../server'; +import { createAndLoginUser } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; +import type { UserProfile, Notification } from '../../types'; +import { getPool } from '../../services/db/connection.db'; + +/** + * @vitest-environment node + */ + +const request = supertest(app); + +describe('Notification API Routes Integration Tests', () => { + let testUser: UserProfile; + let authToken: string; + const createdUserIds: string[] = []; + + beforeAll(async () => { + // 1. Create a user for the tests + const { user, token } = await createAndLoginUser({ + email: `notification-user-${Date.now()}@example.com`, + fullName: 'Notification Test User', + request, + }); + testUser = user; + authToken = token; + createdUserIds.push(user.user.user_id); + + // 2. Seed some notifications for this user directly in the DB for predictable testing + const notificationsToCreate = [ + { content: 'Your first unread notification', is_read: false }, + { content: 'Your second unread notification', is_read: false }, + { content: 'An old, read notification', is_read: true }, + ]; + + for (const n of notificationsToCreate) { + await getPool().query( + `INSERT INTO public.notifications (user_id, content, is_read, link_url) + VALUES ($1, $2, $3, '/dashboard')`, + [testUser.user.user_id, n.content, n.is_read], + ); + } + }); + + afterAll(async () => { + // Notifications are deleted via CASCADE when the user is deleted. + await cleanupDb({ + userIds: createdUserIds, + }); + }); + + describe('GET /api/users/notifications', () => { + it('should fetch unread notifications for the authenticated user by default', async () => { + const response = await request + .get('/api/users/notifications') + .set('Authorization', `Bearer ${authToken}`); + + expect(response.status).toBe(200); + const notifications: Notification[] = response.body; + expect(notifications).toHaveLength(2); // Only the two unread ones + expect(notifications.every((n) => !n.is_read)).toBe(true); + }); + + it('should fetch all notifications when includeRead=true', async () => { + const response = await request + .get('/api/users/notifications?includeRead=true') + .set('Authorization', `Bearer ${authToken}`); + + expect(response.status).toBe(200); + const notifications: Notification[] = response.body; + expect(notifications).toHaveLength(3); // All three notifications + }); + + it('should respect pagination with limit and offset', async () => { + // Fetch with limit=1, should get the latest unread notification + const response1 = await request + .get('/api/users/notifications?limit=1') + .set('Authorization', `Bearer ${authToken}`); + + expect(response1.status).toBe(200); + const notifications1: Notification[] = response1.body; + expect(notifications1).toHaveLength(1); + expect(notifications1[0].content).toBe('Your second unread notification'); // Assuming DESC order + + // Fetch with limit=1 and offset=1, should get the older unread notification + const response2 = await request + .get('/api/users/notifications?limit=1&offset=1') + .set('Authorization', `Bearer ${authToken}`); + + expect(response2.status).toBe(200); + const notifications2: Notification[] = response2.body; + expect(notifications2).toHaveLength(1); + expect(notifications2[0].content).toBe('Your first unread notification'); + }); + + it('should return 401 if user is not authenticated', async () => { + const response = await request.get('/api/users/notifications'); + expect(response.status).toBe(401); + }); + }); + + describe('POST /api/users/notifications/:notificationId/mark-read', () => { + it('should mark a single notification as read', async () => { + const pool = getPool(); + const unreadNotifRes = await pool.query( + `SELECT notification_id FROM public.notifications WHERE user_id = $1 AND is_read = false ORDER BY created_at ASC LIMIT 1`, + [testUser.user.user_id], + ); + const notificationIdToMark = unreadNotifRes.rows[0].notification_id; + + const response = await request + .post(`/api/users/notifications/${notificationIdToMark}/mark-read`) + .set('Authorization', `Bearer ${authToken}`); + + expect(response.status).toBe(204); + + // Verify in the database + const verifyRes = await pool.query( + `SELECT is_read FROM public.notifications WHERE notification_id = $1`, + [notificationIdToMark], + ); + expect(verifyRes.rows[0].is_read).toBe(true); + }); + }); + + describe('POST /api/users/notifications/mark-all-read', () => { + it('should mark all unread notifications as read', async () => { + const response = await request + .post('/api/users/notifications/mark-all-read') + .set('Authorization', `Bearer ${authToken}`); + + expect(response.status).toBe(204); + + // Verify in the database + const finalUnreadCountRes = await getPool().query( + `SELECT COUNT(*) FROM public.notifications WHERE user_id = $1 AND is_read = false`, + [testUser.user.user_id], + ); + expect(Number(finalUnreadCountRes.rows[0].count)).toBe(0); + }); + }); +}); \ No newline at end of file diff --git a/src/tests/integration/public.routes.integration.test.ts b/src/tests/integration/public.routes.integration.test.ts index 5a96e523..c0ef5028 100644 --- a/src/tests/integration/public.routes.integration.test.ts +++ b/src/tests/integration/public.routes.integration.test.ts @@ -12,6 +12,7 @@ import type { UserProfile, } from '../../types'; import { getPool } from '../../services/db/connection.db'; +import { cleanupDb } from '../utils/cleanup'; import { createAndLoginUser } from '../utils/testHelpers'; /** @@ -25,6 +26,7 @@ describe('Public API Routes Integration Tests', () => { let testUser: UserProfile; let testRecipe: Recipe; let testFlyer: Flyer; + let testStoreId: number; beforeAll(async () => { const pool = getPool(); @@ -72,11 +74,11 @@ describe('Public API Routes Integration Tests', () => { const storeRes = await pool.query( `INSERT INTO public.stores (name) VALUES ('Public Routes Test Store') RETURNING store_id`, ); - const storeId = storeRes.rows[0].store_id; + testStoreId = storeRes.rows[0].store_id; const flyerRes = await pool.query( `INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum) VALUES ($1, 'public-routes-test.jpg', 'http://test.com/public-routes.jpg', 1, $2) RETURNING *`, - [storeId, `checksum-public-routes-${Date.now()}`], + [testStoreId, `checksum-public-routes-${Date.now()}`], ); testFlyer = flyerRes.rows[0]; @@ -88,16 +90,12 @@ describe('Public API Routes Integration Tests', () => { }); afterAll(async () => { - const pool = getPool(); - if (testRecipe) { - await pool.query('DELETE FROM public.recipes WHERE recipe_id = $1', [testRecipe.recipe_id]); - } - if (testUser) { - await pool.query('DELETE FROM public.users WHERE user_id = $1', [testUser.user.user_id]); - } - if (testFlyer) { - await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [testFlyer.flyer_id]); - } + await cleanupDb({ + userIds: testUser ? [testUser.user.user_id] : [], + recipeIds: testRecipe ? [testRecipe.recipe_id] : [], + flyerIds: testFlyer ? [testFlyer.flyer_id] : [], + storeIds: testStoreId ? [testStoreId] : [], + }); }); describe('Health Check Endpoints', () => { diff --git a/src/tests/integration/recipe.integration.test.ts b/src/tests/integration/recipe.integration.test.ts new file mode 100644 index 00000000..b536f6ed --- /dev/null +++ b/src/tests/integration/recipe.integration.test.ts @@ -0,0 +1,125 @@ +// src/tests/integration/recipe.integration.test.ts +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import supertest from 'supertest'; +import app from '../../../server'; +import { createAndLoginUser } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; +import type { UserProfile, Recipe, RecipeComment } from '../../types'; +import { getPool } from '../../services/db/connection.db'; + +/** + * @vitest-environment node + */ + +const request = supertest(app); + +describe('Recipe API Routes Integration Tests', () => { + let testUser: UserProfile; + let authToken: string; + let testRecipe: Recipe; + const createdUserIds: string[] = []; + const createdRecipeIds: number[] = []; + + beforeAll(async () => { + // Create a user to own the recipe and perform authenticated actions + const { user, token } = await createAndLoginUser({ + email: `recipe-user-${Date.now()}@example.com`, + fullName: 'Recipe Test User', + request, + }); + testUser = user; + authToken = token; + createdUserIds.push(user.user.user_id); + + // Create a recipe owned by the test user + const recipeRes = await getPool().query( + `INSERT INTO public.recipes (name, instructions, user_id, status, description) + VALUES ('Integration Test Recipe', '1. Do this. 2. Do that.', $1, 'public', 'A test recipe description.') + RETURNING *`, + [testUser.user.user_id], + ); + testRecipe = recipeRes.rows[0]; + createdRecipeIds.push(testRecipe.recipe_id); + }); + + afterAll(async () => { + // Clean up all created resources + await cleanupDb({ + userIds: createdUserIds, + recipeIds: createdRecipeIds, + }); + }); + + describe('GET /api/recipes/:recipeId', () => { + it('should fetch a single public recipe by its ID', async () => { + const response = await request.get(`/api/recipes/${testRecipe.recipe_id}`); + + expect(response.status).toBe(200); + expect(response.body).toBeDefined(); + expect(response.body.recipe_id).toBe(testRecipe.recipe_id); + expect(response.body.name).toBe('Integration Test Recipe'); + }); + + it('should return 404 for a non-existent recipe ID', async () => { + const response = await request.get('/api/recipes/999999'); + expect(response.status).toBe(404); + }); + }); + + // Placeholder for future tests + it('should allow an authenticated user to create a new recipe', async () => { + const newRecipeData = { + name: 'My New Awesome Recipe', + instructions: '1. Be awesome. 2. Make recipe.', + description: 'A recipe created during an integration test.', + }; + + const response = await request + .post('/api/recipes') // Authenticated recipe creation endpoint + .set('Authorization', `Bearer ${authToken}`) + .send(newRecipeData); + + // Assert the response from the POST request + expect(response.status).toBe(201); + const createdRecipe: Recipe = response.body; + expect(createdRecipe).toBeDefined(); + expect(createdRecipe.recipe_id).toBeTypeOf('number'); + expect(createdRecipe.name).toBe(newRecipeData.name); + expect(createdRecipe.user_id).toBe(testUser.user.user_id); + + // Add the new recipe ID to the cleanup array to ensure it's deleted after tests + createdRecipeIds.push(createdRecipe.recipe_id); + + // Verify the recipe can be fetched from the public endpoint + const verifyResponse = await request.get(`/api/recipes/${createdRecipe.recipe_id}`); + expect(verifyResponse.status).toBe(200); + expect(verifyResponse.body.name).toBe(newRecipeData.name); + }); + it('should allow an authenticated user to update their own recipe', async () => { + const recipeUpdates = { + name: 'Updated Integration Test Recipe', + instructions: '1. Do the new thing. 2. Do the other new thing.', + }; + + const response = await request + .put(`/api/users/recipes/${testRecipe.recipe_id}`) // Authenticated recipe update endpoint + .set('Authorization', `Bearer ${authToken}`) + .send(recipeUpdates); + + // Assert the response from the PUT request + expect(response.status).toBe(200); + const updatedRecipe: Recipe = response.body; + expect(updatedRecipe.name).toBe(recipeUpdates.name); + expect(updatedRecipe.instructions).toBe(recipeUpdates.instructions); + + // Verify the changes were persisted by fetching the recipe again + const verifyResponse = await request.get(`/api/recipes/${testRecipe.recipe_id}`); + expect(verifyResponse.status).toBe(200); + expect(verifyResponse.body.name).toBe(recipeUpdates.name); + }); + it.todo('should prevent a user from updating another user\'s recipe'); + it.todo('should allow an authenticated user to delete their own recipe'); + it.todo('should prevent a user from deleting another user\'s recipe'); + it.todo('should allow an authenticated user to post a comment on a recipe'); + it.todo('should allow an authenticated user to fork a recipe'); +}); \ No newline at end of file diff --git a/src/tests/integration/user.integration.test.ts b/src/tests/integration/user.integration.test.ts index 83ff0ee5..ff53ca2b 100644 --- a/src/tests/integration/user.integration.test.ts +++ b/src/tests/integration/user.integration.test.ts @@ -6,6 +6,7 @@ import { logger } from '../../services/logger.server'; import { getPool } from '../../services/db/connection.db'; import type { UserProfile, MasterGroceryItem, ShoppingList } from '../../types'; import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; /** * @vitest-environment node @@ -16,6 +17,7 @@ const request = supertest(app); describe('User API Routes Integration Tests', () => { let testUser: UserProfile; let authToken: string; + const createdUserIds: string[] = []; // Before any tests run, create a new user and log them in. // The token will be used for all subsequent API calls in this test suite. @@ -24,28 +26,13 @@ describe('User API Routes Integration Tests', () => { const { user, token } = await createAndLoginUser({ email, fullName: 'Test User', request }); testUser = user; authToken = token; + createdUserIds.push(user.user.user_id); }); // After all tests, clean up by deleting the created user. // This now cleans up ALL users created by this test suite to prevent pollution. afterAll(async () => { - const pool = getPool(); - try { - // Find all users created during this test run by their email pattern. - const res = await pool.query( - "SELECT user_id FROM public.users WHERE email LIKE 'user-test-%' OR email LIKE 'delete-me-%' OR email LIKE 'reset-me-%'", - ); - if (res.rows.length > 0) { - const userIds = res.rows.map((r) => r.user_id); - logger.debug( - `[user.integration.test.ts afterAll] Cleaning up ${userIds.length} test users...`, - ); - // Use a direct DB query for cleanup, which is faster and more reliable than API calls. - await pool.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]); - } - } catch (error) { - logger.error({ error }, 'Failed to clean up test users from database.'); - } + await cleanupDb({ userIds: createdUserIds }); }); it('should fetch the authenticated user profile via GET /api/users/profile', async () => { @@ -130,7 +117,8 @@ describe('User API Routes Integration Tests', () => { it('should allow a user to delete their own account and then fail to log in', async () => { // Arrange: Create a new, separate user just for this deletion test. const deletionEmail = `delete-me-${Date.now()}@example.com`; - const { token: deletionToken } = await createAndLoginUser({ email: deletionEmail, request }); + const { user: deletionUser, token: deletionToken } = await createAndLoginUser({ email: deletionEmail, request }); + createdUserIds.push(deletionUser.user.user_id); // Act: Call the delete endpoint with the correct password and token. const response = await request @@ -156,6 +144,7 @@ describe('User API Routes Integration Tests', () => { // Arrange: Create a new user for the password reset flow. const resetEmail = `reset-me-${Date.now()}@example.com`; const { user: resetUser } = await createAndLoginUser({ email: resetEmail, request }); + createdUserIds.push(resetUser.user.user_id); // Act 1: Request a password reset. In our test environment, the token is returned in the response. const resetRequestRawResponse = await request diff --git a/src/tests/integration/user.routes.integration.test.ts b/src/tests/integration/user.routes.integration.test.ts index 06de4529..3496705d 100644 --- a/src/tests/integration/user.routes.integration.test.ts +++ b/src/tests/integration/user.routes.integration.test.ts @@ -2,9 +2,9 @@ import { describe, it, expect, beforeAll, afterAll } from 'vitest'; import supertest from 'supertest'; import app from '../../../server'; -import { getPool } from '../../services/db/connection.db'; import type { UserProfile } from '../../types'; import { createAndLoginUser } from '../utils/testHelpers'; +import { cleanupDb } from '../utils/cleanup'; /** * @vitest-environment node @@ -29,10 +29,7 @@ describe('User Routes Integration Tests (/api/users)', () => { }); afterAll(async () => { - if (testUser) { - // Clean up the created user from the database - await getPool().query('DELETE FROM public.users WHERE user_id = $1', [testUser.user.user_id]); - } + await cleanupDb({ userIds: testUser ? [testUser.user.user_id] : [] }); }); describe('GET /api/users/profile', () => { diff --git a/src/tests/utils/cleanup.ts b/src/tests/utils/cleanup.ts new file mode 100644 index 00000000..0663656b --- /dev/null +++ b/src/tests/utils/cleanup.ts @@ -0,0 +1,74 @@ +// src/tests/utils/cleanup.ts +import { getPool } from '../../services/db/connection.db'; +import { logger } from '../../services/logger.server'; +import fs from 'node:fs/promises'; +import path from 'path'; + +export interface TestResourceIds { + userIds?: string[]; + flyerIds?: number[]; + storeIds?: number[]; + recipeIds?: number[]; + masterItemIds?: number[]; +} + +/** + * A robust cleanup utility for integration tests. + * It deletes entities in the correct order to avoid foreign key violations. + * It's designed to be called in an `afterAll` hook. + * + * @param ids An object containing arrays of IDs for each resource type to clean up. + */ +export const cleanupDb = async (ids: TestResourceIds) => { + const pool = getPool(); + logger.info('[Test Cleanup] Starting database resource cleanup...'); + + const { userIds = [], flyerIds = [], storeIds = [], recipeIds = [], masterItemIds = [] } = ids; + + try { + // --- Stage 1: Delete most dependent records --- + // These records depend on users, recipes, flyers, etc. + if (userIds.length > 0) { + await pool.query('DELETE FROM public.recipe_comments WHERE user_id = ANY($1::uuid[])', [userIds]); + await pool.query('DELETE FROM public.suggested_corrections WHERE user_id = ANY($1::uuid[])', [userIds]); + await pool.query('DELETE FROM public.shopping_lists WHERE user_id = ANY($1::uuid[])', [userIds]); // Assumes shopping_list_items cascades + await pool.query('DELETE FROM public.user_watched_items WHERE user_id = ANY($1::uuid[])', [userIds]); + await pool.query('DELETE FROM public.user_achievements WHERE user_id = ANY($1::uuid[])', [userIds]); + await pool.query('DELETE FROM public.user_activity_log WHERE user_id = ANY($1::uuid[])', [userIds]); + await pool.query('DELETE FROM public.refresh_tokens WHERE user_id = ANY($1::uuid[])', [userIds]); + await pool.query('DELETE FROM public.password_reset_tokens WHERE user_id = ANY($1::uuid[])', [userIds]); + } + + // --- Stage 2: Delete parent records that other things depend on --- + if (recipeIds.length > 0) { + await pool.query('DELETE FROM public.recipes WHERE recipe_id = ANY($1::int[])', [recipeIds]); + } + + // Flyers might be created by users, but we clean them up separately. + // flyer_items should cascade from this. + if (flyerIds.length > 0) { + await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::bigint[])', [flyerIds]); + } + + // Stores are parents of flyers, so they come after. + if (storeIds.length > 0) { + await pool.query('DELETE FROM public.stores WHERE store_id = ANY($1::int[])', [storeIds]); + } + + // Master items are parents of flyer_items and watched_items. + if (masterItemIds.length > 0) { + await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = ANY($1::int[])', [masterItemIds]); + } + + // --- Stage 3: Delete the root user records --- + if (userIds.length > 0) { + const { rowCount } = await pool.query('DELETE FROM public.users WHERE user_id = ANY($1::uuid[])', [userIds]); + logger.info(`[Test Cleanup] Cleaned up ${rowCount} user(s).`); + } + + logger.info('[Test Cleanup] Finished database resource cleanup successfully.'); + } catch (error) { + logger.error({ error }, '[Test Cleanup] CRITICAL: An error occurred during database cleanup.'); + throw error; // Re-throw to fail the test suite + } +}; \ No newline at end of file diff --git a/src/tests/utils/cleanupFiles.ts b/src/tests/utils/cleanupFiles.ts new file mode 100644 index 00000000..2f85274e --- /dev/null +++ b/src/tests/utils/cleanupFiles.ts @@ -0,0 +1,48 @@ +// src/tests/utils/cleanupFiles.ts +import fs from 'node:fs/promises'; +import path from 'path'; +import { logger } from '../../services/logger.server'; + +/** + * Safely cleans up files from the filesystem. + * Designed to be used in `afterAll` or `afterEach` hooks in integration tests. + * + * @param filePaths An array of file paths to clean up. + */ +export const cleanupFiles = async (filePaths: string[]) => { + if (!filePaths || filePaths.length === 0) { + logger.info('[Test Cleanup] No file paths provided for cleanup.'); + return; + } + + logger.info(`[Test Cleanup] Starting filesystem cleanup for ${filePaths.length} file(s)...`); + + try { + await Promise.all( + filePaths.map(async (filePath) => { + try { + await fs.unlink(filePath); + logger.debug(`[Test Cleanup] Successfully deleted file: ${filePath}`); + } catch (err: any) { + // Ignore "file not found" errors, but log other errors. + if (err.code === 'ENOENT') { + logger.debug(`[Test Cleanup] File not found, skipping: ${filePath}`); + } else { + logger.warn( + { err, filePath }, + '[Test Cleanup] Failed to clean up file from filesystem.', + ); + } + } + }), + ); + + logger.info('[Test Cleanup] Finished filesystem cleanup successfully.'); + } catch (error) { + logger.error( + { error }, + '[Test Cleanup] CRITICAL: An error occurred during filesystem cleanup.', + ); + throw error; // Re-throw to fail the test suite if cleanup fails + } +}; \ No newline at end of file diff --git a/src/types/exif-parser.d.ts b/src/types/exif-parser.d.ts new file mode 100644 index 00000000..e4fadad4 --- /dev/null +++ b/src/types/exif-parser.d.ts @@ -0,0 +1,8 @@ +// src/types/exif-parser.d.ts + +/** + * This declaration file provides a basic module definition for 'exif-parser', + * which does not ship with its own TypeScript types. This allows TypeScript + * to recognize it as a module and avoids "implicit any" errors. + */ +declare module 'exif-parser'; \ No newline at end of file diff --git a/src/utils/authUtils.test.ts b/src/utils/authUtils.test.ts new file mode 100644 index 00000000..196b99c3 --- /dev/null +++ b/src/utils/authUtils.test.ts @@ -0,0 +1,102 @@ +// src/utils/authUtils.test.ts +import { describe, it, expect, vi } from 'vitest'; +import zxcvbn from 'zxcvbn'; +import { validatePasswordStrength } from './authUtils'; + +// Mock the zxcvbn library to control its output for tests +vi.mock('zxcvbn'); + +// Helper function to create a complete mock zxcvbn result, satisfying the type. +const createMockZxcvbnResult = ( + score: 0 | 1 | 2 | 3 | 4, + suggestions: string[] = [], +): zxcvbn.ZXCVBNResult => ({ + score, + feedback: { + suggestions, + warning: '', + }, + // Add dummy values for the other required properties to satisfy the type. + guesses: 1, + guesses_log10: 1, + crack_times_seconds: { + online_throttling_100_per_hour: 1, + online_no_throttling_10_per_second: 1, + offline_slow_hashing_1e4_per_second: 1, + offline_fast_hashing_1e10_per_second: 1, + }, + crack_times_display: { + online_throttling_100_per_hour: '1 second', + online_no_throttling_10_per_second: '1 second', + offline_slow_hashing_1e4_per_second: '1 second', + offline_fast_hashing_1e10_per_second: '1 second', + }, + sequence: [], + calc_time: 1, +}); + +describe('validatePasswordStrength', () => { + it('should return invalid for a very weak password (score 0)', () => { + // Arrange: Mock zxcvbn to return a score of 0 and specific feedback + vi.mocked(zxcvbn).mockReturnValue( + createMockZxcvbnResult(0, ['Add more words', 'Use a longer password']), + ); + + // Act + const result = validatePasswordStrength('password'); + + // Assert + expect(result.isValid).toBe(false); + expect(result.feedback).toBe('Password is too weak. Add more words Use a longer password'); + }); + + it('should return invalid for a weak password (score 1)', () => { + // Arrange: Mock zxcvbn to return a score of 1 + vi.mocked(zxcvbn).mockReturnValue(createMockZxcvbnResult(1, ['Avoid common words'])); + + // Act + const result = validatePasswordStrength('password123'); + + // Assert + expect(result.isValid).toBe(false); + expect(result.feedback).toBe('Password is too weak. Avoid common words'); + }); + + it('should return invalid for a medium password (score 2)', () => { + // Arrange: Mock zxcvbn to return a score of 2 + vi.mocked(zxcvbn).mockReturnValue( + createMockZxcvbnResult(2, ['Add another symbol or number']), + ); + + // Act + const result = validatePasswordStrength('Password123'); + + // Assert + expect(result.isValid).toBe(false); + expect(result.feedback).toBe('Password is too weak. Add another symbol or number'); + }); + + it('should return valid for a good password (score 3)', () => { + // Arrange: Mock zxcvbn to return a score of 3 (the minimum required) + vi.mocked(zxcvbn).mockReturnValue(createMockZxcvbnResult(3)); + + // Act + const result = validatePasswordStrength('a-Strong-Password!'); + + // Assert + expect(result.isValid).toBe(true); + expect(result.feedback).toBe(''); + }); + + it('should return valid for a very strong password (score 4)', () => { + // Arrange: Mock zxcvbn to return a score of 4 + vi.mocked(zxcvbn).mockReturnValue(createMockZxcvbnResult(4)); + + // Act + const result = validatePasswordStrength('a-Very-Strong-Password-123!'); + + // Assert + expect(result.isValid).toBe(true); + expect(result.feedback).toBe(''); + }); +}); \ No newline at end of file diff --git a/src/utils/fileUtils.test.ts b/src/utils/fileUtils.test.ts new file mode 100644 index 00000000..2192f4e1 --- /dev/null +++ b/src/utils/fileUtils.test.ts @@ -0,0 +1,97 @@ +// src/utils/fileUtils.test.ts +import { describe, it, expect, vi, beforeEach, Mocked } from 'vitest'; +import fs from 'node:fs/promises'; +import { logger } from '../services/logger.server'; +import { cleanupUploadedFile, cleanupUploadedFiles } from './fileUtils'; + +// Mock dependencies +vi.mock('node:fs/promises', () => ({ + default: { + unlink: vi.fn(), + }, +})); + +vi.mock('../services/logger.server', () => ({ + logger: { + warn: vi.fn(), + }, +})); + +// Cast the mocked imports for type safety +const mockedFs = fs as Mocked; +const mockedLogger = logger as Mocked; + +describe('fileUtils', () => { + beforeEach(() => { + // Clear mock history before each test + vi.clearAllMocks(); + }); + + describe('cleanupUploadedFile', () => { + it('should call fs.unlink with the correct file path', async () => { + const mockFile = { path: '/tmp/test-file.jpg' } as Express.Multer.File; + mockedFs.unlink.mockResolvedValue(undefined); + + await cleanupUploadedFile(mockFile); + + expect(mockedFs.unlink).toHaveBeenCalledWith('/tmp/test-file.jpg'); + }); + + it('should not call fs.unlink if the file is undefined', async () => { + await cleanupUploadedFile(undefined); + expect(mockedFs.unlink).not.toHaveBeenCalled(); + }); + + it('should log a warning and not throw if fs.unlink fails', async () => { + const mockFile = { path: '/tmp/non-existent-file.jpg' } as Express.Multer.File; + const unlinkError = new Error('ENOENT: no such file or directory'); + mockedFs.unlink.mockRejectedValue(unlinkError); + + // Use a try-catch to ensure no error is thrown from the function itself + let didThrow = false; + try { + await cleanupUploadedFile(mockFile); + } catch { + didThrow = true; + } + + expect(didThrow).toBe(false); + expect(mockedLogger.warn).toHaveBeenCalledWith( + { err: unlinkError, filePath: mockFile.path }, + 'Failed to clean up uploaded file.', + ); + }); + }); + + describe('cleanupUploadedFiles', () => { + const mockFiles = [ + { path: '/tmp/file1.jpg' }, + { path: '/tmp/file2.png' }, + ] as Express.Multer.File[]; + + it('should call fs.unlink for each file in the array', async () => { + mockedFs.unlink.mockResolvedValue(undefined); + + await cleanupUploadedFiles(mockFiles); + + expect(mockedFs.unlink).toHaveBeenCalledTimes(2); + expect(mockedFs.unlink).toHaveBeenCalledWith('/tmp/file1.jpg'); + expect(mockedFs.unlink).toHaveBeenCalledWith('/tmp/file2.png'); + }); + + it('should not call fs.unlink if the files array is undefined', async () => { + await cleanupUploadedFiles(undefined); + expect(mockedFs.unlink).not.toHaveBeenCalled(); + }); + + it('should not call fs.unlink if the input is not an array', async () => { + await cleanupUploadedFiles({ not: 'an array' } as unknown as Express.Multer.File[]); + expect(mockedFs.unlink).not.toHaveBeenCalled(); + }); + + it('should handle an empty array gracefully', async () => { + await cleanupUploadedFiles([]); + expect(mockedFs.unlink).not.toHaveBeenCalled(); + }); + }); +}); \ No newline at end of file