Compare commits

...

4 Commits

Author         SHA1        Message                                              Date
Gitea Actions  23830c0d4e  ci: Bump version to 0.9.69 [skip ci]                 2026-01-09 17:24:00 +05:00
               ef42fee982  integration test fixes - claude for the win? try 3   2026-01-09 04:23:23 -08:00
                           (Deploy to Test Environment / deploy-to-test (push): successful in 32m3s)
Gitea Actions  65cb54500c  ci: Bump version to 0.9.68 [skip ci]                 2026-01-09 16:42:51 +05:00
               664ad291be  integration test fixes - claude for the win? try 3   2026-01-09 03:41:57 -08:00
                           (Deploy to Test Environment / deploy-to-test (push): successful in 30m3s)
29 changed files with 1179 additions and 86 deletions

View File

@@ -56,7 +56,10 @@
"mcp__memory__delete_entities",
"mcp__sequential-thinking__sequentialthinking",
"mcp__filesystem__list_directory",
"mcp__filesystem__read_multiple_files"
"mcp__filesystem__read_multiple_files",
"mcp__filesystem__directory_tree",
"mcp__filesystem__read_text_file",
"Bash(wc:*)"
]
}
}

View File

@@ -96,6 +96,23 @@ jobs:
# It prevents the accumulation of duplicate processes from previous test runs.
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.name && p.name.endsWith('-test')) { console.log('Deleting test process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id, e.message); } } }); console.log('✅ Test process cleanup complete.'); } catch (e) { if (e.stdout.toString().includes('No process found')) { console.log('No PM2 processes running, cleanup not needed.'); } else { console.error('Error cleaning up test processes:', e.message); } }" || true
- name: Flush Redis Before Tests
# CRITICAL: Clear all Redis data to remove stale BullMQ jobs from previous test runs.
# This prevents old jobs with outdated error messages from polluting test results.
env:
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
run: |
echo "--- Flushing Redis database to remove stale jobs ---"
if [ -z "$REDIS_PASSWORD" ]; then
echo "⚠️ REDIS_PASSWORD_TEST not set, attempting flush without password..."
redis-cli FLUSHDB || echo "Redis flush failed (no password)"
else
redis-cli -a "$REDIS_PASSWORD" FLUSHDB 2>/dev/null && echo "✅ Redis database flushed successfully." || echo "⚠️ Redis flush failed"
fi
# Verify the flush worked by checking key count
KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
echo "Redis key count after flush: $KEY_COUNT"
- name: Run All Tests and Generate Merged Coverage Report
# This single step runs both unit and integration tests, then merges their
# coverage data into a single report. It combines the environment variables

View File

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
## Context
@@ -16,3 +16,82 @@ We will implement a dedicated background job processing system using a task queu
**Positive**: Decouples the API from heavy processing, allows for retries on failure, and enables scaling the processing workers independently. Increases application reliability and resilience.
**Negative**: Introduces a new dependency (Redis) into the infrastructure. Requires refactoring of the flyer processing logic to work within a job queue structure.
## Implementation Details
### Queue Infrastructure
The implementation uses **BullMQ v5.65.1** with **ioredis v5.8.2** for Redis connectivity. Six distinct queues handle different job types:
| Queue Name | Purpose | Retry Attempts | Backoff Strategy |
| ---------------------------- | --------------------------- | -------------- | ---------------------- |
| `flyer-processing` | OCR/AI processing of flyers | 3 | Exponential (5s base) |
| `email-sending` | Email delivery | 5 | Exponential (10s base) |
| `analytics-reporting` | Daily report generation | 2 | Exponential (60s base) |
| `weekly-analytics-reporting` | Weekly report generation | 2 | Exponential (1h base) |
| `file-cleanup` | Temporary file cleanup | 3 | Exponential (30s base) |
| `token-cleanup` | Expired token removal | 2 | Exponential (1h base) |
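For illustration, a queue definition matching the first row of the table might look like the following sketch (the connection handling and option values are assumptions; the project's actual definitions live in `src/services/queues.server.ts`, which is not shown here):
```typescript
// Hypothetical sketch of a BullMQ queue matching the table above.
// Connection setup is an assumption; see src/services/queues.server.ts.
import { Queue } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379', {
  maxRetriesPerRequest: null, // BullMQ requires this for its blocking connections
});

export const flyerQueue = new Queue('flyer-processing', {
  connection,
  defaultJobOptions: {
    attempts: 3, // retry attempts from the table
    backoff: { type: 'exponential', delay: 5_000 }, // 5s base backoff
  },
});
```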
### Key Files
- `src/services/queues.server.ts` - Queue definitions and configuration
- `src/services/workers.server.ts` - Worker implementations with configurable concurrency
- `src/services/redis.server.ts` - Redis connection management
- `src/services/queueService.server.ts` - Queue lifecycle and graceful shutdown
- `src/services/flyerProcessingService.server.ts` - 5-stage flyer processing pipeline
- `src/types/job-data.ts` - TypeScript interfaces for all job data types
### API Design
Endpoints for long-running tasks return **202 Accepted** immediately with a job ID:
```text
POST /api/ai/upload-and-process → 202 { jobId: "..." }
GET /api/ai/jobs/:jobId/status → { state: "...", progress: ... }
```
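A minimal Express handler implementing this contract could look as follows (route wiring and job payload shape are assumptions, not the project's actual code):
```typescript
// Hypothetical sketch of the 202 Accepted pattern shown above.
import { Router, type Request, type Response, type NextFunction } from 'express';
import { flyerQueue } from '../services/queues.server'; // assumed export

const router = Router();

router.post('/upload-and-process', async (req: Request, res: Response, next: NextFunction) => {
  try {
    // Enqueue the heavy work and return immediately with the job ID.
    const job = await flyerQueue.add('process-flyer', req.body); // payload shape is an assumption
    res.status(202).json({ jobId: job.id });
  } catch (err) {
    next(err);
  }
});
```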
### Worker Configuration
Workers are configured via environment variables:
- `WORKER_CONCURRENCY` - Flyer processing parallelism (default: 1)
- `EMAIL_WORKER_CONCURRENCY` - Email worker parallelism (default: 10)
- `ANALYTICS_WORKER_CONCURRENCY` - Analytics worker parallelism (default: 1)
- `CLEANUP_WORKER_CONCURRENCY` - Cleanup worker parallelism (default: 10)
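A sketch of how such a variable might be wired into a BullMQ worker (the processor body is elided and the import paths are assumptions):
```typescript
// Hypothetical worker wiring WORKER_CONCURRENCY into BullMQ concurrency.
import { Worker } from 'bullmq';
import { connection } from '../services/redis.server'; // real export per cacheService.server.ts

export const flyerWorker = new Worker(
  'flyer-processing',
  async (job) => {
    // The 5-stage flyer processing pipeline would run here (see flyerProcessingService).
  },
  {
    connection,
    concurrency: Number(process.env.WORKER_CONCURRENCY ?? 1), // default: 1
  },
);
```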
### Monitoring
- **Bull Board UI** available at `/api/admin/jobs` for admin users
- Worker status endpoint: `GET /api/admin/workers/status`
- Queue status endpoint: `GET /api/admin/queues/status`
### Graceful Shutdown
Both API and worker processes implement graceful shutdown with a 30-second timeout, ensuring in-flight jobs complete before process termination.
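A hedged sketch of that shutdown sequence, reusing the names from the sketches above (the exact close order and signal handling are assumptions):
```typescript
// Hypothetical 30-second graceful shutdown, per the description above.
async function shutdownGracefully(): Promise<void> {
  const hardStop = setTimeout(() => process.exit(1), 30_000); // force-exit after 30s
  await flyerWorker.close(); // by default, close() waits for in-flight jobs
  await flyerQueue.close();
  clearTimeout(hardStop);
  process.exit(0);
}

process.on('SIGTERM', () => void shutdownGracefully());
process.on('SIGINT', () => void shutdownGracefully());
```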
## Compliance Notes
### Deprecated Synchronous Endpoints
The following endpoints process flyers synchronously and are **deprecated**:
- `POST /api/ai/upload-legacy` - For integration testing only
- `POST /api/ai/flyers/process` - Legacy workflow, should migrate to queue-based approach
New integrations MUST use `POST /api/ai/upload-and-process` for queue-based processing.
### Email Handling
- **Bulk emails** (deal notifications): Enqueued via `emailQueue`
- **Transactional emails** (password reset): Sent synchronously for immediate user feedback
## Future Enhancements
Potential improvements for consideration:
1. **Dead Letter Queue (DLQ)**: Move permanently failed jobs to a dedicated queue for analysis
2. **Job Priority Levels**: Allow priority-based processing for different job types
3. **Real-time Progress**: WebSocket/SSE for live job progress updates to clients
4. **Per-Queue Rate Limiting**: Throttle job processing based on external API limits
5. **Job Dependencies**: Support for jobs that depend on completion of other jobs
6. **Prometheus Metrics**: Export queue metrics for observability dashboards

View File

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
## Context
@@ -20,3 +20,107 @@ We will implement a multi-layered caching strategy using an in-memory data store
**Positive**: Directly addresses application performance and scalability. Reduces database load and improves API response times for common requests.
**Negative**: Introduces Redis as a dependency if not already used. Adds complexity to the data-fetching logic and requires careful management of cache invalidation to prevent stale data.
## Implementation Details
### Cache Service
A centralized cache service (`src/services/cacheService.server.ts`) provides reusable caching functionality:
- **`getOrSet<T>(key, fetcher, options)`**: Cache-aside pattern implementation
- **`get<T>(key)`**: Retrieve cached value
- **`set<T>(key, value, ttl)`**: Store value with TTL
- **`del(key)`**: Delete specific key
- **`invalidatePattern(pattern)`**: Delete keys matching a pattern
All cache operations are fail-safe: cache failures do not break the application.
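As a usage illustration (the fetcher function is hypothetical; only the service API above is real):
```typescript
import type { Logger } from 'pino';
import { cacheService, CACHE_TTL, CACHE_PREFIX } from './cacheService.server';

// Hypothetical fetcher standing in for a real repository query.
declare function queryBrandsFromDb(): Promise<unknown[]>;

async function getBrandsCached(logger: Logger) {
  // On a hit this returns the cached JSON; on a miss it runs the fetcher and caches the result.
  return cacheService.getOrSet(CACHE_PREFIX.BRANDS, queryBrandsFromDb, {
    ttl: CACHE_TTL.BRANDS,
    logger,
  });
}
```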
### TTL Configuration
Different data types use different TTL values based on volatility:
| Data Type         | TTL        | Rationale                             |
| ----------------- | ---------- | ------------------------------------- |
| Brands/Stores     | 1 hour     | Rarely changes, safe to cache longer  |
| Flyer lists       | 5 minutes  | Changes when new flyers are added     |
| Individual flyers | 10 minutes | Stable once created                   |
| Flyer items       | 10 minutes | Stable once created                   |
| Statistics        | 5 minutes  | Can be slightly stale                 |
| Frequent sales    | 15 minutes | Aggregated data, updated periodically |
| Categories        | 1 hour     | Rarely changes                        |
### Cache Key Strategy
Cache keys follow a consistent prefix pattern for pattern-based invalidation:
- `cache:brands` - All brands list
- `cache:flyers:{limit}:{offset}` - Paginated flyer lists
- `cache:flyer:{id}` - Individual flyer data
- `cache:flyer-items:{flyerId}` - Items for a specific flyer
- `cache:stats:*` - Statistics data
- `geocode:{address}` - Geocoding results (30-day TTL)
### Cached Endpoints
The following repository methods implement server-side caching:
| Method | Cache Key Pattern | TTL |
| ------ | ----------------- | --- |
| `FlyerRepository.getAllBrands()` | `cache:brands` | 1 hour |
| `FlyerRepository.getFlyers()` | `cache:flyers:{limit}:{offset}` | 5 minutes |
| `FlyerRepository.getFlyerItems()` | `cache:flyer-items:{flyerId}` | 10 minutes |
### Cache Invalidation
**Event-based invalidation** is triggered on write operations:
- **Flyer creation** (`FlyerPersistenceService.saveFlyer`): Invalidates all `cache:flyers*` keys
- **Flyer deletion** (`FlyerRepository.deleteFlyer`): Invalidates specific flyer and flyer items cache, plus flyer lists
**Manual invalidation** via admin endpoints:
- `POST /api/admin/system/clear-cache` - Clears all application cache (flyers, brands, stats)
- `POST /api/admin/system/clear-geocode-cache` - Clears geocoding cache
### Client-Side Caching
TanStack React Query provides client-side caching with configurable stale times:
| Query Type | Stale Time |
| ----------------- | ----------- |
| Categories | 1 hour |
| Master Items | 10 minutes |
| Flyer Items | 5 minutes |
| Flyers | 2 minutes |
| Shopping Lists | 1 minute |
| Activity Log | 30 seconds |
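For illustration, these stale times map onto TanStack Query options roughly like this (the hook and fetcher are assumptions; only the stale times come from the table above):
```typescript
// Hypothetical React Query setup reflecting the stale times above.
import { QueryClient, useQuery } from '@tanstack/react-query';

export const queryClient = new QueryClient({
  defaultOptions: { queries: { staleTime: 2 * 60 * 1000 } }, // flyers: 2 minutes
});

// Hypothetical fetcher; the project's actual API client is not shown here.
const fetchCategories = () => fetch('/api/categories').then((res) => res.json());

export function useCategories() {
  return useQuery({
    queryKey: ['categories'],
    queryFn: fetchCategories,
    staleTime: 60 * 60 * 1000, // categories: 1 hour, per the table above
  });
}
```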
### Multi-Layer Cache Architecture
```text
Client Request
[TanStack React Query] ← Client-side cache (staleTime-based)
[Express API]
[CacheService.getOrSet()] ← Server-side Redis cache (TTL-based)
[PostgreSQL Database]
```
## Key Files
- `src/services/cacheService.server.ts` - Centralized cache service
- `src/services/db/flyer.db.ts` - Repository with caching for brands, flyers, flyer items
- `src/services/flyerPersistenceService.server.ts` - Cache invalidation on flyer creation
- `src/routes/admin.routes.ts` - Admin cache management endpoints
- `src/config/queryClient.ts` - Client-side query cache configuration
## Future Enhancements
1. **Recipe caching**: Add caching to expensive recipe queries (by-sale-percentage, etc.)
2. **Cache warming**: Pre-populate cache on startup for frequently accessed static data
3. **Cache metrics**: Add hit/miss rate monitoring for observability
4. **Conditional caching**: Skip cache for authenticated user-specific data
5. **Cache compression**: Compress large cached payloads to reduce Redis memory usage

View File

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Accepted
## Context
@@ -14,9 +14,305 @@ We will formalize the testing pyramid for the project, defining the role of each
1. **Unit Tests (Vitest)**: For isolated functions, components, and repository methods with mocked dependencies. High coverage is expected.
2. **Integration Tests (Supertest)**: For API routes, testing the interaction between controllers, services, and mocked database layers. Focus on contract and middleware correctness.
3. **End-to-End (E2E) Tests (Playwright/Cypress)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real browser and a test database to ensure the entire system works together.
3. **End-to-End (E2E) Tests (Vitest + Supertest)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real test server and database to ensure the entire system works together.
## Consequences
**Positive**: Ensures a consistent and comprehensive approach to quality assurance. Gives developers confidence when refactoring or adding new features. Clearly defines "done" for a new feature.
**Negative**: May require investment in setting up and maintaining the E2E testing environment. Can slightly increase the time required to develop a feature if all test layers are required.
## Implementation Details
### Testing Framework Stack
| Tool | Version | Purpose |
| ---- | ------- | ------- |
| Vitest | 4.0.15 | Test runner for all test types |
| @testing-library/react | 16.3.0 | React component testing |
| @testing-library/jest-dom | 6.9.1 | DOM assertion matchers |
| supertest | 7.1.4 | HTTP assertion library for API testing |
| msw | 2.12.3 | Mock Service Worker for network mocking |
| testcontainers | 11.8.1 | Database containerization (optional) |
| c8 + nyc | 10.1.3 / 17.1.0 | Coverage reporting |
### Test File Organization
```text
src/
├── components/
│ └── *.test.tsx # Component unit tests (colocated)
├── hooks/
│ └── *.test.ts # Hook unit tests (colocated)
├── services/
│ └── *.test.ts # Service unit tests (colocated)
├── routes/
│ └── *.test.ts # Route handler unit tests (colocated)
├── utils/
│ └── *.test.ts # Utility function tests (colocated)
└── tests/
├── setup/ # Test configuration and setup files
├── utils/ # Test utilities, factories, helpers
├── assets/ # Test fixtures (images, files)
├── integration/ # Integration test files (*.test.ts)
└── e2e/ # End-to-end test files (*.e2e.test.ts)
```
**Naming Convention**: `{filename}.test.ts` or `{filename}.test.tsx` for unit/integration, `{filename}.e2e.test.ts` for E2E.
### Configuration Files
| Config | Environment | Purpose |
| ------ | ----------- | ------- |
| `vite.config.ts` | jsdom | Unit tests (React components, hooks) |
| `vitest.config.integration.ts` | node | Integration tests (API routes) |
| `vitest.config.e2e.ts` | node | E2E tests (full user flows) |
| `vitest.workspace.ts` | - | Orchestrates all test projects |
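A sketch of what the integration config might contain (option values are assumptions aligned with the timeouts documented below; the real file may differ):
```typescript
// Hypothetical vitest.config.integration.ts; the actual project config may differ.
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    environment: 'node',
    include: ['src/tests/integration/**/*.test.ts'],
    testTimeout: 60_000, // 60s for AI service calls and DB operations
    globalSetup: ['src/tests/setup/integration-global-setup.ts'],
  },
});
```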
### Test Pyramid
```text
        ┌─────────────┐
        │     E2E     │  5 test files
        │    Tests    │  Critical user flows
        ├─────────────┤
        │ Integration │  17 test files
        │    Tests    │  API contracts + middleware
    ┌───┴─────────────┴───┐
    │      Unit Tests     │  185 test files
    │  Components, Hooks, │  Isolated functions
    │   Services, Utils   │  Mocked dependencies
    └─────────────────────┘
```
### Unit Tests
**Purpose**: Test isolated functions, components, and modules with mocked dependencies.
**Environment**: jsdom (browser-like)
**Key Patterns**:
```typescript
// Component testing with providers
import { renderWithProviders, screen } from '@/tests/utils/renderWithProviders';
describe('MyComponent', () => {
it('renders correctly', () => {
renderWithProviders(<MyComponent />);
expect(screen.getByText('Hello')).toBeInTheDocument();
});
});
```
```typescript
// Hook testing
import { renderHook, waitFor } from '@testing-library/react';
import { useMyHook } from './useMyHook';
describe('useMyHook', () => {
it('returns expected value', async () => {
const { result } = renderHook(() => useMyHook());
await waitFor(() => expect(result.current.data).toBeDefined());
});
});
```
**Global Mocks** (automatically applied via `tests-setup-unit.ts`):
- Database connections (`pg.Pool`)
- AI services (`@google/genai`)
- Authentication (`jsonwebtoken`, `bcrypt`)
- Logging (`logger.server`, `logger.client`)
- Notifications (`notificationService`)
### Integration Tests
**Purpose**: Test API routes with real service interactions and database.
**Environment**: node
**Setup**: Real Express server on port 3001, real PostgreSQL database
```typescript
// API route testing pattern
import supertest from 'supertest';
import { createAndLoginUser } from '@/tests/utils/testHelpers';
describe('Auth API', () => {
let request: ReturnType<typeof supertest>;
let authToken: string;
beforeAll(async () => {
const app = (await import('../../../server')).default;
request = supertest(app);
const { token } = await createAndLoginUser(request);
authToken = token;
});
it('GET /api/auth/me returns user profile', async () => {
const response = await request
.get('/api/auth/me')
.set('Authorization', `Bearer ${authToken}`);
expect(response.status).toBe(200);
expect(response.body.user.email).toBeDefined();
});
});
```
**Database Cleanup**:
```typescript
import { cleanupDb } from '@/tests/utils/cleanup';
afterAll(async () => {
await cleanupDb({ users: [testUserId] });
});
```
### E2E Tests
**Purpose**: Test complete user journeys through the application.
**Timeout**: 120 seconds (for long-running flows)
**Current E2E Tests**:
- `auth.e2e.test.ts` - Registration, login, password reset
- `flyer-upload.e2e.test.ts` - Complete flyer upload pipeline
- `user-journey.e2e.test.ts` - Full user workflow
- `admin-authorization.e2e.test.ts` - Admin-specific flows
- `admin-dashboard.e2e.test.ts` - Admin dashboard functionality
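An E2E-style test in this stack might look like the following sketch (the register/login endpoints and response shape are assumptions, not the project's actual API):
```typescript
// Hypothetical E2E flow against the real test server; endpoints are assumed.
import { describe, it, expect, beforeAll } from 'vitest';
import supertest from 'supertest';

describe('Auth E2E (sketch)', () => {
  let request: ReturnType<typeof supertest>;

  beforeAll(async () => {
    const app = (await import('../../../server')).default;
    request = supertest(app);
  });

  it('registers and logs in a new user', async () => {
    const email = `e2e-${Date.now()}@example.com`; // unique per run (see isolation guidelines)
    const password = 'S3cure-Pa55word!';
    await request.post('/api/auth/register').send({ email, password }).expect(201);
    const login = await request.post('/api/auth/login').send({ email, password });
    expect(login.status).toBe(200);
    expect(login.body.token).toBeDefined();
  }, 120_000); // E2E timeout, per the timeout table below
});
```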
### Mock Factories
The project uses comprehensive mock factories (`src/tests/utils/mockFactories.ts`, 1553 lines) for creating test data:
```typescript
import {
createMockUser,
createMockFlyer,
createMockFlyerItem,
createMockRecipe,
resetMockIds,
} from '@/tests/utils/mockFactories';
beforeEach(() => {
resetMockIds(); // Ensure deterministic IDs
});
it('creates flyer with items', () => {
const flyer = createMockFlyer({ store_name: 'TestMart' });
const items = [createMockFlyerItem({ flyer_id: flyer.flyer_id })];
// ...
});
```
**Factory Coverage**: 90+ factory functions for all domain entities including users, flyers, recipes, shopping lists, budgets, achievements, etc.
### Test Utilities
| Utility | Purpose |
| ------- | ------- |
| `renderWithProviders()` | Wrap components with AppProviders + Router |
| `createAndLoginUser()` | Create user and return auth token |
| `cleanupDb()` | Database cleanup respecting FK constraints |
| `createTestApp()` | Create Express app for route testing |
| `poll()` | Polling utility for async operations |
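For illustration, a generic `poll()` helper could be implemented like this (the project's actual helper in `src/tests/utils/testHelpers.ts` is not shown and may differ):
```typescript
// Hypothetical poll() implementation; the actual signature may differ.
export async function poll<T>(
  fn: () => Promise<T>,
  isDone: (value: T) => boolean,
  { intervalMs = 500, timeoutMs = 30_000 } = {},
): Promise<T> {
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    const value = await fn();
    if (isDone(value)) return value;
    if (Date.now() >= deadline) {
      throw new Error(`poll() timed out after ${timeoutMs}ms`);
    }
    // Wait before the next attempt.
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
}
```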
### Coverage Configuration
**Coverage Provider**: v8 (built into Vitest)
**Report Directories**:
- `.coverage/unit/` - Unit test coverage
- `.coverage/integration/` - Integration test coverage
- `.coverage/e2e/` - E2E test coverage
**Excluded from Coverage**:
- `src/index.tsx`, `src/main.tsx` (entry points)
- `src/tests/**` (test files themselves)
- `src/**/*.d.ts` (type declarations)
- `src/components/icons/**` (icon components)
- `src/db/seed*.ts` (database seeding scripts)
### npm Scripts
```bash
# Run all tests
npm run test
# Run by level
npm run test:unit # Unit tests only (jsdom)
npm run test:integration # Integration tests only (node)
# With coverage
npm run test:coverage # Unit + Integration with reports
# Clean coverage directories
npm run clean
```
### Test Timeouts
| Test Type | Timeout | Rationale |
| --------- | ------- | --------- |
| Unit | 5 seconds | Fast, isolated tests |
| Integration | 60 seconds | AI service calls, DB operations |
| E2E | 120 seconds | Full user flow with multiple API calls |
## Best Practices
### When to Write Each Test Type
1. **Unit Tests** (required):
- Pure functions and utilities
- React components (rendering, user interactions)
- Custom hooks
- Service methods with mocked dependencies
- Repository methods
2. **Integration Tests** (required for API changes):
- New API endpoints
- Authentication/authorization flows
- Middleware behavior
- Database query correctness
3. **E2E Tests** (for critical paths):
- User registration and login
- Core business flows (flyer upload, shopping lists)
- Admin operations
### Test Isolation Guidelines
1. **Reset mock IDs**: Call `resetMockIds()` in `beforeEach()`
2. **Unique test data**: Use timestamps or UUIDs for emails/usernames
3. **Clean up after tests**: Use `cleanupDb()` in `afterAll()`
4. **Don't share state**: Each test should be independent
### Mocking Guidelines
1. **Unit tests**: Mock external dependencies (DB, APIs, services)
2. **Integration tests**: Mock only external APIs (AI services)
3. **E2E tests**: Minimal mocking, use real services where possible
## Key Files
- `vite.config.ts` - Unit test configuration
- `vitest.config.integration.ts` - Integration test configuration
- `vitest.config.e2e.ts` - E2E test configuration
- `vitest.workspace.ts` - Workspace orchestration
- `src/tests/setup/tests-setup-unit.ts` - Global mocks (488 lines)
- `src/tests/setup/integration-global-setup.ts` - Server + DB setup
- `src/tests/utils/mockFactories.ts` - Mock factories (1553 lines)
- `src/tests/utils/testHelpers.ts` - Test utilities
## Future Enhancements
1. **Browser E2E Tests**: Consider adding Playwright for actual browser testing
2. **Visual Regression**: Screenshot comparison for UI components
3. **Performance Testing**: Add benchmarks for critical paths
4. **Mutation Testing**: Verify test quality with mutation testing tools
5. **Coverage Thresholds**: Define minimum coverage requirements per module

View File

@@ -2,7 +2,7 @@
**Date**: 2025-12-12
**Status**: Proposed
**Status**: Partially Implemented
## Context
@@ -16,3 +16,255 @@ We will establish a formal Design System and Component Library. This will involv
- **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
- **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
## Implementation Status
### What's Implemented
The codebase has a solid foundation for a design system:
- **Tailwind CSS v4.1.17** as the styling solution
- **Dark mode** fully implemented with system preference detection
- **55 custom icon components** for consistent iconography
- **Component organization** with shared vs. feature-specific separation
- **Accessibility patterns** with ARIA attributes and focus management
### What's Not Yet Implemented
- **Storybook** is not yet installed or configured
- **Formal design token documentation** (colors, typography, spacing)
- **Visual regression testing** for component changes
## Implementation Details
### Component Library Structure
```text
src/
├── components/ # 30+ shared UI components
│ ├── icons/ # 55 SVG icon components
│ ├── Header.tsx
│ ├── Footer.tsx
│ ├── LoadingSpinner.tsx
│ ├── ErrorDisplay.tsx
│ ├── ConfirmationModal.tsx
│ ├── DarkModeToggle.tsx
│ ├── StatCard.tsx
│ ├── PasswordInput.tsx
│ └── ...
├── features/ # Feature-specific components
│ ├── charts/ # PriceChart, PriceHistoryChart
│ ├── flyer/ # FlyerDisplay, FlyerList, FlyerUploader
│ ├── shopping/ # ShoppingListComponent, WatchedItemsList
│ └── voice-assistant/ # VoiceAssistant
├── layouts/ # Page layouts
│ └── MainLayout.tsx
├── pages/ # Page components
│ └── admin/components/ # Admin-specific components
└── providers/ # Context providers
```
### Styling Approach
**Tailwind CSS** with utility-first classes:
```typescript
// Component example with consistent styling patterns
<button className="px-4 py-2 bg-brand-primary text-white rounded-lg
hover:bg-brand-dark transition-colors duration-200
focus:outline-none focus:ring-2 focus:ring-brand-primary
focus:ring-offset-2 dark:focus:ring-offset-gray-800">
Click me
</button>
```
**Common Utility Patterns**:
| Pattern | Classes |
| ------- | ------- |
| Card container | `bg-white dark:bg-gray-800 rounded-lg shadow-md p-6` |
| Primary button | `bg-brand-primary hover:bg-brand-dark text-white rounded-lg px-4 py-2` |
| Secondary button | `bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-200` |
| Input field | `border border-gray-300 dark:border-gray-600 rounded-md px-3 py-2` |
| Focus ring | `focus:outline-none focus:ring-2 focus:ring-brand-primary` |
### Color System
**Brand Colors** (Tailwind theme extensions):
- `brand-primary` - Primary brand color (blue/teal)
- `brand-light` - Lighter variant
- `brand-dark` - Darker variant for hover states
- `brand-secondary` - Secondary accent color
**Semantic Colors**:
- Gray scale: `gray-50` through `gray-950`
- Error: `red-500`, `red-600`
- Success: `green-500`, `green-600`
- Warning: `yellow-500`, `orange-500`
- Info: `blue-500`, `blue-600`
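A hedged sketch of how the brand palette might be declared in a classic `tailwind.config.js` (all hex values are placeholders; a Tailwind v4 project may instead define these tokens in CSS via `@theme`):
```typescript
// Hypothetical Tailwind theme extension; every hex value is a placeholder.
/** @type {import('tailwindcss').Config} */
export default {
  darkMode: 'class', // matches the classList.toggle('dark', ...) approach below
  theme: {
    extend: {
      colors: {
        'brand-primary': '#0d9488', // placeholder teal
        'brand-light': '#5eead4', // placeholder light variant
        'brand-dark': '#0f766e', // placeholder hover shade
        'brand-secondary': '#f59e0b', // placeholder accent
      },
    },
  },
};
```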
### Dark Mode Implementation
Dark mode is fully implemented using Tailwind's `dark:` variant:
```typescript
// Initialization in useAppInitialization hook
const initializeDarkMode = () => {
// Priority: user profile > localStorage > system preference
const stored = localStorage.getItem('darkMode');
const systemPreference = window.matchMedia('(prefers-color-scheme: dark)').matches;
const isDarkMode = stored ? stored === 'true' : systemPreference;
document.documentElement.classList.toggle('dark', isDarkMode);
return isDarkMode;
};
```
**Usage in components**:
```typescript
<div className="bg-white dark:bg-gray-800 text-gray-900 dark:text-white">
Content adapts to theme
</div>
```
### Icon System
**55 custom SVG icon components** in `src/components/icons/`:
```typescript
// Icon component pattern
interface IconProps extends React.SVGProps<SVGSVGElement> {
title?: string;
}
export const CheckCircleIcon: React.FC<IconProps> = ({ title, ...props }) => (
<svg {...props} fill="currentColor" viewBox="0 0 24 24">
{title && <title>{title}</title>}
<path d="..." />
</svg>
);
```
**Usage**:
```typescript
<CheckCircleIcon className="w-5 h-5 text-green-500" title="Success" />
```
**External icons**: Lucide React (`lucide-react` v0.555.0) used for additional icons.
### Accessibility Patterns
**ARIA Attributes**:
```typescript
// Modal pattern
<div role="dialog" aria-modal="true" aria-labelledby="modal-title">
<h2 id="modal-title">Modal Title</h2>
</div>
// Button with label
<button aria-label="Close modal">
<XMarkIcon aria-hidden="true" />
</button>
// Loading state
<div role="status" aria-live="polite">
<LoadingSpinner />
</div>
```
**Focus Management**:
- Consistent focus rings: `focus:ring-2 focus:ring-brand-primary focus:ring-offset-2`
- Dark mode offset: `dark:focus:ring-offset-gray-800`
- No outline: `focus:outline-none` (using ring instead)
### State Management
**Context Providers** (see ADR-005):
| Provider | Purpose |
| -------- | ------- |
| `AuthProvider` | Authentication state |
| `ModalProvider` | Modal open/close state |
| `FlyersProvider` | Flyer data |
| `MasterItemsProvider` | Grocery items |
| `UserDataProvider` | User-specific data |
**Provider Hierarchy** in `AppProviders.tsx`:
```typescript
<QueryClientProvider>
<ModalProvider>
<AuthProvider>
<FlyersProvider>
<MasterItemsProvider>
<UserDataProvider>
{children}
</UserDataProvider>
</MasterItemsProvider>
</FlyersProvider>
</AuthProvider>
</ModalProvider>
</QueryClientProvider>
```
## Key Files
- `tailwind.config.js` - Tailwind CSS configuration
- `src/index.css` - Tailwind CSS entry point
- `src/components/` - Shared UI components
- `src/components/icons/` - Icon component library (55 icons)
- `src/providers/AppProviders.tsx` - Context provider composition
- `src/hooks/useAppInitialization.ts` - Dark mode initialization
## Component Guidelines
### When to Create Shared Components
Create a shared component in `src/components/` when:
1. Used in 3+ places across the application
2. Represents a reusable UI pattern (buttons, cards, modals)
3. Has consistent styling/behavior requirements
### Naming Conventions
- **Components**: PascalCase (`LoadingSpinner.tsx`)
- **Icons**: PascalCase with `Icon` suffix (`CheckCircleIcon.tsx`)
- **Hooks**: camelCase with `use` prefix (`useModal.ts`)
- **Contexts**: PascalCase with `Context` suffix (`AuthContext.tsx`)
### Styling Guidelines
1. Use Tailwind utility classes exclusively
2. Include dark mode variants for all colors: `bg-white dark:bg-gray-800`
3. Add focus states for interactive elements
4. Use semantic color names from the design system
## Future Enhancements (Storybook Setup)
To complete ADR-012 implementation:
1. **Install Storybook**:
```bash
npx storybook@latest init
```
2. **Create stories for core components**:
- Button variants
- Form inputs (PasswordInput, etc.)
- Modal components
- Loading states
- Icon showcase
3. **Add visual regression testing** with Chromatic or Percy
4. **Document design tokens** formally in Storybook
5. **Create component composition guidelines**

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "flyer-crawler",
"version": "0.9.67",
"version": "0.9.69",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.9.67",
"version": "0.9.69",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",

View File

@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.9.67",
"version": "0.9.69",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",

View File

@@ -8,6 +8,7 @@ import { z } from 'zod';
import * as db from '../services/db/index.db';
import type { UserProfile } from '../types';
import { geocodingService } from '../services/geocodingService.server';
import { cacheService } from '../services/cacheService.server';
import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
import {
createUploadMiddleware,
@@ -635,6 +636,44 @@ router.post(
},
);
/**
* POST /api/admin/system/clear-cache - Clears the application data cache.
* Clears cached flyers, brands, and stats data from Redis.
* Requires admin privileges.
*/
router.post(
'/system/clear-cache',
adminTriggerLimiter,
validateRequest(emptySchema),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
req.log.info(
`[Admin] Manual cache clear received from user: ${userProfile.user.user_id}`,
);
try {
const [flyersDeleted, brandsDeleted, statsDeleted] = await Promise.all([
cacheService.invalidateFlyers(req.log),
cacheService.invalidateBrands(req.log),
cacheService.invalidateStats(req.log),
]);
const totalDeleted = flyersDeleted + brandsDeleted + statsDeleted;
res.status(200).json({
message: `Successfully cleared the application cache. ${totalDeleted} keys were removed.`,
details: {
flyers: flyersDeleted,
brands: brandsDeleted,
stats: statsDeleted,
},
});
} catch (error) {
req.log.error({ error }, '[Admin] Failed to clear application cache.');
next(error);
}
},
);
/* Catches errors from multer (e.g., file size, file filter) */
router.use(handleMulterError);

View File

@@ -234,6 +234,9 @@ router.post(
* POST /api/ai/upload-legacy - Process a flyer upload from a legacy client.
* This is an authenticated route that processes the flyer synchronously.
* This is used for integration testing the legacy upload flow.
*
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
* This synchronous endpoint is retained only for integration testing purposes.
*/
router.post(
'/upload-legacy',
@@ -282,9 +285,12 @@ router.get(
);
/**
* This endpoint saves the processed flyer data to the database. It is the final step
* in the flyer upload workflow after the AI has extracted the data.
* POST /api/ai/flyers/process - Saves the processed flyer data to the database.
* This is the final step in the flyer upload workflow after the AI has extracted the data.
* It uses `optionalAuth` to handle submissions from both anonymous and authenticated users.
*
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
* This synchronous endpoint processes flyers inline and should be migrated to the queue-based approach.
*/
router.post(
'/flyers/process',

View File

@@ -0,0 +1,226 @@
// src/services/cacheService.server.ts
/**
* @file Centralized caching service implementing the Cache-Aside pattern.
* This service provides a reusable wrapper around Redis for caching read-heavy operations.
* See ADR-009 for the caching strategy documentation.
*/
import type { Logger } from 'pino';
import { connection as redis } from './redis.server';
import { logger as globalLogger } from './logger.server';
/**
* TTL values in seconds for different cache types.
* These can be tuned based on data volatility and freshness requirements.
*/
export const CACHE_TTL = {
/** Brand/store list - rarely changes, safe to cache for 1 hour */
BRANDS: 60 * 60,
/** Flyer list - changes when new flyers are added, cache for 5 minutes */
FLYERS: 5 * 60,
/** Individual flyer data - cache for 10 minutes */
FLYER: 10 * 60,
/** Flyer items - cache for 10 minutes */
FLYER_ITEMS: 10 * 60,
/** Statistics - can be slightly stale, cache for 5 minutes */
STATS: 5 * 60,
/** Most frequent sales - aggregated data, cache for 15 minutes */
FREQUENT_SALES: 15 * 60,
/** Categories - rarely changes, cache for 1 hour */
CATEGORIES: 60 * 60,
} as const;
/**
* Cache key prefixes for different data types.
* Using consistent prefixes allows for pattern-based invalidation.
*/
export const CACHE_PREFIX = {
BRANDS: 'cache:brands',
FLYERS: 'cache:flyers',
FLYER: 'cache:flyer',
FLYER_ITEMS: 'cache:flyer-items',
STATS: 'cache:stats',
FREQUENT_SALES: 'cache:frequent-sales',
CATEGORIES: 'cache:categories',
} as const;
export interface CacheOptions {
/** Time-to-live in seconds */
ttl: number;
/** Optional logger for this operation */
logger?: Logger;
}
/**
* Centralized cache service implementing the Cache-Aside pattern.
* All cache operations are fail-safe - cache failures do not break the application.
*/
class CacheService {
/**
* Retrieves a value from cache.
* @param key The cache key
* @param logger Optional logger for this operation
* @returns The cached value or null if not found/error
*/
async get<T>(key: string, logger: Logger = globalLogger): Promise<T | null> {
try {
const cached = await redis.get(key);
if (cached) {
logger.debug({ cacheKey: key }, 'Cache hit');
return JSON.parse(cached) as T;
}
logger.debug({ cacheKey: key }, 'Cache miss');
return null;
} catch (error) {
logger.warn({ err: error, cacheKey: key }, 'Redis GET failed, proceeding without cache');
return null;
}
}
/**
* Stores a value in cache with TTL.
* @param key The cache key
* @param value The value to cache (will be JSON stringified)
* @param ttl Time-to-live in seconds
* @param logger Optional logger for this operation
*/
async set<T>(key: string, value: T, ttl: number, logger: Logger = globalLogger): Promise<void> {
try {
await redis.set(key, JSON.stringify(value), 'EX', ttl);
logger.debug({ cacheKey: key, ttl }, 'Value cached');
} catch (error) {
logger.warn({ err: error, cacheKey: key }, 'Redis SET failed, value not cached');
}
}
/**
* Deletes a specific key from cache.
* @param key The cache key to delete
* @param logger Optional logger for this operation
*/
async del(key: string, logger: Logger = globalLogger): Promise<void> {
try {
await redis.del(key);
logger.debug({ cacheKey: key }, 'Cache key deleted');
} catch (error) {
logger.warn({ err: error, cacheKey: key }, 'Redis DEL failed');
}
}
/**
* Invalidates all cache keys matching a pattern.
* Uses SCAN for safe iteration over large key sets.
* @param pattern The pattern to match (e.g., 'cache:flyers*')
* @param logger Optional logger for this operation
* @returns The number of keys deleted
*/
async invalidatePattern(pattern: string, logger: Logger = globalLogger): Promise<number> {
let cursor = '0';
let totalDeleted = 0;
try {
do {
const [nextCursor, keys] = await redis.scan(cursor, 'MATCH', pattern, 'COUNT', 100);
cursor = nextCursor;
if (keys.length > 0) {
const deletedCount = await redis.del(...keys);
totalDeleted += deletedCount;
}
} while (cursor !== '0');
logger.info({ pattern, totalDeleted }, 'Cache invalidation completed');
return totalDeleted;
} catch (error) {
logger.error({ err: error, pattern }, 'Cache invalidation failed');
throw error;
}
}
/**
* Implements the Cache-Aside pattern: try cache first, fall back to fetcher, cache result.
* This is the primary method for adding caching to existing repository methods.
*
* @param key The cache key
* @param fetcher Function that retrieves data from the source (e.g., database)
* @param options Cache options including TTL
* @returns The data (from cache or fetcher)
*
* @example
* ```typescript
* const brands = await cacheService.getOrSet(
* CACHE_PREFIX.BRANDS,
* () => this.db.query('SELECT * FROM stores'),
* { ttl: CACHE_TTL.BRANDS, logger }
* );
* ```
*/
async getOrSet<T>(
key: string,
fetcher: () => Promise<T>,
options: CacheOptions,
): Promise<T> {
const logger = options.logger ?? globalLogger;
// Try to get from cache first
const cached = await this.get<T>(key, logger);
if (cached !== null) {
return cached;
}
// Cache miss - fetch from source
const data = await fetcher();
// Cache the result (fire-and-forget, don't await)
this.set(key, data, options.ttl, logger).catch(() => {
// Error already logged in set()
});
return data;
}
// --- Convenience methods for specific cache types ---
/**
* Invalidates all brand-related cache entries.
*/
async invalidateBrands(logger: Logger = globalLogger): Promise<number> {
return this.invalidatePattern(`${CACHE_PREFIX.BRANDS}*`, logger);
}
/**
* Invalidates all flyer-related cache entries.
*/
async invalidateFlyers(logger: Logger = globalLogger): Promise<number> {
const patterns = [
`${CACHE_PREFIX.FLYERS}*`,
`${CACHE_PREFIX.FLYER}*`,
`${CACHE_PREFIX.FLYER_ITEMS}*`,
];
let total = 0;
for (const pattern of patterns) {
total += await this.invalidatePattern(pattern, logger);
}
return total;
}
/**
* Invalidates cache for a specific flyer and its items.
*/
async invalidateFlyer(flyerId: number, logger: Logger = globalLogger): Promise<void> {
await Promise.all([
this.del(`${CACHE_PREFIX.FLYER}:${flyerId}`, logger),
this.del(`${CACHE_PREFIX.FLYER_ITEMS}:${flyerId}`, logger),
// Also invalidate the flyers list since it may contain this flyer
this.invalidatePattern(`${CACHE_PREFIX.FLYERS}*`, logger),
]);
}
/**
* Invalidates all statistics cache entries.
*/
async invalidateStats(logger: Logger = globalLogger): Promise<number> {
return this.invalidatePattern(`${CACHE_PREFIX.STATS}*`, logger);
}
}
export const cacheService = new CacheService();

View File

@@ -18,6 +18,7 @@ describe('Address DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockDb.query.mockReset();
addressRepo = new AddressRepository(mockDb);
});

View File

@@ -40,6 +40,7 @@ describe('Admin DB Service', () => {
beforeEach(() => {
// Reset the global mock's call history before each test.
vi.clearAllMocks();
mockDb.query.mockReset();
// Reset the withTransaction mock before each test
vi.mocked(withTransaction).mockImplementation(async (callback) => {

View File

@@ -47,6 +47,7 @@ describe('Budget DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockDb.query.mockReset();
// Instantiate the repository with the minimal mock db for each test
budgetRepo = new BudgetRepository(mockDb);
});

View File

@@ -28,6 +28,7 @@ import { logger as mockLogger } from '../logger.server';
describe('Conversion DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockPoolInstance.query.mockReset();
// Make getPool return our mock instance for each test
vi.mocked(getPool).mockReturnValue(mockPoolInstance as any);
});

View File

@@ -46,6 +46,7 @@ describe('Flyer DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockPoolInstance.query.mockReset();
//In a transaction, `pool.connect()` returns a client. That client has a `release` method.
// For these tests, we simulate this by having `connect` resolve to the pool instance itself,
// and we ensure the `release` method is mocked on that instance.
@@ -586,18 +587,6 @@ describe('Flyer DB Service', () => {
});
describe('getFlyers', () => {
const expectedQuery = `
SELECT
f.*,
json_build_object(
'store_id', s.store_id,
'name', s.name,
'logo_url', s.logo_url
) as store
FROM public.flyers f
JOIN public.stores s ON f.store_id = s.store_id
ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
it('should use default limit and offset when none are provided', async () => {
console.log('[TEST DEBUG] Running test: getFlyers > should use default limit and offset');
const mockFlyers: Flyer[] = [createMockFlyer({ flyer_id: 1 })];
@@ -611,7 +600,7 @@ describe('Flyer DB Service', () => {
);
expect(mockPoolInstance.query).toHaveBeenCalledWith(
expectedQuery,
expect.stringContaining('FROM public.flyers f'),
[20, 0], // Default values
);
});
@@ -629,7 +618,7 @@ describe('Flyer DB Service', () => {
);
expect(mockPoolInstance.query).toHaveBeenCalledWith(
expectedQuery,
expect.stringContaining('FROM public.flyers f'),
[10, 5], // Provided values
);
});

View File

@@ -3,6 +3,7 @@ import type { Pool, PoolClient } from 'pg';
import { getPool, withTransaction } from './connection.db';
import type { Logger } from 'pino';
import { UniqueConstraintError, NotFoundError, handleDbError } from './errors.db';
import { cacheService, CACHE_TTL, CACHE_PREFIX } from '../cacheService.server';
import type {
Flyer,
FlyerItem,
@@ -229,22 +230,31 @@ export class FlyerRepository {
/**
* Retrieves all distinct brands from the stores table.
* Uses cache-aside pattern with 1-hour TTL (brands rarely change).
* @returns A promise that resolves to an array of Brand objects.
*/
async getAllBrands(logger: Logger): Promise<Brand[]> {
try {
const query = `
SELECT s.store_id as brand_id, s.name, s.logo_url, s.created_at, s.updated_at
FROM public.stores s
ORDER BY s.name;
`;
const res = await this.db.query<Brand>(query);
return res.rows;
} catch (error) {
handleDbError(error, logger, 'Database error in getAllBrands', {}, {
defaultMessage: 'Failed to retrieve brands from database.',
});
}
const cacheKey = CACHE_PREFIX.BRANDS;
return cacheService.getOrSet<Brand[]>(
cacheKey,
async () => {
try {
const query = `
SELECT s.store_id as brand_id, s.name, s.logo_url, s.created_at, s.updated_at
FROM public.stores s
ORDER BY s.name;
`;
const res = await this.db.query<Brand>(query);
return res.rows;
} catch (error) {
handleDbError(error, logger, 'Database error in getAllBrands', {}, {
defaultMessage: 'Failed to retrieve brands from database.',
});
}
},
{ ttl: CACHE_TTL.BRANDS, logger },
);
}
/**
@@ -262,49 +272,67 @@ export class FlyerRepository {
/**
* Retrieves all flyers from the database, ordered by creation date.
* Uses cache-aside pattern with 5-minute TTL.
* @param limit The maximum number of flyers to return.
* @param offset The number of flyers to skip.
* @returns A promise that resolves to an array of Flyer objects.
*/
async getFlyers(logger: Logger, limit: number = 20, offset: number = 0): Promise<Flyer[]> {
try {
const query = `
SELECT
f.*,
json_build_object(
'store_id', s.store_id,
'name', s.name,
'logo_url', s.logo_url
) as store
FROM public.flyers f
JOIN public.stores s ON f.store_id = s.store_id
ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
const res = await this.db.query<Flyer>(query, [limit, offset]);
return res.rows;
} catch (error) {
handleDbError(error, logger, 'Database error in getFlyers', { limit, offset }, {
defaultMessage: 'Failed to retrieve flyers from database.',
});
}
const cacheKey = `${CACHE_PREFIX.FLYERS}:${limit}:${offset}`;
return cacheService.getOrSet<Flyer[]>(
cacheKey,
async () => {
try {
const query = `
SELECT
f.*,
json_build_object(
'store_id', s.store_id,
'name', s.name,
'logo_url', s.logo_url
) as store
FROM public.flyers f
JOIN public.stores s ON f.store_id = s.store_id
ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
const res = await this.db.query<Flyer>(query, [limit, offset]);
return res.rows;
} catch (error) {
handleDbError(error, logger, 'Database error in getFlyers', { limit, offset }, {
defaultMessage: 'Failed to retrieve flyers from database.',
});
}
},
{ ttl: CACHE_TTL.FLYERS, logger },
);
}
/**
* Retrieves all items for a specific flyer.
* Uses cache-aside pattern with 10-minute TTL.
* @param flyerId The ID of the flyer.
* @returns A promise that resolves to an array of FlyerItem objects.
*/
async getFlyerItems(flyerId: number, logger: Logger): Promise<FlyerItem[]> {
try {
const res = await this.db.query<FlyerItem>(
'SELECT * FROM public.flyer_items WHERE flyer_id = $1 ORDER BY flyer_item_id ASC',
[flyerId],
);
return res.rows;
} catch (error) {
handleDbError(error, logger, 'Database error in getFlyerItems', { flyerId }, {
defaultMessage: 'Failed to retrieve flyer items from database.',
});
}
const cacheKey = `${CACHE_PREFIX.FLYER_ITEMS}:${flyerId}`;
return cacheService.getOrSet<FlyerItem[]>(
cacheKey,
async () => {
try {
const res = await this.db.query<FlyerItem>(
'SELECT * FROM public.flyer_items WHERE flyer_id = $1 ORDER BY flyer_item_id ASC',
[flyerId],
);
return res.rows;
} catch (error) {
handleDbError(error, logger, 'Database error in getFlyerItems', { flyerId }, {
defaultMessage: 'Failed to retrieve flyer items from database.',
});
}
},
{ ttl: CACHE_TTL.FLYER_ITEMS, logger },
);
}
/**
@@ -399,6 +427,7 @@ export class FlyerRepository {
/**
* Deletes a flyer and all its associated items in a transaction.
* This should typically be an admin-only action.
* Invalidates related cache entries after successful deletion.
* @param flyerId The ID of the flyer to delete.
*/
async deleteFlyer(flyerId: number, logger: Logger): Promise<void> {
@@ -413,6 +442,9 @@ export class FlyerRepository {
}
logger.info(`Successfully deleted flyer with ID: ${flyerId}`);
});
// Invalidate cache after successful deletion
await cacheService.invalidateFlyer(flyerId, logger);
} catch (error) {
handleDbError(error, logger, 'Database transaction error in deleteFlyer', { flyerId }, {
defaultMessage: 'Failed to delete flyer.',

View File

@@ -29,6 +29,7 @@ describe('Gamification DB Service', () => {
beforeEach(() => {
// Reset the global mock's call history before each test.
vi.clearAllMocks();
mockDb.query.mockReset();
// Instantiate the repository with the mock pool for each test
gamificationRepo = new GamificationRepository(mockDb);

View File

@@ -30,6 +30,7 @@ describe('Notification DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockPoolInstance.query.mockReset();
// Instantiate the repository with the mock pool for each test
notificationRepo = new NotificationRepository(mockPoolInstance as unknown as Pool);

View File

@@ -35,6 +35,7 @@ describe('Personalization DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockQuery.mockReset();
// Reset the withTransaction mock before each test
vi.mocked(withTransaction).mockImplementation(async (callback) => {
const mockClient = { query: vi.fn() };

View File

@@ -27,6 +27,7 @@ import { logger as mockLogger } from '../logger.server';
describe('Price DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockPoolInstance.query.mockReset();
// Make getPool return our mock instance for each test
vi.mocked(getPool).mockReturnValue(mockPoolInstance as any);
});

View File

@@ -34,6 +34,7 @@ describe('Reaction DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockDb.query.mockReset();
reactionRepo = new ReactionRepository(mockDb);
});

View File

@@ -28,6 +28,7 @@ describe('Recipe DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockQuery.mockReset();
// Instantiate the repository with the mock pool for each test
recipeRepo = new RecipeRepository(mockPoolInstance as unknown as Pool);
});

View File

@@ -36,6 +36,7 @@ describe('Shopping DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockPoolInstance.query.mockReset();
// Instantiate the repository with the mock pool for each test
shoppingRepo = new ShoppingRepository(mockPoolInstance as unknown as Pool);
});

View File

@@ -62,6 +62,7 @@ describe('User DB Service', () => {
beforeEach(() => {
vi.clearAllMocks();
mockPoolInstance.query.mockReset();
userRepo = new UserRepository(mockPoolInstance as unknown as PoolClient);
// Provide a default mock implementation for withTransaction for all tests.
vi.mocked(withTransaction).mockImplementation(

View File

@@ -4,12 +4,13 @@ import { withTransaction } from './db/connection.db';
import { createFlyerAndItems } from './db/flyer.db';
import { AdminRepository } from './db/admin.db';
import { GamificationRepository } from './db/gamification.db';
import { cacheService } from './cacheService.server';
import type { FlyerInsert, FlyerItemInsert, Flyer } from '../types';
export class FlyerPersistenceService {
/**
* Saves the flyer and its items to the database within a transaction.
* Also logs the activity.
* Also logs the activity and invalidates related cache entries.
*/
async saveFlyer(
flyerData: FlyerInsert,
@@ -17,7 +18,7 @@ export class FlyerPersistenceService {
userId: string | undefined,
logger: Logger,
): Promise<Flyer> {
return withTransaction(async (client) => {
const flyer = await withTransaction(async (client) => {
const { flyer, items } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);
logger.info(
@@ -43,5 +44,12 @@ export class FlyerPersistenceService {
}
return flyer;
});
// Invalidate flyer list cache after successful creation (fire-and-forget)
cacheService.invalidateFlyers(logger).catch(() => {
// Error already logged in invalidateFlyers
});
return flyer;
}
}

View File

@@ -1,5 +1,5 @@
// src/tests/integration/gamification.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { describe, it, expect, beforeAll, afterAll, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import path from 'path';
import fs from 'node:fs/promises';
@@ -70,8 +70,13 @@ describe('Gamification Flow Integration Test', () => {
fullName: 'Gamification Tester',
request,
}));
});
// Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
beforeEach(() => {
vi.clearAllMocks();
// Reset AI Service Mock to default success state
mockExtractCoreData.mockReset();
mockExtractCoreData.mockResolvedValue({
store_name: 'Gamification Test Store',
valid_from: null,
@@ -87,6 +92,9 @@ describe('Gamification Flow Integration Test', () => {
},
],
});
// Reset Image Processor Mock
vi.mocked(imageProcessor.generateFlyerIcon).mockResolvedValue('mock-icon.webp');
});
afterAll(async () => {

View File

@@ -1,5 +1,5 @@
// src/tests/integration/recipe.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { describe, it, expect, beforeAll, afterAll, vi, afterEach } from 'vitest';
import supertest from 'supertest';
import { createAndLoginUser } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';
@@ -49,6 +49,12 @@ describe('Recipe API Routes Integration Tests', () => {
createdRecipeIds.push(testRecipe.recipe_id);
});
afterEach(() => {
vi.clearAllMocks();
// Reset the mock to its default state for the next test
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue('Default Mock Suggestion');
});
afterAll(async () => {
vi.unstubAllEnvs();
// Clean up all created resources

View File

@@ -14,22 +14,34 @@ let globalPool: ReturnType<typeof getPool> | null = null;
* This is critical because old jobs with outdated error messages can pollute test results.
*/
async function cleanAllQueues() {
console.log(`[PID:${process.pid}] Cleaning all BullMQ queues...`);
const { flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue } = await import('../../services/queues.server');
// Use console.error for visibility in CI logs (stderr is often more reliable)
console.error(`[PID:${process.pid}] [QUEUE CLEANUP] Starting BullMQ queue cleanup...`);
const queues = [flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue];
try {
const { flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue } = await import('../../services/queues.server');
console.error(`[QUEUE CLEANUP] Successfully imported queue modules`);
for (const queue of queues) {
try {
// obliterate() removes ALL data associated with the queue from Redis
await queue.obliterate({ force: true });
console.log(` ✅ Cleaned queue: ${queue.name}`);
} catch (error) {
// Log but don't fail - the queue might not exist yet
console.log(` ⚠️ Could not clean queue ${queue.name}: ${error instanceof Error ? error.message : 'Unknown error'}`);
const queues = [flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue];
for (const queue of queues) {
try {
// Log queue state before cleanup
const jobCounts = await queue.getJobCounts();
console.error(`[QUEUE CLEANUP] Queue "${queue.name}" before cleanup: ${JSON.stringify(jobCounts)}`);
// obliterate() removes ALL data associated with the queue from Redis
await queue.obliterate({ force: true });
console.error(` ✅ [QUEUE CLEANUP] Cleaned queue: ${queue.name}`);
} catch (error) {
// Log but don't fail - the queue might not exist yet
console.error(` ⚠️ [QUEUE CLEANUP] Could not clean queue ${queue.name}: ${error instanceof Error ? error.message : 'Unknown error'}`);
}
}
console.error(`✅ [PID:${process.pid}] [QUEUE CLEANUP] All queues cleaned successfully.`);
} catch (error) {
console.error(`❌ [PID:${process.pid}] [QUEUE CLEANUP] CRITICAL ERROR during queue cleanup:`, error);
// Don't throw - we want the tests to continue even if cleanup fails
}
console.log(`✅ [PID:${process.pid}] All queues cleaned.`);
}
export async function setup() {
@@ -38,11 +50,15 @@ export async function setup() {
// Fix: Set the FRONTEND_URL globally for the test server instance
process.env.FRONTEND_URL = 'https://example.com';
console.log(`\n--- [PID:${process.pid}] Running Integration Test GLOBAL Setup ---`);
console.error(`\n--- [PID:${process.pid}] Running Integration Test GLOBAL Setup ---`);
console.error(`[SETUP] REDIS_URL: ${process.env.REDIS_URL}`);
console.error(`[SETUP] REDIS_PASSWORD is set: ${!!process.env.REDIS_PASSWORD}`);
// CRITICAL: Clean all queues BEFORE running any tests to remove stale jobs
// from previous test runs that may have outdated error messages.
console.error(`[SETUP] About to call cleanAllQueues()...`);
await cleanAllQueues();
console.error(`[SETUP] cleanAllQueues() completed.`);
// The integration setup is now the single source of truth for preparing the test DB.
// It runs the same seed script that `npm run db:reset:test` used.