Compare commits

12 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 23830c0d4e |  |
|  | ef42fee982 |  |
|  | 65cb54500c |  |
|  | 664ad291be |  |
|  | ff912b9055 |  |
|  | ec32027bd4 |  |
|  | 59f773639b |  |
|  | dd2be5eecf |  |
|  | a94bfbd3e9 |  |
|  | 338bbc9440 |  |
|  | 60aad04642 |  |
|  | 7f2aff9a24 |  |

```diff
@@ -54,7 +54,12 @@
       "mcp__memory__create_entities",
       "mcp__memory__search_nodes",
       "mcp__memory__delete_entities",
-      "mcp__sequential-thinking__sequentialthinking"
+      "mcp__sequential-thinking__sequentialthinking",
+      "mcp__filesystem__list_directory",
+      "mcp__filesystem__read_multiple_files",
+      "mcp__filesystem__directory_tree",
+      "mcp__filesystem__read_text_file",
+      "Bash(wc:*)"
     ]
   }
 }
```

@@ -96,6 +96,23 @@ jobs:

```yaml
          # It prevents the accumulation of duplicate processes from previous test runs.
          node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.name && p.name.endsWith('-test')) { console.log('Deleting test process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id, e.message); } } }); console.log('✅ Test process cleanup complete.'); } catch (e) { if (e.stdout.toString().includes('No process found')) { console.log('No PM2 processes running, cleanup not needed.'); } else { console.error('Error cleaning up test processes:', e.message); } }" || true

      - name: Flush Redis Before Tests
        # CRITICAL: Clear all Redis data to remove stale BullMQ jobs from previous test runs.
        # This prevents old jobs with outdated error messages from polluting test results.
        env:
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_TEST }}
        run: |
          echo "--- Flushing Redis database to remove stale jobs ---"
          if [ -z "$REDIS_PASSWORD" ]; then
            echo "⚠️ REDIS_PASSWORD_TEST not set, attempting flush without password..."
            redis-cli FLUSHDB || echo "Redis flush failed (no password)"
          else
            redis-cli -a "$REDIS_PASSWORD" FLUSHDB 2>/dev/null && echo "✅ Redis database flushed successfully." || echo "⚠️ Redis flush failed"
          fi
          # Verify the flush worked by checking key count
          KEY_COUNT=$(redis-cli -a "$REDIS_PASSWORD" DBSIZE 2>/dev/null | grep -oE '[0-9]+' || echo "unknown")
          echo "Redis key count after flush: $KEY_COUNT"

      - name: Run All Tests and Generate Merged Coverage Report
        # This single step runs both unit and integration tests, then merges their
        # coverage data into a single report. It combines the environment variables
```


```diff
@@ -2,7 +2,7 @@
 
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Accepted
 
 ## Context
 
```

@@ -16,3 +16,82 @@ We will implement a dedicated background job processing system using a task queu

**Positive**: Decouples the API from heavy processing, allows for retries on failure, and enables scaling the processing workers independently. Increases application reliability and resilience.
**Negative**: Introduces a new dependency (Redis) into the infrastructure. Requires refactoring of the flyer processing logic to work within a job queue structure.

## Implementation Details

### Queue Infrastructure

The implementation uses **BullMQ v5.65.1** with **ioredis v5.8.2** for Redis connectivity. Six distinct queues handle different job types:

| Queue Name | Purpose | Retry Attempts | Backoff Strategy |
| ---------------------------- | --------------------------- | -------------- | ---------------------- |
| `flyer-processing` | OCR/AI processing of flyers | 3 | Exponential (5s base) |
| `email-sending` | Email delivery | 5 | Exponential (10s base) |
| `analytics-reporting` | Daily report generation | 2 | Exponential (60s base) |
| `weekly-analytics-reporting` | Weekly report generation | 2 | Exponential (1h base) |
| `file-cleanup` | Temporary file cleanup | 3 | Exponential (30s base) |
| `token-cleanup` | Expired token removal | 2 | Exponential (1h base) |
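
The queue definitions live in `src/services/queues.server.ts`, which is not part of this diff. As a rough sketch under that assumption, a single queue matching the first row of the table might be declared like this (connection URL and option layout are illustrative):

```typescript
// Illustrative sketch only - the real queues.server.ts may differ in structure and naming.
import { Queue } from 'bullmq';
import IORedis from 'ioredis';

// BullMQ requires maxRetriesPerRequest: null on the shared ioredis connection.
const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379', {
  maxRetriesPerRequest: null,
});

export const flyerProcessingQueue = new Queue('flyer-processing', {
  connection,
  defaultJobOptions: {
    attempts: 3, // retry attempts from the table above
    backoff: { type: 'exponential', delay: 5000 }, // 5s base, exponential
  },
});
```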

### Key Files

- `src/services/queues.server.ts` - Queue definitions and configuration
- `src/services/workers.server.ts` - Worker implementations with configurable concurrency
- `src/services/redis.server.ts` - Redis connection management
- `src/services/queueService.server.ts` - Queue lifecycle and graceful shutdown
- `src/services/flyerProcessingService.server.ts` - 5-stage flyer processing pipeline
- `src/types/job-data.ts` - TypeScript interfaces for all job data types

### API Design

Endpoints for long-running tasks return **202 Accepted** immediately with a job ID:

```text
POST /api/ai/upload-and-process → 202 { jobId: "..." }
GET /api/ai/jobs/:jobId/status → { state: "...", progress: ... }
```
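
The route handlers are not shown in this diff; the following is only a hedged sketch of the 202-then-poll contract, reusing the hypothetical queue from the sketch above (payload shape and router wiring are assumptions):

```typescript
// Hypothetical Express handlers illustrating the 202 Accepted contract; not the actual routes.
import { Router } from 'express';
import { flyerProcessingQueue } from '../services/queues.server'; // assumed export

const router = Router();

router.post('/api/ai/upload-and-process', async (req, res) => {
  // Enqueue instead of processing inline, then acknowledge immediately.
  const job = await flyerProcessingQueue.add('process-flyer', { upload: req.body });
  res.status(202).json({ jobId: job.id });
});

router.get('/api/ai/jobs/:jobId/status', async (req, res) => {
  const job = await flyerProcessingQueue.getJob(req.params.jobId);
  if (!job) return res.status(404).json({ message: 'Job not found' });
  res.json({ state: await job.getState(), progress: job.progress });
});

export default router;
```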

### Worker Configuration

Workers are configured via environment variables:

- `WORKER_CONCURRENCY` - Flyer processing parallelism (default: 1)
- `EMAIL_WORKER_CONCURRENCY` - Email worker parallelism (default: 10)
- `ANALYTICS_WORKER_CONCURRENCY` - Analytics worker parallelism (default: 1)
- `CLEANUP_WORKER_CONCURRENCY` - Cleanup worker parallelism (default: 10)
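
A minimal sketch of how a worker might read one of these variables, again assuming a shared ioredis connection (the real `workers.server.ts` may differ):

```typescript
// Illustrative worker wiring; the processor body is elided.
import { Worker } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379', {
  maxRetriesPerRequest: null,
});

export const flyerWorker = new Worker(
  'flyer-processing',
  async (job) => {
    // ...run the 5-stage flyer processing pipeline on job.data...
  },
  {
    connection,
    concurrency: Number(process.env.WORKER_CONCURRENCY ?? 1), // default: 1
  },
);
```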

### Monitoring

- **Bull Board UI** available at `/api/admin/jobs` for admin users
- Worker status endpoint: `GET /api/admin/workers/status`
- Queue status endpoint: `GET /api/admin/queues/status`
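
Mounting Bull Board at that path would typically look like the following; this sketch is based on the `@bull-board/api` and `@bull-board/express` packages listed in `package.json`, not on the project's actual admin routes (admin auth middleware omitted):

```typescript
// Sketch of mounting the Bull Board UI at /api/admin/jobs.
import { createBullBoard } from '@bull-board/api';
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter';
import { ExpressAdapter } from '@bull-board/express';
import express from 'express';
import { flyerProcessingQueue } from '../services/queues.server'; // assumed export

const app = express();
const serverAdapter = new ExpressAdapter();
serverAdapter.setBasePath('/api/admin/jobs');

createBullBoard({
  queues: [new BullMQAdapter(flyerProcessingQueue)],
  serverAdapter,
});

app.use('/api/admin/jobs', serverAdapter.getRouter());
```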

### Graceful Shutdown

Both API and worker processes implement graceful shutdown with a 30-second timeout, ensuring in-flight jobs complete before process termination.
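
As an illustration of that behaviour (the actual `queueService.server.ts` is not in this diff), a signal handler could drain workers and fall back to a forced exit after 30 seconds:

```typescript
// Illustrative shutdown handler; imports are assumed exports, not confirmed file contents.
import { flyerWorker } from './workers.server'; // assumed export
import { connection } from './redis.server'; // assumed export

async function shutdown(signal: string): Promise<void> {
  console.log(`${signal} received, draining workers...`);
  // Hard stop if draining takes longer than the 30-second budget.
  const forceExit = setTimeout(() => process.exit(1), 30_000);

  await flyerWorker.close(); // waits for in-flight jobs to finish
  await connection.quit();

  clearTimeout(forceExit);
  process.exit(0);
}

process.on('SIGTERM', () => void shutdown('SIGTERM'));
process.on('SIGINT', () => void shutdown('SIGINT'));
```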

## Compliance Notes

### Deprecated Synchronous Endpoints

The following endpoints process flyers synchronously and are **deprecated**:

- `POST /api/ai/upload-legacy` - For integration testing only
- `POST /api/ai/flyers/process` - Legacy workflow, should migrate to queue-based approach

New integrations MUST use `POST /api/ai/upload-and-process` for queue-based processing.

### Email Handling

- **Bulk emails** (deal notifications): Enqueued via `emailQueue`
- **Transactional emails** (password reset): Sent synchronously for immediate user feedback

## Future Enhancements

Potential improvements for consideration:

1. **Dead Letter Queue (DLQ)**: Move permanently failed jobs to a dedicated queue for analysis
2. **Job Priority Levels**: Allow priority-based processing for different job types
3. **Real-time Progress**: WebSocket/SSE for live job progress updates to clients
4. **Per-Queue Rate Limiting**: Throttle job processing based on external API limits
5. **Job Dependencies**: Support for jobs that depend on completion of other jobs
6. **Prometheus Metrics**: Export queue metrics for observability dashboards

```diff
@@ -2,7 +2,7 @@
 
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Accepted
 
 ## Context
 
```

@@ -20,3 +20,107 @@ We will implement a multi-layered caching strategy using an in-memory data store

**Positive**: Directly addresses application performance and scalability. Reduces database load and improves API response times for common requests.
**Negative**: Introduces Redis as a dependency if not already used. Adds complexity to the data-fetching logic and requires careful management of cache invalidation to prevent stale data.

## Implementation Details

### Cache Service

A centralized cache service (`src/services/cacheService.server.ts`) provides reusable caching functionality:

- **`getOrSet<T>(key, fetcher, options)`**: Cache-aside pattern implementation
- **`get<T>(key)`**: Retrieve cached value
- **`set<T>(key, value, ttl)`**: Store value with TTL
- **`del(key)`**: Delete specific key
- **`invalidatePattern(pattern)`**: Delete keys matching a pattern

All cache operations are fail-safe: cache failures do not break the application.
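
A hedged sketch of the `getOrSet` cache-aside helper built on ioredis follows; the real `cacheService.server.ts` may differ in option names and serialization:

```typescript
// Illustrative cache-aside implementation; failures fall through to the database fetcher.
import Redis from 'ioredis';

const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

export async function getOrSet<T>(
  key: string,
  fetcher: () => Promise<T>,
  options: { ttlSeconds: number },
): Promise<T> {
  try {
    const cached = await redis.get(key);
    if (cached !== null) return JSON.parse(cached) as T;
  } catch {
    // Fail-safe: a cache read error is ignored and we fall back to the fetcher.
  }

  const value = await fetcher();

  try {
    await redis.set(key, JSON.stringify(value), 'EX', options.ttlSeconds);
  } catch {
    // Fail-safe: a cache write error never breaks the request.
  }

  return value;
}
```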

### TTL Configuration

Different data types use different TTL values based on volatility:

| Data Type | TTL | Rationale |
| ------------------- | ---------- | -------------------------------------- |
| Brands/Stores | 1 hour | Rarely changes, safe to cache longer |
| Flyer lists | 5 minutes | Changes when new flyers are added |
| Individual flyers | 10 minutes | Stable once created |
| Flyer items | 10 minutes | Stable once created |
| Statistics | 5 minutes | Can be slightly stale |
| Frequent sales | 15 minutes | Aggregated data, updated periodically |
| Categories | 1 hour | Rarely changes |

### Cache Key Strategy

Cache keys follow a consistent prefix pattern for pattern-based invalidation:

- `cache:brands` - All brands list
- `cache:flyers:{limit}:{offset}` - Paginated flyer lists
- `cache:flyer:{id}` - Individual flyer data
- `cache:flyer-items:{flyerId}` - Items for a specific flyer
- `cache:stats:*` - Statistics data
- `geocode:{address}` - Geocoding results (30-day TTL)

### Cached Endpoints

The following repository methods implement server-side caching:

| Method | Cache Key Pattern | TTL |
| ------ | ----------------- | --- |
| `FlyerRepository.getAllBrands()` | `cache:brands` | 1 hour |
| `FlyerRepository.getFlyers()` | `cache:flyers:{limit}:{offset}` | 5 minutes |
| `FlyerRepository.getFlyerItems()` | `cache:flyer-items:{flyerId}` | 10 minutes |

### Cache Invalidation

**Event-based invalidation** is triggered on write operations:

- **Flyer creation** (`FlyerPersistenceService.saveFlyer`): Invalidates all `cache:flyers*` keys
- **Flyer deletion** (`FlyerRepository.deleteFlyer`): Invalidates specific flyer and flyer items cache, plus flyer lists

**Manual invalidation** via admin endpoints:

- `POST /api/admin/system/clear-cache` - Clears all application cache (flyers, brands, stats)
- `POST /api/admin/system/clear-geocode-cache` - Clears geocoding cache
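
Pattern invalidation is usually implemented with `SCAN` rather than the blocking `KEYS` command; the following is an illustrative sketch of `invalidatePattern`, not the project's actual implementation:

```typescript
// Illustrative SCAN-based pattern invalidation (avoids blocking Redis with KEYS).
import Redis from 'ioredis';

const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

export async function invalidatePattern(pattern: string): Promise<void> {
  const stream = redis.scanStream({ match: pattern, count: 100 });
  for await (const keys of stream as AsyncIterable<string[]>) {
    if (keys.length > 0) {
      await redis.del(...keys);
    }
  }
}

// Example: after a new flyer is persisted, drop every cached paginated list in one call.
// await invalidatePattern('cache:flyers*');
```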

### Client-Side Caching

TanStack React Query provides client-side caching with configurable stale times:

| Query Type | Stale Time |
| ----------------- | ----------- |
| Categories | 1 hour |
| Master Items | 10 minutes |
| Flyer Items | 5 minutes |
| Flyers | 2 minutes |
| Shopping Lists | 1 minute |
| Activity Log | 30 seconds |
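
For illustration, a per-query stale time with TanStack React Query can be set like this; the hook and fetcher names below are assumptions rather than files from the repository:

```typescript
// Hypothetical query hook showing a staleTime that matches the table above.
import { useQuery } from '@tanstack/react-query';
import { fetchCategories } from '../services/apiClient'; // assumed fetcher

export function useCategoriesQuery() {
  return useQuery({
    queryKey: ['categories'],
    queryFn: fetchCategories,
    staleTime: 60 * 60 * 1000, // 1 hour: categories rarely change
  });
}
```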

### Multi-Layer Cache Architecture

```text
Client Request
      ↓
[TanStack React Query] ← Client-side cache (staleTime-based)
      ↓
[Express API]
      ↓
[CacheService.getOrSet()] ← Server-side Redis cache (TTL-based)
      ↓
[PostgreSQL Database]
```

## Key Files

- `src/services/cacheService.server.ts` - Centralized cache service
- `src/services/db/flyer.db.ts` - Repository with caching for brands, flyers, flyer items
- `src/services/flyerPersistenceService.server.ts` - Cache invalidation on flyer creation
- `src/routes/admin.routes.ts` - Admin cache management endpoints
- `src/config/queryClient.ts` - Client-side query cache configuration

## Future Enhancements

1. **Recipe caching**: Add caching to expensive recipe queries (by-sale-percentage, etc.)
2. **Cache warming**: Pre-populate cache on startup for frequently accessed static data
3. **Cache metrics**: Add hit/miss rate monitoring for observability
4. **Conditional caching**: Skip cache for authenticated user-specific data
5. **Cache compression**: Compress large cached payloads to reduce Redis memory usage

```diff
@@ -2,7 +2,7 @@
 
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Accepted
 
 ## Context
 
```

@@ -14,9 +14,305 @@ We will formalize the testing pyramid for the project, defining the role of each

1. **Unit Tests (Vitest)**: For isolated functions, components, and repository methods with mocked dependencies. High coverage is expected.
2. **Integration Tests (Supertest)**: For API routes, testing the interaction between controllers, services, and mocked database layers. Focus on contract and middleware correctness.
3. **End-to-End (E2E) Tests (Playwright/Cypress)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real browser and a test database to ensure the entire system works together.
3. **End-to-End (E2E) Tests (Vitest + Supertest)**: For critical user flows (e.g., login, flyer upload, checkout), running against a real test server and database to ensure the entire system works together.

## Consequences

**Positive**: Ensures a consistent and comprehensive approach to quality assurance. Gives developers confidence when refactoring or adding new features. Clearly defines "done" for a new feature.
**Negative**: May require investment in setting up and maintaining the E2E testing environment. Can slightly increase the time required to develop a feature if all test layers are required.

## Implementation Details

### Testing Framework Stack

| Tool | Version | Purpose |
| ---- | ------- | ------- |
| Vitest | 4.0.15 | Test runner for all test types |
| @testing-library/react | 16.3.0 | React component testing |
| @testing-library/jest-dom | 6.9.1 | DOM assertion matchers |
| supertest | 7.1.4 | HTTP assertion library for API testing |
| msw | 2.12.3 | Mock Service Worker for network mocking |
| testcontainers | 11.8.1 | Database containerization (optional) |
| c8 + nyc | 10.1.3 / 17.1.0 | Coverage reporting |

### Test File Organization

```text
src/
├── components/
│   └── *.test.tsx        # Component unit tests (colocated)
├── hooks/
│   └── *.test.ts         # Hook unit tests (colocated)
├── services/
│   └── *.test.ts         # Service unit tests (colocated)
├── routes/
│   └── *.test.ts         # Route handler unit tests (colocated)
├── utils/
│   └── *.test.ts         # Utility function tests (colocated)
└── tests/
    ├── setup/            # Test configuration and setup files
    ├── utils/            # Test utilities, factories, helpers
    ├── assets/           # Test fixtures (images, files)
    ├── integration/      # Integration test files (*.test.ts)
    └── e2e/              # End-to-end test files (*.e2e.test.ts)
```

**Naming Convention**: `{filename}.test.ts` or `{filename}.test.tsx` for unit/integration, `{filename}.e2e.test.ts` for E2E.

### Configuration Files

| Config | Environment | Purpose |
| ------ | ----------- | ------- |
| `vite.config.ts` | jsdom | Unit tests (React components, hooks) |
| `vitest.config.integration.ts` | node | Integration tests (API routes) |
| `vitest.config.e2e.ts` | node | E2E tests (full user flows) |
| `vitest.workspace.ts` | - | Orchestrates all test projects |

### Test Pyramid

```text
      ┌─────────────┐
      │     E2E     │  5 test files
      │    Tests    │  Critical user flows
      ├─────────────┤
      │ Integration │  17 test files
      │    Tests    │  API contracts + middleware
  ┌───┴─────────────┴───┐
  │     Unit Tests      │  185 test files
  │ Components, Hooks,  │  Isolated functions
  │  Services, Utils    │  Mocked dependencies
  └─────────────────────┘
```

### Unit Tests

**Purpose**: Test isolated functions, components, and modules with mocked dependencies.

**Environment**: jsdom (browser-like)

**Key Patterns**:

```typescript
// Component testing with providers
import { renderWithProviders, screen } from '@/tests/utils/renderWithProviders';

describe('MyComponent', () => {
  it('renders correctly', () => {
    renderWithProviders(<MyComponent />);
    expect(screen.getByText('Hello')).toBeInTheDocument();
  });
});
```

```typescript
// Hook testing
import { renderHook, waitFor } from '@testing-library/react';
import { useMyHook } from './useMyHook';

describe('useMyHook', () => {
  it('returns expected value', async () => {
    const { result } = renderHook(() => useMyHook());
    await waitFor(() => expect(result.current.data).toBeDefined());
  });
});
```

**Global Mocks** (automatically applied via `tests-setup-unit.ts`):

- Database connections (`pg.Pool`)
- AI services (`@google/genai`)
- Authentication (`jsonwebtoken`, `bcrypt`)
- Logging (`logger.server`, `logger.client`)
- Notifications (`notificationService`)

### Integration Tests

**Purpose**: Test API routes with real service interactions and database.

**Environment**: node

**Setup**: Real Express server on port 3001, real PostgreSQL database

```typescript
// API route testing pattern
import supertest from 'supertest';
import { createAndLoginUser } from '@/tests/utils/testHelpers';

describe('Auth API', () => {
  let request: ReturnType<typeof supertest>;
  let authToken: string;

  beforeAll(async () => {
    const app = (await import('../../../server')).default;
    request = supertest(app);
    const { token } = await createAndLoginUser(request);
    authToken = token;
  });

  it('GET /api/auth/me returns user profile', async () => {
    const response = await request
      .get('/api/auth/me')
      .set('Authorization', `Bearer ${authToken}`);

    expect(response.status).toBe(200);
    expect(response.body.user.email).toBeDefined();
  });
});
```

**Database Cleanup**:

```typescript
import { cleanupDb } from '@/tests/utils/cleanup';

afterAll(async () => {
  await cleanupDb({ users: [testUserId] });
});
```

### E2E Tests

**Purpose**: Test complete user journeys through the application.

**Timeout**: 120 seconds (for long-running flows)

**Current E2E Tests**:

- `auth.e2e.test.ts` - Registration, login, password reset
- `flyer-upload.e2e.test.ts` - Complete flyer upload pipeline
- `user-journey.e2e.test.ts` - Full user workflow
- `admin-authorization.e2e.test.ts` - Admin-specific flows
- `admin-dashboard.e2e.test.ts` - Admin dashboard functionality
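
Those files are not included in this diff. Purely as an illustration of the register-then-login shape described for `auth.e2e.test.ts`, a sketch might look like the following (route paths, payloads, and response fields are assumptions):

```typescript
// Hypothetical E2E sketch in the style of auth.e2e.test.ts; endpoints and shapes are assumed.
import supertest from 'supertest';
import { describe, it, expect, beforeAll } from 'vitest';

describe('Auth E2E', () => {
  let request: ReturnType<typeof supertest>;
  // Unique per run, per the isolation guidelines below.
  const email = `e2e-${Date.now()}@example.com`;

  beforeAll(async () => {
    const app = (await import('../../server')).default;
    request = supertest(app);
  });

  it('registers and then logs in the same user', async () => {
    const register = await request
      .post('/api/auth/register') // assumed route
      .send({ email, password: 'Str0ngPassw0rd!' });
    expect(register.status).toBe(201);

    const login = await request
      .post('/api/auth/login') // assumed route
      .send({ email, password: 'Str0ngPassw0rd!' });
    expect(login.status).toBe(200);
    expect(login.body.token).toBeDefined(); // assumed response field
  });
});
```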

### Mock Factories

The project uses comprehensive mock factories (`src/tests/utils/mockFactories.ts`, 1553 lines) for creating test data:

```typescript
import {
  createMockUser,
  createMockFlyer,
  createMockFlyerItem,
  createMockRecipe,
  resetMockIds,
} from '@/tests/utils/mockFactories';

beforeEach(() => {
  resetMockIds(); // Ensure deterministic IDs
});

it('creates flyer with items', () => {
  const flyer = createMockFlyer({ store_name: 'TestMart' });
  const items = [createMockFlyerItem({ flyer_id: flyer.flyer_id })];
  // ...
});
```

**Factory Coverage**: 90+ factory functions for all domain entities including users, flyers, recipes, shopping lists, budgets, achievements, etc.

### Test Utilities

| Utility | Purpose |
| ------- | ------- |
| `renderWithProviders()` | Wrap components with AppProviders + Router |
| `createAndLoginUser()` | Create user and return auth token |
| `cleanupDb()` | Database cleanup respecting FK constraints |
| `createTestApp()` | Create Express app for route testing |
| `poll()` | Polling utility for async operations |

### Coverage Configuration

**Coverage Provider**: v8 (built-in Vitest)

**Report Directories**:

- `.coverage/unit/` - Unit test coverage
- `.coverage/integration/` - Integration test coverage
- `.coverage/e2e/` - E2E test coverage

**Excluded from Coverage**:

- `src/index.tsx`, `src/main.tsx` (entry points)
- `src/tests/**` (test files themselves)
- `src/**/*.d.ts` (type declarations)
- `src/components/icons/**` (icon components)
- `src/db/seed*.ts` (database seeding scripts)

### npm Scripts

```bash
# Run all tests
npm run test

# Run by level
npm run test:unit          # Unit tests only (jsdom)
npm run test:integration   # Integration tests only (node)

# With coverage
npm run test:coverage      # Unit + Integration with reports

# Clean coverage directories
npm run clean
```

### Test Timeouts

| Test Type | Timeout | Rationale |
| --------- | ------- | --------- |
| Unit | 5 seconds | Fast, isolated tests |
| Integration | 60 seconds | AI service calls, DB operations |
| E2E | 120 seconds | Full user flow with multiple API calls |

## Best Practices

### When to Write Each Test Type

1. **Unit Tests** (required):
   - Pure functions and utilities
   - React components (rendering, user interactions)
   - Custom hooks
   - Service methods with mocked dependencies
   - Repository methods

2. **Integration Tests** (required for API changes):
   - New API endpoints
   - Authentication/authorization flows
   - Middleware behavior
   - Database query correctness

3. **E2E Tests** (for critical paths):
   - User registration and login
   - Core business flows (flyer upload, shopping lists)
   - Admin operations

### Test Isolation Guidelines

1. **Reset mock IDs**: Call `resetMockIds()` in `beforeEach()`
2. **Unique test data**: Use timestamps or UUIDs for emails/usernames
3. **Clean up after tests**: Use `cleanupDb()` in `afterAll()`
4. **Don't share state**: Each test should be independent

### Mocking Guidelines

1. **Unit tests**: Mock external dependencies (DB, APIs, services)
2. **Integration tests**: Mock only external APIs (AI services)
3. **E2E tests**: Minimal mocking, use real services where possible

## Key Files

- `vite.config.ts` - Unit test configuration
- `vitest.config.integration.ts` - Integration test configuration
- `vitest.config.e2e.ts` - E2E test configuration
- `vitest.workspace.ts` - Workspace orchestration
- `src/tests/setup/tests-setup-unit.ts` - Global mocks (488 lines)
- `src/tests/setup/integration-global-setup.ts` - Server + DB setup
- `src/tests/utils/mockFactories.ts` - Mock factories (1553 lines)
- `src/tests/utils/testHelpers.ts` - Test utilities

## Future Enhancements

1. **Browser E2E Tests**: Consider adding Playwright for actual browser testing
2. **Visual Regression**: Screenshot comparison for UI components
3. **Performance Testing**: Add benchmarks for critical paths
4. **Mutation Testing**: Verify test quality with mutation testing tools
5. **Coverage Thresholds**: Define minimum coverage requirements per module

```diff
@@ -2,7 +2,7 @@
 
 **Date**: 2025-12-12
 
-**Status**: Proposed
+**Status**: Partially Implemented
 
 ## Context
 
```

@@ -16,3 +16,255 @@ We will establish a formal Design System and Component Library. This will involv

- **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
- **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.

## Implementation Status

### What's Implemented

The codebase has a solid foundation for a design system:

- ✅ **Tailwind CSS v4.1.17** as the styling solution
- ✅ **Dark mode** fully implemented with system preference detection
- ✅ **55 custom icon components** for consistent iconography
- ✅ **Component organization** with shared vs. feature-specific separation
- ✅ **Accessibility patterns** with ARIA attributes and focus management

### What's Not Yet Implemented

- ❌ **Storybook** is not yet installed or configured
- ❌ **Formal design token documentation** (colors, typography, spacing)
- ❌ **Visual regression testing** for component changes

## Implementation Details

### Component Library Structure

```text
src/
├── components/            # 30+ shared UI components
│   ├── icons/             # 55 SVG icon components
│   ├── Header.tsx
│   ├── Footer.tsx
│   ├── LoadingSpinner.tsx
│   ├── ErrorDisplay.tsx
│   ├── ConfirmationModal.tsx
│   ├── DarkModeToggle.tsx
│   ├── StatCard.tsx
│   ├── PasswordInput.tsx
│   └── ...
├── features/              # Feature-specific components
│   ├── charts/            # PriceChart, PriceHistoryChart
│   ├── flyer/             # FlyerDisplay, FlyerList, FlyerUploader
│   ├── shopping/          # ShoppingListComponent, WatchedItemsList
│   └── voice-assistant/   # VoiceAssistant
├── layouts/               # Page layouts
│   └── MainLayout.tsx
├── pages/                 # Page components
│   └── admin/components/  # Admin-specific components
└── providers/             # Context providers
```

### Styling Approach

**Tailwind CSS** with utility-first classes:

```typescript
// Component example with consistent styling patterns
<button className="px-4 py-2 bg-brand-primary text-white rounded-lg
                   hover:bg-brand-dark transition-colors duration-200
                   focus:outline-none focus:ring-2 focus:ring-brand-primary
                   focus:ring-offset-2 dark:focus:ring-offset-gray-800">
  Click me
</button>
```

**Common Utility Patterns**:

| Pattern | Classes |
| ------- | ------- |
| Card container | `bg-white dark:bg-gray-800 rounded-lg shadow-md p-6` |
| Primary button | `bg-brand-primary hover:bg-brand-dark text-white rounded-lg px-4 py-2` |
| Secondary button | `bg-gray-100 dark:bg-gray-700 text-gray-700 dark:text-gray-200` |
| Input field | `border border-gray-300 dark:border-gray-600 rounded-md px-3 py-2` |
| Focus ring | `focus:outline-none focus:ring-2 focus:ring-brand-primary` |

### Color System

**Brand Colors** (Tailwind theme extensions):

- `brand-primary` - Primary brand color (blue/teal)
- `brand-light` - Lighter variant
- `brand-dark` - Darker variant for hover states
- `brand-secondary` - Secondary accent color

**Semantic Colors**:

- Gray scale: `gray-50` through `gray-950`
- Error: `red-500`, `red-600`
- Success: `green-500`, `green-600`
- Warning: `yellow-500`, `orange-500`
- Info: `blue-500`, `blue-600`

### Dark Mode Implementation

Dark mode is fully implemented using Tailwind's `dark:` variant:

```typescript
// Initialization in useAppInitialization hook
const initializeDarkMode = () => {
  // Priority: user profile > localStorage > system preference
  const stored = localStorage.getItem('darkMode');
  const systemPreference = window.matchMedia('(prefers-color-scheme: dark)').matches;
  const isDarkMode = stored ? stored === 'true' : systemPreference;

  document.documentElement.classList.toggle('dark', isDarkMode);
  return isDarkMode;
};
```

**Usage in components**:

```typescript
<div className="bg-white dark:bg-gray-800 text-gray-900 dark:text-white">
  Content adapts to theme
</div>
```

### Icon System

**55 custom SVG icon components** in `src/components/icons/`:

```typescript
// Icon component pattern
interface IconProps extends React.SVGProps<SVGSVGElement> {
  title?: string;
}

export const CheckCircleIcon: React.FC<IconProps> = ({ title, ...props }) => (
  <svg {...props} fill="currentColor" viewBox="0 0 24 24">
    {title && <title>{title}</title>}
    <path d="..." />
  </svg>
);
```

**Usage**:

```typescript
<CheckCircleIcon className="w-5 h-5 text-green-500" title="Success" />
```

**External icons**: Lucide React (`lucide-react` v0.555.0) used for additional icons.

### Accessibility Patterns

**ARIA Attributes**:

```typescript
// Modal pattern
<div role="dialog" aria-modal="true" aria-labelledby="modal-title">
  <h2 id="modal-title">Modal Title</h2>
</div>

// Button with label
<button aria-label="Close modal">
  <XMarkIcon aria-hidden="true" />
</button>

// Loading state
<div role="status" aria-live="polite">
  <LoadingSpinner />
</div>
```

**Focus Management**:

- Consistent focus rings: `focus:ring-2 focus:ring-brand-primary focus:ring-offset-2`
- Dark mode offset: `dark:focus:ring-offset-gray-800`
- No outline: `focus:outline-none` (using ring instead)

### State Management

**Context Providers** (see ADR-005):

| Provider | Purpose |
| -------- | ------- |
| `AuthProvider` | Authentication state |
| `ModalProvider` | Modal open/close state |
| `FlyersProvider` | Flyer data |
| `MasterItemsProvider` | Grocery items |
| `UserDataProvider` | User-specific data |

**Provider Hierarchy** in `AppProviders.tsx`:

```typescript
<QueryClientProvider>
  <ModalProvider>
    <AuthProvider>
      <FlyersProvider>
        <MasterItemsProvider>
          <UserDataProvider>
            {children}
          </UserDataProvider>
        </MasterItemsProvider>
      </FlyersProvider>
    </AuthProvider>
  </ModalProvider>
</QueryClientProvider>
```

## Key Files

- `tailwind.config.js` - Tailwind CSS configuration
- `src/index.css` - Tailwind CSS entry point
- `src/components/` - Shared UI components
- `src/components/icons/` - Icon component library (55 icons)
- `src/providers/AppProviders.tsx` - Context provider composition
- `src/hooks/useAppInitialization.ts` - Dark mode initialization

## Component Guidelines

### When to Create Shared Components

Create a shared component in `src/components/` when:

1. Used in 3+ places across the application
2. Represents a reusable UI pattern (buttons, cards, modals)
3. Has consistent styling/behavior requirements

### Naming Conventions

- **Components**: PascalCase (`LoadingSpinner.tsx`)
- **Icons**: PascalCase with `Icon` suffix (`CheckCircleIcon.tsx`)
- **Hooks**: camelCase with `use` prefix (`useModal.ts`)
- **Contexts**: PascalCase with `Context` suffix (`AuthContext.tsx`)

### Styling Guidelines

1. Use Tailwind utility classes exclusively
2. Include dark mode variants for all colors: `bg-white dark:bg-gray-800`
3. Add focus states for interactive elements
4. Use semantic color names from the design system

## Future Enhancements (Storybook Setup)

To complete ADR-012 implementation:

1. **Install Storybook**:

   ```bash
   npx storybook@latest init
   ```

2. **Create stories for core components** (see the sketch after this list):
   - Button variants
   - Form inputs (PasswordInput, etc.)
   - Modal components
   - Loading states
   - Icon showcase

3. **Add visual regression testing** with Chromatic or Percy

4. **Document design tokens** formally in Storybook

5. **Create component composition guidelines**
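
For step 2, a story in Component Story Format might look roughly like this; the story file, import path, and props are illustrative assumptions, not existing code:

```typescript
// Hypothetical DarkModeToggle.stories.tsx sketch (CSF 3); adjust the path to the real component.
import type { Meta, StoryObj } from '@storybook/react';
import { DarkModeToggle } from '../components/DarkModeToggle';

const meta: Meta<typeof DarkModeToggle> = {
  title: 'Components/DarkModeToggle',
  component: DarkModeToggle,
};
export default meta;

type Story = StoryObj<typeof DarkModeToggle>;

export const Default: Story = {};
```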

package-lock.json (generated, 4 changed lines)

```diff
@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.9.63",
+  "version": "0.9.69",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.9.63",
+      "version": "0.9.69",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
```

```diff
@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.9.63",
+  "version": "0.9.69",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
```

src/hooks/mutations/useAddShoppingListItemMutation.test.tsx (new file, 128 lines)
@@ -0,0 +1,128 @@

```typescript
// src/hooks/mutations/useAddShoppingListItemMutation.test.tsx
import { renderHook, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import type { ReactNode } from 'react';
import { useAddShoppingListItemMutation } from './useAddShoppingListItemMutation';
import * as apiClient from '../../services/apiClient';
import * as notificationService from '../../services/notificationService';

vi.mock('../../services/apiClient');
vi.mock('../../services/notificationService');

const mockedApiClient = vi.mocked(apiClient);
const mockedNotifications = vi.mocked(notificationService);

describe('useAddShoppingListItemMutation', () => {
  let queryClient: QueryClient;

  const wrapper = ({ children }: { children: ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  );

  beforeEach(() => {
    vi.resetAllMocks();
    queryClient = new QueryClient({
      defaultOptions: {
        queries: { retry: false },
        mutations: { retry: false },
      },
    });
  });

  it('should add a master item to shopping list successfully', async () => {
    const mockResponse = { shopping_list_item_id: 1, master_item_id: 42 };
    mockedApiClient.addShoppingListItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve(mockResponse),
    } as Response);

    const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });

    result.current.mutate({ listId: 1, item: { masterItemId: 42 } });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(mockedApiClient.addShoppingListItem).toHaveBeenCalledWith(1, { masterItemId: 42 });
    expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Item added to shopping list');
  });

  it('should add a custom item to shopping list successfully', async () => {
    const mockResponse = { shopping_list_item_id: 2, custom_item_name: 'Special Milk' };
    mockedApiClient.addShoppingListItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve(mockResponse),
    } as Response);

    const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });

    result.current.mutate({ listId: 1, item: { customItemName: 'Special Milk' } });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(mockedApiClient.addShoppingListItem).toHaveBeenCalledWith(1, { customItemName: 'Special Milk' });
  });

  it('should invalidate shopping-lists query on success', async () => {
    mockedApiClient.addShoppingListItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve({}),
    } as Response);

    const invalidateQueriesSpy = vi.spyOn(queryClient, 'invalidateQueries');

    const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });

    result.current.mutate({ listId: 1, item: { masterItemId: 42 } });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(invalidateQueriesSpy).toHaveBeenCalledWith({ queryKey: ['shopping-lists'] });
  });

  it('should handle API error with error message', async () => {
    mockedApiClient.addShoppingListItem.mockResolvedValue({
      ok: false,
      status: 400,
      json: () => Promise.resolve({ message: 'Item already exists' }),
    } as Response);

    const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });

    result.current.mutate({ listId: 1, item: { masterItemId: 42 } });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Item already exists');
    expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Item already exists');
  });

  it('should handle API error without message', async () => {
    mockedApiClient.addShoppingListItem.mockResolvedValue({
      ok: false,
      status: 500,
      json: () => Promise.reject(new Error('Parse error')),
    } as Response);

    const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });

    result.current.mutate({ listId: 1, item: { masterItemId: 42 } });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Request failed with status 500');
    expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Request failed with status 500');
  });

  it('should handle network error', async () => {
    mockedApiClient.addShoppingListItem.mockRejectedValue(new Error('Network error'));

    const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });

    result.current.mutate({ listId: 1, item: { masterItemId: 42 } });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Network error');
  });
});
```

src/hooks/mutations/useAddWatchedItemMutation.test.tsx (new file, 115 lines)
@@ -0,0 +1,115 @@

```typescript
// src/hooks/mutations/useAddWatchedItemMutation.test.tsx
import { renderHook, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import type { ReactNode } from 'react';
import { useAddWatchedItemMutation } from './useAddWatchedItemMutation';
import * as apiClient from '../../services/apiClient';
import * as notificationService from '../../services/notificationService';

vi.mock('../../services/apiClient');
vi.mock('../../services/notificationService');

const mockedApiClient = vi.mocked(apiClient);
const mockedNotifications = vi.mocked(notificationService);

describe('useAddWatchedItemMutation', () => {
  let queryClient: QueryClient;

  const wrapper = ({ children }: { children: ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  );

  beforeEach(() => {
    vi.resetAllMocks();
    queryClient = new QueryClient({
      defaultOptions: {
        queries: { retry: false },
        mutations: { retry: false },
      },
    });
  });

  it('should add a watched item successfully with category', async () => {
    const mockResponse = { id: 1, item_name: 'Milk', category: 'Dairy' };
    mockedApiClient.addWatchedItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve(mockResponse),
    } as Response);

    const { result } = renderHook(() => useAddWatchedItemMutation(), { wrapper });

    result.current.mutate({ itemName: 'Milk', category: 'Dairy' });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(mockedApiClient.addWatchedItem).toHaveBeenCalledWith('Milk', 'Dairy');
    expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Item added to watched list');
  });

  it('should add a watched item without category', async () => {
    const mockResponse = { id: 1, item_name: 'Bread' };
    mockedApiClient.addWatchedItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve(mockResponse),
    } as Response);

    const { result } = renderHook(() => useAddWatchedItemMutation(), { wrapper });

    result.current.mutate({ itemName: 'Bread' });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(mockedApiClient.addWatchedItem).toHaveBeenCalledWith('Bread', '');
  });

  it('should invalidate watched-items query on success', async () => {
    mockedApiClient.addWatchedItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve({ id: 1 }),
    } as Response);

    const invalidateQueriesSpy = vi.spyOn(queryClient, 'invalidateQueries');

    const { result } = renderHook(() => useAddWatchedItemMutation(), { wrapper });

    result.current.mutate({ itemName: 'Eggs' });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(invalidateQueriesSpy).toHaveBeenCalledWith({ queryKey: ['watched-items'] });
  });

  it('should handle API error with error message', async () => {
    mockedApiClient.addWatchedItem.mockResolvedValue({
      ok: false,
      status: 409,
      json: () => Promise.resolve({ message: 'Item already watched' }),
    } as Response);

    const { result } = renderHook(() => useAddWatchedItemMutation(), { wrapper });

    result.current.mutate({ itemName: 'Milk' });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Item already watched');
    expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Item already watched');
  });

  it('should handle API error without message', async () => {
    mockedApiClient.addWatchedItem.mockResolvedValue({
      ok: false,
      status: 500,
      json: () => Promise.reject(new Error('Parse error')),
    } as Response);

    const { result } = renderHook(() => useAddWatchedItemMutation(), { wrapper });

    result.current.mutate({ itemName: 'Cheese' });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Request failed with status 500');
  });
});
```

src/hooks/mutations/useCreateShoppingListMutation.test.tsx (new file, 99 lines)
@@ -0,0 +1,99 @@

```typescript
// src/hooks/mutations/useCreateShoppingListMutation.test.tsx
import { renderHook, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import type { ReactNode } from 'react';
import { useCreateShoppingListMutation } from './useCreateShoppingListMutation';
import * as apiClient from '../../services/apiClient';
import * as notificationService from '../../services/notificationService';

vi.mock('../../services/apiClient');
vi.mock('../../services/notificationService');

const mockedApiClient = vi.mocked(apiClient);
const mockedNotifications = vi.mocked(notificationService);

describe('useCreateShoppingListMutation', () => {
  let queryClient: QueryClient;

  const wrapper = ({ children }: { children: ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  );

  beforeEach(() => {
    vi.resetAllMocks();
    queryClient = new QueryClient({
      defaultOptions: {
        queries: { retry: false },
        mutations: { retry: false },
      },
    });
  });

  it('should create a shopping list successfully', async () => {
    const mockResponse = { shopping_list_id: 1, name: 'Weekly Groceries' };
    mockedApiClient.createShoppingList.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve(mockResponse),
    } as Response);

    const { result } = renderHook(() => useCreateShoppingListMutation(), { wrapper });

    result.current.mutate({ name: 'Weekly Groceries' });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(mockedApiClient.createShoppingList).toHaveBeenCalledWith('Weekly Groceries');
    expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Shopping list created');
  });

  it('should invalidate shopping-lists query on success', async () => {
    mockedApiClient.createShoppingList.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve({ shopping_list_id: 1 }),
    } as Response);

    const invalidateQueriesSpy = vi.spyOn(queryClient, 'invalidateQueries');

    const { result } = renderHook(() => useCreateShoppingListMutation(), { wrapper });

    result.current.mutate({ name: 'Test List' });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(invalidateQueriesSpy).toHaveBeenCalledWith({ queryKey: ['shopping-lists'] });
  });

  it('should handle API error with error message', async () => {
    mockedApiClient.createShoppingList.mockResolvedValue({
      ok: false,
      status: 400,
      json: () => Promise.resolve({ message: 'List name already exists' }),
    } as Response);

    const { result } = renderHook(() => useCreateShoppingListMutation(), { wrapper });

    result.current.mutate({ name: 'Duplicate List' });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('List name already exists');
    expect(mockedNotifications.notifyError).toHaveBeenCalledWith('List name already exists');
  });

  it('should handle API error without message', async () => {
    mockedApiClient.createShoppingList.mockResolvedValue({
      ok: false,
      status: 500,
      json: () => Promise.reject(new Error('Parse error')),
    } as Response);

    const { result } = renderHook(() => useCreateShoppingListMutation(), { wrapper });

    result.current.mutate({ name: 'Test' });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Request failed with status 500');
  });
});
```

src/hooks/mutations/useDeleteShoppingListMutation.test.tsx (new file, 99 lines)
@@ -0,0 +1,99 @@

```typescript
// src/hooks/mutations/useDeleteShoppingListMutation.test.tsx
import { renderHook, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import type { ReactNode } from 'react';
import { useDeleteShoppingListMutation } from './useDeleteShoppingListMutation';
import * as apiClient from '../../services/apiClient';
import * as notificationService from '../../services/notificationService';

vi.mock('../../services/apiClient');
vi.mock('../../services/notificationService');

const mockedApiClient = vi.mocked(apiClient);
const mockedNotifications = vi.mocked(notificationService);

describe('useDeleteShoppingListMutation', () => {
  let queryClient: QueryClient;

  const wrapper = ({ children }: { children: ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  );

  beforeEach(() => {
    vi.resetAllMocks();
    queryClient = new QueryClient({
      defaultOptions: {
        queries: { retry: false },
        mutations: { retry: false },
      },
    });
  });

  it('should delete a shopping list successfully', async () => {
    const mockResponse = { success: true };
    mockedApiClient.deleteShoppingList.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve(mockResponse),
    } as Response);

    const { result } = renderHook(() => useDeleteShoppingListMutation(), { wrapper });

    result.current.mutate({ listId: 123 });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(mockedApiClient.deleteShoppingList).toHaveBeenCalledWith(123);
    expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Shopping list deleted');
  });

  it('should invalidate shopping-lists query on success', async () => {
    mockedApiClient.deleteShoppingList.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve({ success: true }),
    } as Response);

    const invalidateQueriesSpy = vi.spyOn(queryClient, 'invalidateQueries');

    const { result } = renderHook(() => useDeleteShoppingListMutation(), { wrapper });

    result.current.mutate({ listId: 456 });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(invalidateQueriesSpy).toHaveBeenCalledWith({ queryKey: ['shopping-lists'] });
  });

  it('should handle API error with error message', async () => {
    mockedApiClient.deleteShoppingList.mockResolvedValue({
      ok: false,
      status: 404,
      json: () => Promise.resolve({ message: 'Shopping list not found' }),
    } as Response);

    const { result } = renderHook(() => useDeleteShoppingListMutation(), { wrapper });

    result.current.mutate({ listId: 999 });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Shopping list not found');
    expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Shopping list not found');
  });

  it('should handle API error without message', async () => {
    mockedApiClient.deleteShoppingList.mockResolvedValue({
      ok: false,
      status: 500,
      json: () => Promise.reject(new Error('Parse error')),
    } as Response);

    const { result } = renderHook(() => useDeleteShoppingListMutation(), { wrapper });

    result.current.mutate({ listId: 123 });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Request failed with status 500');
  });
});
```
@@ -0,0 +1,99 @@

```typescript
// src/hooks/mutations/useRemoveShoppingListItemMutation.test.tsx
import { renderHook, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import type { ReactNode } from 'react';
import { useRemoveShoppingListItemMutation } from './useRemoveShoppingListItemMutation';
import * as apiClient from '../../services/apiClient';
import * as notificationService from '../../services/notificationService';

vi.mock('../../services/apiClient');
vi.mock('../../services/notificationService');

const mockedApiClient = vi.mocked(apiClient);
const mockedNotifications = vi.mocked(notificationService);

describe('useRemoveShoppingListItemMutation', () => {
  let queryClient: QueryClient;

  const wrapper = ({ children }: { children: ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  );

  beforeEach(() => {
    vi.resetAllMocks();
    queryClient = new QueryClient({
      defaultOptions: {
        queries: { retry: false },
        mutations: { retry: false },
      },
    });
  });

  it('should remove an item from shopping list successfully', async () => {
    const mockResponse = { success: true };
    mockedApiClient.removeShoppingListItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve(mockResponse),
    } as Response);

    const { result } = renderHook(() => useRemoveShoppingListItemMutation(), { wrapper });

    result.current.mutate({ itemId: 42 });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(mockedApiClient.removeShoppingListItem).toHaveBeenCalledWith(42);
    expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Item removed from shopping list');
  });

  it('should invalidate shopping-lists query on success', async () => {
    mockedApiClient.removeShoppingListItem.mockResolvedValue({
      ok: true,
      json: () => Promise.resolve({ success: true }),
    } as Response);

    const invalidateQueriesSpy = vi.spyOn(queryClient, 'invalidateQueries');

    const { result } = renderHook(() => useRemoveShoppingListItemMutation(), { wrapper });

    result.current.mutate({ itemId: 100 });

    await waitFor(() => expect(result.current.isSuccess).toBe(true));

    expect(invalidateQueriesSpy).toHaveBeenCalledWith({ queryKey: ['shopping-lists'] });
  });

  it('should handle API error with error message', async () => {
    mockedApiClient.removeShoppingListItem.mockResolvedValue({
      ok: false,
      status: 404,
      json: () => Promise.resolve({ message: 'Item not found' }),
    } as Response);

    const { result } = renderHook(() => useRemoveShoppingListItemMutation(), { wrapper });

    result.current.mutate({ itemId: 999 });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Item not found');
    expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Item not found');
  });

  it('should handle API error without message', async () => {
    mockedApiClient.removeShoppingListItem.mockResolvedValue({
      ok: false,
      status: 500,
      json: () => Promise.reject(new Error('Parse error')),
    } as Response);

    const { result } = renderHook(() => useRemoveShoppingListItemMutation(), { wrapper });

    result.current.mutate({ itemId: 42 });

    await waitFor(() => expect(result.current.isError).toBe(true));

    expect(result.current.error?.message).toBe('Request failed with status 500');
  });
});
```
99
src/hooks/mutations/useRemoveWatchedItemMutation.test.tsx
Normal file
99
src/hooks/mutations/useRemoveWatchedItemMutation.test.tsx
Normal file
@@ -0,0 +1,99 @@
|
||||
// src/hooks/mutations/useRemoveWatchedItemMutation.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useRemoveWatchedItemMutation } from './useRemoveWatchedItemMutation';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import * as notificationService from '../../services/notificationService';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
vi.mock('../../services/notificationService');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
const mockedNotifications = vi.mocked(notificationService);
|
||||
|
||||
describe('useRemoveWatchedItemMutation', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
mutations: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should remove a watched item successfully', async () => {
|
||||
const mockResponse = { success: true };
|
||||
mockedApiClient.removeWatchedItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockResponse),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useRemoveWatchedItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ masterItemId: 123 });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.removeWatchedItem).toHaveBeenCalledWith(123);
|
||||
expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Item removed from watched list');
|
||||
});
|
||||
|
||||
it('should invalidate watched-items query on success', async () => {
|
||||
mockedApiClient.removeWatchedItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true }),
|
||||
} as Response);
|
||||
|
||||
const invalidateQueriesSpy = vi.spyOn(queryClient, 'invalidateQueries');
|
||||
|
||||
const { result } = renderHook(() => useRemoveWatchedItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ masterItemId: 456 });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(invalidateQueriesSpy).toHaveBeenCalledWith({ queryKey: ['watched-items'] });
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.removeWatchedItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 404,
|
||||
json: () => Promise.resolve({ message: 'Watched item not found' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useRemoveWatchedItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ masterItemId: 999 });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Watched item not found');
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Watched item not found');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.removeWatchedItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useRemoveWatchedItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ masterItemId: 123 });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
});
|
||||
159
src/hooks/mutations/useUpdateShoppingListItemMutation.test.tsx
Normal file
159
src/hooks/mutations/useUpdateShoppingListItemMutation.test.tsx
Normal file
@@ -0,0 +1,159 @@
|
||||
// src/hooks/mutations/useUpdateShoppingListItemMutation.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useUpdateShoppingListItemMutation } from './useUpdateShoppingListItemMutation';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import * as notificationService from '../../services/notificationService';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
vi.mock('../../services/notificationService');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
const mockedNotifications = vi.mocked(notificationService);
|
||||
|
||||
describe('useUpdateShoppingListItemMutation', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
mutations: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should update a shopping list item successfully', async () => {
|
||||
const mockResponse = { id: 42, quantity: 3 };
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockResponse),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 42, updates: { quantity: 3 } });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.updateShoppingListItem).toHaveBeenCalledWith(42, { quantity: 3 });
|
||||
expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Shopping list item updated');
|
||||
});
|
||||
|
||||
it('should update is_purchased status', async () => {
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ id: 42, is_purchased: true }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 42, updates: { is_purchased: true } });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.updateShoppingListItem).toHaveBeenCalledWith(42, { is_purchased: true });
|
||||
});
|
||||
|
||||
it('should update custom_item_name', async () => {
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ id: 42, custom_item_name: 'Organic Milk' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 42, updates: { custom_item_name: 'Organic Milk' } });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.updateShoppingListItem).toHaveBeenCalledWith(42, { custom_item_name: 'Organic Milk' });
|
||||
});
|
||||
|
||||
it('should update notes', async () => {
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ id: 42, notes: 'Get the 2% variety' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 42, updates: { notes: 'Get the 2% variety' } });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.updateShoppingListItem).toHaveBeenCalledWith(42, { notes: 'Get the 2% variety' });
|
||||
});
|
||||
|
||||
it('should update multiple fields at once', async () => {
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ id: 42, quantity: 2, notes: 'Important' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 42, updates: { quantity: 2, notes: 'Important' } });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.updateShoppingListItem).toHaveBeenCalledWith(42, { quantity: 2, notes: 'Important' });
|
||||
});
|
||||
|
||||
it('should invalidate shopping-lists query on success', async () => {
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ id: 42 }),
|
||||
} as Response);
|
||||
|
||||
const invalidateQueriesSpy = vi.spyOn(queryClient, 'invalidateQueries');
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 42, updates: { quantity: 5 } });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(invalidateQueriesSpy).toHaveBeenCalledWith({ queryKey: ['shopping-lists'] });
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 404,
|
||||
json: () => Promise.resolve({ message: 'Item not found' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 999, updates: { quantity: 1 } });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Item not found');
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Item not found');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.updateShoppingListItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUpdateShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 42, updates: { quantity: 1 } });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
});
|
||||
102
src/hooks/queries/useActivityLogQuery.test.tsx
Normal file
102
src/hooks/queries/useActivityLogQuery.test.tsx
Normal file
@@ -0,0 +1,102 @@
|
||||
// src/hooks/queries/useActivityLogQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useActivityLogQuery } from './useActivityLogQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useActivityLogQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch activity log with default params', async () => {
|
||||
const mockActivityLog = [
|
||||
{ id: 1, action: 'user_login', timestamp: '2024-01-01T10:00:00Z' },
|
||||
{ id: 2, action: 'flyer_uploaded', timestamp: '2024-01-01T11:00:00Z' },
|
||||
];
|
||||
mockedApiClient.fetchActivityLog.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockActivityLog),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useActivityLogQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchActivityLog).toHaveBeenCalledWith(20, 0);
|
||||
expect(result.current.data).toEqual(mockActivityLog);
|
||||
});
|
||||
|
||||
it('should fetch activity log with custom limit and offset', async () => {
|
||||
const mockActivityLog = [{ id: 3, action: 'item_added', timestamp: '2024-01-01T12:00:00Z' }];
|
||||
mockedApiClient.fetchActivityLog.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockActivityLog),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useActivityLogQuery(10, 5), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchActivityLog).toHaveBeenCalledWith(10, 5);
|
||||
expect(result.current.data).toEqual(mockActivityLog);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchActivityLog.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 403,
|
||||
json: () => Promise.resolve({ message: 'Admin access required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useActivityLogQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Admin access required');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchActivityLog.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useActivityLogQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array for no activity log entries', async () => {
|
||||
mockedApiClient.fetchActivityLog.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useActivityLogQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
78
src/hooks/queries/useApplicationStatsQuery.test.tsx
Normal file
78
src/hooks/queries/useApplicationStatsQuery.test.tsx
Normal file
@@ -0,0 +1,78 @@
|
||||
// src/hooks/queries/useApplicationStatsQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useApplicationStatsQuery } from './useApplicationStatsQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useApplicationStatsQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch application stats successfully', async () => {
|
||||
const mockStats = {
|
||||
flyerCount: 150,
|
||||
userCount: 500,
|
||||
flyerItemCount: 5000,
|
||||
storeCount: 25,
|
||||
pendingCorrectionsCount: 10,
|
||||
recipeCount: 75,
|
||||
};
|
||||
mockedApiClient.getApplicationStats.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockStats),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useApplicationStatsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.getApplicationStats).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockStats);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.getApplicationStats.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 403,
|
||||
json: () => Promise.resolve({ message: 'Admin access required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useApplicationStatsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Admin access required');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.getApplicationStats.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useApplicationStatsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
});
|
||||
88
src/hooks/queries/useCategoriesQuery.test.tsx
Normal file
88
src/hooks/queries/useCategoriesQuery.test.tsx
Normal file
@@ -0,0 +1,88 @@
|
||||
// src/hooks/queries/useCategoriesQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useCategoriesQuery } from './useCategoriesQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useCategoriesQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch categories successfully', async () => {
|
||||
const mockCategories = [
|
||||
{ category_id: 1, name: 'Dairy' },
|
||||
{ category_id: 2, name: 'Bakery' },
|
||||
{ category_id: 3, name: 'Produce' },
|
||||
];
|
||||
mockedApiClient.fetchCategories.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockCategories),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useCategoriesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchCategories).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockCategories);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchCategories.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: 'Database error' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useCategoriesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Database error');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchCategories.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useCategoriesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array for no categories', async () => {
|
||||
mockedApiClient.fetchCategories.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useCategoriesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
111
src/hooks/queries/useFlyerItemsQuery.test.tsx
Normal file
111
src/hooks/queries/useFlyerItemsQuery.test.tsx
Normal file
@@ -0,0 +1,111 @@
|
||||
// src/hooks/queries/useFlyerItemsQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useFlyerItemsQuery } from './useFlyerItemsQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useFlyerItemsQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch flyer items when flyerId is provided', async () => {
|
||||
const mockFlyerItems = [
|
||||
{ item_id: 1, name: 'Milk', price: 3.99, flyer_id: 42 },
|
||||
{ item_id: 2, name: 'Bread', price: 2.49, flyer_id: 42 },
|
||||
];
|
||||
mockedApiClient.fetchFlyerItems.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ items: mockFlyerItems }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsQuery(42), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchFlyerItems).toHaveBeenCalledWith(42);
|
||||
expect(result.current.data).toEqual(mockFlyerItems);
|
||||
});
|
||||
|
||||
it('should not fetch when flyerId is undefined', async () => {
|
||||
const { result } = renderHook(() => useFlyerItemsQuery(undefined), { wrapper });
|
||||
|
||||
// Wait a bit to ensure the query doesn't run
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(mockedApiClient.fetchFlyerItems).not.toHaveBeenCalled();
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.isFetching).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchFlyerItems.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 404,
|
||||
json: () => Promise.resolve({ message: 'Flyer not found' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsQuery(999), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Flyer not found');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchFlyerItems.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsQuery(42), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array when API returns no items', async () => {
|
||||
mockedApiClient.fetchFlyerItems.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ items: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsQuery(42), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle response without items property', async () => {
|
||||
mockedApiClient.fetchFlyerItems.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({}),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsQuery(42), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
102
src/hooks/queries/useFlyersQuery.test.tsx
Normal file
102
src/hooks/queries/useFlyersQuery.test.tsx
Normal file
@@ -0,0 +1,102 @@
|
||||
// src/hooks/queries/useFlyersQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useFlyersQuery } from './useFlyersQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useFlyersQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch flyers successfully with default params', async () => {
|
||||
const mockFlyers = [
|
||||
{ flyer_id: 1, store_name: 'Store A', valid_from: '2024-01-01', valid_to: '2024-01-07' },
|
||||
{ flyer_id: 2, store_name: 'Store B', valid_from: '2024-01-01', valid_to: '2024-01-07' },
|
||||
];
|
||||
mockedApiClient.fetchFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockFlyers),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyersQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchFlyers).toHaveBeenCalledWith(20, 0);
|
||||
expect(result.current.data).toEqual(mockFlyers);
|
||||
});
|
||||
|
||||
it('should fetch flyers with custom limit and offset', async () => {
|
||||
const mockFlyers = [{ flyer_id: 3, store_name: 'Store C' }];
|
||||
mockedApiClient.fetchFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockFlyers),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyersQuery(10, 5), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchFlyers).toHaveBeenCalledWith(10, 5);
|
||||
expect(result.current.data).toEqual(mockFlyers);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: 'Server error' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyersQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Server error');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyersQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array for no flyers', async () => {
|
||||
mockedApiClient.fetchFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyersQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
88
src/hooks/queries/useMasterItemsQuery.test.tsx
Normal file
88
src/hooks/queries/useMasterItemsQuery.test.tsx
Normal file
@@ -0,0 +1,88 @@
|
||||
// src/hooks/queries/useMasterItemsQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useMasterItemsQuery } from './useMasterItemsQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useMasterItemsQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch master items successfully', async () => {
|
||||
const mockMasterItems = [
|
||||
{ master_item_id: 1, name: 'Milk', category: 'Dairy' },
|
||||
{ master_item_id: 2, name: 'Bread', category: 'Bakery' },
|
||||
{ master_item_id: 3, name: 'Eggs', category: 'Dairy' },
|
||||
];
|
||||
mockedApiClient.fetchMasterItems.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockMasterItems),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useMasterItemsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchMasterItems).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockMasterItems);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchMasterItems.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: 'Database error' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useMasterItemsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Database error');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchMasterItems.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useMasterItemsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array for no master items', async () => {
|
||||
mockedApiClient.fetchMasterItems.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useMasterItemsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
98
src/hooks/queries/useShoppingListsQuery.test.tsx
Normal file
98
src/hooks/queries/useShoppingListsQuery.test.tsx
Normal file
@@ -0,0 +1,98 @@
|
||||
// src/hooks/queries/useShoppingListsQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useShoppingListsQuery } from './useShoppingListsQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useShoppingListsQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch shopping lists when enabled', async () => {
|
||||
const mockShoppingLists = [
|
||||
{ shopping_list_id: 1, name: 'Weekly Groceries', items: [] },
|
||||
{ shopping_list_id: 2, name: 'Party Supplies', items: [] },
|
||||
];
|
||||
mockedApiClient.fetchShoppingLists.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockShoppingLists),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useShoppingListsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchShoppingLists).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockShoppingLists);
|
||||
});
|
||||
|
||||
it('should not fetch shopping lists when disabled', async () => {
|
||||
const { result } = renderHook(() => useShoppingListsQuery(false), { wrapper });
|
||||
|
||||
// Wait a bit to ensure the query doesn't run
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(mockedApiClient.fetchShoppingLists).not.toHaveBeenCalled();
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.isFetching).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchShoppingLists.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Unauthorized' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useShoppingListsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Unauthorized');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchShoppingLists.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useShoppingListsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array for no shopping lists', async () => {
|
||||
mockedApiClient.fetchShoppingLists.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useShoppingListsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
87
src/hooks/queries/useSuggestedCorrectionsQuery.test.tsx
Normal file
87
src/hooks/queries/useSuggestedCorrectionsQuery.test.tsx
Normal file
@@ -0,0 +1,87 @@
|
||||
// src/hooks/queries/useSuggestedCorrectionsQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useSuggestedCorrectionsQuery } from './useSuggestedCorrectionsQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useSuggestedCorrectionsQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch suggested corrections successfully', async () => {
|
||||
const mockCorrections = [
|
||||
{ correction_id: 1, item_name: 'Milk', suggested_name: 'Whole Milk', status: 'pending' },
|
||||
{ correction_id: 2, item_name: 'Bread', suggested_name: 'White Bread', status: 'pending' },
|
||||
];
|
||||
mockedApiClient.getSuggestedCorrections.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockCorrections),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useSuggestedCorrectionsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.getSuggestedCorrections).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockCorrections);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.getSuggestedCorrections.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 403,
|
||||
json: () => Promise.resolve({ message: 'Admin access required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useSuggestedCorrectionsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Admin access required');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.getSuggestedCorrections.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useSuggestedCorrectionsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array for no corrections', async () => {
|
||||
mockedApiClient.getSuggestedCorrections.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useSuggestedCorrectionsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
98
src/hooks/queries/useWatchedItemsQuery.test.tsx
Normal file
98
src/hooks/queries/useWatchedItemsQuery.test.tsx
Normal file
@@ -0,0 +1,98 @@
|
||||
// src/hooks/queries/useWatchedItemsQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useWatchedItemsQuery } from './useWatchedItemsQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useWatchedItemsQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch watched items when enabled', async () => {
|
||||
const mockWatchedItems = [
|
||||
{ master_item_id: 1, name: 'Milk', category: 'Dairy' },
|
||||
{ master_item_id: 2, name: 'Bread', category: 'Bakery' },
|
||||
];
|
||||
mockedApiClient.fetchWatchedItems.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockWatchedItems),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useWatchedItemsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchWatchedItems).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockWatchedItems);
|
||||
});
|
||||
|
||||
it('should not fetch watched items when disabled', async () => {
|
||||
const { result } = renderHook(() => useWatchedItemsQuery(false), { wrapper });
|
||||
|
||||
// Wait a bit to ensure the query doesn't run
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
expect(mockedApiClient.fetchWatchedItems).not.toHaveBeenCalled();
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.isFetching).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchWatchedItems.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Unauthorized' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useWatchedItemsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Unauthorized');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchWatchedItems.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useWatchedItemsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should return empty array for no watched items', async () => {
|
||||
mockedApiClient.fetchWatchedItems.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useWatchedItemsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
});
|
||||
@@ -8,6 +8,7 @@ import { z } from 'zod';
|
||||
import * as db from '../services/db/index.db';
|
||||
import type { UserProfile } from '../types';
|
||||
import { geocodingService } from '../services/geocodingService.server';
|
||||
import { cacheService } from '../services/cacheService.server';
|
||||
import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
|
||||
import {
|
||||
createUploadMiddleware,
|
||||
@@ -635,6 +636,44 @@ router.post(
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* POST /api/admin/system/clear-cache - Clears the application data cache.
|
||||
* Clears cached flyers, brands, and stats data from Redis.
|
||||
* Requires admin privileges.
|
||||
*/
|
||||
router.post(
|
||||
'/system/clear-cache',
|
||||
adminTriggerLimiter,
|
||||
validateRequest(emptySchema),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
const userProfile = req.user as UserProfile;
|
||||
req.log.info(
|
||||
`[Admin] Manual cache clear received from user: ${userProfile.user.user_id}`,
|
||||
);
|
||||
|
||||
try {
|
||||
const [flyersDeleted, brandsDeleted, statsDeleted] = await Promise.all([
|
||||
cacheService.invalidateFlyers(req.log),
|
||||
cacheService.invalidateBrands(req.log),
|
||||
cacheService.invalidateStats(req.log),
|
||||
]);
|
||||
|
||||
const totalDeleted = flyersDeleted + brandsDeleted + statsDeleted;
|
||||
res.status(200).json({
|
||||
message: `Successfully cleared the application cache. ${totalDeleted} keys were removed.`,
|
||||
details: {
|
||||
flyers: flyersDeleted,
|
||||
brands: brandsDeleted,
|
||||
stats: statsDeleted,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
req.log.error({ error }, '[Admin] Failed to clear application cache.');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/* Catches errors from multer (e.g., file size, file filter) */
|
||||
router.use(handleMulterError);
|
||||
|
||||
|
||||
@@ -234,6 +234,9 @@ router.post(
|
||||
* POST /api/ai/upload-legacy - Process a flyer upload from a legacy client.
|
||||
* This is an authenticated route that processes the flyer synchronously.
|
||||
* This is used for integration testing the legacy upload flow.
|
||||
*
|
||||
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
|
||||
* This synchronous endpoint is retained only for integration testing purposes.
|
||||
*/
|
||||
router.post(
|
||||
'/upload-legacy',
|
||||
@@ -282,9 +285,12 @@ router.get(
|
||||
);
|
||||
|
||||
/**
|
||||
* This endpoint saves the processed flyer data to the database. It is the final step
|
||||
* in the flyer upload workflow after the AI has extracted the data.
|
||||
* POST /api/ai/flyers/process - Saves the processed flyer data to the database.
|
||||
* This is the final step in the flyer upload workflow after the AI has extracted the data.
|
||||
* It uses `optionalAuth` to handle submissions from both anonymous and authenticated users.
|
||||
*
|
||||
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
|
||||
* This synchronous endpoint processes flyers inline and should be migrated to the queue-based approach.
|
||||
*/
|
||||
router.post(
|
||||
'/flyers/process',
|
||||
|
||||
168
src/schemas/flyer.schemas.test.ts
Normal file
168
src/schemas/flyer.schemas.test.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
// src/schemas/flyer.schemas.test.ts
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { flyerInsertSchema, flyerDbInsertSchema } from './flyer.schemas';
|
||||
|
||||
describe('flyerInsertSchema', () => {
|
||||
const validFlyer = {
|
||||
file_name: 'flyer.jpg',
|
||||
image_url: 'https://example.com/flyer.jpg',
|
||||
icon_url: 'https://example.com/icon.jpg',
|
||||
checksum: 'a'.repeat(64),
|
||||
store_name: 'Test Store',
|
||||
valid_from: '2023-01-01T00:00:00Z',
|
||||
valid_to: '2023-01-07T00:00:00Z',
|
||||
store_address: '123 Main St',
|
||||
status: 'processed',
|
||||
item_count: 10,
|
||||
uploaded_by: '123e4567-e89b-12d3-a456-426614174000',
|
||||
};
|
||||
|
||||
it('should validate a correct flyer object', () => {
|
||||
const result = flyerInsertSchema.safeParse(validFlyer);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail if file_name is missing or empty', () => {
|
||||
const invalid = { ...validFlyer, file_name: '' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error.issues[0].message).toBe('File name is required');
|
||||
}
|
||||
});
|
||||
|
||||
it('should fail if image_url is invalid', () => {
|
||||
const invalid = { ...validFlyer, image_url: 'ftp://invalid.com' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error.issues[0].message).toBe(
|
||||
'Flyer image URL must be a valid HTTP or HTTPS URL',
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it('should fail if icon_url is invalid', () => {
|
||||
const invalid = { ...validFlyer, icon_url: 'not-a-url' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail if checksum length is incorrect', () => {
|
||||
const invalid = { ...validFlyer, checksum: 'abc' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error.issues[0].message).toBe('Checksum must be 64 characters');
|
||||
}
|
||||
});
|
||||
|
||||
it('should fail if checksum is not hex', () => {
|
||||
const invalid = { ...validFlyer, checksum: 'z'.repeat(64) };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error.issues[0].message).toBe('Checksum must be a valid hexadecimal string');
|
||||
}
|
||||
});
|
||||
|
||||
it('should allow null checksum', () => {
|
||||
const valid = { ...validFlyer, checksum: null };
|
||||
const result = flyerInsertSchema.safeParse(valid);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail if store_name is missing', () => {
|
||||
const invalid = { ...validFlyer, store_name: '' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should validate valid_from and valid_to as datetimes', () => {
|
||||
const invalid = { ...validFlyer, valid_from: 'not-a-date' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow null valid_from, valid_to, store_address', () => {
|
||||
const valid = {
|
||||
...validFlyer,
|
||||
valid_from: null,
|
||||
valid_to: null,
|
||||
store_address: null,
|
||||
};
|
||||
const result = flyerInsertSchema.safeParse(valid);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate status enum', () => {
|
||||
const invalid = { ...validFlyer, status: 'invalid_status' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail if item_count is negative', () => {
|
||||
const invalid = { ...validFlyer, item_count: -1 };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error.issues[0].message).toBe('Item count must be non-negative');
|
||||
}
|
||||
});
|
||||
|
||||
it('should validate uploaded_by as UUID if present', () => {
|
||||
const invalid = { ...validFlyer, uploaded_by: 'not-a-uuid' };
|
||||
const result = flyerInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow null or undefined uploaded_by', () => {
|
||||
const validNull = { ...validFlyer, uploaded_by: null };
|
||||
expect(flyerInsertSchema.safeParse(validNull).success).toBe(true);
|
||||
|
||||
const validUndefined = { ...validFlyer, uploaded_by: undefined };
|
||||
expect(flyerInsertSchema.safeParse(validUndefined).success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('flyerDbInsertSchema', () => {
|
||||
const validDbFlyer = {
|
||||
file_name: 'flyer.jpg',
|
||||
image_url: 'https://example.com/flyer.jpg',
|
||||
icon_url: 'https://example.com/icon.jpg',
|
||||
checksum: 'a'.repeat(64),
|
||||
store_id: 1,
|
||||
valid_from: '2023-01-01T00:00:00Z',
|
||||
valid_to: '2023-01-07T00:00:00Z',
|
||||
store_address: '123 Main St',
|
||||
status: 'processed',
|
||||
item_count: 10,
|
||||
uploaded_by: '123e4567-e89b-12d3-a456-426614174000',
|
||||
};
|
||||
|
||||
it('should validate a correct DB flyer object', () => {
|
||||
const result = flyerDbInsertSchema.safeParse(validDbFlyer);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail if store_id is missing', () => {
|
||||
const { store_id, ...invalid } = validDbFlyer;
|
||||
const result = flyerDbInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail if store_id is not positive', () => {
|
||||
const invalid = { ...validDbFlyer, store_id: 0 };
|
||||
const result = flyerDbInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
if (!result.success) {
|
||||
expect(result.error.issues[0].message).toBe('Store ID must be a positive integer');
|
||||
}
|
||||
});
|
||||
|
||||
it('should fail if store_id is not an integer', () => {
|
||||
const invalid = { ...validDbFlyer, store_id: 1.5 };
|
||||
const result = flyerDbInsertSchema.safeParse(invalid);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
226
src/services/cacheService.server.ts
Normal file
226
src/services/cacheService.server.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
// src/services/cacheService.server.ts
/**
 * @file Centralized caching service implementing the Cache-Aside pattern.
 * This service provides a reusable wrapper around Redis for caching read-heavy operations.
 * See ADR-009 for the caching strategy documentation.
 */
import type { Logger } from 'pino';
import { connection as redis } from './redis.server';
import { logger as globalLogger } from './logger.server';

/**
 * TTL values in seconds for different cache types.
 * These can be tuned based on data volatility and freshness requirements.
 */
export const CACHE_TTL = {
  /** Brand/store list - rarely changes, safe to cache for 1 hour */
  BRANDS: 60 * 60,
  /** Flyer list - changes when new flyers are added, cache for 5 minutes */
  FLYERS: 5 * 60,
  /** Individual flyer data - cache for 10 minutes */
  FLYER: 10 * 60,
  /** Flyer items - cache for 10 minutes */
  FLYER_ITEMS: 10 * 60,
  /** Statistics - can be slightly stale, cache for 5 minutes */
  STATS: 5 * 60,
  /** Most frequent sales - aggregated data, cache for 15 minutes */
  FREQUENT_SALES: 15 * 60,
  /** Categories - rarely changes, cache for 1 hour */
  CATEGORIES: 60 * 60,
} as const;

/**
 * Cache key prefixes for different data types.
 * Using consistent prefixes allows for pattern-based invalidation.
 */
export const CACHE_PREFIX = {
  BRANDS: 'cache:brands',
  FLYERS: 'cache:flyers',
  FLYER: 'cache:flyer',
  FLYER_ITEMS: 'cache:flyer-items',
  STATS: 'cache:stats',
  FREQUENT_SALES: 'cache:frequent-sales',
  CATEGORIES: 'cache:categories',
} as const;

export interface CacheOptions {
  /** Time-to-live in seconds */
  ttl: number;
  /** Optional logger for this operation */
  logger?: Logger;
}

/**
 * Centralized cache service implementing the Cache-Aside pattern.
 * All cache operations are fail-safe - cache failures do not break the application.
 */
class CacheService {
  /**
   * Retrieves a value from cache.
   * @param key The cache key
   * @param logger Optional logger for this operation
   * @returns The cached value or null if not found/error
   */
  async get<T>(key: string, logger: Logger = globalLogger): Promise<T | null> {
    try {
      const cached = await redis.get(key);
      if (cached) {
        logger.debug({ cacheKey: key }, 'Cache hit');
        return JSON.parse(cached) as T;
      }
      logger.debug({ cacheKey: key }, 'Cache miss');
      return null;
    } catch (error) {
      logger.warn({ err: error, cacheKey: key }, 'Redis GET failed, proceeding without cache');
      return null;
    }
  }

  /**
   * Stores a value in cache with TTL.
   * @param key The cache key
   * @param value The value to cache (will be JSON stringified)
   * @param ttl Time-to-live in seconds
   * @param logger Optional logger for this operation
   */
  async set<T>(key: string, value: T, ttl: number, logger: Logger = globalLogger): Promise<void> {
    try {
      await redis.set(key, JSON.stringify(value), 'EX', ttl);
      logger.debug({ cacheKey: key, ttl }, 'Value cached');
    } catch (error) {
      logger.warn({ err: error, cacheKey: key }, 'Redis SET failed, value not cached');
    }
  }

  /**
   * Deletes a specific key from cache.
   * @param key The cache key to delete
   * @param logger Optional logger for this operation
   */
  async del(key: string, logger: Logger = globalLogger): Promise<void> {
    try {
      await redis.del(key);
      logger.debug({ cacheKey: key }, 'Cache key deleted');
    } catch (error) {
      logger.warn({ err: error, cacheKey: key }, 'Redis DEL failed');
    }
  }

  /**
   * Invalidates all cache keys matching a pattern.
   * Uses SCAN for safe iteration over large key sets.
   * @param pattern The pattern to match (e.g., 'cache:flyers*')
   * @param logger Optional logger for this operation
   * @returns The number of keys deleted
   */
  async invalidatePattern(pattern: string, logger: Logger = globalLogger): Promise<number> {
    let cursor = '0';
    let totalDeleted = 0;

    try {
      do {
        const [nextCursor, keys] = await redis.scan(cursor, 'MATCH', pattern, 'COUNT', 100);
        cursor = nextCursor;
        if (keys.length > 0) {
          const deletedCount = await redis.del(...keys);
          totalDeleted += deletedCount;
        }
      } while (cursor !== '0');

      logger.info({ pattern, totalDeleted }, 'Cache invalidation completed');
      return totalDeleted;
    } catch (error) {
      logger.error({ err: error, pattern }, 'Cache invalidation failed');
      throw error;
    }
  }

  /**
   * Implements the Cache-Aside pattern: try cache first, fall back to fetcher, cache result.
   * This is the primary method for adding caching to existing repository methods.
   *
   * @param key The cache key
   * @param fetcher Function that retrieves data from the source (e.g., database)
   * @param options Cache options including TTL
   * @returns The data (from cache or fetcher)
   *
   * @example
   * ```typescript
   * const brands = await cacheService.getOrSet(
   *   CACHE_PREFIX.BRANDS,
   *   () => this.db.query('SELECT * FROM stores'),
   *   { ttl: CACHE_TTL.BRANDS, logger }
   * );
   * ```
   */
  async getOrSet<T>(
    key: string,
    fetcher: () => Promise<T>,
    options: CacheOptions,
  ): Promise<T> {
    const logger = options.logger ?? globalLogger;

    // Try to get from cache first
    const cached = await this.get<T>(key, logger);
    if (cached !== null) {
      return cached;
    }

    // Cache miss - fetch from source
    const data = await fetcher();

    // Cache the result (fire-and-forget, don't await)
    this.set(key, data, options.ttl, logger).catch(() => {
      // Error already logged in set()
    });

    return data;
  }

  // --- Convenience methods for specific cache types ---

  /**
   * Invalidates all brand-related cache entries.
   */
  async invalidateBrands(logger: Logger = globalLogger): Promise<number> {
    return this.invalidatePattern(`${CACHE_PREFIX.BRANDS}*`, logger);
  }

  /**
   * Invalidates all flyer-related cache entries.
   */
  async invalidateFlyers(logger: Logger = globalLogger): Promise<number> {
    const patterns = [
      `${CACHE_PREFIX.FLYERS}*`,
      `${CACHE_PREFIX.FLYER}*`,
      `${CACHE_PREFIX.FLYER_ITEMS}*`,
    ];

    let total = 0;
    for (const pattern of patterns) {
      total += await this.invalidatePattern(pattern, logger);
    }
    return total;
  }

  /**
   * Invalidates cache for a specific flyer and its items.
   */
  async invalidateFlyer(flyerId: number, logger: Logger = globalLogger): Promise<void> {
    await Promise.all([
      this.del(`${CACHE_PREFIX.FLYER}:${flyerId}`, logger),
      this.del(`${CACHE_PREFIX.FLYER_ITEMS}:${flyerId}`, logger),
      // Also invalidate the flyers list since it may contain this flyer
      this.invalidatePattern(`${CACHE_PREFIX.FLYERS}*`, logger),
    ]);
  }

  /**
   * Invalidates all statistics cache entries.
   */
  async invalidateStats(logger: Logger = globalLogger): Promise<number> {
    return this.invalidatePattern(`${CACHE_PREFIX.STATS}*`, logger);
  }
}

export const cacheService = new CacheService();
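As a usage illustration (not part of this changeset), a read path and its matching invalidation might be wired up as below; the function names, the aggregate query, and the `:flyer-count` key suffix are assumptions, not code from the repository.

```typescript
// Illustrative sketch only: caching an aggregate stats query with the STATS TTL
// and dropping it after a write, using the service defined above.
import type { Pool } from 'pg';
import type { Logger } from 'pino';
import { cacheService, CACHE_TTL, CACHE_PREFIX } from './cacheService.server';

export async function getFlyerCount(db: Pool, logger: Logger): Promise<number> {
  return cacheService.getOrSet<number>(
    `${CACHE_PREFIX.STATS}:flyer-count`, // hypothetical key
    async () => {
      const res = await db.query<{ count: string }>('SELECT COUNT(*) AS count FROM public.flyers');
      return Number(res.rows[0].count);
    },
    { ttl: CACHE_TTL.STATS, logger },
  );
}

export async function afterFlyerWrite(logger: Logger): Promise<void> {
  // Any write that changes the aggregate should drop the cache:stats* keys.
  await cacheService.invalidateStats(logger);
}
```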
@@ -18,6 +18,7 @@ describe('Address DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockDb.query.mockReset();
    addressRepo = new AddressRepository(mockDb);
  });

@@ -40,6 +40,7 @@ describe('Admin DB Service', () => {
  beforeEach(() => {
    // Reset the global mock's call history before each test.
    vi.clearAllMocks();
    mockDb.query.mockReset();

    // Reset the withTransaction mock before each test
    vi.mocked(withTransaction).mockImplementation(async (callback) => {

@@ -47,6 +47,7 @@ describe('Budget DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockDb.query.mockReset();
    // Instantiate the repository with the minimal mock db for each test
    budgetRepo = new BudgetRepository(mockDb);
  });

@@ -28,6 +28,7 @@ import { logger as mockLogger } from '../logger.server';
describe('Conversion DB Service', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    mockPoolInstance.query.mockReset();
    // Make getPool return our mock instance for each test
    vi.mocked(getPool).mockReturnValue(mockPoolInstance as any);
  });

@@ -46,6 +46,7 @@ describe('Flyer DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockPoolInstance.query.mockReset();
    // In a transaction, `pool.connect()` returns a client. That client has a `release` method.
    // For these tests, we simulate this by having `connect` resolve to the pool instance itself,
    // and we ensure the `release` method is mocked on that instance.

@@ -244,8 +245,9 @@ describe('Flyer DB Service', () => {
      await expect(flyerRepo.insertFlyer(flyerData, mockLogger)).rejects.toThrow(
        CheckConstraintError,
      );
      // The implementation now generates a more detailed error message.
      await expect(flyerRepo.insertFlyer(flyerData, mockLogger)).rejects.toThrow(
        'Invalid URL format provided for image or icon.',
        "[URL_CHECK_FAIL] Invalid URL format. Image: 'https://example.com/not-a-url', Icon: 'null'",
      );
    });
  });

@@ -585,18 +587,6 @@ describe('Flyer DB Service', () => {
  });

  describe('getFlyers', () => {
    const expectedQuery = `
      SELECT
        f.*,
        json_build_object(
          'store_id', s.store_id,
          'name', s.name,
          'logo_url', s.logo_url
        ) as store
      FROM public.flyers f
      JOIN public.stores s ON f.store_id = s.store_id
      ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;

    it('should use default limit and offset when none are provided', async () => {
      console.log('[TEST DEBUG] Running test: getFlyers > should use default limit and offset');
      const mockFlyers: Flyer[] = [createMockFlyer({ flyer_id: 1 })];

@@ -610,7 +600,7 @@ describe('Flyer DB Service', () => {
      );

      expect(mockPoolInstance.query).toHaveBeenCalledWith(
        expectedQuery,
        expect.stringContaining('FROM public.flyers f'),
        [20, 0], // Default values
      );
    });

@@ -628,7 +618,7 @@ describe('Flyer DB Service', () => {
      );

      expect(mockPoolInstance.query).toHaveBeenCalledWith(
        expectedQuery,
        expect.stringContaining('FROM public.flyers f'),
        [10, 5], // Provided values
      );
    });
@@ -3,6 +3,7 @@ import type { Pool, PoolClient } from 'pg';
import { getPool, withTransaction } from './connection.db';
import type { Logger } from 'pino';
import { UniqueConstraintError, NotFoundError, handleDbError } from './errors.db';
import { cacheService, CACHE_TTL, CACHE_PREFIX } from '../cacheService.server';
import type {
  Flyer,
  FlyerItem,

@@ -229,22 +230,31 @@ export class FlyerRepository {

  /**
   * Retrieves all distinct brands from the stores table.
   * Uses cache-aside pattern with 1-hour TTL (brands rarely change).
   * @returns A promise that resolves to an array of Brand objects.
   */
  async getAllBrands(logger: Logger): Promise<Brand[]> {
    try {
      const query = `
        SELECT s.store_id as brand_id, s.name, s.logo_url, s.created_at, s.updated_at
        FROM public.stores s
        ORDER BY s.name;
      `;
      const res = await this.db.query<Brand>(query);
      return res.rows;
    } catch (error) {
      handleDbError(error, logger, 'Database error in getAllBrands', {}, {
        defaultMessage: 'Failed to retrieve brands from database.',
      });
    }
    const cacheKey = CACHE_PREFIX.BRANDS;

    return cacheService.getOrSet<Brand[]>(
      cacheKey,
      async () => {
        try {
          const query = `
            SELECT s.store_id as brand_id, s.name, s.logo_url, s.created_at, s.updated_at
            FROM public.stores s
            ORDER BY s.name;
          `;
          const res = await this.db.query<Brand>(query);
          return res.rows;
        } catch (error) {
          handleDbError(error, logger, 'Database error in getAllBrands', {}, {
            defaultMessage: 'Failed to retrieve brands from database.',
          });
        }
      },
      { ttl: CACHE_TTL.BRANDS, logger },
    );
  }

  /**

@@ -262,49 +272,67 @@ export class FlyerRepository {

  /**
   * Retrieves all flyers from the database, ordered by creation date.
   * Uses cache-aside pattern with 5-minute TTL.
   * @param limit The maximum number of flyers to return.
   * @param offset The number of flyers to skip.
   * @returns A promise that resolves to an array of Flyer objects.
   */
  async getFlyers(logger: Logger, limit: number = 20, offset: number = 0): Promise<Flyer[]> {
    try {
      const query = `
        SELECT
          f.*,
          json_build_object(
            'store_id', s.store_id,
            'name', s.name,
            'logo_url', s.logo_url
          ) as store
        FROM public.flyers f
        JOIN public.stores s ON f.store_id = s.store_id
        ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
      const res = await this.db.query<Flyer>(query, [limit, offset]);
      return res.rows;
    } catch (error) {
      handleDbError(error, logger, 'Database error in getFlyers', { limit, offset }, {
        defaultMessage: 'Failed to retrieve flyers from database.',
      });
    }
    const cacheKey = `${CACHE_PREFIX.FLYERS}:${limit}:${offset}`;

    return cacheService.getOrSet<Flyer[]>(
      cacheKey,
      async () => {
        try {
          const query = `
            SELECT
              f.*,
              json_build_object(
                'store_id', s.store_id,
                'name', s.name,
                'logo_url', s.logo_url
              ) as store
            FROM public.flyers f
            JOIN public.stores s ON f.store_id = s.store_id
            ORDER BY f.created_at DESC LIMIT $1 OFFSET $2`;
          const res = await this.db.query<Flyer>(query, [limit, offset]);
          return res.rows;
        } catch (error) {
          handleDbError(error, logger, 'Database error in getFlyers', { limit, offset }, {
            defaultMessage: 'Failed to retrieve flyers from database.',
          });
        }
      },
      { ttl: CACHE_TTL.FLYERS, logger },
    );
  }

  /**
   * Retrieves all items for a specific flyer.
   * Uses cache-aside pattern with 10-minute TTL.
   * @param flyerId The ID of the flyer.
   * @returns A promise that resolves to an array of FlyerItem objects.
   */
  async getFlyerItems(flyerId: number, logger: Logger): Promise<FlyerItem[]> {
    try {
      const res = await this.db.query<FlyerItem>(
        'SELECT * FROM public.flyer_items WHERE flyer_id = $1 ORDER BY flyer_item_id ASC',
        [flyerId],
      );
      return res.rows;
    } catch (error) {
      handleDbError(error, logger, 'Database error in getFlyerItems', { flyerId }, {
        defaultMessage: 'Failed to retrieve flyer items from database.',
      });
    }
    const cacheKey = `${CACHE_PREFIX.FLYER_ITEMS}:${flyerId}`;

    return cacheService.getOrSet<FlyerItem[]>(
      cacheKey,
      async () => {
        try {
          const res = await this.db.query<FlyerItem>(
            'SELECT * FROM public.flyer_items WHERE flyer_id = $1 ORDER BY flyer_item_id ASC',
            [flyerId],
          );
          return res.rows;
        } catch (error) {
          handleDbError(error, logger, 'Database error in getFlyerItems', { flyerId }, {
            defaultMessage: 'Failed to retrieve flyer items from database.',
          });
        }
      },
      { ttl: CACHE_TTL.FLYER_ITEMS, logger },
    );
  }

  /**

@@ -399,6 +427,7 @@ export class FlyerRepository {
  /**
   * Deletes a flyer and all its associated items in a transaction.
   * This should typically be an admin-only action.
   * Invalidates related cache entries after successful deletion.
   * @param flyerId The ID of the flyer to delete.
   */
  async deleteFlyer(flyerId: number, logger: Logger): Promise<void> {

@@ -413,6 +442,9 @@ export class FlyerRepository {
      }
      logger.info(`Successfully deleted flyer with ID: ${flyerId}`);
    });

    // Invalidate cache after successful deletion
    await cacheService.invalidateFlyer(flyerId, logger);
  } catch (error) {
    handleDbError(error, logger, 'Database transaction error in deleteFlyer', { flyerId }, {
      defaultMessage: 'Failed to delete flyer.',
@@ -29,6 +29,7 @@ describe('Gamification DB Service', () => {
  beforeEach(() => {
    // Reset the global mock's call history before each test.
    vi.clearAllMocks();
    mockDb.query.mockReset();

    // Instantiate the repository with the mock pool for each test
    gamificationRepo = new GamificationRepository(mockDb);

@@ -30,6 +30,7 @@ describe('Notification DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockPoolInstance.query.mockReset();
    // Instantiate the repository with the mock pool for each test
    notificationRepo = new NotificationRepository(mockPoolInstance as unknown as Pool);

@@ -35,6 +35,7 @@ describe('Personalization DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockQuery.mockReset();
    // Reset the withTransaction mock before each test
    vi.mocked(withTransaction).mockImplementation(async (callback) => {
      const mockClient = { query: vi.fn() };

@@ -27,6 +27,7 @@ import { logger as mockLogger } from '../logger.server';
describe('Price DB Service', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    mockPoolInstance.query.mockReset();
    // Make getPool return our mock instance for each test
    vi.mocked(getPool).mockReturnValue(mockPoolInstance as any);
  });

@@ -34,6 +34,7 @@ describe('Reaction DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockDb.query.mockReset();
    reactionRepo = new ReactionRepository(mockDb);
  });

@@ -28,6 +28,7 @@ describe('Recipe DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockQuery.mockReset();
    // Instantiate the repository with the mock pool for each test
    recipeRepo = new RecipeRepository(mockPoolInstance as unknown as Pool);
  });

@@ -36,6 +36,7 @@ describe('Shopping DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockPoolInstance.query.mockReset();
    // Instantiate the repository with the mock pool for each test
    shoppingRepo = new ShoppingRepository(mockPoolInstance as unknown as Pool);
  });

@@ -62,6 +62,7 @@ describe('User DB Service', () => {

  beforeEach(() => {
    vi.clearAllMocks();
    mockPoolInstance.query.mockReset();
    userRepo = new UserRepository(mockPoolInstance as unknown as PoolClient);
    // Provide a default mock implementation for withTransaction for all tests.
    vi.mocked(withTransaction).mockImplementation(
@@ -4,12 +4,13 @@ import { withTransaction } from './db/connection.db';
import { createFlyerAndItems } from './db/flyer.db';
import { AdminRepository } from './db/admin.db';
import { GamificationRepository } from './db/gamification.db';
import { cacheService } from './cacheService.server';
import type { FlyerInsert, FlyerItemInsert, Flyer } from '../types';

export class FlyerPersistenceService {
  /**
   * Saves the flyer and its items to the database within a transaction.
   * Also logs the activity.
   * Also logs the activity and invalidates related cache entries.
   */
  async saveFlyer(
    flyerData: FlyerInsert,

@@ -17,7 +18,7 @@ export class FlyerPersistenceService {
    userId: string | undefined,
    logger: Logger,
  ): Promise<Flyer> {
    return withTransaction(async (client) => {
    const flyer = await withTransaction(async (client) => {
      const { flyer, items } = await createFlyerAndItems(flyerData, itemsForDb, logger, client);

      logger.info(

@@ -43,5 +44,12 @@ export class FlyerPersistenceService {
      }
      return flyer;
    });

    // Invalidate flyer list cache after successful creation (fire-and-forget)
    cacheService.invalidateFlyers(logger).catch(() => {
      // Error already logged in invalidateFlyers
    });

    return flyer;
  }
}
@@ -1,6 +1,9 @@
// src/services/logger.server.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';

// Unmock the module we are testing to override the global mock from setupFiles.
vi.unmock('./logger.server');

// Mock pino before importing the logger
const pinoMock = vi.fn(() => ({
  info: vi.fn(),

@@ -25,14 +28,25 @@ describe('Server Logger', () => {
  it('should initialize pino with the correct level for production', async () => {
    vi.stubEnv('NODE_ENV', 'production');
    await import('./logger.server');
    expect(pinoMock).toHaveBeenCalledWith(expect.objectContaining({ level: 'info' }));
    expect(pinoMock).toHaveBeenCalledWith(
      expect.objectContaining({ level: 'info', transport: undefined }),
    );
  });

  it('should initialize pino with pretty-print transport for development', async () => {
    vi.stubEnv('NODE_ENV', 'development');
    await import('./logger.server');
    expect(pinoMock).toHaveBeenCalledWith(
      expect.objectContaining({ transport: expect.any(Object) }),
      expect.objectContaining({ level: 'debug', transport: expect.any(Object) }),
    );
  });

  it('should initialize pino with debug level and no transport for test', async () => {
    // This is the default for vitest, but we stub it for clarity.
    vi.stubEnv('NODE_ENV', 'test');
    await import('./logger.server');
    expect(pinoMock).toHaveBeenCalledWith(
      expect.objectContaining({ level: 'debug', transport: undefined }),
    );
  });
});
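These assertions imply roughly the following shape for `logger.server.ts`. This is a reconstruction for readability, not the file's actual contents; the pretty-print target and its options are assumptions.

```typescript
// Hypothetical sketch inferred from the tests above: 'info' in production,
// 'debug' elsewhere, and a pretty-print transport only in local development.
import pino from 'pino';

const isProduction = process.env.NODE_ENV === 'production';
const isDevelopment = process.env.NODE_ENV === 'development';

export const logger = pino({
  level: isProduction ? 'info' : 'debug',
  // Tests and production log raw JSON (transport: undefined).
  transport: isDevelopment
    ? { target: 'pino-pretty', options: { colorize: true } } // assumed options
    : undefined,
});
```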
@@ -59,17 +59,40 @@ vi.mock('../../services/storage/storageService', () => {
  };
});

// FIX: Import the singleton instance directly to spy on it
import { aiService } from '../../services/aiService.server';

/**
 * @vitest-environment node
 */

const { mockExtractCoreData } = vi.hoisted(() => ({
  mockExtractCoreData: vi.fn(),
}));
// CRITICAL: This mock function must be declared with vi.hoisted() to ensure it's available
// at the module level BEFORE any imports are resolved.
const { mockExtractCoreData } = vi.hoisted(() => {
  return {
    mockExtractCoreData: vi.fn(),
  };
});

// CRITICAL: Mock the aiService module BEFORE any other imports that depend on it.
// This ensures workers get the mocked version, not the real one.
// We use a partial mock that only overrides extractCoreDataFromFlyerImage.
vi.mock('../../services/aiService.server', async (importOriginal) => {
  const actual = await importOriginal<typeof import('../../services/aiService.server')>();

  // Create a proxy around the actual aiService that intercepts extractCoreDataFromFlyerImage
  const proxiedAiService = new Proxy(actual.aiService, {
    get(target, prop) {
      if (prop === 'extractCoreDataFromFlyerImage') {
        return mockExtractCoreData;
      }
      // For all other properties/methods, return the original
      return target[prop as keyof typeof target];
    },
  });

  return {
    ...actual,
    aiService: proxiedAiService,
  };
});

// Mock the connection DB service to intercept withTransaction.
// This is crucial because FlyerPersistenceService imports directly from connection.db,

@@ -99,9 +122,8 @@ describe('Flyer Processing Background Job Integration Test', () => {
    process.env.FRONTEND_URL = 'https://example.com';
    console.error('[TEST SETUP] FRONTEND_URL stubbed to:', process.env.FRONTEND_URL);

    // FIX: Spy on the actual singleton instance. This ensures that when the worker
    // imports 'aiService', it gets the instance we are controlling here.
    vi.spyOn(aiService, 'extractCoreDataFromFlyerImage').mockImplementation(mockExtractCoreData);
    // NOTE: The aiService mock is now set up via vi.mock() at the module level (above).
    // This ensures workers get the mocked version when they import aiService.

    // NEW: Import workers to start them IN-PROCESS.
    // This ensures they run in the same memory space as our mocks.
@@ -1,5 +1,5 @@
// src/tests/integration/gamification.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { describe, it, expect, beforeAll, afterAll, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import path from 'path';
import fs from 'node:fs/promises';

@@ -70,8 +70,13 @@ describe('Gamification Flow Integration Test', () => {
      fullName: 'Gamification Tester',
      request,
    }));
  });

  // Setup default mock response for the AI service's extractCoreDataFromFlyerImage method.
  beforeEach(() => {
    vi.clearAllMocks();

    // Reset AI Service Mock to default success state
    mockExtractCoreData.mockReset();
    mockExtractCoreData.mockResolvedValue({
      store_name: 'Gamification Test Store',
      valid_from: null,

@@ -87,6 +92,9 @@ describe('Gamification Flow Integration Test', () => {
        },
      ],
    });

    // Reset Image Processor Mock
    vi.mocked(imageProcessor.generateFlyerIcon).mockResolvedValue('mock-icon.webp');
  });

  afterAll(async () => {
@@ -1,5 +1,5 @@
// src/tests/integration/recipe.integration.test.ts
import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';
import { describe, it, expect, beforeAll, afterAll, vi, afterEach } from 'vitest';
import supertest from 'supertest';
import { createAndLoginUser } from '../utils/testHelpers';
import { cleanupDb } from '../utils/cleanup';

@@ -49,6 +49,12 @@ describe('Recipe API Routes Integration Tests', () => {
    createdRecipeIds.push(testRecipe.recipe_id);
  });

  afterEach(() => {
    vi.clearAllMocks();
    // Reset the mock to its default state for the next test
    vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue('Default Mock Suggestion');
  });

  afterAll(async () => {
    vi.unstubAllEnvs();
    // Clean up all created resources
@@ -9,13 +9,56 @@
let server: Server;
// This will hold the single database pool instance for the entire test run.
let globalPool: ReturnType<typeof getPool> | null = null;

/**
 * Cleans all BullMQ queues to ensure no stale jobs from previous test runs.
 * This is critical because old jobs with outdated error messages can pollute test results.
 */
async function cleanAllQueues() {
  // Use console.error for visibility in CI logs (stderr is often more reliable)
  console.error(`[PID:${process.pid}] [QUEUE CLEANUP] Starting BullMQ queue cleanup...`);

  try {
    const { flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue } = await import('../../services/queues.server');
    console.error(`[QUEUE CLEANUP] Successfully imported queue modules`);

    const queues = [flyerQueue, cleanupQueue, emailQueue, analyticsQueue, weeklyAnalyticsQueue, tokenCleanupQueue];

    for (const queue of queues) {
      try {
        // Log queue state before cleanup
        const jobCounts = await queue.getJobCounts();
        console.error(`[QUEUE CLEANUP] Queue "${queue.name}" before cleanup: ${JSON.stringify(jobCounts)}`);

        // obliterate() removes ALL data associated with the queue from Redis
        await queue.obliterate({ force: true });
        console.error(`  ✅ [QUEUE CLEANUP] Cleaned queue: ${queue.name}`);
      } catch (error) {
        // Log but don't fail - the queue might not exist yet
        console.error(`  ⚠️ [QUEUE CLEANUP] Could not clean queue ${queue.name}: ${error instanceof Error ? error.message : 'Unknown error'}`);
      }
    }
    console.error(`✅ [PID:${process.pid}] [QUEUE CLEANUP] All queues cleaned successfully.`);
  } catch (error) {
    console.error(`❌ [PID:${process.pid}] [QUEUE CLEANUP] CRITICAL ERROR during queue cleanup:`, error);
    // Don't throw - we want the tests to continue even if cleanup fails
  }
}

export async function setup() {
  // Ensure we are in the correct environment for these tests.
  process.env.NODE_ENV = 'test';
  // Fix: Set the FRONTEND_URL globally for the test server instance
  process.env.FRONTEND_URL = 'https://example.com';

  console.log(`\n--- [PID:${process.pid}] Running Integration Test GLOBAL Setup ---`);
  console.error(`\n--- [PID:${process.pid}] Running Integration Test GLOBAL Setup ---`);
  console.error(`[SETUP] REDIS_URL: ${process.env.REDIS_URL}`);
  console.error(`[SETUP] REDIS_PASSWORD is set: ${!!process.env.REDIS_PASSWORD}`);

  // CRITICAL: Clean all queues BEFORE running any tests to remove stale jobs
  // from previous test runs that may have outdated error messages.
  console.error(`[SETUP] About to call cleanAllQueues()...`);
  await cleanAllQueues();
  console.error(`[SETUP] cleanAllQueues() completed.`);

  // The integration setup is now the single source of truth for preparing the test DB.
  // It runs the same seed script that `npm run db:reset:test` used.
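For context, the queues imported by `cleanAllQueues()` are assumed to be plain BullMQ `Queue` instances sharing the ioredis connection, roughly as sketched below. The queue names are illustrative; the real `queues.server.ts` is not shown in this diff.

```typescript
// Assumed shape of src/services/queues.server.ts — illustrative only.
import { Queue } from 'bullmq';
import { connection } from './redis.server';

// Each queue stores its jobs under a Redis key prefix derived from its name,
// which is what queue.obliterate({ force: true }) wipes in the global setup above.
export const flyerQueue = new Queue('flyer-processing', { connection }); // hypothetical name
export const emailQueue = new Queue('email', { connection }); // hypothetical name
```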
@@ -1,14 +1,23 @@
// src/utils/imageProcessor.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import type { Logger } from 'pino';
import path from 'path';

// --- Hoisted Mocks ---
const mocks = vi.hoisted(() => {
  // Create a chainable mock for the sharp library
  const toFile = vi.fn().mockResolvedValue({ info: 'mocked' });

  // Chain for generateFlyerIcon
  const webp = vi.fn(() => ({ toFile }));
  const resize = vi.fn(() => ({ webp }));
  const sharpInstance = { resize };

  // Chain for processAndSaveImage
  const png = vi.fn(() => ({ toFile }));
  const jpeg = vi.fn(() => ({ png }));
  const withMetadata = vi.fn(() => ({ jpeg }));

  const sharpInstance = { resize, withMetadata };

  // Mock the sharp function and attach static properties required by the implementation
  const sharp = vi.fn(() => sharpInstance);

@@ -18,6 +27,9 @@ const mocks = vi.hoisted(() => {
    sharp: sharp,
    resize,
    webp,
    withMetadata,
    jpeg,
    png,
    toFile,
    mkdir: vi.fn().mockResolvedValue(undefined),
  };

@@ -54,7 +66,7 @@ const logger = createMockLogger();
vi.mock('../services/logger.server', () => ({ logger }));

// --- Import the function to be tested ---
import { generateFlyerIcon } from './imageProcessor';
import { generateFlyerIcon, processAndSaveImage } from './imageProcessor';

describe('generateFlyerIcon', () => {
  beforeEach(() => {

@@ -95,3 +107,48 @@ describe('generateFlyerIcon', () => {
    );
  });
});

describe('processAndSaveImage', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    // Ensure toFile is in a resolved state
    mocks.toFile.mockResolvedValue({ info: 'mocked' });
  });

  it('should process the image, strip metadata, and return the new filename', async () => {
    const sourcePath = '/tmp/upload/original.jpg';
    const destinationDir = '/var/www/images';
    const originalFileName = 'original.jpg';

    const result = await processAndSaveImage(sourcePath, destinationDir, originalFileName, logger);

    // Check that the destination directory was created
    expect(mocks.mkdir).toHaveBeenCalledWith(destinationDir, { recursive: true });

    // Check that sharp was called with the correct source
    expect(mocks.sharp).toHaveBeenCalledWith(sourcePath, { failOn: 'none' });

    // Check the processing chain
    expect(mocks.withMetadata).toHaveBeenCalledWith({});
    expect(mocks.jpeg).toHaveBeenCalledWith({ quality: 85, mozjpeg: true });
    expect(mocks.png).toHaveBeenCalledWith({ compressionLevel: 8, quality: 85 });
    expect(mocks.toFile).toHaveBeenCalledWith(expect.stringContaining(path.join(destinationDir, 'original-')));

    // Check the returned filename format (original-timestamp.jpg)
    expect(result).toMatch(/^original-\d+\.jpg$/);
  });

  it('should throw an error if sharp fails to process the image', async () => {
    const sharpError = new Error('Processing failed');
    mocks.toFile.mockRejectedValueOnce(sharpError);

    await expect(
      processAndSaveImage('/path/img.jpg', '/dest', 'img.jpg', logger),
    ).rejects.toThrow('Failed to process image img.jpg.');

    expect(logger.error).toHaveBeenCalledWith(
      expect.objectContaining({ err: sharpError, sourcePath: '/path/img.jpg' }),
      'An error occurred during image processing and saving.',
    );
  });
});
@@ -20,57 +20,100 @@ const createMockLogger = (): Logger =>

describe('serverUtils', () => {
  describe('getBaseUrl', () => {
    const originalEnv = process.env;
    let mockLogger: Logger;

    // Store original env values to restore after tests
    const originalFrontendUrl = process.env.FRONTEND_URL;
    const originalBaseUrl = process.env.BASE_URL;
    const originalNodeEnv = process.env.NODE_ENV;
    const originalPort = process.env.PORT;

    beforeEach(() => {
      // Reset mocks and environment variables before each test for isolation
      vi.resetModules();
      process.env = { ...originalEnv };
      vi.unstubAllEnvs();
      // CRITICAL: Clear env vars that might be set globally (e.g., from vitest config)
      // vi.unstubAllEnvs() only removes vars set via vi.stubEnv(), not direct assignments
      delete process.env.FRONTEND_URL;
      delete process.env.BASE_URL;
      delete process.env.NODE_ENV;
      delete process.env.PORT;
      mockLogger = createMockLogger();
    });

    afterEach(() => {
      // Restore original environment variables after each test
      process.env = originalEnv;
      vi.unstubAllEnvs();
      // Restore original values
      if (originalFrontendUrl !== undefined) process.env.FRONTEND_URL = originalFrontendUrl;
      else delete process.env.FRONTEND_URL;
      if (originalBaseUrl !== undefined) process.env.BASE_URL = originalBaseUrl;
      else delete process.env.BASE_URL;
      if (originalNodeEnv !== undefined) process.env.NODE_ENV = originalNodeEnv;
      else delete process.env.NODE_ENV;
      if (originalPort !== undefined) process.env.PORT = originalPort;
      else delete process.env.PORT;
    });

    it('should use FRONTEND_URL if it is a valid URL', () => {
      process.env.FRONTEND_URL = 'https://valid.example.com';
      vi.stubEnv('FRONTEND_URL', 'https://valid.example.com');
      const baseUrl = getBaseUrl(mockLogger);
      expect(baseUrl).toBe('https://valid.example.com');
      expect(mockLogger.warn).not.toHaveBeenCalled();
    });

    it('should trim a trailing slash from FRONTEND_URL', () => {
      process.env.FRONTEND_URL = 'https://valid.example.com/';
      vi.stubEnv('FRONTEND_URL', 'https://valid.example.com/');
      const baseUrl = getBaseUrl(mockLogger);
      expect(baseUrl).toBe('https://valid.example.com');
    });

    it('should use BASE_URL if FRONTEND_URL is not set', () => {
      delete process.env.FRONTEND_URL;
      process.env.BASE_URL = 'https://base.example.com';
      vi.stubEnv('BASE_URL', 'https://base.example.com');
      const baseUrl = getBaseUrl(mockLogger);
      expect(baseUrl).toBe('https://base.example.com');
      expect(mockLogger.warn).not.toHaveBeenCalled();
    });

    it('should fall back to example.com with default port 3000 if no URL is provided', () => {
      delete process.env.FRONTEND_URL;
      delete process.env.BASE_URL;
      delete process.env.PORT;
    it('should fall back to localhost with default port 3000 in test environment', () => {
      vi.stubEnv('NODE_ENV', 'test');
      const baseUrl = getBaseUrl(mockLogger);
      expect(baseUrl).toBe('https://example.com:3000');
      expect(baseUrl).toBe('http://localhost:3000');
      expect(mockLogger.warn).not.toHaveBeenCalled();
    });

    it('should log a warning and fall back if FRONTEND_URL is invalid (does not start with http)', () => {
      process.env.FRONTEND_URL = 'invalid.url.com';
    it('should fall back to example.com in non-test environment', () => {
      vi.stubEnv('NODE_ENV', 'development');
      vi.stubEnv('PORT', '4000');
      const baseUrl = getBaseUrl(mockLogger);
      expect(baseUrl).toBe('https://example.com:3000');
      expect(baseUrl).toBe('http://example.com:4000');
      expect(mockLogger.warn).not.toHaveBeenCalled();
    });

    it('should log a warning and fall back to localhost if FRONTEND_URL is invalid in test env', () => {
      vi.stubEnv('NODE_ENV', 'test');
      vi.stubEnv('FRONTEND_URL', 'invalid.url.com');
      const baseUrl = getBaseUrl(mockLogger);
      expect(baseUrl).toBe('http://localhost:3000');
      expect(mockLogger.warn).toHaveBeenCalledWith(
        "[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('invalid.url.com'). Falling back to default local URL: https://example.com:3000",
        "[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('invalid.url.com'). Falling back to: http://localhost:3000",
      );
    });

    it('should log a warning and fall back to example.com if FRONTEND_URL is invalid in non-test env', () => {
      vi.stubEnv('NODE_ENV', 'production');
      vi.stubEnv('FRONTEND_URL', 'invalid.url.com');
      const baseUrl = getBaseUrl(mockLogger);
      expect(baseUrl).toBe('http://example.com:3000');
      expect(mockLogger.warn).toHaveBeenCalledWith(
        "[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('invalid.url.com'). Falling back to: http://example.com:3000",
      );
    });

    it('should throw an error if the final URL is invalid', () => {
      vi.stubEnv('FRONTEND_URL', 'http:invalid');
      expect(() => getBaseUrl(mockLogger)).toThrow(
        `[getBaseUrl] Generated URL 'http:invalid' does not match required pattern (must start with http:// or https://)`,
      );
    });
  });
});
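Taken together, the expectations above pin down the fallback logic fairly tightly. A reconstruction of `getBaseUrl` consistent with them would look roughly like the sketch below; the actual implementation in `src/utils` may differ in structure.

```typescript
// Hypothetical reconstruction of getBaseUrl, inferred only from the assertions above.
import type { Logger } from 'pino';

export function getBaseUrl(logger: Logger): string {
  const configured = process.env.FRONTEND_URL || process.env.BASE_URL;
  const port = process.env.PORT || '3000';
  const fallback =
    process.env.NODE_ENV === 'test' ? `http://localhost:${port}` : `http://example.com:${port}`;

  let baseUrl: string;
  if (configured && configured.startsWith('http')) {
    baseUrl = configured.replace(/\/$/, ''); // trim a single trailing slash
  } else {
    if (configured) {
      logger.warn(
        `[getBaseUrl] FRONTEND_URL/BASE_URL is invalid or incomplete ('${configured}'). Falling back to: ${fallback}`,
      );
    }
    baseUrl = fallback;
  }

  // Final sanity check: the generated URL must be an absolute http(s) URL.
  if (!/^https?:\/\//.test(baseUrl)) {
    throw new Error(
      `[getBaseUrl] Generated URL '${baseUrl}' does not match required pattern (must start with http:// or https://)`,
    );
  }
  return baseUrl;
}
```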