Compare commits

10 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
| | 5bc8f6a42b | |
| | 4fd5e900af | |
| | 39ab773b82 | |
| | 75406cd924 | |
| | 8fb0a57f02 | |
| | c78323275b | |
| | 5fe537b93d | |
| | 61f24305fb | |
| | de3f0cf26e | |
| | 45ac4fccf5 | |
.env.example (32)

@@ -128,3 +128,35 @@ GENERATE_SOURCE_MAPS=true
SENTRY_AUTH_TOKEN=
# URL of your Bugsink instance (for source map uploads)
SENTRY_URL=https://bugsink.projectium.com

# ===================
# Feature Flags (ADR-024)
# ===================
# Feature flags control the availability of features at runtime.
# All flags default to disabled (false) when not set or set to any value other than 'true'.
# Set to 'true' to enable a feature.
#
# Backend flags use: FEATURE_SNAKE_CASE
# Frontend flags use: VITE_FEATURE_SNAKE_CASE (VITE_ prefix required for client-side access)
#
# Lifecycle:
# 1. Add flag with default false
# 2. Enable via env var when ready for testing/rollout
# 3. Remove conditional code when feature is fully rolled out
# 4. Remove flag from config within 3 months of full rollout
#
# See: docs/adr/0024-feature-flagging-strategy.md

# Backend Feature Flags
# FEATURE_BUGSINK_SYNC=false      # Enable Bugsink error sync integration
# FEATURE_ADVANCED_RBAC=false     # Enable advanced RBAC features
# FEATURE_NEW_DASHBOARD=false     # Enable new dashboard experience
# FEATURE_BETA_RECIPES=false      # Enable beta recipe features
# FEATURE_EXPERIMENTAL_AI=false   # Enable experimental AI features
# FEATURE_DEBUG_MODE=false        # Enable debug mode for development

# Frontend Feature Flags (VITE_ prefix required)
# VITE_FEATURE_NEW_DASHBOARD=false    # Enable new dashboard experience
# VITE_FEATURE_BETA_RECIPES=false     # Enable beta recipe features
# VITE_FEATURE_EXPERIMENTAL_AI=false  # Enable experimental AI features
# VITE_FEATURE_DEBUG_MODE=false       # Enable debug mode for development
CLAUDE.md (172)

@@ -31,46 +31,19 @@ Out-of-sync = test failures.

**CRITICAL**: The `claude-win10` user has **READ-ONLY** access to production and test servers.

**Claude Code does NOT have:**
| Capability | Status |
| --- | --- |
| Root/sudo access | NO |
| Write permissions | NO |
| PM2 restart, systemctl | NO - User must execute |

- Root or sudo access
- Write permissions on servers
- Ability to execute PM2 restart, systemctl, or other write operations directly
**Server Operations Workflow**: Diagnose → User executes → Analyze → Fix (1-3 commands) → User executes → Verify

**Correct Workflow for Server Operations:**
**Rules**:

| Step | Actor | Action |
| --- | --- | --- |
| 1 | Claude | Provide **diagnostic commands** (read-only checks) for user to run |
| 2 | User | Execute commands on server, report results |
| 3 | Claude | Analyze results, provide **fix commands** (1-3 at a time) |
| 4 | User | Execute fix commands, report results |
| 5 | Claude | Provide **verification commands** to confirm success and check side effects |
| 6 | Claude | Document progress stage by stage |
| 7 | Claude | Update ALL relevant documentation when complete |

**Example - Diagnosing PM2 Issues:**

```bash
# Step 1: Claude provides diagnostic commands (user runs these)
pm2 list
pm2 logs flyer-crawler-api --lines 50
systemctl status redis

# Step 3: After user reports results, Claude provides fix commands
pm2 restart flyer-crawler-api
# Wait for user confirmation before next command

# Step 5: Claude provides verification
pm2 list
curl -s https://flyer-crawler.projectium.com/api/health/ready | jq .
```

**Never Do:**

- `ssh root@projectium.com "pm2 restart all"` (Claude cannot execute this)
- Assume commands succeeded without user confirmation
- Provide more than 3 fix commands at once (errors may cascade)
- Provide diagnostic commands first, wait for user to report results
- Maximum 3 fix commands at a time (errors may cascade)
- Always verify after fixes complete

### Communication Style

@@ -105,25 +78,27 @@ Ask before assuming. Never assume:

### Key Patterns (with file locations)

| Pattern | ADR | Implementation | File |
| --- | --- | --- | --- |
| Error Handling | ADR-001 | `handleDbError()`, throw `NotFoundError` | `src/services/db/errors.db.ts` |
| Repository Methods | ADR-034 | `get*` (throws), `find*` (null), `list*` (array) | `src/services/db/*.db.ts` |
| API Responses | ADR-028 | `sendSuccess()`, `sendPaginated()`, `sendError()` | `src/utils/apiResponse.ts` |
| Transactions | ADR-002 | `withTransaction(async (client) => {...})` | `src/services/db/transaction.db.ts` |
| Pattern | ADR | Implementation | File |
| --- | --- | --- | --- |
| Error Handling | ADR-001 | `handleDbError()`, throw `NotFoundError` | `src/services/db/errors.db.ts` |
| Repository Methods | ADR-034 | `get*` (throws), `find*` (null), `list*` (array) | `src/services/db/*.db.ts` |
| API Responses | ADR-028 | `sendSuccess()`, `sendPaginated()`, `sendError()` | `src/utils/apiResponse.ts` |
| Transactions | ADR-002 | `withTransaction(async (client) => {...})` | `src/services/db/connection.db.ts` |
| Feature Flags | ADR-024 | `isFeatureEnabled()`, `useFeatureFlag()` | `src/services/featureFlags.server.ts` |

### Key Files Quick Access

| Purpose | File |
| --- | --- |
| Express app | `server.ts` |
| Environment | `src/config/env.ts` |
| Routes | `src/routes/*.routes.ts` |
| Repositories | `src/services/db/*.db.ts` |
| Workers | `src/services/workers.server.ts` |
| Queues | `src/services/queues.server.ts` |
| PM2 Config (Dev) | `ecosystem.dev.config.cjs` |
| PM2 Config (Prod) | `ecosystem.config.cjs` |
| Purpose | File |
| --- | --- |
| Express app | `server.ts` |
| Environment | `src/config/env.ts` |
| Routes | `src/routes/*.routes.ts` |
| Repositories | `src/services/db/*.db.ts` |
| Workers | `src/services/workers.server.ts` |
| Queues | `src/services/queues.server.ts` |
| Feature Flags | `src/services/featureFlags.server.ts` |
| PM2 Config (Dev) | `ecosystem.dev.config.cjs` |
| PM2 Config (Prod) | `ecosystem.config.cjs` |

---

@@ -166,7 +141,7 @@ The dev container now matches production by using PM2 for process management.
- `flyer-crawler-worker-dev` - Background job worker
- `flyer-crawler-vite-dev` - Vite frontend dev server (port 5173)

### Log Aggregation (ADR-050)
### Log Aggregation (ADR-015)

All logs flow to Bugsink via Logstash with 3-project routing:

@@ -249,7 +224,7 @@ All logs flow to Bugsink via Logstash with 3-project routing:

**Launch Pattern**:

```
```text
Use Task tool with subagent_type: "coder", "db-dev", "tester", etc.
```

@@ -330,8 +305,8 @@ podman cp "d:/path/file" container:/tmp/file

**Quick Access**:

- **Dev**: https://localhost:8443 (`admin@localhost`/`admin`)
- **Prod**: https://bugsink.projectium.com
- **Dev**: <https://localhost:8443> (`admin@localhost`/`admin`)
- **Prod**: <https://bugsink.projectium.com>

**Token Creation** (required for MCP):

@@ -347,7 +322,7 @@ cd /opt/bugsink && bugsink-manage create_auth_token

**See**: [docs/operations/LOGSTASH-QUICK-REF.md](docs/operations/LOGSTASH-QUICK-REF.md)

Log aggregation: PostgreSQL + PM2 + Redis + NGINX → Bugsink (ADR-050)
Log aggregation: PostgreSQL + PM2 + Redis + NGINX → Bugsink (ADR-015)

---

@@ -367,84 +342,3 @@ Log aggregation: PostgreSQL + PM2 + Redis + NGINX → Bugsink (ADR-050)
| **Logstash** | [LOGSTASH-QUICK-REF.md](docs/operations/LOGSTASH-QUICK-REF.md) |
| **ADRs** | [docs/adr/index.md](docs/adr/index.md) |
| **All Docs** | [docs/README.md](docs/README.md) |

---

## Appendix: Integration Test Issues (Full Details)

### 1. Vitest globalSetup Context Isolation

Vitest's `globalSetup` runs in separate Node.js context. Singletons, spies, mocks do NOT share instances with test files.

**Affected**: BullMQ worker service mocks (AI/DB failure tests)

**Solutions**: Mark `.todo()`, create test-only API endpoints, use Redis-based mock flags

```typescript
// DOES NOT WORK - different instances
const { flyerProcessingService } = await import('../../services/workers.server');
flyerProcessingService._getAiProcessor()._setExtractAndValidateData(mockFn);
```

### 2. Cleanup Queue Deletes Before Verification

Cleanup worker processes jobs in globalSetup context, ignoring test spies.

**Solution**: Drain and pause queue:

```typescript
const { cleanupQueue } = await import('../../services/queues.server');
await cleanupQueue.drain();
await cleanupQueue.pause();
// ... test ...
await cleanupQueue.resume();
```

### 3. Cache Stale After Direct SQL

Direct `pool.query()` inserts bypass cache invalidation.

**Solution**: `await cacheService.invalidateFlyers();` after inserts

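A minimal sketch of the fix in an integration test (the table, columns, and import paths here are assumptions for illustration; the project's real fixtures may differ):

```typescript
// Hypothetical integration-test snippet: insert directly, then invalidate the
// flyer cache so the next API read is not served from a stale cache entry.
import { pool } from '../../services/db/connection.db'; // assumed import path
import { cacheService } from '../../services/cache.server'; // assumed import path

await pool.query(`INSERT INTO flyers (store_id, title) VALUES ($1, $2)`, [1, 'Test Flyer']);
await cacheService.invalidateFlyers(); // without this, subsequent GETs may return stale data
```
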
### 4. Test Filename Collisions

Multer predictable filenames cause race conditions.

**Solution**: Use unique suffix: `${Date.now()}-${Math.round(Math.random() * 1e9)}`

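A short sketch of a collision-free Multer storage configuration using that suffix (the upload directory is an assumption for illustration):

```typescript
// Each uploaded file gets a timestamp + random suffix, so concurrent tests
// writing the same original filename no longer race on one path.
import multer from 'multer';
import path from 'path';

const storage = multer.diskStorage({
  destination: (_req, _file, cb) => cb(null, '/tmp/uploads'), // assumed directory
  filename: (_req, file, cb) => {
    const uniqueSuffix = `${Date.now()}-${Math.round(Math.random() * 1e9)}`;
    cb(null, `${file.fieldname}-${uniqueSuffix}${path.extname(file.originalname)}`);
  },
});

export const upload = multer({ storage });
```
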
### 5. Response Format Mismatches

API formats change: `data.jobId` vs `data.job.id`, nested vs flat, string vs number IDs.

**Solution**: Log response bodies, update assertions

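A small sketch of that debugging step, assuming a Supertest-style integration test (endpoint, fixture path, and expected shape are illustrative only):

```typescript
import request from 'supertest';
import { app } from '../../server'; // assumed export of the Express app

// Dump the actual body once, then tighten the assertion to the real shape.
const res = await request(app).post('/api/v1/flyers/upload').attach('file', 'src/tests/fixtures/flyer.jpg');
console.log(JSON.stringify(res.body, null, 2)); // is it data.jobId or data.job.id?
expect(res.body.data.job.id).toBeDefined();
```
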
### 6. External Service Availability

PM2/Redis health checks fail when unavailable.

**Solution**: try/catch with graceful degradation or mock

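A sketch of the graceful-degradation pattern for a Redis check (the client instance and the shape of the health response are assumptions; the real health-check helpers may differ):

```typescript
// Skip Redis-dependent assertions instead of failing the whole test
// when the service is not reachable in the current environment.
let redisAvailable = false;
try {
  redisAvailable = (await redisClient.ping()) === 'PONG'; // assumed client instance
} catch {
  console.warn('Redis unavailable - skipping Redis-dependent assertions');
}

if (redisAvailable) {
  expect(healthResponse.body.data.redis.status).toBe('ok'); // assumed response shape
}
```
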
### 7. TZ Environment Variable Breaking Async Hooks

**Problem**: When `TZ=America/Los_Angeles` (or other timezone values) is set in the environment, Node.js async_hooks module can produce `RangeError: Invalid triggerAsyncId value: NaN`. This breaks React Testing Library's `render()` function which uses async hooks internally.

**Root Cause**: Setting `TZ` to certain timezone values interferes with Node.js's internal async tracking mechanism, causing invalid async IDs to be generated.

**Symptoms**:

```text
RangeError: Invalid triggerAsyncId value: NaN
❯ process.env.NODE_ENV.queueSeveralMicrotasks node_modules/react/cjs/react.development.js:751:15
❯ process.env.NODE_ENV.exports.act node_modules/react/cjs/react.development.js:886:11
❯ node_modules/@testing-library/react/dist/act-compat.js:46:25
❯ renderRoot node_modules/@testing-library/react/dist/pure.js:189:26
```

**Solution**: Explicitly unset `TZ` in all test scripts by adding `TZ=` (empty value) to cross-env:

```json
"test:unit": "cross-env NODE_ENV=test TZ= tsx ..."
"test:integration": "cross-env NODE_ENV=test TZ= tsx ..."
```

**Context**: This issue was introduced in commit `d03900c` which added `TZ: 'America/Los_Angeles'` to PM2 ecosystem configs for consistent log timestamps in production/dev environments. Tests must explicitly override this to prevent the async hooks error.

docs/AI-DOCUMENTATION-INDEX.md (393, new file)

@@ -0,0 +1,393 @@
# AI Documentation Index

Machine-optimized navigation for AI agents. Structured for vector retrieval and semantic search.

---

## Quick Lookup Table

| Task/Question | Primary Doc | Section/ADR |
| --- | --- | --- |
| Add new API endpoint | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) | API Response Patterns, Input Validation |
| Add repository method | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) | Repository Patterns (get*/find*/list\*) |
| Fix failing test | [TESTING.md](development/TESTING.md) | Known Integration Test Issues |
| Run tests correctly | [TESTING.md](development/TESTING.md) | Test Execution Environment |
| Add database column | [DATABASE-GUIDE.md](subagents/DATABASE-GUIDE.md) | Schema sync required |
| Deploy to production | [DEPLOYMENT.md](operations/DEPLOYMENT.md) | Application Deployment |
| Debug container issue | [DEBUGGING.md](development/DEBUGGING.md) | Container Issues |
| Configure environment | [ENVIRONMENT.md](getting-started/ENVIRONMENT.md) | Configuration by Environment |
| Add background job | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) | Background Jobs |
| Handle errors correctly | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) | Error Handling |
| Use transactions | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) | Transaction Management |
| Add authentication | [AUTHENTICATION.md](architecture/AUTHENTICATION.md) | JWT Token Architecture |
| Cache data | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) | Caching |
| Check PM2 status | [DEV-CONTAINER.md](development/DEV-CONTAINER.md) | PM2 Process Management |
| View logs | [DEBUGGING.md](development/DEBUGGING.md) | PM2 Log Access |
| Understand architecture | [OVERVIEW.md](architecture/OVERVIEW.md) | System Architecture Diagram |
| Check ADR for decision | [adr/index.md](adr/index.md) | ADR by category |
| Use subagent | [subagents/OVERVIEW.md](subagents/OVERVIEW.md) | Available Subagents |
| API versioning | [API-VERSIONING.md](development/API-VERSIONING.md) | Phase 2 infrastructure |

---

## Documentation Tree

```
docs/
+-- AI-DOCUMENTATION-INDEX.md        # THIS FILE - AI navigation index
+-- README.md                        # Human-readable doc hub
|
+-- adr/                             # Architecture Decision Records (57 ADRs)
|   +-- index.md                     # ADR index by category
|   +-- 0001-*.md                    # Standardized error handling
|   +-- 0002-*.md                    # Transaction management (withTransaction)
|   +-- 0003-*.md                    # Input validation (Zod middleware)
|   +-- 0008-*.md                    # API versioning (/api/v1/)
|   +-- 0014-*.md                    # Platform: Linux only (CRITICAL)
|   +-- 0028-*.md                    # API response (sendSuccess/sendError)
|   +-- 0034-*.md                    # Repository pattern (get*/find*/list*)
|   +-- 0035-*.md                    # Service layer architecture
|   +-- 0050-*.md                    # PostgreSQL observability + Logstash
|   +-- 0057-*.md                    # Test remediation post-API versioning
|   +-- adr-implementation-tracker.md # Implementation status
|
+-- architecture/
|   +-- OVERVIEW.md                  # System architecture, data flows, entities
|   +-- DATABASE.md                  # Schema design, extensions, setup
|   +-- AUTHENTICATION.md            # OAuth, JWT, security features
|   +-- WEBSOCKET_USAGE.md           # Real-time communication patterns
|   +-- api-versioning-infrastructure.md # Phase 2 versioning details
|
+-- development/
|   +-- CODE-PATTERNS.md             # Error handling, repos, API responses
|   +-- TESTING.md                   # Unit/integration/E2E, known issues
|   +-- DEBUGGING.md                 # Container, DB, API, PM2 debugging
|   +-- DEV-CONTAINER.md             # PM2, Logstash, container services
|   +-- API-VERSIONING.md            # API versioning workflows
|   +-- DESIGN_TOKENS.md             # Neo-Brutalism design system
|   +-- ERROR-LOGGING-PATHS.md       # req.originalUrl pattern
|   +-- test-path-migration.md       # Test file reorganization
|
+-- getting-started/
|   +-- QUICKSTART.md                # Quick setup instructions
|   +-- INSTALL.md                   # Full installation guide
|   +-- ENVIRONMENT.md               # Environment variables reference
|
+-- operations/
|   +-- DEPLOYMENT.md                # Production deployment guide
|   +-- BARE-METAL-SETUP.md          # Server provisioning
|   +-- MONITORING.md                # Bugsink, health checks
|   +-- LOGSTASH-QUICK-REF.md        # Log aggregation reference
|   +-- LOGSTASH-TROUBLESHOOTING.md  # Logstash debugging
|
+-- subagents/
|   +-- OVERVIEW.md                  # Subagent system introduction
|   +-- CODER-GUIDE.md               # Code development patterns
|   +-- TESTER-GUIDE.md              # Testing strategies
|   +-- DATABASE-GUIDE.md            # Database workflows
|   +-- DEVOPS-GUIDE.md              # Deployment/infrastructure
|   +-- FRONTEND-GUIDE.md            # UI/UX development
|   +-- AI-USAGE-GUIDE.md            # Gemini integration
|   +-- DOCUMENTATION-GUIDE.md       # Writing docs
|   +-- SECURITY-DEBUG-GUIDE.md      # Security and debugging
|
+-- tools/
|   +-- MCP-CONFIGURATION.md         # MCP servers setup
|   +-- BUGSINK-SETUP.md             # Error tracking setup
|   +-- VSCODE-SETUP.md              # Editor configuration
|
+-- archive/                         # Historical docs, session notes
+-- sessions/                        # Development session logs
+-- plans/                           # Feature implementation plans
+-- research/                        # Investigation notes
```

---

## Problem-to-Document Mapping

### Database Issues

| Problem | Documents |
| --- | --- |
| Schema out of sync | [DATABASE-GUIDE.md](subagents/DATABASE-GUIDE.md), [CLAUDE.md](../CLAUDE.md) schema sync section |
| Migration needed | [DATABASE.md](architecture/DATABASE.md), ADR-013, ADR-023 |
| Query performance | [DEBUGGING.md](development/DEBUGGING.md) Query Performance Issues |
| Connection errors | [DEBUGGING.md](development/DEBUGGING.md) Database Issues |
| Transaction patterns | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) Transaction Management, ADR-002 |
| Repository methods | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) Repository Patterns, ADR-034 |

### Test Failures

| Problem | Documents |
| --- | --- |
| Tests fail in container | [TESTING.md](development/TESTING.md), ADR-014 |
| Vitest globalSetup isolation | [CLAUDE.md](../CLAUDE.md) Integration Test Issues #1 |
| Cache stale after insert | [CLAUDE.md](../CLAUDE.md) Integration Test Issues #3 |
| Queue interference | [CLAUDE.md](../CLAUDE.md) Integration Test Issues #2 |
| API path mismatches | [TESTING.md](development/TESTING.md) API Versioning in Tests, ADR-057 |
| Type check failures | [DEBUGGING.md](development/DEBUGGING.md) Type Check Failures |
| TZ environment breaks async | [CLAUDE.md](../CLAUDE.md) Integration Test Issues #7 |

### Deployment Issues

| Problem | Documents |
| --- | --- |
| PM2 not starting | [DEBUGGING.md](development/DEBUGGING.md) PM2 Process Issues |
| NGINX configuration | [DEPLOYMENT.md](operations/DEPLOYMENT.md) NGINX Configuration |
| SSL certificates | [DEBUGGING.md](development/DEBUGGING.md) SSL Certificate Issues |
| CI/CD failures | [DEPLOYMENT.md](operations/DEPLOYMENT.md) CI/CD Pipeline, ADR-017 |
| Container won't start | [DEBUGGING.md](development/DEBUGGING.md) Container Issues |
| Bugsink not receiving | [BUGSINK-SETUP.md](tools/BUGSINK-SETUP.md), [MONITORING.md](operations/MONITORING.md) |

### Frontend/UI Changes

| Problem | Documents |
| --- | --- |
| Component patterns | [FRONTEND-GUIDE.md](subagents/FRONTEND-GUIDE.md), ADR-044 |
| Design tokens | [DESIGN_TOKENS.md](development/DESIGN_TOKENS.md), ADR-012 |
| State management | ADR-005, [OVERVIEW.md](architecture/OVERVIEW.md) Frontend Stack |
| Hot reload broken | [DEBUGGING.md](development/DEBUGGING.md) Frontend Issues |
| CORS errors | [DEBUGGING.md](development/DEBUGGING.md) API Calls Failing |

### API Development

| Problem | Documents |
| --- | --- |
| Response format | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) API Response Patterns, ADR-028 |
| Input validation | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) Input Validation, ADR-003 |
| Error handling | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) Error Handling, ADR-001 |
| Rate limiting | ADR-032, [OVERVIEW.md](architecture/OVERVIEW.md) |
| API versioning | [API-VERSIONING.md](development/API-VERSIONING.md), ADR-008 |
| Authentication | [AUTHENTICATION.md](architecture/AUTHENTICATION.md), ADR-048 |

### Background Jobs

| Problem | Documents |
| --- | --- |
| Jobs not processing | [DEBUGGING.md](development/DEBUGGING.md) Background Job Issues |
| Queue configuration | [CODE-PATTERNS.md](development/CODE-PATTERNS.md) Background Jobs, ADR-006 |
| Worker crashes | [DEBUGGING.md](development/DEBUGGING.md), ADR-053 |
| Scheduled jobs | ADR-037, [OVERVIEW.md](architecture/OVERVIEW.md) Scheduled Jobs |

---

## Document Priority Matrix

### CRITICAL (Read First)

| Document | Purpose | Key Content |
| --- | --- | --- |
| [CLAUDE.md](../CLAUDE.md) | AI agent instructions | Rules, patterns, known issues |
| [ADR-014](adr/0014-containerization-and-deployment-strategy.md) | Platform requirement | Tests MUST run in container |
| [DEV-CONTAINER.md](development/DEV-CONTAINER.md) | Development environment | PM2, Logstash, services |

### HIGH (Core Development)

| Document | Purpose | Key Content |
| --- | --- | --- |
| [CODE-PATTERNS.md](development/CODE-PATTERNS.md) | Code templates | Error handling, repos, APIs |
| [TESTING.md](development/TESTING.md) | Test execution | Commands, known issues |
| [DATABASE.md](architecture/DATABASE.md) | Schema reference | Setup, extensions, users |
| [ADR-034](adr/0034-repository-pattern-standards.md) | Repository naming | get*/find*/list\* |
| [ADR-028](adr/0028-api-response-standardization.md) | API responses | sendSuccess/sendError |
| [ADR-001](adr/0001-standardized-error-handling.md) | Error handling | handleDbError, NotFoundError |

### MEDIUM (Specialized Tasks)

| Document | Purpose | Key Content |
| --- | --- | --- |
| [subagents/OVERVIEW.md](subagents/OVERVIEW.md) | Subagent selection | When to delegate |
| [DEPLOYMENT.md](operations/DEPLOYMENT.md) | Production deployment | PM2, NGINX, CI/CD |
| [DEBUGGING.md](development/DEBUGGING.md) | Troubleshooting | Common issues, solutions |
| [ENVIRONMENT.md](getting-started/ENVIRONMENT.md) | Config reference | Variables by environment |
| [AUTHENTICATION.md](architecture/AUTHENTICATION.md) | Auth patterns | OAuth, JWT, security |
| [API-VERSIONING.md](development/API-VERSIONING.md) | Versioning | /api/v1/ prefix |

### LOW (Reference/Historical)

| Document | Purpose | Key Content |
| --- | --- | --- |
| [archive/](archive/) | Historical docs | Session notes, old plans |
| ADR-013, ADR-023 | Migration strategy | Proposed, not implemented |
| ADR-024 | Feature flags | Proposed |
| ADR-025 | i18n/l10n | Proposed |

---

## Cross-Reference Matrix

| Document | References | Referenced By |
| --- | --- | --- |
| **CLAUDE.md** | ADR-001, ADR-002, ADR-008, ADR-014, ADR-028, ADR-034, ADR-035, ADR-050, ADR-057 | All development docs |
| **ADR-008** | ADR-028 | API-VERSIONING.md, TESTING.md, ADR-057 |
| **ADR-014** | - | CLAUDE.md, TESTING.md, DEPLOYMENT.md, DEV-CONTAINER.md |
| **ADR-028** | ADR-001 | CODE-PATTERNS.md, OVERVIEW.md |
| **ADR-034** | ADR-001 | CODE-PATTERNS.md, DATABASE-GUIDE.md |
| **ADR-057** | ADR-008, ADR-028 | TESTING.md |
| **CODE-PATTERNS.md** | ADR-001, ADR-002, ADR-003, ADR-028, ADR-034, ADR-036, ADR-048 | CODER-GUIDE.md |
| **TESTING.md** | ADR-014, ADR-057, CLAUDE.md | TESTER-GUIDE.md, DEBUGGING.md |
| **DEBUGGING.md** | DEV-CONTAINER.md, TESTING.md, MONITORING.md | DEVOPS-GUIDE.md |
| **DEV-CONTAINER.md** | ADR-014, ADR-050, ecosystem.dev.config.cjs | DEBUGGING.md, CLAUDE.md |
| **OVERVIEW.md** | ADR-001 through ADR-050+ | All architecture docs |
| **DATABASE.md** | ADR-002, ADR-013, ADR-055 | DATABASE-GUIDE.md |

---

## Navigation Patterns

### Adding a Feature

```
1. CLAUDE.md -> Project rules, patterns
2. CODE-PATTERNS.md -> Implementation templates
3. Relevant subagent guide -> Domain-specific patterns
4. Related ADRs -> Design decisions
5. TESTING.md -> Test requirements
```

### Fixing a Bug

```
1. DEBUGGING.md -> Common issues checklist
2. TESTING.md -> Run tests in container
3. Error logs (pm2/bugsink) -> Identify root cause
4. CODE-PATTERNS.md -> Correct pattern reference
5. Related ADR -> Architectural context
```

### Deploying

```
1. DEPLOYMENT.md -> Deployment procedures
2. ENVIRONMENT.md -> Required variables
3. MONITORING.md -> Health check verification
4. LOGSTASH-QUICK-REF.md -> Log aggregation check
```

### Database Changes

```
1. DATABASE-GUIDE.md -> Schema sync requirements (CRITICAL)
2. DATABASE.md -> Schema design patterns
3. ADR-002 -> Transaction patterns
4. ADR-034 -> Repository methods
5. ADR-055 -> Normalization rules
```

### Subagent Selection

| Task Type | Subagent | Guide |
| --- | --- | --- |
| Write production code | `coder` | [CODER-GUIDE.md](subagents/CODER-GUIDE.md) |
| Database changes | `db-dev` | [DATABASE-GUIDE.md](subagents/DATABASE-GUIDE.md) |
| Create tests | `testwriter` | [TESTER-GUIDE.md](subagents/TESTER-GUIDE.md) |
| Fix failing tests | `tester` | [TESTER-GUIDE.md](subagents/TESTER-GUIDE.md) |
| Container/deployment | `devops` | [DEVOPS-GUIDE.md](subagents/DEVOPS-GUIDE.md) |
| UI components | `frontend-specialist` | [FRONTEND-GUIDE.md](subagents/FRONTEND-GUIDE.md) |
| External APIs | `integrations-specialist` | - |
| Security review | `security-engineer` | [SECURITY-DEBUG-GUIDE.md](subagents/SECURITY-DEBUG-GUIDE.md) |
| Production errors | `log-debug` | [SECURITY-DEBUG-GUIDE.md](subagents/SECURITY-DEBUG-GUIDE.md) |
| AI/Gemini issues | `ai-usage` | [AI-USAGE-GUIDE.md](subagents/AI-USAGE-GUIDE.md) |

---

## Key File Quick Reference

### Configuration

| File | Purpose |
| --- | --- |
| `server.ts` | Express app setup |
| `src/config/env.ts` | Environment validation (Zod) |
| `ecosystem.dev.config.cjs` | PM2 dev config |
| `ecosystem.config.cjs` | PM2 prod config |
| `vite.config.ts` | Vite build config |

### Core Implementation

| File | Purpose |
| --- | --- |
| `src/routes/*.routes.ts` | API route handlers |
| `src/services/db/*.db.ts` | Repository layer |
| `src/services/*.server.ts` | Server-only services |
| `src/services/queues.server.ts` | BullMQ queue definitions |
| `src/services/workers.server.ts` | BullMQ workers |
| `src/utils/apiResponse.ts` | sendSuccess/sendError/sendPaginated |
| `src/services/db/errors.db.ts` | handleDbError, NotFoundError |
| `src/services/db/transaction.db.ts` | withTransaction |

### Database Schema

| File | Purpose |
| --- | --- |
| `sql/master_schema_rollup.sql` | Test DB, complete reference |
| `sql/initial_schema.sql` | Fresh install (identical to rollup) |
| `sql/migrations/*.sql` | Production ALTER statements |

### Testing

| File | Purpose |
| --- | --- |
| `vitest.config.ts` | Unit test config |
| `vitest.config.integration.ts` | Integration test config |
| `vitest.config.e2e.ts` | E2E test config |
| `src/tests/utils/mockFactories.ts` | Mock data factories |
| `src/tests/utils/storeHelpers.ts` | Store test helpers |

---

## ADR Quick Reference

### By Implementation Status

**Implemented**: 001, 002, 003, 004, 006, 008, 009, 010, 016, 017, 020, 021, 028, 032, 033, 034, 035, 036, 037, 038, 040, 041, 043, 044, 045, 046, 050, 051, 052, 055, 057

**Partially Implemented**: 012, 014, 015, 048

**Proposed**: 011, 013, 022, 023, 024, 025, 029, 030, 031, 039, 047, 053, 054, 056

### By Category

| Category | ADRs |
| --- | --- |
| Core Infrastructure | 002, 007, 020, 030 |
| Data Management | 009, 013, 019, 023, 031, 055 |
| API & Integration | 003, 008, 018, 022, 028 |
| Security | 001, 011, 016, 029, 032, 033, 048 |
| Observability | 004, 015, 050, 051, 052, 056 |
| Deployment & Ops | 006, 014, 017, 024, 037, 038, 053, 054 |
| Frontend/UI | 005, 012, 025, 026, 044 |
| Dev Workflow | 010, 021, 027, 040, 045, 047, 057 |
| Architecture Patterns | 034, 035, 036, 039, 041, 042, 043, 046, 049 |

---

## Essential Commands

```bash
# Run all tests (MUST use container)
podman exec -it flyer-crawler-dev npm test

# Run unit tests
podman exec -it flyer-crawler-dev npm run test:unit

# Run type check
podman exec -it flyer-crawler-dev npm run type-check

# Run integration tests
podman exec -it flyer-crawler-dev npm run test:integration

# PM2 status
podman exec -it flyer-crawler-dev pm2 status

# PM2 logs
podman exec -it flyer-crawler-dev pm2 logs

# Restart all processes
podman exec -it flyer-crawler-dev pm2 restart all
```

---

_This index is optimized for AI agent consumption. Updated: 2026-01-28_
@@ -316,6 +316,7 @@ app.use('/api/v1', (req, res, next) => {
- [ADR-018](./0018-api-documentation-strategy.md) - API Documentation Strategy (versioned OpenAPI specs)
- [ADR-028](./0028-api-response-standardization.md) - Response Standardization (envelope pattern applies to all versions)
- [ADR-016](./0016-api-security-hardening.md) - Security Hardening (applies to all versions)
- [ADR-057](./0057-test-remediation-post-api-versioning.md) - Test Remediation Post-API Versioning (documents test migration)

## Implementation Checklist

@@ -363,6 +363,13 @@ The following files contain acknowledged code smell violations that are deferred
- `src/tests/utils/mockFactories.ts` - Mock factories (1553 lines)
- `src/tests/utils/testHelpers.ts` - Test utilities

## Related ADRs

- [ADR-014](./0014-containerization-and-deployment-strategy.md) - Containerization (tests must run in dev container)
- [ADR-040](./0040-testing-economics-and-priorities.md) - Testing Economics and Priorities
- [ADR-045](./0045-test-data-factories-and-fixtures.md) - Test Data Factories and Fixtures
- [ADR-057](./0057-test-remediation-post-api-versioning.md) - Test Remediation Post-API Versioning

## Future Enhancements

1. **Browser E2E Tests**: Consider adding Playwright for actual browser testing

@@ -2,7 +2,9 @@

**Date**: 2025-12-12

**Status**: Proposed
**Status**: Superseded by [ADR-023](./0023-database-schema-migration-strategy.md)

**Note**: This ADR was an early draft. ADR-023 provides a more detailed specification for the same topic.

## Context

@@ -4,6 +4,8 @@

**Status**: Proposed

**Supersedes**: [ADR-013](./0013-database-schema-migration-strategy.md)

## Context

The `README.md` indicates that the database schema is managed by manually running a large `schema.sql.txt` file. This approach is highly error-prone, makes tracking changes difficult, and is not feasible for updating a live production database without downtime or data loss.

@@ -1,18 +1,333 @@
# ADR-024: Feature Flagging Strategy

**Date**: 2025-12-12
**Status**: Accepted
**Implemented**: 2026-01-28
**Implementation Plan**: [2026-01-28-adr-024-feature-flags-implementation.md](../plans/2026-01-28-adr-024-feature-flags-implementation.md)

**Status**: Proposed
## Implementation Summary

Feature flag infrastructure fully implemented with 89 new tests (all passing). Total test suite: 3,616 tests passing.

**Backend**:

- Zod-validated schema in `src/config/env.ts` with 6 feature flags
- Service module `src/services/featureFlags.server.ts` with `isFeatureEnabled()`, `getFeatureFlags()`, `getEnabledFeatureFlags()`
- Admin endpoint `GET /api/v1/admin/feature-flags` (requires admin authentication)
- Convenience exports for direct boolean access

**Frontend**:

- Config section in `src/config.ts` with `VITE_FEATURE_*` environment variables
- Type declarations in `src/vite-env.d.ts`
- React hook `useFeatureFlag()` and `useAllFeatureFlags()` in `src/hooks/useFeatureFlag.ts`
- Declarative component `<FeatureFlag>` in `src/components/FeatureFlag.tsx`

**Current Flags**: `bugsinkSync`, `advancedRbac`, `newDashboard`, `betaRecipes`, `experimentalAi`, `debugMode`

---

## Context

As the application grows, there is no way to roll out new features to a subset of users (e.g., for beta testing) or to quickly disable a problematic feature in production without a full redeployment.
Application lacks controlled feature rollout capability. No mechanism for beta testing, quick production disablement, or gradual rollouts without full redeployment. Need type-safe, configuration-based system integrating with ADR-007 Zod validation.

## Decision

We will implement a feature flagging system. This could start with a simple configuration-based approach (defined in `ADR-007`) and evolve to use a dedicated service like **Flagsmith** or **LaunchDarkly**. This ADR will define how feature flags are created, managed, and checked in both the backend and frontend code.
Implement environment-variable-based feature flag system. Backend: Zod-validated schema in `src/config/env.ts` + dedicated service. Frontend: Vite env vars + React hook + declarative component. All flags default `false` (opt-in model). Future migration path to Flagsmith/LaunchDarkly preserved via abstraction layer.

## Consequences

**Positive**: Decouples feature releases from code deployments, reducing risk and allowing for more controlled, gradual rollouts and A/B testing. Enables easier experimentation and faster iteration.
**Negative**: Adds complexity to the codebase with conditional logic around features. Requires careful management of feature flag states to avoid technical debt.
- **Positive**: Decouples releases from deployments → reduced risk, gradual rollouts, A/B testing capability
- **Negative**: Conditional logic complexity → requires sunset policy (3-month max after full rollout)
- **Neutral**: Restart required for flag changes (acceptable for current scale, external service removes this constraint)

---

## Implementation Details

### Architecture Overview

```text
Environment Variables (FEATURE_*, VITE_FEATURE_*)
        │
        ├── Backend ──► src/config/env.ts (Zod) ──► src/services/featureFlags.server.ts
        │                                                        │
        │                                             ┌──────────┴──────────┐
        │                                             │                     │
        │                                   isFeatureEnabled()     getAllFeatureFlags()
        │                                             │
        │                                      Routes/Services
        │
        └── Frontend ─► src/config.ts ──► src/hooks/useFeatureFlag.ts
                                                      │
                                       ┌──────────────┼──────────────┐
                                       │              │              │
                              useFeatureFlag()  useAllFeatureFlags()  <FeatureFlag>
                                       │                              Component
                                  Components
```

### File Structure

| File | Purpose | Layer |
| --- | --- | --- |
| `src/config/env.ts` | Zod schema + env loading | Backend config |
| `src/services/featureFlags.server.ts` | Flag access service | Backend runtime |
| `src/config.ts` | Vite env parsing | Frontend config |
| `src/vite-env.d.ts` | TypeScript declarations | Frontend types |
| `src/hooks/useFeatureFlag.ts` | React hook | Frontend runtime |
| `src/components/FeatureFlag.tsx` | Declarative wrapper | Frontend UI |

### Naming Convention

| Context | Pattern | Example |
| --- | --- | --- |
| Backend env var | `FEATURE_SNAKE_CASE` | `FEATURE_NEW_DASHBOARD` |
| Frontend env var | `VITE_FEATURE_SNAKE_CASE` | `VITE_FEATURE_NEW_DASHBOARD` |
| Config property | `camelCase` | `config.featureFlags.newDashboard` |
| Hook/function param | `camelCase` literal | `isFeatureEnabled('newDashboard')` |

### Backend Implementation

#### Schema Definition (`src/config/env.ts`)

```typescript
/**
 * Feature flags schema (ADR-024).
 * All flags default false (disabled) for safety.
 */
const featureFlagsSchema = z.object({
  newDashboard: booleanString(false), // FEATURE_NEW_DASHBOARD
  betaRecipes: booleanString(false), // FEATURE_BETA_RECIPES
  experimentalAi: booleanString(false), // FEATURE_EXPERIMENTAL_AI
  debugMode: booleanString(false), // FEATURE_DEBUG_MODE
});

// In loadEnvVars():
featureFlags: {
  newDashboard: process.env.FEATURE_NEW_DASHBOARD,
  betaRecipes: process.env.FEATURE_BETA_RECIPES,
  experimentalAi: process.env.FEATURE_EXPERIMENTAL_AI,
  debugMode: process.env.FEATURE_DEBUG_MODE,
},
```

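The schema above leans on a `booleanString()` helper that is not shown in this excerpt. A plausible sketch of it, assuming it follows the `.env.example` rule that only the literal string `'true'` enables a flag and unset values fall back to the default:

```typescript
// Assumed shape of the helper (the real one lives in src/config/env.ts and may differ).
// Uses the same Zod `z` import as the schema above:
// undefined -> defaultValue, 'true' -> true, anything else -> false.
const booleanString = (defaultValue: boolean) =>
  z
    .string()
    .optional()
    .transform((value) => (value === undefined ? defaultValue : value === 'true'));
```
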
#### Service Module (`src/services/featureFlags.server.ts`)

```typescript
import { config, isDevelopment } from '../config/env';
import { logger } from './logger.server';

export type FeatureFlagName = keyof typeof config.featureFlags;

/**
 * Check feature flag state. Logs in development mode.
 */
export function isFeatureEnabled(flagName: FeatureFlagName): boolean {
  const enabled = config.featureFlags[flagName];
  if (isDevelopment) {
    logger.debug({ flag: flagName, enabled }, 'Feature flag checked');
  }
  return enabled;
}

/**
 * Get all flags (admin/debug endpoints).
 */
export function getAllFeatureFlags(): Record<FeatureFlagName, boolean> {
  return { ...config.featureFlags };
}

// Convenience exports (evaluated once at startup)
export const isNewDashboardEnabled = config.featureFlags.newDashboard;
export const isBetaRecipesEnabled = config.featureFlags.betaRecipes;
```

#### Usage in Routes

```typescript
import { isFeatureEnabled } from '../services/featureFlags.server';

router.get('/dashboard', async (req, res) => {
  if (isFeatureEnabled('newDashboard')) {
    return sendSuccess(res, { version: 'v2', data: await getNewDashboardData() });
  }
  return sendSuccess(res, { version: 'v1', data: await getLegacyDashboardData() });
});
```

### Frontend Implementation

#### Config (`src/config.ts`)

```typescript
const config = {
  // ... existing sections ...

  featureFlags: {
    newDashboard: import.meta.env.VITE_FEATURE_NEW_DASHBOARD === 'true',
    betaRecipes: import.meta.env.VITE_FEATURE_BETA_RECIPES === 'true',
    experimentalAi: import.meta.env.VITE_FEATURE_EXPERIMENTAL_AI === 'true',
    debugMode: import.meta.env.VITE_FEATURE_DEBUG_MODE === 'true',
  },
};
```

#### Type Declarations (`src/vite-env.d.ts`)

```typescript
interface ImportMetaEnv {
  readonly VITE_FEATURE_NEW_DASHBOARD?: string;
  readonly VITE_FEATURE_BETA_RECIPES?: string;
  readonly VITE_FEATURE_EXPERIMENTAL_AI?: string;
  readonly VITE_FEATURE_DEBUG_MODE?: string;
}
```

#### React Hook (`src/hooks/useFeatureFlag.ts`)

```typescript
import { useMemo } from 'react';
import config from '../config';

export type FeatureFlagName = keyof typeof config.featureFlags;

export function useFeatureFlag(flagName: FeatureFlagName): boolean {
  return useMemo(() => config.featureFlags[flagName], [flagName]);
}

export function useAllFeatureFlags(): Record<FeatureFlagName, boolean> {
  return useMemo(() => ({ ...config.featureFlags }), []);
}
```

#### Declarative Component (`src/components/FeatureFlag.tsx`)

```typescript
import { ReactNode } from 'react';
import { useFeatureFlag, FeatureFlagName } from '../hooks/useFeatureFlag';

interface FeatureFlagProps {
  name: FeatureFlagName;
  children: ReactNode;
  fallback?: ReactNode;
}

export function FeatureFlag({ name, children, fallback = null }: FeatureFlagProps) {
  const isEnabled = useFeatureFlag(name);
  return <>{isEnabled ? children : fallback}</>;
}
```

#### Usage in Components

```tsx
// Declarative approach
<FeatureFlag name="newDashboard" fallback={<LegacyDashboard />}>
  <NewDashboard />
</FeatureFlag>;

// Hook approach (for logic beyond rendering)
const isNewDashboard = useFeatureFlag('newDashboard');
useEffect(() => {
  if (isNewDashboard) analytics.track('new_dashboard_viewed');
}, [isNewDashboard]);
```

### Testing Patterns

#### Backend Test Setup

```typescript
// Reset modules to test different flag states
beforeEach(() => {
  vi.resetModules();
  process.env.FEATURE_NEW_DASHBOARD = 'true';
});

// src/services/featureFlags.server.test.ts
describe('isFeatureEnabled', () => {
  it('returns false for disabled flags', () => {
    expect(isFeatureEnabled('newDashboard')).toBe(false);
  });
});
```

#### Frontend Test Setup

```typescript
// Mock config module
vi.mock('../config', () => ({
  default: {
    featureFlags: {
      newDashboard: true,
      betaRecipes: false,
    },
  },
}));

// Component test
describe('FeatureFlag', () => {
  it('renders fallback when disabled', () => {
    render(
      <FeatureFlag name="betaRecipes" fallback={<div>Old</div>}>
        <div>New</div>
      </FeatureFlag>
    );
    expect(screen.getByText('Old')).toBeInTheDocument();
  });
});
```

### Flag Lifecycle

| Phase | Actions |
| --- | --- |
| **Add** | 1. Add to both schemas (backend + frontend) 2. Default `false` 3. Document in `.env.example` |
| **Enable** | Set env var `='true'` → restart application |
| **Remove** | 1. Remove conditional code 2. Remove from schemas 3. Remove env vars |
| **Sunset** | Max 3 months after full rollout → remove flag |

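For the **Add** phase, the touch points look roughly like this for a hypothetical `quickFilters` flag (the flag name and variable names are invented for illustration only):

```typescript
// 1. src/config/env.ts - backend schema gains the new flag (default false)
const featureFlagsSchema = z.object({
  // ...existing flags...
  quickFilters: booleanString(false), // FEATURE_QUICK_FILTERS
});

// 2. src/config.ts - frontend config mirrors it with the VITE_ prefix:
//    quickFilters: import.meta.env.VITE_FEATURE_QUICK_FILTERS === 'true',

// 3. .env.example - document both variables as disabled by default:
//    # FEATURE_QUICK_FILTERS=false
//    # VITE_FEATURE_QUICK_FILTERS=false
```
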
### Admin Endpoint (Optional)

```typescript
// GET /api/v1/admin/feature-flags (admin-only)
router.get('/feature-flags', requireAdmin, async (req, res) => {
  sendSuccess(res, { flags: getAllFeatureFlags() });
});
```

### Integration with ADR-007

Feature flags extend existing Zod configuration pattern:

- **Validation**: Same `booleanString()` transform used by other config
- **Loading**: Same `loadEnvVars()` function loads `FEATURE_*` vars
- **Type Safety**: `FeatureFlagName` type derived from config schema
- **Fail-Fast**: Invalid flag values fail at startup (Zod validation)

### Future Migration Path

Current implementation abstracts flag access via `isFeatureEnabled()` function and `useFeatureFlag()` hook. External service migration requires:

1. Replace implementation internals of these functions
2. Add API client for Flagsmith/LaunchDarkly
3. No changes to consuming code (routes/components)

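A hedged sketch of what step 1 could look like if flags later move to an external provider (the provider client and its refresh mechanism are hypothetical; none of this exists in the current implementation):

```typescript
// Only the internals of isFeatureEnabled() change; callers keep the same signature.
import { config } from '../config/env';

type FeatureFlagName = keyof typeof config.featureFlags;

let remoteFlags: Record<string, boolean> | null = null; // refreshed by a hypothetical provider client

export function isFeatureEnabled(flagName: FeatureFlagName): boolean {
  if (remoteFlags && flagName in remoteFlags) {
    return remoteFlags[flagName]; // remote value wins once the provider has answered
  }
  return config.featureFlags[flagName]; // env-based fallback keeps today's behaviour
}
```
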
### Explicitly Out of Scope

- External service integration (Flagsmith/LaunchDarkly)
- Database-stored flags
- Real-time flag updates (WebSocket/SSE)
- User-specific flags (A/B testing percentages)
- Flag inheritance/hierarchy
- Flag audit logging

### Key Files Reference

| Action | Files |
| --- | --- |
| Add new flag | `src/config/env.ts`, `src/config.ts`, `src/vite-env.d.ts`, `.env.example` |
| Check flag (backend) | Import from `src/services/featureFlags.server.ts` |
| Check flag (frontend) | Import hook from `src/hooks/useFeatureFlag.ts` or component from `src/components/FeatureFlag.tsx` |
| Test flag behavior | Mock via `vi.resetModules()` (backend) or `vi.mock('../config')` (frontend) |

@@ -195,6 +195,12 @@ Do NOT add tests:
- Coverage percentages may not satisfy external audits
- Requires judgment calls that may be inconsistent

## Related ADRs

- [ADR-010](./0010-testing-strategy-and-standards.md) - Testing Strategy and Standards (this ADR extends ADR-010)
- [ADR-045](./0045-test-data-factories-and-fixtures.md) - Test Data Factories and Fixtures
- [ADR-057](./0057-test-remediation-post-api-versioning.md) - Test Remediation Post-API Versioning

## Key Files

- `docs/adr/0010-testing-strategy-and-standards.md` - Testing mechanics

@@ -4,7 +4,7 @@

**Status**: Accepted (Fully Implemented)

**Related**: [ADR-015](0015-application-performance-monitoring-and-error-tracking.md), [ADR-004](0004-standardized-application-wide-structured-logging.md)
**Related**: [ADR-015](0015-error-tracking-and-observability.md), [ADR-004](0004-standardized-application-wide-structured-logging.md)

## Context

@@ -335,7 +335,7 @@ SELECT award_achievement('user-uuid', 'Nonexistent Badge');

## References

- [ADR-015: Application Performance Monitoring](0015-application-performance-monitoring-and-error-tracking.md)
- [ADR-015: Error Tracking and Observability](0015-error-tracking-and-observability.md)
- [ADR-004: Standardized Structured Logging](0004-standardized-application-wide-structured-logging.md)
- [PostgreSQL RAISE Documentation](https://www.postgresql.org/docs/current/plpgsql-errors-and-messages.html)
- [PostgreSQL Logging Configuration](https://www.postgresql.org/docs/current/runtime-config-logging.html)

@@ -332,6 +332,6 @@ Response:
## References

- [ADR-006: Background Job Processing](./0006-background-job-processing-and-task-queues.md)
- [ADR-015: Application Performance Monitoring](./0015-application-performance-monitoring-and-error-tracking.md)
- [ADR-015: Error Tracking and Observability](./0015-error-tracking-and-observability.md)
- [Bugsink API Documentation](https://bugsink.com/docs/api/)
- [Gitea API Documentation](https://docs.gitea.io/en-us/api-usage/)

@@ -15,9 +15,10 @@ This document tracks the implementation status and estimated effort for all Arch

| Status | Count |
| --- | --- |
| Accepted (Fully Implemented) | 40 |
| Accepted (Fully Implemented) | 42 |
| Partially Implemented | 2 |
| Proposed (Not Started) | 14 |
| Proposed (Not Started) | 12 |
| Superseded | 1 |

---

@@ -34,13 +35,13 @@ This document tracks the implementation status and estimated effort for all Arch

### Category 2: Data Management

| ADR | Title | Status | Effort | Notes |
| --- | --- | --- | --- | --- |
| [ADR-009](./0009-caching-strategy-for-read-heavy-operations.md) | Caching Strategy | Accepted | - | Fully implemented |
| [ADR-013](./0013-database-schema-migration-strategy.md) | Schema Migrations v1 | Proposed | M | Superseded by ADR-023 |
| [ADR-019](./0019-data-backup-and-recovery-strategy.md) | Backup & Recovery | Accepted | - | Fully implemented |
| [ADR-023](./0023-database-schema-migration-strategy.md) | Schema Migrations v2 | Proposed | L | Requires tooling setup |
| [ADR-031](./0031-data-retention-and-privacy-compliance.md) | Data Retention & Privacy | Proposed | XL | Legal/compliance review needed |
| ADR | Title | Status | Effort | Notes |
| --- | --- | --- | --- | --- |
| [ADR-009](./0009-caching-strategy-for-read-heavy-operations.md) | Caching Strategy | Accepted | - | Fully implemented |
| [ADR-013](./0013-database-schema-migration-strategy.md) | Schema Migrations v1 | Superseded | - | Superseded by ADR-023 |
| [ADR-019](./0019-data-backup-and-recovery-strategy.md) | Backup & Recovery | Accepted | - | Fully implemented |
| [ADR-023](./0023-database-schema-migration-strategy.md) | Schema Migrations v2 | Proposed | L | Requires tooling setup |
| [ADR-031](./0031-data-retention-and-privacy-compliance.md) | Data Retention & Privacy | Proposed | XL | Legal/compliance review needed |

### Category 3: API & Integration

@@ -77,16 +78,16 @@ This document tracks the implementation status and estimated effort for all Arch

### Category 6: Deployment & Operations

| ADR | Title | Status | Effort | Notes |
| --- | --- | --- | --- | --- |
| [ADR-006](./0006-background-job-processing-and-task-queues.md) | Background Jobs | Accepted | - | Fully implemented |
| [ADR-014](./0014-containerization-and-deployment-strategy.md) | Containerization | Partial | M | Docker done, K8s pending |
| [ADR-017](./0017-ci-cd-and-branching-strategy.md) | CI/CD & Branching | Accepted | - | Fully implemented |
| [ADR-024](./0024-feature-flagging-strategy.md) | Feature Flags | Proposed | M | New service/library needed |
| [ADR-037](./0037-scheduled-jobs-and-cron-pattern.md) | Scheduled Jobs | Accepted | - | Fully implemented |
| [ADR-038](./0038-graceful-shutdown-pattern.md) | Graceful Shutdown | Accepted | - | Fully implemented |
| [ADR-053](./0053-worker-health-checks.md) | Worker Health | Accepted | - | Fully implemented |
| [ADR-054](./0054-bugsink-gitea-issue-sync.md) | Bugsink-Gitea Sync | Proposed | L | Automated issue creation |
| ADR | Title | Status | Effort | Notes |
| --- | --- | --- | --- | --- |
| [ADR-006](./0006-background-job-processing-and-task-queues.md) | Background Jobs | Accepted | - | Fully implemented |
| [ADR-014](./0014-containerization-and-deployment-strategy.md) | Containerization | Partial | M | Docker done, K8s pending |
| [ADR-017](./0017-ci-cd-and-branching-strategy.md) | CI/CD & Branching | Accepted | - | Fully implemented |
| [ADR-024](./0024-feature-flagging-strategy.md) | Feature Flags | Accepted | - | Fully implemented |
| [ADR-037](./0037-scheduled-jobs-and-cron-pattern.md) | Scheduled Jobs | Accepted | - | Fully implemented |
| [ADR-038](./0038-graceful-shutdown-pattern.md) | Graceful Shutdown | Accepted | - | Fully implemented |
| [ADR-053](./0053-worker-health-checks.md) | Worker Health | Accepted | - | Fully implemented |
| [ADR-054](./0054-bugsink-gitea-issue-sync.md) | Bugsink-Gitea Sync | Proposed | L | Automated issue creation |

### Category 7: Frontend / User Interface

@@ -108,6 +109,7 @@ This document tracks the implementation status and estimated effort for all Arch
| [ADR-040](./0040-testing-economics-and-priorities.md) | Testing Economics | Accepted | - | Fully implemented |
| [ADR-045](./0045-test-data-factories-and-fixtures.md) | Test Data Factories | Accepted | - | Fully implemented |
| [ADR-047](./0047-project-file-and-folder-organization.md) | Project Organization | Proposed | XL | Major reorganization |
| [ADR-057](./0057-test-remediation-post-api-versioning.md) | Test Remediation | Accepted | - | Fully implemented |

### Category 9: Architecture Patterns

@@ -132,15 +134,14 @@ These ADRs are proposed or partially implemented, ordered by suggested implement

| Priority | ADR | Title | Status | Effort | Rationale |
| --- | --- | --- | --- | --- | --- |
| 1 | ADR-024 | Feature Flags | Proposed | M | Safer deployments, A/B testing |
| 2 | ADR-054 | Bugsink-Gitea Sync | Proposed | L | Automated issue tracking from errors |
| 3 | ADR-023 | Schema Migrations v2 | Proposed | L | Database evolution support |
| 4 | ADR-029 | Secret Rotation | Proposed | L | Security improvement |
| 5 | ADR-030 | Circuit Breaker | Proposed | L | Resilience improvement |
| 6 | ADR-056 | APM (Performance) | Proposed | M | Enable when performance issues arise |
| 7 | ADR-011 | Authorization & RBAC | Proposed | XL | Advanced permission system |
| 8 | ADR-025 | i18n & l10n | Proposed | XL | Multi-language support |
| 9 | ADR-031 | Data Retention & Privacy | Proposed | XL | Compliance requirements |
| 1 | ADR-054 | Bugsink-Gitea Sync | Proposed | L | Automated issue tracking from errors |
| 2 | ADR-023 | Schema Migrations v2 | Proposed | L | Database evolution support |
| 3 | ADR-029 | Secret Rotation | Proposed | L | Security improvement |
| 4 | ADR-030 | Circuit Breaker | Proposed | L | Resilience improvement |
| 5 | ADR-056 | APM (Performance) | Proposed | M | Enable when performance issues arise |
| 6 | ADR-011 | Authorization & RBAC | Proposed | XL | Advanced permission system |
| 7 | ADR-025 | i18n & l10n | Proposed | XL | Multi-language support |
| 8 | ADR-031 | Data Retention & Privacy | Proposed | XL | Compliance requirements |

---

@@ -148,6 +149,9 @@ These ADRs are proposed or partially implemented, ordered by suggested implement

| Date | ADR | Change |
| --- | --- | --- |
| 2026-01-28 | ADR-024 | Fully implemented - Backend/frontend feature flags, 89 tests, admin endpoint |
| 2026-01-28 | ADR-057 | Created - Test remediation documentation for ADR-008 Phase 2 migration |
| 2026-01-28 | ADR-013 | Marked as Superseded by ADR-023 |
| 2026-01-27 | ADR-008 | Test path migration complete - 23 files, ~70 paths updated, 274->345 tests passing |
| 2026-01-27 | ADR-008 | Phase 2 Complete - Version router factory, deprecation headers, 82 versioning tests |
| 2026-01-26 | ADR-015 | Completed - Added Sentry user context in AuthProvider, now fully implemented |

@@ -2,6 +2,8 @@
|
||||
|
||||
This directory contains a log of the architectural decisions made for the Flyer Crawler project.
|
||||
|
||||
**[Implementation Tracker](./adr-implementation-tracker.md)**: Track implementation status and effort estimates for all ADRs.
|
||||
|
||||
## 1. Foundational / Core Infrastructure
|
||||
|
||||
**[ADR-002](./0002-standardized-transaction-management.md)**: Standardized Transaction Management and Unit of Work Pattern (Accepted)
|
||||
@@ -12,7 +14,7 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
## 2. Data Management
|
||||
|
||||
**[ADR-009](./0009-caching-strategy-for-read-heavy-operations.md)**: Caching Strategy for Read-Heavy Operations (Accepted)
|
||||
**[ADR-013](./0013-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
|
||||
**[ADR-013](./0013-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Superseded by ADR-023)
|
||||
**[ADR-019](./0019-data-backup-and-recovery-strategy.md)**: Data Backup and Recovery Strategy (Accepted)
|
||||
**[ADR-023](./0023-database-schema-migration-strategy.md)**: Database Schema Migration Strategy (Proposed)
|
||||
**[ADR-031](./0031-data-retention-and-privacy-compliance.md)**: Data Retention and Privacy Compliance (Proposed)
|
||||
@@ -20,9 +22,9 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
## 3. API & Integration
|
||||
|
||||
**[ADR-003](./0003-standardized-input-validation-using-middleware.md)**: Standardized Input Validation using Middleware (Accepted)
|
||||
**[ADR-008](./0008-api-versioning-strategy.md)**: API Versioning Strategy (Accepted - Phase 1 Complete)
|
||||
**[ADR-008](./0008-api-versioning-strategy.md)**: API Versioning Strategy (Accepted - Phase 2 Complete)
|
||||
**[ADR-018](./0018-api-documentation-strategy.md)**: API Documentation Strategy (Accepted)
|
||||
**[ADR-022](./0022-real-time-notification-system.md)**: Real-time Notification System (Proposed)
|
||||
**[ADR-022](./0022-real-time-notification-system.md)**: Real-time Notification System (Accepted)
|
||||
**[ADR-028](./0028-api-response-standardization.md)**: API Response Standardization and Envelope Pattern (Implemented)
|
||||
|
||||
## 4. Security & Compliance
|
||||
@@ -33,12 +35,12 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-029](./0029-secret-rotation-and-key-management.md)**: Secret Rotation and Key Management Strategy (Proposed)
|
||||
**[ADR-032](./0032-rate-limiting-strategy.md)**: Rate Limiting Strategy (Accepted)
|
||||
**[ADR-033](./0033-file-upload-and-storage-strategy.md)**: File Upload and Storage Strategy (Accepted)
|
||||
**[ADR-048](./0048-authentication-strategy.md)**: Authentication Strategy (Partially Implemented)
|
||||
**[ADR-048](./0048-authentication-strategy.md)**: Authentication Strategy (Accepted)
|
||||
|
||||
## 5. Observability & Monitoring
|
||||
|
||||
**[ADR-004](./0004-standardized-application-wide-structured-logging.md)**: Standardized Application-Wide Structured Logging (Accepted)
|
||||
**[ADR-015](./0015-error-tracking-and-observability.md)**: Error Tracking and Observability (Partial)
|
||||
**[ADR-015](./0015-error-tracking-and-observability.md)**: Error Tracking and Observability (Accepted)
|
||||
**[ADR-050](./0050-postgresql-function-observability.md)**: PostgreSQL Function Observability (Accepted)
|
||||
**[ADR-051](./0051-asynchronous-context-propagation.md)**: Asynchronous Context Propagation (Accepted)
|
||||
**[ADR-052](./0052-granular-debug-logging-strategy.md)**: Granular Debug Logging Strategy (Accepted)
|
||||
@@ -52,7 +54,7 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-024](./0024-feature-flagging-strategy.md)**: Feature Flagging Strategy (Proposed)
|
||||
**[ADR-037](./0037-scheduled-jobs-and-cron-pattern.md)**: Scheduled Jobs and Cron Pattern (Accepted)
|
||||
**[ADR-038](./0038-graceful-shutdown-pattern.md)**: Graceful Shutdown Pattern (Accepted)
|
||||
**[ADR-053](./0053-worker-health-checks-and-monitoring.md)**: Worker Health Checks and Monitoring (Proposed)
|
||||
**[ADR-053](./0053-worker-health-checks.md)**: Worker Health Checks and Stalled Job Monitoring (Accepted)
|
||||
**[ADR-054](./0054-bugsink-gitea-issue-sync.md)**: Bugsink to Gitea Issue Synchronization (Proposed)
|
||||
|
||||
## 7. Frontend / User Interface
|
||||
|
||||
@@ -1,10 +1,168 @@
|
||||
# Database Setup
|
||||
# Database Architecture
|
||||
|
||||
Flyer Crawler uses PostgreSQL with several extensions for full-text search, geographic data, and UUID generation.
|
||||
**Version**: 0.12.20
|
||||
**Last Updated**: 2026-01-28
|
||||
|
||||
Flyer Crawler uses PostgreSQL 16 with PostGIS for geographic data, pg_trgm for fuzzy text search, and uuid-ossp for UUID generation. The database contains 65 tables organized into logical domains.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [Schema Overview](#schema-overview)
|
||||
2. [Database Setup](#database-setup)
|
||||
3. [Schema Reference](#schema-reference)
|
||||
4. [Related Documentation](#related-documentation)
|
||||
|
||||
---
|
||||
|
||||
## Required Extensions
|
||||
## Schema Overview
|
||||
|
||||
The database is organized into the following domains:
|
||||
|
||||
### Core Infrastructure (6 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ----------------------- | ----------------------------------------- | ----------------- |
|
||||
| `users` | Authentication credentials and login data | `user_id` (UUID) |
|
||||
| `profiles` | Public user data, preferences, points | `user_id` (UUID) |
|
||||
| `addresses` | Normalized address storage with geocoding | `address_id` |
|
||||
| `activity_log` | User activity audit trail | `activity_log_id` |
|
||||
| `password_reset_tokens` | Temporary tokens for password reset | `token_id` |
|
||||
| `schema_info` | Schema deployment metadata | `environment` |
|
||||
|
||||
### Stores and Locations (4 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ------------------------ | --------------------------------------- | ------------------- |
|
||||
| `stores` | Grocery store chains (Safeway, Kroger) | `store_id` |
|
||||
| `store_locations` | Physical store locations with addresses | `store_location_id` |
|
||||
| `favorite_stores` | User store favorites | `user_id, store_id` |
|
||||
| `store_receipt_patterns` | Receipt text patterns for store ID | `pattern_id` |
|
||||
|
||||
### Flyers and Items (7 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ----------------------- | -------------------------------------- | ------------------------ |
|
||||
| `flyers` | Uploaded flyer metadata and status | `flyer_id` |
|
||||
| `flyer_items` | Individual deals extracted from flyers | `flyer_item_id` |
|
||||
| `flyer_locations` | Flyer-to-location associations | `flyer_location_id` |
|
||||
| `categories` | Item categorization (Produce, Dairy) | `category_id` |
|
||||
| `master_grocery_items` | Canonical grocery item dictionary | `master_grocery_item_id` |
|
||||
| `master_item_aliases` | Alternative names for master items | `alias_id` |
|
||||
| `unmatched_flyer_items` | Items pending master item matching | `unmatched_item_id` |
|
||||
|
||||
### Products and Brands (2 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ---------- | ---------------------------------------------- | ------------ |
|
||||
| `brands` | Brand names (Coca-Cola, Kraft) | `brand_id` |
|
||||
| `products` | Specific products (master item + brand + size) | `product_id` |
|
||||
|
||||
### Price Tracking (3 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ----------------------- | ---------------------------------- | ------------------ |
|
||||
| `item_price_history` | Historical prices for master items | `price_history_id` |
|
||||
| `user_submitted_prices` | User-contributed price reports | `submission_id` |
|
||||
| `suggested_corrections` | Suggested edits to flyer items | `correction_id` |
|
||||
|
||||
### User Features (8 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| -------------------- | ------------------------------------ | --------------------------- |
|
||||
| `user_watched_items` | Items user wants to track prices for | `user_watched_item_id` |
|
||||
| `user_alerts` | Price alert thresholds | `alert_id` |
|
||||
| `notifications` | User notifications | `notification_id` |
|
||||
| `user_item_aliases` | User-defined item name aliases | `alias_id` |
|
||||
| `user_follows` | User-to-user follow relationships | `follower_id, following_id` |
|
||||
| `user_reactions` | Reactions to content (likes, etc.) | `reaction_id` |
|
||||
| `budgets` | User-defined spending budgets | `budget_id` |
|
||||
| `search_queries` | Search history for analytics | `query_id` |
|
||||
|
||||
### Shopping Lists (5 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ----------------------- | ------------------------ | ------------------------- |
|
||||
| `shopping_lists` | User shopping lists | `shopping_list_id` |
|
||||
| `shopping_list_items` | Items on shopping lists | `shopping_list_item_id` |
|
||||
| `shared_shopping_lists` | Shopping list sharing | `shared_shopping_list_id` |
|
||||
| `shopping_trips` | Completed shopping trips | `trip_id` |
|
||||
| `shopping_trip_items` | Items purchased on trips | `trip_item_id` |
|
||||
|
||||
### Recipes (11 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| --------------------------------- | -------------------------------- | ------------------------- |
|
||||
| `recipes` | User recipes with metadata | `recipe_id` |
|
||||
| `recipe_ingredients` | Recipe ingredient list | `recipe_ingredient_id` |
|
||||
| `recipe_ingredient_substitutions` | Ingredient alternatives | `substitution_id` |
|
||||
| `tags` | Recipe tags (vegan, quick, etc.) | `tag_id` |
|
||||
| `recipe_tags` | Recipe-to-tag associations | `recipe_id, tag_id` |
|
||||
| `appliances` | Kitchen appliances | `appliance_id` |
|
||||
| `recipe_appliances` | Appliances needed for recipes | `recipe_id, appliance_id` |
|
||||
| `recipe_ratings` | User ratings for recipes | `rating_id` |
|
||||
| `recipe_comments` | User comments on recipes | `comment_id` |
|
||||
| `favorite_recipes` | User recipe favorites | `user_id, recipe_id` |
|
||||
| `recipe_collections` | User recipe collections | `collection_id` |
|
||||
|
||||
### Meal Planning (3 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ------------------- | -------------------------- | ----------------- |
|
||||
| `menu_plans` | Weekly/monthly meal plans | `menu_plan_id` |
|
||||
| `shared_menu_plans` | Menu plan sharing | `share_id` |
|
||||
| `planned_meals` | Individual meals in a plan | `planned_meal_id` |
|
||||
|
||||
### Pantry and Inventory (5 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| -------------------- | ------------------------------------ | ----------------- |
|
||||
| `pantry_items` | User pantry inventory | `pantry_item_id` |
|
||||
| `pantry_locations` | Storage locations (fridge, freezer) | `location_id` |
|
||||
| `expiry_date_ranges` | Reference shelf life data | `expiry_range_id` |
|
||||
| `expiry_alerts` | User expiry notification preferences | `expiry_alert_id` |
|
||||
| `expiry_alert_log` | Sent expiry notifications | `alert_log_id` |
|
||||
|
||||
### Receipts (3 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ------------------------ | ----------------------------- | ----------------- |
|
||||
| `receipts` | Scanned receipt metadata | `receipt_id` |
|
||||
| `receipt_items` | Items parsed from receipts | `receipt_item_id` |
|
||||
| `receipt_processing_log` | OCR/AI processing audit trail | `log_id` |
|
||||
|
||||
### UPC Scanning (2 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ---------------------- | ------------------------------- | ----------- |
|
||||
| `upc_scan_history` | User barcode scan history | `scan_id` |
|
||||
| `upc_external_lookups` | External UPC API response cache | `lookup_id` |
|
||||
|
||||
### Gamification (2 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ------------------- | ---------------------------- | ------------------------- |
|
||||
| `achievements` | Defined achievements | `achievement_id` |
|
||||
| `user_achievements` | Achievements earned by users | `user_id, achievement_id` |
|
||||
|
||||
### User Preferences (3 tables)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| --------------------------- | ---------------------------- | ------------------------- |
|
||||
| `dietary_restrictions` | Defined dietary restrictions | `restriction_id` |
|
||||
| `user_dietary_restrictions` | User dietary preferences | `user_id, restriction_id` |
|
||||
| `user_appliances` | Appliances user owns | `user_id, appliance_id` |
|
||||
|
||||
### Reference Data (1 table)
|
||||
|
||||
| Table | Purpose | Primary Key |
|
||||
| ------------------ | ----------------------- | --------------- |
|
||||
| `unit_conversions` | Unit conversion factors | `conversion_id` |
|
||||
|
||||
---
|
||||
|
||||
## Database Setup
|
||||
|
||||
### Required Extensions
|
||||
|
||||
| Extension | Purpose |
|
||||
| ----------- | ------------------------------------------- |
|
||||
@@ -14,7 +172,7 @@ Flyer Crawler uses PostgreSQL with several extensions for full-text search, geog
|
||||
|
||||
---
|
||||
|
||||
## Database Users
|
||||
### Database Users
|
||||
|
||||
This project uses **environment-specific database users** to isolate production and test environments:
|
||||
|
||||
|
||||
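A rough sketch of how these environment-specific credentials might be consumed by the application (variable names follow the environment variable reference; the actual selection logic in `src/config/env.ts` may differ):

```typescript
import { Pool } from 'pg';

// Sketch only: selecting _PROD vs _TEST credentials by environment.
const isProd = process.env.NODE_ENV === 'production';

export const pool = new Pool({
  host: process.env.DB_HOST,
  port: Number(process.env.DB_PORT ?? 5432),
  user: isProd ? process.env.DB_USER_PROD : process.env.DB_USER_TEST,
  password: isProd ? process.env.DB_PASSWORD_PROD : process.env.DB_PASSWORD_TEST,
  database: isProd ? process.env.DB_DATABASE_PROD : process.env.DB_DATABASE_TEST,
});
```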
@@ -1,7 +1,7 @@
|
||||
# Flyer Crawler - System Architecture Overview
|
||||
|
||||
**Version**: 0.12.5
|
||||
**Last Updated**: 2026-01-22
|
||||
**Version**: 0.12.20
|
||||
**Last Updated**: 2026-01-28
|
||||
**Platform**: Linux (Production and Development)
|
||||
|
||||
---
|
||||
@@ -41,7 +41,7 @@
|
||||
|
||||
## System Architecture Diagram
|
||||
|
||||
```
|
||||
```text
|
||||
+-----------------------------------------------------------------------------------+
|
||||
| CLIENT LAYER |
|
||||
+-----------------------------------------------------------------------------------+
|
||||
@@ -153,10 +153,10 @@
|
||||
| Component | Technology | Version | Purpose |
|
||||
| ---------------------- | ---------- | -------- | -------------------------------- |
|
||||
| **Runtime** | Node.js | 22.x LTS | Server-side JavaScript runtime |
|
||||
| **Language** | TypeScript | 5.9.x | Type-safe JavaScript superset |
|
||||
| **Web Framework** | Express.js | 5.1.x | HTTP server and routing |
|
||||
| **Frontend Framework** | React | 19.2.x | UI component library |
|
||||
| **Build Tool** | Vite | 7.2.x | Frontend bundling and dev server |
|
||||
| **Language** | TypeScript | 5.9.3 | Type-safe JavaScript superset |
|
||||
| **Web Framework** | Express.js | 5.1.0 | HTTP server and routing |
|
||||
| **Frontend Framework** | React | 19.2.0 | UI component library |
|
||||
| **Build Tool** | Vite | 7.2.4 | Frontend bundling and dev server |
|
||||
|
||||
### Data Storage
|
||||
|
||||
@@ -176,23 +176,23 @@
|
||||
| **OAuth** | Google, GitHub | Social authentication |
|
||||
| **Email** | Nodemailer (SMTP) | Transactional emails |
|
||||
|
||||
### Background Processing
|
||||
### Background Processing Stack
|
||||
|
||||
| Component | Technology | Version | Purpose |
|
||||
| ------------------- | ---------- | ------- | --------------------------------- |
|
||||
| **Job Queues** | BullMQ | 5.65.x | Reliable async job processing |
|
||||
| **Job Queues** | BullMQ | 5.65.1 | Reliable async job processing |
|
||||
| **Process Manager** | PM2 | Latest | Process management and clustering |
|
||||
| **Scheduler** | node-cron | 4.2.x | Scheduled tasks |
|
||||
| **Scheduler** | node-cron | 4.2.1 | Scheduled tasks |
|
||||
|
||||
### Frontend Stack
|
||||
|
||||
| Component | Technology | Version | Purpose |
|
||||
| -------------------- | -------------- | ------- | ---------------------------------------- |
|
||||
| **State Management** | TanStack Query | 5.90.x | Server state caching and synchronization |
|
||||
| **Routing** | React Router | 7.9.x | Client-side routing |
|
||||
| **Styling** | Tailwind CSS | 4.1.x | Utility-first CSS framework |
|
||||
| **Icons** | Lucide React | 0.555.x | Icon components |
|
||||
| **Charts** | Recharts | 3.4.x | Data visualization |
|
||||
| **State Management** | TanStack Query | 5.90.12 | Server state caching and synchronization |
|
||||
| **Routing** | React Router | 7.9.6 | Client-side routing |
|
||||
| **Styling** | Tailwind CSS | 4.1.17 | Utility-first CSS framework |
|
||||
| **Icons** | Lucide React | 0.555.0 | Icon components |
|
||||
| **Charts** | Recharts | 3.4.1 | Data visualization |
|
||||
|
||||
### Observability and Quality
|
||||
|
||||
@@ -221,7 +221,7 @@ The frontend is a single-page application (SPA) built with React 19 and Vite.
|
||||
|
||||
**Directory Structure**:
|
||||
|
||||
```
|
||||
```text
|
||||
src/
|
||||
+-- components/ # Reusable UI components
|
||||
+-- contexts/ # React context providers
|
||||
@@ -244,17 +244,30 @@ The backend is a RESTful API server built with Express.js 5.
|
||||
- Structured logging with Pino
|
||||
- Standardized error handling (ADR-001)
|
||||
|
||||
**API Route Modules**:
|
||||
| Route | Purpose |
|
||||
|-------|---------|
|
||||
| `/api/auth` | Authentication (login, register, OAuth) |
|
||||
| `/api/users` | User profile management |
|
||||
| `/api/flyers` | Flyer CRUD and processing |
|
||||
| `/api/recipes` | Recipe management |
|
||||
| `/api/deals` | Best prices and deal discovery |
|
||||
| `/api/stores` | Store management |
|
||||
| `/api/admin` | Administrative functions |
|
||||
| `/api/health` | Health checks and monitoring |
|
||||
**API Route Modules** (all versioned under `/api/v1/*`):
|
||||
|
||||
| Route | Purpose |
|
||||
| ------------------------- | ----------------------------------------------- |
|
||||
| `/api/v1/auth` | Authentication (login, register, OAuth) |
|
||||
| `/api/v1/health` | Health checks and monitoring |
|
||||
| `/api/v1/system` | System administration (PM2 status, server info) |
|
||||
| `/api/v1/users` | User profile management |
|
||||
| `/api/v1/ai` | AI-powered features and flyer processing |
|
||||
| `/api/v1/admin` | Administrative functions |
|
||||
| `/api/v1/budgets` | Budget management and spending analysis |
|
||||
| `/api/v1/achievements` | Gamification and achievement system |
|
||||
| `/api/v1/flyers` | Flyer CRUD and processing |
|
||||
| `/api/v1/recipes` | Recipe management and recommendations |
|
||||
| `/api/v1/personalization` | Master items and user preferences |
|
||||
| `/api/v1/price-history` | Price tracking and trend analysis |
|
||||
| `/api/v1/stats` | Public statistics and analytics |
|
||||
| `/api/v1/upc` | UPC barcode scanning and product lookup |
|
||||
| `/api/v1/inventory` | Inventory and expiry tracking |
|
||||
| `/api/v1/receipts` | Receipt scanning and purchase history |
|
||||
| `/api/v1/deals` | Best prices and deal discovery |
|
||||
| `/api/v1/reactions` | Social features (reactions, sharing) |
|
||||
| `/api/v1/stores` | Store management and location services |
|
||||
| `/api/v1/categories` | Category browsing and product categorization |
|
||||
|
||||
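A rough sketch of how these modules might be mounted under the versioned prefix (module paths are illustrative; the real wiring uses the version router factory from ADR-008):

```typescript
import express, { Router } from 'express';
// Module paths below are illustrative, not the project's actual file names.
import authRoutes from './routes/auth.routes';
import flyerRoutes from './routes/flyer.routes';
import dealRoutes from './routes/deals.routes';

const app = express();
const v1 = Router();

v1.use('/auth', authRoutes);
v1.use('/flyers', flyerRoutes);
v1.use('/deals', dealRoutes);

// Every route module hangs off a single versioned prefix.
app.use('/api/v1', v1);
```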
### Database (PostgreSQL/PostGIS)
|
||||
|
||||
@@ -331,7 +344,7 @@ BullMQ workers handle asynchronous processing tasks. PM2 manages both the API se
|
||||
|
||||
### Flyer Processing Pipeline
|
||||
|
||||
```
|
||||
```text
|
||||
+-------------+ +----------------+ +------------------+ +---------------+
|
||||
| User | | Express | | BullMQ | | PostgreSQL |
|
||||
| Upload +---->+ Route +---->+ Queue +---->+ Storage |
|
||||
@@ -395,7 +408,7 @@ BullMQ workers handle asynchronous processing tasks. PM2 manages both the API se
|
||||
|
||||
The application follows a strict layered architecture as defined in ADR-035.
|
||||
|
||||
```
|
||||
```text
|
||||
+-----------------------------------------------------------------------+
|
||||
| ROUTES LAYER |
|
||||
| Responsibilities: |
|
||||
@@ -458,7 +471,7 @@ The application follows a strict layered architecture as defined in ADR-035.
|
||||
|
||||
### Entity Relationship Overview
|
||||
|
||||
```
|
||||
```text
|
||||
+------------------+ +------------------+ +------------------+
|
||||
| users | | profiles | | addresses |
|
||||
|------------------| |------------------| |------------------|
|
||||
@@ -537,7 +550,7 @@ The application follows a strict layered architecture as defined in ADR-035.
|
||||
|
||||
### JWT Token Architecture
|
||||
|
||||
```
|
||||
```text
|
||||
+-------------------+ +-------------------+ +-------------------+
|
||||
| Login Request | | Server | | Database |
|
||||
| (email/pass) +---->+ Validates +---->+ Verify User |
|
||||
@@ -576,7 +589,7 @@ The application follows a strict layered architecture as defined in ADR-035.
|
||||
|
||||
### Protected Route Flow
|
||||
|
||||
```
|
||||
```text
|
||||
+-------------------+ +-------------------+ +-------------------+
|
||||
| API Request | | requireAuth | | JWT Strategy |
|
||||
| + Bearer Token +---->+ Middleware +---->+ Validate |
|
||||
@@ -603,7 +616,7 @@ The application follows a strict layered architecture as defined in ADR-035.
|
||||
|
||||
### Worker Architecture
|
||||
|
||||
```
|
||||
```text
|
||||
+-------------------+ +-------------------+ +-------------------+
|
||||
| API Server | | Redis | | Worker Process |
|
||||
| (Queue Producer)| | (Job Storage) | | (Consumer) |
|
||||
@@ -635,7 +648,7 @@ The application follows a strict layered architecture as defined in ADR-035.
|
||||
|
||||
Jobs use exponential backoff for retries:
|
||||
|
||||
```
|
||||
```text
|
||||
Attempt 1: Immediate
|
||||
Attempt 2: Initial delay (e.g., 5 seconds)
|
||||
Attempt 3: 2x delay (e.g., 10 seconds)
|
||||
@@ -658,7 +671,7 @@ Attempt 4: 4x delay (e.g., 20 seconds)
|
||||
|
||||
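Expressed as BullMQ job options, the retry policy above might look like the following sketch (the project's actual defaults may differ):

```typescript
import type { JobsOptions } from 'bullmq';

// Mirrors the policy above: 4 attempts, doubling the delay after each failure.
export const flyerJobOptions: JobsOptions = {
  attempts: 4,
  backoff: { type: 'exponential', delay: 5000 }, // 5s, then 10s, then 20s
};
```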
### Environment Overview
|
||||
|
||||
```
|
||||
```text
|
||||
+-----------------------------------------------------------------------------------+
|
||||
| DEVELOPMENT |
|
||||
+-----------------------------------------------------------------------------------+
|
||||
@@ -710,7 +723,7 @@ Attempt 4: 4x delay (e.g., 20 seconds)
|
||||
|
||||
### Deployment Pipeline (ADR-017)
|
||||
|
||||
```
|
||||
```text
|
||||
+------------+ +------------+ +------------+ +------------+
|
||||
| Push to | | Gitea | | Build & | | Deploy |
|
||||
| main +---->+ Actions +---->+ Test +---->+ to Prod |
|
||||
@@ -839,22 +852,55 @@ The system architecture is governed by Architecture Decision Records (ADRs). Key
|
||||
| File | Purpose |
|
||||
| ----------------------------------------------- | --------------------------------------- |
|
||||
| `src/services/flyerProcessingService.server.ts` | Flyer processing pipeline orchestration |
|
||||
| `src/services/flyerAiProcessor.server.ts` | AI extraction for flyers |
|
||||
| `src/services/aiService.server.ts` | Google Gemini AI integration |
|
||||
| `src/services/cacheService.server.ts` | Redis caching abstraction |
|
||||
| `src/services/emailService.server.ts` | Email sending |
|
||||
| `src/services/queues.server.ts` | BullMQ queue definitions |
|
||||
| `src/services/queueService.server.ts` | Queue management and scheduling |
|
||||
| `src/services/workers.server.ts` | BullMQ worker definitions |
|
||||
| `src/services/websocketService.server.ts` | Real-time WebSocket notifications |
|
||||
| `src/services/receiptService.server.ts` | Receipt scanning and OCR |
|
||||
| `src/services/upcService.server.ts` | UPC barcode lookup |
|
||||
| `src/services/expiryService.server.ts` | Pantry expiry tracking |
|
||||
| `src/services/geocodingService.server.ts` | Address geocoding |
|
||||
| `src/services/analyticsService.server.ts` | Analytics and reporting |
|
||||
| `src/services/monitoringService.server.ts` | Health monitoring |
|
||||
| `src/services/barcodeService.server.ts` | Barcode detection |
|
||||
| `src/services/logger.server.ts` | Structured logging (Pino) |
|
||||
| `src/services/redis.server.ts` | Redis connection management |
|
||||
| `src/services/sentry.server.ts` | Error tracking (Sentry/Bugsink) |
|
||||
|
||||
### Database Files
|
||||
|
||||
| File | Purpose |
|
||||
| ---------------------------------- | -------------------------------------------- |
|
||||
| `src/services/db/connection.db.ts` | Database pool and transaction management |
|
||||
| `src/services/db/errors.db.ts` | Database error types |
|
||||
| `src/services/db/user.db.ts` | User repository |
|
||||
| `src/services/db/flyer.db.ts` | Flyer repository |
|
||||
| `sql/master_schema_rollup.sql` | Complete database schema (for test DB setup) |
|
||||
| `sql/initial_schema.sql` | Fresh installation schema |
|
||||
| File | Purpose |
|
||||
| --------------------------------------- | -------------------------------------------- |
|
||||
| `src/services/db/connection.db.ts` | Database pool and transaction management |
|
||||
| `src/services/db/errors.db.ts` | Database error types |
|
||||
| `src/services/db/index.db.ts` | Repository exports |
|
||||
| `src/services/db/user.db.ts` | User repository |
|
||||
| `src/services/db/flyer.db.ts` | Flyer repository |
|
||||
| `src/services/db/store.db.ts` | Store repository |
|
||||
| `src/services/db/storeLocation.db.ts` | Store location repository |
|
||||
| `src/services/db/recipe.db.ts` | Recipe repository |
|
||||
| `src/services/db/category.db.ts` | Category repository |
|
||||
| `src/services/db/personalization.db.ts` | Master items and personalization |
|
||||
| `src/services/db/shopping.db.ts` | Shopping lists repository |
|
||||
| `src/services/db/deals.db.ts` | Deals and best prices repository |
|
||||
| `src/services/db/price.db.ts` | Price history repository |
|
||||
| `src/services/db/receipt.db.ts` | Receipt repository |
|
||||
| `src/services/db/upc.db.ts` | UPC scan history repository |
|
||||
| `src/services/db/expiry.db.ts` | Expiry tracking repository |
|
||||
| `src/services/db/gamification.db.ts` | Achievements repository |
|
||||
| `src/services/db/budget.db.ts` | Budget repository |
|
||||
| `src/services/db/reaction.db.ts` | User reactions repository |
|
||||
| `src/services/db/notification.db.ts` | Notifications repository |
|
||||
| `src/services/db/address.db.ts` | Address repository |
|
||||
| `src/services/db/admin.db.ts` | Admin operations repository |
|
||||
| `src/services/db/conversion.db.ts` | Unit conversion repository |
|
||||
| `src/services/db/flyerLocation.db.ts` | Flyer locations repository |
|
||||
| `sql/master_schema_rollup.sql` | Complete database schema (for test DB setup) |
|
||||
| `sql/initial_schema.sql` | Fresh installation schema |
|
||||
|
||||
### Type Definitions
|
||||
|
||||
|
||||
@@ -2,6 +2,22 @@
|
||||
|
||||
Common code patterns extracted from Architecture Decision Records (ADRs). Use these as templates when writing new code.
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Pattern | Key Function/Class | Import From |
|
||||
| ------------------ | ------------------------------------------------- | ------------------------------------- |
|
||||
| Error Handling | `handleDbError()`, `NotFoundError` | `src/services/db/errors.db.ts` |
|
||||
| Repository Methods | `get*`, `find*`, `list*` | `src/services/db/*.db.ts` |
|
||||
| API Responses | `sendSuccess()`, `sendPaginated()`, `sendError()` | `src/utils/apiResponse.ts` |
|
||||
| Transactions | `withTransaction()` | `src/services/db/connection.db.ts` |
|
||||
| Validation | `validateRequest()` | `src/middleware/validation.ts` |
|
||||
| Authentication | `authenticateJWT` | `src/middleware/auth.ts` |
|
||||
| Caching | `cacheService` | `src/services/cache.server.ts` |
|
||||
| Background Jobs | Queue classes | `src/services/queues.server.ts` |
|
||||
| Feature Flags | `isFeatureEnabled()`, `useFeatureFlag()` | `src/services/featureFlags.server.ts` |
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Error Handling](#error-handling)
|
||||
@@ -12,12 +28,13 @@ Common code patterns extracted from Architecture Decision Records (ADRs). Use th
|
||||
- [Authentication](#authentication)
|
||||
- [Caching](#caching)
|
||||
- [Background Jobs](#background-jobs)
|
||||
- [Feature Flags](#feature-flags)
|
||||
|
||||
---
|
||||
|
||||
## Error Handling
|
||||
|
||||
**ADR**: [ADR-001](../adr/0001-standardized-error-handling-for-database-operations.md)
|
||||
**ADR**: [ADR-001](../adr/0001-standardized-error-handling.md)
|
||||
|
||||
### Repository Layer Error Handling
|
||||
|
||||
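A condensed sketch of the pattern, using the error classes from `errors.db.ts` (the query, import paths, and `NotFoundError` constructor are illustrative):

```typescript
import type { PoolClient } from 'pg';
import { NotFoundError, DatabaseError } from './errors.db';
import { pool } from './connection.db'; // illustrative import
import type { Flyer } from '../../types'; // illustrative import

export async function getFlyerById(flyerId: number, client?: PoolClient): Promise<Flyer> {
  const db = client ?? pool;
  try {
    const result = await db.query('SELECT * FROM flyers WHERE flyer_id = $1', [flyerId]);
    if (result.rows.length === 0) {
      // A typed error lets the route map this to a 404 via sendError.
      throw new NotFoundError(`Flyer ${flyerId} not found`);
    }
    return result.rows[0] as Flyer;
  } catch (error) {
    if (error instanceof NotFoundError) throw error;
    // Wrap unexpected driver errors so callers never see raw pg errors.
    throw new DatabaseError('Failed to fetch flyer', error);
  }
}
```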
@@ -78,7 +95,7 @@ throw new DatabaseError('Failed to insert flyer', originalError);
|
||||
|
||||
## Repository Patterns
|
||||
|
||||
**ADR**: [ADR-034](../adr/0034-repository-layer-method-naming-conventions.md)
|
||||
**ADR**: [ADR-034](../adr/0034-repository-pattern-standards.md)
|
||||
|
||||
### Method Naming Conventions
|
||||
|
||||
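A sketch of how the three verbs are commonly distinguished (ADR-034 is the authoritative definition; signatures here are illustrative):

```typescript
import type { PoolClient } from 'pg';
import type { Flyer } from '../types'; // illustrative import

// Assumed verb semantics, shown as an interface for brevity.
export interface FlyerRepository {
  // get*  - single row; throws NotFoundError when the id does not exist
  getFlyerById(id: number, client?: PoolClient): Promise<Flyer>;

  // find* - single row or null; "not found" is not an error
  findFlyerByUrl(url: string, client?: PoolClient): Promise<Flyer | null>;

  // list* - zero or more rows; always resolves to an array
  listActiveFlyers(client?: PoolClient): Promise<Flyer[]>;
}
```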
@@ -155,16 +172,17 @@ export async function listActiveFlyers(client?: PoolClient): Promise<Flyer[]> {
|
||||
|
||||
## API Response Patterns
|
||||
|
||||
**ADR**: [ADR-028](../adr/0028-consistent-api-response-format.md)
|
||||
**ADR**: [ADR-028](../adr/0028-api-response-standardization.md)
|
||||
|
||||
### Success Response
|
||||
|
||||
```typescript
|
||||
import { sendSuccess } from '../utils/apiResponse';
|
||||
|
||||
app.post('/api/flyers', async (req, res) => {
|
||||
app.post('/api/v1/flyers', async (req, res) => {
|
||||
const flyer = await flyerService.createFlyer(req.body);
|
||||
return sendSuccess(res, flyer, 'Flyer created successfully', 201);
|
||||
// sendSuccess(res, data, statusCode?, meta?)
|
||||
return sendSuccess(res, flyer, 201);
|
||||
});
|
||||
```
|
||||
|
||||
@@ -173,30 +191,32 @@ app.post('/api/flyers', async (req, res) => {
|
||||
```typescript
|
||||
import { sendPaginated } from '../utils/apiResponse';
|
||||
|
||||
app.get('/api/flyers', async (req, res) => {
|
||||
const { page = 1, pageSize = 20 } = req.query;
|
||||
const { items, total } = await flyerService.listFlyers(page, pageSize);
|
||||
app.get('/api/v1/flyers', async (req, res) => {
|
||||
const page = parseInt(req.query.page as string) || 1;
|
||||
const limit = parseInt(req.query.limit as string) || 20;
|
||||
const { items, total } = await flyerService.listFlyers(page, limit);
|
||||
|
||||
return sendPaginated(res, {
|
||||
items,
|
||||
total,
|
||||
page: parseInt(page),
|
||||
pageSize: parseInt(pageSize),
|
||||
});
|
||||
// sendPaginated(res, data[], { page, limit, total }, meta?)
|
||||
return sendPaginated(res, items, { page, limit, total });
|
||||
});
|
||||
```
|
||||
|
||||
### Error Response
|
||||
|
||||
```typescript
|
||||
import { sendError } from '../utils/apiResponse';
|
||||
import { sendError, sendSuccess, ErrorCode } from '../utils/apiResponse';
|
||||
|
||||
app.get('/api/flyers/:id', async (req, res) => {
|
||||
app.get('/api/v1/flyers/:id', async (req, res) => {
|
||||
try {
|
||||
const flyer = await flyerDb.getFlyerById(parseInt(req.params.id));
|
||||
return sendSuccess(res, flyer);
|
||||
} catch (error) {
|
||||
return sendError(res, error); // Automatically maps error to correct status
|
||||
// sendError(res, code, message, statusCode?, details?, meta?)
|
||||
if (error instanceof NotFoundError) {
|
||||
return sendError(res, ErrorCode.NOT_FOUND, error.message, 404);
|
||||
}
|
||||
req.log.error({ error }, `Error in ${req.originalUrl.split('?')[0]}:`);
|
||||
return sendError(res, ErrorCode.INTERNAL_ERROR, 'An error occurred', 500);
|
||||
}
|
||||
});
|
||||
```
|
||||
@@ -205,12 +225,12 @@ app.get('/api/flyers/:id', async (req, res) => {
|
||||
|
||||
## Transaction Management
|
||||
|
||||
**ADR**: [ADR-002](../adr/0002-transaction-management-pattern.md)
|
||||
**ADR**: [ADR-002](../adr/0002-standardized-transaction-management.md)
|
||||
|
||||
### Basic Transaction
|
||||
|
||||
```typescript
|
||||
import { withTransaction } from '../services/db/transaction.db';
|
||||
import { withTransaction } from '../services/db/connection.db';
|
||||
|
||||
export async function createFlyerWithItems(
|
||||
flyerData: FlyerInput,
|
||||
@@ -262,7 +282,7 @@ export async function bulkImportFlyers(flyersData: FlyerInput[]): Promise<Import
|
||||
|
||||
## Input Validation
|
||||
|
||||
**ADR**: [ADR-003](../adr/0003-input-validation-framework.md)
|
||||
**ADR**: [ADR-003](../adr/0003-standardized-input-validation-using-middleware.md)
|
||||
|
||||
### Zod Schema Definition
|
||||
|
||||
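A minimal sketch of such a schema (field names are illustrative; the real `createFlyerSchema` lives in `src/schemas/flyer.schemas.ts`):

```typescript
import { z } from 'zod';

// Field names are illustrative; the real schema validates the full flyer payload.
export const createFlyerSchema = z.object({
  store_id: z.number().int().positive(),
  title: z.string().min(1),
  valid_from: z.coerce.date().optional(),
  valid_to: z.coerce.date().optional(),
});

export type CreateFlyerInput = z.infer<typeof createFlyerSchema>;
```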
@@ -298,10 +318,10 @@ export type CreateFlyerInput = z.infer<typeof createFlyerSchema>;
|
||||
import { validateRequest } from '../middleware/validation';
|
||||
import { createFlyerSchema } from '../schemas/flyer.schemas';
|
||||
|
||||
app.post('/api/flyers', validateRequest(createFlyerSchema), async (req, res) => {
|
||||
app.post('/api/v1/flyers', validateRequest(createFlyerSchema), async (req, res) => {
|
||||
// req.body is now type-safe and validated
|
||||
const flyer = await flyerService.createFlyer(req.body);
|
||||
return sendSuccess(res, flyer, 'Flyer created successfully', 201);
|
||||
return sendSuccess(res, flyer, 201);
|
||||
});
|
||||
```
|
||||
|
||||
@@ -331,7 +351,7 @@ export async function processFlyer(data: unknown): Promise<Flyer> {
|
||||
import { authenticateJWT } from '../middleware/auth';
|
||||
|
||||
app.get(
|
||||
'/api/profile',
|
||||
'/api/v1/profile',
|
||||
authenticateJWT, // Middleware adds req.user
|
||||
async (req, res) => {
|
||||
// req.user is guaranteed to exist
|
||||
@@ -347,7 +367,7 @@ app.get(
|
||||
import { optionalAuth } from '../middleware/auth';
|
||||
|
||||
app.get(
|
||||
'/api/flyers',
|
||||
'/api/v1/flyers',
|
||||
optionalAuth, // req.user may or may not exist
|
||||
async (req, res) => {
|
||||
const flyers = req.user
|
||||
@@ -374,7 +394,7 @@ export function generateToken(user: User): string {
|
||||
|
||||
## Caching
|
||||
|
||||
**ADR**: [ADR-029](../adr/0029-redis-caching-strategy.md)
|
||||
**ADR**: [ADR-009](../adr/0009-caching-strategy-for-read-heavy-operations.md)
|
||||
|
||||
### Cache Pattern
|
||||
|
||||
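A sketch of the read-through pattern (the `get`/`set` method names on `cacheService` are assumptions):

```typescript
import { cacheService } from '../services/cache.server';
import * as flyerDb from '../services/db/flyer.db';
import type { Flyer } from '../types'; // illustrative import

const FLYER_TTL_SECONDS = 3600;

// Read-through cache: try Redis first, fall back to PostgreSQL, then repopulate.
export async function getFlyerCached(id: number): Promise<Flyer> {
  const key = `flyer:${id}`;

  const cached = await cacheService.get<Flyer>(key);
  if (cached) return cached;

  const flyer = await flyerDb.getFlyerById(id);
  await cacheService.set(key, flyer, FLYER_TTL_SECONDS); // assumed (key, value, ttl) signature
  return flyer;
}
```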
@@ -414,7 +434,7 @@ export async function updateFlyer(id: number, data: UpdateFlyerInput): Promise<F
|
||||
|
||||
## Background Jobs
|
||||
|
||||
**ADR**: [ADR-036](../adr/0036-background-job-processing-architecture.md)
|
||||
**ADR**: [ADR-006](../adr/0006-background-job-processing-and-task-queues.md)
|
||||
|
||||
### Queue Job
|
||||
|
||||
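A minimal sketch of defining a queue and enqueueing a job (queue and job names are illustrative; the project's real queues live in `src/services/queues.server.ts`):

```typescript
import { Queue } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(process.env.REDIS_URL ?? 'redis://localhost:6379', {
  maxRetriesPerRequest: null,
});

// Queue and job names are illustrative.
export const flyerQueue = new Queue('flyer-processing', { connection });

export async function enqueueFlyerProcessing(flyerId: number): Promise<void> {
  await flyerQueue.add(
    'process-flyer',
    { flyerId },
    { attempts: 3, backoff: { type: 'exponential', delay: 5000 }, removeOnComplete: true },
  );
}
```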
@@ -473,6 +493,153 @@ const flyerWorker = new Worker(
|
||||
|
||||
---
|
||||
|
||||
## Feature Flags
|
||||
|
||||
**ADR**: [ADR-024](../adr/0024-feature-flagging-strategy.md)
|
||||
|
||||
Feature flags enable controlled feature rollouts, A/B testing, and disabling a feature in production without a redeploy. All flags default to `false` (opt-in model).
|
||||
|
||||
### Backend Usage
|
||||
|
||||
```typescript
|
||||
import { isFeatureEnabled, getFeatureFlags } from '../services/featureFlags.server';
|
||||
|
||||
// Check a specific flag in route handler
|
||||
router.get('/dashboard', async (req, res) => {
|
||||
if (isFeatureEnabled('newDashboard')) {
|
||||
return sendSuccess(res, { version: 'v2', data: await getNewDashboardData() });
|
||||
}
|
||||
return sendSuccess(res, { version: 'v1', data: await getLegacyDashboardData() });
|
||||
});
|
||||
|
||||
// Check flag in service layer
|
||||
function processFlyer(flyer: Flyer): ProcessedFlyer {
|
||||
if (isFeatureEnabled('experimentalAi')) {
|
||||
return processWithExperimentalAi(flyer);
|
||||
}
|
||||
return processWithStandardAi(flyer);
|
||||
}
|
||||
|
||||
// Get all flags (admin endpoint)
|
||||
router.get('/admin/feature-flags', requireAdmin, async (req, res) => {
|
||||
sendSuccess(res, { flags: getFeatureFlags() });
|
||||
});
|
||||
```
|
||||
|
||||
### Frontend Usage
|
||||
|
||||
```tsx
|
||||
import { useFeatureFlag, useAllFeatureFlags } from '../hooks/useFeatureFlag';
|
||||
import { FeatureFlag } from '../components/FeatureFlag';
|
||||
|
||||
// Hook approach - for logic beyond rendering
|
||||
function Dashboard() {
|
||||
const isNewDashboard = useFeatureFlag('newDashboard');
|
||||
|
||||
useEffect(() => {
|
||||
if (isNewDashboard) {
|
||||
analytics.track('new_dashboard_viewed');
|
||||
}
|
||||
}, [isNewDashboard]);
|
||||
|
||||
return isNewDashboard ? <NewDashboard /> : <LegacyDashboard />;
|
||||
}
|
||||
|
||||
// Declarative component approach
|
||||
function App() {
|
||||
return (
|
||||
<FeatureFlag feature="newDashboard" fallback={<LegacyDashboard />}>
|
||||
<NewDashboard />
|
||||
</FeatureFlag>
|
||||
);
|
||||
}
|
||||
|
||||
// Debug panel showing all flags
|
||||
function DebugPanel() {
|
||||
const flags = useAllFeatureFlags();
|
||||
return (
|
||||
<ul>
|
||||
{Object.entries(flags).map(([name, enabled]) => (
|
||||
<li key={name}>
|
||||
{name}: {enabled ? 'ON' : 'OFF'}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Adding a New Flag
|
||||
|
||||
1. **Backend** (`src/config/env.ts`):
|
||||
|
||||
```typescript
|
||||
// In featureFlagsSchema
|
||||
myNewFeature: booleanString(false), // FEATURE_MY_NEW_FEATURE
|
||||
|
||||
// In loadEnvVars()
|
||||
myNewFeature: process.env.FEATURE_MY_NEW_FEATURE,
|
||||
```
|
||||
|
||||
2. **Frontend** (`src/config.ts` and `src/vite-env.d.ts`):
|
||||
|
||||
```typescript
|
||||
// In config.ts featureFlags section
|
||||
myNewFeature: import.meta.env.VITE_FEATURE_MY_NEW_FEATURE === 'true',
|
||||
|
||||
// In vite-env.d.ts
|
||||
readonly VITE_FEATURE_MY_NEW_FEATURE?: string;
|
||||
```
|
||||
|
||||
3. **Environment** (`.env.example`):
|
||||
|
||||
```bash
|
||||
# FEATURE_MY_NEW_FEATURE=false
|
||||
# VITE_FEATURE_MY_NEW_FEATURE=false
|
||||
```
|
||||
|
||||
### Testing Feature Flags
|
||||
|
||||
```typescript
|
||||
// Backend - reset modules to test different states
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
process.env.FEATURE_NEW_DASHBOARD = 'true';
|
||||
});
|
||||
|
||||
// Frontend - mock config module
|
||||
vi.mock('../config', () => ({
|
||||
default: {
|
||||
featureFlags: {
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
},
|
||||
},
|
||||
}));
|
||||
```
|
||||
|
||||
### Flag Lifecycle
|
||||
|
||||
| Phase | Actions |
|
||||
| ---------- | -------------------------------------------------------------- |
|
||||
| **Add** | Add to schemas (backend + frontend), default `false`, document |
|
||||
| **Enable** | Set env var `='true'`, restart application |
|
||||
| **Remove** | Remove conditional code, remove from schemas, remove env vars |
|
||||
| **Sunset** | Max 3 months after full rollout - remove flag |
|
||||
|
||||
### Current Flags
|
||||
|
||||
| Flag | Backend Env Var | Frontend Env Var | Purpose |
|
||||
| ---------------- | ------------------------- | ------------------------------ | ------------------------ |
|
||||
| `bugsinkSync` | `FEATURE_BUGSINK_SYNC` | `VITE_FEATURE_BUGSINK_SYNC` | Bugsink error sync |
|
||||
| `advancedRbac` | `FEATURE_ADVANCED_RBAC` | `VITE_FEATURE_ADVANCED_RBAC` | Advanced RBAC features |
|
||||
| `newDashboard` | `FEATURE_NEW_DASHBOARD` | `VITE_FEATURE_NEW_DASHBOARD` | New dashboard experience |
|
||||
| `betaRecipes` | `FEATURE_BETA_RECIPES` | `VITE_FEATURE_BETA_RECIPES` | Beta recipe features |
|
||||
| `experimentalAi` | `FEATURE_EXPERIMENTAL_AI` | `VITE_FEATURE_EXPERIMENTAL_AI` | Experimental AI features |
|
||||
| `debugMode` | `FEATURE_DEBUG_MODE` | `VITE_FEATURE_DEBUG_MODE` | Debug mode |
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [ADR Index](../adr/index.md) - All architecture decision records
|
||||
|
||||
@@ -229,7 +229,7 @@ SELECT * FROM flyers WHERE store_id = 1;
|
||||
- Add missing indexes
|
||||
- Optimize WHERE clauses
|
||||
- Use connection pooling
|
||||
- See [ADR-034](../adr/0034-repository-layer-method-naming-conventions.md)
|
||||
- See [ADR-034](../adr/0034-repository-pattern-standards.md)
|
||||
|
||||
---
|
||||
|
||||
@@ -237,7 +237,7 @@ SELECT * FROM flyers WHERE store_id = 1;
|
||||
|
||||
### Tests Pass on Windows, Fail in Container
|
||||
|
||||
**Cause**: Platform-specific behavior (ADR-014)
|
||||
**Cause**: Platform-specific behavior ([ADR-014](../adr/0014-containerization-and-deployment-strategy.md))
|
||||
|
||||
**Rule**: Container results are authoritative. Windows results are unreliable.
|
||||
|
||||
|
||||
@@ -93,7 +93,7 @@ When the container starts (`scripts/dev-entrypoint.sh`):
|
||||
|
||||
PM2 manages three processes in the dev container:
|
||||
|
||||
```
|
||||
```text
|
||||
+--------------------+ +------------------------+ +--------------------+
|
||||
| flyer-crawler- | | flyer-crawler- | | flyer-crawler- |
|
||||
| api-dev | | worker-dev | | vite-dev |
|
||||
@@ -404,5 +404,5 @@ podman exec -it flyer-crawler-dev pm2 restart flyer-crawler-api-dev
|
||||
- [DEBUGGING.md](DEBUGGING.md) - Debugging strategies
|
||||
- [LOGSTASH-QUICK-REF.md](../operations/LOGSTASH-QUICK-REF.md) - Logstash quick reference
|
||||
- [DEV-CONTAINER-BUGSINK.md](../DEV-CONTAINER-BUGSINK.md) - Bugsink setup in dev container
|
||||
- [ADR-014](../adr/0014-linux-only-platform.md) - Linux-only platform decision
|
||||
- [ADR-050](../adr/0050-postgresql-function-observability.md) - PostgreSQL function observability
|
||||
- [ADR-014](../adr/0014-containerization-and-deployment-strategy.md) - Containerization and deployment strategy
|
||||
- [ADR-050](../adr/0050-postgresql-function-observability.md) - PostgreSQL function observability (includes log aggregation)
|
||||
|
||||
@@ -1,5 +1,19 @@
|
||||
# Testing Guide

## Quick Reference

| Command                                                       | Purpose                      |
| ------------------------------------------------------------- | ---------------------------- |
| `podman exec -it flyer-crawler-dev npm test`                  | Run all tests                |
| `podman exec -it flyer-crawler-dev npm run test:unit`         | Unit tests (~2900)           |
| `podman exec -it flyer-crawler-dev npm run test:integration`  | Integration tests (28 files) |
| `podman exec -it flyer-crawler-dev npm run test:e2e`          | E2E tests (11 files)         |
| `podman exec -it flyer-crawler-dev npm run type-check`        | TypeScript check             |

**Critical**: Always run tests in the dev container. Windows results are unreliable.

---

## Overview
|
||||
|
||||
This project has comprehensive test coverage including unit tests, integration tests, and E2E tests. All tests must be run in the **Linux dev container environment** for reliable results.
|
||||
@@ -76,7 +90,7 @@ To verify type-check is working correctly:
|
||||
|
||||
Example error output:
|
||||
|
||||
```
|
||||
```text
|
||||
src/pages/MyDealsPage.tsx:68:31 - error TS2339: Property 'store_name' does not exist on type 'WatchedItemDeal'.
|
||||
|
||||
68 <span>{deal.store_name}</span>
|
||||
@@ -113,15 +127,26 @@ Located throughout `src/` directory alongside source files with `.test.ts` or `.
|
||||
npm run test:unit
|
||||
```
|
||||
|
||||
### Integration Tests (5 test files)
|
||||
### Integration Tests (28 test files)
|
||||
|
||||
Located in `src/tests/integration/`:
|
||||
Located in `src/tests/integration/`. Key test files include:
|
||||
|
||||
- `admin.integration.test.ts`
|
||||
- `flyer.integration.test.ts`
|
||||
- `price.integration.test.ts`
|
||||
- `public.routes.integration.test.ts`
|
||||
- `receipt.integration.test.ts`
|
||||
| Test File | Domain |
|
||||
| -------------------------------------- | -------------------------- |
|
||||
| `admin.integration.test.ts` | Admin dashboard operations |
|
||||
| `auth.integration.test.ts` | Authentication flows |
|
||||
| `budget.integration.test.ts` | Budget management |
|
||||
| `flyer.integration.test.ts` | Flyer CRUD operations |
|
||||
| `flyer-processing.integration.test.ts` | AI flyer processing |
|
||||
| `gamification.integration.test.ts` | Achievements and points |
|
||||
| `inventory.integration.test.ts` | Inventory management |
|
||||
| `notification.integration.test.ts` | User notifications |
|
||||
| `receipt.integration.test.ts` | Receipt processing |
|
||||
| `recipe.integration.test.ts` | Recipe management |
|
||||
| `shopping-list.integration.test.ts` | Shopping list operations |
|
||||
| `user.integration.test.ts` | User profile operations |
|
||||
|
||||
See `src/tests/integration/` for the complete list.
|
||||
|
||||
Requires PostgreSQL and Redis services running.
|
||||
|
||||
@@ -129,13 +154,23 @@ Requires PostgreSQL and Redis services running.
|
||||
npm run test:integration
|
||||
```
|
||||
|
||||
### E2E Tests (3 test files)
|
||||
### E2E Tests (11 test files)
|
||||
|
||||
Located in `src/tests/e2e/`:
|
||||
Located in `src/tests/e2e/`. Full user journey tests:
|
||||
|
||||
- `deals-journey.e2e.test.ts`
|
||||
- `budget-journey.e2e.test.ts`
|
||||
- `receipt-journey.e2e.test.ts`
|
||||
| Test File | Journey |
|
||||
| --------------------------------- | ----------------------------- |
|
||||
| `admin-authorization.e2e.test.ts` | Admin access control |
|
||||
| `admin-dashboard.e2e.test.ts` | Admin dashboard flows |
|
||||
| `auth.e2e.test.ts` | Login/logout/registration |
|
||||
| `budget-journey.e2e.test.ts` | Budget tracking workflow |
|
||||
| `deals-journey.e2e.test.ts` | Finding and saving deals |
|
||||
| `error-reporting.e2e.test.ts` | Error handling verification |
|
||||
| `flyer-upload.e2e.test.ts` | Flyer upload and processing |
|
||||
| `inventory-journey.e2e.test.ts` | Pantry management |
|
||||
| `receipt-journey.e2e.test.ts` | Receipt scanning and tracking |
|
||||
| `upc-journey.e2e.test.ts` | UPC barcode scanning |
|
||||
| `user-journey.e2e.test.ts` | User profile management |
|
||||
|
||||
Requires all services (PostgreSQL, Redis, BullMQ workers) running.
|
||||
|
||||
@@ -157,20 +192,18 @@ Located in `src/tests/utils/storeHelpers.ts`:
|
||||
|
||||
```typescript
|
||||
// Create a store with a location in one call
|
||||
const store = await createStoreWithLocation({
|
||||
storeName: 'Test Store',
|
||||
address: {
|
||||
address_line_1: '123 Main St',
|
||||
city: 'Toronto',
|
||||
province_state: 'ON',
|
||||
postal_code: 'M1M 1M1',
|
||||
},
|
||||
pool,
|
||||
log,
|
||||
const store = await createStoreWithLocation(pool, {
|
||||
name: 'Test Store',
|
||||
address: '123 Main St',
|
||||
city: 'Toronto',
|
||||
province: 'ON',
|
||||
postalCode: 'M1M 1M1',
|
||||
});
|
||||
|
||||
// Returns: { storeId, addressId, storeLocationId }
|
||||
|
||||
// Cleanup stores and their locations
|
||||
await cleanupStoreLocations([storeId1, storeId2], pool, log);
|
||||
await cleanupStoreLocation(pool, store);
|
||||
```
|
||||
|
||||
### Mock Factories
|
||||
|
||||
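A sketch of the factory shape described by ADR-045 (field names and the import path are illustrative):

```typescript
import type { Flyer } from '../../types'; // illustrative import

// Build a valid default object; each test overrides only the fields it cares about.
export function buildFlyer(overrides: Partial<Flyer> = {}): Flyer {
  return {
    flyer_id: 1,
    store_id: 1,
    status: 'processed',
    created_at: new Date().toISOString(),
    ...overrides,
  } as Flyer;
}

// Usage in a test:
const expiredFlyer = buildFlyer({ status: 'expired' });
```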
@@ -2,134 +2,259 @@
Complete guide to environment variables used in Flyer Crawler.

---

## Quick Reference

### Minimum Required Variables (Development)

| Variable         | Example                  | Purpose              |
| ---------------- | ------------------------ | -------------------- |
| `DB_HOST`        | `localhost`              | PostgreSQL host      |
| `DB_USER`        | `postgres`               | PostgreSQL username  |
| `DB_PASSWORD`    | `postgres`               | PostgreSQL password  |
| `DB_NAME`        | `flyer_crawler_dev`      | Database name        |
| `REDIS_URL`      | `redis://localhost:6379` | Redis connection URL |
| `JWT_SECRET`     | (32+ character string)   | JWT signing key      |
| `GEMINI_API_KEY` | `AIzaSy...`              | Google Gemini API    |

### Source of Truth

The Zod schema at `src/config/env.ts` is the authoritative source for all environment variables. If a variable is not in this file, it is not used by the application.
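For orientation, a heavily trimmed sketch of what that schema looks like (the real file defines many more variables and defaults):

```typescript
import { z } from 'zod';

// Heavily trimmed illustration; the real schema covers every variable on this page.
const envSchema = z.object({
  DB_HOST: z.string(),
  DB_PORT: z.coerce.number().default(5432),
  DB_USER: z.string(),
  DB_PASSWORD: z.string(),
  DB_NAME: z.string(),
  REDIS_URL: z.string().url(),
  JWT_SECRET: z.string().min(32),
  GEMINI_API_KEY: z.string().optional(),
});

export const env = envSchema.parse(process.env);
```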
---
|
||||
|
||||
## Configuration by Environment
|
||||
|
||||
### Production
|
||||
|
||||
**Location**: Gitea CI/CD secrets injected during deployment
|
||||
**Path**: `/var/www/flyer-crawler.projectium.com/`
|
||||
**Note**: No `.env` file exists - all variables come from CI/CD
|
||||
| Aspect | Details |
|
||||
| -------- | ------------------------------------------ |
|
||||
| Location | Gitea CI/CD secrets injected at deployment |
|
||||
| Path | `/var/www/flyer-crawler.projectium.com/` |
|
||||
| File | No `.env` file - all from CI/CD secrets |
|
||||
|
||||
### Test
|
||||
|
||||
**Location**: Gitea CI/CD secrets + `.env.test` file
|
||||
**Path**: `/var/www/flyer-crawler-test.projectium.com/`
|
||||
**Note**: `.env.test` overrides for test-specific values
|
||||
| Aspect | Details |
|
||||
| -------- | --------------------------------------------- |
|
||||
| Location | Gitea CI/CD secrets + `.env.test` overrides |
|
||||
| Path | `/var/www/flyer-crawler-test.projectium.com/` |
|
||||
| File | `.env.test` for test-specific values |
|
||||
|
||||
### Development Container
|
||||
|
||||
**Location**: `.env.local` file in project root
|
||||
**Note**: Overrides default DSNs in `compose.dev.yml`
|
||||
| Aspect | Details |
|
||||
| -------- | --------------------------------------- |
|
||||
| Location | `.env.local` file in project root |
|
||||
| Priority | Overrides defaults in `compose.dev.yml` |
|
||||
| File | `.env.local` (gitignored) |
|
||||
|
||||
## Required Variables
|
||||
---
|
||||
|
||||
### Database
|
||||
## Complete Variable Reference
|
||||
|
||||
| Variable | Description | Example |
|
||||
| ------------------ | ---------------------------- | ------------------------------------------ |
|
||||
| `DB_HOST` | PostgreSQL host | `localhost` (dev), `projectium.com` (prod) |
|
||||
| `DB_PORT` | PostgreSQL port | `5432` |
|
||||
| `DB_USER_PROD` | Production database user | `flyer_crawler_prod` |
|
||||
| `DB_PASSWORD_PROD` | Production database password | (secret) |
|
||||
| `DB_DATABASE_PROD` | Production database name | `flyer-crawler-prod` |
|
||||
| `DB_USER_TEST` | Test database user | `flyer_crawler_test` |
|
||||
| `DB_PASSWORD_TEST` | Test database password | (secret) |
|
||||
| `DB_DATABASE_TEST` | Test database name | `flyer-crawler-test` |
|
||||
| `DB_USER` | Dev database user | `postgres` |
|
||||
| `DB_PASSWORD` | Dev database password | `postgres` |
|
||||
| `DB_NAME` | Dev database name | `flyer_crawler_dev` |
|
||||
### Database Configuration
|
||||
|
||||
**Note**: Production and test use separate `_PROD` and `_TEST` suffixed variables. Development uses unsuffixed variables.
|
||||
| Variable | Required | Default | Description |
|
||||
| ------------- | -------- | ------- | ----------------- |
|
||||
| `DB_HOST` | Yes | - | PostgreSQL host |
|
||||
| `DB_PORT` | No | `5432` | PostgreSQL port |
|
||||
| `DB_USER` | Yes | - | Database username |
|
||||
| `DB_PASSWORD` | Yes | - | Database password |
|
||||
| `DB_NAME` | Yes | - | Database name |
|
||||
|
||||
### Redis
|
||||
**Environment-Specific Variables** (Gitea Secrets):
|
||||
|
||||
| Variable | Description | Example |
|
||||
| --------------------- | ------------------------- | ------------------------------ |
|
||||
| `REDIS_URL` | Redis connection URL | `redis://localhost:6379` (dev) |
|
||||
| `REDIS_PASSWORD_PROD` | Production Redis password | (secret) |
|
||||
| `REDIS_PASSWORD_TEST` | Test Redis password | (secret) |
|
||||
| Variable | Environment | Description |
|
||||
| ------------------ | ----------- | ------------------------ |
|
||||
| `DB_USER_PROD` | Production | Production database user |
|
||||
| `DB_PASSWORD_PROD` | Production | Production database pass |
|
||||
| `DB_DATABASE_PROD` | Production | Production database name |
|
||||
| `DB_USER_TEST` | Test | Test database user |
|
||||
| `DB_PASSWORD_TEST` | Test | Test database password |
|
||||
| `DB_DATABASE_TEST` | Test | Test database name |
|
||||
|
||||
### Redis Configuration
|
||||
|
||||
| Variable | Required | Default | Description |
|
||||
| ---------------- | -------- | ------- | ------------------------- |
|
||||
| `REDIS_URL` | Yes | - | Redis connection URL |
|
||||
| `REDIS_PASSWORD` | No | - | Redis password (optional) |
|
||||
|
||||
**URL Format**: `redis://[user:password@]host:port`
|
||||
|
||||
**Examples**:
|
||||
|
||||
```bash
|
||||
# Development (no auth)
|
||||
REDIS_URL=redis://localhost:6379
|
||||
|
||||
# Production (with auth)
|
||||
REDIS_URL=redis://:${REDIS_PASSWORD_PROD}@localhost:6379
|
||||
```
|
||||
|
||||
### Authentication
|
||||
|
||||
| Variable | Description | Example |
|
||||
| ---------------------- | -------------------------- | -------------------------------- |
|
||||
| `JWT_SECRET` | JWT token signing key | (minimum 32 characters) |
|
||||
| `SESSION_SECRET` | Session encryption key | (minimum 32 characters) |
|
||||
| `GOOGLE_CLIENT_ID` | Google OAuth client ID | `xxx.apps.googleusercontent.com` |
|
||||
| `GOOGLE_CLIENT_SECRET` | Google OAuth client secret | (secret) |
|
||||
| `GH_CLIENT_ID` | GitHub OAuth client ID | `xxx` |
|
||||
| `GH_CLIENT_SECRET` | GitHub OAuth client secret | (secret) |
|
||||
| Variable | Required | Min Length | Description |
|
||||
| ---------------------- | -------- | ---------- | ----------------------- |
|
||||
| `JWT_SECRET` | Yes | 32 chars | JWT token signing key |
|
||||
| `JWT_SECRET_PREVIOUS` | No | - | Previous key (rotation) |
|
||||
| `GOOGLE_CLIENT_ID` | No | - | Google OAuth client ID |
|
||||
| `GOOGLE_CLIENT_SECRET` | No | - | Google OAuth secret |
|
||||
| `GITHUB_CLIENT_ID` | No | - | GitHub OAuth client ID |
|
||||
| `GITHUB_CLIENT_SECRET` | No | - | GitHub OAuth secret |
|
||||
|
||||
**Generate Secure Secret**:
|
||||
|
||||
```bash
|
||||
node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
|
||||
```

### AI Services

| Variable                     | Required | Description                      |
| ---------------------------- | -------- | -------------------------------- |
| `GEMINI_API_KEY`             | Yes\*    | Google Gemini API key            |
| `GEMINI_RPM`                 | No       | Rate limit (default: 5)          |
| `AI_PRICE_QUALITY_THRESHOLD` | No       | Quality threshold (default: 0.5) |

\*Required for flyer processing. The application works without it but cannot extract flyer data.

**Get API Key**: [Google AI Studio](https://aistudio.google.com/app/apikey)
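
For illustration, a `.env.local` fragment using these variables might look like the following; the key is a placeholder and the numeric values simply restate the documented defaults, not recommended settings:

```bash
# Google Gemini access (required for flyer processing)
GEMINI_API_KEY=AIzaSy-your-key-here

# Optional tuning
GEMINI_RPM=5                     # Rate limit (default: 5)
AI_PRICE_QUALITY_THRESHOLD=0.5   # Quality threshold (default: 0.5)
```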

### Google Services

| Variable               | Required | Description                      |
| ---------------------- | -------- | -------------------------------- |
| `GOOGLE_MAPS_API_KEY`  | No       | Google Maps Geocoding API        |
| `GOOGLE_CLIENT_ID`     | No       | OAuth (see Authentication above) |
| `GOOGLE_CLIENT_SECRET` | No       | OAuth (see Authentication above) |

## Optional Variables

### UPC Lookup APIs

| Variable                 | Required | Description            |
| ------------------------ | -------- | ---------------------- |
| `UPC_ITEM_DB_API_KEY`    | No       | UPC Item DB API key    |
| `BARCODE_LOOKUP_API_KEY` | No       | Barcode Lookup API key |

### Application Settings

| Variable            | Required | Default           | Description              |
| ------------------- | -------- | ----------------- | ------------------------ |
| `NODE_ENV`          | No       | `development`     | Environment mode         |
| `PORT`              | No       | `3001`            | Backend server port      |
| `FRONTEND_URL`      | No       | -                 | Frontend URL (CORS)      |
| `BASE_URL`          | No       | -                 | API base URL             |
| `STORAGE_PATH`      | No       | (see below)       | Flyer image storage path |
| `LOG_LEVEL`         | No       | `info`            | Logging verbosity        |
| `REDIS_TTL`         | No       | `3600`            | Cache TTL in seconds     |
| `MAX_UPLOAD_SIZE`   | No       | `10mb`            | Max file upload size     |
| `RATE_LIMIT_WINDOW` | No       | `900000` (15 min) | Rate limit window (ms)   |
| `RATE_LIMIT_MAX`    | No       | `100`             | Max requests per window  |

**NODE_ENV Values**: `development`, `test`, `staging`, `production`

**Default STORAGE_PATH**: `/var/www/flyer-crawler.projectium.com/flyer-images`
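
The two rate-limit settings work together: `RATE_LIMIT_WINDOW` defines the window in milliseconds and `RATE_LIMIT_MAX` the number of requests allowed within it, so the defaults allow 100 requests per 15 minutes (900000 ms). A hypothetical override in `.env.local`, assuming `debug` is an accepted log level:

```bash
# Allow 300 requests per 5 minutes instead of the default 100 per 15 minutes
RATE_LIMIT_WINDOW=300000   # 5 minutes in milliseconds
RATE_LIMIT_MAX=300

# More verbose logging while debugging
LOG_LEVEL=debug
```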

### Email/SMTP Configuration

| Variable          | Required | Default | Description             |
| ----------------- | -------- | ------- | ----------------------- |
| `SMTP_HOST`       | No       | -       | SMTP server hostname    |
| `SMTP_PORT`       | No       | `587`   | SMTP server port        |
| `SMTP_USER`       | No       | -       | SMTP username           |
| `SMTP_PASS`       | No       | -       | SMTP password           |
| `SMTP_SECURE`     | No       | `false` | Use TLS                 |
| `SMTP_FROM_EMAIL` | No       | -       | From address for emails |

**Note**: Email functionality degrades gracefully if not configured.
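
A hypothetical SMTP configuration using these variables (host, user, password, and addresses are placeholders):

```bash
SMTP_HOST=smtp.example.com
SMTP_PORT=587
SMTP_USER=notifications@example.com
SMTP_PASS=app-specific-password
SMTP_SECURE=false                          # typically STARTTLS on 587; set true for implicit TLS
SMTP_FROM_EMAIL=notifications@example.com
```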

### Worker Configuration

| Variable                              | Default | Description                  |
| ------------------------------------- | ------- | ---------------------------- |
| `WORKER_CONCURRENCY`                  | `1`     | Main worker concurrency      |
| `WORKER_LOCK_DURATION`                | `30000` | Lock duration (ms)           |
| `EMAIL_WORKER_CONCURRENCY`            | `10`    | Email worker concurrency     |
| `ANALYTICS_WORKER_CONCURRENCY`        | `1`     | Analytics worker concurrency |
| `CLEANUP_WORKER_CONCURRENCY`          | `10`    | Cleanup worker concurrency   |
| `WEEKLY_ANALYTICS_WORKER_CONCURRENCY` | `1`     | Weekly analytics concurrency |

### Error Tracking (Bugsink/Sentry)

| Variable              | Required | Default  | Description                     |
| --------------------- | -------- | -------- | ------------------------------- |
| `SENTRY_DSN`          | No       | -        | Backend Sentry DSN              |
| `SENTRY_ENABLED`      | No       | `true`   | Enable error tracking           |
| `SENTRY_ENVIRONMENT`  | No       | NODE_ENV | Environment name for errors     |
| `SENTRY_DEBUG`        | No       | `false`  | Enable Sentry SDK debug logging |
| `SENTRY_AUTH_TOKEN`   | No       | -        | Sentry API token for releases   |
| `VITE_SENTRY_DSN`     | No       | -        | Frontend Sentry DSN             |
| `VITE_SENTRY_ENABLED` | No       | `true`   | Enable frontend error tracking  |
| `VITE_SENTRY_DEBUG`   | No       | `false`  | Frontend SDK debug logging      |

**DSN Format**: `http://[key]@[host]:[port]/[project_id]`

**Dev Container DSNs**:

```bash
# Backend (internal)
SENTRY_DSN=http://<key>@localhost:8000/1

# Frontend (via nginx proxy)
VITE_SENTRY_DSN=https://<key>@localhost/bugsink-api/2
```

---

## Configuration Files

| File                                  | Purpose                                     |
| ------------------------------------- | ------------------------------------------- |
| `src/config/env.ts`                   | Zod schema validation - **source of truth** |
| `ecosystem.config.cjs`                | PM2 process manager (production)            |
| `ecosystem.dev.config.cjs`            | PM2 process manager (development)           |
| `.gitea/workflows/deploy-to-prod.yml` | Production deployment workflow              |
| `.gitea/workflows/deploy-to-test.yml` | Test deployment workflow                    |
| `.env.example`                        | Template with all variables                 |
| `.env.local`                          | Dev container overrides (not in git)        |
| `.env.test`                           | Test environment overrides (not in git)     |

---

## Adding New Variables

### Checklist

1. [ ] **Update Zod Schema** - Edit `src/config/env.ts`
2. [ ] **Add to Gitea Secrets** - For prod/test environments
3. [ ] **Update Deployment Workflows** - `.gitea/workflows/*.yml`
4. [ ] **Update PM2 Config** - `ecosystem.config.cjs`
5. [ ] **Update .env.example** - Template for developers
6. [ ] **Update this document** - Add to appropriate section

### Step-by-Step

#### 1. Update Zod Schema

Edit `src/config/env.ts`:

```typescript
const envSchema = z.object({
  // ... existing variables ...
  newSection: z.object({
    newVariable: z.string().min(1, 'NEW_VARIABLE is required'),
  }),
});

// In loadEnvVars():
newSection: {
  newVariable: process.env.NEW_VARIABLE,
},
```

#### 2. Add to Gitea Secrets

1. Go to Gitea repository Settings > Secrets
2. Add `NEW_VARIABLE` with production value
3. Add `NEW_VARIABLE_TEST` if test needs a different value

#### 3. Update Deployment Workflows

Edit `.gitea/workflows/deploy-to-prod.yml` (and the test workflow as needed):

```yaml
env:
  NEW_VARIABLE: ${{ secrets.NEW_VARIABLE_TEST }}
```

#### 4. Update PM2 Config

Edit `ecosystem.config.cjs` and add the variable to the `env` block of each app.

#### 5. Update Documentation

- Add to `.env.example`
- Update this document
- Document in relevant feature docs
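
To confirm the new variable is actually picked up by the schema, a quick check is to add it and restart the API while watching the startup logs; the sketch below assumes the dev container setup described in the installation docs, and relies on the startup validation behaviour described in the Validation section:

```bash
# 1. Add the variable to .env.local
echo "NEW_VARIABLE=some-value" >> .env.local

# 2. Restart the API and watch for the CONFIGURATION ERROR banner (should not appear)
podman exec -it flyer-crawler-dev pm2 restart flyer-crawler-api-dev
podman exec -it flyer-crawler-dev pm2 logs flyer-crawler-api-dev --lines 20
```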

---

## Security Best Practices

### Do

- Use Gitea Secrets for prod/test
- Use `.env.local` for dev (gitignored)
- Generate secrets with cryptographic randomness
- Rotate secrets regularly
- Use environment-specific database users

### Do Not

- Commit secrets to git
- Use short or predictable secrets
- Share secrets across environments
- Log sensitive values

### Secret Generation

```bash
# Generate secure random secrets (64 hex characters)
node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"

# Example output:
# a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2
```

### Database Users by Environment

Each environment has its own PostgreSQL user:

| Environment | User                 | Database             |
| ----------- | -------------------- | -------------------- |
| Production  | `flyer_crawler_prod` | `flyer-crawler-prod` |
| Test        | `flyer_crawler_test` | `flyer-crawler-test` |
| Development | `postgres`           | `flyer_crawler_dev`  |

**Setup Commands** (as postgres superuser):

```sql
-- Production
CREATE DATABASE "flyer-crawler-prod";
CREATE USER flyer_crawler_prod WITH PASSWORD 'secure-password';
ALTER DATABASE "flyer-crawler-prod" OWNER TO flyer_crawler_prod;
\c "flyer-crawler-prod"
ALTER SCHEMA public OWNER TO flyer_crawler_prod;
GRANT CREATE, USAGE ON SCHEMA public TO flyer_crawler_prod;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS pg_trgm;

-- Test (similar commands with _test suffix)
```
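
To confirm the setup commands took effect, a few read-only checks can be run as the postgres superuser; this sketch assumes local peer authentication for the `postgres` system user and uses the role and database names from the table above:

```bash
# Role exists and database ownership is correct
sudo -u postgres psql -c "\du flyer_crawler_prod"
sudo -u postgres psql -c "\l flyer-crawler-prod"

# Extensions installed in the production database
sudo -u postgres psql -d flyer-crawler-prod -c "SELECT extname FROM pg_extension;"
```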

---

## Validation

Environment variables are validated at startup via `src/config/env.ts`.

### Startup Validation

If validation fails, you will see:

```text
╔════════════════════════════════════════════════════════════════╗
║          CONFIGURATION ERROR - APPLICATION STARTUP              ║
╚════════════════════════════════════════════════════════════════╝

The following environment variables are missing or invalid:

  - database.host: DB_HOST is required
  - auth.jwtSecret: JWT_SECRET must be at least 32 characters

Please check your .env file or environment configuration.
```

When this happens:

1. Check the error message for missing/invalid variables
2. Verify `.env.local` (dev) or Gitea Secrets (prod/test)
3. Ensure values match schema requirements (min length, format, etc.)

### Debugging Configuration

```bash
# Check what variables are set (dev container)
podman exec flyer-crawler-dev env | grep -E "^(DB_|REDIS_|JWT_|SENTRY_)"

# Test database connection
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT 1;"

# Test Redis connection
podman exec flyer-crawler-redis redis-cli ping
```

---

## Troubleshooting

### Variable Not Found

```text
Error: Missing required environment variable: JWT_SECRET
```

**Solutions**:

1. Check `.env.local` exists and has the variable
2. Verify the variable name matches the schema exactly
3. Restart the application after changes

### Invalid Value

```text
Error: JWT_SECRET must be at least 32 characters
```

**Solution**: Generate a value that meets the schema requirement (see [Secret Generation](#secret-generation)).

### Wrong Environment

Check `NODE_ENV` is set correctly:

| Value         | Purpose                |
| ------------- | ---------------------- |
| `development` | Local dev container    |
| `test`        | CI/CD test server      |
| `staging`     | Pre-production testing |
| `production`  | Production server      |

### Database Connection Issues

Verify database credentials:

```bash
# Development
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT 1;"

# If connection fails, check:
# 1. Container is running: podman ps
# 2. DB_HOST matches container network
# 3. DB_PASSWORD is correct
```

---

## Reference

- **Validation Schema**: [src/config/env.ts](../../src/config/env.ts)
- **Template**: [.env.example](../../.env.example)
- **Deployment Workflows**: [.gitea/workflows/](../../.gitea/workflows/)
- **PM2 Config**: [ecosystem.config.cjs](../../ecosystem.config.cjs)

## Related Documentation

- [QUICKSTART.md](QUICKSTART.md) - Quick setup guide
- [INSTALL.md](INSTALL.md) - Detailed installation
- [DEV-CONTAINER.md](../development/DEV-CONTAINER.md) - Dev container setup
- [DEPLOYMENT.md](../operations/DEPLOYMENT.md) - Production deployment
- [AUTHENTICATION.md](../architecture/AUTHENTICATION.md) - OAuth setup
- [ADR-007](../adr/0007-configuration-and-secrets-management.md) - Configuration decisions

---

Last updated: January 2026

# Installation Guide

Complete setup instructions for the Flyer Crawler local development environment.

---

## Quick Reference

| Setup Method      | Best For                    | Time   | Document Section                                     |
| ----------------- | --------------------------- | ------ | ---------------------------------------------------- |
| Quick Start       | Already have Postgres/Redis | 5 min  | [Quick Start](#quick-start)                          |
| Dev Container     | Full production-like setup  | 15 min | [Dev Container](#development-container-recommended)  |
| Manual Containers | Learning the components     | 20 min | [Podman Setup](#podman-setup-manual)                 |

---

## Prerequisites

### Required Software

| Software       | Minimum Version | Purpose              | Download                                        |
| -------------- | --------------- | -------------------- | ----------------------------------------------- |
| Node.js        | 20.x            | Runtime              | [nodejs.org](https://nodejs.org/)               |
| Podman Desktop | 4.x             | Container management | [podman-desktop.io](https://podman-desktop.io/) |
| Git            | 2.x             | Version control      | [git-scm.com](https://git-scm.com/)             |

### Windows-Specific Requirements

| Requirement | Purpose                        | Setup Command                      |
| ----------- | ------------------------------ | ---------------------------------- |
| WSL 2       | Linux compatibility for Podman | `wsl --install` (admin PowerShell) |

### Verify Installation

```bash
# Check all prerequisites
node --version   # Expected: v20.x or higher
podman --version # Expected: podman version 4.x or higher
git --version    # Expected: git version 2.x or higher
wsl --list -v    # Expected: Shows WSL 2 distro
```

---

## Quick Start

If you already have PostgreSQL and Redis configured externally:

```bash
# 1. Clone the repository
git clone https://gitea.projectium.com/flyer-crawler/flyer-crawler.git
cd flyer-crawler

# 2. Install dependencies
npm install

# 3. Create .env.local (see Environment section below)

# 4. Run in development mode
npm run dev
```

**Access Points**:

- Frontend: `http://localhost:5173`
- Backend API: `http://localhost:3001`

---

## Development Container (Recommended)

The dev container provides a complete, production-like environment.

### What's Included

| Service    | Purpose                  | Port       |
| ---------- | ------------------------ | ---------- |
| Node.js    | API server, worker, Vite | 3001, 5173 |
| PostgreSQL | Database with PostGIS    | 5432       |
| Redis      | Cache and job queues     | 6379       |
| NGINX      | HTTPS reverse proxy      | 443        |
| Bugsink    | Error tracking           | 8443       |
| Logstash   | Log aggregation          | -          |
| PM2        | Process management       | -          |

### Setup Steps

#### Step 1: Initialize Podman

```bash
# Windows: Start Podman Desktop, or from terminal:
podman machine init
podman machine start
```

#### Step 2: Start Dev Container

```bash
# Start all services
podman-compose -f compose.dev.yml up -d

# View logs (optional)
podman-compose -f compose.dev.yml logs -f
```

**Expected Output**:

```text
[+] Running 3/3
 - Container flyer-crawler-postgres  Started
 - Container flyer-crawler-redis     Started
 - Container flyer-crawler-dev       Started
```

#### Step 3: Verify Services

```bash
# Check containers are running
podman ps

# Check PM2 processes
podman exec -it flyer-crawler-dev pm2 status
```

**Expected PM2 Status**:

```text
+---------------------------+--------+-------+
| name                      | status | cpu   |
+---------------------------+--------+-------+
| flyer-crawler-api-dev     | online | 0%    |
| flyer-crawler-worker-dev  | online | 0%    |
| flyer-crawler-vite-dev    | online | 0%    |
+---------------------------+--------+-------+
```

#### Step 4: Access Application

| Service     | URL                      | Notes                        |
| ----------- | ------------------------ | ---------------------------- |
| Frontend    | `https://localhost`      | NGINX proxies to Vite        |
| Backend API | `http://localhost:3001`  | Express server               |
| Bugsink     | `https://localhost:8443` | Login: admin@localhost/admin |

### SSL Certificate Setup (Optional but Recommended)

To eliminate browser security warnings:

**Windows**:

1. Double-click `certs/mkcert-ca.crt`
2. Click "Install Certificate..."
3. Select "Local Machine" > Next
4. Select "Place all certificates in the following store"
5. Browse > Select "Trusted Root Certification Authorities" > OK
6. Click Next > Finish
7. Restart browser

**Other Platforms**: See [`certs/README.md`](../../certs/README.md)

### Managing the Dev Container

| Action    | Command                                     |
| --------- | ------------------------------------------- |
| Start     | `podman-compose -f compose.dev.yml up -d`   |
| Stop      | `podman-compose -f compose.dev.yml down`    |
| View logs | `podman-compose -f compose.dev.yml logs -f` |
| Restart   | `podman-compose -f compose.dev.yml restart` |
| Rebuild   | `podman-compose -f compose.dev.yml build`   |

---

## Podman Setup (Manual)

For understanding the individual components or custom configurations.

### Step 1: Install Prerequisites on Windows

1. **Install WSL 2**: Podman on Windows relies on the Windows Subsystem for Linux.

   ```powershell
   # Run in administrator PowerShell
   wsl --install
   ```

   Restart the computer after WSL installation.

### Step 2: Initialize Podman

1. Launch **Podman Desktop**
2. Follow the setup wizard to initialize the Podman machine
3. Start the Podman machine

Or from the terminal:

```bash
podman machine init
podman machine start
```

### Step 3: Create Podman Network

```bash
podman network create flyer-crawler-net
```

### Step 4: Create PostgreSQL Container

```bash
podman run -d \
  --name flyer-crawler-postgres \
  --network flyer-crawler-net \
  -e POSTGRES_USER=postgres \
  -e POSTGRES_PASSWORD=postgres \
  -e POSTGRES_DB=flyer_crawler_dev \
  -p 5432:5432 \
  -v flyer-crawler-pgdata:/var/lib/postgresql/data \
  docker.io/postgis/postgis:15-3.3
```

### Step 5: Create Redis Container

```bash
podman run -d \
  --name flyer-crawler-redis \
  --network flyer-crawler-net \
  -p 6379:6379 \
  -v flyer-crawler-redis:/data \
  docker.io/library/redis:alpine
```

### Step 6: Initialize Database

```bash
# Wait for PostgreSQL to be ready
podman exec flyer-crawler-postgres pg_isready -U postgres

# Install required extensions
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";
"

# Apply schema
podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev < sql/master_schema_rollup.sql
```

### Step 7: Create Node.js Container

```bash
# Create volume for node_modules (persists between container restarts)
podman volume create node_modules_cache

# Run Ubuntu container with project mounted
podman run -it \
  --name flyer-dev \
  --network flyer-crawler-net \
  -p 3001:3001 \
  -p 5173:5173 \
  -v "$(pwd):/app" \
  -v "node_modules_cache:/app/node_modules" \
  ubuntu:latest
```

| Flag                                        | Purpose                                           |
| ------------------------------------------- | ------------------------------------------------- |
| `-p 3001:3001`                              | Forwards the backend server port                  |
| `-p 5173:5173`                              | Forwards the Vite frontend server port            |
| `--name flyer-dev`                          | Names the container for easy reference            |
| `-v "...:/app"`                             | Mounts your project directory into the container  |
| `-v "node_modules_cache:/app/node_modules"` | Mounts the named volume for node_modules          |

### Step 8: Configure Container Environment

Inside the container:

```bash
# Update and install dependencies
apt-get update
apt-get install -y curl git

# Install Node.js 20
curl -sL https://deb.nodesource.com/setup_20.x | bash -
apt-get install -y nodejs

# Navigate to project and install
cd /app
npm install

# Start development server
npm run dev
```

### Access the Application

- **Frontend**: `http://localhost:5173`
- **Backend API**: `http://localhost:3001`

When using the full dev container stack with NGINX (via `compose.dev.yml`), access the application over HTTPS at `https://localhost` or `https://127.0.0.1`; the self-signed mkcert certificate includes both hostnames as SANs. To avoid browser warnings and `ERR_CERT_AUTHORITY_INVALID` errors on flyer images, install the CA certificate at `certs/mkcert-ca.crt` (see [`certs/README.md`](../../certs/README.md) for platform-specific instructions). If images still fail to load with SSL errors, see [FLYER-URL-CONFIGURATION.md](../FLYER-URL-CONFIGURATION.md#ssl-certificate-configuration-dev-container).

### Container Management Commands

| Action         | Command                        |
| -------------- | ------------------------------ |
| Stop container | Press `Ctrl+C`, then `exit`    |
| Restart        | `podman start -a -i flyer-dev` |
| Remove         | `podman rm flyer-dev`          |
| List running   | `podman ps`                    |
| List all       | `podman ps -a`                 |

---

## Environment Configuration

### Create .env.local

Create `.env.local` in the project root with your configuration:

```bash
# Database (adjust host based on your setup)
DB_HOST=localhost   # Use 'postgres' if inside dev container
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=flyer_crawler_dev

# Redis (adjust host based on your setup)
REDIS_URL=redis://localhost:6379   # Use 'redis://redis:6379' inside container

# Application
NODE_ENV=development
PORT=3001
FRONTEND_URL=http://localhost:5173

# Authentication (generate secure values)
JWT_SECRET=your-secret-at-least-32-characters-long

# AI Services
GEMINI_API_KEY=your-google-gemini-api-key
GOOGLE_MAPS_API_KEY=your-google-maps-api-key   # Optional
```

**Generate Secure Secrets**:

```bash
node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
```

### Environment Differences

| Variable    | Host Development         | Inside Dev Container |
| ----------- | ------------------------ | -------------------- |
| `DB_HOST`   | `localhost`              | `postgres`           |
| `REDIS_URL` | `redis://localhost:6379` | `redis://redis:6379` |

See [ENVIRONMENT.md](ENVIRONMENT.md) for complete variable reference.

---

## Seeding Development Data

Create test accounts and sample data:

```bash
npm run seed
```

### What the Seed Script Does

1. Rebuilds database schema from `sql/master_schema_rollup.sql`
2. Creates test user accounts:
   - `admin@example.com` (admin user)
   - `user@example.com` (regular user)
3. Copies test flyer images to `public/flyer-images/`
4. Creates sample flyer with items
5. Seeds watched items and shopping list

### Test Images

The seed script copies these files from `src/tests/assets/`:

- `test-flyer-image.jpg`
- `test-flyer-icon.png`

Images are served by NGINX at `/flyer-images/`.

After seeding, you may need to restart your IDE's TypeScript server to pick up any generated types.
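
To spot-check the seeded data you can query the dev database directly. The exact table names depend on the schema in `sql/master_schema_rollup.sql`, so the `users` table below is an assumption:

```bash
# List seeded accounts (table name assumed to be "users")
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev \
  -c "SELECT email FROM users ORDER BY email;"

# Confirm the test images were copied
ls public/flyer-images/
```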

---

## Verification Checklist

After installation, verify everything works:

- [ ] **Containers running**: `podman ps` shows postgres and redis
- [ ] **Database accessible**: `podman exec flyer-crawler-postgres psql -U postgres -c "SELECT 1;"`
- [ ] **Frontend loads**: Open `http://localhost:5173` (or `https://localhost` for dev container)
- [ ] **API responds**: `curl http://localhost:3001/health`
- [ ] **Tests pass**: `npm run test:unit` (or in container: `podman exec -it flyer-crawler-dev npm run test:unit`)
- [ ] **Type check passes**: `npm run type-check`

---

## Troubleshooting

### Podman Machine Won't Start

```bash
# Reset Podman machine
podman machine rm
podman machine init
podman machine start
```

### Port Already in Use

```bash
# Find process using port
netstat -ano | findstr :5432

# Option: Use different port
podman run -d --name flyer-crawler-postgres -p 5433:5432 ...
# Then set DB_PORT=5433 in .env.local
```

### Database Extensions Missing

```bash
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";
"
```

### Permission Denied on Windows Paths

Use the `MSYS_NO_PATHCONV=1` prefix:

```bash
MSYS_NO_PATHCONV=1 podman exec flyer-crawler-dev /path/to/script.sh
```

### Tests Fail with Timezone Errors

Tests must run in the dev container, not on the Windows host:

```bash
# CORRECT
podman exec -it flyer-crawler-dev npm test

# INCORRECT (may fail with TZ errors)
npm test
```

---

## Next Steps

| Goal                  | Document                                               |
| --------------------- | ------------------------------------------------------ |
| Quick setup guide     | [QUICKSTART.md](QUICKSTART.md)                         |
| Environment variables | [ENVIRONMENT.md](ENVIRONMENT.md)                       |
| Database schema       | [DATABASE.md](../architecture/DATABASE.md)             |
| Authentication setup  | [AUTHENTICATION.md](../architecture/AUTHENTICATION.md) |
| Dev container details | [DEV-CONTAINER.md](../development/DEV-CONTAINER.md)    |
| Deployment            | [DEPLOYMENT.md](../operations/DEPLOYMENT.md)           |

---

Last updated: January 2026

Get Flyer Crawler running in 5 minutes.

---

## Prerequisites Checklist

Before starting, verify you have:

- [ ] **Windows 10/11** with WSL 2 enabled
- [ ] **Podman Desktop** installed ([download](https://podman-desktop.io/))
- [ ] **Node.js 20+** installed
- [ ] **Git** for cloning the repository

**Verify Prerequisites**:

```bash
# Check Podman
podman --version
# Expected: podman version 4.x or higher

# Check Node.js
node --version
# Expected: v20.x or higher

# Check WSL
wsl --list --verbose
# Expected: Shows WSL 2 distro
```

---

## Quick Setup (5 Steps)

### Step 1: Start Containers (1 minute)

```bash
# Start PostgreSQL and Redis
podman run -d --name flyer-crawler-postgres \
  -e POSTGRES_USER=postgres \
  -e POSTGRES_PASSWORD=postgres \
  -e POSTGRES_DB=flyer_crawler_dev \
  -p 5432:5432 \
  docker.io/postgis/postgis:15-3.3

podman run -d --name flyer-crawler-redis \
  -p 6379:6379 \
  docker.io/library/redis:alpine
```

**Expected Output**:

```text
# Container IDs displayed, no errors
```

### Step 2: Initialize Database (2 minutes)

```bash
# Wait for PostgreSQL to be ready
podman exec flyer-crawler-postgres pg_isready -U postgres
# Expected: localhost:5432 - accepting connections

# Install extensions
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev \
  -c "CREATE EXTENSION IF NOT EXISTS postgis; CREATE EXTENSION IF NOT EXISTS pg_trgm; CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";"

# Apply schema
podman exec -i flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev < sql/master_schema_rollup.sql
```

**Expected Output**:

```text
CREATE EXTENSION
CREATE EXTENSION
CREATE EXTENSION
CREATE TABLE
... (many tables created)
```

### Step 3: Configure Environment (1 minute)

Create `.env.local` in the project root:

```bash
# Database
DB_HOST=localhost
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=postgres
DB_NAME=flyer_crawler_dev

# Redis
REDIS_URL=redis://localhost:6379

# Application
NODE_ENV=development
PORT=3001
FRONTEND_URL=http://localhost:5173

# Secrets (generate your own - see command below)
JWT_SECRET=your-dev-jwt-secret-at-least-32-chars-long
SESSION_SECRET=your-dev-session-secret-at-least-32-chars-long

# AI Services (get your own keys)
GEMINI_API_KEY=your-google-gemini-api-key
GOOGLE_MAPS_API_KEY=your-google-maps-api-key
```

**Generate Secure Secrets**:

```bash
node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
```

### Step 4: Install and Run (1 minute)

```bash
# Install dependencies (first time only)
npm install

# Run in development mode
npm run dev
```

**Expected Output**:

```text
> flyer-crawler@x.x.x dev
> concurrently ...

[API] Server listening on port 3001
[Vite] VITE ready at http://localhost:5173
```

### Step 5: Verify Installation

| Check       | URL/Command                    | Expected Result                     |
| ----------- | ------------------------------ | ----------------------------------- |
| Frontend    | `http://localhost:5173`        | Flyer Crawler app loads             |
| Backend API | `http://localhost:3001/health` | `{ "status": "ok", ... }`           |
| Database    | `podman exec ... psql -c ...`  | `SELECT version()` returns Postgres |
| Containers  | `podman ps`                    | Shows postgres and redis running    |
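
For example, the API check from the table can be run from a terminal; the exact JSON fields beyond `status` may vary:

```bash
curl -s http://localhost:3001/health
# Expected: {"status":"ok", ...}
```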

---

## Full Dev Container (Recommended)

For a production-like environment with NGINX, Bugsink error tracking, and PM2 process management:

### Starting the Dev Container

```bash
# Start all services
podman-compose -f compose.dev.yml up -d

# View logs
podman-compose -f compose.dev.yml logs -f
```

### Access Points

| Service     | URL                      | Notes                        |
| ----------- | ------------------------ | ---------------------------- |
| Frontend    | `https://localhost`      | NGINX proxy to Vite          |
| Backend API | `http://localhost:3001`  | Express server               |
| Bugsink     | `https://localhost:8443` | Error tracking (admin/admin) |
| PostgreSQL  | `localhost:5432`         | Database                     |
| Redis       | `localhost:6379`         | Cache                        |

**SSL Certificate Setup (Recommended)**:

To eliminate browser security warnings, install the mkcert CA certificate:

```bash
# Windows: Double-click certs/mkcert-ca.crt and install to Trusted Root CAs
# See certs/README.md for detailed instructions per platform
```

### PM2 Commands

```bash
# View process status
podman exec -it flyer-crawler-dev pm2 status

# Restart all processes
podman exec -it flyer-crawler-dev pm2 restart all

# Restart a single process
podman exec -it flyer-crawler-dev pm2 restart flyer-crawler-api-dev
```

### Dev Container Processes

| Process                    | Description              | Port |
| -------------------------- | ------------------------ | ---- |
| `flyer-crawler-api-dev`    | API server (tsx watch)   | 3001 |
| `flyer-crawler-worker-dev` | Background job worker    | -    |
| `flyer-crawler-vite-dev`   | Vite frontend dev server | 5173 |

---

## Verification Commands

Run these to confirm everything is working:

```bash
# Check containers are running
podman ps
# Expected: flyer-crawler-postgres and flyer-crawler-redis both running

# Test database connection
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT version();"
# Expected: PostgreSQL 15.x with PostGIS

# Run tests (in dev container)
podman exec -it flyer-crawler-dev npm run test:unit
# Expected: All tests pass

# Run type check
podman exec -it flyer-crawler-dev npm run type-check
# Expected: No type errors
```

---

## Common Issues and Solutions

### "Unable to connect to Podman socket"

**Cause**: Podman machine not running

**Solution**:

```bash
podman machine start
```

### "Connection refused" to PostgreSQL

**Cause**: PostgreSQL still initializing

**Solution**:

```bash
# Wait for PostgreSQL to be ready
podman exec flyer-crawler-postgres pg_isready -U postgres
# Retry after "accepting connections" message
```

### Port 5432 or 6379 already in use

**Cause**: Another service using the port

**Solution**:

```bash
# Option 1: Stop conflicting service
# Option 2: Use a different host port
podman run -d --name flyer-crawler-postgres -p 5433:5432 ...
# Then update DB_PORT=5433 in .env.local
```

### "JWT_SECRET must be at least 32 characters"

**Cause**: Secret too short in `.env.local`

**Solution**: Generate a longer secret:

```bash
node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
```

### Tests fail with "TZ environment variable" errors

**Cause**: Timezone setting interfering with Node.js async hooks

**Solution**: Tests must run in the dev container (not the Windows host):

```bash
# CORRECT - run in container
podman exec -it flyer-crawler-dev npm test

# INCORRECT - do not run on Windows host
npm test
```

---

## Next Steps

| Goal                    | Document                                              |
| ----------------------- | ----------------------------------------------------- |
| Understand the codebase | [Architecture Overview](../architecture/OVERVIEW.md)  |
| Configure environment   | [Environment Variables](ENVIRONMENT.md)               |
| Set up MCP tools        | [MCP Configuration](../tools/MCP-CONFIGURATION.md)    |
| Learn testing           | [Testing Guide](../development/TESTING.md)            |
| Understand DB schema    | [Database Documentation](../architecture/DATABASE.md) |
| Read ADRs               | [ADR Index](../adr/index.md)                          |
| Full installation guide | [Installation Guide](INSTALL.md)                      |
| Contributing guidelines | [CONTRIBUTING.md](../../CONTRIBUTING.md)              |

---

## Daily Development Workflow

```bash
# 1. Start containers
podman start flyer-crawler-postgres flyer-crawler-redis

# 2. Start dev server
npm run dev

# 3. Make changes and test
npm test

# 4. Type check before commit
npm run type-check

# 5. Commit changes
git commit
```

**For dev container users**:

```bash
# 1. Start dev container
podman-compose -f compose.dev.yml up -d

# 2. View logs
podman exec -it flyer-crawler-dev pm2 logs

# 3. Run tests
podman exec -it flyer-crawler-dev npm test

# 4. Stop when done
podman-compose -f compose.dev.yml down
```

---

Last updated: January 2026

---

This guide covers the manual installation of Flyer Crawler and its dependencies on a bare-metal Ubuntu server (e.g., a colocation server). This is the definitive reference for setting up a production environment without containers.

**Last verified**: 2026-01-28

**Target Environment**: Ubuntu 22.04 LTS (or newer)

**Related documentation**:

- [ADR-014: Containerization and Deployment Strategy](../adr/0014-containerization-and-deployment-strategy.md)
- [ADR-015: Error Tracking and Observability](../adr/0015-error-tracking-and-observability.md)
- [ADR-050: PostgreSQL Function Observability](../adr/0050-postgresql-function-observability.md)
- [Deployment Guide](DEPLOYMENT.md)
- [Monitoring Guide](MONITORING.md)

---

## Quick Reference

### Installation Time Estimates

| Component   | Estimated Time  | Notes                         |
| ----------- | --------------- | ----------------------------- |
| PostgreSQL  | 10-15 minutes   | Including PostGIS extensions  |
| Redis       | 5 minutes       | Quick install                 |
| Node.js     | 5 minutes       | Via NodeSource repository     |
| Application | 15-20 minutes   | Clone, install, build         |
| PM2         | 5 minutes       | Global install + config       |
| NGINX       | 10-15 minutes   | Including SSL via Certbot     |
| Bugsink     | 20-30 minutes   | Python venv, systemd services |
| Logstash    | 15-20 minutes   | Including pipeline config     |
| **Total**   | **~90 minutes** | For complete fresh install    |

### Post-Installation Verification

After completing setup, verify all services:

```bash
# Check all services are running
systemctl status postgresql nginx redis-server gunicorn-bugsink snappea logstash

# Verify application health
curl -s https://flyer-crawler.projectium.com/api/health/ready | jq .

# Check PM2 processes
pm2 list

# Verify Bugsink is accessible
curl -s https://bugsink.projectium.com/accounts/login/ | head -5
```

---

This guide covers deploying Flyer Crawler to a production server.

**Last verified**: 2026-01-28

**Related documentation**:

- [ADR-014: Containerization and Deployment Strategy](../adr/0014-containerization-and-deployment-strategy.md)
- [ADR-015: Error Tracking and Observability](../adr/0015-error-tracking-and-observability.md)
- [Bare-Metal Setup Guide](BARE-METAL-SETUP.md)
- [Monitoring Guide](MONITORING.md)

---

## Quick Reference

### Command Reference Table

| Task                 | Command                                                                  |
| -------------------- | ------------------------------------------------------------------------ |
| Deploy to production | Gitea Actions workflow (manual trigger)                                  |
| Deploy to test       | Automatic on push to `main`                                              |
| Check PM2 status     | `pm2 list`                                                               |
| View logs            | `pm2 logs flyer-crawler-api --lines 100`                                 |
| Restart all          | `pm2 restart all`                                                        |
| Check NGINX          | `sudo nginx -t && sudo systemctl status nginx`                           |
| Check health         | `curl -s https://flyer-crawler.projectium.com/api/health/ready \| jq .`  |

### Deployment URLs

| Environment   | URL                                         | API Port |
| ------------- | ------------------------------------------- | -------- |
| Production    | `https://flyer-crawler.projectium.com`      | 3001     |
| Test          | `https://flyer-crawler-test.projectium.com` | 3002     |
| Dev Container | `https://localhost`                         | 3001     |

---

## Server Access Model

**Important**: Claude Code (and AI tools) have **READ-ONLY** access to production/test servers.

## Prerequisites

| Component  | Version   | Purpose                         |
| ---------- | --------- | ------------------------------- |
| Ubuntu     | 22.04 LTS | Operating system                |
| PostgreSQL | 14+       | Database with PostGIS extension |
| Redis      | 6+        | Caching and job queues          |
| Node.js    | 20.x LTS  | Application runtime             |
| NGINX      | 1.18+     | Reverse proxy and static files  |
| PM2        | Latest    | Process manager                 |

**Verify prerequisites**:

```bash
node --version   # Should be v20.x.x
psql --version   # Should be 14+
redis-cli ping   # Should return PONG
nginx -v         # Should be 1.18+
pm2 --version    # Any recent version
```

## Dev Container Parity (ADR-014)

**Option 2**: Edit `/etc/nginx/mime.types` globally:

```text
# Change this line:
application/javascript                js;
```

---

## Deployment Troubleshooting

### Decision Tree: Deployment Issues

```text
Deployment failed?
|
+-- Build step failed?
|   |
|   +-- TypeScript errors --> Fix type issues, run `npm run type-check`
|   +-- Missing dependencies --> Run `npm ci`
|   +-- Out of memory --> Increase Node heap size
|
+-- Tests failed?
|   |
|   +-- Database connection --> Check DB_HOST, credentials
|   +-- Redis connection --> Check REDIS_URL
|   +-- Test isolation --> Check for race conditions
|
+-- SSH/Deploy failed?
    |
    +-- Permission denied --> Check SSH keys in Gitea secrets
    +-- Host unreachable --> Check firewall, VPN
    +-- PM2 error --> Check PM2 logs on server
```

### Common Deployment Issues

| Symptom                              | Diagnosis               | Solution                                         |
| ------------------------------------ | ----------------------- | ------------------------------------------------ |
| "Connection refused" on health check | API not started         | Check `pm2 logs flyer-crawler-api`               |
| 502 Bad Gateway                      | NGINX cannot reach API  | Verify API port (3001), restart PM2              |
| CSS/JS not loading                   | Build artifacts missing | Re-run `npm run build`, check NGINX static paths |
| Database migrations failed           | Schema mismatch         | Run migrations manually, check DB connectivity   |
| "ENOSPC" error                       | Disk full               | Clear old logs: `pm2 flush`, clean npm cache     |
| SSL certificate error                | Cert expired/missing    | Run `certbot renew`, check NGINX config          |

### Post-Deployment Verification Checklist

After every deployment, verify:

- [ ] Health check passes: `curl -s https://flyer-crawler.projectium.com/api/health/ready`
- [ ] PM2 processes running: `pm2 list` shows `online` status
- [ ] No recent errors: Check Bugsink for new issues
- [ ] Frontend loads: Browser shows login page
- [ ] API responds: `curl https://flyer-crawler.projectium.com/api/health/ping`

### Rollback Procedure

If deployment causes issues:

```bash
# 1. Check current release
cd /var/www/flyer-crawler.projectium.com
git log --oneline -5

# 2. Revert to previous commit
git checkout HEAD~1

# 3. Rebuild and restart
npm ci && npm run build
pm2 restart all

# 4. Verify health
curl -s http://localhost:3001/api/health/ready | jq .
```

---

## Related Documentation

- [Database Setup](../architecture/DATABASE.md) - PostgreSQL and PostGIS configuration
- [Monitoring Guide](MONITORING.md) - Health checks and error tracking
- [Logstash Quick Reference](LOGSTASH-QUICK-REF.md) - Log aggregation
- [Bare-Metal Server Setup](BARE-METAL-SETUP.md) - Manual server installation guide
@@ -2,10 +2,47 @@

Aggregates logs from PostgreSQL, PM2, Redis, NGINX; forwards errors to Bugsink.

**Last verified**: 2026-01-28

**Related documentation**:

- [ADR-050: PostgreSQL Function Observability](../adr/0050-postgresql-function-observability.md)
- [ADR-015: Error Tracking and Observability](../adr/0015-error-tracking-and-observability.md)
- [Monitoring Guide](MONITORING.md)
- [Logstash Troubleshooting Runbook](LOGSTASH-TROUBLESHOOTING.md)

---

## Quick Reference

### Bugsink Project Routing

| Source Type    | Environment | Bugsink Project      | Project ID |
| -------------- | ----------- | -------------------- | ---------- |
| PM2 API/Worker | Dev         | Backend API (Dev)    | 1          |
| PostgreSQL     | Dev         | Backend API (Dev)    | 1          |
| Frontend JS    | Dev         | Frontend (Dev)       | 2          |
| Redis/NGINX    | Dev         | Infrastructure (Dev) | 4          |
| PM2 API/Worker | Production  | Backend API (Prod)   | 1          |
| PostgreSQL     | Production  | Backend API (Prod)   | 1          |
| PM2 API/Worker | Test        | Backend API (Test)   | 3          |

### Key DSN Keys (Dev Container)

| Project              | DSN Key                            |
| -------------------- | ---------------------------------- |
| Backend API (Dev)    | `cea01396c56246adb5878fa5ee6b1d22` |
| Frontend (Dev)       | `d92663cb73cf4145b677b84029e4b762` |
| Infrastructure (Dev) | `14e8791da3d347fa98073261b596cab9` |
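Each DSN key routes events to its Bugsink project through the Sentry-compatible ingest endpoint. A minimal sketch of what such an output block might look like is shown below; the exact layout lives in `bugsink.conf`, and the URL, endpoint path, and header format here are assumptions based on the standard Sentry store API rather than the deployed config.

```text
output {
  http {
    # Assumed Sentry-compatible ingest URL: /api/<project_id>/store/
    url         => "https://bugsink.projectium.com/api/1/store/"
    http_method => "post"
    format      => "json"
    headers     => {
      # sentry_key is the Backend API (Dev) DSN key from the table above
      "X-Sentry-Auth" => "Sentry sentry_version=7, sentry_key=cea01396c56246adb5878fa5ee6b1d22"
    }
  }
}
```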
---

## Configuration

**Primary config**: `/etc/logstash/conf.d/bugsink.conf`

**Dev container config**: `docker/logstash/bugsink.conf`

### Related Files

| Path | Purpose |
@@ -89,6 +126,34 @@ MSYS_NO_PATHCONV=1 podman exec flyer-crawler-dev ls -la /var/log/redis/

## Troubleshooting

### Decision Tree: Logs Not Appearing in Bugsink

```text
Errors not showing in Bugsink?
|
+-- Logstash running?
|   |
|   +-- No  --> systemctl start logstash
|   +-- Yes --> Check pipeline stats
|       |
|       +-- Events in = 0?
|       |   |
|       |   +-- Log files exist? --> ls /var/log/pm2/*.log
|       |   +-- Permissions OK?  --> groups logstash
|       |
|       +-- Events filtered = high?
|       |   |
|       |   +-- Grok failures --> Check log format matches pattern
|       |
|       +-- Events out but no Bugsink?
|           |
|           +-- 403 error --> Wrong DSN key
|           +-- 500 error --> Invalid event format (check sentry_level)
|           +-- Connection refused --> Bugsink not running
```

### Common Issues Table

| Issue             | Check            | Solution                    |
| ----------------- | ---------------- | --------------------------- |
| No Bugsink errors | Logstash running | `systemctl status logstash` |
@@ -103,6 +168,25 @@ MSYS_NO_PATHCONV=1 podman exec flyer-crawler-dev ls -la /var/log/redis/

| High disk usage  | Log rotation     | Verify `/etc/logrotate.d/logstash` configured                       |
| varchar(7) error | Level validation | Add Ruby filter to validate/normalize `sentry_level` before output  |
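For the varchar(7) case, the fix is a small Ruby filter that forces `sentry_level` into one of the levels Bugsink accepts before the event is sent. A minimal sketch is below; the field name comes from this table, while the allowed-value list assumes the standard Sentry levels rather than anything in the deployed pipeline.

```text
filter {
  ruby {
    code => '
      level = event.get("sentry_level").to_s.downcase
      allowed = ["fatal", "error", "warning", "info", "debug"]
      # Fall back to "error" so a malformed level never exceeds varchar(7)
      event.set("sentry_level", allowed.include?(level) ? level : "error")
    '
  }
}
```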
### Expected Output Examples

**Successful Logstash pipeline stats**:

```json
{
  "in": 1523,
  "out": 1520,
  "filtered": 1520,
  "queue_push_duration_in_millis": 45
}
```

**Healthy Bugsink HTTP response**:

```json
{ "id": "a1b2c3d4e5f6..." }
```

## Related Documentation

- **Dev Container Guide**: [DEV-CONTAINER.md](../development/DEV-CONTAINER.md) - PM2 and log aggregation in dev
@@ -2,6 +2,16 @@

This runbook provides step-by-step diagnostics and solutions for common Logstash issues in the PostgreSQL observability pipeline (ADR-050).

**Last verified**: 2026-01-28

**Related documentation**:

- [ADR-050: PostgreSQL Function Observability](../adr/0050-postgresql-function-observability.md)
- [Logstash Quick Reference](LOGSTASH-QUICK-REF.md)
- [Monitoring Guide](MONITORING.md)

---

## Quick Reference

| Symptom | Most Likely Cause | Quick Check |
@@ -2,6 +2,72 @@

This guide covers all aspects of monitoring the Flyer Crawler application across development, test, and production environments.

**Last verified**: 2026-01-28

**Related documentation**:

- [ADR-015: Error Tracking and Observability](../adr/0015-error-tracking-and-observability.md)
- [ADR-020: Health Checks](../adr/0020-health-checks-and-liveness-readiness-probes.md)
- [ADR-050: PostgreSQL Function Observability](../adr/0050-postgresql-function-observability.md)
- [Logstash Quick Reference](LOGSTASH-QUICK-REF.md)
- [Deployment Guide](DEPLOYMENT.md)

---

## Quick Reference

### Monitoring URLs

| Service      | Production URL                                           | Dev Container URL                         |
| ------------ | -------------------------------------------------------- | ----------------------------------------- |
| Health Check | `https://flyer-crawler.projectium.com/api/health/ready`  | `http://localhost:3001/api/health/ready`  |
| Bugsink      | `https://bugsink.projectium.com`                          | `https://localhost:8443`                   |
| Bull Board   | `https://flyer-crawler.projectium.com/api/admin/jobs`    | `http://localhost:3001/api/admin/jobs`    |

### Quick Diagnostic Commands

```bash
# Check all services at once (production)
curl -s https://flyer-crawler.projectium.com/api/health/ready | jq '.data.services'

# Dev container health check
podman exec flyer-crawler-dev curl -s http://localhost:3001/api/health/ready | jq .

# PM2 process overview
pm2 list

# Recent errors in Bugsink (via MCP)
# mcp__bugsink__list_issues --project_id 1 --status unresolved
```

### Monitoring Decision Tree

```text
Application seems slow or unresponsive?
|
+-- Check health endpoint first
|   |
|   +-- Returns unhealthy?
|   |   |
|   |   +-- Database unhealthy --> Check DB pool, connections
|   |   +-- Redis unhealthy    --> Check Redis memory, connection
|   |   +-- Storage unhealthy  --> Check disk space, permissions
|   |
|   +-- Returns healthy but slow?
|       |
|       +-- Check PM2 memory/CPU usage
|       +-- Check database slow query log
|       +-- Check Redis queue depth
|
+-- Health endpoint not responding?
    |
    +-- Check PM2 status --> Process crashed?
    +-- Check NGINX      --> 502 errors?
    +-- Check network    --> Firewall/DNS issues?
```

---

## Table of Contents

1. [Health Checks](#health-checks)
@@ -294,7 +360,7 @@ The command outputs a 40-character hex token.

**Error Anatomy**:

```
```text
TypeError: Cannot read properties of undefined (reading 'map')
├── Exception Type: TypeError
├── Message: Cannot read properties of undefined (reading 'map')

@@ -357,7 +423,7 @@ Logstash aggregates logs from multiple sources and forwards errors to Bugsink (A

### Architecture

```
```text
Log Sources                    Logstash                   Outputs
┌──────────────┐            ┌─────────────┐            ┌─────────────┐
│ PostgreSQL   │────────────│             │────────────│ Bugsink     │

@@ -520,7 +586,7 @@ pm2 stop flyer-crawler-api

**Healthy Process**:

```
```text
┌─────────────────────┬────┬─────────┬─────────┬───────┬────────┬─────────┬──────────┐
│ Name                │ id │ mode    │ status  │ cpu   │ mem    │ uptime  │ restarts │
├─────────────────────┼────┼─────────┼─────────┼───────┼────────┼─────────┼──────────┤

@@ -833,7 +899,7 @@ Configure alerts in your monitoring tool (UptimeRobot, Datadog, etc.):
2. Review during business hours
3. Create Gitea issue for tracking

### Quick Diagnostic Commands
### On-Call Diagnostic Commands

> **Note**: User executes these commands on the server. Claude Code provides commands but cannot run them directly.
849 docs/plans/2026-01-28-adr-024-feature-flags-implementation.md Normal file
@@ -0,0 +1,849 @@
# ADR-024 Implementation Plan: Feature Flagging Strategy

**Date**: 2026-01-28
**Type**: Technical Implementation Plan
**Related**: [ADR-024: Feature Flagging Strategy](../adr/0024-feature-flagging-strategy.md), [ADR-007: Configuration and Secrets Management](../adr/0007-configuration-and-secrets-management.md)
**Status**: Ready for Implementation

---

## Project Overview

Implement a simple, configuration-based feature flag system that integrates with the existing Zod-validated configuration in `src/config/env.ts`. The system will support both backend and frontend feature flags through environment variables, with type-safe access patterns and helper utilities.

### Key Success Criteria

1. Feature flags accessible via type-safe API on both backend and frontend
2. Zero runtime overhead when a flag is disabled (compile-time elimination where possible)
3. Consistent naming convention (environment variables and code access)
4. Graceful degradation (missing flag defaults to disabled)
5. Easy migration path to an external service (Flagsmith/LaunchDarkly) in the future
6. Full test coverage with mocking utilities

### Estimated Total Effort

| Phase                             | Estimate       |
| --------------------------------- | -------------- |
| Phase 1: Backend Infrastructure   | 3-5 hours      |
| Phase 2: Frontend Infrastructure  | 2-3 hours      |
| Phase 3: Documentation & Examples | 1-2 hours      |
| **Total**                         | **6-10 hours** |

---
## Current State Analysis
|
||||
|
||||
### Backend Configuration (`src/config/env.ts`)
|
||||
|
||||
- Zod-based schema validation at startup
|
||||
- Organized into logical groups (database, redis, auth, smtp, ai, etc.)
|
||||
- Helper exports for service availability (`isSmtpConfigured`, `isAiConfigured`, etc.)
|
||||
- Environment helpers (`isProduction`, `isTest`, `isDevelopment`)
|
||||
- Fail-fast on invalid configuration
|
||||
|
||||
### Frontend Configuration (`src/config.ts`)
|
||||
|
||||
- Uses `import.meta.env` (Vite environment variables)
|
||||
- Organized into sections (app, google, sentry)
|
||||
- Boolean parsing for string env vars
|
||||
- Type declarations in `src/vite-env.d.ts`
|
||||
|
||||
### Existing Patterns to Follow
|
||||
|
||||
```typescript
|
||||
// Backend - service availability check pattern
|
||||
export const isSmtpConfigured =
|
||||
!!config.smtp.host && !!config.smtp.user && !!config.smtp.pass;
|
||||
|
||||
// Frontend - boolean parsing pattern
|
||||
enabled: import.meta.env.VITE_SENTRY_ENABLED !== 'false',
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Task Breakdown
|
||||
|
||||
### Phase 1: Backend Feature Flag Infrastructure
|
||||
|
||||
#### [1.1] Define Feature Flag Schema in env.ts
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 30-45 minutes
|
||||
**Dependencies**: None
|
||||
**Parallelizable**: Yes
|
||||
|
||||
**Description**: Add a new `featureFlags` section to the Zod schema in `src/config/env.ts`.
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] New `featureFlagsSchema` Zod object defined
|
||||
- [ ] Schema supports boolean flags with defaults to `false` (opt-in model)
|
||||
- [ ] Schema added to main `envSchema` object
|
||||
- [ ] Type exported as part of `EnvConfig`
|
||||
|
||||
**Implementation Details**:

```typescript
// src/config/env.ts

/**
 * Feature flags configuration schema (ADR-024).
 * All flags default to false (disabled) for safety.
 * Set to 'true' in environment to enable.
 */
const featureFlagsSchema = z.object({
  // Example flags - replace with actual feature flags as needed
  newDashboard: booleanString(false), // FEATURE_NEW_DASHBOARD
  betaRecipes: booleanString(false), // FEATURE_BETA_RECIPES
  experimentalAi: booleanString(false), // FEATURE_EXPERIMENTAL_AI
  debugMode: booleanString(false), // FEATURE_DEBUG_MODE
});

// In loadEnvVars():
featureFlags: {
  newDashboard: process.env.FEATURE_NEW_DASHBOARD,
  betaRecipes: process.env.FEATURE_BETA_RECIPES,
  experimentalAi: process.env.FEATURE_EXPERIMENTAL_AI,
  debugMode: process.env.FEATURE_DEBUG_MODE,
},
```
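The schema above assumes a `booleanString` helper already exists in `env.ts`; the plan does not define it. If one needs to be added, a minimal Zod sketch could look like the following, though the real helper's name and behaviour may differ:

```typescript
import { z } from 'zod';

// Hypothetical sketch: accepts the raw string env value and coerces it to a
// boolean, falling back to the supplied default when the variable is unset.
// Only the literal string 'true' enables a flag, matching the opt-in model.
const booleanString = (defaultValue: boolean) =>
  z
    .string()
    .optional()
    .transform((value) => (value === undefined ? defaultValue : value === 'true'));
```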
**Risks/Notes**:
|
||||
|
||||
- Naming convention: `FEATURE_*` prefix for all feature flag env vars
|
||||
- Default to `false` ensures features are opt-in, preventing accidental exposure
|
||||
|
||||
---
|
||||
|
||||
#### [1.2] Create Feature Flag Service Module
|
||||
|
||||
**Complexity**: Medium
|
||||
**Estimate**: 1-2 hours
|
||||
**Dependencies**: [1.1]
|
||||
**Parallelizable**: No (depends on 1.1)
|
||||
|
||||
**Description**: Create a dedicated service module for feature flag access with helper functions.
|
||||
|
||||
**File**: `src/services/featureFlags.server.ts`
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] `isFeatureEnabled(flagName)` function for checking flags
|
||||
- [ ] `getAllFeatureFlags()` function for debugging/admin endpoints
|
||||
- [ ] Type-safe flag name parameter (union type or enum)
|
||||
- [ ] Exported helper booleans for common flags (similar to `isSmtpConfigured`)
|
||||
- [ ] Logging when feature flag is checked in development mode
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```typescript
|
||||
// src/services/featureFlags.server.ts
|
||||
import { config, isDevelopment } from '../config/env';
|
||||
import { logger } from './logger.server';
|
||||
|
||||
export type FeatureFlagName = keyof typeof config.featureFlags;
|
||||
|
||||
/**
|
||||
* Check if a feature flag is enabled.
|
||||
* @param flagName - The name of the feature flag to check
|
||||
* @returns boolean indicating if the feature is enabled
|
||||
*/
|
||||
export function isFeatureEnabled(flagName: FeatureFlagName): boolean {
|
||||
const enabled = config.featureFlags[flagName];
|
||||
|
||||
if (isDevelopment) {
|
||||
logger.debug({ flag: flagName, enabled }, 'Feature flag checked');
|
||||
}
|
||||
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all feature flags and their current states.
|
||||
* Useful for debugging and admin endpoints.
|
||||
*/
|
||||
export function getAllFeatureFlags(): Record<FeatureFlagName, boolean> {
|
||||
return { ...config.featureFlags };
|
||||
}
|
||||
|
||||
// Convenience exports for common flag checks
|
||||
export const isNewDashboardEnabled = config.featureFlags.newDashboard;
|
||||
export const isBetaRecipesEnabled = config.featureFlags.betaRecipes;
|
||||
export const isExperimentalAiEnabled = config.featureFlags.experimentalAi;
|
||||
export const isDebugModeEnabled = config.featureFlags.debugMode;
|
||||
```
|
||||
|
||||
**Risks/Notes**:
|
||||
|
||||
- Keep logging minimal to avoid performance impact
|
||||
- Convenience exports are evaluated once at startup (not dynamic)
|
||||
|
||||
---
|
||||
|
||||
#### [1.3] Add Admin Endpoint for Feature Flag Status
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 30-45 minutes
|
||||
**Dependencies**: [1.2]
|
||||
**Parallelizable**: No (depends on 1.2)
|
||||
|
||||
**Description**: Add an admin/health endpoint to view current feature flag states.
|
||||
|
||||
**File**: `src/routes/admin.routes.ts` (or `stats.routes.ts` if admin routes don't exist)
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] `GET /api/v1/admin/feature-flags` endpoint (admin-only)
|
||||
- [ ] Returns JSON object with all flags and their states
|
||||
- [ ] Requires admin authentication
|
||||
- [ ] Endpoint documented in Swagger
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```typescript
|
||||
// In appropriate routes file
|
||||
router.get('/feature-flags', requireAdmin, async (req, res) => {
|
||||
const flags = getAllFeatureFlags();
|
||||
sendSuccess(res, { flags });
|
||||
});
|
||||
```
|
||||
|
||||
**Risks/Notes**:
|
||||
|
||||
- Ensure endpoint is protected (admin-only)
|
||||
- Consider caching response if called frequently
|
||||
|
||||
---
|
||||
|
||||
#### [1.4] Backend Unit Tests
|
||||
|
||||
**Complexity**: Medium
|
||||
**Estimate**: 1-2 hours
|
||||
**Dependencies**: [1.1], [1.2]
|
||||
**Parallelizable**: Yes (can start after 1.1, in parallel with 1.3)
|
||||
|
||||
**Description**: Write unit tests for feature flag configuration and service.
|
||||
|
||||
**Files**:
|
||||
|
||||
- `src/config/env.test.ts` (add feature flag tests)
|
||||
- `src/services/featureFlags.server.test.ts` (new file)
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] Test default values (all false)
|
||||
- [ ] Test parsing 'true'/'false' strings
|
||||
- [ ] Test `isFeatureEnabled()` function
|
||||
- [ ] Test `getAllFeatureFlags()` function
|
||||
- [ ] Test type safety (TypeScript compile-time checks)
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```typescript
|
||||
// src/config/env.test.ts - add to existing file
|
||||
describe('featureFlags configuration', () => {
|
||||
it('should default all feature flags to false', async () => {
|
||||
setValidEnv();
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.featureFlags.newDashboard).toBe(false);
|
||||
expect(config.featureFlags.betaRecipes).toBe(false);
|
||||
});
|
||||
|
||||
it('should parse FEATURE_NEW_DASHBOARD as true when set', async () => {
|
||||
setValidEnv({ FEATURE_NEW_DASHBOARD: 'true' });
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.featureFlags.newDashboard).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
// src/services/featureFlags.server.test.ts - new file
|
||||
describe('featureFlags service', () => {
|
||||
describe('isFeatureEnabled', () => {
|
||||
it('should return false for disabled flags', () => {
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(false);
|
||||
});
|
||||
|
||||
// ... more tests
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 2: Frontend Feature Flag Infrastructure
|
||||
|
||||
#### [2.1] Add Frontend Feature Flag Config
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 30-45 minutes
|
||||
**Dependencies**: None (can run in parallel with Phase 1)
|
||||
**Parallelizable**: Yes
|
||||
|
||||
**Description**: Add feature flags to the frontend config module.
|
||||
|
||||
**Files**:
|
||||
|
||||
- `src/config.ts` - Add featureFlags section
|
||||
- `src/vite-env.d.ts` - Add type declarations
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] Feature flags section added to `src/config.ts`
|
||||
- [ ] TypeScript declarations updated in `vite-env.d.ts`
|
||||
- [ ] Boolean parsing consistent with existing pattern
|
||||
- [ ] Default to false when env var not set
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```typescript
|
||||
// src/config.ts
|
||||
const config = {
|
||||
// ... existing sections ...
|
||||
|
||||
/**
|
||||
* Feature flags for conditional feature rendering (ADR-024).
|
||||
* All flags default to false (disabled) when not explicitly set.
|
||||
*/
|
||||
featureFlags: {
|
||||
newDashboard: import.meta.env.VITE_FEATURE_NEW_DASHBOARD === 'true',
|
||||
betaRecipes: import.meta.env.VITE_FEATURE_BETA_RECIPES === 'true',
|
||||
experimentalAi: import.meta.env.VITE_FEATURE_EXPERIMENTAL_AI === 'true',
|
||||
debugMode: import.meta.env.VITE_FEATURE_DEBUG_MODE === 'true',
|
||||
},
|
||||
};
|
||||
|
||||
// src/vite-env.d.ts
|
||||
interface ImportMetaEnv {
|
||||
// ... existing declarations ...
|
||||
readonly VITE_FEATURE_NEW_DASHBOARD?: string;
|
||||
readonly VITE_FEATURE_BETA_RECIPES?: string;
|
||||
readonly VITE_FEATURE_EXPERIMENTAL_AI?: string;
|
||||
readonly VITE_FEATURE_DEBUG_MODE?: string;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### [2.2] Create useFeatureFlag React Hook
|
||||
|
||||
**Complexity**: Medium
|
||||
**Estimate**: 1-1.5 hours
|
||||
**Dependencies**: [2.1]
|
||||
**Parallelizable**: No (depends on 2.1)
|
||||
|
||||
**Description**: Create a React hook for checking feature flags in components.
|
||||
|
||||
**File**: `src/hooks/useFeatureFlag.ts`
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] `useFeatureFlag(flagName)` hook returns boolean
|
||||
- [ ] Type-safe flag name parameter
|
||||
- [ ] Memoized to prevent unnecessary re-renders
|
||||
- [ ] Optional `FeatureFlag` component for conditional rendering
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```typescript
|
||||
// src/hooks/useFeatureFlag.ts
|
||||
import { useMemo } from 'react';
|
||||
import config from '../config';
|
||||
|
||||
export type FeatureFlagName = keyof typeof config.featureFlags;
|
||||
|
||||
/**
|
||||
* Hook to check if a feature flag is enabled.
|
||||
*
|
||||
* @param flagName - The name of the feature flag to check
|
||||
* @returns boolean indicating if the feature is enabled
|
||||
*
|
||||
* @example
|
||||
* const isNewDashboard = useFeatureFlag('newDashboard');
|
||||
* if (isNewDashboard) {
|
||||
* return <NewDashboard />;
|
||||
* }
|
||||
*/
|
||||
export function useFeatureFlag(flagName: FeatureFlagName): boolean {
|
||||
return useMemo(() => config.featureFlags[flagName], [flagName]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all feature flags (useful for debugging).
|
||||
*/
|
||||
export function useAllFeatureFlags(): Record<FeatureFlagName, boolean> {
|
||||
return useMemo(() => ({ ...config.featureFlags }), []);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### [2.3] Create FeatureFlag Component
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 30-45 minutes
|
||||
**Dependencies**: [2.2]
|
||||
**Parallelizable**: No (depends on 2.2)
|
||||
|
||||
**Description**: Create a declarative component for feature flag conditional rendering.
|
||||
|
||||
**File**: `src/components/FeatureFlag.tsx`
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] `<FeatureFlag name="flagName">` component
|
||||
- [ ] Children rendered only when flag is enabled
|
||||
- [ ] Optional `fallback` prop for disabled state
|
||||
- [ ] TypeScript-enforced flag names
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```typescript
|
||||
// src/components/FeatureFlag.tsx
|
||||
import { ReactNode } from 'react';
|
||||
import { useFeatureFlag, FeatureFlagName } from '../hooks/useFeatureFlag';
|
||||
|
||||
interface FeatureFlagProps {
|
||||
/** The name of the feature flag to check */
|
||||
name: FeatureFlagName;
|
||||
/** Content to render when feature is enabled */
|
||||
children: ReactNode;
|
||||
/** Optional content to render when feature is disabled */
|
||||
fallback?: ReactNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Conditionally renders children based on feature flag state.
|
||||
*
|
||||
* @example
|
||||
* <FeatureFlag name="newDashboard" fallback={<OldDashboard />}>
|
||||
* <NewDashboard />
|
||||
* </FeatureFlag>
|
||||
*/
|
||||
export function FeatureFlag({ name, children, fallback = null }: FeatureFlagProps) {
|
||||
const isEnabled = useFeatureFlag(name);
|
||||
return <>{isEnabled ? children : fallback}</>;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### [2.4] Frontend Unit Tests
|
||||
|
||||
**Complexity**: Medium
|
||||
**Estimate**: 1-1.5 hours
|
||||
**Dependencies**: [2.1], [2.2], [2.3]
|
||||
**Parallelizable**: No (depends on previous frontend tasks)
|
||||
|
||||
**Description**: Write unit tests for frontend feature flag utilities.
|
||||
|
||||
**Files**:
|
||||
|
||||
- `src/config.test.ts` (add feature flag tests)
|
||||
- `src/hooks/useFeatureFlag.test.ts` (new file)
|
||||
- `src/components/FeatureFlag.test.tsx` (new file)
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] Test config structure includes featureFlags
|
||||
- [ ] Test default values (all false)
|
||||
- [ ] Test hook returns correct values
|
||||
- [ ] Test component renders/hides children correctly
|
||||
- [ ] Test fallback rendering
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```typescript
|
||||
// src/hooks/useFeatureFlag.test.ts
|
||||
import { renderHook } from '@testing-library/react';
|
||||
import { useFeatureFlag, useAllFeatureFlags } from './useFeatureFlag';
|
||||
|
||||
describe('useFeatureFlag', () => {
|
||||
it('should return false for disabled flags', () => {
|
||||
const { result } = renderHook(() => useFeatureFlag('newDashboard'));
|
||||
expect(result.current).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// src/components/FeatureFlag.test.tsx
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { FeatureFlag } from './FeatureFlag';
|
||||
|
||||
describe('FeatureFlag', () => {
|
||||
it('should not render children when flag is disabled', () => {
|
||||
render(
|
||||
<FeatureFlag name="newDashboard">
|
||||
<div data-testid="new-feature">New Feature</div>
|
||||
</FeatureFlag>
|
||||
);
|
||||
expect(screen.queryByTestId('new-feature')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render fallback when flag is disabled', () => {
|
||||
render(
|
||||
<FeatureFlag name="newDashboard" fallback={<div>Old Feature</div>}>
|
||||
<div>New Feature</div>
|
||||
</FeatureFlag>
|
||||
);
|
||||
expect(screen.getByText('Old Feature')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 3: Documentation & Integration
|
||||
|
||||
#### [3.1] Update ADR-024 with Implementation Status
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 30 minutes
|
||||
**Dependencies**: [1.1], [1.2], [2.1], [2.2]
|
||||
**Parallelizable**: Yes (can be done after core implementation)
|
||||
|
||||
**Description**: Update ADR-024 to mark it as implemented and add implementation details.
|
||||
|
||||
**File**: `docs/adr/0024-feature-flagging-strategy.md`
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] Status changed from "Proposed" to "Accepted"
|
||||
- [ ] Implementation status section added
|
||||
- [ ] Key files documented
|
||||
- [ ] Usage examples included
|
||||
|
||||
---
|
||||
|
||||
#### [3.2] Update Environment Documentation
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 30 minutes
|
||||
**Dependencies**: [1.1], [2.1]
|
||||
**Parallelizable**: Yes
|
||||
|
||||
**Description**: Add feature flag environment variables to documentation.
|
||||
|
||||
**Files**:
|
||||
|
||||
- `docs/getting-started/ENVIRONMENT.md`
|
||||
- `.env.example`
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] Feature flag variables documented in ENVIRONMENT.md
|
||||
- [ ] New section "Feature Flags" added
|
||||
- [ ] `.env.example` updated with commented feature flag examples
|
||||
|
||||
**Implementation Details**:
|
||||
|
||||
```bash
|
||||
# .env.example addition
|
||||
# ===================
|
||||
# Feature Flags (ADR-024)
|
||||
# ===================
|
||||
# All feature flags default to disabled (false) when not set.
|
||||
# Set to 'true' to enable a feature.
|
||||
#
|
||||
# FEATURE_NEW_DASHBOARD=false
|
||||
# FEATURE_BETA_RECIPES=false
|
||||
# FEATURE_EXPERIMENTAL_AI=false
|
||||
# FEATURE_DEBUG_MODE=false
|
||||
#
|
||||
# Frontend equivalents (prefix with VITE_):
|
||||
# VITE_FEATURE_NEW_DASHBOARD=false
|
||||
# VITE_FEATURE_BETA_RECIPES=false
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### [3.3] Create CODE-PATTERNS Entry
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 30 minutes
|
||||
**Dependencies**: All implementation tasks
|
||||
**Parallelizable**: Yes
|
||||
|
||||
**Description**: Add feature flag usage patterns to CODE-PATTERNS.md.
|
||||
|
||||
**File**: `docs/development/CODE-PATTERNS.md`
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] Feature flag section added with examples
|
||||
- [ ] Backend usage pattern documented
|
||||
- [ ] Frontend usage pattern documented
|
||||
- [ ] Testing pattern documented
|
||||
|
||||
---
|
||||
|
||||
#### [3.4] Update CLAUDE.md Quick Reference
|
||||
|
||||
**Complexity**: Low
|
||||
**Estimate**: 15 minutes
|
||||
**Dependencies**: All implementation tasks
|
||||
**Parallelizable**: Yes
|
||||
|
||||
**Description**: Add feature flags to the CLAUDE.md quick reference tables.
|
||||
|
||||
**File**: `CLAUDE.md`
|
||||
|
||||
**Acceptance Criteria**:
|
||||
|
||||
- [ ] Feature flags added to "Key Patterns" table
|
||||
- [ ] Reference to featureFlags service added
|
||||
|
||||
---
|
||||
|
||||
## Implementation Sequence
|
||||
|
||||
### Phase 1 (Backend) - Can Start Immediately
|
||||
|
||||
```text
|
||||
[1.1] Schema ──────────┬──> [1.2] Service ──> [1.3] Admin Endpoint
|
||||
│
|
||||
└──> [1.4] Backend Tests (can start after 1.1)
|
||||
```
|
||||
|
||||
### Phase 2 (Frontend) - Can Start Immediately (Parallel with Phase 1)
|
||||
|
||||
```text
|
||||
[2.1] Config ──> [2.2] Hook ──> [2.3] Component ──> [2.4] Frontend Tests
|
||||
```
|
||||
|
||||
### Phase 3 (Documentation) - After Implementation
|
||||
|
||||
```text
|
||||
All Phase 1 & 2 Tasks ──> [3.1] ADR Update
|
||||
├──> [3.2] Env Docs
|
||||
├──> [3.3] Code Patterns
|
||||
└──> [3.4] CLAUDE.md
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Critical Path
|
||||
|
||||
The minimum path to a working feature flag system:
|
||||
|
||||
1. **[1.1] Schema** (30 min) - Required for backend
|
||||
2. **[1.2] Service** (1.5 hr) - Required for backend access
|
||||
3. **[2.1] Frontend Config** (30 min) - Required for frontend
|
||||
4. **[2.2] Hook** (1 hr) - Required for React integration
|
||||
|
||||
**Critical path duration**: ~3.5 hours
|
||||
|
||||
Non-critical but recommended:
|
||||
|
||||
- Admin endpoint (debugging)
|
||||
- FeatureFlag component (developer convenience)
|
||||
- Tests (quality assurance)
|
||||
- Documentation (maintainability)
|
||||
|
||||
---
|
||||
|
||||
## Scope Recommendations
|
||||
|
||||
### MVP (Minimum Viable Implementation)
|
||||
|
||||
Include in initial implementation:
|
||||
|
||||
- [1.1] Backend schema with 2-3 example flags
|
||||
- [1.2] Feature flag service
|
||||
- [2.1] Frontend config
|
||||
- [2.2] useFeatureFlag hook
|
||||
- [1.4] Core backend tests
|
||||
- [2.4] Core frontend tests
|
||||
|
||||
### Enhancements (Future Iterations)
|
||||
|
||||
Defer to follow-up work:
|
||||
|
||||
- Admin endpoint for flag visibility
|
||||
- FeatureFlag component (nice-to-have)
|
||||
- Dynamic flag updates without restart (requires external service)
|
||||
- User-specific flags (A/B testing)
|
||||
- Flag analytics/usage tracking
|
||||
- Gradual rollout percentages
|
||||
|
||||
### Explicitly Out of Scope
|
||||
|
||||
- Integration with Flagsmith/LaunchDarkly (future ADR)
|
||||
- Database-stored flags (requires schema changes)
|
||||
- Real-time flag updates (WebSocket/SSE)
|
||||
- Flag inheritance/hierarchy
|
||||
- Flag audit logging
|
||||
|
||||
---
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Backend Tests
|
||||
|
||||
| Test Type | Coverage Target | Location |
|
||||
| ----------------- | ---------------------------------------- | ------------------------------------------ |
|
||||
| Schema validation | Parse true/false, defaults | `src/config/env.test.ts` |
|
||||
| Service functions | `isFeatureEnabled`, `getAllFeatureFlags` | `src/services/featureFlags.server.test.ts` |
|
||||
| Integration | Admin endpoint (if added) | `src/routes/admin.routes.test.ts` |
|
||||
|
||||
### Frontend Tests
|
||||
|
||||
| Test Type | Coverage Target | Location |
|
||||
| ------------------- | --------------------------- | ------------------------------------- |
|
||||
| Config structure | featureFlags section exists | `src/config.test.ts` |
|
||||
| Hook behavior | Returns correct values | `src/hooks/useFeatureFlag.test.ts` |
|
||||
| Component rendering | Conditional children | `src/components/FeatureFlag.test.tsx` |
|
||||
|
||||
### Mocking Pattern for Tests
|
||||
|
||||
```typescript
|
||||
// Backend - reset modules to test different flag states
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
process.env.FEATURE_NEW_DASHBOARD = 'true';
|
||||
});
|
||||
|
||||
// Frontend - mock config module
|
||||
vi.mock('../config', () => ({
|
||||
default: {
|
||||
featureFlags: {
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
},
|
||||
},
|
||||
}));
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Risk Assessment
|
||||
|
||||
| Risk | Impact | Likelihood | Mitigation |
|
||||
| ------------------------------------------- | ------ | ---------- | ------------------------------------------------------------- |
|
||||
| Flag state inconsistency (backend/frontend) | Medium | Low | Use same env var naming, document sync requirements |
|
||||
| Performance impact from flag checks | Low | Low | Flags cached at startup, no runtime DB calls |
|
||||
| Stale flags after deployment | Medium | Medium | Document restart requirement, consider future dynamic loading |
|
||||
| Feature creep (too many flags) | Medium | Medium | Require ADR for new flags, sunset policy |
|
||||
| Missing flag causes crash | High | Low | Default to false, graceful degradation |
|
||||
|
||||
---
|
||||
|
||||
## Files to Create

| File                                        | Purpose                      |
| ------------------------------------------- | ---------------------------- |
| `src/services/featureFlags.server.ts`       | Backend feature flag service |
| `src/services/featureFlags.server.test.ts`  | Backend tests                |
| `src/hooks/useFeatureFlag.ts`               | React hook for flag access   |
| `src/hooks/useFeatureFlag.test.ts`          | Hook tests                   |
| `src/components/FeatureFlag.tsx`            | Declarative flag component   |
| `src/components/FeatureFlag.test.tsx`       | Component tests              |

## Files to Modify

| File                                         | Changes                            |
| -------------------------------------------- | ---------------------------------- |
| `src/config/env.ts`                          | Add featureFlagsSchema and loading |
| `src/config/env.test.ts`                     | Add feature flag tests             |
| `src/config.ts`                              | Add featureFlags section           |
| `src/config.test.ts`                         | Add feature flag tests             |
| `src/vite-env.d.ts`                          | Add `VITE_FEATURE_*` declarations  |
| `.env.example`                               | Add feature flag examples          |
| `docs/adr/0024-feature-flagging-strategy.md` | Update status and details          |
| `docs/getting-started/ENVIRONMENT.md`        | Document feature flag vars         |
| `docs/development/CODE-PATTERNS.md`          | Add usage patterns                 |
| `CLAUDE.md`                                  | Add to quick reference             |
---
|
||||
|
||||
## Verification Commands
|
||||
|
||||
After implementation, run these commands in the dev container:
|
||||
|
||||
```bash
|
||||
# Type checking
|
||||
podman exec -it flyer-crawler-dev npm run type-check
|
||||
|
||||
# Backend unit tests
|
||||
podman exec -it flyer-crawler-dev npm run test:unit -- --grep "featureFlag"
|
||||
|
||||
# Frontend tests (includes hook and component tests)
|
||||
podman exec -it flyer-crawler-dev npm run test:unit -- --grep "FeatureFlag"
|
||||
|
||||
# Full test suite
|
||||
podman exec -it flyer-crawler-dev npm test
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Example Usage (Post-Implementation)
|
||||
|
||||
### Backend Route Handler
|
||||
|
||||
```typescript
|
||||
// src/routes/flyers.routes.ts
|
||||
import { isFeatureEnabled } from '../services/featureFlags.server';
|
||||
|
||||
router.get('/dashboard', async (req, res) => {
|
||||
if (isFeatureEnabled('newDashboard')) {
|
||||
// New dashboard logic
|
||||
return sendSuccess(res, { version: 'v2', data: await getNewDashboardData() });
|
||||
}
|
||||
// Legacy dashboard
|
||||
return sendSuccess(res, { version: 'v1', data: await getLegacyDashboardData() });
|
||||
});
|
||||
```
|
||||
|
||||
### React Component
|
||||
|
||||
```tsx
|
||||
// src/pages/Dashboard.tsx
|
||||
import { FeatureFlag } from '../components/FeatureFlag';
|
||||
import { useFeatureFlag } from '../hooks/useFeatureFlag';
|
||||
|
||||
// Option 1: Declarative component
|
||||
function Dashboard() {
|
||||
return (
|
||||
<FeatureFlag name="newDashboard" fallback={<LegacyDashboard />}>
|
||||
<NewDashboard />
|
||||
</FeatureFlag>
|
||||
);
|
||||
}
|
||||
|
||||
// Option 2: Hook for logic
|
||||
function DashboardWithLogic() {
|
||||
const isNewDashboard = useFeatureFlag('newDashboard');
|
||||
|
||||
useEffect(() => {
|
||||
if (isNewDashboard) {
|
||||
analytics.track('new_dashboard_viewed');
|
||||
}
|
||||
}, [isNewDashboard]);
|
||||
|
||||
return isNewDashboard ? <NewDashboard /> : <LegacyDashboard />;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Implementation Notes

### Naming Convention

| Context          | Pattern                   | Example                            |
| ---------------- | ------------------------- | ---------------------------------- |
| Backend env var  | `FEATURE_SNAKE_CASE`      | `FEATURE_NEW_DASHBOARD`            |
| Frontend env var | `VITE_FEATURE_SNAKE_CASE` | `VITE_FEATURE_NEW_DASHBOARD`       |
| Config property  | `camelCase`               | `config.featureFlags.newDashboard` |
| Type/Hook param  | `camelCase`               | `isFeatureEnabled('newDashboard')` |

### Flag Lifecycle

1. **Adding a flag**: Add to both schemas, set default to `false`, document
2. **Enabling a flag**: Set env var to `'true'`, restart application
3. **Removing a flag**: Remove conditional code first, then remove flag from schemas
4. **Sunset policy**: Flags should be removed within 3 months of full rollout

---

Last updated: 2026-01-28
@@ -2,6 +2,17 @@
|
||||
|
||||
The **ai-usage** subagent specializes in LLM APIs (Gemini, Claude), prompt engineering, and AI-powered features in the Flyer Crawler project.
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Aspect | Details |
|
||||
| ------------------ | ----------------------------------------------------------------------------------- |
|
||||
| **Primary Use** | Gemini API integration, prompt engineering, AI extraction |
|
||||
| **Key Files** | `src/services/aiService.server.ts`, `src/services/flyerProcessingService.server.ts` |
|
||||
| **Key ADRs** | ADR-041 (AI Integration), ADR-046 (Image Processing) |
|
||||
| **API Key Env** | `VITE_GOOGLE_GENAI_API_KEY` (prod), `VITE_GOOGLE_GENAI_API_KEY_TEST` (test) |
|
||||
| **Error Handling** | Rate limits (429), JSON parse errors, timeout handling |
|
||||
| **Delegate To** | `coder` (implementation), `testwriter` (tests), `integrations-specialist` |
|
||||
|
||||
## When to Use
|
||||
|
||||
Use the **ai-usage** subagent when you need to:
|
||||
@@ -295,6 +306,9 @@ const fixtureResponse = await fs.readFile('fixtures/gemini-response.json');
|
||||
## Related Documentation
|
||||
|
||||
- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
|
||||
- [CODER-GUIDE.md](./CODER-GUIDE.md) - For implementing AI features
|
||||
- [TESTER-GUIDE.md](./TESTER-GUIDE.md) - Testing AI features
|
||||
- [INTEGRATIONS-GUIDE.md](./INTEGRATIONS-GUIDE.md) - External API patterns
|
||||
- [../adr/0041-ai-gemini-integration-architecture.md](../adr/0041-ai-gemini-integration-architecture.md) - AI integration ADR
|
||||
- [../adr/0046-image-processing-pipeline.md](../adr/0046-image-processing-pipeline.md) - Image processing
|
||||
- [CODER-GUIDE.md](./CODER-GUIDE.md) - For implementing AI features
|
||||
- [../getting-started/ENVIRONMENT.md](../getting-started/ENVIRONMENT.md) - Environment configuration
|
||||
|
||||
@@ -2,6 +2,17 @@
|
||||
|
||||
The **coder** subagent is your primary tool for writing and modifying production Node.js/TypeScript code in the Flyer Crawler project. This guide explains how to work effectively with the coder subagent.
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Aspect | Details |
|
||||
| ---------------- | ------------------------------------------------------------------------ |
|
||||
| **Primary Use** | Write/modify production TypeScript code |
|
||||
| **Key Files** | `src/routes/*.routes.ts`, `src/services/**/*.ts`, `src/components/*.tsx` |
|
||||
| **Key ADRs** | ADR-034 (Repository), ADR-035 (Services), ADR-028 (API Response) |
|
||||
| **Test Command** | `podman exec -it flyer-crawler-dev npm run test:unit` |
|
||||
| **Type Check** | `podman exec -it flyer-crawler-dev npm run type-check` |
|
||||
| **Delegate To** | `db-dev` (database), `frontend-specialist` (UI), `testwriter` (tests) |
|
||||
|
||||
## When to Use the Coder Subagent
|
||||
|
||||
Use the coder subagent when you need to:
|
||||
@@ -307,6 +318,8 @@ error classes for all database operations"
|
||||
|
||||
- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
|
||||
- [TESTER-GUIDE.md](./TESTER-GUIDE.md) - Testing strategies
|
||||
- [DATABASE-GUIDE.md](./DATABASE-GUIDE.md) - Database development workflows
|
||||
- [../adr/0034-repository-pattern-standards.md](../adr/0034-repository-pattern-standards.md) - Repository patterns
|
||||
- [../adr/0035-service-layer-architecture.md](../adr/0035-service-layer-architecture.md) - Service layer architecture
|
||||
- [../adr/0028-api-response-standardization.md](../adr/0028-api-response-standardization.md) - API response patterns
|
||||
- [../development/CODE-PATTERNS.md](../development/CODE-PATTERNS.md) - Code patterns reference
|
||||
|
||||
@@ -5,6 +5,17 @@ This guide covers two database-focused subagents:
|
||||
- **db-dev**: Database development - schemas, queries, migrations, optimization
|
||||
- **db-admin**: Database administration - PostgreSQL/Redis admin, security, backups
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Aspect | db-dev | db-admin |
|
||||
| ---------------- | -------------------------------------------- | ------------------------------------------ |
|
||||
| **Primary Use** | Schemas, queries, migrations | Performance tuning, backups, security |
|
||||
| **Key Files** | `src/services/db/*.db.ts`, `sql/migrations/` | `postgresql.conf`, `pg_hba.conf` |
|
||||
| **Key ADRs** | ADR-034 (Repository), ADR-002 (Transactions) | ADR-019 (Backups), ADR-050 (Observability) |
|
||||
| **Test Command** | `podman exec -it flyer-crawler-dev npm test` | N/A |
|
||||
| **MCP Tool** | `mcp__devdb__query` | SSH to production |
|
||||
| **Delegate To** | `coder` (service layer), `db-admin` (perf) | `devops` (infrastructure) |
|
||||
|
||||
## Understanding the Difference
|
||||
|
||||
| Aspect | db-dev | db-admin |
|
||||
@@ -412,8 +423,9 @@ This is useful for:
|
||||
|
||||
- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
|
||||
- [CODER-GUIDE.md](./CODER-GUIDE.md) - Working with the coder subagent
|
||||
- [DEVOPS-GUIDE.md](./DEVOPS-GUIDE.md) - DevOps and deployment workflows
|
||||
- [../adr/0034-repository-pattern-standards.md](../adr/0034-repository-pattern-standards.md) - Repository patterns
|
||||
- [../adr/0002-standardized-transaction-management.md](../adr/0002-standardized-transaction-management.md) - Transaction management
|
||||
- [../adr/0019-data-backup-and-recovery-strategy.md](../adr/0019-data-backup-and-recovery-strategy.md) - Backup strategy
|
||||
- [../adr/0050-postgresql-function-observability.md](../adr/0050-postgresql-function-observability.md) - Database observability
|
||||
- [../BARE-METAL-SETUP.md](../BARE-METAL-SETUP.md) - Production database setup
|
||||
- [../operations/BARE-METAL-SETUP.md](../operations/BARE-METAL-SETUP.md) - Production database setup
|
||||
|
||||
@@ -6,6 +6,17 @@ This guide covers DevOps-related subagents for deployment, infrastructure, and o
|
||||
- **infra-architect**: Resource optimization, capacity planning
|
||||
- **bg-worker**: Background jobs, PM2 workers, BullMQ queues
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Aspect | devops | infra-architect | bg-worker |
|
||||
| ---------------- | ------------------------------------------ | --------------------------- | ------------------------------- |
|
||||
| **Primary Use** | Containers, CI/CD, deployments | Resource optimization | BullMQ queues, PM2 workers |
|
||||
| **Key Files** | `compose.dev.yml`, `.gitea/workflows/` | `ecosystem.config.cjs` | `src/services/queues.server.ts` |
|
||||
| **Key ADRs** | ADR-014 (Containers), ADR-017 (CI/CD) | N/A | ADR-006 (Background Jobs) |
|
||||
| **Commands** | `podman-compose`, `pm2` | `pm2 monit`, system metrics | Redis CLI, `pm2 logs` |
|
||||
| **MCP Tools** | `mcp__podman__*` | N/A | N/A |
|
||||
| **Access Model** | Read-only on production (provide commands) | Same | Same |
|
||||
|
||||
---
|
||||
|
||||
## CRITICAL: Server Access Model
|
||||
@@ -543,8 +554,13 @@ podman exec -it flyer-crawler-dev npm test
|
||||
## Related Documentation
|
||||
|
||||
- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
|
||||
- [../BARE-METAL-SETUP.md](../BARE-METAL-SETUP.md) - Production setup guide
|
||||
- [DATABASE-GUIDE.md](./DATABASE-GUIDE.md) - Database administration
|
||||
- [SECURITY-DEBUG-GUIDE.md](./SECURITY-DEBUG-GUIDE.md) - Production debugging
|
||||
- [../operations/BARE-METAL-SETUP.md](../operations/BARE-METAL-SETUP.md) - Production setup guide
|
||||
- [../operations/DEPLOYMENT.md](../operations/DEPLOYMENT.md) - Deployment guide
|
||||
- [../operations/MONITORING.md](../operations/MONITORING.md) - Monitoring guide
|
||||
- [../development/DEV-CONTAINER.md](../development/DEV-CONTAINER.md) - Dev container guide
|
||||
- [../adr/0014-containerization-and-deployment-strategy.md](../adr/0014-containerization-and-deployment-strategy.md) - Containerization ADR
|
||||
- [../adr/0006-background-job-processing-and-task-queues.md](../adr/0006-background-job-processing-and-task-queues.md) - Background jobs ADR
|
||||
- [../adr/0017-ci-cd-and-branching-strategy.md](../adr/0017-ci-cd-and-branching-strategy.md) - CI/CD strategy
|
||||
- [../adr/0053-worker-health-checks.md](../adr/0053-worker-health-checks.md) - Worker health checks
|
||||
- [../adr/0053-worker-health-checks-and-monitoring.md](../adr/0053-worker-health-checks-and-monitoring.md) - Worker health checks
|
||||
|
||||
@@ -7,6 +7,15 @@ This guide covers documentation-focused subagents:
|
||||
- **planner**: Feature breakdown, roadmaps, scope management
|
||||
- **product-owner**: Requirements, user stories, backlog prioritization
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Aspect | documenter | describer-for-ai | planner | product-owner |
|
||||
| --------------- | -------------------- | ------------------------ | --------------------- | ---------------------- |
|
||||
| **Primary Use** | User docs, API specs | ADRs, technical specs | Feature breakdown | User stories, backlog |
|
||||
| **Key Files** | `docs/`, API docs | `docs/adr/`, `CLAUDE.md` | `docs/plans/` | Issue tracker |
|
||||
| **Output** | Markdown guides | ADRs, context docs | Task lists, roadmaps | User stories, criteria |
|
||||
| **Delegate To** | `coder` (implement) | `documenter` (user docs) | `coder` (build tasks) | `planner` (breakdown) |
|
||||
|
||||
## The documenter Subagent
|
||||
|
||||
### When to Use
|
||||
@@ -437,6 +446,8 @@ Include dates on documentation that may become stale:
|
||||
## Related Documentation
|
||||
|
||||
- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
|
||||
- [CODER-GUIDE.md](./CODER-GUIDE.md) - For implementing documented features
|
||||
- [../adr/index.md](../adr/index.md) - ADR index
|
||||
- [../TESTING.md](../TESTING.md) - Testing guide
|
||||
- [../development/TESTING.md](../development/TESTING.md) - Testing guide
|
||||
- [../development/CODE-PATTERNS.md](../development/CODE-PATTERNS.md) - Code patterns reference
|
||||
- [../../CLAUDE.md](../../CLAUDE.md) - AI instructions
|
||||
|
||||
@@ -5,6 +5,17 @@ This guide covers frontend-focused subagents:
|
||||
- **frontend-specialist**: UI components, Neo-Brutalism, Core Web Vitals, accessibility
|
||||
- **uiux-designer**: UI/UX decisions, component design, user experience
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Aspect | frontend-specialist | uiux-designer |
|
||||
| ----------------- | ---------------------------------------------- | -------------------------------------- |
|
||||
| **Primary Use** | React components, performance, accessibility | Design decisions, user flows |
|
||||
| **Key Files** | `src/components/`, `src/features/` | Design specs, mockups |
|
||||
| **Key ADRs** | ADR-012 (Design System), ADR-044 (Feature Org) | ADR-012 (Design System) |
|
||||
| **Design System** | Neo-Brutalism (bold borders, high contrast) | Same |
|
||||
| **State Mgmt** | TanStack Query (server), Zustand (client) | N/A |
|
||||
| **Delegate To** | `coder` (backend), `tester` (test coverage) | `frontend-specialist` (implementation) |
|
||||
|
||||
## The frontend-specialist Subagent
|
||||
|
||||
### When to Use
|
||||
@@ -406,7 +417,8 @@ const handleSelect = useCallback((id: string) => {
|
||||
|
||||
- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
|
||||
- [CODER-GUIDE.md](./CODER-GUIDE.md) - For implementing features
|
||||
- [../DESIGN_TOKENS.md](../DESIGN_TOKENS.md) - Design token reference
|
||||
- [TESTER-GUIDE.md](./TESTER-GUIDE.md) - Component testing patterns
|
||||
- [../development/DESIGN_TOKENS.md](../development/DESIGN_TOKENS.md) - Design token reference
|
||||
- [../adr/0012-frontend-component-library-and-design-system.md](../adr/0012-frontend-component-library-and-design-system.md) - Design system ADR
|
||||
- [../adr/0005-frontend-state-management-and-server-cache-strategy.md](../adr/0005-frontend-state-management-and-server-cache-strategy.md) - State management ADR
|
||||
- [../adr/0044-frontend-feature-organization.md](../adr/0044-frontend-feature-organization.md) - Feature organization
|
||||
|
||||
396 docs/subagents/INTEGRATIONS-GUIDE.md Normal file
@@ -0,0 +1,396 @@
# Integrations Subagent Guide

The **integrations-specialist** subagent handles third-party services, webhooks, and external API integrations in the Flyer Crawler project.

## Quick Reference

| Aspect          | Details                                                                      |
| --------------- | ---------------------------------------------------------------------------- |
| **Primary Use** | External APIs, webhooks, OAuth, third-party services                         |
| **Key Files**   | `src/services/external/`, `src/routes/webhooks.routes.ts`                    |
| **Key ADRs**    | ADR-041 (AI Integration), ADR-016 (API Security), ADR-048 (Auth)             |
| **MCP Tools**   | `mcp__gitea-projectium__*`, `mcp__bugsink__*`                                 |
| **Security**    | API key storage, webhook signatures, OAuth state param                       |
| **Delegate To** | `coder` (implementation), `security-engineer` (review), `ai-usage` (Gemini)  |

## When to Use

Use the **integrations-specialist** subagent when you need to:

- Integrate with external APIs (OAuth, REST, GraphQL)
- Implement webhook handlers
- Configure third-party services
- Debug external service connectivity
- Handle API authentication flows
- Manage external service rate limits

## What integrations-specialist Knows

The integrations-specialist subagent understands:

- OAuth 2.0 flows (authorization code, client credentials)
- REST API integration patterns
- Webhook security (signature verification)
- External service error handling
- Rate limiting and retry strategies
- API key management

## Current Integrations

| Service       | Purpose                | Integration Type | Key Files                          |
| ------------- | ---------------------- | ---------------- | ---------------------------------- |
| Google Gemini | AI flyer extraction    | REST API         | `src/services/aiService.server.ts` |
| Bugsink       | Error tracking         | REST API         | MCP: `mcp__bugsink__*`             |
| Gitea         | Repository and CI/CD   | REST API         | MCP: `mcp__gitea-projectium__*`    |
| Redis         | Caching and job queues | Native client    | `src/services/redis.server.ts`     |
| PostgreSQL    | Primary database       | Native client    | `src/services/db/pool.db.ts`       |
## Example Requests
|
||||
|
||||
### Adding External API Integration
|
||||
|
||||
```
|
||||
"Use integrations-specialist to integrate with the Store API
|
||||
to automatically fetch store location data. Include proper
|
||||
error handling, rate limiting, and caching."
|
||||
```
|
||||
|
||||
### OAuth Implementation
|
||||
|
||||
```
|
||||
"Use integrations-specialist to implement Google OAuth for
|
||||
user authentication. Include token refresh handling and
|
||||
session management."
|
||||
```
|
||||
|
||||
### Webhook Handler
|
||||
|
||||
```
|
||||
"Use integrations-specialist to create a webhook handler for
|
||||
receiving store inventory updates. Include signature verification
|
||||
and idempotency handling."
|
||||
```
|
||||
|
||||
### Debugging External Service Issues
|
||||
|
||||
```
|
||||
"Use integrations-specialist to debug why the Gemini API calls
|
||||
are intermittently failing with timeout errors. Check connection
|
||||
pooling, retry logic, and error handling."
|
||||
```
|
||||
|
||||
## Integration Patterns
|
||||
|
||||
### REST API Client Pattern
|
||||
|
||||
```typescript
|
||||
// src/services/external/storeApi.server.ts
|
||||
import { env } from '@/config/env';
|
||||
import { log } from '@/services/logger.server';
|
||||
|
||||
interface StoreApiConfig {
|
||||
baseUrl: string;
|
||||
apiKey: string;
|
||||
timeout: number;
|
||||
}
|
||||
|
||||
class StoreApiClient {
|
||||
private config: StoreApiConfig;
|
||||
|
||||
constructor(config: StoreApiConfig) {
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
async getStoreLocations(storeId: string): Promise<StoreLocation[]> {
|
||||
const url = `${this.config.baseUrl}/stores/${storeId}/locations`;
|
||||
|
||||
try {
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.config.apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
signal: AbortSignal.timeout(this.config.timeout),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new ExternalApiError(`Store API error: ${response.status}`, response.status);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
} catch (error) {
|
||||
log.error({ error, storeId }, 'Failed to fetch store locations');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const storeApiClient = new StoreApiClient({
|
||||
baseUrl: env.STORE_API_BASE_URL,
|
||||
apiKey: env.STORE_API_KEY,
|
||||
timeout: 10000,
|
||||
});
|
||||
```
|
||||
|
||||
### Webhook Handler Pattern

```typescript
// src/routes/webhooks.routes.ts
import { Router } from 'express';
import crypto from 'crypto';
import { env } from '@/config/env';

const router = Router();

function verifyWebhookSignature(payload: string, signature: string, secret: string): boolean {
  const expected = crypto.createHmac('sha256', secret).update(payload).digest('hex');
  return crypto.timingSafeEqual(Buffer.from(signature), Buffer.from(`sha256=${expected}`));
}

router.post('/store-updates', async (req, res, next) => {
  try {
    const signature = req.headers['x-webhook-signature'] as string;
    const payload = JSON.stringify(req.body);

    if (!verifyWebhookSignature(payload, signature, env.WEBHOOK_SECRET)) {
      return res.status(401).json({ error: 'Invalid signature' });
    }

    // Process webhook with idempotency check
    const eventId = req.headers['x-event-id'] as string;
    const alreadyProcessed = await checkIdempotencyKey(eventId);

    if (alreadyProcessed) {
      return res.status(200).json({ status: 'already_processed' });
    }

    await processStoreUpdate(req.body);
    await markEventProcessed(eventId);

    res.status(200).json({ status: 'processed' });
  } catch (error) {
    next(error);
  }
});
```
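The handler above calls `checkIdempotencyKey` and `markEventProcessed` without showing them. A minimal sketch of how they might be backed by the shared Redis client is below; the import path and the ioredis-style `set` signature are assumptions, not copied from the project:

```typescript
// Hypothetical sketch of the idempotency helpers referenced above.
import { redis } from '@/services/redis.server'; // assumed export of the shared Redis client

const IDEMPOTENCY_TTL_SECONDS = 60 * 60 * 24; // remember processed event IDs for 24 hours

export async function checkIdempotencyKey(eventId: string): Promise<boolean> {
  // EXISTS returns 1 when the key is present, i.e. the event was already handled
  return (await redis.exists(`webhook:event:${eventId}`)) === 1;
}

export async function markEventProcessed(eventId: string): Promise<void> {
  // SET with an expiry so old event IDs are cleaned up automatically
  await redis.set(`webhook:event:${eventId}`, '1', 'EX', IDEMPOTENCY_TTL_SECONDS);
}
```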
### OAuth Flow Pattern
|
||||
|
||||
```typescript
|
||||
// src/services/oauth/googleOAuth.server.ts
|
||||
import { OAuth2Client } from 'google-auth-library';
|
||||
import { env } from '@/config/env';
|
||||
|
||||
const oauth2Client = new OAuth2Client(
|
||||
env.GOOGLE_CLIENT_ID,
|
||||
env.GOOGLE_CLIENT_SECRET,
|
||||
env.GOOGLE_REDIRECT_URI,
|
||||
);
|
||||
|
||||
export function getAuthorizationUrl(): string {
|
||||
return oauth2Client.generateAuthUrl({
|
||||
access_type: 'offline',
|
||||
scope: ['email', 'profile'],
|
||||
prompt: 'consent',
|
||||
});
|
||||
}
|
||||
|
||||
export async function exchangeCodeForTokens(code: string) {
|
||||
const { tokens } = await oauth2Client.getToken(code);
|
||||
return tokens;
|
||||
}
|
||||
|
||||
export async function refreshAccessToken(refreshToken: string) {
|
||||
oauth2Client.setCredentials({ refresh_token: refreshToken });
|
||||
const { credentials } = await oauth2Client.refreshAccessToken();
|
||||
return credentials;
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling for External Services

### Custom Error Classes

```typescript
// src/services/external/errors.ts
export class ExternalApiError extends Error {
  constructor(
    message: string,
    public statusCode: number,
    public retryable: boolean = false,
  ) {
    super(message);
    this.name = 'ExternalApiError';
  }
}

export class RateLimitError extends ExternalApiError {
  constructor(
    message: string,
    public retryAfter: number,
  ) {
    super(message, 429, true);
    this.name = 'RateLimitError';
  }
}
```

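As a usage sketch (not taken from the project code), a client can map HTTP status codes onto these classes so callers and the retry helper below can distinguish transient from permanent failures; the `Retry-After` fallback of 60 seconds is an assumption:

```typescript
// Hypothetical helper inside an API client method
async function throwForStatus(response: Response): Promise<void> {
  if (response.status === 429) {
    // Retry-After may be absent; fall back to a conservative 60 seconds.
    const retryAfter = Number(response.headers.get('Retry-After') ?? '60');
    throw new RateLimitError('Store API rate limit exceeded', retryAfter);
  }
  if (!response.ok) {
    // 5xx responses are usually transient, so mark them retryable.
    throw new ExternalApiError(`Store API error: ${response.status}`, response.status, response.status >= 500);
  }
}
```
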
### Retry with Exponential Backoff

```typescript
async function fetchWithRetry<T>(
  fn: () => Promise<T>,
  options: { maxRetries: number; baseDelay: number },
): Promise<T> {
  let lastError: Error;

  for (let attempt = 0; attempt <= options.maxRetries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error as Error;

      if (error instanceof ExternalApiError && !error.retryable) {
        throw error;
      }

      if (attempt < options.maxRetries) {
        const delay = options.baseDelay * Math.pow(2, attempt);
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
  }

  throw lastError!;
}
```

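A short usage sketch: wrapping the store client call from earlier in this guide (the retry budget of 3 attempts and 500 ms base delay are illustrative values, not project defaults):

```typescript
// Retries transient failures up to 3 times: 500 ms, 1 s, 2 s between attempts.
const locations = await fetchWithRetry(() => storeApiClient.getStoreLocations(storeId), {
  maxRetries: 3,
  baseDelay: 500,
});
```
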
## Rate Limiting Strategies

### Token Bucket Pattern

```typescript
class RateLimiter {
  private tokens: number;
  private lastRefill: number;
  private readonly maxTokens: number;
  private readonly refillRate: number; // tokens per second

  constructor(maxTokens: number, refillRate: number) {
    this.maxTokens = maxTokens;
    this.tokens = maxTokens;
    this.refillRate = refillRate;
    this.lastRefill = Date.now();
  }

  async acquire(): Promise<void> {
    this.refill();

    if (this.tokens < 1) {
      const waitTime = ((1 - this.tokens) / this.refillRate) * 1000;
      await new Promise((resolve) => setTimeout(resolve, waitTime));
      this.refill();
    }

    this.tokens -= 1;
  }

  private refill(): void {
    const now = Date.now();
    const elapsed = (now - this.lastRefill) / 1000;
    this.tokens = Math.min(this.maxTokens, this.tokens + elapsed * this.refillRate);
    this.lastRefill = now;
  }
}
```

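A minimal usage sketch: gate outbound calls behind a shared limiter instance (the 5 requests/second budget is an assumption, not a documented quota for the store API):

```typescript
// Allow bursts of up to 5 calls, refilling at 5 tokens per second.
const storeApiLimiter = new RateLimiter(5, 5);

async function getStoreLocationsRateLimited(storeId: string) {
  await storeApiLimiter.acquire(); // waits if the bucket is empty
  return storeApiClient.getStoreLocations(storeId);
}
```
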
## Testing Integrations

### Mocking External Services

```typescript
// src/tests/mocks/storeApi.mock.ts
import { vi } from 'vitest';

export const mockStoreApiClient = {
  getStoreLocations: vi.fn(),
};

vi.mock('@/services/external/storeApi.server', () => ({
  storeApiClient: mockStoreApiClient,
}));
```

### Integration Test with Real Service

```typescript
// src/tests/integration/storeApi.integration.test.ts
describe('Store API Integration', () => {
  it.skipIf(!env.STORE_API_KEY)('fetches real store locations', async () => {
    const locations = await storeApiClient.getStoreLocations('test-store');
    expect(locations).toBeInstanceOf(Array);
  });
});
```

## MCP Tools for Integrations

### Gitea Integration

```
// List repositories
mcp__gitea-projectium__list_my_repos()

// Create issue
mcp__gitea-projectium__create_issue({
  owner: "projectium",
  repo: "flyer-crawler",
  title: "Issue title",
  body: "Issue description"
})
```

### Bugsink Integration

```
// List projects
mcp__bugsink__list_projects()

// Get issue details
mcp__bugsink__get_issue({ issue_id: "..." })

// Get stacktrace
mcp__bugsink__get_stacktrace({ event_id: "..." })
```

## Security Considerations

### API Key Storage

- Never commit API keys to version control
- Use environment variables via `src/config/env.ts`
- Rotate keys periodically
- Use separate keys for dev/test/prod

### Webhook Security

- Always verify webhook signatures
- Use HTTPS for webhook endpoints
- Implement idempotency
- Log webhook events for audit

### OAuth Security

- Use state parameter to prevent CSRF (see the sketch after this list)
- Store tokens securely (encrypted at rest)
- Implement token refresh before expiration
- Validate token scopes

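A minimal sketch of the state-parameter check, building on the Google OAuth service above (the session field name is an assumption and not part of the documented OAuth module):

```typescript
// Hypothetical additions to src/services/oauth/googleOAuth.server.ts
import crypto from 'crypto';

export function getAuthorizationUrlWithState(session: { oauthState?: string }): string {
  const state = crypto.randomBytes(16).toString('hex');
  session.oauthState = state; // persisted server-side, compared on the callback
  return oauth2Client.generateAuthUrl({
    access_type: 'offline',
    scope: ['email', 'profile'],
    prompt: 'consent',
    state,
  });
}

export function verifyOAuthState(session: { oauthState?: string }, returnedState: string): boolean {
  return Boolean(session.oauthState) && session.oauthState === returnedState;
}
```
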
## Related Documentation

- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
- [SECURITY-DEBUG-GUIDE.md](./SECURITY-DEBUG-GUIDE.md) - Security patterns
- [AI-USAGE-GUIDE.md](./AI-USAGE-GUIDE.md) - Gemini API integration
- [../adr/0041-ai-gemini-integration-architecture.md](../adr/0041-ai-gemini-integration-architecture.md) - AI integration ADR
- [../adr/0016-api-security-hardening.md](../adr/0016-api-security-hardening.md) - API security
- [../adr/0048-authentication-strategy.md](../adr/0048-authentication-strategy.md) - Authentication

@@ -89,6 +89,47 @@ Or:

Claude will automatically invoke the appropriate subagent with the relevant context.

## Quick Reference Decision Tree

Use this flowchart to quickly identify the right subagent:

```
What do you need to do?
|
+-- Write/modify code? ----------------> Is it database-related?
|                                        |
|                                        +-- Yes -> db-dev
|                                        +-- No --> Is it frontend?
|                                                   |
|                                                   +-- Yes -> frontend-specialist
|                                                   +-- No --> Is it AI/Gemini?
|                                                              |
|                                                              +-- Yes -> ai-usage
|                                                              +-- No --> coder
|
+-- Test something? -------------------> Write new tests? -> testwriter
|                                        Find bugs/vulnerabilities? -> tester
|                                        Review existing code? -> code-reviewer
|
+-- Debug an issue? -------------------> Production error? -> log-debug
|                                        Database slow? -> db-admin
|                                        External API failing? -> integrations-specialist
|                                        AI extraction failing? -> ai-usage
|
+-- Infrastructure/Deployment? --------> Container/CI/CD? -> devops
|                                        Resource optimization? -> infra-architect
|                                        Background jobs? -> bg-worker
|
+-- Documentation? --------------------> User-facing docs? -> documenter
|                                        ADRs/Technical specs? -> describer-for-ai
|                                        Feature planning? -> planner
|                                        User stories? -> product-owner
|
+-- Security? -------------------------> security-engineer
|
+-- Design/UX? ------------------------> uiux-designer
```

## Subagent Selection Guide

### Which Subagent Should I Use?

@@ -183,12 +224,26 @@ Subagents can pass information back to the main conversation and to each other t

## Related Documentation

- [CODER-GUIDE.md](./CODER-GUIDE.md) - Working with the coder subagent
- [TESTER-GUIDE.md](./TESTER-GUIDE.md) - Testing strategies and patterns
- [DATABASE-GUIDE.md](./DATABASE-GUIDE.md) - Database development workflows
- [DEVOPS-GUIDE.md](./DEVOPS-GUIDE.md) - DevOps and deployment workflows
### Subagent Guides

| Guide | Subagents Covered |
| ---------------------------------------------------- | ----------------------------------------------------- |
| [CODER-GUIDE.md](./CODER-GUIDE.md) | coder |
| [TESTER-GUIDE.md](./TESTER-GUIDE.md) | tester, testwriter |
| [DATABASE-GUIDE.md](./DATABASE-GUIDE.md) | db-dev, db-admin |
| [DEVOPS-GUIDE.md](./DEVOPS-GUIDE.md) | devops, infra-architect, bg-worker |
| [FRONTEND-GUIDE.md](./FRONTEND-GUIDE.md) | frontend-specialist, uiux-designer |
| [SECURITY-DEBUG-GUIDE.md](./SECURITY-DEBUG-GUIDE.md) | security-engineer, log-debug, code-reviewer |
| [AI-USAGE-GUIDE.md](./AI-USAGE-GUIDE.md) | ai-usage |
| [INTEGRATIONS-GUIDE.md](./INTEGRATIONS-GUIDE.md) | integrations-specialist, tools-integration-specialist |
| [DOCUMENTATION-GUIDE.md](./DOCUMENTATION-GUIDE.md) | documenter, describer-for-ai, planner, product-owner |

### Project Documentation

- [../adr/index.md](../adr/index.md) - Architecture Decision Records
- [../TESTING.md](../TESTING.md) - Testing guide
- [../development/TESTING.md](../development/TESTING.md) - Testing guide
- [../development/CODE-PATTERNS.md](../development/CODE-PATTERNS.md) - Code patterns reference
- [../architecture/OVERVIEW.md](../architecture/OVERVIEW.md) - System architecture

## Troubleshooting

@@ -6,6 +6,16 @@ This guide covers security and debugging-focused subagents:

- **log-debug**: Production errors, observability, Bugsink/Sentry analysis
- **code-reviewer**: Code quality, security review, best practices

## Quick Reference

| Aspect | security-engineer | log-debug | code-reviewer |
| --------------- | ---------------------------------- | ---------------------------------------- | --------------------------- |
| **Primary Use** | Security audits, OWASP | Production debugging | Code quality review |
| **Key ADRs** | ADR-016 (Security), ADR-032 (Rate) | ADR-050 (Observability) | ADR-034, ADR-035 (Patterns) |
| **MCP Tools** | N/A | `mcp__bugsink__*`, `mcp__localerrors__*` | N/A |
| **Key Checks** | Auth, input validation, CORS | Logs, stacktraces, error patterns | Patterns, tests, security |
| **Delegate To** | `coder` (fix issues) | `devops` (infra), `coder` (fixes) | `coder`, `testwriter` |

## The security-engineer Subagent

### When to Use
@@ -432,8 +442,10 @@ tail -f /var/log/postgresql/postgresql-$(date +%Y-%m-%d).log | grep "duration:"

- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
- [DEVOPS-GUIDE.md](./DEVOPS-GUIDE.md) - Infrastructure debugging
- [TESTER-GUIDE.md](./TESTER-GUIDE.md) - Security testing
- [../adr/0016-api-security-hardening.md](../adr/0016-api-security-hardening.md) - Security ADR
- [../adr/0032-rate-limiting-strategy.md](../adr/0032-rate-limiting-strategy.md) - Rate limiting
- [../adr/0015-application-performance-monitoring-and-error-tracking.md](../adr/0015-application-performance-monitoring-and-error-tracking.md) - Monitoring ADR
- [../adr/0015-error-tracking-and-observability.md](../adr/0015-error-tracking-and-observability.md) - Monitoring ADR
- [../adr/0050-postgresql-function-observability.md](../adr/0050-postgresql-function-observability.md) - Database observability
- [../BARE-METAL-SETUP.md](../BARE-METAL-SETUP.md) - Production setup
- [../operations/BARE-METAL-SETUP.md](../operations/BARE-METAL-SETUP.md) - Production setup
- [../tools/BUGSINK-SETUP.md](../tools/BUGSINK-SETUP.md) - Bugsink configuration

@@ -5,6 +5,17 @@ This guide covers two related but distinct subagents for testing in the Flyer Cr

- **tester**: Adversarial testing to find edge cases, race conditions, and vulnerabilities
- **testwriter**: Creating comprehensive test suites for features and fixes

## Quick Reference

| Aspect | tester | testwriter |
| ---------------- | -------------------------------------------- | ------------------------------------------ |
| **Primary Use** | Find bugs, security issues, edge cases | Create test suites, improve coverage |
| **Key Files** | N/A (analysis-focused) | `*.test.ts`, `src/tests/utils/` |
| **Key ADRs** | ADR-010 (Testing), ADR-040 (Test Economics) | ADR-010 (Testing), ADR-045 (Test Fixtures) |
| **Test Command** | `podman exec -it flyer-crawler-dev npm test` | Same |
| **Test Stack** | Vitest, Supertest, Testing Library | Same |
| **Delegate To** | `testwriter` (write tests for findings) | `coder` (fix failing tests) |

## Understanding the Difference

| Aspect | tester | testwriter |
@@ -399,6 +410,7 @@ A typical workflow for thorough testing:

- [OVERVIEW.md](./OVERVIEW.md) - Subagent system overview
- [CODER-GUIDE.md](./CODER-GUIDE.md) - Working with the coder subagent
- [../TESTING.md](../TESTING.md) - Testing guide
- [SECURITY-DEBUG-GUIDE.md](./SECURITY-DEBUG-GUIDE.md) - Security testing and code review
- [../development/TESTING.md](../development/TESTING.md) - Testing guide
- [../adr/0010-testing-strategy-and-standards.md](../adr/0010-testing-strategy-and-standards.md) - Testing ADR
- [../adr/0040-testing-economics-and-priorities.md](../adr/0040-testing-economics-and-priorities.md) - Testing priorities

4
package-lock.json
generated
@@ -1,12 +1,12 @@
{
  "name": "flyer-crawler",
  "version": "0.12.20",
  "version": "0.12.25",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "flyer-crawler",
      "version": "0.12.20",
      "version": "0.12.25",
      "dependencies": {
        "@bull-board/api": "^6.14.2",
        "@bull-board/express": "^6.14.2",

@@ -1,7 +1,7 @@
{
  "name": "flyer-crawler",
  "private": true,
  "version": "0.12.20",
  "version": "0.12.25",
  "type": "module",
  "scripts": {
    "dev": "concurrently \"npm:start:dev\" \"vite\"",

378
src/components/FeatureFlag.test.tsx
Normal file
@@ -0,0 +1,378 @@
|
||||
// src/components/FeatureFlag.test.tsx
|
||||
/**
|
||||
* Unit tests for the FeatureFlag component (ADR-024).
|
||||
*
|
||||
* These tests verify:
|
||||
* - Component renders children when feature is enabled
|
||||
* - Component hides children when feature is disabled
|
||||
* - Component renders fallback when feature is disabled
|
||||
* - Component returns null when disabled and no fallback provided
|
||||
* - All feature flag names are properly handled
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
// Mock the useFeatureFlag hook
|
||||
const mockUseFeatureFlag = vi.fn();
|
||||
|
||||
vi.mock('../hooks/useFeatureFlag', () => ({
|
||||
useFeatureFlag: (flagName: string) => mockUseFeatureFlag(flagName),
|
||||
}));
|
||||
|
||||
// Import after mocking
|
||||
import { FeatureFlag } from './FeatureFlag';
|
||||
|
||||
describe('FeatureFlag component', () => {
|
||||
beforeEach(() => {
|
||||
mockUseFeatureFlag.mockReset();
|
||||
// Default to disabled
|
||||
mockUseFeatureFlag.mockReturnValue(false);
|
||||
});
|
||||
|
||||
describe('when feature is enabled', () => {
|
||||
beforeEach(() => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
});
|
||||
|
||||
it('should render children', () => {
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard">
|
||||
<div data-testid="new-feature">New Feature Content</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('new-feature')).toBeInTheDocument();
|
||||
expect(screen.getByText('New Feature Content')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should not render fallback', () => {
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard" fallback={<div data-testid="fallback">Fallback</div>}>
|
||||
<div data-testid="new-feature">New Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('new-feature')).toBeInTheDocument();
|
||||
expect(screen.queryByTestId('fallback')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render multiple children', () => {
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard">
|
||||
<div data-testid="child-1">Child 1</div>
|
||||
<div data-testid="child-2">Child 2</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('child-1')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('child-2')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render text content', () => {
|
||||
render(<FeatureFlag feature="newDashboard">Just some text</FeatureFlag>);
|
||||
|
||||
expect(screen.getByText('Just some text')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should call useFeatureFlag with correct flag name', () => {
|
||||
render(
|
||||
<FeatureFlag feature="betaRecipes">
|
||||
<div>Content</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(mockUseFeatureFlag).toHaveBeenCalledWith('betaRecipes');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when feature is disabled', () => {
|
||||
beforeEach(() => {
|
||||
mockUseFeatureFlag.mockReturnValue(false);
|
||||
});
|
||||
|
||||
it('should not render children', () => {
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard">
|
||||
<div data-testid="new-feature">New Feature Content</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.queryByTestId('new-feature')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText('New Feature Content')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render fallback when provided', () => {
|
||||
render(
|
||||
<FeatureFlag
|
||||
feature="newDashboard"
|
||||
fallback={<div data-testid="fallback">Legacy Feature</div>}
|
||||
>
|
||||
<div data-testid="new-feature">New Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.queryByTestId('new-feature')).not.toBeInTheDocument();
|
||||
expect(screen.getByTestId('fallback')).toBeInTheDocument();
|
||||
expect(screen.getByText('Legacy Feature')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render null when no fallback is provided', () => {
|
||||
const { container } = render(
|
||||
<FeatureFlag feature="newDashboard">
|
||||
<div data-testid="new-feature">New Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.queryByTestId('new-feature')).not.toBeInTheDocument();
|
||||
// Container should be empty (just the wrapper)
|
||||
expect(container.innerHTML).toBe('');
|
||||
});
|
||||
|
||||
it('should render complex fallback components', () => {
|
||||
const FallbackComponent = () => (
|
||||
<div data-testid="complex-fallback">
|
||||
<h1>Legacy Dashboard</h1>
|
||||
<p>This is the old version</p>
|
||||
</div>
|
||||
);
|
||||
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard" fallback={<FallbackComponent />}>
|
||||
<div data-testid="new-feature">New Dashboard</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.queryByTestId('new-feature')).not.toBeInTheDocument();
|
||||
expect(screen.getByTestId('complex-fallback')).toBeInTheDocument();
|
||||
expect(screen.getByText('Legacy Dashboard')).toBeInTheDocument();
|
||||
expect(screen.getByText('This is the old version')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render text fallback', () => {
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard" fallback="Feature not available">
|
||||
<div>New Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.getByText('Feature not available')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('with different feature flags', () => {
|
||||
it('should work with newDashboard flag', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard">
|
||||
<div data-testid="dashboard">Dashboard</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(mockUseFeatureFlag).toHaveBeenCalledWith('newDashboard');
|
||||
expect(screen.getByTestId('dashboard')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should work with betaRecipes flag', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
render(
|
||||
<FeatureFlag feature="betaRecipes">
|
||||
<div data-testid="recipes">Recipes</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(mockUseFeatureFlag).toHaveBeenCalledWith('betaRecipes');
|
||||
expect(screen.getByTestId('recipes')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should work with experimentalAi flag', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
render(
|
||||
<FeatureFlag feature="experimentalAi">
|
||||
<div data-testid="ai">AI Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(mockUseFeatureFlag).toHaveBeenCalledWith('experimentalAi');
|
||||
expect(screen.getByTestId('ai')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should work with debugMode flag', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
render(
|
||||
<FeatureFlag feature="debugMode">
|
||||
<div data-testid="debug">Debug Panel</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(mockUseFeatureFlag).toHaveBeenCalledWith('debugMode');
|
||||
expect(screen.getByTestId('debug')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('real-world usage patterns', () => {
|
||||
it('should work for A/B testing pattern', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(false);
|
||||
|
||||
render(
|
||||
<FeatureFlag feature="newDashboard" fallback={<div data-testid="old-ui">Old UI</div>}>
|
||||
<div data-testid="new-ui">New UI</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.queryByTestId('new-ui')).not.toBeInTheDocument();
|
||||
expect(screen.getByTestId('old-ui')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should work for gradual rollout pattern', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
render(
|
||||
<div>
|
||||
<nav data-testid="nav">Navigation</nav>
|
||||
<FeatureFlag feature="betaRecipes">
|
||||
<aside data-testid="recipe-suggestions">Recipe Suggestions</aside>
|
||||
</FeatureFlag>
|
||||
<main data-testid="main">Main Content</main>
|
||||
</div>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('nav')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('recipe-suggestions')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('main')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should work nested within conditional logic', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
const isLoggedIn = true;
|
||||
|
||||
render(
|
||||
<div>
|
||||
{isLoggedIn && (
|
||||
<FeatureFlag
|
||||
feature="experimentalAi"
|
||||
fallback={<div data-testid="standard">Standard</div>}
|
||||
>
|
||||
<div data-testid="ai-search">AI Search</div>
|
||||
</FeatureFlag>
|
||||
)}
|
||||
</div>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('ai-search')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should work with multiple FeatureFlag components', () => {
|
||||
// First call for newDashboard returns true
|
||||
// Second call for debugMode returns false
|
||||
mockUseFeatureFlag.mockImplementation((flag: string) => {
|
||||
if (flag === 'newDashboard') return true;
|
||||
if (flag === 'debugMode') return false;
|
||||
return false;
|
||||
});
|
||||
|
||||
render(
|
||||
<div>
|
||||
<FeatureFlag feature="newDashboard">
|
||||
<div data-testid="new-dashboard">New Dashboard</div>
|
||||
</FeatureFlag>
|
||||
<FeatureFlag feature="debugMode" fallback={<div data-testid="no-debug">No Debug</div>}>
|
||||
<div data-testid="debug-panel">Debug Panel</div>
|
||||
</FeatureFlag>
|
||||
</div>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('new-dashboard')).toBeInTheDocument();
|
||||
expect(screen.queryByTestId('debug-panel')).not.toBeInTheDocument();
|
||||
expect(screen.getByTestId('no-debug')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle undefined fallback gracefully', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(false);
|
||||
|
||||
const { container } = render(
|
||||
<FeatureFlag feature="newDashboard" fallback={undefined}>
|
||||
<div data-testid="new-feature">New Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(screen.queryByTestId('new-feature')).not.toBeInTheDocument();
|
||||
expect(container.innerHTML).toBe('');
|
||||
});
|
||||
|
||||
it('should handle null children gracefully when enabled', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
const { container } = render(<FeatureFlag feature="newDashboard">{null}</FeatureFlag>);
|
||||
|
||||
// Should render nothing (null)
|
||||
expect(container.innerHTML).toBe('');
|
||||
});
|
||||
|
||||
it('should handle empty children when enabled', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
const { container } = render(
|
||||
<FeatureFlag feature="newDashboard">
|
||||
<></>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
// Should render the empty fragment
|
||||
expect(container.innerHTML).toBe('');
|
||||
});
|
||||
|
||||
it('should handle boolean children', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
// React ignores boolean children, so nothing should render
|
||||
const { container } = render(
|
||||
<FeatureFlag feature="newDashboard">{true as unknown as React.ReactNode}</FeatureFlag>,
|
||||
);
|
||||
|
||||
expect(container.innerHTML).toBe('');
|
||||
});
|
||||
|
||||
it('should handle number children', () => {
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
render(<FeatureFlag feature="newDashboard">{42}</FeatureFlag>);
|
||||
|
||||
expect(screen.getByText('42')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('re-rendering behavior', () => {
|
||||
it('should update when feature flag value changes', () => {
|
||||
const { rerender } = render(
|
||||
<FeatureFlag feature="newDashboard" fallback={<div data-testid="fallback">Fallback</div>}>
|
||||
<div data-testid="new-feature">New Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
// Initially disabled
|
||||
expect(screen.queryByTestId('new-feature')).not.toBeInTheDocument();
|
||||
expect(screen.getByTestId('fallback')).toBeInTheDocument();
|
||||
|
||||
// Enable the flag
|
||||
mockUseFeatureFlag.mockReturnValue(true);
|
||||
|
||||
rerender(
|
||||
<FeatureFlag feature="newDashboard" fallback={<div data-testid="fallback">Fallback</div>}>
|
||||
<div data-testid="new-feature">New Feature</div>
|
||||
</FeatureFlag>,
|
||||
);
|
||||
|
||||
// Now enabled
|
||||
expect(screen.getByTestId('new-feature')).toBeInTheDocument();
|
||||
expect(screen.queryByTestId('fallback')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
75
src/components/FeatureFlag.tsx
Normal file
@@ -0,0 +1,75 @@
// src/components/FeatureFlag.tsx
import type { ReactNode } from 'react';
import { useFeatureFlag, type FeatureFlagName } from '../hooks/useFeatureFlag';

/**
 * Props for the FeatureFlag component.
 */
export interface FeatureFlagProps {
  /**
   * The name of the feature flag to check.
   * Must be a valid FeatureFlagName defined in config.featureFlags.
   */
  feature: FeatureFlagName;

  /**
   * Content to render when the feature flag is enabled.
   */
  children: ReactNode;

  /**
   * Optional content to render when the feature flag is disabled.
   * If not provided, nothing is rendered when the flag is disabled.
   * @default null
   */
  fallback?: ReactNode;
}

/**
 * Declarative component for conditional rendering based on feature flag state.
 *
 * This component provides a clean, declarative API for rendering content based
 * on whether a feature flag is enabled or disabled. It uses the useFeatureFlag
 * hook internally and supports an optional fallback for disabled features.
 *
 * @param props - Component props
 * @param props.feature - The feature flag name to check
 * @param props.children - Content rendered when feature is enabled
 * @param props.fallback - Content rendered when feature is disabled (default: null)
 *
 * @example
 * // Basic usage - show new feature when enabled
 * <FeatureFlag feature="newDashboard">
 *   <NewDashboard />
 * </FeatureFlag>
 *
 * @example
 * // With fallback - show alternative when feature is disabled
 * <FeatureFlag feature="newDashboard" fallback={<LegacyDashboard />}>
 *   <NewDashboard />
 * </FeatureFlag>
 *
 * @example
 * // Wrap a section of UI that should only appear when flag is enabled
 * <div className="sidebar">
 *   <Navigation />
 *   <FeatureFlag feature="betaRecipes">
 *     <RecipeSuggestions />
 *   </FeatureFlag>
 *   <Footer />
 * </div>
 *
 * @example
 * // Combine with other conditional logic
 * {isLoggedIn && (
 *   <FeatureFlag feature="experimentalAi" fallback={<StandardSearch />}>
 *     <AiPoweredSearch />
 *   </FeatureFlag>
 * )}
 *
 * @see docs/adr/0024-feature-flagging-strategy.md
 */
export function FeatureFlag({ feature, children, fallback = null }: FeatureFlagProps): ReactNode {
  const isEnabled = useFeatureFlag(feature);
  return isEnabled ? children : fallback;
}

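The `useFeatureFlag` hook imported above is not part of this change set. A minimal sketch of what it presumably looks like, assuming it reads the `featureFlags` block from the frontend config shown in the next hunk (the import path and the re-export of `FeatureFlagName` are assumptions):

```typescript
// src/hooks/useFeatureFlag.ts (hypothetical sketch, not from this diff)
import config from '../config';

export type FeatureFlagName = keyof typeof config.featureFlags;

/**
 * Returns true when the named frontend feature flag is enabled.
 * Flags are resolved at build time from VITE_FEATURE_* variables,
 * so the value is stable for the lifetime of the page.
 */
export function useFeatureFlag(flagName: FeatureFlagName): boolean {
  return config.featureFlags[flagName];
}
```
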
@@ -24,6 +24,28 @@ const config = {
    debug: import.meta.env.VITE_SENTRY_DEBUG === 'true',
    enabled: import.meta.env.VITE_SENTRY_ENABLED !== 'false',
  },
  /**
   * Feature flags for conditional feature rendering (ADR-024).
   *
   * All flags default to false (disabled) when the environment variable is not set
   * or is set to any value other than 'true'. This opt-in model ensures features
   * are explicitly enabled, preventing accidental exposure of incomplete features.
   *
   * Environment variables follow the naming convention: VITE_FEATURE_SNAKE_CASE
   * Config properties use camelCase for consistency with JavaScript conventions.
   *
   * @see docs/adr/0024-feature-flagging-strategy.md
   */
  featureFlags: {
    /** Enable the redesigned dashboard UI (VITE_FEATURE_NEW_DASHBOARD) */
    newDashboard: import.meta.env.VITE_FEATURE_NEW_DASHBOARD === 'true',
    /** Enable beta recipe features (VITE_FEATURE_BETA_RECIPES) */
    betaRecipes: import.meta.env.VITE_FEATURE_BETA_RECIPES === 'true',
    /** Enable experimental AI features (VITE_FEATURE_EXPERIMENTAL_AI) */
    experimentalAi: import.meta.env.VITE_FEATURE_EXPERIMENTAL_AI === 'true',
    /** Enable debug mode UI elements (VITE_FEATURE_DEBUG_MODE) */
    debugMode: import.meta.env.VITE_FEATURE_DEBUG_MODE === 'true',
  },
};

export default config;

@@ -155,6 +155,38 @@ const sentrySchema = z.object({
  debug: booleanString(false),
});

/**
 * Feature flags configuration schema (ADR-024).
 *
 * All flags default to `false` (disabled) for safety, following an opt-in model.
 * Set the corresponding environment variable to 'true' to enable a feature.
 *
 * Environment variable naming convention: `FEATURE_SNAKE_CASE`
 * Config property naming convention: `camelCase`
 *
 * @example
 * // Enable via environment:
 * FEATURE_BUGSINK_SYNC=true
 *
 * // Check in code:
 * import { config } from './config/env';
 * if (config.featureFlags.bugsinkSync) { ... }
 */
const featureFlagsSchema = z.object({
  /** Enable Bugsink error sync integration (FEATURE_BUGSINK_SYNC) */
  bugsinkSync: booleanString(false),
  /** Enable advanced RBAC features (FEATURE_ADVANCED_RBAC) */
  advancedRbac: booleanString(false),
  /** Enable new dashboard experience (FEATURE_NEW_DASHBOARD) */
  newDashboard: booleanString(false),
  /** Enable beta recipe features (FEATURE_BETA_RECIPES) */
  betaRecipes: booleanString(false),
  /** Enable experimental AI features (FEATURE_EXPERIMENTAL_AI) */
  experimentalAi: booleanString(false),
  /** Enable debug mode for development (FEATURE_DEBUG_MODE) */
  debugMode: booleanString(false),
});

/**
 * Complete environment configuration schema.
 */
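The `booleanString` helper is defined earlier in `env.ts` and is not part of this hunk; a plausible sketch of its shape, assuming Zod, would be:

```typescript
// Hypothetical shape of the existing booleanString helper (not shown in this diff):
// accepts an optional string env var and coerces it to a boolean with the given default.
const booleanString = (defaultValue: boolean) =>
  z
    .string()
    .optional()
    .transform((value) => (value === undefined ? defaultValue : value === 'true'));
```
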
@@ -170,6 +202,7 @@ const envSchema = z.object({
  worker: workerSchema,
  server: serverSchema,
  sentry: sentrySchema,
  featureFlags: featureFlagsSchema,
});

export type EnvConfig = z.infer<typeof envSchema>;
@@ -244,6 +277,14 @@ function loadEnvVars(): unknown {
      environment: process.env.SENTRY_ENVIRONMENT || process.env.NODE_ENV,
      debug: process.env.SENTRY_DEBUG,
    },
    featureFlags: {
      bugsinkSync: process.env.FEATURE_BUGSINK_SYNC,
      advancedRbac: process.env.FEATURE_ADVANCED_RBAC,
      newDashboard: process.env.FEATURE_NEW_DASHBOARD,
      betaRecipes: process.env.FEATURE_BETA_RECIPES,
      experimentalAi: process.env.FEATURE_EXPERIMENTAL_AI,
      debugMode: process.env.FEATURE_DEBUG_MODE,
    },
  };
}

@@ -391,3 +432,33 @@ export const isGoogleOAuthConfigured = !!config.google.clientId && !!config.goog
 * Returns true if GitHub OAuth is configured (both client ID and secret present).
 */
export const isGithubOAuthConfigured = !!config.github.clientId && !!config.github.clientSecret;

// --- Feature Flag Helpers (ADR-024) ---

/**
 * Type representing valid feature flag names.
 * Derived from the featureFlagsSchema for type safety.
 */
export type FeatureFlagName = keyof typeof config.featureFlags;

/**
 * Check if a feature flag is enabled.
 *
 * This is a convenience function for checking feature flag state.
 * For more advanced usage (logging, all flags), use the featureFlags service.
 *
 * @param flagName - The name of the feature flag to check
 * @returns boolean indicating if the feature is enabled
 *
 * @example
 * ```typescript
 * import { isFeatureFlagEnabled } from './config/env';
 *
 * if (isFeatureFlagEnabled('newDashboard')) {
 *   // Use new dashboard
 * }
 * ```
 */
export function isFeatureFlagEnabled(flagName: FeatureFlagName): boolean {
  return config.featureFlags[flagName];
}

@@ -185,10 +185,7 @@ describe('StoreCard', () => {

  it('should show "No location data" when locations is undefined', () => {
    renderWithProviders(
      <StoreCard
        store={mockStoreUndefinedLocations as typeof mockStoreWithLogo}
        showLocations={true}
      />,
      <StoreCard store={mockStoreUndefinedLocations as any} showLocations={true} />,
    );

    expect(screen.getByText('No location data')).toBeInTheDocument();

126
src/hooks/queries/useBestSalePricesQuery.test.tsx
Normal file
@@ -0,0 +1,126 @@
|
||||
// src/hooks/queries/useBestSalePricesQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useBestSalePricesQuery } from './useBestSalePricesQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import type { WatchedItemDeal } from '../../types';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useBestSalePricesQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch best sale prices successfully', async () => {
|
||||
const mockDeals: WatchedItemDeal[] = [
|
||||
{
|
||||
master_item_id: 101,
|
||||
item_name: 'Organic Bananas',
|
||||
best_price_in_cents: 59,
|
||||
store: {
|
||||
store_id: 1,
|
||||
name: 'Green Grocer',
|
||||
logo_url: null,
|
||||
locations: [
|
||||
{
|
||||
address_line_1: '123 Main St',
|
||||
city: 'Springfield',
|
||||
province_state: 'ON',
|
||||
postal_code: 'A1B2C3',
|
||||
},
|
||||
],
|
||||
},
|
||||
flyer_id: 56,
|
||||
valid_to: '2026-02-01T23:59:59Z',
|
||||
},
|
||||
];
|
||||
mockedApiClient.fetchBestSalePrices.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockDeals }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBestSalePricesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchBestSalePrices).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockDeals);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchBestSalePrices.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Authentication required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBestSalePricesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchBestSalePrices.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBestSalePricesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when error.message is empty', async () => {
|
||||
mockedApiClient.fetchBestSalePrices.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBestSalePricesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to fetch best sale prices');
|
||||
});
|
||||
|
||||
it('should return empty array for no deals', async () => {
|
||||
mockedApiClient.fetchBestSalePrices.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBestSalePricesQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not fetch when disabled', () => {
|
||||
renderHook(() => useBestSalePricesQuery(false), { wrapper });
|
||||
|
||||
expect(mockedApiClient.fetchBestSalePrices).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
175
src/hooks/queries/useBrandsQuery.test.tsx
Normal file
@@ -0,0 +1,175 @@
|
||||
// src/hooks/queries/useBrandsQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useBrandsQuery } from './useBrandsQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import type { Brand } from '../../types';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useBrandsQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch brands successfully', async () => {
|
||||
const mockBrands: Brand[] = [
|
||||
{
|
||||
brand_id: 1,
|
||||
name: 'Organic Valley',
|
||||
logo_url: 'https://example.com/organic-valley.png',
|
||||
store_id: null,
|
||||
store_name: null,
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
{
|
||||
brand_id: 2,
|
||||
name: "Kellogg's",
|
||||
logo_url: null,
|
||||
store_id: 5,
|
||||
store_name: 'SuperMart',
|
||||
created_at: '2025-01-02T00:00:00Z',
|
||||
updated_at: '2025-01-02T00:00:00Z',
|
||||
},
|
||||
];
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockBrands }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchAllBrands).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockBrands);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Authentication required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle API error without message (JSON parse failure)', async () => {
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when error.message is empty', async () => {
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to fetch brands');
|
||||
});
|
||||
|
||||
it('should return empty array for no brands', async () => {
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when success is false', async () => {
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: false, error: 'Something went wrong' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when data is not an array', async () => {
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: null }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not fetch when disabled', () => {
|
||||
renderHook(() => useBrandsQuery(false), { wrapper });
|
||||
|
||||
expect(mockedApiClient.fetchAllBrands).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fetch when explicitly enabled', async () => {
|
||||
const mockBrands: Brand[] = [
|
||||
{
|
||||
brand_id: 1,
|
||||
name: 'Test Brand',
|
||||
logo_url: null,
|
||||
store_id: null,
|
||||
store_name: null,
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
];
|
||||
mockedApiClient.fetchAllBrands.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockBrands }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useBrandsQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchAllBrands).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual(mockBrands);
|
||||
});
|
||||
});
|
||||
235
src/hooks/queries/useFlyerItemCountQuery.test.tsx
Normal file
@@ -0,0 +1,235 @@
|
||||
// src/hooks/queries/useFlyerItemCountQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useFlyerItemCountQuery } from './useFlyerItemCountQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useFlyerItemCountQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch flyer item count successfully', async () => {
|
||||
const flyerIds = [1, 2, 3];
|
||||
const mockCount = { count: 42 };
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockCount }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).toHaveBeenCalledWith(flyerIds);
|
||||
expect(result.current.data).toEqual(mockCount);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
const flyerIds = [1, 2];
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Authentication required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle API error without message (JSON parse error)', async () => {
|
||||
const flyerIds = [1, 2];
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when error.message is empty', async () => {
|
||||
const flyerIds = [1, 2];
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to count flyer items');
|
||||
});
|
||||
|
||||
it('should return zero count for empty flyerIds array without calling API', async () => {
|
||||
const flyerIds: number[] = [];
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds, true), { wrapper });
|
||||
|
||||
// Query should be disabled due to flyerIds.length === 0
|
||||
expect(result.current.isPending).toBe(true);
|
||||
expect(result.current.fetchStatus).toBe('idle');
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not fetch when disabled via enabled parameter', () => {
|
||||
const flyerIds = [1, 2, 3];
|
||||
|
||||
renderHook(() => useFlyerItemCountQuery(flyerIds, false), { wrapper });
|
||||
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return count of zero when API returns zero', async () => {
|
||||
const flyerIds = [99, 100];
|
||||
const mockCount = { count: 0 };
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockCount }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual({ count: 0 });
|
||||
});
|
||||
|
||||
it('should handle response without data wrapper (fallback to json)', async () => {
|
||||
const flyerIds = [1];
|
||||
const mockCount = { count: 15 };
|
||||
|
||||
// Some API responses might return data directly without the { success, data } wrapper
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockCount),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
// Should fall back to the raw json when data is undefined
|
||||
expect(result.current.data).toEqual(mockCount);
|
||||
});
|
||||
|
||||
it('should use different cache keys for different flyerIds arrays', async () => {
|
||||
const flyerIds1 = [1, 2];
|
||||
const flyerIds2 = [3, 4];
|
||||
const mockCount1 = { count: 10 };
|
||||
const mockCount2 = { count: 20 };
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers
|
||||
.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockCount1 }),
|
||||
} as Response)
|
||||
.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockCount2 }),
|
||||
} as Response);
|
||||
|
||||
const { result: result1 } = renderHook(() => useFlyerItemCountQuery(flyerIds1), { wrapper });
|
||||
await waitFor(() => expect(result1.current.isSuccess).toBe(true));
|
||||
|
||||
const { result: result2 } = renderHook(() => useFlyerItemCountQuery(flyerIds2), { wrapper });
|
||||
await waitFor(() => expect(result2.current.isSuccess).toBe(true));
|
||||
|
||||
// Both calls should have been made since they have different cache keys
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).toHaveBeenCalledTimes(2);
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).toHaveBeenCalledWith(flyerIds1);
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).toHaveBeenCalledWith(flyerIds2);
|
||||
});
|
||||
|
||||
it('should handle large count values', async () => {
|
||||
const flyerIds = [1, 2, 3, 4, 5];
|
||||
const mockCount = { count: 999999 };
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockCount }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual({ count: 999999 });
|
||||
});
|
||||
|
||||
it('should handle network error', async () => {
|
||||
const flyerIds = [1, 2];
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockRejectedValue(new Error('Network error'));
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Network error');
|
||||
});
|
||||
|
||||
it('should default enabled to true when not specified', async () => {
|
||||
const flyerIds = [1];
|
||||
const mockCount = { count: 5 };
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockCount }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle single flyerId in array', async () => {
|
||||
const flyerIds = [42];
|
||||
const mockCount = { count: 7 };
|
||||
|
||||
mockedApiClient.countFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockCount }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemCountQuery(flyerIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.countFlyerItemsForFlyers).toHaveBeenCalledWith([42]);
|
||||
expect(result.current.data).toEqual({ count: 7 });
|
||||
});
|
||||
});
|
||||
310
src/hooks/queries/useFlyerItemsForFlyersQuery.test.tsx
Normal file
@@ -0,0 +1,310 @@
|
||||
// src/hooks/queries/useFlyerItemsForFlyersQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useFlyerItemsForFlyersQuery } from './useFlyerItemsForFlyersQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import type { FlyerItem } from '../../types';
|
||||
import { createMockFlyerItem } from '../../tests/utils/mockFactories';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useFlyerItemsForFlyersQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch flyer items for multiple flyers successfully', async () => {
|
||||
const mockFlyerItems: FlyerItem[] = [
|
||||
createMockFlyerItem({
|
||||
flyer_item_id: 1,
|
||||
flyer_id: 100,
|
||||
item: 'Organic Bananas',
|
||||
price_display: '$0.59/lb',
|
||||
price_in_cents: 59,
|
||||
quantity: 'lb',
|
||||
master_item_id: 1001,
|
||||
master_item_name: 'Bananas',
|
||||
category_id: 1,
|
||||
category_name: 'Produce',
|
||||
}),
|
||||
createMockFlyerItem({
|
||||
flyer_item_id: 2,
|
||||
flyer_id: 100,
|
||||
item: 'Whole Milk',
|
||||
price_display: '$3.99',
|
||||
price_in_cents: 399,
|
||||
quantity: 'gal',
|
||||
master_item_id: 1002,
|
||||
master_item_name: 'Milk',
|
||||
category_id: 2,
|
||||
category_name: 'Dairy',
|
||||
}),
|
||||
createMockFlyerItem({
|
||||
flyer_item_id: 3,
|
||||
flyer_id: 101,
|
||||
item: 'Chicken Breast',
|
||||
price_display: '$5.99/lb',
|
||||
price_in_cents: 599,
|
||||
quantity: 'lb',
|
||||
master_item_id: 1003,
|
||||
master_item_name: 'Chicken',
|
||||
category_id: 3,
|
||||
category_name: 'Meat',
|
||||
}),
|
||||
];
|
||||
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockFlyerItems }),
|
||||
} as Response);
|
||||
|
||||
const flyerIds = [100, 101];
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery(flyerIds), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchFlyerItemsForFlyers).toHaveBeenCalledWith(flyerIds);
|
||||
expect(result.current.data).toEqual(mockFlyerItems);
|
||||
expect(result.current.data).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Authentication required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle API error without message (JSON parse error)', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when error.message is empty', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to fetch flyer items');
|
||||
});
|
||||
|
||||
it('should return empty array for no flyer items', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not fetch when disabled explicitly', () => {
|
||||
renderHook(() => useFlyerItemsForFlyersQuery([100], false), { wrapper });
|
||||
|
||||
expect(mockedApiClient.fetchFlyerItemsForFlyers).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not fetch when flyerIds array is empty', () => {
|
||||
renderHook(() => useFlyerItemsForFlyersQuery([]), { wrapper });
|
||||
|
||||
expect(mockedApiClient.fetchFlyerItemsForFlyers).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not fetch when flyerIds is empty even if enabled is true', () => {
|
||||
renderHook(() => useFlyerItemsForFlyersQuery([], true), { wrapper });
|
||||
|
||||
expect(mockedApiClient.fetchFlyerItemsForFlyers).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return empty array when success is false in response', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: false, error: 'Some error' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when data is not an array', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: null }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when data is an object instead of array', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: { item: 'not an array' } }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should fetch for single flyer ID', async () => {
|
||||
const mockFlyerItems: FlyerItem[] = [
|
||||
createMockFlyerItem({
|
||||
flyer_item_id: 1,
|
||||
flyer_id: 100,
|
||||
item: 'Bread',
|
||||
price_display: '$2.49',
|
||||
price_in_cents: 249,
|
||||
}),
|
||||
];
|
||||
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockFlyerItems }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchFlyerItemsForFlyers).toHaveBeenCalledWith([100]);
|
||||
expect(result.current.data).toEqual(mockFlyerItems);
|
||||
});
|
||||
|
||||
it('should handle 404 error status', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 404,
|
||||
json: () => Promise.resolve({ message: 'Flyers not found' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([999]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Flyers not found');
|
||||
});
|
||||
|
||||
it('should handle network error', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockRejectedValue(new Error('Network error'));
|
||||
|
||||
const { result } = renderHook(() => useFlyerItemsForFlyersQuery([100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Network error');
|
||||
});
|
||||
|
||||
it('should be enabled by default when flyerIds has items', async () => {
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
// Call without the enabled parameter (uses default value of true)
|
||||
renderHook(() => useFlyerItemsForFlyersQuery([100]), { wrapper });
|
||||
|
||||
await waitFor(() => expect(mockedApiClient.fetchFlyerItemsForFlyers).toHaveBeenCalled());
|
||||
});
|
||||
|
||||
it('should use consistent query key regardless of flyer IDs order', async () => {
|
||||
const mockItems: FlyerItem[] = [createMockFlyerItem({ flyer_item_id: 1, flyer_id: 100 })];
|
||||
|
||||
mockedApiClient.fetchFlyerItemsForFlyers.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockItems }),
|
||||
} as Response);
|
||||
|
||||
// First call with [100, 200, 50]
|
||||
const { result: result1 } = renderHook(() => useFlyerItemsForFlyersQuery([100, 200, 50]), {
|
||||
wrapper,
|
||||
});
|
||||
await waitFor(() => expect(result1.current.isSuccess).toBe(true));
|
||||
|
||||
// API should be called with original order
|
||||
expect(mockedApiClient.fetchFlyerItemsForFlyers).toHaveBeenCalledWith([100, 200, 50]);
|
||||
|
||||
// Second call with same IDs in different order should use cached result
|
||||
// because query key uses sorted IDs (50,100,200)
|
||||
const { result: result2 } = renderHook(() => useFlyerItemsForFlyersQuery([50, 200, 100]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
// Should immediately have data from cache (no additional API call)
|
||||
await waitFor(() => expect(result2.current.isSuccess).toBe(true));
|
||||
|
||||
// API should still only have been called once (cached)
|
||||
expect(mockedApiClient.fetchFlyerItemsForFlyers).toHaveBeenCalledTimes(1);
|
||||
expect(result2.current.data).toEqual(mockItems);
|
||||
});
|
||||
});
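The ordering test above only passes if the hook normalizes its query key while still calling the API with the caller's original order. A minimal sketch consistent with these tests follows; it is an assumed shape for illustration only, not the hook's actual source (which is not shown in this diff).

// Assumed sketch only; not the hook's actual source.
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { FlyerItem } from '../../types';

export function useFlyerItemsForFlyersQuery(flyerIds: number[], enabled = true) {
  return useQuery<FlyerItem[], Error>({
    // Sort a copy of the IDs so [100, 200, 50] and [50, 200, 100] hit the same cache entry.
    queryKey: ['flyerItemsForFlyers', [...flyerIds].sort((a, b) => a - b)],
    enabled: enabled && flyerIds.length > 0,
    queryFn: async () => {
      // The API itself is still called with the caller's original order.
      const response = await apiClient.fetchFlyerItemsForFlyers(flyerIds);
      if (!response.ok) {
        let message = '';
        try {
          message = (await response.json())?.message ?? '';
        } catch {
          message = `Request failed with status ${response.status}`;
        }
        throw new Error(message || 'Failed to fetch flyer items');
      }
      const json = await response.json();
      // Tolerate { success: false } and non-array payloads by falling back to [].
      return json?.success && Array.isArray(json.data) ? json.data : [];
    },
  });
}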
193
src/hooks/queries/useLeaderboardQuery.test.tsx
Normal file
@@ -0,0 +1,193 @@
// src/hooks/queries/useLeaderboardQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useLeaderboardQuery } from './useLeaderboardQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import type { LeaderboardUser } from '../../types';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useLeaderboardQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch leaderboard successfully', async () => {
|
||||
const mockLeaderboard: LeaderboardUser[] = [
|
||||
{
|
||||
user_id: 'user-123',
|
||||
full_name: 'Top Scorer',
|
||||
avatar_url: 'https://example.com/avatar1.png',
|
||||
points: 1500,
|
||||
rank: '1',
|
||||
},
|
||||
{
|
||||
user_id: 'user-456',
|
||||
full_name: 'Second Place',
|
||||
avatar_url: null,
|
||||
points: 1200,
|
||||
rank: '2',
|
||||
},
|
||||
];
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockLeaderboard }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchLeaderboard).toHaveBeenCalledWith(10);
|
||||
expect(result.current.data).toEqual(mockLeaderboard);
|
||||
});
|
||||
|
||||
it('should fetch leaderboard with custom limit', async () => {
|
||||
const mockLeaderboard: LeaderboardUser[] = [
|
||||
{
|
||||
user_id: 'user-789',
|
||||
full_name: 'Champion',
|
||||
avatar_url: 'https://example.com/avatar.png',
|
||||
points: 2000,
|
||||
rank: '1',
|
||||
},
|
||||
];
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockLeaderboard }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(5), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchLeaderboard).toHaveBeenCalledWith(5);
|
||||
expect(result.current.data).toEqual(mockLeaderboard);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Authentication required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when error.message is empty', async () => {
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to fetch leaderboard');
|
||||
});
|
||||
|
||||
it('should return empty array for no users on leaderboard', async () => {
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when success is false', async () => {
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: false, data: null }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when data is not an array', async () => {
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: { invalid: 'data' } }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not fetch when disabled', () => {
|
||||
renderHook(() => useLeaderboardQuery(10, false), { wrapper });
|
||||
|
||||
expect(mockedApiClient.fetchLeaderboard).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle users with null full_name and avatar_url', async () => {
|
||||
const mockLeaderboard: LeaderboardUser[] = [
|
||||
{
|
||||
user_id: 'user-anon',
|
||||
full_name: null,
|
||||
avatar_url: null,
|
||||
points: 100,
|
||||
rank: '1',
|
||||
},
|
||||
];
|
||||
mockedApiClient.fetchLeaderboard.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockLeaderboard }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useLeaderboardQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual(mockLeaderboard);
|
||||
expect(result.current.data?.[0].full_name).toBeNull();
|
||||
expect(result.current.data?.[0].avatar_url).toBeNull();
|
||||
});
|
||||
});
216
src/hooks/queries/usePriceHistoryQuery.test.tsx
Normal file
@@ -0,0 +1,216 @@
// src/hooks/queries/usePriceHistoryQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { usePriceHistoryQuery } from './usePriceHistoryQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import type { HistoricalPriceDataPoint } from '../../types';
|
||||
import { createMockHistoricalPriceDataPoint } from '../../tests/utils/mockFactories';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('usePriceHistoryQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should fetch price history successfully', async () => {
|
||||
const masterItemIds = [101, 102];
|
||||
const mockPriceHistory: HistoricalPriceDataPoint[] = [
|
||||
createMockHistoricalPriceDataPoint({
|
||||
master_item_id: 101,
|
||||
avg_price_in_cents: 299,
|
||||
summary_date: '2026-01-15',
|
||||
}),
|
||||
createMockHistoricalPriceDataPoint({
|
||||
master_item_id: 101,
|
||||
avg_price_in_cents: 349,
|
||||
summary_date: '2026-01-16',
|
||||
}),
|
||||
createMockHistoricalPriceDataPoint({
|
||||
master_item_id: 102,
|
||||
avg_price_in_cents: 199,
|
||||
summary_date: '2026-01-15',
|
||||
}),
|
||||
];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockPriceHistory }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.fetchHistoricalPriceData).toHaveBeenCalledWith(masterItemIds);
|
||||
expect(result.current.data).toEqual(mockPriceHistory);
|
||||
});
|
||||
|
||||
it('should handle API error with error message', async () => {
|
||||
const masterItemIds = [101];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Authentication required' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle API error without message (JSON parse failure)', async () => {
|
||||
const masterItemIds = [101];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when error.message is empty', async () => {
|
||||
const masterItemIds = [101];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to fetch price history');
|
||||
});
|
||||
|
||||
it('should return empty array for no price history data', async () => {
|
||||
const masterItemIds = [101];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when success is false', async () => {
|
||||
const masterItemIds = [101];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: false, data: null }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when data is not an array', async () => {
|
||||
const masterItemIds = [101];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: 'not an array' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not fetch when masterItemIds is empty', async () => {
|
||||
const { result } = renderHook(() => usePriceHistoryQuery([]), { wrapper });
|
||||
|
||||
// Query should not be enabled with empty array
|
||||
expect(result.current.fetchStatus).toBe('idle');
|
||||
expect(mockedApiClient.fetchHistoricalPriceData).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not fetch when explicitly disabled', () => {
|
||||
const masterItemIds = [101, 102];
|
||||
|
||||
renderHook(() => usePriceHistoryQuery(masterItemIds, false), { wrapper });
|
||||
|
||||
expect(mockedApiClient.fetchHistoricalPriceData).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return empty array from queryFn when masterItemIds becomes empty during execution', async () => {
|
||||
// This tests the early return within queryFn for empty arrays
|
||||
// The query is enabled by default, but if masterItemIds is empty, queryFn returns []
|
||||
const masterItemIds: number[] = [];
|
||||
|
||||
// Even if enabled is forced to true, the queryFn should return empty array
|
||||
// Note: The hook's enabled condition prevents this from running normally,
|
||||
// but the queryFn has defensive code that returns [] if masterItemIds.length === 0
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds, true), { wrapper });
|
||||
|
||||
// Query is disabled when masterItemIds is empty due to enabled condition
|
||||
expect(result.current.fetchStatus).toBe('idle');
|
||||
expect(mockedApiClient.fetchHistoricalPriceData).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle price history with null avg_price_in_cents values', async () => {
|
||||
const masterItemIds = [101];
|
||||
const mockPriceHistory: HistoricalPriceDataPoint[] = [
|
||||
createMockHistoricalPriceDataPoint({
|
||||
master_item_id: 101,
|
||||
avg_price_in_cents: null,
|
||||
summary_date: '2026-01-15',
|
||||
}),
|
||||
createMockHistoricalPriceDataPoint({
|
||||
master_item_id: 101,
|
||||
avg_price_in_cents: 299,
|
||||
summary_date: '2026-01-16',
|
||||
}),
|
||||
];
|
||||
|
||||
mockedApiClient.fetchHistoricalPriceData.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockPriceHistory }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => usePriceHistoryQuery(masterItemIds), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual(mockPriceHistory);
|
||||
expect(result.current.data?.[0].avg_price_in_cents).toBeNull();
|
||||
expect(result.current.data?.[1].avg_price_in_cents).toBe(299);
|
||||
});
|
||||
});
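The empty-ID tests above rely on both an enabled guard and a defensive early return inside queryFn. A rough sketch of that behaviour, assuming the usual @tanstack/react-query shape (illustrative only; the real usePriceHistoryQuery.ts is not part of this excerpt):

// Assumed sketch only; not the hook's actual source.
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { HistoricalPriceDataPoint } from '../../types';

export function usePriceHistoryQuery(masterItemIds: number[], enabled = true) {
  return useQuery<HistoricalPriceDataPoint[], Error>({
    queryKey: ['priceHistory', masterItemIds],
    // The query only runs when the caller has not disabled it AND there is at least one ID.
    enabled: enabled && masterItemIds.length > 0,
    queryFn: async () => {
      // Defensive early return mirrors the enabled guard, so a forced run still skips the network.
      if (masterItemIds.length === 0) return [];
      const response = await apiClient.fetchHistoricalPriceData(masterItemIds);
      if (!response.ok) {
        // Detailed message mapping (body.message -> status fallback -> generic text) is elided here;
        // it follows the same pattern as the other query hooks in this change.
        throw new Error('Failed to fetch price history');
      }
      const json = await response.json();
      return json?.success && Array.isArray(json.data) ? json.data : [];
    },
  });
}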
413
src/hooks/queries/useUserProfileDataQuery.test.tsx
Normal file
@@ -0,0 +1,413 @@
// src/hooks/queries/useUserProfileDataQuery.test.tsx
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { useUserProfileDataQuery } from './useUserProfileDataQuery';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import type { UserProfile, Achievement, UserAchievement } from '../../types';
|
||||
|
||||
vi.mock('../../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('useUserProfileDataQuery', () => {
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
const mockProfile: UserProfile = {
|
||||
full_name: 'Test User',
|
||||
avatar_url: 'https://example.com/avatar.png',
|
||||
address_id: 1,
|
||||
points: 100,
|
||||
role: 'user',
|
||||
preferences: { darkMode: false, unitSystem: 'metric' },
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
user: {
|
||||
user_id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
address: {
|
||||
address_id: 1,
|
||||
address_line_1: '123 Main St',
|
||||
city: 'Test City',
|
||||
province_state: 'ON',
|
||||
postal_code: 'A1B 2C3',
|
||||
country: 'Canada',
|
||||
latitude: null,
|
||||
longitude: null,
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
};
|
||||
|
||||
const mockAchievements: (UserAchievement & Achievement)[] = [
|
||||
{
|
||||
user_id: 'user-123',
|
||||
achievement_id: 1,
|
||||
achieved_at: '2025-01-15T10:00:00Z',
|
||||
name: 'First Upload',
|
||||
description: 'Uploaded your first flyer',
|
||||
icon: 'trophy',
|
||||
points_value: 10,
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
{
|
||||
user_id: 'user-123',
|
||||
achievement_id: 2,
|
||||
achieved_at: '2025-01-20T15:30:00Z',
|
||||
name: 'Deal Hunter',
|
||||
description: 'Found 10 deals',
|
||||
icon: 'star',
|
||||
points_value: 25,
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
];
|
||||
|
||||
it('should fetch user profile and achievements successfully', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockAchievements }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.getAuthenticatedUserProfile).toHaveBeenCalled();
|
||||
expect(mockedApiClient.getUserAchievements).toHaveBeenCalled();
|
||||
expect(result.current.data).toEqual({
|
||||
profile: mockProfile,
|
||||
achievements: mockAchievements,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle profile API error with error message', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 401,
|
||||
json: () => Promise.resolve({ message: 'Authentication required' }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockAchievements }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Authentication required');
|
||||
});
|
||||
|
||||
it('should handle profile API error without message', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockAchievements }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when profile error.message is empty', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockAchievements }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to fetch user profile');
|
||||
});
|
||||
|
||||
it('should handle achievements API error with error message', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 403,
|
||||
json: () => Promise.resolve({ message: 'Access denied' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Access denied');
|
||||
});
|
||||
|
||||
it('should handle achievements API error without message', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.reject(new Error('Parse error')),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should use fallback message when achievements error.message is empty', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to fetch user achievements');
|
||||
});
|
||||
|
||||
it('should return empty array for no achievements', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual({
|
||||
profile: mockProfile,
|
||||
achievements: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle undefined achievements data gracefully', async () => {
|
||||
// When API returns response without data wrapper (legacy format)
|
||||
// achievementsJson.data will be undefined, falling back to achievementsJson itself
|
||||
// Then achievements || [] will convert falsy value to empty array
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
|
||||
// Return empty array directly (no wrapper) - this tests the fallback path
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual({
|
||||
profile: mockProfile,
|
||||
achievements: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle response without data wrapper (direct response)', async () => {
|
||||
// Some APIs may return data directly without the { success, data } wrapper
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockProfile),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockAchievements),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual({
|
||||
profile: mockProfile,
|
||||
achievements: mockAchievements,
|
||||
});
|
||||
});
|
||||
|
||||
it('should not fetch when disabled', () => {
|
||||
renderHook(() => useUserProfileDataQuery(false), { wrapper });
|
||||
|
||||
expect(mockedApiClient.getAuthenticatedUserProfile).not.toHaveBeenCalled();
|
||||
expect(mockedApiClient.getUserAchievements).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fetch when enabled is explicitly true', async () => {
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockAchievements }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(true), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.getAuthenticatedUserProfile).toHaveBeenCalled();
|
||||
expect(mockedApiClient.getUserAchievements).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle profile with minimal data', async () => {
|
||||
const minimalProfile: UserProfile = {
|
||||
full_name: null,
|
||||
avatar_url: null,
|
||||
address_id: null,
|
||||
points: 0,
|
||||
role: 'user',
|
||||
preferences: null,
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
user: {
|
||||
user_id: 'user-456',
|
||||
email: 'minimal@example.com',
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
address: null,
|
||||
};
|
||||
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: minimalProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: [] }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data).toEqual({
|
||||
profile: minimalProfile,
|
||||
achievements: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle admin user profile', async () => {
|
||||
const adminProfile: UserProfile = {
|
||||
...mockProfile,
|
||||
role: 'admin',
|
||||
points: 500,
|
||||
};
|
||||
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: adminProfile }),
|
||||
} as Response);
|
||||
|
||||
mockedApiClient.getUserAchievements.mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockAchievements }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(result.current.data?.profile.role).toBe('admin');
|
||||
expect(result.current.data?.profile.points).toBe(500);
|
||||
});
|
||||
|
||||
it('should call both APIs in parallel', async () => {
|
||||
let profileCallTime: number | null = null;
|
||||
let achievementsCallTime: number | null = null;
|
||||
|
||||
mockedApiClient.getAuthenticatedUserProfile.mockImplementation(() => {
|
||||
profileCallTime = Date.now();
|
||||
return Promise.resolve({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockProfile }),
|
||||
} as Response);
|
||||
});
|
||||
|
||||
mockedApiClient.getUserAchievements.mockImplementation(() => {
|
||||
achievementsCallTime = Date.now();
|
||||
return Promise.resolve({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true, data: mockAchievements }),
|
||||
} as Response);
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useUserProfileDataQuery(), { wrapper });
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
// Both calls should have happened
|
||||
expect(profileCallTime).not.toBeNull();
|
||||
expect(achievementsCallTime).not.toBeNull();
|
||||
|
||||
// Both calls should have been made nearly simultaneously (within 50ms)
|
||||
// This verifies Promise.all is being used for parallel execution
|
||||
expect(
|
||||
Math.abs(
|
||||
(profileCallTime as unknown as number) - (achievementsCallTime as unknown as number),
|
||||
),
|
||||
).toBeLessThan(50);
|
||||
});
|
||||
});
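The timing assertion in the last test only holds if both requests are started before either is awaited. A hedged sketch of a queryFn consistent with that behaviour (assumed, not the actual implementation; error-message mapping is elided):

// Assumed sketch only; not the hook's actual source. Error-message mapping is elided.
import { useQuery } from '@tanstack/react-query';
import * as apiClient from '../../services/apiClient';
import type { Achievement, UserAchievement, UserProfile } from '../../types';

export function useUserProfileDataQuery(enabled = true) {
  return useQuery<{ profile: UserProfile; achievements: (UserAchievement & Achievement)[] }, Error>({
    queryKey: ['userProfileData'],
    enabled,
    queryFn: async () => {
      // Start both requests before awaiting either; the timing test checks they begin within ~50ms.
      const [profileRes, achievementsRes] = await Promise.all([
        apiClient.getAuthenticatedUserProfile(),
        apiClient.getUserAchievements(),
      ]);
      if (!profileRes.ok) throw new Error('Failed to fetch user profile');
      if (!achievementsRes.ok) throw new Error('Failed to fetch user achievements');
      const profileJson = await profileRes.json();
      const achievementsJson = await achievementsRes.json();
      return {
        // Accept both the { success, data } envelope and a bare payload (legacy format).
        profile: profileJson?.data ?? profileJson,
        achievements: achievementsJson?.data ?? achievementsJson ?? [],
      };
    },
  });
}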
@@ -14,7 +14,7 @@ vi.mock('../services/eventBus', () => ({

import { eventBus } from '../services/eventBus';

const mockEventBus = eventBus as {
const mockEventBus = eventBus as unknown as {
  on: Mock;
  off: Mock;
  dispatch: Mock;
@@ -206,7 +206,7 @@ describe('useEventBus', () => {
    name: string;
  }

  const callback = vi.fn<[TestData?], void>();
  const callback = vi.fn();

  renderHook(() => useEventBus<TestData>('typed-event', callback));

@@ -217,7 +217,7 @@ describe('useEventBus', () => {
  });

  it('should handle callback with optional parameter', () => {
    const callback = vi.fn<[string?], void>();
    const callback = vi.fn();

    renderHook(() => useEventBus<string>('optional-event', callback));

300
src/hooks/useFeatureFlag.test.ts
Normal file
@@ -0,0 +1,300 @@
// src/hooks/useFeatureFlag.test.ts
|
||||
/**
|
||||
* Unit tests for the useFeatureFlag React hook (ADR-024).
|
||||
*
|
||||
* These tests verify:
|
||||
* - useFeatureFlag() returns correct boolean for each flag
|
||||
* - useFeatureFlag() handles all valid flag names
|
||||
* - useAllFeatureFlags() returns all flag states
|
||||
* - Default behavior (all flags disabled when not set)
|
||||
* - Memoization behavior (stable references)
|
||||
*/
|
||||
|
||||
import { renderHook } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
// Define mock feature flags object that will be mutated in tests
|
||||
// Note: We use a getter function pattern to avoid hoisting issues with vi.mock
|
||||
vi.mock('../config', () => {
|
||||
// Create a mutable flags object
|
||||
const flags = {
|
||||
newDashboard: false,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: false,
|
||||
};
|
||||
|
||||
return {
|
||||
default: {
|
||||
featureFlags: flags,
|
||||
},
|
||||
// Export the flags object for test mutation
|
||||
__mockFlags: flags,
|
||||
};
|
||||
});
|
||||
|
||||
// Import config to get access to the mock flags for mutation
|
||||
import config from '../config';
|
||||
|
||||
// Import after mocking
|
||||
import { useFeatureFlag, useAllFeatureFlags, type FeatureFlagName } from './useFeatureFlag';
|
||||
|
||||
// Helper to reset flags
|
||||
const resetMockFlags = () => {
|
||||
config.featureFlags.newDashboard = false;
|
||||
config.featureFlags.betaRecipes = false;
|
||||
config.featureFlags.experimentalAi = false;
|
||||
config.featureFlags.debugMode = false;
|
||||
};
|
||||
|
||||
describe('useFeatureFlag hook', () => {
|
||||
beforeEach(() => {
|
||||
// Reset mock flags to default state before each test
|
||||
resetMockFlags();
|
||||
});
|
||||
|
||||
describe('useFeatureFlag()', () => {
|
||||
it('should return false for disabled flags', () => {
|
||||
const { result } = renderHook(() => useFeatureFlag('newDashboard'));
|
||||
|
||||
expect(result.current).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for enabled flags', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
|
||||
const { result } = renderHook(() => useFeatureFlag('newDashboard'));
|
||||
|
||||
expect(result.current).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for betaRecipes when disabled', () => {
|
||||
const { result } = renderHook(() => useFeatureFlag('betaRecipes'));
|
||||
|
||||
expect(result.current).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for betaRecipes when enabled', () => {
|
||||
config.featureFlags.betaRecipes = true;
|
||||
|
||||
const { result } = renderHook(() => useFeatureFlag('betaRecipes'));
|
||||
|
||||
expect(result.current).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for experimentalAi when disabled', () => {
|
||||
const { result } = renderHook(() => useFeatureFlag('experimentalAi'));
|
||||
|
||||
expect(result.current).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for experimentalAi when enabled', () => {
|
||||
config.featureFlags.experimentalAi = true;
|
||||
|
||||
const { result } = renderHook(() => useFeatureFlag('experimentalAi'));
|
||||
|
||||
expect(result.current).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for debugMode when disabled', () => {
|
||||
const { result } = renderHook(() => useFeatureFlag('debugMode'));
|
||||
|
||||
expect(result.current).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for debugMode when enabled', () => {
|
||||
config.featureFlags.debugMode = true;
|
||||
|
||||
const { result } = renderHook(() => useFeatureFlag('debugMode'));
|
||||
|
||||
expect(result.current).toBe(true);
|
||||
});
|
||||
|
||||
it('should return consistent value across multiple calls with same flag', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
|
||||
const { result: result1 } = renderHook(() => useFeatureFlag('newDashboard'));
|
||||
const { result: result2 } = renderHook(() => useFeatureFlag('newDashboard'));
|
||||
|
||||
expect(result1.current).toBe(result2.current);
|
||||
expect(result1.current).toBe(true);
|
||||
});
|
||||
|
||||
it('should return different values for different flags', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
config.featureFlags.betaRecipes = false;
|
||||
|
||||
const { result: dashboardResult } = renderHook(() => useFeatureFlag('newDashboard'));
|
||||
const { result: recipesResult } = renderHook(() => useFeatureFlag('betaRecipes'));
|
||||
|
||||
expect(dashboardResult.current).toBe(true);
|
||||
expect(recipesResult.current).toBe(false);
|
||||
});
|
||||
|
||||
it('should memoize the result (stable reference with same flagName)', () => {
|
||||
const { result, rerender } = renderHook(
|
||||
({ flagName }: { flagName: FeatureFlagName }) => useFeatureFlag(flagName),
|
||||
{ initialProps: { flagName: 'newDashboard' as FeatureFlagName } },
|
||||
);
|
||||
|
||||
const firstValue = result.current;
|
||||
|
||||
// Rerender with same flag name
|
||||
rerender({ flagName: 'newDashboard' as FeatureFlagName });
|
||||
|
||||
const secondValue = result.current;
|
||||
|
||||
// Values should be equal (both false in this case)
|
||||
expect(firstValue).toBe(secondValue);
|
||||
});
|
||||
|
||||
it('should update when flag name changes', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
config.featureFlags.betaRecipes = false;
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ flagName }: { flagName: FeatureFlagName }) => useFeatureFlag(flagName),
|
||||
{ initialProps: { flagName: 'newDashboard' as FeatureFlagName } },
|
||||
);
|
||||
|
||||
expect(result.current).toBe(true);
|
||||
|
||||
// Change to a different flag
|
||||
rerender({ flagName: 'betaRecipes' as FeatureFlagName });
|
||||
|
||||
expect(result.current).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('useAllFeatureFlags()', () => {
|
||||
it('should return all flags with their current states', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
config.featureFlags.debugMode = true;
|
||||
|
||||
const { result } = renderHook(() => useAllFeatureFlags());
|
||||
|
||||
expect(result.current).toEqual({
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return all flags as false when none are enabled', () => {
|
||||
const { result } = renderHook(() => useAllFeatureFlags());
|
||||
|
||||
expect(result.current).toEqual({
|
||||
newDashboard: false,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return a shallow copy (not the original object)', () => {
|
||||
const { result } = renderHook(() => useAllFeatureFlags());
|
||||
|
||||
// Modifying the returned object should not affect the config
|
||||
const flags = result.current;
|
||||
(flags as Record<string, boolean>).newDashboard = true;
|
||||
|
||||
// Re-render and get fresh flags
|
||||
const { result: result2 } = renderHook(() => useAllFeatureFlags());
|
||||
|
||||
// The mock config should still have the original value
|
||||
expect(config.featureFlags.newDashboard).toBe(false);
|
||||
// Note: result2 reads from the mock, which we didn't modify
|
||||
expect(result2.current.newDashboard).toBe(false);
|
||||
});
|
||||
|
||||
it('should return an object with all expected flag names', () => {
|
||||
const { result } = renderHook(() => useAllFeatureFlags());
|
||||
|
||||
const expectedFlags = ['newDashboard', 'betaRecipes', 'experimentalAi', 'debugMode'];
|
||||
|
||||
expect(Object.keys(result.current).sort()).toEqual(expectedFlags.sort());
|
||||
});
|
||||
|
||||
it('should memoize the result', () => {
|
||||
const { result, rerender } = renderHook(() => useAllFeatureFlags());
|
||||
|
||||
const firstValue = result.current;
|
||||
|
||||
// Rerender without any changes
|
||||
rerender();
|
||||
|
||||
const secondValue = result.current;
|
||||
|
||||
// Should return the same memoized object reference
|
||||
expect(firstValue).toBe(secondValue);
|
||||
});
|
||||
});
|
||||
|
||||
describe('FeatureFlagName type', () => {
|
||||
it('should accept valid flag names at compile time', () => {
|
||||
// These should compile without TypeScript errors
|
||||
const validNames: FeatureFlagName[] = [
|
||||
'newDashboard',
|
||||
'betaRecipes',
|
||||
'experimentalAi',
|
||||
'debugMode',
|
||||
];
|
||||
|
||||
validNames.forEach((name) => {
|
||||
const { result } = renderHook(() => useFeatureFlag(name));
|
||||
expect(typeof result.current).toBe('boolean');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('useFeatureFlag integration scenarios', () => {
|
||||
beforeEach(() => {
|
||||
resetMockFlags();
|
||||
});
|
||||
|
||||
it('should work with conditional rendering pattern', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
|
||||
const { result } = renderHook(() => {
|
||||
const isNewDashboard = useFeatureFlag('newDashboard');
|
||||
return isNewDashboard ? 'new' : 'legacy';
|
||||
});
|
||||
|
||||
expect(result.current).toBe('new');
|
||||
});
|
||||
|
||||
it('should work with multiple flags in same component', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
config.featureFlags.betaRecipes = true;
|
||||
config.featureFlags.experimentalAi = false;
|
||||
|
||||
const { result } = renderHook(() => ({
|
||||
dashboard: useFeatureFlag('newDashboard'),
|
||||
recipes: useFeatureFlag('betaRecipes'),
|
||||
ai: useFeatureFlag('experimentalAi'),
|
||||
}));
|
||||
|
||||
expect(result.current).toEqual({
|
||||
dashboard: true,
|
||||
recipes: true,
|
||||
ai: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with useAllFeatureFlags for admin panels', () => {
|
||||
config.featureFlags.newDashboard = true;
|
||||
config.featureFlags.debugMode = true;
|
||||
|
||||
const { result } = renderHook(() => {
|
||||
const flags = useAllFeatureFlags();
|
||||
const enabledCount = Object.values(flags).filter(Boolean).length;
|
||||
return { flags, enabledCount };
|
||||
});
|
||||
|
||||
expect(result.current.enabledCount).toBe(2);
|
||||
expect(result.current.flags.newDashboard).toBe(true);
|
||||
expect(result.current.flags.debugMode).toBe(true);
|
||||
});
|
||||
});
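These tests replace config.featureFlags wholesale, so the real config module never runs here. For context, a config.ts along the following lines would satisfy both the mock shape used above and the build-time flag behaviour documented in useFeatureFlag.ts below; the VITE_FEATURE_* variable names and the flagFromEnv helper are assumptions for illustration, not code from this change.

// src/config.ts (assumed sketch; the actual config module is not included in this diff)
// Only the literal string 'true' enables a flag; unset or any other value stays false.
const flagFromEnv = (value: string | undefined): boolean => value === 'true';

const config = {
  featureFlags: {
    newDashboard: flagFromEnv(import.meta.env.VITE_FEATURE_NEW_DASHBOARD),
    betaRecipes: flagFromEnv(import.meta.env.VITE_FEATURE_BETA_RECIPES),
    experimentalAi: flagFromEnv(import.meta.env.VITE_FEATURE_EXPERIMENTAL_AI),
    debugMode: flagFromEnv(import.meta.env.VITE_FEATURE_DEBUG_MODE),
  },
};

export default config;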
86
src/hooks/useFeatureFlag.ts
Normal file
@@ -0,0 +1,86 @@
// src/hooks/useFeatureFlag.ts
import { useMemo } from 'react';
import config from '../config';

/**
 * Union type of all available feature flag names.
 * This type is derived from the config.featureFlags object to ensure
 * type safety and autocomplete support when checking feature flags.
 *
 * @example
 * const flagName: FeatureFlagName = 'newDashboard'; // Valid
 * const invalid: FeatureFlagName = 'nonexistent'; // TypeScript error
 */
export type FeatureFlagName = keyof typeof config.featureFlags;

/**
 * React hook to check if a feature flag is enabled.
 *
 * Feature flags are loaded from environment variables at build time and
 * cannot change during runtime. This hook memoizes the result to prevent
 * unnecessary re-renders when the component re-renders.
 *
 * @param flagName - The name of the feature flag to check (must be a valid FeatureFlagName)
 * @returns boolean indicating if the feature is enabled (true) or disabled (false)
 *
 * @example
 * // Basic usage - conditionally render UI
 * function Dashboard() {
 *   const isNewDashboard = useFeatureFlag('newDashboard');
 *
 *   if (isNewDashboard) {
 *     return <NewDashboard />;
 *   }
 *   return <LegacyDashboard />;
 * }
 *
 * @example
 * // Track feature flag usage with analytics
 * function FeatureComponent() {
 *   const isExperimentalAi = useFeatureFlag('experimentalAi');
 *
 *   useEffect(() => {
 *     if (isExperimentalAi) {
 *       analytics.track('experimental_ai_enabled');
 *     }
 *   }, [isExperimentalAi]);
 *
 *   return isExperimentalAi ? <AiFeature /> : null;
 * }
 *
 * @see docs/adr/0024-feature-flagging-strategy.md
 */
export function useFeatureFlag(flagName: FeatureFlagName): boolean {
  return useMemo(() => config.featureFlags[flagName], [flagName]);
}

/**
 * React hook to get all feature flags and their current states.
 *
 * This hook is useful for debugging, admin panels, or components that
 * need to display the current feature flag configuration. The returned
 * object is a shallow copy to prevent accidental mutation of the config.
 *
 * @returns Record mapping each feature flag name to its boolean state
 *
 * @example
 * // Display feature flag status in an admin panel
 * function FeatureFlagDebugPanel() {
 *   const flags = useAllFeatureFlags();
 *
 *   return (
 *     <ul>
 *       {Object.entries(flags).map(([name, enabled]) => (
 *         <li key={name}>
 *           {name}: {enabled ? 'Enabled' : 'Disabled'}
 *         </li>
 *       ))}
 *     </ul>
 *   );
 * }
 *
 * @see docs/adr/0024-feature-flagging-strategy.md
 */
export function useAllFeatureFlags(): Record<FeatureFlagName, boolean> {
  return useMemo(() => ({ ...config.featureFlags }), []);
}
161
src/hooks/useUserProfileData.test.ts
Normal file
@@ -0,0 +1,161 @@
// src/hooks/useUserProfileData.test.ts
|
||||
import React from 'react';
|
||||
import { renderHook, act } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import { useUserProfileData } from './useUserProfileData';
|
||||
import * as useUserProfileDataQueryModule from './queries/useUserProfileDataQuery';
|
||||
import type { UserProfile, Achievement } from '../types';
|
||||
|
||||
// Mock the underlying query hook
|
||||
vi.mock('./queries/useUserProfileDataQuery');
|
||||
|
||||
const mockedUseUserProfileDataQuery = vi.mocked(
|
||||
useUserProfileDataQueryModule.useUserProfileDataQuery,
|
||||
);
|
||||
|
||||
// Mock factories for consistent test data
|
||||
const createMockUserProfile = (overrides: Partial<UserProfile> = {}): UserProfile => ({
|
||||
user: {
|
||||
user_id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
},
|
||||
full_name: 'Test User',
|
||||
avatar_url: null,
|
||||
address_id: null,
|
||||
points: 0,
|
||||
role: 'user',
|
||||
preferences: null,
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
address: null,
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createMockAchievement = (overrides: Partial<Achievement> = {}): Achievement => ({
|
||||
achievement_id: 1,
|
||||
name: 'First Upload',
|
||||
description: 'You uploaded your first flyer!',
|
||||
points_value: 10,
|
||||
created_at: new Date().toISOString(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
const createWrapper = () => {
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false, // Turn off retries for tests
|
||||
},
|
||||
},
|
||||
});
|
||||
return ({ children }: { children: React.ReactNode }) =>
|
||||
React.createElement(QueryClientProvider, { client: queryClient }, children);
|
||||
};
|
||||
|
||||
describe('useUserProfileData Hook', () => {
|
||||
const mockProfileData = createMockUserProfile();
|
||||
const mockAchievementsData = [createMockAchievement()];
|
||||
const mockQueryData = {
|
||||
profile: mockProfileData,
|
||||
achievements: mockAchievementsData,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
// Reset the mock to a default loading state
|
||||
mockedUseUserProfileDataQuery.mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: true,
|
||||
error: null,
|
||||
} as ReturnType<typeof mockedUseUserProfileDataQuery>);
|
||||
});
|
||||
|
||||
it('should return loading state initially', () => {
|
||||
const { result } = renderHook(() => useUserProfileData(), { wrapper: createWrapper() });
|
||||
|
||||
expect(result.current.isLoading).toBe(true);
|
||||
expect(result.current.profile).toBeNull();
|
||||
expect(result.current.achievements).toEqual([]);
|
||||
expect(result.current.error).toBeNull();
|
||||
});
|
||||
|
||||
it('should return profile and achievements on successful fetch', () => {
|
||||
mockedUseUserProfileDataQuery.mockReturnValue({
|
||||
data: mockQueryData,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
} as ReturnType<typeof mockedUseUserProfileDataQuery>);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileData(), { wrapper: createWrapper() });
|
||||
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.profile).toEqual(mockProfileData);
|
||||
expect(result.current.achievements).toEqual(mockAchievementsData);
|
||||
expect(result.current.error).toBeNull();
|
||||
});
|
||||
|
||||
it('should return an error message on failure', () => {
|
||||
const mockError = new Error('Failed to fetch profile');
|
||||
mockedUseUserProfileDataQuery.mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
error: mockError,
|
||||
} as ReturnType<typeof mockedUseUserProfileDataQuery>);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileData(), { wrapper: createWrapper() });
|
||||
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.profile).toBeNull();
|
||||
expect(result.current.achievements).toEqual([]);
|
||||
expect(result.current.error).toBe('Failed to fetch profile');
|
||||
});
|
||||
|
||||
it('setProfile should update the profile in the query cache with a new object', () => {
|
||||
const queryClient = new QueryClient();
|
||||
queryClient.setQueryData(['user-profile-data'], mockQueryData);
|
||||
|
||||
const wrapper = ({ children }: { children: React.ReactNode }) =>
|
||||
React.createElement(QueryClientProvider, { client: queryClient }, children);
|
||||
|
||||
const { result } = renderHook(() => useUserProfileData(), { wrapper });
|
||||
|
||||
const updatedProfile: UserProfile = { ...mockProfileData, full_name: 'Updated' };
|
||||
|
||||
act(() => {
|
||||
result.current.setProfile(updatedProfile);
|
||||
});
|
||||
|
||||
const updatedData = queryClient.getQueryData(['user-profile-data']) as typeof mockQueryData;
|
||||
expect(updatedData.profile).toEqual(updatedProfile);
|
||||
});
|
||||
|
||||
it('setProfile should not throw if oldData is undefined', () => {
|
||||
const { result } = renderHook(() => useUserProfileData(), { wrapper: createWrapper() });
|
||||
|
||||
const newProfile = createMockUserProfile();
|
||||
|
||||
expect(() => {
|
||||
act(() => {
|
||||
result.current.setProfile(newProfile);
|
||||
});
|
||||
}).not.toThrow();
|
||||
|
||||
const cachedData = result.current.profile;
|
||||
expect(cachedData).toBeNull();
|
||||
});
|
||||
|
||||
it('should maintain stable function references across rerenders', () => {
|
||||
const { result, rerender } = renderHook(() => useUserProfileData(), {
|
||||
wrapper: createWrapper(),
|
||||
});
|
||||
|
||||
const initialSetProfile = result.current.setProfile;
|
||||
|
||||
rerender();
|
||||
|
||||
expect(result.current.setProfile).toBe(initialSetProfile);
|
||||
});
|
||||
});
|
||||
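The hook exercised by these tests is not included in this diff. Here is a minimal sketch of what they imply, assuming the `['user-profile-data']` query key and the return shape the assertions rely on; the real `src/hooks/useUserProfileData.ts` may differ.

```typescript
// Hypothetical sketch of the hook under test, inferred from the assertions above.
import { useCallback } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { useUserProfileDataQuery } from './queries/useUserProfileDataQuery';
import type { UserProfile, Achievement } from '../types';

interface UserProfileQueryData {
  profile: UserProfile;
  achievements: Achievement[];
}

export function useUserProfileData() {
  const queryClient = useQueryClient();
  const { data, isLoading, error } = useUserProfileDataQuery();

  // Replace the cached profile with a new object; a no-op when nothing is cached yet.
  const setProfile = useCallback(
    (profile: UserProfile) => {
      queryClient.setQueryData<UserProfileQueryData>(['user-profile-data'], (oldData) =>
        oldData ? { ...oldData, profile } : oldData,
      );
    },
    [queryClient],
  );

  return {
    profile: data?.profile ?? null,
    achievements: data?.achievements ?? [],
    isLoading,
    error: error ? error.message : null,
    setProfile,
  };
}
```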
src/hooks/useWebSocket.test.ts (new file, 304 lines)
@@ -0,0 +1,304 @@
|
||||
// src/hooks/useWebSocket.test.ts
|
||||
import { renderHook, act } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { useWebSocket } from './useWebSocket';
|
||||
import { eventBus } from '../services/eventBus';
|
||||
|
||||
// Mock eventBus
|
||||
vi.mock('../services/eventBus', () => ({
|
||||
eventBus: {
|
||||
dispatch: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// A mock WebSocket class for testing
|
||||
class MockWebSocket {
|
||||
static instances: MockWebSocket[] = [];
|
||||
static CONNECTING = 0;
|
||||
static OPEN = 1;
|
||||
static CLOSING = 2;
|
||||
static CLOSED = 3;
|
||||
|
||||
url: string;
|
||||
readyState: number;
|
||||
onopen: () => void = () => {};
|
||||
onclose: (event: { code: number; reason: string; wasClean: boolean }) => void = () => {};
|
||||
onmessage: (event: { data: string }) => void = () => {};
|
||||
onerror: (error: Event) => void = () => {};
|
||||
send = vi.fn();
|
||||
close = vi.fn((code = 1000, reason = 'Client disconnecting') => {
|
||||
if (this.readyState === MockWebSocket.CLOSED || this.readyState === MockWebSocket.CLOSING)
|
||||
return;
|
||||
this.readyState = MockWebSocket.CLOSING;
|
||||
setTimeout(() => {
|
||||
this.readyState = MockWebSocket.CLOSED;
|
||||
this.onclose({ code, reason, wasClean: code === 1000 });
|
||||
}, 0);
|
||||
});
|
||||
|
||||
constructor(url: string) {
|
||||
this.url = url;
|
||||
this.readyState = MockWebSocket.CONNECTING;
|
||||
MockWebSocket.instances.push(this);
|
||||
}
|
||||
|
||||
// --- Test Helper Methods ---
|
||||
_open() {
|
||||
this.readyState = MockWebSocket.OPEN;
|
||||
this.onopen();
|
||||
}
|
||||
|
||||
_message(data: object | string) {
|
||||
if (typeof data === 'string') {
|
||||
this.onmessage({ data });
|
||||
} else {
|
||||
this.onmessage({ data: JSON.stringify(data) });
|
||||
}
|
||||
}
|
||||
|
||||
_error(errorEvent = new Event('error')) {
|
||||
this.onerror(errorEvent);
|
||||
}
|
||||
|
||||
_close(code: number, reason: string) {
|
||||
if (this.readyState === MockWebSocket.CLOSED) return;
|
||||
this.readyState = MockWebSocket.CLOSED;
|
||||
this.onclose({ code, reason, wasClean: code === 1000 });
|
||||
}
|
||||
|
||||
static get lastInstance(): MockWebSocket | undefined {
|
||||
return this.instances[this.instances.length - 1];
|
||||
}
|
||||
|
||||
static clearInstances() {
|
||||
this.instances = [];
|
||||
}
|
||||
}
|
||||
|
||||
describe('useWebSocket Hook', () => {
|
||||
const mockToken = 'test-token';
|
||||
let consoleErrorSpy: ReturnType<typeof vi.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers();
|
||||
global.WebSocket = MockWebSocket as any;
|
||||
MockWebSocket.clearInstances();
|
||||
|
||||
Object.defineProperty(window, 'location', {
|
||||
value: { protocol: 'https:', host: 'testhost.com' },
|
||||
writable: true,
|
||||
});
|
||||
|
||||
Object.defineProperty(document, 'cookie', {
|
||||
writable: true,
|
||||
value: `accessToken=${mockToken}`,
|
||||
});
|
||||
|
||||
vi.clearAllMocks();
|
||||
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers();
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should not connect on mount if autoConnect is false', () => {
|
||||
renderHook(() => useWebSocket({ autoConnect: false }));
|
||||
expect(MockWebSocket.instances).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should auto-connect on mount by default', () => {
|
||||
const { result } = renderHook(() => useWebSocket());
|
||||
expect(MockWebSocket.instances).toHaveLength(1);
|
||||
expect(MockWebSocket.lastInstance?.url).toBe(`wss://testhost.com/ws?token=${mockToken}`);
|
||||
expect(result.current.isConnecting).toBe(true);
|
||||
});
|
||||
|
||||
it('should set an error state if no access token is found', () => {
|
||||
document.cookie = ''; // No token
|
||||
const { result } = renderHook(() => useWebSocket());
|
||||
|
||||
expect(result.current.isConnected).toBe(false);
|
||||
expect(result.current.isConnecting).toBe(false);
|
||||
expect(result.current.error).toBe('No access token found. Please log in.');
|
||||
expect(MockWebSocket.instances).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should transition to connected state on WebSocket open', () => {
|
||||
const onConnect = vi.fn();
|
||||
const { result } = renderHook(() => useWebSocket({ onConnect }));
|
||||
|
||||
expect(result.current.isConnecting).toBe(true);
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
expect(result.current.isConnected).toBe(true);
|
||||
expect(result.current.isConnecting).toBe(false);
|
||||
expect(onConnect).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle incoming messages and dispatch to eventBus', () => {
|
||||
renderHook(() => useWebSocket());
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
const dealData = { flyerId: 1 };
|
||||
act(() => MockWebSocket.lastInstance?._message({ type: 'deal-notification', data: dealData }));
|
||||
expect(eventBus.dispatch).toHaveBeenCalledWith('notification:deal', dealData);
|
||||
});
|
||||
|
||||
it('should log an error for invalid JSON messages', () => {
|
||||
renderHook(() => useWebSocket());
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
const invalidJson = 'this is not json';
|
||||
act(() => MockWebSocket.lastInstance?._message(invalidJson));
|
||||
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'[WebSocket] Failed to parse message:',
|
||||
expect.any(SyntaxError),
|
||||
);
|
||||
expect(eventBus.dispatch).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should respond to ping with pong', () => {
|
||||
renderHook(() => useWebSocket());
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
act(() => MockWebSocket.lastInstance?._message({ type: 'ping', data: {} }));
|
||||
|
||||
expect(MockWebSocket.lastInstance?.send).toHaveBeenCalledWith(
|
||||
expect.stringContaining('"type":"pong"'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should disconnect and clean up when disconnect is called', () => {
|
||||
const onDisconnect = vi.fn();
|
||||
const { result } = renderHook(() => useWebSocket({ onDisconnect }));
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
act(() => result.current.disconnect());
|
||||
|
||||
expect(MockWebSocket.lastInstance?.close).toHaveBeenCalledWith(1000, 'Client disconnecting');
|
||||
expect(result.current.isConnected).toBe(false);
|
||||
|
||||
act(() => vi.runAllTimers());
|
||||
expect(onDisconnect).toHaveBeenCalled();
|
||||
|
||||
// Ensure no reconnection attempt is made
|
||||
expect(MockWebSocket.instances).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should attempt to reconnect on unexpected close', () => {
|
||||
const { result } = renderHook(() => useWebSocket({ reconnectDelay: 1000 }));
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal closure'));
|
||||
expect(result.current.isConnected).toBe(false);
|
||||
|
||||
act(() => vi.advanceTimersByTime(1000));
|
||||
expect(MockWebSocket.instances).toHaveLength(2);
|
||||
expect(result.current.isConnecting).toBe(true);
|
||||
});
|
||||
|
||||
it('should use exponential backoff for reconnection', () => {
|
||||
renderHook(() => useWebSocket({ reconnectDelay: 1000, maxReconnectAttempts: 3 }));
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
// 1st failure -> 1s delay
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal'));
|
||||
act(() => vi.advanceTimersByTime(1000));
|
||||
expect(MockWebSocket.instances).toHaveLength(2);
|
||||
|
||||
// 2nd failure -> 2s delay
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal'));
|
||||
act(() => vi.advanceTimersByTime(2000));
|
||||
expect(MockWebSocket.instances).toHaveLength(3);
|
||||
|
||||
// 3rd failure -> 4s delay
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal'));
|
||||
act(() => vi.advanceTimersByTime(4000));
|
||||
expect(MockWebSocket.instances).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('should stop reconnecting after maxReconnectAttempts', () => {
|
||||
const { result } = renderHook(() =>
|
||||
useWebSocket({ reconnectDelay: 100, maxReconnectAttempts: 1 }),
|
||||
);
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
// 1st failure
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal'));
|
||||
act(() => vi.advanceTimersByTime(100));
|
||||
expect(MockWebSocket.instances).toHaveLength(2);
|
||||
|
||||
// 2nd failure (should be the last)
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal'));
|
||||
act(() => vi.advanceTimersByTime(5000));
|
||||
|
||||
expect(MockWebSocket.instances).toHaveLength(2); // No new instance
|
||||
expect(result.current.error).toBe('Failed to reconnect after multiple attempts');
|
||||
});
|
||||
|
||||
it('should reset reconnect attempts on a successful connection', () => {
|
||||
renderHook(() => useWebSocket({ reconnectDelay: 100, maxReconnectAttempts: 2 }));
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal'));
|
||||
act(() => vi.advanceTimersByTime(100)); // 1st reconnect attempt
|
||||
expect(MockWebSocket.instances).toHaveLength(2);
|
||||
|
||||
act(() => MockWebSocket.lastInstance?._open()); // Reconnect succeeds
|
||||
|
||||
act(() => MockWebSocket.lastInstance?._close(1006, 'Abnormal'));
|
||||
act(() => vi.advanceTimersByTime(100)); // Delay should be reset to base
|
||||
expect(MockWebSocket.instances).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should send a message when connected', () => {
|
||||
const { result } = renderHook(() => useWebSocket());
|
||||
act(() => MockWebSocket.lastInstance?._open());
|
||||
|
||||
const message = { type: 'ping' as const, data: {}, timestamp: new Date().toISOString() };
|
||||
act(() => result.current.send(message));
|
||||
|
||||
expect(MockWebSocket.lastInstance?.send).toHaveBeenCalledWith(JSON.stringify(message));
|
||||
});
|
||||
|
||||
it('should warn when trying to send a message while not connected', () => {
|
||||
const consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
|
||||
const { result } = renderHook(() => useWebSocket());
|
||||
// Do not open connection
|
||||
|
||||
const message = { type: 'ping' as const, data: {}, timestamp: new Date().toISOString() };
|
||||
act(() => result.current.send(message));
|
||||
|
||||
expect(MockWebSocket.lastInstance?.send).not.toHaveBeenCalled();
|
||||
expect(consoleWarnSpy).toHaveBeenCalledWith('[WebSocket] Cannot send message: not connected');
|
||||
consoleWarnSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should clean up on unmount', () => {
|
||||
const { unmount } = renderHook(() => useWebSocket());
|
||||
const instance = MockWebSocket.lastInstance;
|
||||
|
||||
unmount();
|
||||
|
||||
expect(instance?.close).toHaveBeenCalled();
|
||||
act(() => vi.advanceTimersByTime(5000));
|
||||
expect(MockWebSocket.instances).toHaveLength(1); // No new reconnect attempts
|
||||
});
|
||||
|
||||
it('should maintain stable function references across rerenders', () => {
|
||||
const { result, rerender } = renderHook(() => useWebSocket());
|
||||
|
||||
const initialConnect = result.current.connect;
|
||||
const initialDisconnect = result.current.disconnect;
|
||||
const initialSend = result.current.send;
|
||||
|
||||
rerender();
|
||||
|
||||
expect(result.current.connect).toBe(initialConnect);
|
||||
expect(result.current.disconnect).toBe(initialDisconnect);
|
||||
expect(result.current.send).toBe(initialSend);
|
||||
});
|
||||
});
|
||||
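The reconnect tests above pin down an exponential backoff schedule (base delay, then 2x, then 4x) that stops after `maxReconnectAttempts`, and no retry after a clean code-1000 close. A minimal sketch of that scheduling logic follows; `useWebSocket.ts` itself is not in this diff, so the function names are assumptions.

```typescript
// Hypothetical sketch of the reconnect scheduling these tests exercise.
function nextReconnectDelay(baseDelayMs: number, attempt: number): number {
  // 1st retry waits baseDelayMs, 2nd waits 2x, 3rd waits 4x, ...
  return baseDelayMs * 2 ** (attempt - 1);
}

function shouldReconnect(closeCode: number, attempt: number, maxAttempts: number): boolean {
  // Code 1000 is a clean, client-initiated close; anything else triggers a retry
  // until maxAttempts is exhausted.
  return closeCode !== 1000 && attempt <= maxAttempts;
}
```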
src/pages/DealsPage.test.tsx (new file, 478 lines)
@@ -0,0 +1,478 @@
|
||||
// src/pages/DealsPage.test.tsx
|
||||
import React from 'react';
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
|
||||
import { DealsPage } from './DealsPage';
|
||||
import { useAuth } from '../hooks/useAuth';
|
||||
import { useWatchedItems } from '../hooks/useWatchedItems';
|
||||
import { useShoppingLists } from '../hooks/useShoppingLists';
|
||||
import {
|
||||
createMockUser,
|
||||
createMockUserProfile,
|
||||
createMockMasterGroceryItem,
|
||||
createMockShoppingList,
|
||||
resetMockIds,
|
||||
} from '../tests/utils/mockFactories';
|
||||
import type { MasterGroceryItem, ShoppingList } from '../types';
|
||||
|
||||
// Mock the hooks that DealsPage depends on
|
||||
vi.mock('../hooks/useAuth');
|
||||
vi.mock('../hooks/useWatchedItems');
|
||||
vi.mock('../hooks/useShoppingLists');
|
||||
|
||||
// Mock the child components to isolate DealsPage logic
|
||||
vi.mock('../features/shopping/WatchedItemsList', () => ({
|
||||
WatchedItemsList: vi.fn(({ items, user, activeListId }) => (
|
||||
<div data-testid="watched-items-list">
|
||||
<span data-testid="watched-items-count">{items?.length ?? 0} items</span>
|
||||
<span data-testid="watched-items-user">{user ? 'logged-in' : 'logged-out'}</span>
|
||||
<span data-testid="watched-items-active-list">{activeListId ?? 'none'}</span>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
vi.mock('../features/charts/PriceChart', () => ({
|
||||
PriceChart: vi.fn(({ unitSystem, user }) => (
|
||||
<div data-testid="price-chart">
|
||||
<span data-testid="price-chart-unit-system">{unitSystem}</span>
|
||||
<span data-testid="price-chart-user">{user ? 'logged-in' : 'logged-out'}</span>
|
||||
</div>
|
||||
)),
|
||||
}));
|
||||
|
||||
vi.mock('../features/charts/PriceHistoryChart', () => ({
|
||||
PriceHistoryChart: vi.fn(() => <div data-testid="price-history-chart">Price History Chart</div>),
|
||||
}));
|
||||
|
||||
// Cast the mocked hooks for type-safe assertions
|
||||
const mockedUseAuth = useAuth as Mock;
|
||||
const mockedUseWatchedItems = useWatchedItems as Mock;
|
||||
const mockedUseShoppingLists = useShoppingLists as Mock;
|
||||
|
||||
describe('DealsPage Component', () => {
|
||||
// Create mock data
|
||||
const mockUser = createMockUser({ user_id: 'user-123', email: 'test@example.com' });
|
||||
const mockUserProfile = createMockUserProfile({ user: mockUser });
|
||||
|
||||
const mockWatchedItems: MasterGroceryItem[] = [
|
||||
createMockMasterGroceryItem({ master_grocery_item_id: 1, name: 'Apples' }),
|
||||
createMockMasterGroceryItem({ master_grocery_item_id: 2, name: 'Bananas' }),
|
||||
];
|
||||
|
||||
const mockShoppingLists: ShoppingList[] = [
|
||||
createMockShoppingList({ shopping_list_id: 101, name: 'Weekly Groceries' }),
|
||||
createMockShoppingList({ shopping_list_id: 102, name: 'Party Shopping' }),
|
||||
];
|
||||
|
||||
// Mock function implementations
|
||||
const mockAddWatchedItem = vi.fn();
|
||||
const mockRemoveWatchedItem = vi.fn();
|
||||
const mockAddItemToList = vi.fn();
|
||||
const mockSetActiveListId = vi.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
resetMockIds();
|
||||
|
||||
// Set up default mock implementations for authenticated user with data
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: mockUserProfile,
|
||||
authStatus: 'AUTHENTICATED',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
mockedUseWatchedItems.mockReturnValue({
|
||||
watchedItems: mockWatchedItems,
|
||||
addWatchedItem: mockAddWatchedItem,
|
||||
removeWatchedItem: mockRemoveWatchedItem,
|
||||
error: null,
|
||||
});
|
||||
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
shoppingLists: mockShoppingLists,
|
||||
activeListId: 101,
|
||||
setActiveListId: mockSetActiveListId,
|
||||
createList: vi.fn(),
|
||||
deleteList: vi.fn(),
|
||||
addItemToList: mockAddItemToList,
|
||||
updateItemInList: vi.fn(),
|
||||
removeItemFromList: vi.fn(),
|
||||
isCreatingList: false,
|
||||
isDeletingList: false,
|
||||
isAddingItem: false,
|
||||
isUpdatingItem: false,
|
||||
isRemovingItem: false,
|
||||
error: null,
|
||||
});
|
||||
});
|
||||
|
||||
describe('Page Rendering', () => {
|
||||
it('should render the page title', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(
|
||||
screen.getByRole('heading', { name: /my deals & watched items/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render all three main components', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-list')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('price-chart')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('price-history-chart')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should apply correct layout classes for max width and spacing', () => {
|
||||
const { container } = render(<DealsPage />);
|
||||
|
||||
const mainContainer = container.firstChild as HTMLElement;
|
||||
expect(mainContainer).toHaveClass('max-w-4xl');
|
||||
expect(mainContainer).toHaveClass('mx-auto');
|
||||
expect(mainContainer).toHaveClass('p-4');
|
||||
expect(mainContainer).toHaveClass('space-y-6');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Props Passing to WatchedItemsList', () => {
|
||||
it('should pass watched items to WatchedItemsList', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-count')).toHaveTextContent('2 items');
|
||||
});
|
||||
|
||||
it('should pass user to WatchedItemsList when authenticated', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-user')).toHaveTextContent('logged-in');
|
||||
});
|
||||
|
||||
it('should pass null user to WatchedItemsList when not authenticated', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: null,
|
||||
authStatus: 'SIGNED_OUT',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-user')).toHaveTextContent('logged-out');
|
||||
});
|
||||
|
||||
it('should pass activeListId to WatchedItemsList', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-active-list')).toHaveTextContent('101');
|
||||
});
|
||||
|
||||
it('should pass "none" when no active list is selected', () => {
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...mockedUseShoppingLists(),
|
||||
activeListId: null,
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-active-list')).toHaveTextContent('none');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Props Passing to PriceChart', () => {
|
||||
it('should pass imperial unit system to PriceChart', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('price-chart-unit-system')).toHaveTextContent('imperial');
|
||||
});
|
||||
|
||||
it('should pass user to PriceChart when authenticated', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('price-chart-user')).toHaveTextContent('logged-in');
|
||||
});
|
||||
|
||||
it('should pass null user to PriceChart when not authenticated', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: null,
|
||||
authStatus: 'SIGNED_OUT',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('price-chart-user')).toHaveTextContent('logged-out');
|
||||
});
|
||||
});
|
||||
|
||||
describe('User Authentication States', () => {
|
||||
it('should render correctly when user is authenticated', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
// Both components should receive the user
|
||||
expect(screen.getByTestId('watched-items-user')).toHaveTextContent('logged-in');
|
||||
expect(screen.getByTestId('price-chart-user')).toHaveTextContent('logged-in');
|
||||
});
|
||||
|
||||
it('should render correctly when user is not authenticated', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: null,
|
||||
authStatus: 'SIGNED_OUT',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
// Both components should receive null user
|
||||
expect(screen.getByTestId('watched-items-user')).toHaveTextContent('logged-out');
|
||||
expect(screen.getByTestId('price-chart-user')).toHaveTextContent('logged-out');
|
||||
});
|
||||
|
||||
it('should handle undefined user within userProfile gracefully', () => {
|
||||
// Edge case where userProfile exists but user is undefined
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: { ...mockUserProfile, user: undefined },
|
||||
authStatus: 'AUTHENTICATED',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
// Should treat undefined user as logged out
|
||||
expect(screen.getByTestId('watched-items-user')).toHaveTextContent('logged-out');
|
||||
expect(screen.getByTestId('price-chart-user')).toHaveTextContent('logged-out');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Watched Items Data States', () => {
|
||||
it('should render with empty watched items list', () => {
|
||||
mockedUseWatchedItems.mockReturnValue({
|
||||
watchedItems: [],
|
||||
addWatchedItem: mockAddWatchedItem,
|
||||
removeWatchedItem: mockRemoveWatchedItem,
|
||||
error: null,
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-count')).toHaveTextContent('0 items');
|
||||
});
|
||||
|
||||
it('should render with multiple watched items', () => {
|
||||
const manyItems = Array.from({ length: 10 }, (_, i) =>
|
||||
createMockMasterGroceryItem({
|
||||
master_grocery_item_id: i + 1,
|
||||
name: `Item ${i + 1}`,
|
||||
}),
|
||||
);
|
||||
|
||||
mockedUseWatchedItems.mockReturnValue({
|
||||
watchedItems: manyItems,
|
||||
addWatchedItem: mockAddWatchedItem,
|
||||
removeWatchedItem: mockRemoveWatchedItem,
|
||||
error: null,
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-count')).toHaveTextContent('10 items');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Shopping Lists Data States', () => {
|
||||
it('should render when no shopping lists exist', () => {
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...mockedUseShoppingLists(),
|
||||
shoppingLists: [],
|
||||
activeListId: null,
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-active-list')).toHaveTextContent('none');
|
||||
});
|
||||
|
||||
it('should render when shopping lists exist but none is active', () => {
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...mockedUseShoppingLists(),
|
||||
activeListId: null,
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(screen.getByTestId('watched-items-active-list')).toHaveTextContent('none');
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleAddItemFromWatchedList Function', () => {
|
||||
it('should call addItemToList with correct parameters when activeListId exists', async () => {
|
||||
// Import the mocked component to access its props
|
||||
const { WatchedItemsList } = await import('../features/shopping/WatchedItemsList');
|
||||
const MockedWatchedItemsList = vi.mocked(WatchedItemsList);
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
// Get the onAddItemToList prop passed to WatchedItemsList
|
||||
const lastCall =
|
||||
MockedWatchedItemsList.mock.calls[MockedWatchedItemsList.mock.calls.length - 1];
|
||||
const onAddItemToList = lastCall[0].onAddItemToList;
|
||||
|
||||
// Simulate calling the handler with a master item ID
|
||||
onAddItemToList(42);
|
||||
|
||||
// Verify addItemToList was called with the active list ID and master item ID
|
||||
expect(mockAddItemToList).toHaveBeenCalledTimes(1);
|
||||
expect(mockAddItemToList).toHaveBeenCalledWith(101, { masterItemId: 42 });
|
||||
});
|
||||
|
||||
it('should not call addItemToList when activeListId is null', async () => {
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...mockedUseShoppingLists(),
|
||||
activeListId: null,
|
||||
addItemToList: mockAddItemToList,
|
||||
});
|
||||
|
||||
const { WatchedItemsList } = await import('../features/shopping/WatchedItemsList');
|
||||
const MockedWatchedItemsList = vi.mocked(WatchedItemsList);
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
// Get the onAddItemToList prop passed to WatchedItemsList
|
||||
const lastCall =
|
||||
MockedWatchedItemsList.mock.calls[MockedWatchedItemsList.mock.calls.length - 1];
|
||||
const onAddItemToList = lastCall[0].onAddItemToList;
|
||||
|
||||
// Simulate calling the handler with a master item ID
|
||||
onAddItemToList(42);
|
||||
|
||||
// Verify addItemToList was NOT called because there's no active list
|
||||
expect(mockAddItemToList).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Callback Props Verification', () => {
|
||||
it('should pass addWatchedItem function to WatchedItemsList', async () => {
|
||||
const { WatchedItemsList } = await import('../features/shopping/WatchedItemsList');
|
||||
const MockedWatchedItemsList = vi.mocked(WatchedItemsList);
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
const lastCall =
|
||||
MockedWatchedItemsList.mock.calls[MockedWatchedItemsList.mock.calls.length - 1];
|
||||
expect(lastCall[0].onAddItem).toBe(mockAddWatchedItem);
|
||||
});
|
||||
|
||||
it('should pass removeWatchedItem function to WatchedItemsList', async () => {
|
||||
const { WatchedItemsList } = await import('../features/shopping/WatchedItemsList');
|
||||
const MockedWatchedItemsList = vi.mocked(WatchedItemsList);
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
const lastCall =
|
||||
MockedWatchedItemsList.mock.calls[MockedWatchedItemsList.mock.calls.length - 1];
|
||||
expect(lastCall[0].onRemoveItem).toBe(mockRemoveWatchedItem);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Component Structure', () => {
|
||||
it('should render components in the correct order', () => {
|
||||
const { container } = render(<DealsPage />);
|
||||
|
||||
const mainContainer = container.firstChild as HTMLElement;
|
||||
const children = Array.from(mainContainer.children);
|
||||
|
||||
// First child should be the heading
|
||||
expect(children[0].tagName).toBe('H1');
|
||||
expect(children[0]).toHaveTextContent(/my deals & watched items/i);
|
||||
|
||||
// Subsequent children are the three main components
|
||||
// (indices may vary based on actual DOM structure)
|
||||
expect(screen.getByTestId('watched-items-list')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('price-chart')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('price-history-chart')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should have proper heading styling', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
const heading = screen.getByRole('heading', { name: /my deals & watched items/i });
|
||||
expect(heading).toHaveClass('text-3xl');
|
||||
expect(heading).toHaveClass('font-bold');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle userProfile with null user property', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: { ...mockUserProfile, user: null } as unknown,
|
||||
authStatus: 'AUTHENTICATED',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<DealsPage />);
|
||||
|
||||
// Should render without crashing and treat user as null
|
||||
expect(screen.getByTestId('watched-items-list')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('watched-items-user')).toHaveTextContent('logged-out');
|
||||
});
|
||||
|
||||
it('should handle undefined watchedItems gracefully', () => {
|
||||
mockedUseWatchedItems.mockReturnValue({
|
||||
watchedItems: undefined,
|
||||
addWatchedItem: mockAddWatchedItem,
|
||||
removeWatchedItem: mockRemoveWatchedItem,
|
||||
error: null,
|
||||
});
|
||||
|
||||
// This should not throw an error - the component should handle undefined
|
||||
render(<DealsPage />);
|
||||
|
||||
// The mock will receive undefined and show "0 items" or handle it
|
||||
expect(screen.getByTestId('watched-items-list')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render PriceHistoryChart without any props', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
// PriceHistoryChart doesn't take props from DealsPage
|
||||
expect(screen.getByTestId('price-history-chart')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('price-history-chart')).toHaveTextContent('Price History Chart');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Hook Integration', () => {
|
||||
it('should call useAuth hook', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(mockedUseAuth).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should call useWatchedItems hook', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(mockedUseWatchedItems).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should call useShoppingLists hook', () => {
|
||||
render(<DealsPage />);
|
||||
|
||||
expect(mockedUseShoppingLists).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
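The `handleAddItemFromWatchedList` tests above describe a handler that adds an item to the active shopping list and silently does nothing when no list is selected. A minimal sketch of that behaviour, with names assumed since `DealsPage.tsx` itself is not in this diff:

```typescript
// Hypothetical sketch of the behaviour the handleAddItemFromWatchedList tests pin down.
type AddItemToList = (listId: number, item: { masterItemId: number }) => void;

function makeAddItemFromWatchedList(activeListId: number | null, addItemToList: AddItemToList) {
  return (masterItemId: number) => {
    // No active shopping list selected: skip, matching the "not called" assertion.
    if (activeListId == null) return;
    addItemToList(activeListId, { masterItemId });
  };
}
```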
src/pages/FlyersPage.test.tsx (new file, 564 lines)
@@ -0,0 +1,564 @@
|
||||
// src/pages/FlyersPage.test.tsx
|
||||
import React from 'react';
|
||||
import { render, screen, fireEvent, within } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import { FlyersPage } from './FlyersPage';
|
||||
import { createMockFlyer, createMockUserProfile, resetMockIds } from '../tests/utils/mockFactories';
|
||||
import type { Flyer, UserProfile } from '../types';
|
||||
|
||||
// Unmock the component to test the real implementation
|
||||
vi.unmock('./FlyersPage');
|
||||
|
||||
// Mock the hooks used by FlyersPage
|
||||
const mockUseAuth = vi.fn();
|
||||
const mockUseFlyers = vi.fn();
|
||||
const mockUseFlyerSelection = vi.fn();
|
||||
|
||||
vi.mock('../hooks/useAuth', () => ({
|
||||
useAuth: () => mockUseAuth(),
|
||||
}));
|
||||
|
||||
vi.mock('../hooks/useFlyers', () => ({
|
||||
useFlyers: () => mockUseFlyers(),
|
||||
}));
|
||||
|
||||
vi.mock('../hooks/useFlyerSelection', () => ({
|
||||
useFlyerSelection: (options: { flyers: Flyer[] }) => mockUseFlyerSelection(options),
|
||||
}));
|
||||
|
||||
// Mock child components to isolate the FlyersPage logic
|
||||
vi.mock('../features/flyer/FlyerList', async () => {
|
||||
const { MockFlyerList } = await import('../tests/utils/componentMocks');
|
||||
return { FlyerList: MockFlyerList };
|
||||
});
|
||||
|
||||
vi.mock('../features/flyer/FlyerUploader', async () => {
|
||||
const { MockFlyerUploader } = await import('../tests/utils/componentMocks');
|
||||
return { FlyerUploader: MockFlyerUploader };
|
||||
});
|
||||
|
||||
// Mock the logger to prevent console output during tests
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('FlyersPage Component', () => {
|
||||
// Default mock implementations
|
||||
const mockRefetchFlyers = vi.fn();
|
||||
const mockHandleFlyerSelect = vi.fn();
|
||||
|
||||
const defaultAuthReturn = {
|
||||
userProfile: null as UserProfile | null,
|
||||
authStatus: 'SIGNED_OUT' as const,
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
};
|
||||
|
||||
const defaultFlyersReturn = {
|
||||
flyers: [] as Flyer[],
|
||||
isLoadingFlyers: false,
|
||||
flyersError: null,
|
||||
fetchNextFlyersPage: vi.fn(),
|
||||
hasNextFlyersPage: false,
|
||||
isRefetchingFlyers: false,
|
||||
refetchFlyers: mockRefetchFlyers,
|
||||
};
|
||||
|
||||
const defaultSelectionReturn = {
|
||||
selectedFlyer: null as Flyer | null,
|
||||
handleFlyerSelect: mockHandleFlyerSelect,
|
||||
flyerIdFromUrl: undefined,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
resetMockIds();
|
||||
|
||||
// Set up default mock implementations
|
||||
mockUseAuth.mockReturnValue(defaultAuthReturn);
|
||||
mockUseFlyers.mockReturnValue(defaultFlyersReturn);
|
||||
mockUseFlyerSelection.mockReturnValue(defaultSelectionReturn);
|
||||
});
|
||||
|
||||
const renderPage = () => {
|
||||
return render(
|
||||
<MemoryRouter>
|
||||
<FlyersPage />
|
||||
</MemoryRouter>,
|
||||
);
|
||||
};
|
||||
|
||||
describe('Basic Rendering', () => {
|
||||
it('should render the page title', () => {
|
||||
renderPage();
|
||||
|
||||
expect(screen.getByRole('heading', { name: /flyers/i, level: 1 })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render the FlyerList component', () => {
|
||||
renderPage();
|
||||
|
||||
expect(screen.getByTestId('flyer-list')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render the FlyerUploader component', () => {
|
||||
renderPage();
|
||||
|
||||
expect(screen.getByTestId('flyer-uploader')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should have the correct page structure with spacing', () => {
|
||||
const { container } = renderPage();
|
||||
|
||||
// Check for the main container with styling classes
|
||||
const mainDiv = container.querySelector('.max-w-4xl');
|
||||
expect(mainDiv).toBeInTheDocument();
|
||||
expect(mainDiv).toHaveClass('mx-auto', 'p-4', 'space-y-6');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Empty State (No Flyers)', () => {
|
||||
it('should show empty message when there are no flyers', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: [],
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-flyer-count', '0');
|
||||
expect(screen.getByTestId('no-flyers-message')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('With Flyers Data', () => {
|
||||
const mockFlyers: Flyer[] = [
|
||||
createMockFlyer({
|
||||
flyer_id: 1,
|
||||
store: { store_id: 1, name: 'Safeway', created_at: '', updated_at: '' },
|
||||
item_count: 25,
|
||||
}),
|
||||
createMockFlyer({
|
||||
flyer_id: 2,
|
||||
store: { store_id: 2, name: 'Walmart', created_at: '', updated_at: '' },
|
||||
item_count: 40,
|
||||
}),
|
||||
createMockFlyer({
|
||||
flyer_id: 3,
|
||||
store: { store_id: 3, name: 'Costco', created_at: '', updated_at: '' },
|
||||
item_count: 60,
|
||||
}),
|
||||
];
|
||||
|
||||
it('should pass flyers to FlyerList component', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-flyer-count', '3');
|
||||
});
|
||||
|
||||
it('should render all flyer items', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
expect(screen.getByTestId('flyer-item-1')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('flyer-item-2')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('flyer-item-3')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should display store names in flyer list', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
expect(screen.getByText(/Safeway - 25 items/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/Walmart - 40 items/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/Costco - 60 items/)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Flyer Selection', () => {
|
||||
const mockFlyers: Flyer[] = [
|
||||
createMockFlyer({
|
||||
flyer_id: 1,
|
||||
store: { store_id: 1, name: 'Safeway', created_at: '', updated_at: '' },
|
||||
item_count: 25,
|
||||
}),
|
||||
createMockFlyer({
|
||||
flyer_id: 2,
|
||||
store: { store_id: 2, name: 'Walmart', created_at: '', updated_at: '' },
|
||||
item_count: 40,
|
||||
}),
|
||||
];
|
||||
|
||||
it('should pass selectedFlyerId to FlyerList when a flyer is selected', () => {
|
||||
const selectedFlyer = mockFlyers[0];
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
mockUseFlyerSelection.mockReturnValue({
|
||||
...defaultSelectionReturn,
|
||||
selectedFlyer,
|
||||
handleFlyerSelect: mockHandleFlyerSelect,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-selected-id', '1');
|
||||
});
|
||||
|
||||
it('should pass null selectedFlyerId when no flyer is selected', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
mockUseFlyerSelection.mockReturnValue({
|
||||
...defaultSelectionReturn,
|
||||
selectedFlyer: null,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-selected-id', 'none');
|
||||
});
|
||||
|
||||
it('should call handleFlyerSelect when a flyer is clicked', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerItem = screen.getByTestId('flyer-item-1');
|
||||
const selectButton = within(flyerItem).getByRole('button');
|
||||
fireEvent.click(selectButton);
|
||||
|
||||
expect(mockHandleFlyerSelect).toHaveBeenCalledTimes(1);
|
||||
expect(mockHandleFlyerSelect).toHaveBeenCalledWith(mockFlyers[0]);
|
||||
});
|
||||
|
||||
it('should call useFlyerSelection with the correct flyers', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
expect(mockUseFlyerSelection).toHaveBeenCalledWith({ flyers: mockFlyers });
|
||||
});
|
||||
});
|
||||
|
||||
describe('User Authentication States', () => {
|
||||
const mockFlyers: Flyer[] = [
|
||||
createMockFlyer({
|
||||
flyer_id: 1,
|
||||
store: { store_id: 1, name: 'Safeway', created_at: '', updated_at: '' },
|
||||
item_count: 25,
|
||||
}),
|
||||
];
|
||||
|
||||
it('should pass null profile to FlyerList when user is not authenticated', () => {
|
||||
mockUseAuth.mockReturnValue({
|
||||
...defaultAuthReturn,
|
||||
userProfile: null,
|
||||
authStatus: 'SIGNED_OUT',
|
||||
});
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-profile-role', 'none');
|
||||
});
|
||||
|
||||
it('should pass user profile to FlyerList when user is authenticated', () => {
|
||||
const userProfile = createMockUserProfile({ role: 'user' });
|
||||
|
||||
mockUseAuth.mockReturnValue({
|
||||
...defaultAuthReturn,
|
||||
userProfile,
|
||||
authStatus: 'AUTHENTICATED',
|
||||
});
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-profile-role', 'user');
|
||||
});
|
||||
|
||||
it('should pass admin profile to FlyerList when user is admin', () => {
|
||||
const adminProfile = createMockUserProfile({ role: 'admin' });
|
||||
|
||||
mockUseAuth.mockReturnValue({
|
||||
...defaultAuthReturn,
|
||||
userProfile: adminProfile,
|
||||
authStatus: 'AUTHENTICATED',
|
||||
});
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-profile-role', 'admin');
|
||||
});
|
||||
});
|
||||
|
||||
describe('FlyerUploader Integration', () => {
|
||||
it('should pass refetchFlyers to FlyerUploader as onProcessingComplete', () => {
|
||||
renderPage();
|
||||
|
||||
// Click the mock upload complete button
|
||||
const completeButton = screen.getByTestId('mock-upload-complete-btn');
|
||||
fireEvent.click(completeButton);
|
||||
|
||||
// Verify refetchFlyers was called
|
||||
expect(mockRefetchFlyers).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should trigger data refresh when upload completes', () => {
|
||||
const mockFlyers: Flyer[] = [
|
||||
createMockFlyer({
|
||||
flyer_id: 1,
|
||||
store: { store_id: 1, name: 'Safeway', created_at: '', updated_at: '' },
|
||||
}),
|
||||
];
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
// Simulate upload completion
|
||||
const completeButton = screen.getByTestId('mock-upload-complete-btn');
|
||||
fireEvent.click(completeButton);
|
||||
|
||||
expect(mockRefetchFlyers).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Hook Integration', () => {
|
||||
it('should call useAuth hook', () => {
|
||||
renderPage();
|
||||
|
||||
expect(mockUseAuth).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should call useFlyers hook', () => {
|
||||
renderPage();
|
||||
|
||||
expect(mockUseFlyers).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should call useFlyerSelection with flyers from useFlyers', () => {
|
||||
const mockFlyers: Flyer[] = [
|
||||
createMockFlyer({ flyer_id: 1 }),
|
||||
createMockFlyer({ flyer_id: 2 }),
|
||||
];
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
expect(mockUseFlyerSelection).toHaveBeenCalledWith({ flyers: mockFlyers });
|
||||
});
|
||||
});
|
||||
|
||||
describe('Component Props Passing', () => {
|
||||
it('should pass all required props to FlyerList', () => {
|
||||
const mockFlyers: Flyer[] = [createMockFlyer({ flyer_id: 1 })];
|
||||
const selectedFlyer = mockFlyers[0];
|
||||
const userProfile = createMockUserProfile({ role: 'admin' });
|
||||
|
||||
mockUseAuth.mockReturnValue({
|
||||
...defaultAuthReturn,
|
||||
userProfile,
|
||||
});
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
mockUseFlyerSelection.mockReturnValue({
|
||||
selectedFlyer,
|
||||
handleFlyerSelect: mockHandleFlyerSelect,
|
||||
flyerIdFromUrl: undefined,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
|
||||
// Verify all props are passed correctly
|
||||
expect(flyerList).toHaveAttribute('data-flyer-count', '1');
|
||||
expect(flyerList).toHaveAttribute('data-selected-id', '1');
|
||||
expect(flyerList).toHaveAttribute('data-profile-role', 'admin');
|
||||
});
|
||||
|
||||
it('should handle selectedFlyer being null gracefully', () => {
|
||||
const mockFlyers: Flyer[] = [createMockFlyer({ flyer_id: 1 })];
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
mockUseFlyerSelection.mockReturnValue({
|
||||
selectedFlyer: null,
|
||||
handleFlyerSelect: mockHandleFlyerSelect,
|
||||
flyerIdFromUrl: undefined,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-selected-id', 'none');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle flyer with missing store gracefully', () => {
|
||||
const flyerWithoutStore = createMockFlyer({
|
||||
flyer_id: 1,
|
||||
item_count: 10,
|
||||
});
|
||||
// Remove the store to test fallback behavior
|
||||
(flyerWithoutStore as unknown as { store: undefined }).store = undefined;
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: [flyerWithoutStore],
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
// Should show "Unknown Store" as fallback
|
||||
expect(screen.getByText(/Unknown Store - 10 items/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should handle undefined selectedFlyer flyer_id', () => {
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: [],
|
||||
});
|
||||
mockUseFlyerSelection.mockReturnValue({
|
||||
selectedFlyer: null,
|
||||
handleFlyerSelect: mockHandleFlyerSelect,
|
||||
flyerIdFromUrl: undefined,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-selected-id', 'none');
|
||||
});
|
||||
|
||||
it('should handle multiple rapid flyer selections', () => {
|
||||
const mockFlyers: Flyer[] = [
|
||||
createMockFlyer({ flyer_id: 1 }),
|
||||
createMockFlyer({ flyer_id: 2 }),
|
||||
createMockFlyer({ flyer_id: 3 }),
|
||||
];
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
// Rapidly click different flyers
|
||||
fireEvent.click(within(screen.getByTestId('flyer-item-1')).getByRole('button'));
|
||||
fireEvent.click(within(screen.getByTestId('flyer-item-2')).getByRole('button'));
|
||||
fireEvent.click(within(screen.getByTestId('flyer-item-3')).getByRole('button'));
|
||||
|
||||
expect(mockHandleFlyerSelect).toHaveBeenCalledTimes(3);
|
||||
expect(mockHandleFlyerSelect).toHaveBeenNthCalledWith(1, mockFlyers[0]);
|
||||
expect(mockHandleFlyerSelect).toHaveBeenNthCalledWith(2, mockFlyers[1]);
|
||||
expect(mockHandleFlyerSelect).toHaveBeenNthCalledWith(3, mockFlyers[2]);
|
||||
});
|
||||
|
||||
it('should handle large number of flyers', () => {
|
||||
const manyFlyers = Array.from({ length: 100 }, (_, i) =>
|
||||
createMockFlyer({
|
||||
flyer_id: i + 1,
|
||||
store: { store_id: i + 1, name: `Store ${i + 1}`, created_at: '', updated_at: '' },
|
||||
item_count: (i + 1) * 10,
|
||||
}),
|
||||
);
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: manyFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
const flyerList = screen.getByTestId('flyer-list');
|
||||
expect(flyerList).toHaveAttribute('data-flyer-count', '100');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Accessibility', () => {
|
||||
it('should have a main heading for the page', () => {
|
||||
renderPage();
|
||||
|
||||
const heading = screen.getByRole('heading', { level: 1, name: /flyers/i });
|
||||
expect(heading).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render interactive elements as buttons', () => {
|
||||
const mockFlyers: Flyer[] = [createMockFlyer({ flyer_id: 1 })];
|
||||
|
||||
mockUseFlyers.mockReturnValue({
|
||||
...defaultFlyersReturn,
|
||||
flyers: mockFlyers,
|
||||
});
|
||||
|
||||
renderPage();
|
||||
|
||||
// Both flyer selection and upload complete should be accessible buttons
|
||||
const buttons = screen.getAllByRole('button');
|
||||
expect(buttons.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
src/pages/ShoppingListsPage.test.tsx (new file, 469 lines)
@@ -0,0 +1,469 @@
|
||||
// src/pages/ShoppingListsPage.test.tsx
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
|
||||
import { ShoppingListsPage } from './ShoppingListsPage';
|
||||
import { useAuth } from '../hooks/useAuth';
|
||||
import { useShoppingLists } from '../hooks/useShoppingLists';
|
||||
import type { ShoppingList, ShoppingListItem, User, UserProfile } from '../types';
|
||||
import {
|
||||
createMockUser,
|
||||
createMockUserProfile,
|
||||
createMockShoppingList,
|
||||
createMockShoppingListItem,
|
||||
} from '../tests/utils/mockFactories';
|
||||
|
||||
// Mock the hooks used by ShoppingListsPage
|
||||
vi.mock('../hooks/useAuth');
|
||||
vi.mock('../hooks/useShoppingLists');
|
||||
|
||||
// Mock the ShoppingListComponent to isolate ShoppingListsPage logic
|
||||
vi.mock('../features/shopping/ShoppingList', () => ({
|
||||
ShoppingListComponent: vi.fn(
|
||||
({
|
||||
user,
|
||||
lists,
|
||||
activeListId,
|
||||
onSelectList,
|
||||
onCreateList,
|
||||
onDeleteList,
|
||||
onAddItem,
|
||||
onUpdateItem,
|
||||
onRemoveItem,
|
||||
}: {
|
||||
user: User | null;
|
||||
lists: ShoppingList[];
|
||||
activeListId: number | null;
|
||||
onSelectList: (listId: number) => void;
|
||||
onCreateList: (name: string) => Promise<void>;
|
||||
onDeleteList: (listId: number) => Promise<void>;
|
||||
onAddItem: (item: { customItemName: string }) => Promise<void>;
|
||||
onUpdateItem: (itemId: number, updates: Partial<ShoppingListItem>) => Promise<void>;
|
||||
onRemoveItem: (itemId: number) => Promise<void>;
|
||||
}) => (
|
||||
<div data-testid="shopping-list-component">
|
||||
<span data-testid="user-status">{user ? 'authenticated' : 'not-authenticated'}</span>
|
||||
<span data-testid="lists-count">{lists.length}</span>
|
||||
<span data-testid="active-list-id">{activeListId ?? 'none'}</span>
|
||||
<button data-testid="select-list-btn" onClick={() => onSelectList(999)}>
|
||||
Select List
|
||||
</button>
|
||||
<button data-testid="create-list-btn" onClick={() => onCreateList('New List')}>
|
||||
Create List
|
||||
</button>
|
||||
<button data-testid="delete-list-btn" onClick={() => onDeleteList(1)}>
|
||||
Delete List
|
||||
</button>
|
||||
<button
|
||||
data-testid="add-item-btn"
|
||||
onClick={() => onAddItem({ customItemName: 'Test Item' })}
|
||||
>
|
||||
Add Item
|
||||
</button>
|
||||
<button
|
||||
data-testid="update-item-btn"
|
||||
onClick={() => onUpdateItem(10, { is_purchased: true })}
|
||||
>
|
||||
Update Item
|
||||
</button>
|
||||
<button data-testid="remove-item-btn" onClick={() => onRemoveItem(10)}>
|
||||
Remove Item
|
||||
</button>
|
||||
</div>
|
||||
),
|
||||
),
|
||||
}));
|
||||
|
||||
const mockedUseAuth = vi.mocked(useAuth);
|
||||
const mockedUseShoppingLists = vi.mocked(useShoppingLists);
|
||||
|
||||
describe('ShoppingListsPage', () => {
|
||||
const mockUser: User = createMockUser({ user_id: 'user-123', email: 'test@example.com' });
|
||||
const mockUserProfile: UserProfile = createMockUserProfile({ user: mockUser });
|
||||
|
||||
const mockShoppingLists: ShoppingList[] = [
|
||||
createMockShoppingList({
|
||||
shopping_list_id: 1,
|
||||
name: 'Groceries',
|
||||
user_id: 'user-123',
|
||||
items: [
|
||||
createMockShoppingListItem({
|
||||
shopping_list_item_id: 101,
|
||||
shopping_list_id: 1,
|
||||
custom_item_name: 'Apples',
|
||||
}),
|
||||
],
|
||||
}),
|
||||
createMockShoppingList({
|
||||
shopping_list_id: 2,
|
||||
name: 'Hardware',
|
||||
user_id: 'user-123',
|
||||
items: [],
|
||||
}),
|
||||
];
|
||||
|
||||
// Mock functions from useShoppingLists
|
||||
const mockSetActiveListId = vi.fn();
|
||||
const mockCreateList = vi.fn();
|
||||
const mockDeleteList = vi.fn();
|
||||
const mockAddItemToList = vi.fn();
|
||||
const mockUpdateItemInList = vi.fn();
|
||||
const mockRemoveItemFromList = vi.fn();
|
||||
|
||||
const defaultUseShoppingListsReturn = {
|
||||
shoppingLists: mockShoppingLists,
|
||||
activeListId: 1,
|
||||
setActiveListId: mockSetActiveListId,
|
||||
createList: mockCreateList,
|
||||
deleteList: mockDeleteList,
|
||||
addItemToList: mockAddItemToList,
|
||||
updateItemInList: mockUpdateItemInList,
|
||||
removeItemFromList: mockRemoveItemFromList,
|
||||
isCreatingList: false,
|
||||
isDeletingList: false,
|
||||
isAddingItem: false,
|
||||
isUpdatingItem: false,
|
||||
isRemovingItem: false,
|
||||
error: null,
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
|
||||
// Default authenticated user
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: mockUserProfile,
|
||||
authStatus: 'AUTHENTICATED',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
// Default shopping lists state
|
||||
mockedUseShoppingLists.mockReturnValue(defaultUseShoppingListsReturn);
|
||||
});
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render the page title', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByRole('heading', { name: 'Shopping Lists' })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render the ShoppingListComponent', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('shopping-list-component')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should pass the correct user to ShoppingListComponent when authenticated', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('user-status')).toHaveTextContent('authenticated');
|
||||
});
|
||||
|
||||
it('should pass null user to ShoppingListComponent when not authenticated', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: null,
|
||||
authStatus: 'SIGNED_OUT',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('user-status')).toHaveTextContent('not-authenticated');
|
||||
});
|
||||
|
||||
it('should pass the shopping lists to ShoppingListComponent', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('lists-count')).toHaveTextContent('2');
|
||||
});
|
||||
|
||||
it('should pass the active list ID to ShoppingListComponent', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('active-list-id')).toHaveTextContent('1');
|
||||
});
|
||||
|
||||
it('should handle empty shopping lists', () => {
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...defaultUseShoppingListsReturn,
|
||||
shoppingLists: [],
|
||||
activeListId: null,
|
||||
});
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('lists-count')).toHaveTextContent('0');
|
||||
expect(screen.getByTestId('active-list-id')).toHaveTextContent('none');
|
||||
});
|
||||
});
|
||||
|
||||
describe('User State', () => {
|
||||
it('should extract user from userProfile when available', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
// The component should pass the user object to ShoppingListComponent
|
||||
expect(screen.getByTestId('user-status')).toHaveTextContent('authenticated');
|
||||
});
|
||||
|
||||
it('should pass null user when userProfile is null', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: null,
|
||||
authStatus: 'SIGNED_OUT',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('user-status')).toHaveTextContent('not-authenticated');
|
||||
});
|
||||
|
||||
it('should pass null user when userProfile has no user property', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: { ...mockUserProfile, user: undefined as unknown as User },
|
||||
authStatus: 'AUTHENTICATED',
|
||||
isLoading: false,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
// When userProfile.user is undefined, the nullish coalescing should return null
|
||||
expect(screen.getByTestId('user-status')).toHaveTextContent('not-authenticated');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Callback Props', () => {
|
||||
it('should pass setActiveListId to ShoppingListComponent', async () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const selectButton = screen.getByTestId('select-list-btn');
|
||||
selectButton.click();
|
||||
|
||||
expect(mockSetActiveListId).toHaveBeenCalledWith(999);
|
||||
});
|
||||
|
||||
it('should pass createList to ShoppingListComponent', async () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const createButton = screen.getByTestId('create-list-btn');
|
||||
createButton.click();
|
||||
|
||||
expect(mockCreateList).toHaveBeenCalledWith('New List');
|
||||
});
|
||||
|
||||
it('should pass deleteList to ShoppingListComponent', async () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const deleteButton = screen.getByTestId('delete-list-btn');
|
||||
deleteButton.click();
|
||||
|
||||
expect(mockDeleteList).toHaveBeenCalledWith(1);
|
||||
});
|
||||
|
||||
it('should pass updateItemInList to ShoppingListComponent', async () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const updateButton = screen.getByTestId('update-item-btn');
|
||||
updateButton.click();
|
||||
|
||||
expect(mockUpdateItemInList).toHaveBeenCalledWith(10, { is_purchased: true });
|
||||
});
|
||||
|
||||
it('should pass removeItemFromList to ShoppingListComponent', async () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const removeButton = screen.getByTestId('remove-item-btn');
|
||||
removeButton.click();
|
||||
|
||||
expect(mockRemoveItemFromList).toHaveBeenCalledWith(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleAddItemToShoppingList', () => {
|
||||
it('should call addItemToList with activeListId when adding an item', async () => {
|
||||
mockAddItemToList.mockResolvedValue(undefined);
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const addButton = screen.getByTestId('add-item-btn');
|
||||
addButton.click();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockAddItemToList).toHaveBeenCalledWith(1, { customItemName: 'Test Item' });
|
||||
});
|
||||
});
|
||||
|
||||
it('should not call addItemToList when activeListId is null', async () => {
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...defaultUseShoppingListsReturn,
|
||||
activeListId: null,
|
||||
});
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const addButton = screen.getByTestId('add-item-btn');
|
||||
addButton.click();
|
||||
|
||||
// Wait a tick to ensure any async operations would have completed
|
||||
await waitFor(() => {
|
||||
expect(mockAddItemToList).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle addItemToList with masterItemId', async () => {
|
||||
// Re-mock ShoppingListComponent to test with masterItemId
|
||||
const ShoppingListComponent = vi.mocked(
|
||||
await import('../features/shopping/ShoppingList'),
|
||||
).ShoppingListComponent;
|
||||
|
||||
// Get the onAddItem prop from the last render call
|
||||
const lastCallProps = (ShoppingListComponent as unknown as Mock).mock.calls[0]?.[0];
|
||||
|
||||
if (lastCallProps?.onAddItem) {
|
||||
await lastCallProps.onAddItem({ masterItemId: 42 });
|
||||
|
||||
expect(mockAddItemToList).toHaveBeenCalledWith(1, { masterItemId: 42 });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration with useShoppingLists', () => {
|
||||
it('should use the correct hooks', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(mockedUseAuth).toHaveBeenCalled();
|
||||
expect(mockedUseShoppingLists).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should reflect changes when shoppingLists updates', () => {
|
||||
const { rerender } = render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('lists-count')).toHaveTextContent('2');
|
||||
|
||||
// Simulate adding a new list
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...defaultUseShoppingListsReturn,
|
||||
shoppingLists: [
|
||||
...mockShoppingLists,
|
||||
createMockShoppingList({
|
||||
shopping_list_id: 3,
|
||||
name: 'New List',
|
||||
user_id: 'user-123',
|
||||
}),
|
||||
],
|
||||
});
|
||||
|
||||
rerender(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('lists-count')).toHaveTextContent('3');
|
||||
});
|
||||
|
||||
it('should reflect changes when activeListId updates', () => {
|
||||
const { rerender } = render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('active-list-id')).toHaveTextContent('1');
|
||||
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...defaultUseShoppingListsReturn,
|
||||
activeListId: 2,
|
||||
});
|
||||
|
||||
rerender(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('active-list-id')).toHaveTextContent('2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Page Structure', () => {
|
||||
it('should have correct CSS classes for layout', () => {
|
||||
const { container } = render(<ShoppingListsPage />);
|
||||
|
||||
const pageContainer = container.firstChild as HTMLElement;
|
||||
expect(pageContainer).toHaveClass('max-w-4xl', 'mx-auto', 'p-4', 'space-y-6');
|
||||
});
|
||||
|
||||
it('should have correctly styled heading', () => {
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const heading = screen.getByRole('heading', { name: 'Shopping Lists' });
|
||||
expect(heading).toHaveClass('text-3xl', 'font-bold', 'text-gray-900');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle auth loading state gracefully', () => {
|
||||
mockedUseAuth.mockReturnValue({
|
||||
userProfile: null,
|
||||
authStatus: 'Determining...',
|
||||
isLoading: true,
|
||||
login: vi.fn(),
|
||||
logout: vi.fn(),
|
||||
updateProfile: vi.fn(),
|
||||
});
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
// Page should still render even during auth loading
|
||||
expect(screen.getByRole('heading', { name: 'Shopping Lists' })).toBeInTheDocument();
|
||||
expect(screen.getByTestId('user-status')).toHaveTextContent('not-authenticated');
|
||||
});
|
||||
|
||||
it('should handle shopping lists with items correctly', () => {
|
||||
const listsWithItems = [
|
||||
createMockShoppingList({
|
||||
shopping_list_id: 1,
|
||||
name: 'With Items',
|
||||
items: [
|
||||
createMockShoppingListItem({
|
||||
shopping_list_item_id: 1,
|
||||
custom_item_name: 'Item 1',
|
||||
is_purchased: false,
|
||||
}),
|
||||
createMockShoppingListItem({
|
||||
shopping_list_item_id: 2,
|
||||
custom_item_name: 'Item 2',
|
||||
is_purchased: true,
|
||||
}),
|
||||
],
|
||||
}),
|
||||
];
|
||||
|
||||
mockedUseShoppingLists.mockReturnValue({
|
||||
...defaultUseShoppingListsReturn,
|
||||
shoppingLists: listsWithItems,
|
||||
});
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
expect(screen.getByTestId('lists-count')).toHaveTextContent('1');
|
||||
});
|
||||
|
||||
it('should handle async callback errors gracefully', async () => {
|
||||
// The useShoppingLists hook catches errors internally and logs them,
|
||||
// so we mock it to resolve (the real error handling is tested in useShoppingLists.test.tsx)
|
||||
mockAddItemToList.mockResolvedValue(undefined);
|
||||
|
||||
render(<ShoppingListsPage />);
|
||||
|
||||
const addButton = screen.getByTestId('add-item-btn');
|
||||
|
||||
// Should not throw when clicked
|
||||
expect(() => addButton.click()).not.toThrow();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockAddItemToList).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
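
The ShoppingListsPage component under test is not part of this diff. As a reading aid, the sketch below reconstructs the behaviour the assertions above imply (the nullish-coalesced user, the add-item guard on `activeListId`, the layout and heading classes); it is an assumption drawn only from the tests, not the committed implementation.

```tsx
// Hypothetical reconstruction of src/pages/ShoppingListsPage.tsx, inferred from
// the test assertions above. Hook shapes and file paths are assumptions.
import React from 'react';

import { ShoppingListComponent } from '../features/shopping/ShoppingList';
import { useAuth } from '../hooks/useAuth';
import { useShoppingLists } from '../hooks/useShoppingLists';

export const ShoppingListsPage: React.FC = () => {
  const { userProfile } = useAuth();
  const {
    shoppingLists,
    activeListId,
    setActiveListId,
    createList,
    deleteList,
    addItemToList,
    updateItemInList,
    removeItemFromList,
  } = useShoppingLists();

  // Tests expect a null user when the profile is missing or has no user property.
  const user = userProfile?.user ?? null;

  // Tests expect no API call when there is no active list.
  const handleAddItem = async (item: { customItemName?: string; masterItemId?: number }) => {
    if (activeListId === null) return;
    await addItemToList(activeListId, item);
  };

  return (
    <div className="max-w-4xl mx-auto p-4 space-y-6">
      <h1 className="text-3xl font-bold text-gray-900">Shopping Lists</h1>
      <ShoppingListComponent
        user={user}
        lists={shoppingLists}
        activeListId={activeListId}
        onSelectList={setActiveListId}
        onCreateList={createList}
        onDeleteList={deleteList}
        onAddItem={handleAddItem}
        onUpdateItem={updateItemInList}
        onRemoveItem={removeItemFromList}
      />
    </div>
  );
};
```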

src/pages/admin/AdminStoresPage.test.tsx (new file, 78 lines added)
@@ -0,0 +1,78 @@
// src/pages/admin/AdminStoresPage.test.tsx
import React from 'react';
import { render, screen } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { MemoryRouter } from 'react-router-dom';
import { AdminStoresPage } from './AdminStoresPage';
import { QueryWrapper } from '../../tests/utils/renderWithProviders';

// Mock the AdminStoreManager child component to isolate the test
vi.mock('./components/AdminStoreManager', () => ({
  AdminStoreManager: () => <div data-testid="admin-store-manager-mock">Admin Store Manager</div>,
}));

// Mock the logger to prevent console output during tests
vi.mock('../../services/logger', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));

// Helper function to render the component within router and query contexts
const renderWithRouter = () => {
  return render(
    <QueryWrapper>
      <MemoryRouter>
        <AdminStoresPage />
      </MemoryRouter>
    </QueryWrapper>,
  );
};

describe('AdminStoresPage', () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('should render the main heading and description', () => {
    renderWithRouter();

    expect(screen.getByRole('heading', { name: /store management/i })).toBeInTheDocument();
    expect(screen.getByText('Manage stores and their locations.')).toBeInTheDocument();
  });

  it('should render a link back to the admin dashboard', () => {
    renderWithRouter();

    const link = screen.getByRole('link', { name: /back to admin dashboard/i });
    expect(link).toBeInTheDocument();
    expect(link).toHaveAttribute('href', '/admin');
  });

  it('should render the AdminStoreManager component', () => {
    renderWithRouter();

    expect(screen.getByTestId('admin-store-manager-mock')).toBeInTheDocument();
    expect(screen.getByText('Admin Store Manager')).toBeInTheDocument();
  });

  it('should have proper page layout structure', () => {
    const { container } = renderWithRouter();

    // Check for the main container with expected classes
    const mainContainer = container.querySelector('.max-w-6xl');
    expect(mainContainer).toBeInTheDocument();
    expect(mainContainer).toHaveClass('mx-auto', 'py-8', 'px-4');
  });

  it('should render the back link with the left arrow entity', () => {
    renderWithRouter();

    // The back link should contain the larr HTML entity (left arrow)
    const link = screen.getByRole('link', { name: /back to admin dashboard/i });
    expect(link.textContent).toContain('\u2190'); // Unicode for ←
  });
});
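
Both admin suites lean on shared helpers from `src/tests/utils/renderWithProviders.tsx`, which this diff does not include. A minimal sketch of what such a wrapper typically looks like, assuming TanStack Query is the data layer (the query-invalidation tests further down suggest it), with retries disabled so error-path tests settle quickly:

```tsx
// Hypothetical sketch of src/tests/utils/renderWithProviders.tsx; the real helper
// is not shown in this diff, so treat names and options as assumptions.
import React from 'react';
import { render } from '@testing-library/react';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';

export const QueryWrapper = ({ children }: { children: React.ReactNode }) => {
  // Lazily create one client per mounted wrapper so cache survives re-renders.
  const [queryClient] = React.useState(
    () => new QueryClient({ defaultOptions: { queries: { retry: false } } }),
  );
  return <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>;
};

export const renderWithProviders = (ui: React.ReactElement) =>
  render(ui, { wrapper: QueryWrapper });
```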

src/pages/admin/components/AdminStoreManager.test.tsx (new file, 672 lines added)
@@ -0,0 +1,672 @@
// src/pages/admin/components/AdminStoreManager.test.tsx
import React from 'react';
import { screen, fireEvent, waitFor, within } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import toast from 'react-hot-toast';
import { AdminStoreManager } from './AdminStoreManager';
import * as apiClient from '../../../services/apiClient';
import { createMockStoreWithLocations } from '../../../tests/utils/mockFactories';
import { renderWithProviders } from '../../../tests/utils/renderWithProviders';
import type { StoreWithLocations } from '../../../types';

// Must explicitly call vi.mock() for apiClient
vi.mock('../../../services/apiClient');

// Mock react-hot-toast
vi.mock('react-hot-toast', () => ({
  default: {
    loading: vi.fn(),
    success: vi.fn(),
    error: vi.fn(),
  },
}));

// Mock the StoreForm component to isolate AdminStoreManager testing
vi.mock('./StoreForm', () => ({
  StoreForm: ({
    store,
    onSuccess,
    onCancel,
  }: {
    store?: StoreWithLocations;
    onSuccess: () => void;
    onCancel: () => void;
  }) => (
    <div data-testid="store-form-mock">
      <span data-testid="store-form-mode">{store ? 'edit' : 'create'}</span>
      {store && <span data-testid="store-form-store-id">{store.store_id}</span>}
      <button onClick={onSuccess} data-testid="store-form-success">
        Submit
      </button>
      <button onClick={onCancel} data-testid="store-form-cancel">
        Cancel
      </button>
    </div>
  ),
}));

// Mock the ErrorDisplay component
vi.mock('../../../components/ErrorDisplay', () => ({
  ErrorDisplay: ({ message }: { message: string }) => (
    <div data-testid="error-display">{message}</div>
  ),
}));

// Mock the logger to prevent console output during tests
vi.mock('../../../services/logger.client', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));

const mockedApiClient = vi.mocked(apiClient);
const mockedToast = vi.mocked(toast, true);

const mockStores: StoreWithLocations[] = [
|
||||
createMockStoreWithLocations({
|
||||
store_id: 1,
|
||||
name: 'Loblaws',
|
||||
logo_url: 'https://example.com/loblaws.png',
|
||||
locations: [
|
||||
{ address: { address_line_1: '123 Main St', city: 'Toronto' } },
|
||||
{ address: { address_line_1: '456 Oak Ave', city: 'Mississauga' } },
|
||||
],
|
||||
}),
|
||||
createMockStoreWithLocations({
|
||||
store_id: 2,
|
||||
name: 'No Frills',
|
||||
logo_url: null,
|
||||
locations: [],
|
||||
}),
|
||||
createMockStoreWithLocations({
|
||||
store_id: 3,
|
||||
name: 'Walmart',
|
||||
logo_url: 'https://example.com/walmart.png',
|
||||
locations: [{ address: { address_line_1: '789 Pine St', city: 'Vancouver' } }],
|
||||
}),
|
||||
];
|
||||
|
||||
// Helper to create a successful API response
|
||||
const createSuccessResponse = (data: unknown) =>
|
||||
new Response(JSON.stringify({ data }), {
|
||||
status: 200,
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
});
|
||||
|
||||
// Helper to create a failed API response
|
||||
const createErrorResponse = (status: number, body?: string) => new Response(body || '', { status });
|
||||
|
||||
describe('AdminStoreManager', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
// Default mock: successful response with stores
|
||||
mockedApiClient.getStores.mockResolvedValue(createSuccessResponse(mockStores));
|
||||
mockedApiClient.deleteStore.mockResolvedValue(createSuccessResponse({}));
|
||||
|
||||
// Reset window.confirm mock
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
});
|
||||
|
||||
describe('Loading State', () => {
|
||||
it('should render a loading state while fetching stores', async () => {
|
||||
// Make getStores hang indefinitely for this test
|
||||
mockedApiClient.getStores.mockImplementation(
|
||||
() => new Promise(() => {}), // Never resolves
|
||||
);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
expect(screen.getByText('Loading stores...')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error State', () => {
|
||||
it('should display an error message if fetching stores fails', async () => {
|
||||
mockedApiClient.getStores.mockResolvedValue(
|
||||
createErrorResponse(500, 'Internal Server Error'),
|
||||
);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-display')).toBeInTheDocument();
|
||||
expect(screen.getByText(/Failed to load stores/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should display a generic error message for network failures', async () => {
|
||||
mockedApiClient.getStores.mockRejectedValue(new Error('Network Error'));
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('error-display')).toBeInTheDocument();
|
||||
expect(screen.getByText(/Failed to load stores: Network Error/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Success State - Store List', () => {
|
||||
it('should render the list of stores when data is fetched successfully', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('heading', { name: /store management/i })).toBeInTheDocument();
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
expect(screen.getByText('No Frills')).toBeInTheDocument();
|
||||
expect(screen.getByText('Walmart')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should display store logos when available', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
const loblawsLogo = screen.getByAltText('Loblaws logo');
|
||||
expect(loblawsLogo).toHaveAttribute('src', 'https://example.com/loblaws.png');
|
||||
|
||||
const walmartLogo = screen.getByAltText('Walmart logo');
|
||||
expect(walmartLogo).toHaveAttribute('src', 'https://example.com/walmart.png');
|
||||
});
|
||||
});
|
||||
|
||||
it('should display "No Logo" placeholder when logo_url is null', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// No Frills has no logo
|
||||
const noLogoElements = screen.getAllByText('No Logo');
|
||||
expect(noLogoElements.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should display location count and first address for stores with locations', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
// Loblaws has 2 locations
|
||||
expect(screen.getByText('2 location(s)')).toBeInTheDocument();
|
||||
expect(screen.getByText('123 Main St, Toronto')).toBeInTheDocument();
|
||||
expect(screen.getByText('+ 1 more')).toBeInTheDocument();
|
||||
|
||||
// Walmart has 1 location
|
||||
expect(screen.getByText('1 location(s)')).toBeInTheDocument();
|
||||
expect(screen.getByText('789 Pine St, Vancouver')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should display "No locations" for stores without locations', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('No locations')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should render Edit and Delete buttons for each store', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
// There are 3 stores, so should have 3 Edit and 3 Delete buttons
|
||||
const editButtons = screen.getAllByRole('button', { name: /edit/i });
|
||||
const deleteButtons = screen.getAllByRole('button', { name: /delete/i });
|
||||
|
||||
expect(editButtons).toHaveLength(3);
|
||||
expect(deleteButtons).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
|
||||
it('should render "Create Store" button', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('button', { name: /create store/i })).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should render an empty state message when no stores exist', async () => {
|
||||
mockedApiClient.getStores.mockResolvedValue(createSuccessResponse([]));
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('No stores found. Create one to get started!')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Table Structure', () => {
|
||||
it('should render a table with correct column headers', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('columnheader', { name: /logo/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('columnheader', { name: /store name/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('columnheader', { name: /locations/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('columnheader', { name: /actions/i })).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should render one row per store plus the header row', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
// 1 header row + 3 data rows
|
||||
const rows = screen.getAllByRole('row');
|
||||
expect(rows).toHaveLength(4);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Create Store Modal', () => {
|
||||
it('should open the create modal when "Create Store" button is clicked', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Create New Store')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('store-form-mock')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('store-form-mode')).toHaveTextContent('create');
|
||||
});
|
||||
});
|
||||
|
||||
it('should close the create modal when cancel is clicked', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Open modal
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Create New Store')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click cancel
|
||||
fireEvent.click(screen.getByTestId('store-form-cancel'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByText('Create New Store')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should close the create modal and refresh data when form submission succeeds', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Open modal
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('store-form-mock')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Submit the form (triggers onSuccess)
|
||||
fireEvent.click(screen.getByTestId('store-form-success'));
|
||||
|
||||
await waitFor(() => {
|
||||
// Modal should be closed
|
||||
expect(screen.queryByText('Create New Store')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edit Store Modal', () => {
|
||||
it('should open the edit modal when "Edit" button is clicked', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Find the Loblaws row and click its Edit button
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
const editButton = within(loblawsRow!).getByRole('button', { name: /edit/i });
|
||||
fireEvent.click(editButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Edit Store')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('store-form-mock')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('store-form-mode')).toHaveTextContent('edit');
|
||||
expect(screen.getByTestId('store-form-store-id')).toHaveTextContent('1');
|
||||
});
|
||||
});
|
||||
|
||||
it('should pass the correct store to the form in edit mode', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Walmart')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click Edit on Walmart (store_id: 3)
|
||||
const walmartRow = screen.getByText('Walmart').closest('tr');
|
||||
const editButton = within(walmartRow!).getByRole('button', { name: /edit/i });
|
||||
fireEvent.click(editButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('store-form-store-id')).toHaveTextContent('3');
|
||||
});
|
||||
});
|
||||
|
||||
it('should close the edit modal when cancel is clicked', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Open edit modal
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /edit/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Edit Store')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click cancel
|
||||
fireEvent.click(screen.getByTestId('store-form-cancel'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByText('Edit Store')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should close the edit modal and refresh data when form submission succeeds', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Open edit modal
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /edit/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('store-form-mock')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Submit form
|
||||
fireEvent.click(screen.getByTestId('store-form-success'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByText('Edit Store')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Delete Store', () => {
|
||||
it('should show a confirmation dialog before deleting', async () => {
|
||||
const confirmSpy = vi.spyOn(window, 'confirm').mockReturnValue(false);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
expect(confirmSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Are you sure you want to delete "Loblaws"'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not delete if user cancels the confirmation', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(false);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
// API should not be called
|
||||
expect(mockedApiClient.deleteStore).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should call deleteStore API when user confirms deletion', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.deleteStore).toHaveBeenCalledWith(1);
|
||||
});
|
||||
});
|
||||
|
||||
it('should show a loading toast while deleting', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
mockedToast.loading.mockReturnValue('delete-toast-id');
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.loading).toHaveBeenCalledWith('Deleting store...');
|
||||
});
|
||||
});
|
||||
|
||||
it('should show success toast after successful deletion', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
mockedToast.loading.mockReturnValue('delete-toast-id');
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.success).toHaveBeenCalledWith('Store deleted successfully!', {
|
||||
id: 'delete-toast-id',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast when deletion fails with response body', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
mockedToast.loading.mockReturnValue('delete-toast-id');
|
||||
mockedApiClient.deleteStore.mockResolvedValue(
|
||||
createErrorResponse(400, 'Store has active flyers'),
|
||||
);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Delete failed: Store has active flyers', {
|
||||
id: 'delete-toast-id',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast with status code when response body is empty', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
mockedToast.loading.mockReturnValue('delete-toast-id');
|
||||
mockedApiClient.deleteStore.mockResolvedValue(createErrorResponse(500));
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'Delete failed: Delete failed with status 500',
|
||||
{ id: 'delete-toast-id' },
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast when API call throws an exception', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
mockedToast.loading.mockReturnValue('delete-toast-id');
|
||||
mockedApiClient.deleteStore.mockRejectedValue(new Error('Network error'));
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Delete failed: Network error', {
|
||||
id: 'delete-toast-id',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle non-Error objects thrown during deletion', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
mockedToast.loading.mockReturnValue('delete-toast-id');
|
||||
mockedApiClient.deleteStore.mockRejectedValue('A string error');
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Delete failed: A string error', {
|
||||
id: 'delete-toast-id',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should include correct warning message in confirmation dialog about locations and linked data', async () => {
|
||||
const confirmSpy = vi.spyOn(window, 'confirm').mockReturnValue(false);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('No Frills')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const noFrillsRow = screen.getByText('No Frills').closest('tr');
|
||||
fireEvent.click(within(noFrillsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
expect(confirmSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('delete all associated locations'),
|
||||
);
|
||||
expect(confirmSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining('may affect flyers/receipts'),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('API Calls', () => {
|
||||
it('should call getStores with includeLocations=true on mount', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.getStores).toHaveBeenCalledWith(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Query Invalidation', () => {
|
||||
it('should refetch stores after successful store deletion', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Loblaws')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Initial call
|
||||
expect(mockedApiClient.getStores).toHaveBeenCalledTimes(1);
|
||||
|
||||
const loblawsRow = screen.getByText('Loblaws').closest('tr');
|
||||
fireEvent.click(within(loblawsRow!).getByRole('button', { name: /delete/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.success).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
// Should have been called again due to query invalidation
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.getStores).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Accessibility', () => {
|
||||
it('should have accessible table structure', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('table')).toBeInTheDocument();
|
||||
// There should be 2 rowgroups: thead and tbody
|
||||
const rowgroups = screen.getAllByRole('rowgroup');
|
||||
expect(rowgroups).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
it('should have proper scope attribute on column headers', async () => {
|
||||
renderWithProviders(<AdminStoreManager />);
|
||||
|
||||
await waitFor(() => {
|
||||
const headers = screen.getAllByRole('columnheader');
|
||||
headers.forEach((header) => {
|
||||
expect(header).toHaveAttribute('scope', 'col');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
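
The AdminStoreManager component itself is not included in this diff, but the delete-path assertions above pin its behaviour down fairly tightly. The sketch below shows one way that handler could be written, assuming TanStack Query for cache invalidation; the `['stores']` query key, the hook framing, and the exact confirmation wording are guesses, while the toast-id plumbing and error fallbacks mirror the test expectations.

```tsx
// Hypothetical delete handler matching the "Delete Store" assertions above.
import toast from 'react-hot-toast';
import { useQueryClient } from '@tanstack/react-query';

import * as apiClient from '../../../services/apiClient';
import type { StoreWithLocations } from '../../../types';

export const useDeleteStore = () => {
  const queryClient = useQueryClient();

  return async (store: StoreWithLocations) => {
    // Confirmation wording is an assumption; tests only check for key phrases.
    const confirmed = window.confirm(
      `Are you sure you want to delete "${store.name}"? This will delete all associated locations and may affect flyers/receipts.`,
    );
    if (!confirmed) return;

    const toastId = toast.loading('Deleting store...');
    try {
      const res = await apiClient.deleteStore(store.store_id);
      if (!res.ok) {
        const body = await res.text();
        throw new Error(body || `Delete failed with status ${res.status}`);
      }
      toast.success('Store deleted successfully!', { id: toastId });
      // Query key is assumed; invalidation triggers the refetch the tests count.
      await queryClient.invalidateQueries({ queryKey: ['stores'] });
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      toast.error(`Delete failed: ${message}`, { id: toastId });
    }
  };
};
```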

src/pages/admin/components/StoreForm.test.tsx (new file, 893 lines added)
@@ -0,0 +1,893 @@
// src/pages/admin/components/StoreForm.test.tsx
import React from 'react';
import { screen, fireEvent, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import toast from 'react-hot-toast';
import { StoreForm } from './StoreForm';
import * as apiClient from '../../../services/apiClient';
import { createMockStoreWithLocations } from '../../../tests/utils/mockFactories';
import { renderWithProviders } from '../../../tests/utils/renderWithProviders';

// Mock apiClient module
vi.mock('../../../services/apiClient');

// Mock react-hot-toast
vi.mock('react-hot-toast', () => ({
  default: {
    error: vi.fn(),
    success: vi.fn(),
    loading: vi.fn(() => 'toast-id'),
  },
}));

// Mock the logger to prevent console noise
vi.mock('../../../services/logger.client', () => ({
  logger: {
    error: vi.fn(),
    warn: vi.fn(),
    info: vi.fn(),
  },
}));

const mockedApiClient = vi.mocked(apiClient);
const mockedToast = vi.mocked(toast);

describe('StoreForm', () => {
  const mockOnSuccess = vi.fn();
  const mockOnCancel = vi.fn();

  const defaultProps = {
    onSuccess: mockOnSuccess,
    onCancel: mockOnCancel,
  };

  beforeEach(() => {
    vi.clearAllMocks();
  });
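
The validation messages and payload shape asserted in the tests that follow imply submit-time logic roughly like the helper below. It is a reconstruction for readers of this diff, not the actual `StoreForm.tsx`; field and function names are assumptions, and the real component keeps this logic inline in its submit handler.

```tsx
// Hypothetical helper capturing the validation rules the StoreForm tests assert.
import toast from 'react-hot-toast';

interface StoreFormValues {
  name: string;
  logoUrl: string;
  includeAddress: boolean;
  addressLine1: string;
  city: string;
  provinceState: string; // defaults to 'ON' in the tests
  postalCode: string;
  country: string; // defaults to 'Canada' in the tests
}

export const buildCreateStorePayload = (values: StoreFormValues) => {
  // Whitespace-only names slip past HTML5 `required`, so trim before checking.
  if (!values.name.trim()) {
    toast.error('Store name is required');
    return null;
  }
  if (
    values.includeAddress &&
    (!values.addressLine1.trim() || !values.city.trim() || !values.postalCode.trim())
  ) {
    toast.error('All address fields are required when adding a location');
    return null;
  }
  return {
    name: values.name.trim(),
    logo_url: values.logoUrl.trim() || undefined,
    ...(values.includeAddress && {
      address: {
        address_line_1: values.addressLine1.trim(),
        city: values.city.trim(),
        province_state: values.provinceState.trim(),
        postal_code: values.postalCode.trim(),
        country: values.country.trim(),
      },
    }),
  };
};
```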
// =========================================================================
|
||||
// Rendering Tests
|
||||
// =========================================================================
|
||||
describe('Rendering', () => {
|
||||
it('should render empty form in create mode', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
// Check that the form fields are present
|
||||
expect(screen.getByLabelText(/store name/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/logo url/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/include store address/i)).toBeInTheDocument();
|
||||
|
||||
// Check buttons
|
||||
expect(screen.getByRole('button', { name: /cancel/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('button', { name: /create store/i })).toBeInTheDocument();
|
||||
|
||||
// Store name should be empty
|
||||
expect(screen.getByLabelText(/store name/i)).toHaveValue('');
|
||||
expect(screen.getByLabelText(/logo url/i)).toHaveValue('');
|
||||
});
|
||||
|
||||
it('should render pre-filled form in edit mode', () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 1,
|
||||
name: 'Test Store',
|
||||
logo_url: 'https://example.com/logo.png',
|
||||
});
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
// Check that the form is pre-filled
|
||||
expect(screen.getByLabelText(/store name/i)).toHaveValue('Test Store');
|
||||
expect(screen.getByLabelText(/logo url/i)).toHaveValue('https://example.com/logo.png');
|
||||
|
||||
// In edit mode, button says "Update Store"
|
||||
expect(screen.getByRole('button', { name: /update store/i })).toBeInTheDocument();
|
||||
|
||||
// In edit mode, address checkbox should say "Add a new location" and be unchecked
|
||||
expect(screen.getByLabelText(/add a new location/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/add a new location/i)).not.toBeChecked();
|
||||
});
|
||||
|
||||
it('should show address fields when include address checkbox is checked in create mode', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
// In create mode, checkbox should be checked by default
|
||||
expect(screen.getByLabelText(/include store address/i)).toBeChecked();
|
||||
|
||||
// Address fields should be visible
|
||||
expect(screen.getByLabelText(/address line 1/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/city/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/province\/state/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/postal code/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/country/i)).toBeInTheDocument();
|
||||
|
||||
// Province should default to 'ON' and country to 'Canada'
|
||||
expect(screen.getByLabelText(/province\/state/i)).toHaveValue('ON');
|
||||
expect(screen.getByLabelText(/country/i)).toHaveValue('Canada');
|
||||
});
|
||||
|
||||
it('should hide address fields when checkbox is unchecked', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
// Uncheck the address checkbox
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i));
|
||||
|
||||
// Address fields should be hidden
|
||||
expect(screen.queryByLabelText(/address line 1/i)).not.toBeInTheDocument();
|
||||
expect(screen.queryByLabelText(/city/i)).not.toBeInTheDocument();
|
||||
expect(screen.queryByLabelText(/province\/state/i)).not.toBeInTheDocument();
|
||||
expect(screen.queryByLabelText(/postal code/i)).not.toBeInTheDocument();
|
||||
expect(screen.queryByLabelText(/country/i)).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should show address fields when "Add a new location" is checked in edit mode', () => {
|
||||
const mockStore = createMockStoreWithLocations({ store_id: 1, name: 'Test Store' });
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
// Initially address fields should be hidden in edit mode
|
||||
expect(screen.queryByLabelText(/address line 1/i)).not.toBeInTheDocument();
|
||||
|
||||
// Check the "Add a new location" checkbox
|
||||
fireEvent.click(screen.getByLabelText(/add a new location/i));
|
||||
|
||||
// Address fields should now be visible
|
||||
expect(screen.getByLabelText(/address line 1/i)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/city/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// User Input Tests
|
||||
// =========================================================================
|
||||
describe('User Input', () => {
|
||||
it('should allow typing in the store name field', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
const nameInput = screen.getByLabelText(/store name/i);
|
||||
fireEvent.change(nameInput, { target: { value: 'Loblaws' } });
|
||||
|
||||
expect(nameInput).toHaveValue('Loblaws');
|
||||
});
|
||||
|
||||
it('should allow typing in the logo URL field', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
const logoInput = screen.getByLabelText(/logo url/i);
|
||||
fireEvent.change(logoInput, { target: { value: 'https://example.com/logo.png' } });
|
||||
|
||||
expect(logoInput).toHaveValue('https://example.com/logo.png');
|
||||
});
|
||||
|
||||
it('should allow typing in all address fields', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
const addressLine1 = screen.getByLabelText(/address line 1/i);
|
||||
const city = screen.getByLabelText(/city/i);
|
||||
const provinceState = screen.getByLabelText(/province\/state/i);
|
||||
const postalCode = screen.getByLabelText(/postal code/i);
|
||||
const country = screen.getByLabelText(/country/i);
|
||||
|
||||
fireEvent.change(addressLine1, { target: { value: '123 Main St' } });
|
||||
fireEvent.change(city, { target: { value: 'Toronto' } });
|
||||
fireEvent.change(provinceState, { target: { value: 'Ontario' } });
|
||||
fireEvent.change(postalCode, { target: { value: 'M5V 1A1' } });
|
||||
fireEvent.change(country, { target: { value: 'USA' } });
|
||||
|
||||
expect(addressLine1).toHaveValue('123 Main St');
|
||||
expect(city).toHaveValue('Toronto');
|
||||
expect(provinceState).toHaveValue('Ontario');
|
||||
expect(postalCode).toHaveValue('M5V 1A1');
|
||||
expect(country).toHaveValue('USA');
|
||||
});
|
||||
|
||||
it('should toggle the include address checkbox', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
const checkbox = screen.getByLabelText(/include store address/i);
|
||||
|
||||
// Initially checked in create mode
|
||||
expect(checkbox).toBeChecked();
|
||||
|
||||
// Uncheck
|
||||
fireEvent.click(checkbox);
|
||||
expect(checkbox).not.toBeChecked();
|
||||
|
||||
// Check again
|
||||
fireEvent.click(checkbox);
|
||||
expect(checkbox).toBeChecked();
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Form Validation Tests
|
||||
// =========================================================================
|
||||
describe('Form Validation', () => {
|
||||
// Note: The StoreForm has HTML5 `required` attributes on certain inputs.
|
||||
// When a field with `required` is empty, browser validation prevents the
|
||||
// submit event from firing, so the JavaScript validation in handleSubmit
|
||||
// is a secondary layer for cases like whitespace-only values.
|
||||
|
||||
it('should have required attribute on store name field', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
const nameInput = screen.getByLabelText(/store name/i);
|
||||
expect(nameInput).toHaveAttribute('required');
|
||||
});
|
||||
|
||||
it('should show error toast when store name is whitespace only', async () => {
|
||||
// This tests the JS validation for whitespace-only values
|
||||
// (browser validation doesn't catch whitespace-only)
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: ' ' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck address
|
||||
|
||||
// Directly trigger form submit to bypass HTML5 validation
|
||||
const form = document.querySelector('form');
|
||||
if (form) {
|
||||
fireEvent.submit(form);
|
||||
}
|
||||
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Store name is required');
|
||||
});
|
||||
|
||||
it('should show error toast when address fields contain only whitespace', async () => {
|
||||
// This tests the JS validation for whitespace-only values in address fields
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
// Fill store name
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
|
||||
// Fill address fields with whitespace only (browser validation won't catch this)
|
||||
fireEvent.change(screen.getByLabelText(/address line 1/i), { target: { value: ' ' } });
|
||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: ' ' } });
|
||||
fireEvent.change(screen.getByLabelText(/postal code/i), { target: { value: ' ' } });
|
||||
|
||||
// Directly trigger form submit to bypass HTML5 validation
|
||||
const form = document.querySelector('form');
|
||||
if (form) {
|
||||
fireEvent.submit(form);
|
||||
}
|
||||
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'All address fields are required when adding a location',
|
||||
);
|
||||
expect(mockedApiClient.createStore).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should have required attribute on address fields when checkbox is checked', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
// Checkbox is checked by default in create mode, address fields should be visible
|
||||
expect(screen.getByLabelText(/address line 1/i)).toHaveAttribute('required');
|
||||
expect(screen.getByLabelText(/city/i)).toHaveAttribute('required');
|
||||
expect(screen.getByLabelText(/province\/state/i)).toHaveAttribute('required');
|
||||
expect(screen.getByLabelText(/postal code/i)).toHaveAttribute('required');
|
||||
});
|
||||
|
||||
it('should show error toast when address line 1 is whitespace but other fields are filled', async () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.change(screen.getByLabelText(/address line 1/i), { target: { value: ' ' } });
|
||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'Toronto' } });
|
||||
fireEvent.change(screen.getByLabelText(/postal code/i), { target: { value: 'M5V 1A1' } });
|
||||
|
||||
const form = document.querySelector('form');
|
||||
if (form) {
|
||||
fireEvent.submit(form);
|
||||
}
|
||||
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'All address fields are required when adding a location',
|
||||
);
|
||||
});
|
||||
|
||||
it('should show error toast when city is whitespace but other fields are filled', async () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.change(screen.getByLabelText(/address line 1/i), {
|
||||
target: { value: '123 Main St' },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: ' ' } });
|
||||
fireEvent.change(screen.getByLabelText(/postal code/i), { target: { value: 'M5V 1A1' } });
|
||||
|
||||
const form = document.querySelector('form');
|
||||
if (form) {
|
||||
fireEvent.submit(form);
|
||||
}
|
||||
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'All address fields are required when adding a location',
|
||||
);
|
||||
});
|
||||
|
||||
it('should show error toast when postal code is whitespace but other fields are filled', async () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.change(screen.getByLabelText(/address line 1/i), {
|
||||
target: { value: '123 Main St' },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'Toronto' } });
|
||||
fireEvent.change(screen.getByLabelText(/postal code/i), { target: { value: ' ' } });
|
||||
|
||||
const form = document.querySelector('form');
|
||||
if (form) {
|
||||
fireEvent.submit(form);
|
||||
}
|
||||
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'All address fields are required when adding a location',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Create Store Submission Tests
|
||||
// =========================================================================
|
||||
describe('Create Store Submission', () => {
|
||||
it('should call createStore API with correct data when submitting without address', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 1, name: 'New Store' }), { status: 201 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
// Fill the form
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'New Store' } });
|
||||
fireEvent.change(screen.getByLabelText(/logo url/i), {
|
||||
target: { value: 'https://example.com/logo.png' },
|
||||
});
|
||||
|
||||
// Uncheck address
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i));
|
||||
|
||||
// Submit
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.createStore).toHaveBeenCalledWith({
|
||||
name: 'New Store',
|
||||
logo_url: 'https://example.com/logo.png',
|
||||
});
|
||||
expect(mockedToast.loading).toHaveBeenCalledWith('Creating store...');
|
||||
expect(mockedToast.success).toHaveBeenCalledWith('Store created successfully!', {
|
||||
id: 'toast-id',
|
||||
});
|
||||
expect(mockOnSuccess).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should call createStore API with address when checkbox is checked', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 1, name: 'New Store' }), { status: 201 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
// Fill the form with address
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'New Store' } });
|
||||
fireEvent.change(screen.getByLabelText(/address line 1/i), {
|
||||
target: { value: '123 Main St' },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'Toronto' } });
|
||||
fireEvent.change(screen.getByLabelText(/postal code/i), { target: { value: 'M5V 1A1' } });
|
||||
|
||||
// Submit
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.createStore).toHaveBeenCalledWith({
|
||||
name: 'New Store',
|
||||
logo_url: undefined,
|
||||
address: {
|
||||
address_line_1: '123 Main St',
|
||||
city: 'Toronto',
|
||||
province_state: 'ON',
|
||||
postal_code: 'M5V 1A1',
|
||||
country: 'Canada',
|
||||
},
|
||||
});
|
||||
expect(mockOnSuccess).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should trim whitespace from input fields before submission', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 1 }), { status: 201 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), {
|
||||
target: { value: ' Trimmed Store ' },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText(/logo url/i), {
|
||||
target: { value: ' https://example.com/logo.png ' },
|
||||
});
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.createStore).toHaveBeenCalledWith({
|
||||
name: 'Trimmed Store',
|
||||
logo_url: 'https://example.com/logo.png',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should not include logo_url if empty', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 1 }), { status: 201 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), {
|
||||
target: { value: 'No Logo Store' },
|
||||
});
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.createStore).toHaveBeenCalledWith({
|
||||
name: 'No Logo Store',
|
||||
logo_url: undefined,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast when createStore API fails', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(
|
||||
new Response('Store already exists', { status: 400 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Failed: Store already exists', {
|
||||
id: 'toast-id',
|
||||
});
|
||||
expect(mockOnSuccess).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should show generic error message when response body is empty', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(new Response('', { status: 500 }));
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'Failed: Create failed with status 500',
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle network error during store creation', async () => {
|
||||
mockedApiClient.createStore.mockRejectedValue(new Error('Network error'));
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Failed: Network error', {
|
||||
id: 'toast-id',
|
||||
});
|
||||
expect(mockOnSuccess).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle non-Error thrown during submission', async () => {
|
||||
mockedApiClient.createStore.mockRejectedValue('String error');
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Failed: String error', {
|
||||
id: 'toast-id',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Update Store Submission Tests
|
||||
// =========================================================================
|
||||
describe('Update Store Submission', () => {
|
||||
it('should call updateStore API with correct data', async () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 42,
|
||||
name: 'Old Store Name',
|
||||
logo_url: 'https://example.com/old-logo.png',
|
||||
});
|
||||
|
||||
mockedApiClient.updateStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 42, name: 'Updated Store' }), { status: 200 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
// Change the name
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), {
|
||||
target: { value: 'Updated Store Name' },
|
||||
});
|
||||
|
||||
// Submit
|
||||
fireEvent.click(screen.getByRole('button', { name: /update store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.updateStore).toHaveBeenCalledWith(42, {
|
||||
name: 'Updated Store Name',
|
||||
logo_url: 'https://example.com/old-logo.png',
|
||||
});
|
||||
expect(mockedToast.loading).toHaveBeenCalledWith('Updating store...');
|
||||
expect(mockedToast.success).toHaveBeenCalledWith('Store updated successfully!', {
|
||||
id: 'toast-id',
|
||||
});
|
||||
expect(mockOnSuccess).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should call addStoreLocation when adding location in edit mode', async () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 42,
|
||||
name: 'Existing Store',
|
||||
});
|
||||
|
||||
mockedApiClient.updateStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 42 }), { status: 200 }),
|
||||
);
|
||||
mockedApiClient.addStoreLocation.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_location_id: 1 }), { status: 201 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
// Check the "Add a new location" checkbox
|
||||
fireEvent.click(screen.getByLabelText(/add a new location/i));
|
||||
|
||||
// Fill address fields
|
||||
fireEvent.change(screen.getByLabelText(/address line 1/i), {
|
||||
target: { value: '456 New St' },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'Vancouver' } });
|
||||
fireEvent.change(screen.getByLabelText(/province\/state/i), { target: { value: 'BC' } });
|
||||
fireEvent.change(screen.getByLabelText(/postal code/i), { target: { value: 'V6B 1A1' } });
|
||||
|
||||
// Submit
|
||||
fireEvent.click(screen.getByRole('button', { name: /update store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.updateStore).toHaveBeenCalled();
|
||||
expect(mockedApiClient.addStoreLocation).toHaveBeenCalledWith(42, {
|
||||
address_line_1: '456 New St',
|
||||
city: 'Vancouver',
|
||||
province_state: 'BC',
|
||||
postal_code: 'V6B 1A1',
|
||||
country: 'Canada',
|
||||
});
|
||||
expect(mockOnSuccess).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should not call addStoreLocation when checkbox is unchecked in edit mode', async () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 42,
|
||||
name: 'Existing Store',
|
||||
});
|
||||
|
||||
mockedApiClient.updateStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 42 }), { status: 200 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
// Just update the name (checkbox is unchecked by default in edit mode)
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'New Name' } });
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /update store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.updateStore).toHaveBeenCalled();
|
||||
expect(mockedApiClient.addStoreLocation).not.toHaveBeenCalled();
|
||||
expect(mockOnSuccess).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast when updateStore API fails', async () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 42,
|
||||
name: 'Existing Store',
|
||||
});
|
||||
|
||||
mockedApiClient.updateStore.mockResolvedValue(new Response('Update failed', { status: 400 }));
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'New Name' } });
|
||||
fireEvent.click(screen.getByRole('button', { name: /update store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Failed: Update failed', {
|
||||
id: 'toast-id',
|
||||
});
|
||||
expect(mockOnSuccess).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast when addStoreLocation fails after updateStore succeeds', async () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 42,
|
||||
name: 'Existing Store',
|
||||
});
|
||||
|
||||
mockedApiClient.updateStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 42 }), { status: 200 }),
|
||||
);
|
||||
mockedApiClient.addStoreLocation.mockResolvedValue(
|
||||
new Response('Location add failed', { status: 400 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
// Check the "Add a new location" checkbox
|
||||
fireEvent.click(screen.getByLabelText(/add a new location/i));
|
||||
|
||||
// Fill address fields
|
||||
fireEvent.change(screen.getByLabelText(/address line 1/i), {
|
||||
target: { value: '456 New St' },
|
||||
});
|
||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'Vancouver' } });
|
||||
fireEvent.change(screen.getByLabelText(/postal code/i), { target: { value: 'V6B 1A1' } });
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /update store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'Failed: Location add failed: Location add failed',
|
||||
{ id: 'toast-id' },
|
||||
);
|
||||
expect(mockOnSuccess).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle generic error message when updateStore response body is empty', async () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 42,
|
||||
name: 'Existing Store',
|
||||
});
|
||||
|
||||
mockedApiClient.updateStore.mockResolvedValue(new Response('', { status: 500 }));
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'New Name' } });
|
||||
fireEvent.click(screen.getByRole('button', { name: /update store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith(
|
||||
'Failed: Update failed with status 500',
|
||||
expect.any(Object),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Button Disable State Tests
|
||||
// =========================================================================
|
||||
describe('Button Disable States', () => {
|
||||
it('should disable buttons while submitting', async () => {
|
||||
// Create a promise that we can control
|
||||
let resolvePromise: (value: Response) => void;
|
||||
const pendingPromise = new Promise<Response>((resolve) => {
|
||||
resolvePromise = resolve;
|
||||
});
|
||||
|
||||
mockedApiClient.createStore.mockReturnValue(pendingPromise);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
const submitButton = screen.getByRole('button', { name: /create store/i });
|
||||
const cancelButton = screen.getByRole('button', { name: /cancel/i });
|
||||
|
||||
fireEvent.click(submitButton);
|
||||
|
||||
// Check that buttons are disabled and text changes
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('button', { name: /saving\.\.\./i })).toBeDisabled();
|
||||
expect(cancelButton).toBeDisabled();
|
||||
});
|
||||
|
||||
// Resolve the promise
|
||||
resolvePromise!(new Response(JSON.stringify({ store_id: 1 }), { status: 201 }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockOnSuccess).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should re-enable buttons after submission fails', async () => {
|
||||
mockedApiClient.createStore.mockRejectedValue(new Error('Failed'));
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedToast.error).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
// Buttons should be re-enabled
|
||||
expect(screen.getByRole('button', { name: /create store/i })).not.toBeDisabled();
|
||||
expect(screen.getByRole('button', { name: /cancel/i })).not.toBeDisabled();
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Cancel Button Tests
|
||||
// =========================================================================
|
||||
describe('Cancel Button', () => {
|
||||
it('should call onCancel when cancel button is clicked', () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /cancel/i }));
|
||||
|
||||
expect(mockOnCancel).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should call onCancel when cancel button is clicked in edit mode', () => {
|
||||
const mockStore = createMockStoreWithLocations({ store_id: 1, name: 'Test Store' });
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /cancel/i }));
|
||||
|
||||
expect(mockOnCancel).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Form Submission via Form Element
|
||||
// =========================================================================
|
||||
describe('Form Submission', () => {
|
||||
it('should submit form via form submit event', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 1 }), { status: 201 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
// Submit via form element using document.querySelector since form doesn't have role
|
||||
const form = document.querySelector('form');
|
||||
expect(form).toBeInTheDocument();
|
||||
if (form) {
|
||||
fireEvent.submit(form);
|
||||
}
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.createStore).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should submit form via submit button click', async () => {
|
||||
mockedApiClient.createStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 1 }), { status: 201 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
fireEvent.change(screen.getByLabelText(/store name/i), { target: { value: 'Test Store' } });
|
||||
fireEvent.click(screen.getByLabelText(/include store address/i)); // Uncheck
|
||||
|
||||
// Submit via button click
|
||||
fireEvent.click(screen.getByRole('button', { name: /create store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.createStore).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Edge Cases
|
||||
// =========================================================================
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle store with null logo_url in edit mode', () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 1,
|
||||
name: 'Store Without Logo',
|
||||
logo_url: null,
|
||||
});
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
expect(screen.getByLabelText(/store name/i)).toHaveValue('Store Without Logo');
|
||||
expect(screen.getByLabelText(/logo url/i)).toHaveValue('');
|
||||
});
|
||||
|
||||
it('should handle store with undefined logo_url in edit mode', () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 1,
|
||||
name: 'Store Without Logo',
|
||||
logo_url: undefined,
|
||||
});
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
expect(screen.getByLabelText(/store name/i)).toHaveValue('Store Without Logo');
|
||||
expect(screen.getByLabelText(/logo url/i)).toHaveValue('');
|
||||
});
|
||||
|
||||
it('should clear logo_url when submitting with empty string', async () => {
|
||||
const mockStore = createMockStoreWithLocations({
|
||||
store_id: 1,
|
||||
name: 'Store With Logo',
|
||||
logo_url: 'https://example.com/logo.png',
|
||||
});
|
||||
|
||||
mockedApiClient.updateStore.mockResolvedValue(
|
||||
new Response(JSON.stringify({ store_id: 1 }), { status: 200 }),
|
||||
);
|
||||
|
||||
renderWithProviders(<StoreForm {...defaultProps} store={mockStore} />);
|
||||
|
||||
// Clear the logo URL
|
||||
fireEvent.change(screen.getByLabelText(/logo url/i), { target: { value: '' } });
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /update store/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.updateStore).toHaveBeenCalledWith(1, {
|
||||
name: 'Store With Logo',
|
||||
logo_url: undefined,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should prevent default form submission behavior', async () => {
|
||||
renderWithProviders(<StoreForm {...defaultProps} />);
|
||||
|
||||
const form = document.querySelector('form');
|
||||
expect(form).toBeInTheDocument();
|
||||
|
||||
// The form has an onSubmit handler that calls e.preventDefault()
|
||||
// This is tested implicitly by the fact that the page doesn't reload
|
||||
// and our mocks are called instead
|
||||
});
|
||||
});
|
||||
});
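The "should disable buttons while submitting" test above holds the mocked createStore call open with a manually resolved promise so the in-flight UI state can be asserted before the request completes. A minimal sketch of that pattern (Vitest assumed; createStoreMock, pending, and resolveCreate are illustrative names standing in for the identifiers used in this file):

import { vi } from 'vitest';

// Illustrative stand-in for the mocked API client method used in this file.
const createStoreMock = vi.fn();

// A deferred promise keeps the mocked request pending until the test releases it.
let resolveCreate: (value: Response) => void;
const pending = new Promise<Response>((resolve) => {
  resolveCreate = resolve;
});
createStoreMock.mockReturnValue(pending);

// ...render the form, submit, and assert the submit/cancel buttons are disabled...

// Releasing the promise lets the success path run so onSuccess can be asserted.
resolveCreate!(new Response(JSON.stringify({ store_id: 1 }), { status: 201 }));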
@@ -108,6 +108,14 @@ vi.mock('../config/env', () => ({
|
||||
redis: { url: 'redis://localhost:6379' },
|
||||
auth: { jwtSecret: 'test-secret' },
|
||||
server: { port: 3000, host: 'localhost' },
|
||||
featureFlags: {
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: false,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: false,
|
||||
},
|
||||
},
|
||||
isAiConfigured: vi.fn().mockReturnValue(false),
|
||||
parseConfig: vi.fn(),
|
||||
@@ -212,7 +220,9 @@ describe('Admin Content Management Routes (/api/v1/admin)', () => {
|
||||
it('POST /corrections/:id/approve should approve a correction', async () => {
|
||||
const correctionId = 123;
|
||||
vi.mocked(mockedDb.adminRepo.approveCorrection).mockResolvedValue(undefined);
|
||||
const response = await supertest(app).post(
  `/api/v1/admin/corrections/${correctionId}/approve`,
);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.data).toEqual({ message: 'Correction approved successfully.' });
|
||||
expect(vi.mocked(mockedDb.adminRepo.approveCorrection)).toHaveBeenCalledWith(
|
||||
@@ -224,14 +234,18 @@ describe('Admin Content Management Routes (/api/v1/admin)', () => {
|
||||
it('POST /corrections/:id/approve should return 500 on DB error', async () => {
|
||||
const correctionId = 123;
|
||||
vi.mocked(mockedDb.adminRepo.approveCorrection).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).post(
  `/api/v1/admin/corrections/${correctionId}/approve`,
);
|
||||
expect(response.status).toBe(500);
|
||||
});
|
||||
|
||||
it('POST /corrections/:id/reject should reject a correction', async () => {
|
||||
const correctionId = 789;
|
||||
vi.mocked(mockedDb.adminRepo.rejectCorrection).mockResolvedValue(undefined);
|
||||
const response = await supertest(app).post(
  `/api/v1/admin/corrections/${correctionId}/reject`,
);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.data).toEqual({ message: 'Correction rejected successfully.' });
|
||||
});
|
||||
@@ -239,7 +253,9 @@ describe('Admin Content Management Routes (/api/v1/admin)', () => {
|
||||
it('POST /corrections/:id/reject should return 500 on DB error', async () => {
|
||||
const correctionId = 789;
|
||||
vi.mocked(mockedDb.adminRepo.rejectCorrection).mockRejectedValue(new Error('DB Error'));
|
||||
const response = await supertest(app).post(
  `/api/v1/admin/corrections/${correctionId}/reject`,
);
|
||||
expect(response.status).toBe(500);
|
||||
});
|
||||
|
||||
|
||||
@@ -74,9 +74,41 @@ vi.mock('../config/env', () => ({
|
||||
redis: { url: 'redis://localhost:6379' },
|
||||
auth: { jwtSecret: 'test-secret' },
|
||||
server: { port: 3000, host: 'localhost' },
|
||||
featureFlags: {
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: true,
|
||||
},
|
||||
},
|
||||
isAiConfigured: vi.fn().mockReturnValue(false),
|
||||
parseConfig: vi.fn(),
|
||||
isDevelopment: false,
|
||||
}));
|
||||
|
||||
// Mock the feature flags service
|
||||
vi.mock('../services/featureFlags.server', () => ({
|
||||
getFeatureFlags: vi.fn(() => ({
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: true,
|
||||
})),
|
||||
isFeatureEnabled: vi.fn((flag: string) => {
|
||||
const flags: Record<string, boolean> = {
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: true,
|
||||
};
|
||||
return flags[flag] ?? false;
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock Passport to allow admin access
|
||||
@@ -93,6 +125,7 @@ vi.mock('../config/passport', () => ({
|
||||
|
||||
import adminRouter from './admin.routes';
|
||||
import { cacheService } from '../services/cacheService.server';
|
||||
import { getFeatureFlags } from '../services/featureFlags.server';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
|
||||
describe('Admin Routes Rate Limiting', () => {
|
||||
@@ -177,4 +210,67 @@ describe('Admin Routes Rate Limiting', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /feature-flags (ADR-024)', () => {
|
||||
it('should return 200 and the current feature flag states', async () => {
|
||||
const response = await supertest(app).get('/api/v1/admin/feature-flags');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.data.flags).toEqual({
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should call getFeatureFlags service function', async () => {
|
||||
await supertest(app).get('/api/v1/admin/feature-flags');
|
||||
|
||||
expect(vi.mocked(getFeatureFlags)).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return flags with all expected keys', async () => {
|
||||
const response = await supertest(app).get('/api/v1/admin/feature-flags');
|
||||
|
||||
const expectedFlags = [
|
||||
'bugsinkSync',
|
||||
'advancedRbac',
|
||||
'newDashboard',
|
||||
'betaRecipes',
|
||||
'experimentalAi',
|
||||
'debugMode',
|
||||
];
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(Object.keys(response.body.data.flags).sort()).toEqual(expectedFlags.sort());
|
||||
});
|
||||
|
||||
it('should return boolean values for all flags', async () => {
|
||||
const response = await supertest(app).get('/api/v1/admin/feature-flags');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
Object.values(response.body.data.flags).forEach((value) => {
|
||||
expect(typeof value).toBe('boolean');
|
||||
});
|
||||
});
|
||||
|
||||
it('should return 500 if getFeatureFlags throws an error', async () => {
|
||||
const featureFlagsError = new Error('Feature flags service error');
|
||||
vi.mocked(getFeatureFlags).mockImplementationOnce(() => {
|
||||
throw featureFlagsError;
|
||||
});
|
||||
|
||||
const response = await supertest(app).get('/api/v1/admin/feature-flags');
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: featureFlagsError },
|
||||
'Error fetching feature flags',
|
||||
);
|
||||
});
|
||||
});
|
||||
});
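For reference, a hedged sketch of how a client might consume the endpoint exercised above. The URL and the { success, data: { flags } } response envelope follow the assertions in these tests; fetchFeatureFlags and the bearer-token handling are illustrative names and assumptions, not part of this change:

// Illustrative only: reads the flag map returned by GET /api/v1/admin/feature-flags.
async function fetchFeatureFlags(token: string): Promise<Record<string, boolean>> {
  const res = await fetch('/api/v1/admin/feature-flags', {
    headers: { Authorization: `Bearer ${token}` }, // admin JWT assumed
  });
  if (!res.ok) {
    throw new Error(`Feature flag fetch failed with status ${res.status}`);
  }
  const body = await res.json();
  return body.data.flags as Record<string, boolean>;
}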
@@ -33,6 +33,7 @@ import { cleanupUploadedFile } from '../utils/fileUtils';
|
||||
import { brandService } from '../services/brandService';
|
||||
import { adminTriggerLimiter, adminUploadLimiter } from '../config/rateLimiters';
|
||||
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
|
||||
import { getFeatureFlags } from '../services/featureFlags.server';
|
||||
|
||||
const updateCorrectionSchema = numericIdParam('id').extend({
|
||||
body: z.object({
|
||||
@@ -1229,6 +1230,59 @@ router.get(
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /admin/feature-flags:
|
||||
* get:
|
||||
* tags: [Admin]
|
||||
* summary: Get feature flags status
|
||||
* description: Get the current state of all feature flags. Requires admin role. (ADR-024)
|
||||
* security:
|
||||
* - bearerAuth: []
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Feature flags and their current states
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* flags:
|
||||
* type: object
|
||||
* additionalProperties:
|
||||
* type: boolean
|
||||
* example:
|
||||
* bugsinkSync: false
|
||||
* advancedRbac: false
|
||||
* newDashboard: true
|
||||
* betaRecipes: false
|
||||
* experimentalAi: false
|
||||
* debugMode: false
|
||||
* 401:
|
||||
* description: Unauthorized
|
||||
* 403:
|
||||
* description: Forbidden - admin role required
|
||||
*/
|
||||
router.get(
  '/feature-flags',
  validateRequest(emptySchema),
  async (req: Request, res: Response, next: NextFunction) => {
    try {
      const flags = getFeatureFlags();
      sendSuccess(res, { flags });
    } catch (error) {
      req.log.error({ error }, 'Error fetching feature flags');
      next(error);
    }
  },
);
|
||||
|
||||
/**
|
||||
* @openapi
|
||||
* /admin/websocket/stats:
|
||||
|
||||
@@ -40,6 +40,14 @@ vi.mock('../config/env', () => ({
|
||||
redis: { url: 'redis://localhost:6379' },
|
||||
auth: { jwtSecret: 'test-secret' },
|
||||
server: { port: 3000, host: 'localhost' },
|
||||
featureFlags: {
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: false,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: false,
|
||||
},
|
||||
},
|
||||
isAiConfigured: vi.fn().mockReturnValue(false),
|
||||
parseConfig: vi.fn(),
|
||||
|
||||
@@ -45,6 +45,14 @@ vi.mock('../config/env', () => ({
|
||||
redis: { url: 'redis://localhost:6379' },
|
||||
auth: { jwtSecret: 'test-secret' },
|
||||
server: { port: 3000, host: 'localhost' },
|
||||
featureFlags: {
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: false,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: false,
|
||||
},
|
||||
},
|
||||
isAiConfigured: vi.fn().mockReturnValue(false),
|
||||
parseConfig: vi.fn(),
|
||||
|
||||
@@ -47,6 +47,14 @@ vi.mock('../config/env', () => ({
|
||||
redis: { url: 'redis://localhost:6379' },
|
||||
auth: { jwtSecret: 'test-secret' },
|
||||
server: { port: 3000, host: 'localhost' },
|
||||
featureFlags: {
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: false,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: false,
|
||||
},
|
||||
},
|
||||
isAiConfigured: vi.fn().mockReturnValue(false),
|
||||
parseConfig: vi.fn(),
|
||||
|
||||
506
src/routes/category.routes.test.ts
Normal file
@@ -0,0 +1,506 @@
|
||||
// src/routes/category.routes.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import supertest from 'supertest';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
import { createMockCategory } from '../tests/utils/mockFactories';
|
||||
import type { Category } from '../types';
|
||||
|
||||
// 1. Use vi.hoisted to create mock functions before vi.mock hoisting
|
||||
const { mockCategoryDbService } = vi.hoisted(() => ({
|
||||
mockCategoryDbService: {
|
||||
getAllCategories: vi.fn(),
|
||||
getCategoryById: vi.fn(),
|
||||
getCategoryByName: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock the CategoryDbService with the hoisted mock object
|
||||
vi.mock('../services/db/category.db', () => ({
|
||||
CategoryDbService: mockCategoryDbService,
|
||||
}));
|
||||
|
||||
// Mock the logger to keep test output clean
|
||||
vi.mock('../services/logger.server', async () => ({
|
||||
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||
}));
|
||||
|
||||
// Import the router AFTER all mocks are defined
|
||||
import categoryRouter from './category.routes';
|
||||
|
||||
// Define a reusable matcher for the logger object
|
||||
const expectLogger = expect.objectContaining({
|
||||
info: expect.any(Function),
|
||||
error: expect.any(Function),
|
||||
});
|
||||
|
||||
describe('Category Routes (/api/v1/categories)', () => {
|
||||
const basePath = '/api/v1/categories';
|
||||
const app = createTestApp({ router: categoryRouter, basePath });
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
// ===========================================================================
|
||||
// GET / - List all categories
|
||||
// ===========================================================================
|
||||
describe('GET /', () => {
|
||||
it('should return 200 with list of all categories', async () => {
|
||||
// Arrange
|
||||
const mockCategories: Category[] = [
|
||||
createMockCategory({ category_id: 1, name: 'Bakery & Bread' }),
|
||||
createMockCategory({ category_id: 2, name: 'Dairy & Eggs' }),
|
||||
createMockCategory({ category_id: 3, name: 'Fruits & Vegetables' }),
|
||||
];
|
||||
mockCategoryDbService.getAllCategories.mockResolvedValue(mockCategories);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.data).toEqual(mockCategories);
|
||||
expect(mockCategoryDbService.getAllCategories).toHaveBeenCalledWith(expectLogger);
|
||||
});
|
||||
|
||||
it('should return 200 with empty array when no categories exist', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getAllCategories.mockResolvedValue([]);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.data).toEqual([]);
|
||||
expect(Array.isArray(response.body.data)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('Database connection failed');
|
||||
mockCategoryDbService.getAllCategories.mockRejectedValue(dbError);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.error.message).toBe('Database connection failed');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
}),
|
||||
expect.stringMatching(/Unhandled API Error/),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 500 if the database throws a non-Error object', async () => {
|
||||
// Arrange
|
||||
const dbError = { message: 'Unexpected error', code: 'UNKNOWN' };
|
||||
mockCategoryDbService.getAllCategories.mockRejectedValue(dbError);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.error.message).toBe('Unexpected error');
|
||||
});
|
||||
});
|
||||
|
||||
// ===========================================================================
|
||||
// GET /:id - Get category by ID
|
||||
// ===========================================================================
|
||||
describe('GET /:id', () => {
|
||||
it('should return 200 with category details for valid ID', async () => {
|
||||
// Arrange
|
||||
const mockCategory = createMockCategory({
|
||||
category_id: 5,
|
||||
name: 'Dairy & Eggs',
|
||||
});
|
||||
mockCategoryDbService.getCategoryById.mockResolvedValue(mockCategory);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/5');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.data).toEqual(mockCategory);
|
||||
expect(mockCategoryDbService.getCategoryById).toHaveBeenCalledWith(5, expectLogger);
|
||||
});
|
||||
|
||||
it('should return 404 for non-existent category ID', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getCategoryById.mockResolvedValue(null);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/999999');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain('Category with ID 999999 not found');
|
||||
expect(mockCategoryDbService.getCategoryById).toHaveBeenCalledWith(999999, expectLogger);
|
||||
});
|
||||
|
||||
it('should return 400 for non-numeric category ID', async () => {
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/invalid');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain('Invalid category ID');
|
||||
expect(mockCategoryDbService.getCategoryById).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 for negative category ID', async () => {
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/-1');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain('Invalid category ID');
|
||||
expect(mockCategoryDbService.getCategoryById).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 for zero category ID', async () => {
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/0');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain('Invalid category ID');
|
||||
expect(mockCategoryDbService.getCategoryById).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should coerce a floating point category ID to an integer via parseInt', async () => {
|
||||
// Act - The parseInt will convert 1.5 to 1, so this should actually succeed
|
||||
const _response = await supertest(app).get('/api/v1/categories/1.5');
|
||||
|
||||
// Assert - parseInt('1.5', 10) returns 1, which is valid
|
||||
// This tests that the route handles string-to-int conversion correctly
|
||||
expect(mockCategoryDbService.getCategoryById).toHaveBeenCalledWith(1, expectLogger);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('Database query timeout');
|
||||
mockCategoryDbService.getCategoryById.mockRejectedValue(dbError);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/1');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.error.message).toBe('Database query timeout');
|
||||
});
|
||||
|
||||
it('should handle very large category IDs', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getCategoryById.mockResolvedValue(null);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/2147483647');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(404);
|
||||
expect(mockCategoryDbService.getCategoryById).toHaveBeenCalledWith(2147483647, expectLogger);
|
||||
});
|
||||
});
|
||||
|
||||
// ===========================================================================
|
||||
// GET /lookup - Lookup category by name
|
||||
// ===========================================================================
|
||||
describe('GET /lookup', () => {
|
||||
it('should return 200 with category for exact name match', async () => {
|
||||
// Arrange
|
||||
const mockCategory = createMockCategory({
|
||||
category_id: 3,
|
||||
name: 'Dairy & Eggs',
|
||||
});
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(mockCategory);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get(
|
||||
'/api/v1/categories/lookup?name=Dairy%20%26%20Eggs',
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.data).toEqual(mockCategory);
|
||||
expect(mockCategoryDbService.getCategoryByName).toHaveBeenCalledWith(
|
||||
'Dairy & Eggs',
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 200 with category for case-insensitive name match', async () => {
|
||||
// Arrange
|
||||
const mockCategory = createMockCategory({
|
||||
category_id: 3,
|
||||
name: 'Dairy & Eggs',
|
||||
});
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(mockCategory);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get(
|
||||
'/api/v1/categories/lookup?name=dairy%20%26%20eggs',
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.success).toBe(true);
|
||||
expect(response.body.data).toEqual(mockCategory);
|
||||
// The service receives the original query, case-insensitivity is handled in the DB query
|
||||
expect(mockCategoryDbService.getCategoryByName).toHaveBeenCalledWith(
|
||||
'dairy & eggs',
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 404 for non-existent category name', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(null);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get(
|
||||
'/api/v1/categories/lookup?name=NonExistentCategory',
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(404);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain("Category 'NonExistentCategory' not found");
|
||||
});
|
||||
|
||||
it('should return 400 when name query parameter is missing', async () => {
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/lookup');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain('required');
|
||||
expect(mockCategoryDbService.getCategoryByName).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 for empty name query parameter', async () => {
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/lookup?name=');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain('required');
|
||||
expect(mockCategoryDbService.getCategoryByName).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return 400 for whitespace-only name query parameter', async () => {
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/lookup?name=%20%20%20');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.success).toBe(false);
|
||||
expect(response.body.error).toContain('required');
|
||||
expect(mockCategoryDbService.getCategoryByName).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle URL-encoded special characters in name', async () => {
|
||||
// Arrange
|
||||
const mockCategory = createMockCategory({
|
||||
category_id: 1,
|
||||
name: 'Meat & Seafood',
|
||||
});
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(mockCategory);
|
||||
|
||||
// Act - URL encoded & is %26
|
||||
const response = await supertest(app).get(
|
||||
'/api/v1/categories/lookup?name=Meat%20%26%20Seafood',
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.data.name).toBe('Meat & Seafood');
|
||||
expect(mockCategoryDbService.getCategoryByName).toHaveBeenCalledWith(
|
||||
'Meat & Seafood',
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle names with only special characters', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(null);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/lookup?name=%26%26%26');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(404);
|
||||
expect(mockCategoryDbService.getCategoryByName).toHaveBeenCalledWith('&&&', expectLogger);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('Database unavailable');
|
||||
mockCategoryDbService.getCategoryByName.mockRejectedValue(dbError);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/lookup?name=TestCategory');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.error.message).toBe('Database unavailable');
|
||||
});
|
||||
|
||||
it('should handle very long category names', async () => {
|
||||
// Arrange
|
||||
const longName = 'A'.repeat(500);
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(null);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get(
|
||||
`/api/v1/categories/lookup?name=${encodeURIComponent(longName)}`,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(404);
|
||||
expect(mockCategoryDbService.getCategoryByName).toHaveBeenCalledWith(longName, expectLogger);
|
||||
});
|
||||
|
||||
it('should handle names with unicode characters', async () => {
|
||||
// Arrange
|
||||
const unicodeName = 'Fruits et Légumes';
|
||||
const mockCategory = createMockCategory({
|
||||
category_id: 10,
|
||||
name: unicodeName,
|
||||
});
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(mockCategory);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get(
|
||||
`/api/v1/categories/lookup?name=${encodeURIComponent(unicodeName)}`,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.data.name).toBe(unicodeName);
|
||||
});
|
||||
|
||||
it('should pass names with leading/trailing spaces through to the service unchanged', async () => {
|
||||
// Act - note: the trim check happens before calling the service
|
||||
// A name like " Dairy " will fail the trim() === '' check
|
||||
const _response = await supertest(app).get('/api/v1/categories/lookup?name=%20Dairy%20');
|
||||
|
||||
// Assert - The name ' Dairy ' has non-whitespace content after trim, so it passes validation
|
||||
expect(mockCategoryDbService.getCategoryByName).toHaveBeenCalledWith(' Dairy ', expectLogger);
|
||||
});
|
||||
});
|
||||
|
||||
// ===========================================================================
|
||||
// Edge Cases and Error Handling
|
||||
// ===========================================================================
|
||||
describe('Edge Cases', () => {
|
||||
it('should not require authentication for GET /', async () => {
|
||||
// Arrange - no authentication setup needed for public routes
|
||||
mockCategoryDbService.getAllCategories.mockResolvedValue([]);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it('should not require authentication for GET /:id', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getCategoryById.mockResolvedValue(
|
||||
createMockCategory({ category_id: 1, name: 'Test' }),
|
||||
);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/1');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it('should not require authentication for GET /lookup', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(
|
||||
createMockCategory({ category_id: 1, name: 'Test' }),
|
||||
);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/lookup?name=Test');
|
||||
|
||||
// Assert
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it('should return consistent response format for success', async () => {
|
||||
// Arrange
|
||||
const mockCategories = [createMockCategory({ category_id: 1, name: 'Test' })];
|
||||
mockCategoryDbService.getAllCategories.mockResolvedValue(mockCategories);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories');
|
||||
|
||||
// Assert - verify consistent API response format
|
||||
expect(response.body).toHaveProperty('success', true);
|
||||
expect(response.body).toHaveProperty('data');
|
||||
expect(response.body.data).toEqual(mockCategories);
|
||||
});
|
||||
|
||||
it('should return consistent response format for validation errors', async () => {
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/invalid');
|
||||
|
||||
// Assert
|
||||
expect(response.body).toHaveProperty('success', false);
|
||||
expect(response.body).toHaveProperty('error');
|
||||
expect(typeof response.body.error).toBe('string');
|
||||
});
|
||||
|
||||
it('should return consistent response format for not found errors', async () => {
|
||||
// Arrange
|
||||
mockCategoryDbService.getCategoryById.mockResolvedValue(null);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app).get('/api/v1/categories/999');
|
||||
|
||||
// Assert
|
||||
expect(response.body).toHaveProperty('success', false);
|
||||
expect(response.body).toHaveProperty('error');
|
||||
expect(typeof response.body.error).toBe('string');
|
||||
});
|
||||
});
|
||||
|
||||
// ===========================================================================
|
||||
// Route Ordering Tests (ensure /lookup is matched before /:id)
|
||||
// ===========================================================================
|
||||
describe('Route Ordering', () => {
|
||||
it('should route /lookup correctly instead of treating it as an ID', async () => {
|
||||
// This tests that the router correctly matches /lookup before /:id
|
||||
// If route ordering were wrong, 'lookup' would be parsed as a category ID
|
||||
mockCategoryDbService.getCategoryByName.mockResolvedValue(
|
||||
createMockCategory({ category_id: 1, name: 'Test' }),
|
||||
);
|
||||
|
||||
const _response = await supertest(app).get('/api/v1/categories/lookup?name=Test');
|
||||
|
||||
// Assert that getCategoryByName was called, not getCategoryById
|
||||
expect(mockCategoryDbService.getCategoryByName).toHaveBeenCalled();
|
||||
expect(mockCategoryDbService.getCategoryById).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
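The "Route Ordering" tests above rely on the static /lookup path being registered before the parameterized /:id path. A minimal sketch of that registration order in Express, with placeholder handlers rather than the actual category.routes implementation:

import { Router, type Request, type Response } from 'express';

const router = Router();

// Placeholder handlers; the real ones live in category.routes.ts.
const lookupByName = (_req: Request, res: Response) => res.json({ success: true });
const getById = (_req: Request, res: Response) => res.json({ success: true });

// Static /lookup must be registered before the parameterized /:id route,
// otherwise 'lookup' would be parsed as a category ID.
router.get('/lookup', lookupByName);
router.get('/:id', getById);

export default router;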
@@ -7,7 +7,7 @@ import {
|
||||
createMockRecipeComment,
|
||||
createMockUserProfile,
|
||||
} from '../tests/utils/mockFactories';
|
||||
import { NotFoundError, ForeignKeyConstraintError } from '../services/db/errors.db';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
|
||||
// 1. Mock the Service Layer directly.
|
||||
@@ -18,6 +18,8 @@ vi.mock('../services/db/index.db', () => ({
|
||||
getRecipeById: vi.fn(),
|
||||
findRecipesByIngredientAndTag: vi.fn(),
|
||||
getRecipeComments: vi.fn(),
|
||||
addRecipeComment: vi.fn(),
|
||||
forkRecipe: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -70,7 +72,9 @@ describe('Recipe Routes (/api/v1/recipes)', () => {
|
||||
const mockRecipes = [createMockRecipe({ recipe_id: 1, name: 'Pasta' })];
|
||||
vi.mocked(db.recipeRepo.getRecipesBySalePercentage).mockResolvedValue(mockRecipes);
|
||||
|
||||
const response = await supertest(app).get(
  '/api/v1/recipes/by-sale-percentage?minPercentage=75',
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.data).toEqual(mockRecipes);
|
||||
@@ -268,7 +272,9 @@ describe('Recipe Routes (/api/v1/recipes)', () => {
|
||||
const mockSuggestion = 'Chicken and Rice Casserole...';
|
||||
vi.mocked(aiService.generateRecipeSuggestion).mockResolvedValue(mockSuggestion);
|
||||
|
||||
const response = await supertest(authApp)
  .post('/api/v1/recipes/suggest')
  .send({ ingredients });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.data).toEqual({ suggestion: mockSuggestion });
|
||||
@@ -382,4 +388,262 @@ describe('Recipe Routes (/api/v1/recipes)', () => {
|
||||
expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /:recipeId/comments', () => {
|
||||
const mockUser = createMockUserProfile({ user: { user_id: 'comment-user-123' } });
|
||||
const authApp = createTestApp({
|
||||
router: recipeRouter,
|
||||
basePath: '/api/v1/recipes',
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
const unauthApp = createTestApp({ router: recipeRouter, basePath: '/api/v1/recipes' });
|
||||
|
||||
it('should return 401 Unauthorized if user is not authenticated', async () => {
|
||||
const response = await supertest(unauthApp)
|
||||
.post('/api/v1/recipes/1/comments')
|
||||
.send({ content: 'Great recipe!' });
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
});
|
||||
|
||||
it('should successfully add a comment to a recipe', async () => {
|
||||
      const mockComment = createMockRecipeComment({
        recipe_id: 1,
        user_id: mockUser.user.user_id,
        content: 'This is delicious!',
      });
      vi.mocked(db.recipeRepo.addRecipeComment).mockResolvedValue(mockComment);

      const response = await supertest(authApp)
        .post('/api/v1/recipes/1/comments')
        .send({ content: 'This is delicious!' });

      expect(response.status).toBe(201);
      expect(response.body.data).toEqual(mockComment);
      expect(db.recipeRepo.addRecipeComment).toHaveBeenCalledWith(
        1,
        mockUser.user.user_id,
        'This is delicious!',
        expectLogger,
        undefined,
      );
    });

    it('should successfully add a reply to an existing comment', async () => {
      const mockComment = createMockRecipeComment({
        recipe_id: 1,
        user_id: mockUser.user.user_id,
        content: 'I agree!',
        parent_comment_id: 5,
      });
      vi.mocked(db.recipeRepo.addRecipeComment).mockResolvedValue(mockComment);

      const response = await supertest(authApp)
        .post('/api/v1/recipes/1/comments')
        .send({ content: 'I agree!', parentCommentId: 5 });

      expect(response.status).toBe(201);
      expect(response.body.data).toEqual(mockComment);
      expect(db.recipeRepo.addRecipeComment).toHaveBeenCalledWith(
        1,
        mockUser.user.user_id,
        'I agree!',
        expectLogger,
        5,
      );
    });

    it('should return 400 if content is missing', async () => {
      const response = await supertest(authApp).post('/api/v1/recipes/1/comments').send({});

      expect(response.status).toBe(400);
      expect(response.body.error.details[0].message).toBe('Comment content is required.');
    });

    it('should return 400 if content is empty string', async () => {
      const response = await supertest(authApp)
        .post('/api/v1/recipes/1/comments')
        .send({ content: '' });

      expect(response.status).toBe(400);
      expect(response.body.error.details[0].message).toBe('Comment content is required.');
    });

    it('should return 400 for an invalid recipeId', async () => {
      const response = await supertest(authApp)
        .post('/api/v1/recipes/abc/comments')
        .send({ content: 'Test comment' });

      expect(response.status).toBe(400);
      expect(response.body.error.details[0].message).toContain('received NaN');
    });

    it('should return 400 for an invalid parentCommentId', async () => {
      const response = await supertest(authApp)
        .post('/api/v1/recipes/1/comments')
        .send({ content: 'Test comment', parentCommentId: 'invalid' });

      expect(response.status).toBe(400);
    });

    it('should return 400 if recipe or parent comment does not exist (foreign key violation)', async () => {
      const fkError = new ForeignKeyConstraintError(
        'The specified recipe, user, or parent comment does not exist.',
      );
      vi.mocked(db.recipeRepo.addRecipeComment).mockRejectedValue(fkError);

      const response = await supertest(authApp)
        .post('/api/v1/recipes/999/comments')
        .send({ content: 'Comment on non-existent recipe' });

      expect(response.status).toBe(400);
      expect(response.body.error.message).toContain('does not exist');
    });

    it('should return 500 if the database call fails', async () => {
      const dbError = new Error('DB Error');
      vi.mocked(db.recipeRepo.addRecipeComment).mockRejectedValue(dbError);

      const response = await supertest(authApp)
        .post('/api/v1/recipes/1/comments')
        .send({ content: 'Test comment' });

      expect(response.status).toBe(500);
      expect(response.body.error.message).toBe('DB Error');
      expect(mockLogger.error).toHaveBeenCalledWith(
        { error: dbError },
        'Error adding comment to recipe ID 1:',
      );
    });
  });

  describe('POST /:recipeId/fork', () => {
    const mockUser = createMockUserProfile({ user: { user_id: 'fork-user-456' } });
    const authApp = createTestApp({
      router: recipeRouter,
      basePath: '/api/v1/recipes',
      authenticatedUser: mockUser,
    });
    const unauthApp = createTestApp({ router: recipeRouter, basePath: '/api/v1/recipes' });

    it('should return 401 Unauthorized if user is not authenticated', async () => {
      const response = await supertest(unauthApp).post('/api/v1/recipes/1/fork');

      expect(response.status).toBe(401);
    });

    it('should successfully fork a recipe', async () => {
      const forkedRecipe = createMockRecipe({
        recipe_id: 20,
        name: 'Original Recipe (Fork)',
        user_id: mockUser.user.user_id,
        original_recipe_id: 10,
      });
      vi.mocked(db.recipeRepo.forkRecipe).mockResolvedValue(forkedRecipe);

      const response = await supertest(authApp).post('/api/v1/recipes/10/fork');

      expect(response.status).toBe(201);
      expect(response.body.data).toEqual(forkedRecipe);
      expect(db.recipeRepo.forkRecipe).toHaveBeenCalledWith(
        mockUser.user.user_id,
        10,
        expectLogger,
      );
    });

    it('should return 400 for an invalid recipeId', async () => {
      const response = await supertest(authApp).post('/api/v1/recipes/abc/fork');

      expect(response.status).toBe(400);
      expect(response.body.error.details[0].message).toContain('received NaN');
    });

    it('should return 400 if recipe does not exist (foreign key violation)', async () => {
      const fkError = new ForeignKeyConstraintError(
        'The specified user or original recipe does not exist.',
      );
      vi.mocked(db.recipeRepo.forkRecipe).mockRejectedValue(fkError);

      const response = await supertest(authApp).post('/api/v1/recipes/999/fork');

      expect(response.status).toBe(400);
      expect(response.body.error.message).toContain('does not exist');
    });

    it('should return 500 if database function raises an error (e.g., recipe not public)', async () => {
      const dbFunctionError = new Error('Cannot fork a private recipe.');
      vi.mocked(db.recipeRepo.forkRecipe).mockRejectedValue(dbFunctionError);

      const response = await supertest(authApp).post('/api/v1/recipes/5/fork');

      expect(response.status).toBe(500);
      expect(response.body.error.message).toBe('Cannot fork a private recipe.');
      expect(mockLogger.error).toHaveBeenCalledWith(
        { error: dbFunctionError },
        'Error forking recipe ID 5:',
      );
    });

    it('should return 500 if the database call fails', async () => {
      const dbError = new Error('DB Error');
      vi.mocked(db.recipeRepo.forkRecipe).mockRejectedValue(dbError);

      const response = await supertest(authApp).post('/api/v1/recipes/1/fork');

      expect(response.status).toBe(500);
      expect(response.body.error.message).toBe('DB Error');
      expect(mockLogger.error).toHaveBeenCalledWith(
        { error: dbError },
        'Error forking recipe ID 1:',
      );
    });
  });

  describe('Rate Limiting on POST /:recipeId/comments', () => {
    const mockUser = createMockUserProfile({ user: { user_id: 'rate-limit-comment-user' } });
    const authApp = createTestApp({
      router: recipeRouter,
      basePath: '/api/v1/recipes',
      authenticatedUser: mockUser,
    });

    it('should apply userUpdateLimiter to POST /:recipeId/comments', async () => {
      const mockComment = createMockRecipeComment({});
      vi.mocked(db.recipeRepo.addRecipeComment).mockResolvedValue(mockComment);

      const response = await supertest(authApp)
        .post('/api/v1/recipes/1/comments')
        .set('X-Test-Rate-Limit-Enable', 'true')
        .send({ content: 'Test comment' });

      expect(response.status).toBe(201);
      expect(response.headers).toHaveProperty('ratelimit-limit');
      // userUpdateLimiter has limit of 100 per 15 minutes
      expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
    });
  });

  describe('Rate Limiting on POST /:recipeId/fork', () => {
    const mockUser = createMockUserProfile({ user: { user_id: 'rate-limit-fork-user' } });
    const authApp = createTestApp({
      router: recipeRouter,
      basePath: '/api/v1/recipes',
      authenticatedUser: mockUser,
    });

    it('should apply userUpdateLimiter to POST /:recipeId/fork', async () => {
      const forkedRecipe = createMockRecipe({});
      vi.mocked(db.recipeRepo.forkRecipe).mockResolvedValue(forkedRecipe);

      const response = await supertest(authApp)
        .post('/api/v1/recipes/1/fork')
        .set('X-Test-Rate-Limit-Enable', 'true');

      expect(response.status).toBe(201);
      expect(response.headers).toHaveProperty('ratelimit-limit');
      // userUpdateLimiter has limit of 100 per 15 minutes
      expect(parseInt(response.headers['ratelimit-limit'])).toBe(100);
    });
  });
});
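The two rate-limiting blocks above only assert the standard `RateLimit-*` response headers (limit 100 per 15 minutes) and a test-only enable header. A minimal sketch of a limiter that would satisfy those assertions, assuming `express-rate-limit` is used and that the name `userUpdateLimiter` and the `X-Test-Rate-Limit-Enable` gating live in the route middleware (the real configuration may differ):

```typescript
// Hypothetical sketch only; the actual userUpdateLimiter is defined in the app's middleware.
import rateLimit from 'express-rate-limit';

export const userUpdateLimiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes, matching the window the tests describe
  limit: 100,               // surfaces as the `ratelimit-limit` header asserted above
  standardHeaders: true,    // emit RateLimit-* headers instead of X-RateLimit-*
  legacyHeaders: false,
  // Assumed: limiting is skipped in tests unless a request opts back in with the
  // X-Test-Rate-Limit-Enable header, which is what the tests above send.
  skip: (req) =>
    process.env.NODE_ENV === 'test' && req.get('X-Test-Rate-Limit-Enable') !== 'true',
});
```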
@@ -2,7 +2,18 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { AddressRepository } from './address.db';
import type { Address } from '../../types';
import { UniqueConstraintError, NotFoundError } from './errors.db';
import {
  UniqueConstraintError,
  NotFoundError,
  ForeignKeyConstraintError,
  NotNullConstraintError,
  CheckConstraintError,
  InvalidTextRepresentationError,
  NumericValueOutOfRangeError,
} from './errors.db';

// Un-mock the module we are testing to ensure we use the real implementation.
vi.unmock('./address.db');

// Mock dependencies
vi.mock('../logger.server', () => ({
@@ -16,6 +27,23 @@ describe('Address DB Service', () => {
    query: vi.fn(),
  };

  // Helper function to create a mock address with default values
  const createMockAddress = (overrides: Partial<Address> = {}): Address => ({
    address_id: 1,
    address_line_1: '123 Main St',
    address_line_2: null,
    city: 'Anytown',
    province_state: 'CA',
    postal_code: '12345',
    country: 'USA',
    latitude: null,
    longitude: null,
    location: null,
    created_at: new Date().toISOString(),
    updated_at: new Date().toISOString(),
    ...overrides,
  });
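The helper above builds a complete `Address` with sensible defaults and lets each test override only the fields it cares about. A usage sketch (the values here are illustrative, not from the suite):

```typescript
// Only the fields under test are overridden; everything else keeps its default.
const torontoAddress = createMockAddress({ city: 'Toronto', postal_code: 'M5V 3A1' });
// Untouched defaults remain available for assertions.
// torontoAddress.country === 'USA'
```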
  beforeEach(() => {
    vi.clearAllMocks();
    mockDb.query.mockReset();
@@ -24,16 +52,7 @@ describe('Address DB Service', () => {

  describe('getAddressById', () => {
    it('should return an address if found', async () => {
      const mockAddress: Address = {
        address_id: 1,
        address_line_1: '123 Main St',
        city: 'Anytown',
        province_state: 'CA',
        postal_code: '12345',
        country: 'USA',
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      };
      const mockAddress = createMockAddress();
      mockDb.query.mockResolvedValue({ rows: [mockAddress], rowCount: 1 });

      const result = await addressRepo.getAddressById(1, mockLogger);
@@ -65,6 +84,51 @@ describe('Address DB Service', () => {
        'Database error in getAddressById',
      );
    });

    it('should handle InvalidTextRepresentationError for invalid ID format', async () => {
      const dbError = new Error('invalid input syntax for type integer');
      (dbError as any).code = '22P02';
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.getAddressById(NaN, mockLogger)).rejects.toThrow(
        InvalidTextRepresentationError,
      );
    });

    it('should handle edge case with address_id of 0', async () => {
      mockDb.query.mockResolvedValue({ rowCount: 0, rows: [] });
      await expect(addressRepo.getAddressById(0, mockLogger)).rejects.toThrow(NotFoundError);
      expect(mockDb.query).toHaveBeenCalledWith(
        'SELECT * FROM public.addresses WHERE address_id = $1',
        [0],
      );
    });

    it('should handle edge case with negative address_id', async () => {
      mockDb.query.mockResolvedValue({ rowCount: 0, rows: [] });
      await expect(addressRepo.getAddressById(-1, mockLogger)).rejects.toThrow(NotFoundError);
      expect(mockDb.query).toHaveBeenCalledWith(
        'SELECT * FROM public.addresses WHERE address_id = $1',
        [-1],
      );
    });

    it('should return address with all optional fields populated', async () => {
      const mockAddress = createMockAddress({
        address_line_2: 'Suite 100',
        latitude: 37.7749,
        longitude: -122.4194,
        location: { type: 'Point', coordinates: [-122.4194, 37.7749] },
      });
      mockDb.query.mockResolvedValue({ rows: [mockAddress], rowCount: 1 });

      const result = await addressRepo.getAddressById(1, mockLogger);

      expect(result.address_line_2).toBe('Suite 100');
      expect(result.latitude).toBe(37.7749);
      expect(result.longitude).toBe(-122.4194);
      expect(result.location).toEqual({ type: 'Point', coordinates: [-122.4194, 37.7749] });
    });
  });

  describe('upsertAddress', () => {
@@ -131,5 +195,400 @@ describe('Address DB Service', () => {
        'Database error in upsertAddress',
      );
    });

    it('should handle NotNullConstraintError when required field is null', async () => {
      const addressData = { address_line_1: null as unknown as string };
      const dbError = new Error('null value in column violates not-null constraint');
      (dbError as any).code = '23502';
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.upsertAddress(addressData, mockLogger)).rejects.toThrow(
        NotNullConstraintError,
      );
    });

    it('should handle CheckConstraintError when check constraint is violated', async () => {
      const addressData = { address_line_1: '123 Test St', postal_code: '' };
      const dbError = new Error('check constraint violation');
      (dbError as any).code = '23514';
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.upsertAddress(addressData, mockLogger)).rejects.toThrow(
        CheckConstraintError,
      );
    });

    it('should handle upsert with all address fields', async () => {
      const fullAddressData = {
        address_line_1: '100 Complete St',
        address_line_2: 'Apt 1',
        city: 'Fullville',
        province_state: 'NY',
        postal_code: '10001',
        country: 'USA',
        latitude: 40.7128,
        longitude: -74.006,
      };
      mockDb.query.mockResolvedValue({ rows: [{ address_id: 5 }] });

      const result = await addressRepo.upsertAddress(fullAddressData, mockLogger);

      expect(result).toBe(5);
      const [query, values] = mockDb.query.mock.calls[0];
      expect(query).toContain('INSERT INTO public.addresses');
      expect(values).toContain('100 Complete St');
      expect(values).toContain('Apt 1');
      expect(values).toContain('Fullville');
      expect(values).toContain('NY');
      expect(values).toContain('10001');
      expect(values).toContain('USA');
      expect(values).toContain(40.7128);
      expect(values).toContain(-74.006);
    });

    it('should handle update with partial fields', async () => {
      const partialUpdate = { address_id: 1, city: 'UpdatedCity' };
      mockDb.query.mockResolvedValue({ rows: [{ address_id: 1 }] });

      const result = await addressRepo.upsertAddress(partialUpdate, mockLogger);

      expect(result).toBe(1);
      const [query, values] = mockDb.query.mock.calls[0];
      expect(query).toContain('ON CONFLICT (address_id) DO UPDATE');
      expect(query).toContain('city = EXCLUDED.city');
      expect(values).toEqual([1, 'UpdatedCity']);
    });

    it('should handle NumericValueOutOfRangeError for invalid latitude/longitude', async () => {
      const addressData = { address_line_1: '123 Test St', latitude: 999999 };
      const dbError = new Error('numeric value out of range');
      (dbError as any).code = '22003';
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.upsertAddress(addressData, mockLogger)).rejects.toThrow(
        NumericValueOutOfRangeError,
      );
    });

    it('should handle ForeignKeyConstraintError if a FK is violated', async () => {
      const addressData = { address_line_1: '123 FK St' };
      const dbError = new Error('violates foreign key constraint');
      (dbError as any).code = '23503';
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.upsertAddress(addressData, mockLogger)).rejects.toThrow(
        ForeignKeyConstraintError,
      );
    });
  });
  });

  describe('searchAddressesByText', () => {
    it('should execute the correct query and return matching addresses', async () => {
      const mockAddresses = [
        createMockAddress({ address_id: 1, city: 'Toronto' }),
        createMockAddress({ address_id: 2, city: 'Toronto East' }),
      ];
      mockDb.query.mockResolvedValue({ rows: mockAddresses });

      const result = await addressRepo.searchAddressesByText('Toronto', mockLogger);

      expect(result).toEqual(mockAddresses);
      expect(mockDb.query).toHaveBeenCalledWith(expect.stringContaining('WHERE'), [
        '%Toronto%',
        10,
      ]);
      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('address_line_1 ILIKE $1'),
        ['%Toronto%', 10],
      );
      expect(mockDb.query).toHaveBeenCalledWith(expect.stringContaining('city ILIKE $1'), [
        '%Toronto%',
        10,
      ]);
      expect(mockDb.query).toHaveBeenCalledWith(expect.stringContaining('postal_code ILIKE $1'), [
        '%Toronto%',
        10,
      ]);
    });

    it('should return an empty array if no addresses match', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      const result = await addressRepo.searchAddressesByText('NonexistentCity', mockLogger);

      expect(result).toEqual([]);
      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ['%NonexistentCity%', 10]);
    });

    it('should use custom limit when provided', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      await addressRepo.searchAddressesByText('Test', mockLogger, 5);

      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ['%Test%', 5]);
    });

    it('should use default limit of 10 when not provided', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      await addressRepo.searchAddressesByText('Test', mockLogger);

      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ['%Test%', 10]);
    });

    it('should throw a generic error if the database query fails', async () => {
      const dbError = new Error('DB Connection Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.searchAddressesByText('Toronto', mockLogger)).rejects.toThrow(
        'Failed to search addresses.',
      );
      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, query: 'Toronto', limit: 10 },
        'Database error in searchAddressesByText',
      );
    });

    it('should throw error with correct context when custom limit is used', async () => {
      const dbError = new Error('DB Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.searchAddressesByText('Test', mockLogger, 25)).rejects.toThrow(
        'Failed to search addresses.',
      );
      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, query: 'Test', limit: 25 },
        'Database error in searchAddressesByText',
      );
    });

    it('should handle searching by postal code', async () => {
      const mockAddresses = [createMockAddress({ address_id: 1, postal_code: 'M5V 3A1' })];
      mockDb.query.mockResolvedValue({ rows: mockAddresses });

      const result = await addressRepo.searchAddressesByText('M5V', mockLogger);

      expect(result).toEqual(mockAddresses);
      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ['%M5V%', 10]);
    });

    it('should handle searching by street address', async () => {
      const mockAddresses = [createMockAddress({ address_id: 1, address_line_1: '100 King St W' })];
      mockDb.query.mockResolvedValue({ rows: mockAddresses });

      const result = await addressRepo.searchAddressesByText('King St', mockLogger);

      expect(result).toEqual(mockAddresses);
    });

    it('should handle empty search string', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      const result = await addressRepo.searchAddressesByText('', mockLogger);

      expect(result).toEqual([]);
      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ['%%', 10]);
    });

    it('should handle special characters in search query', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      await addressRepo.searchAddressesByText("O'Brien", mockLogger);

      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ["%O'Brien%", 10]);
    });

    it('should return results ordered by city and address_line_1', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      await addressRepo.searchAddressesByText('Test', mockLogger);

      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('ORDER BY city ASC, address_line_1 ASC'),
        expect.any(Array),
      );
    });

    it('should handle limit of 0', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      await addressRepo.searchAddressesByText('Test', mockLogger, 0);

      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ['%Test%', 0]);
    });

    it('should handle large limit values', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      await addressRepo.searchAddressesByText('Test', mockLogger, 1000);

      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), ['%Test%', 1000]);
    });
  });
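The assertions in the `searchAddressesByText` block pin down the shape of the query: `ILIKE` matches on `address_line_1`, `city`, and `postal_code`, a fixed ordering, and a parameterized limit. A sketch of the SQL those assertions imply, assuming the actual statement in `address.db.ts` may differ in column list and formatting:

```typescript
// Query shape inferred from the tests above, not copied from the implementation.
const searchSql = `
  SELECT * FROM public.addresses
  WHERE address_line_1 ILIKE $1
     OR city ILIKE $1
     OR postal_code ILIKE $1
  ORDER BY city ASC, address_line_1 ASC
  LIMIT $2`;

// Presumed call site: the search term is wrapped in wildcards and the limit defaults to 10.
// await db.query(searchSql, [`%${query}%`, limit ?? 10]);
```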
  describe('getAddressesByStoreId', () => {
    it('should execute the correct query and return addresses for a store', async () => {
      const mockAddresses = [
        createMockAddress({ address_id: 1 }),
        createMockAddress({ address_id: 2 }),
      ];
      mockDb.query.mockResolvedValue({ rows: mockAddresses });

      const result = await addressRepo.getAddressesByStoreId(1, mockLogger);

      expect(result).toEqual(mockAddresses);
      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('FROM public.addresses a'),
        [1],
      );
      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining(
          'INNER JOIN public.store_locations sl ON a.address_id = sl.address_id',
        ),
        [1],
      );
      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('WHERE sl.store_id = $1'),
        [1],
      );
    });

    it('should return an empty array if the store has no addresses', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      const result = await addressRepo.getAddressesByStoreId(999, mockLogger);

      expect(result).toEqual([]);
      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), [999]);
    });

    it('should return an empty array for a non-existent store', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      const result = await addressRepo.getAddressesByStoreId(0, mockLogger);

      expect(result).toEqual([]);
    });

    it('should throw a generic error if the database query fails', async () => {
      const dbError = new Error('DB Connection Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.getAddressesByStoreId(1, mockLogger)).rejects.toThrow(
        'Failed to retrieve addresses for store.',
      );
      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, storeId: 1 },
        'Database error in getAddressesByStoreId',
      );
    });

    it('should handle store_id of 0', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      const result = await addressRepo.getAddressesByStoreId(0, mockLogger);

      expect(result).toEqual([]);
      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), [0]);
    });

    it('should handle negative store_id', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      const result = await addressRepo.getAddressesByStoreId(-1, mockLogger);

      expect(result).toEqual([]);
      expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), [-1]);
    });

    it('should handle InvalidTextRepresentationError for invalid store ID format', async () => {
      const dbError = new Error('invalid input syntax for type integer');
      (dbError as any).code = '22P02';
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.getAddressesByStoreId(NaN, mockLogger)).rejects.toThrow(
        InvalidTextRepresentationError,
      );
    });

    it('should return results ordered by store_location created_at ASC', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      await addressRepo.getAddressesByStoreId(1, mockLogger);

      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('ORDER BY sl.created_at ASC'),
        expect.any(Array),
      );
    });

    it('should return multiple addresses when store has multiple locations', async () => {
      const mockAddresses = [
        createMockAddress({ address_id: 1, city: 'Toronto' }),
        createMockAddress({ address_id: 2, city: 'Vancouver' }),
        createMockAddress({ address_id: 3, city: 'Montreal' }),
      ];
      mockDb.query.mockResolvedValue({ rows: mockAddresses });

      const result = await addressRepo.getAddressesByStoreId(1, mockLogger);

      expect(result).toHaveLength(3);
      expect(result[0].city).toBe('Toronto');
      expect(result[1].city).toBe('Vancouver');
      expect(result[2].city).toBe('Montreal');
    });
  });

  describe('Repository instantiation', () => {
    it('should use provided db connection', () => {
      const customDb = { query: vi.fn() };
      const repo = new AddressRepository(customDb);

      expect(repo).toBeDefined();
    });

    it('should work with default pool when no db provided', () => {
      // This tests that the constructor can be called without arguments
      // The default getPool() will be used - we don't test the actual pool here
      // as that would require mocking the connection module
      expect(() => new AddressRepository(mockDb)).not.toThrow();
    });
  });

  describe('Error handling edge cases', () => {
    it('should rethrow NotFoundError without wrapping', async () => {
      mockDb.query.mockResolvedValue({ rowCount: 0, rows: [] });

      try {
        await addressRepo.getAddressById(999, mockLogger);
        expect.fail('Should have thrown');
      } catch (error) {
        expect(error).toBeInstanceOf(NotFoundError);
        expect((error as NotFoundError).status).toBe(404);
      }
    });

    it('should handle PostgreSQL error with constraint and detail properties', async () => {
      const dbError = new Error('duplicate key');
      (dbError as any).code = '23505';
      (dbError as any).constraint = 'addresses_pkey';
      (dbError as any).detail = 'Key (address_id)=(1) already exists.';
      mockDb.query.mockRejectedValue(dbError);

      await expect(addressRepo.upsertAddress({ address_id: 1 }, mockLogger)).rejects.toThrow(
        UniqueConstraintError,
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        {
          err: dbError,
          address: { address_id: 1 },
          code: '23505',
          constraint: 'addresses_pkey',
          detail: 'Key (address_id)=(1) already exists.',
        },
        'Database error in upsertAddress',
      );
    });
  });
});
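The address tests drive specific PostgreSQL error codes through the repository and expect typed errors from `errors.db.ts`. A minimal sketch of the code-to-error mapping those tests imply; the real `handleDbError` helper may take different arguments and wrap messages differently:

```typescript
// Assumed mapping only, inferred from the test expectations; see errors.db.ts for the real helper.
function mapPgError(err: Error & { code?: string }): Error {
  switch (err.code) {
    case '23505': return new UniqueConstraintError(err.message);
    case '23503': return new ForeignKeyConstraintError(err.message);
    case '23502': return new NotNullConstraintError(err.message);
    case '23514': return new CheckConstraintError(err.message);
    case '22P02': return new InvalidTextRepresentationError(err.message);
    case '22003': return new NumericValueOutOfRangeError(err.message);
    default:
      return err; // repositories wrap unknown failures in a module-specific message
  }
}
```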
579 src/services/db/category.db.test.ts (new file)
@@ -0,0 +1,579 @@
// src/services/db/category.db.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import type { Pool } from 'pg';

// Un-mock the module we are testing to ensure we use the real implementation.
vi.unmock('./category.db');

// Mock the logger to prevent console output during tests
vi.mock('../logger.server', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));
import { logger as mockLogger } from '../logger.server';

// Mock the connection module to control getPool
vi.mock('./connection.db', () => ({
  getPool: vi.fn(),
}));
import { getPool } from './connection.db';

import { CategoryDbService, type Category } from './category.db';
import { DatabaseError } from '../processingErrors';

describe('Category DB Service', () => {
  // Create a mock pool instance with a query method
  const mockPool = {
    query: vi.fn(),
  };

  beforeEach(() => {
    vi.clearAllMocks();
    mockPool.query.mockReset();
    // Mock getPool to return our mock pool
    vi.mocked(getPool).mockReturnValue(mockPool as unknown as Pool);
  });

  describe('getAllCategories', () => {
    it('should execute the correct SELECT query and return all categories ordered by name', async () => {
      // Arrange
      const mockCategories: Category[] = [
        {
          category_id: 1,
          name: 'Bakery',
          created_at: new Date('2024-01-01'),
          updated_at: new Date('2024-01-01'),
        },
        {
          category_id: 2,
          name: 'Dairy & Eggs',
          created_at: new Date('2024-01-01'),
          updated_at: new Date('2024-01-01'),
        },
        {
          category_id: 3,
          name: 'Fruits & Vegetables',
          created_at: new Date('2024-01-01'),
          updated_at: new Date('2024-01-01'),
        },
      ];
      mockPool.query.mockResolvedValue({ rows: mockCategories });

      // Act
      const result = await CategoryDbService.getAllCategories(mockLogger);

      // Assert
      expect(result).toEqual(mockCategories);
      expect(mockPool.query).toHaveBeenCalledTimes(1);
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('SELECT category_id, name, created_at, updated_at'),
      );
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('FROM public.categories'),
      );
      expect(mockPool.query).toHaveBeenCalledWith(expect.stringContaining('ORDER BY name ASC'));
    });

    it('should return an empty array when no categories exist', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getAllCategories(mockLogger);

      // Assert
      expect(result).toEqual([]);
      expect(mockPool.query).toHaveBeenCalledTimes(1);
    });

    it('should throw a DatabaseError and log when the database query fails', async () => {
      // Arrange
      const dbError = new Error('Connection refused');
      mockPool.query.mockRejectedValue(dbError);

      // Act & Assert
      await expect(CategoryDbService.getAllCategories(mockLogger)).rejects.toThrow(DatabaseError);
      // The DatabaseError from processingErrors.ts uses the default message from handleDbError
      await expect(CategoryDbService.getAllCategories(mockLogger)).rejects.toThrow(
        'Failed to perform operation on database.',
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError },
        'Error fetching all categories',
      );
    });

    it('should handle PostgreSQL specific errors correctly', async () => {
      // Arrange - Simulate a PostgreSQL connection error
      const pgError = new Error('Connection terminated unexpectedly');
      (pgError as Error & { code: string }).code = '57P01'; // Admin shutdown
      mockPool.query.mockRejectedValue(pgError);

      // Act & Assert
      await expect(CategoryDbService.getAllCategories(mockLogger)).rejects.toThrow(DatabaseError);

      expect(mockLogger.error).toHaveBeenCalledWith(
        expect.objectContaining({
          err: pgError,
          code: '57P01',
        }),
        'Error fetching all categories',
      );
    });
  });

  describe('getCategoryById', () => {
    it('should execute the correct SELECT query with category ID parameter', async () => {
      // Arrange
      const mockCategory: Category = {
        category_id: 5,
        name: 'Meat & Seafood',
        created_at: new Date('2024-01-01'),
        updated_at: new Date('2024-01-01'),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryById(5, mockLogger);

      // Assert
      expect(result).toEqual(mockCategory);
      expect(mockPool.query).toHaveBeenCalledTimes(1);
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('SELECT category_id, name, created_at, updated_at'),
        [5],
      );
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('WHERE category_id = $1'),
        [5],
      );
    });

    it('should return null when category is not found', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryById(999, mockLogger);

      // Assert
      expect(result).toBeNull();
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), [999]);
    });

    it('should return null for non-existent category ID of 0', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryById(0, mockLogger);

      // Assert
      expect(result).toBeNull();
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), [0]);
    });

    it('should return null for negative category ID', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryById(-1, mockLogger);

      // Assert
      expect(result).toBeNull();
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), [-1]);
    });

    it('should throw a DatabaseError and log when the database query fails', async () => {
      // Arrange
      const dbError = new Error('Database timeout');
      mockPool.query.mockRejectedValue(dbError);

      // Act & Assert
      await expect(CategoryDbService.getCategoryById(1, mockLogger)).rejects.toThrow(DatabaseError);
      // The DatabaseError from processingErrors.ts uses the default message from handleDbError
      await expect(CategoryDbService.getCategoryById(1, mockLogger)).rejects.toThrow(
        'Failed to perform operation on database.',
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, categoryId: 1 },
        'Error fetching category by ID',
      );
    });

    it('should handle PostgreSQL invalid text representation error for invalid ID type', async () => {
      // Arrange - Simulate PostgreSQL error for invalid ID format
      const pgError = new Error('invalid input syntax for type integer');
      (pgError as Error & { code: string }).code = '22P02';
      mockPool.query.mockRejectedValue(pgError);

      // Act & Assert
      // Note: The implementation catches this and rethrows via handleDbError
      // which converts it to an InvalidTextRepresentationError
      const { InvalidTextRepresentationError } = await import('./errors.db');
      await expect(CategoryDbService.getCategoryById(1, mockLogger)).rejects.toThrow(
        InvalidTextRepresentationError,
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        expect.objectContaining({
          err: pgError,
          categoryId: 1,
          code: '22P02',
        }),
        'Error fetching category by ID',
      );
    });
  });

  describe('getCategoryByName', () => {
    it('should execute the correct SELECT query with case-insensitive name matching', async () => {
      // Arrange
      const mockCategory: Category = {
        category_id: 3,
        name: 'Dairy & Eggs',
        created_at: new Date('2024-01-01'),
        updated_at: new Date('2024-01-01'),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryByName('Dairy & Eggs', mockLogger);

      // Assert
      expect(result).toEqual(mockCategory);
      expect(mockPool.query).toHaveBeenCalledTimes(1);
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('SELECT category_id, name, created_at, updated_at'),
        ['Dairy & Eggs'],
      );
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('WHERE LOWER(name) = LOWER($1)'),
        ['Dairy & Eggs'],
      );
    });

    it('should find category with lowercase input matching uppercase stored name', async () => {
      // Arrange
      const mockCategory: Category = {
        category_id: 3,
        name: 'DAIRY & EGGS',
        created_at: new Date('2024-01-01'),
        updated_at: new Date('2024-01-01'),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryByName('dairy & eggs', mockLogger);

      // Assert
      expect(result).toEqual(mockCategory);
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), ['dairy & eggs']);
    });

    it('should find category with mixed case input', async () => {
      // Arrange
      const mockCategory: Category = {
        category_id: 4,
        name: 'Frozen Foods',
        created_at: new Date('2024-01-01'),
        updated_at: new Date('2024-01-01'),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryByName('fRoZeN fOoDs', mockLogger);

      // Assert
      expect(result).toEqual(mockCategory);
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), ['fRoZeN fOoDs']);
    });

    it('should return null when category name is not found', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryByName('Non-Existent Category', mockLogger);

      // Assert
      expect(result).toBeNull();
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), ['Non-Existent Category']);
    });

    it('should return null for empty string name', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryByName('', mockLogger);

      // Assert
      expect(result).toBeNull();
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), ['']);
    });

    it('should return null for whitespace-only name', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryByName(' ', mockLogger);

      // Assert
      expect(result).toBeNull();
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), [' ']);
    });

    it('should handle special characters in category name', async () => {
      // Arrange
      const mockCategory: Category = {
        category_id: 10,
        name: "Health & Beauty (Women's)",
        created_at: new Date('2024-01-01'),
        updated_at: new Date('2024-01-01'),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryByName(
        "Health & Beauty (Women's)",
        mockLogger,
      );

      // Assert
      expect(result).toEqual(mockCategory);
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), [
        "Health & Beauty (Women's)",
      ]);
    });

    it('should throw a DatabaseError and log when the database query fails', async () => {
      // Arrange
      const dbError = new Error('Query execution failed');
      mockPool.query.mockRejectedValue(dbError);

      // Act & Assert
      await expect(CategoryDbService.getCategoryByName('Test', mockLogger)).rejects.toThrow(
        DatabaseError,
      );
      // The DatabaseError from processingErrors.ts uses the default message from handleDbError
      await expect(CategoryDbService.getCategoryByName('Test', mockLogger)).rejects.toThrow(
        'Failed to perform operation on database.',
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, name: 'Test' },
        'Error fetching category by name',
      );
    });

    it('should handle PostgreSQL specific errors and include name in log context', async () => {
      // Arrange - Simulate a PostgreSQL error
      const pgError = new Error('Out of memory');
      (pgError as Error & { code: string }).code = '53200'; // Out of memory
      mockPool.query.mockRejectedValue(pgError);

      // Act & Assert
      await expect(CategoryDbService.getCategoryByName('LargeQuery', mockLogger)).rejects.toThrow(
        DatabaseError,
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        expect.objectContaining({
          err: pgError,
          name: 'LargeQuery',
          code: '53200',
        }),
        'Error fetching category by name',
      );
    });
  });

  describe('Integration-like scenarios', () => {
    it('should handle multiple sequential calls correctly', async () => {
      // Arrange
      const categories: Category[] = [
        {
          category_id: 1,
          name: 'A Category',
          created_at: new Date(),
          updated_at: new Date(),
        },
      ];
      const singleCategory: Category = {
        category_id: 1,
        name: 'A Category',
        created_at: new Date(),
        updated_at: new Date(),
      };

      mockPool.query
        .mockResolvedValueOnce({ rows: categories }) // getAllCategories
        .mockResolvedValueOnce({ rows: [singleCategory] }) // getCategoryById
        .mockResolvedValueOnce({ rows: [singleCategory] }); // getCategoryByName

      // Act
      const allResult = await CategoryDbService.getAllCategories(mockLogger);
      const byIdResult = await CategoryDbService.getCategoryById(1, mockLogger);
      const byNameResult = await CategoryDbService.getCategoryByName('A Category', mockLogger);

      // Assert
      expect(allResult).toEqual(categories);
      expect(byIdResult).toEqual(singleCategory);
      expect(byNameResult).toEqual(singleCategory);
      expect(mockPool.query).toHaveBeenCalledTimes(3);
    });

    it('should correctly isolate errors between calls', async () => {
      // Arrange
      const dbError = new Error('Transient error');
      const mockCategory: Category = {
        category_id: 1,
        name: 'Test',
        created_at: new Date(),
        updated_at: new Date(),
      };

      mockPool.query.mockRejectedValueOnce(dbError).mockResolvedValueOnce({ rows: [mockCategory] });

      // Act & Assert
      // First call fails
      await expect(CategoryDbService.getCategoryById(1, mockLogger)).rejects.toThrow(DatabaseError);

      // Second call succeeds (simulating recovery)
      const result = await CategoryDbService.getCategoryById(1, mockLogger);
      expect(result).toEqual(mockCategory);
    });
  });

  describe('Edge cases', () => {
    it('should handle very large category ID', async () => {
      // Arrange
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryById(Number.MAX_SAFE_INTEGER, mockLogger);

      // Assert
      expect(result).toBeNull();
      expect(mockPool.query).toHaveBeenCalledWith(expect.any(String), [Number.MAX_SAFE_INTEGER]);
    });

    it('should handle category name with SQL-like content (SQL injection prevention)', async () => {
      // Arrange - This tests that parameterized queries prevent SQL injection
      const maliciousName = "'; DROP TABLE categories; --";
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryByName(maliciousName, mockLogger);

      // Assert
      expect(result).toBeNull();
      // The important thing is that the name is passed as a parameter, not concatenated
      expect(mockPool.query).toHaveBeenCalledWith(
        expect.stringContaining('WHERE LOWER(name) = LOWER($1)'),
        [maliciousName],
      );
    });

    it('should handle unicode characters in category name', async () => {
      // Arrange
      const mockCategory: Category = {
        category_id: 15,
        name: 'Bebidas y Jugos',
        created_at: new Date(),
        updated_at: new Date(),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryByName('Bebidas y Jugos', mockLogger);

      // Assert
      expect(result).toEqual(mockCategory);
    });

    it('should handle category with emoji in name', async () => {
      // Arrange
      const mockCategory: Category = {
        category_id: 20,
        name: 'Snacks',
        created_at: new Date(),
        updated_at: new Date(),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryByName('Snacks', mockLogger);

      // Assert
      expect(result).toEqual(mockCategory);
    });
  });

  describe('Return type verification', () => {
    it('getAllCategories should return Category[] type with all required fields', async () => {
      // Arrange
      const mockCategories: Category[] = [
        {
          category_id: 1,
          name: 'Test Category',
          created_at: new Date('2024-01-15T10:30:00Z'),
          updated_at: new Date('2024-01-15T10:30:00Z'),
        },
      ];
      mockPool.query.mockResolvedValue({ rows: mockCategories });

      // Act
      const result = await CategoryDbService.getAllCategories(mockLogger);

      // Assert
      expect(result).toHaveLength(1);
      expect(result[0]).toHaveProperty('category_id');
      expect(result[0]).toHaveProperty('name');
      expect(result[0]).toHaveProperty('created_at');
      expect(result[0]).toHaveProperty('updated_at');
      expect(typeof result[0].category_id).toBe('number');
      expect(typeof result[0].name).toBe('string');
      expect(result[0].created_at).toBeInstanceOf(Date);
      expect(result[0].updated_at).toBeInstanceOf(Date);
    });

    it('getCategoryById should return Category | null type', async () => {
      // Arrange - Test non-null case
      const mockCategory: Category = {
        category_id: 1,
        name: 'Test',
        created_at: new Date(),
        updated_at: new Date(),
      };
      mockPool.query.mockResolvedValue({ rows: [mockCategory] });

      // Act
      const result = await CategoryDbService.getCategoryById(1, mockLogger);

      // Assert
      expect(result).not.toBeNull();
      expect(result).toHaveProperty('category_id', 1);
    });

    it('getCategoryByName should return Category | null type', async () => {
      // Arrange - Test null case
      mockPool.query.mockResolvedValue({ rows: [] });

      // Act
      const result = await CategoryDbService.getCategoryByName('Missing', mockLogger);

      // Assert
      expect(result).toBeNull();
    });
  });
});
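The `getCategoryByName` tests pin a case-insensitive, parameterized lookup: `WHERE LOWER(name) = LOWER($1)` with the raw name passed as a bind parameter (which is also what the SQL-injection test relies on). A sketch of the lookup shape those assertions imply; the real `CategoryDbService` method may differ in details:

```typescript
// Shape inferred from the tests, not copied from category.db.ts.
async function getCategoryByNameSketch(name: string): Promise<Category | null> {
  const sql = `
    SELECT category_id, name, created_at, updated_at
    FROM public.categories
    WHERE LOWER(name) = LOWER($1)`;
  // Parameterized query: the name is bound, never concatenated into the SQL string.
  const { rows } = await getPool().query(sql, [name]);
  return rows[0] ?? null;
}
```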
572 src/services/db/flyerLocation.db.test.ts (new file)
@@ -0,0 +1,572 @@
// src/services/db/flyerLocation.db.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import type { FlyerLocation } from '../../types';

// Un-mock the module we are testing to ensure we use the real implementation.
vi.unmock('./flyerLocation.db');

// Mock the logger to prevent console output during tests
vi.mock('../logger.server', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));
import { logger as mockLogger } from '../logger.server';

// Import the repository after mocks are set up
import { FlyerLocationRepository } from './flyerLocation.db';

describe('FlyerLocation DB Service', () => {
  let flyerLocationRepo: FlyerLocationRepository;
  const mockDb = {
    query: vi.fn(),
  };

  beforeEach(() => {
    vi.clearAllMocks();
    mockDb.query.mockReset();

    // Instantiate the repository with the minimal mock db for each test
    flyerLocationRepo = new FlyerLocationRepository(mockDb as any);
  });

  describe('linkFlyerToLocations', () => {
    it('should execute bulk INSERT query with multiple store location IDs', async () => {
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 3 });
      const storeLocationIds = [1, 2, 3];

      await flyerLocationRepo.linkFlyerToLocations(100, storeLocationIds, mockLogger);

      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('INSERT INTO public.flyer_locations'),
        [100, 1, 2, 3],
      );
      // Check the VALUES clause has multiple placeholders
      expect(mockDb.query.mock.calls[0][0]).toContain('VALUES ($1, $2), ($1, $3), ($1, $4)');
      expect(mockDb.query.mock.calls[0][0]).toContain(
        'ON CONFLICT (flyer_id, store_location_id) DO NOTHING',
      );
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 100, locationCount: 3 },
        'Linked flyer to store locations',
      );
    });

    it('should execute INSERT query with single store location ID', async () => {
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 1 });

      await flyerLocationRepo.linkFlyerToLocations(200, [5], mockLogger);

      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('VALUES ($1, $2)'),
        [200, 5],
      );
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 200, locationCount: 1 },
        'Linked flyer to store locations',
      );
    });

    it('should log warning and not query when storeLocationIds is empty', async () => {
      await flyerLocationRepo.linkFlyerToLocations(300, [], mockLogger);

      expect(mockDb.query).not.toHaveBeenCalled();
      expect(mockLogger.warn).toHaveBeenCalledWith(
        { flyerId: 300 },
        'No store locations provided for flyer linkage',
      );
    });

    it('should handle ON CONFLICT silently for duplicate entries', async () => {
      // ON CONFLICT DO NOTHING means duplicate rows are silently ignored
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 2 }); // Only 2 of 3 inserted

      await flyerLocationRepo.linkFlyerToLocations(400, [10, 11, 12], mockLogger);

      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 400, locationCount: 3 },
        'Linked flyer to store locations',
      );
    });

    it('should throw a generic error if the database query fails', async () => {
      const dbError = new Error('DB Connection Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(flyerLocationRepo.linkFlyerToLocations(500, [1, 2], mockLogger)).rejects.toThrow(
        'Failed to link flyer to store locations.',
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, flyerId: 500, storeLocationIds: [1, 2] },
        'Database error in linkFlyerToLocations',
      );
    });

    it('should throw ForeignKeyConstraintError if flyer or store location does not exist', async () => {
      const dbError = new Error('violates foreign key constraint');
      (dbError as Error & { code: string }).code = '23503';
      mockDb.query.mockRejectedValue(dbError);

      // The handleDbError function will throw ForeignKeyConstraintError
      const { ForeignKeyConstraintError } = await import('./errors.db');
      await expect(flyerLocationRepo.linkFlyerToLocations(999, [1], mockLogger)).rejects.toThrow(
        ForeignKeyConstraintError,
      );
    });
  });
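The first test above pins the exact placeholder layout `VALUES ($1, $2), ($1, $3), ($1, $4)`: the flyer ID is bound once as `$1` and each store location gets its own parameter. A sketch of how that statement could be assembled; the helper name and return shape are assumptions, not the repository's actual code:

```typescript
// Hypothetical builder matching the placeholder pattern the tests assert.
function buildFlyerLocationInsert(flyerId: number, storeLocationIds: number[]) {
  // $1 is reserved for flyer_id, so location placeholders start at $2.
  const values = storeLocationIds.map((_, i) => `($1, $${i + 2})`).join(', ');
  const sql = `
    INSERT INTO public.flyer_locations (flyer_id, store_location_id)
    VALUES ${values}
    ON CONFLICT (flyer_id, store_location_id) DO NOTHING`;
  return { sql, params: [flyerId, ...storeLocationIds] };
}

// buildFlyerLocationInsert(100, [1, 2, 3]).sql contains
// 'VALUES ($1, $2), ($1, $3), ($1, $4)', with params [100, 1, 2, 3].
```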
  describe('linkFlyerToAllStoreLocations', () => {
    it('should execute INSERT...SELECT query and return the count of linked locations', async () => {
      mockDb.query.mockResolvedValue({
        rows: [{ store_location_id: 1 }, { store_location_id: 2 }, { store_location_id: 3 }],
        rowCount: 3,
      });

      const result = await flyerLocationRepo.linkFlyerToAllStoreLocations(100, 5, mockLogger);

      expect(result).toBe(3);
      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockDb.query).toHaveBeenCalledWith(
        expect.stringContaining('INSERT INTO public.flyer_locations'),
        [100, 5],
      );
      expect(mockDb.query.mock.calls[0][0]).toContain('SELECT $1, store_location_id');
      expect(mockDb.query.mock.calls[0][0]).toContain('FROM public.store_locations');
      expect(mockDb.query.mock.calls[0][0]).toContain('WHERE store_id = $2');
      expect(mockDb.query.mock.calls[0][0]).toContain(
        'ON CONFLICT (flyer_id, store_location_id) DO NOTHING',
      );
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 100, storeId: 5, linkedCount: 3 },
        'Linked flyer to all store locations',
      );
    });

    it('should return 0 when no store locations exist for the store', async () => {
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });

      const result = await flyerLocationRepo.linkFlyerToAllStoreLocations(200, 10, mockLogger);

      expect(result).toBe(0);
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 200, storeId: 10, linkedCount: 0 },
        'Linked flyer to all store locations',
      );
    });

    it('should return 0 when all locations are already linked (ON CONFLICT)', async () => {
      // ON CONFLICT DO NOTHING means no rows returned
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });

      const result = await flyerLocationRepo.linkFlyerToAllStoreLocations(300, 15, mockLogger);

      expect(result).toBe(0);
    });

    it('should throw a generic error if the database query fails', async () => {
      const dbError = new Error('DB Connection Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(
        flyerLocationRepo.linkFlyerToAllStoreLocations(400, 20, mockLogger),
      ).rejects.toThrow('Failed to link flyer to all store locations.');

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, flyerId: 400, storeId: 20 },
        'Database error in linkFlyerToAllStoreLocations',
      );
    });

    it('should throw ForeignKeyConstraintError if flyer or store does not exist', async () => {
      const dbError = new Error('violates foreign key constraint');
      (dbError as Error & { code: string }).code = '23503';
      mockDb.query.mockRejectedValue(dbError);

      const { ForeignKeyConstraintError } = await import('./errors.db');
      await expect(
        flyerLocationRepo.linkFlyerToAllStoreLocations(999, 999, mockLogger),
      ).rejects.toThrow(ForeignKeyConstraintError);
    });
  });

  describe('unlinkAllLocations', () => {
    it('should execute DELETE query for all flyer locations', async () => {
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 5 });

      await flyerLocationRepo.unlinkAllLocations(100, mockLogger);

      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockDb.query).toHaveBeenCalledWith(
        'DELETE FROM public.flyer_locations WHERE flyer_id = $1',
        [100],
      );
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 100 },
        'Unlinked all locations from flyer',
      );
    });

    it('should complete successfully even if no rows are deleted', async () => {
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });

      await flyerLocationRepo.unlinkAllLocations(200, mockLogger);

      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 200 },
        'Unlinked all locations from flyer',
      );
    });

    it('should throw a generic error if the database query fails', async () => {
      const dbError = new Error('DB Connection Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(flyerLocationRepo.unlinkAllLocations(300, mockLogger)).rejects.toThrow(
        'Failed to unlink locations from flyer.',
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, flyerId: 300 },
        'Database error in unlinkAllLocations',
      );
    });
  });

  describe('unlinkLocation', () => {
    it('should execute DELETE query for a specific flyer-location pair', async () => {
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 1 });

      await flyerLocationRepo.unlinkLocation(100, 50, mockLogger);

      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockDb.query).toHaveBeenCalledWith(
        'DELETE FROM public.flyer_locations WHERE flyer_id = $1 AND store_location_id = $2',
        [100, 50],
      );
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 100, storeLocationId: 50 },
        'Unlinked location from flyer',
      );
    });

    it('should complete successfully even if the link does not exist', async () => {
      mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });

      await flyerLocationRepo.unlinkLocation(200, 60, mockLogger);

      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockLogger.info).toHaveBeenCalledWith(
        { flyerId: 200, storeLocationId: 60 },
        'Unlinked location from flyer',
      );
    });

    it('should throw a generic error if the database query fails', async () => {
      const dbError = new Error('DB Connection Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(flyerLocationRepo.unlinkLocation(300, 70, mockLogger)).rejects.toThrow(
        'Failed to unlink location from flyer.',
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, flyerId: 300, storeLocationId: 70 },
        'Database error in unlinkLocation',
      );
    });
  });

  describe('getLocationIdsByFlyerId', () => {
    it('should return an array of store location IDs for a flyer', async () => {
      mockDb.query.mockResolvedValue({
        rows: [{ store_location_id: 1 }, { store_location_id: 2 }, { store_location_id: 3 }],
      });

      const result = await flyerLocationRepo.getLocationIdsByFlyerId(100, mockLogger);

      expect(result).toEqual([1, 2, 3]);
      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockDb.query).toHaveBeenCalledWith(
        'SELECT store_location_id FROM public.flyer_locations WHERE flyer_id = $1',
        [100],
      );
    });

    it('should return an empty array if no locations are linked to the flyer', async () => {
      mockDb.query.mockResolvedValue({ rows: [] });

      const result = await flyerLocationRepo.getLocationIdsByFlyerId(200, mockLogger);

      expect(result).toEqual([]);
    });

    it('should throw a generic error if the database query fails', async () => {
      const dbError = new Error('DB Connection Error');
      mockDb.query.mockRejectedValue(dbError);

      await expect(flyerLocationRepo.getLocationIdsByFlyerId(300, mockLogger)).rejects.toThrow(
        'Failed to get location IDs for flyer.',
      );

      expect(mockLogger.error).toHaveBeenCalledWith(
        { err: dbError, flyerId: 300 },
        'Database error in getLocationIdsByFlyerId',
      );
    });
  });

  describe('getFlyerLocationsByFlyerId', () => {
    it('should return an array of FlyerLocation objects for a flyer', async () => {
      const mockFlyerLocations: FlyerLocation[] = [
        {
          flyer_id: 100,
          store_location_id: 1,
          created_at: '2025-01-01T00:00:00Z',
          updated_at: '2025-01-01T00:00:00Z',
        },
        {
          flyer_id: 100,
          store_location_id: 2,
          created_at: '2025-01-01T00:00:00Z',
          updated_at: '2025-01-01T00:00:00Z',
        },
      ];
      mockDb.query.mockResolvedValue({ rows: mockFlyerLocations });

      const result = await flyerLocationRepo.getFlyerLocationsByFlyerId(100, mockLogger);

      expect(result).toEqual(mockFlyerLocations);
      expect(mockDb.query).toHaveBeenCalledTimes(1);
      expect(mockDb.query).toHaveBeenCalledWith(
        'SELECT * FROM public.flyer_locations WHERE flyer_id = $1',
        [100],
      );
    });

    it('should return an empty array if no flyer locations exist for the flyer', async () => {
mockDb.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
const result = await flyerLocationRepo.getFlyerLocationsByFlyerId(200, mockLogger);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
await expect(flyerLocationRepo.getFlyerLocationsByFlyerId(300, mockLogger)).rejects.toThrow(
|
||||
'Failed to get flyer locations.',
|
||||
);
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, flyerId: 300 },
|
||||
'Database error in getFlyerLocationsByFlyerId',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Transaction support (optional PoolClient parameter)', () => {
|
||||
it('should use provided PoolClient instead of Pool when passed to constructor', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({ rows: [{ store_location_id: 1 }], rowCount: 1 }),
|
||||
};
|
||||
|
||||
// Create repository with a mock PoolClient
|
||||
const repoWithClient = new FlyerLocationRepository(mockClient as any);
|
||||
|
||||
await repoWithClient.linkFlyerToLocations(100, [1], mockLogger);
|
||||
|
||||
// Should use the client's query method, not the pool's
|
||||
expect(mockClient.query).toHaveBeenCalledTimes(1);
|
||||
expect(mockClient.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.flyer_locations'),
|
||||
[100, 1],
|
||||
);
|
||||
});
|
||||
|
||||
it('should work correctly within a transaction for linkFlyerToAllStoreLocations', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
rows: [{ store_location_id: 1 }, { store_location_id: 2 }],
|
||||
rowCount: 2,
|
||||
}),
|
||||
};
|
||||
|
||||
const repoWithClient = new FlyerLocationRepository(mockClient as any);
|
||||
const result = await repoWithClient.linkFlyerToAllStoreLocations(100, 5, mockLogger);
|
||||
|
||||
expect(result).toBe(2);
|
||||
expect(mockClient.query).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should work correctly within a transaction for unlinkAllLocations', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({ rows: [], rowCount: 3 }),
|
||||
};
|
||||
|
||||
const repoWithClient = new FlyerLocationRepository(mockClient as any);
|
||||
await repoWithClient.unlinkAllLocations(100, mockLogger);
|
||||
|
||||
expect(mockClient.query).toHaveBeenCalledWith(
|
||||
'DELETE FROM public.flyer_locations WHERE flyer_id = $1',
|
||||
[100],
|
||||
);
|
||||
});
|
||||
|
||||
it('should work correctly within a transaction for getLocationIdsByFlyerId', async () => {
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
rows: [{ store_location_id: 10 }, { store_location_id: 20 }],
|
||||
}),
|
||||
};
|
||||
|
||||
const repoWithClient = new FlyerLocationRepository(mockClient as any);
|
||||
const result = await repoWithClient.getLocationIdsByFlyerId(100, mockLogger);
|
||||
|
||||
expect(result).toEqual([10, 20]);
|
||||
expect(mockClient.query).toHaveBeenCalledWith(
|
||||
'SELECT store_location_id FROM public.flyer_locations WHERE flyer_id = $1',
|
||||
[100],
|
||||
);
|
||||
});
|
||||
|
||||
it('should work correctly within a transaction for getFlyerLocationsByFlyerId', async () => {
|
||||
const mockFlyerLocations: FlyerLocation[] = [
|
||||
{
|
||||
flyer_id: 100,
|
||||
store_location_id: 1,
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
},
|
||||
];
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({ rows: mockFlyerLocations }),
|
||||
};
|
||||
|
||||
const repoWithClient = new FlyerLocationRepository(mockClient as any);
|
||||
const result = await repoWithClient.getFlyerLocationsByFlyerId(100, mockLogger);
|
||||
|
||||
expect(result).toEqual(mockFlyerLocations);
|
||||
expect(mockClient.query).toHaveBeenCalledWith(
|
||||
'SELECT * FROM public.flyer_locations WHERE flyer_id = $1',
|
||||
[100],
|
||||
);
|
||||
});
|
||||
});
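// For reference, a minimal sketch of how the PoolClient-accepting constructor is
// intended to be used inside a real transaction. The getPool import path and the
// surrounding wiring are assumptions for illustration; the repository only needs
// an object exposing a pg-compatible query() method.
//
//   import { getPool } from './connection.db';
//
//   async function relinkFlyer(flyerId: number, locationIds: number[], log: Logger) {
//     const client = await getPool().connect();
//     try {
//       await client.query('BEGIN');
//       const repo = new FlyerLocationRepository(client);
//       await repo.unlinkAllLocations(flyerId, log);
//       await repo.linkFlyerToLocations(flyerId, locationIds, log);
//       await client.query('COMMIT');
//     } catch (err) {
//       await client.query('ROLLBACK');
//       throw err;
//     } finally {
//       client.release();
//     }
//   }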
|
||||
|
||||
describe('Edge cases', () => {
|
||||
it('should handle very large arrays of store location IDs', async () => {
|
||||
// Create an array of 100 location IDs
|
||||
const largeArray = Array.from({ length: 100 }, (_, i) => i + 1);
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 100 });
|
||||
|
||||
await flyerLocationRepo.linkFlyerToLocations(100, largeArray, mockLogger);
|
||||
|
||||
expect(mockDb.query).toHaveBeenCalledTimes(1);
|
||||
// Check that all IDs are in the parameters
|
||||
const queryParams = mockDb.query.mock.calls[0][1] as number[];
|
||||
expect(queryParams).toHaveLength(101); // flyerId + 100 locationIds
|
||||
expect(queryParams[0]).toBe(100); // flyerId
|
||||
expect(queryParams[1]).toBe(1); // first locationId
|
||||
expect(queryParams[100]).toBe(100); // last locationId
|
||||
});
|
||||
|
||||
it('should handle negative IDs (database will validate constraints)', async () => {
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });
|
||||
|
||||
// The repository passes these through; the database would reject them
|
||||
await flyerLocationRepo.linkFlyerToLocations(-1, [-1, -2], mockLogger);
|
||||
|
||||
expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), [-1, -1, -2]);
|
||||
});
|
||||
|
||||
it('should handle zero as flyer ID', async () => {
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 1 });
|
||||
|
||||
await flyerLocationRepo.linkFlyerToLocations(0, [1], mockLogger);
|
||||
|
||||
expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), [0, 1]);
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||
{ flyerId: 0, locationCount: 1 },
|
||||
'Linked flyer to store locations',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PostgreSQL error code handling', () => {
|
||||
it('should throw UniqueConstraintError for code 23505', async () => {
|
||||
const dbError = new Error('duplicate key value violates unique constraint');
|
||||
(dbError as Error & { code: string }).code = '23505';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
const { UniqueConstraintError } = await import('./errors.db');
|
||||
await expect(flyerLocationRepo.linkFlyerToLocations(100, [1], mockLogger)).rejects.toThrow(
|
||||
UniqueConstraintError,
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw ForeignKeyConstraintError for code 23503', async () => {
|
||||
const dbError = new Error('violates foreign key constraint');
|
||||
(dbError as Error & { code: string }).code = '23503';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
const { ForeignKeyConstraintError } = await import('./errors.db');
|
||||
await expect(
|
||||
flyerLocationRepo.linkFlyerToAllStoreLocations(100, 1, mockLogger),
|
||||
).rejects.toThrow(ForeignKeyConstraintError);
|
||||
});
|
||||
|
||||
it('should throw NotNullConstraintError for code 23502', async () => {
|
||||
const dbError = new Error('null value in column violates not-null constraint');
|
||||
(dbError as Error & { code: string }).code = '23502';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
const { NotNullConstraintError } = await import('./errors.db');
|
||||
await expect(flyerLocationRepo.unlinkLocation(100, 1, mockLogger)).rejects.toThrow(
|
||||
NotNullConstraintError,
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw CheckConstraintError for code 23514', async () => {
|
||||
const dbError = new Error('violates check constraint');
|
||||
(dbError as Error & { code: string }).code = '23514';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
const { CheckConstraintError } = await import('./errors.db');
|
||||
await expect(flyerLocationRepo.getLocationIdsByFlyerId(100, mockLogger)).rejects.toThrow(
|
||||
CheckConstraintError,
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw InvalidTextRepresentationError for code 22P02', async () => {
|
||||
const dbError = new Error('invalid input syntax for type integer');
|
||||
(dbError as Error & { code: string }).code = '22P02';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
const { InvalidTextRepresentationError } = await import('./errors.db');
|
||||
await expect(flyerLocationRepo.getFlyerLocationsByFlyerId(100, mockLogger)).rejects.toThrow(
|
||||
InvalidTextRepresentationError,
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw NumericValueOutOfRangeError for code 22003', async () => {
|
||||
const dbError = new Error('integer out of range');
|
||||
(dbError as Error & { code: string }).code = '22003';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
const { NumericValueOutOfRangeError } = await import('./errors.db');
|
||||
await expect(flyerLocationRepo.unlinkAllLocations(100, mockLogger)).rejects.toThrow(
|
||||
NumericValueOutOfRangeError,
|
||||
);
|
||||
});
|
||||
});
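// The six cases above all exercise the shared PostgreSQL error translation used
// by the repository methods (a handleDbError-style helper). As a reference, here
// is a minimal sketch of that mapping, assuming it switches on the pg error
// `code` property; the real helper lives in errors.db.ts and may attach more
// context to each error:
//
//   switch ((err as { code?: string }).code) {
//     case '23505': throw new UniqueConstraintError(uniqueMessage);
//     case '23503': throw new ForeignKeyConstraintError();
//     case '23502': throw new NotNullConstraintError();
//     case '23514': throw new CheckConstraintError();
//     case '22P02': throw new InvalidTextRepresentationError();
//     case '22003': throw new NumericValueOutOfRangeError();
//     default:      throw new Error(genericMessage);
//   }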
|
||||
});
|
||||
697
src/services/db/store.db.test.ts
Normal file
@@ -0,0 +1,697 @@
|
||||
// src/services/db/store.db.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, Mock } from 'vitest';
|
||||
import type { PoolClient } from 'pg';
|
||||
|
||||
// Mock the logger to prevent stderr noise during tests
|
||||
vi.mock('../logger.server', () => ({
|
||||
logger: {
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}));
|
||||
import { logger as mockLogger } from '../logger.server';
|
||||
|
||||
// Un-mock the module we are testing to ensure we use the real implementation.
|
||||
vi.unmock('./store.db');
|
||||
|
||||
import { StoreRepository } from './store.db';
|
||||
import { mockPoolInstance } from '../../tests/setup/tests-setup-unit';
|
||||
import { NotFoundError, UniqueConstraintError } from './errors.db';
|
||||
import type { Store } from '../../types';
|
||||
|
||||
describe('Store DB Service', () => {
|
||||
let storeRepo: StoreRepository;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockPoolInstance.query.mockReset();
|
||||
// Instantiate the repository with the mock pool for each test
|
||||
storeRepo = new StoreRepository(mockPoolInstance as unknown as PoolClient);
|
||||
});
|
||||
|
||||
describe('createStore', () => {
|
||||
it('should execute the correct INSERT query and return the new store ID', async () => {
|
||||
// Arrange
|
||||
const newStoreId = 42;
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [{ store_id: newStoreId }] });
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.createStore(
|
||||
'Test Store',
|
||||
mockLogger,
|
||||
'https://logo.url',
|
||||
'user-123',
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.stores'),
|
||||
['Test Store', 'https://logo.url', 'user-123'],
|
||||
);
|
||||
expect(result).toBe(newStoreId);
|
||||
});
|
||||
|
||||
it('should handle null values for optional parameters', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [{ store_id: 1 }] });
|
||||
|
||||
// Act
|
||||
await storeRepo.createStore('Basic Store', mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.stores'),
|
||||
['Basic Store', null, null],
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle explicit null values for logoUrl and createdBy', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [{ store_id: 1 }] });
|
||||
|
||||
// Act
|
||||
await storeRepo.createStore('Another Store', mockLogger, null, null);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.stores'),
|
||||
['Another Store', null, null],
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw UniqueConstraintError if store name already exists', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('duplicate key value violates unique constraint');
|
||||
(dbError as Error & { code: string }).code = '23505';
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.createStore('Duplicate Store', mockLogger)).rejects.toThrow(
|
||||
UniqueConstraintError,
|
||||
);
|
||||
await expect(storeRepo.createStore('Duplicate Store', mockLogger)).rejects.toThrow(
|
||||
'A store with the name "Duplicate Store" already exists.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
code: '23505',
|
||||
name: 'Duplicate Store',
|
||||
}),
|
||||
'Database error in createStore',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.createStore('Fail Store', mockLogger)).rejects.toThrow(
|
||||
'Failed to create store.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
name: 'Fail Store',
|
||||
}),
|
||||
'Database error in createStore',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStoreById', () => {
|
||||
const mockStore: Store = {
|
||||
store_id: 1,
|
||||
name: 'Test Store',
|
||||
logo_url: 'https://logo.url',
|
||||
created_by: 'user-123',
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
};
|
||||
|
||||
it('should execute the correct SELECT query and return the store', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [mockStore], rowCount: 1 });
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.getStoreById(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('SELECT * FROM public.stores WHERE store_id = $1'),
|
||||
[1],
|
||||
);
|
||||
expect(result).toEqual(mockStore);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError if store is not found', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [], rowCount: 0 });
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.getStoreById(999, mockLogger)).rejects.toThrow(NotFoundError);
|
||||
await expect(storeRepo.getStoreById(999, mockLogger)).rejects.toThrow(
|
||||
'Store with ID 999 not found.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.getStoreById(1, mockLogger)).rejects.toThrow(
|
||||
'Failed to retrieve store.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
storeId: 1,
|
||||
}),
|
||||
'Database error in getStoreById',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAllStores', () => {
|
||||
it('should execute the correct SELECT query and return all stores', async () => {
|
||||
// Arrange
|
||||
const mockStores: Store[] = [
|
||||
{
|
||||
store_id: 1,
|
||||
name: 'Alpha Store',
|
||||
logo_url: null,
|
||||
created_by: null,
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
},
|
||||
{
|
||||
store_id: 2,
|
||||
name: 'Beta Store',
|
||||
logo_url: 'https://beta.logo.url',
|
||||
created_by: 'user-456',
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
},
|
||||
];
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: mockStores });
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.getAllStores(mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('SELECT * FROM public.stores ORDER BY name ASC'),
|
||||
);
|
||||
expect(result).toEqual(mockStores);
|
||||
expect(result).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should return an empty array if no stores exist', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.getAllStores(mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual([]);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.getAllStores(mockLogger)).rejects.toThrow(
|
||||
'Failed to retrieve stores.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
}),
|
||||
'Database error in getAllStores',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateStore', () => {
|
||||
it('should execute the correct UPDATE query when updating name only', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rowCount: 1, rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.updateStore(1, { name: 'Updated Store' }, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('UPDATE public.stores'),
|
||||
expect.arrayContaining(['Updated Store', 1]),
|
||||
);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('name = $1'),
|
||||
expect.any(Array),
|
||||
);
|
||||
});
|
||||
|
||||
it('should execute the correct UPDATE query when updating logo_url only', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rowCount: 1, rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.updateStore(1, { logo_url: 'https://new.logo.url' }, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('logo_url = $1'),
|
||||
expect.arrayContaining(['https://new.logo.url', 1]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should execute the correct UPDATE query when updating both name and logo_url', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rowCount: 1, rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.updateStore(
|
||||
1,
|
||||
{ name: 'Updated Store', logo_url: 'https://new.logo.url' },
|
||||
mockLogger,
|
||||
);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('UPDATE public.stores'),
|
||||
['Updated Store', 'https://new.logo.url', 1],
|
||||
);
|
||||
});
|
||||
|
||||
it('should allow setting logo_url to null', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rowCount: 1, rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.updateStore(1, { logo_url: null }, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('logo_url = $1'),
|
||||
[null, 1],
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if no fields are provided for update', async () => {
|
||||
// Note: The 'No fields provided for update' error is caught by handleDbError
|
||||
// and wrapped in the default message 'Failed to update store.'
|
||||
// Act & Assert
|
||||
await expect(storeRepo.updateStore(1, {}, mockLogger)).rejects.toThrow(
|
||||
'Failed to update store.',
|
||||
);
|
||||
// Verify the original error was logged
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: expect.any(Error),
|
||||
storeId: 1,
|
||||
updates: {},
|
||||
}),
|
||||
'Database error in updateStore',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError if store is not found', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rowCount: 0, rows: [] });
|
||||
|
||||
// Act & Assert
|
||||
await expect(
|
||||
storeRepo.updateStore(999, { name: 'Updated Store' }, mockLogger),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
await expect(
|
||||
storeRepo.updateStore(999, { name: 'Updated Store' }, mockLogger),
|
||||
).rejects.toThrow('Store with ID 999 not found.');
|
||||
});
|
||||
|
||||
it('should throw UniqueConstraintError if updated name already exists', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('duplicate key value violates unique constraint');
|
||||
(dbError as Error & { code: string }).code = '23505';
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(
|
||||
storeRepo.updateStore(1, { name: 'Duplicate Store' }, mockLogger),
|
||||
).rejects.toThrow(UniqueConstraintError);
|
||||
await expect(
|
||||
storeRepo.updateStore(1, { name: 'Duplicate Store' }, mockLogger),
|
||||
).rejects.toThrow('A store with the name "Duplicate Store" already exists.');
|
||||
});
|
||||
|
||||
it('should not set custom uniqueMessage when only logo_url is updated', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('duplicate key value violates unique constraint');
|
||||
(dbError as Error & { code: string }).code = '23505';
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
// When only logo_url is updated, uniqueMessage should be undefined (use default)
|
||||
await expect(
|
||||
storeRepo.updateStore(1, { logo_url: 'https://duplicate.url' }, mockLogger),
|
||||
).rejects.toThrow(UniqueConstraintError);
|
||||
// The default UniqueConstraintError message should be used
|
||||
await expect(
|
||||
storeRepo.updateStore(1, { logo_url: 'https://duplicate.url' }, mockLogger),
|
||||
).rejects.toThrow('The record already exists.');
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.updateStore(1, { name: 'Fail Store' }, mockLogger)).rejects.toThrow(
|
||||
'Failed to update store.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
storeId: 1,
|
||||
updates: { name: 'Fail Store' },
|
||||
}),
|
||||
'Database error in updateStore',
|
||||
);
|
||||
});
|
||||
});
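// The assertions above pin down the shape of the generated SQL: only the fields
// present in `updates` appear in SET, placeholders are numbered in order, and the
// store ID is always the final parameter. A sketch of the kind of builder that
// produces this (illustrative only; the real logic is in store.db.ts):
//
//   const fields: string[] = [];
//   const values: unknown[] = [];
//   if (updates.name !== undefined) {
//     values.push(updates.name);
//     fields.push(`name = $${values.length}`);
//   }
//   if (updates.logo_url !== undefined) {
//     values.push(updates.logo_url);
//     fields.push(`logo_url = $${values.length}`);
//   }
//   if (fields.length === 0) throw new Error('No fields provided for update');
//   values.push(storeId);
//   const sql = `UPDATE public.stores SET ${fields.join(', ')} WHERE store_id = $${values.length}`;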
|
||||
|
||||
describe('deleteStore', () => {
|
||||
it('should execute the correct DELETE query', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rowCount: 1, rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.deleteStore(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('DELETE FROM public.stores WHERE store_id = $1'),
|
||||
[1],
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError if store is not found', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rowCount: 0, rows: [] });
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.deleteStore(999, mockLogger)).rejects.toThrow(NotFoundError);
|
||||
await expect(storeRepo.deleteStore(999, mockLogger)).rejects.toThrow(
|
||||
'Store with ID 999 not found.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.deleteStore(1, mockLogger)).rejects.toThrow('Failed to delete store.');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
storeId: 1,
|
||||
}),
|
||||
'Database error in deleteStore',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('searchStoresByName', () => {
|
||||
const mockStores: Store[] = [
|
||||
{
|
||||
store_id: 1,
|
||||
name: 'Fresh Mart',
|
||||
logo_url: null,
|
||||
created_by: null,
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
},
|
||||
{
|
||||
store_id: 2,
|
||||
name: 'Fresh Foods',
|
||||
logo_url: null,
|
||||
created_by: null,
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
},
|
||||
];
|
||||
|
||||
it('should execute the correct SELECT query with ILIKE pattern', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: mockStores });
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.searchStoresByName('Fresh', mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('WHERE name ILIKE $1'),
|
||||
['%Fresh%', 10], // Default limit is 10
|
||||
);
|
||||
expect(result).toEqual(mockStores);
|
||||
});
|
||||
|
||||
it('should use the provided limit parameter', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [mockStores[0]] });
|
||||
|
||||
// Act
|
||||
await storeRepo.searchStoresByName('Fresh', mockLogger, 5);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(expect.stringContaining('LIMIT $2'), [
|
||||
'%Fresh%',
|
||||
5,
|
||||
]);
|
||||
});
|
||||
|
||||
it('should return an empty array if no stores match', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.searchStoresByName('Nonexistent', mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual([]);
|
||||
expect(result).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle special characters in search query', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.searchStoresByName("Store's %Special_Name", mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('WHERE name ILIKE $1'),
|
||||
["%Store's %Special_Name%", 10],
|
||||
);
|
||||
});
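// As asserted above, '%' and '_' in the caller-supplied query are passed straight
// into the ILIKE pattern rather than escaped, so user-typed wildcards widen the
// match. If literal matching were ever required, a small helper along these lines
// could be applied first (a sketch only, not part of the current implementation):
//
//   const escapeLikePattern = (s: string) => s.replace(/[\\%_]/g, (c) => `\\${c}`);
//   // e.g. searchStoresByName(escapeLikePattern(userInput), logger);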
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeRepo.searchStoresByName('Fail', mockLogger)).rejects.toThrow(
|
||||
'Failed to search stores.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
query: 'Fail',
|
||||
limit: 10,
|
||||
}),
|
||||
'Database error in searchStoresByName',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('transaction support (PoolClient injection)', () => {
|
||||
it('should use the injected PoolClient for queries', async () => {
|
||||
// Arrange - Create a separate mock client to simulate transaction usage
|
||||
const mockClient = {
|
||||
query: vi.fn().mockResolvedValue({ rows: [{ store_id: 100 }] }),
|
||||
};
|
||||
const transactionRepo = new StoreRepository(mockClient as unknown as PoolClient);
|
||||
|
||||
// Act
|
||||
const result = await transactionRepo.createStore('Transaction Store', mockLogger);
|
||||
|
||||
// Assert - Verify the injected client was used, not the global pool
|
||||
expect(mockClient.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.stores'),
|
||||
['Transaction Store', null, null],
|
||||
);
|
||||
expect(mockPoolInstance.query).not.toHaveBeenCalled();
|
||||
expect(result).toBe(100);
|
||||
});
|
||||
|
||||
it('should allow different operations within the same PoolClient', async () => {
|
||||
// Arrange
|
||||
const mockClient = {
|
||||
query: vi.fn(),
|
||||
};
|
||||
(mockClient.query as Mock)
|
||||
.mockResolvedValueOnce({ rows: [{ store_id: 1 }] }) // createStore
|
||||
.mockResolvedValueOnce({
|
||||
rows: [
|
||||
{
|
||||
store_id: 1,
|
||||
name: 'Test',
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
},
|
||||
],
|
||||
rowCount: 1,
|
||||
}); // getStoreById
|
||||
|
||||
const transactionRepo = new StoreRepository(mockClient as unknown as PoolClient);
|
||||
|
||||
// Act
|
||||
await transactionRepo.createStore('Test', mockLogger);
|
||||
await transactionRepo.getStoreById(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockClient.query).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('constructor with default pool', () => {
|
||||
it('should use getPool() when no db is provided', async () => {
|
||||
// Note: This test verifies the default constructor behavior.
|
||||
// In the test environment, getPool() returns the mocked pool from tests-setup-unit.ts.
|
||||
// We can verify this by creating a repository without parameters and checking if queries work.
|
||||
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
// Act - Create repository without explicit db parameter
|
||||
const defaultRepo = new StoreRepository();
|
||||
await defaultRepo.getAllStores(mockLogger);
|
||||
|
||||
// Assert - The mock pool should have been called since getPool() returns our mock
|
||||
expect(mockPoolInstance.query).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty string for store name', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [{ store_id: 1 }] });
|
||||
|
||||
// Act
|
||||
await storeRepo.createStore('', mockLogger);
|
||||
|
||||
// Assert - Empty string should be passed as-is
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.stores'),
|
||||
['', null, null],
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle very long store names', async () => {
|
||||
// Arrange
|
||||
const longName = 'A'.repeat(1000);
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [{ store_id: 1 }] });
|
||||
|
||||
// Act
|
||||
await storeRepo.createStore(longName, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.stores'),
|
||||
[longName, null, null],
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle a store ID of 0 (falsy but valid numeric ID)', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({
|
||||
rows: [
|
||||
{
|
||||
store_id: 0,
|
||||
name: 'Zero ID Store',
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
},
|
||||
],
|
||||
rowCount: 1,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.getStoreById(0, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('WHERE store_id = $1'),
|
||||
[0],
|
||||
);
|
||||
expect(result.store_id).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle search with empty query string', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
// Act
|
||||
const result = await storeRepo.searchStoresByName('', mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('WHERE name ILIKE $1'),
|
||||
['%%', 10], // Empty string wrapped with wildcards
|
||||
);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle negative limit in search (database will handle validation)', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.searchStoresByName('Test', mockLogger, -1);
|
||||
|
||||
// Assert - The value is passed through; the database handles validation
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(expect.stringContaining('LIMIT $2'), [
|
||||
'%Test%',
|
||||
-1,
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle zero limit in search', async () => {
|
||||
// Arrange
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
|
||||
// Act
|
||||
await storeRepo.searchStoresByName('Test', mockLogger, 0);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(expect.stringContaining('LIMIT $2'), [
|
||||
'%Test%',
|
||||
0,
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
629
src/services/db/storeLocation.db.test.ts
Normal file
@@ -0,0 +1,629 @@
|
||||
// src/services/db/storeLocation.db.test.ts
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { StoreLocationRepository } from './storeLocation.db';
|
||||
import type { StoreLocationWithAddress, StoreWithLocations } from './storeLocation.db';
|
||||
import { UniqueConstraintError, NotFoundError, ForeignKeyConstraintError } from './errors.db';
|
||||
|
||||
// Un-mock the module we are testing to ensure we use the real implementation.
|
||||
vi.unmock('./storeLocation.db');
|
||||
|
||||
// Mock the logger to prevent console output during tests
|
||||
vi.mock('../logger.server', () => ({
|
||||
logger: {
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
},
|
||||
}));
|
||||
import { logger as mockLogger } from '../logger.server';
|
||||
|
||||
describe('StoreLocation DB Service', () => {
|
||||
let storeLocationRepo: StoreLocationRepository;
|
||||
const mockDb = {
|
||||
query: vi.fn(),
|
||||
};
|
||||
|
||||
// Test fixtures
|
||||
const mockAddress = {
|
||||
address_id: 1,
|
||||
address_line_1: '123 Main St',
|
||||
address_line_2: null,
|
||||
city: 'Anytown',
|
||||
province_state: 'CA',
|
||||
postal_code: '12345',
|
||||
country: 'USA',
|
||||
latitude: 37.7749,
|
||||
longitude: -122.4194,
|
||||
created_at: '2025-01-01T00:00:00.000Z',
|
||||
updated_at: '2025-01-01T00:00:00.000Z',
|
||||
};
|
||||
|
||||
const mockStoreLocation: StoreLocationWithAddress = {
|
||||
store_location_id: 1,
|
||||
store_id: 1,
|
||||
address_id: 1,
|
||||
created_at: '2025-01-01T00:00:00.000Z',
|
||||
updated_at: '2025-01-01T00:00:00.000Z',
|
||||
address: mockAddress,
|
||||
};
|
||||
|
||||
const mockStore = {
|
||||
store_id: 1,
|
||||
name: 'Test Store',
|
||||
logo_url: 'https://example.com/logo.png',
|
||||
created_by: 'user-123',
|
||||
created_at: '2025-01-01T00:00:00.000Z',
|
||||
updated_at: '2025-01-01T00:00:00.000Z',
|
||||
};
|
||||
|
||||
const mockStoreWithLocations: StoreWithLocations = {
|
||||
...mockStore,
|
||||
locations: [mockStoreLocation],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockDb.query.mockReset();
|
||||
storeLocationRepo = new StoreLocationRepository(mockDb);
|
||||
});
|
||||
|
||||
describe('createStoreLocation', () => {
|
||||
it('should create a store location and return the store_location_id', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [{ store_location_id: 1 }],
|
||||
rowCount: 1,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.createStoreLocation(1, 1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toBe(1);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INSERT INTO public.store_locations'),
|
||||
[1, 1],
|
||||
);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('RETURNING store_location_id'),
|
||||
expect.any(Array),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw UniqueConstraintError when store-address link already exists', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('duplicate key value violates unique constraint');
|
||||
(dbError as any).code = '23505';
|
||||
(dbError as any).constraint = 'store_locations_store_id_address_id_key';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.createStoreLocation(1, 1, mockLogger)).rejects.toThrow(
|
||||
UniqueConstraintError,
|
||||
);
|
||||
await expect(storeLocationRepo.createStoreLocation(1, 1, mockLogger)).rejects.toThrow(
|
||||
'This store is already linked to this address.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
storeId: 1,
|
||||
addressId: 1,
|
||||
code: '23505',
|
||||
}),
|
||||
'Database error in createStoreLocation',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw ForeignKeyConstraintError when store does not exist', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('insert or update on table violates foreign key constraint');
|
||||
(dbError as any).code = '23503';
|
||||
(dbError as any).constraint = 'store_locations_store_id_fkey';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.createStoreLocation(999, 1, mockLogger)).rejects.toThrow(
|
||||
ForeignKeyConstraintError,
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
storeId: 999,
|
||||
addressId: 1,
|
||||
code: '23503',
|
||||
}),
|
||||
'Database error in createStoreLocation',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw ForeignKeyConstraintError when address does not exist', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('insert or update on table violates foreign key constraint');
|
||||
(dbError as any).code = '23503';
|
||||
(dbError as any).constraint = 'store_locations_address_id_fkey';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.createStoreLocation(1, 999, mockLogger)).rejects.toThrow(
|
||||
ForeignKeyConstraintError,
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
storeId: 1,
|
||||
addressId: 999,
|
||||
code: '23503',
|
||||
}),
|
||||
'Database error in createStoreLocation',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.createStoreLocation(1, 1, mockLogger)).rejects.toThrow(
|
||||
'Failed to create store location link.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, storeId: 1, addressId: 1 },
|
||||
'Database error in createStoreLocation',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLocationsByStoreId', () => {
|
||||
it('should return locations for a store with address data', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [mockStoreLocation],
|
||||
rowCount: 1,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getLocationsByStoreId(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual([mockStoreLocation]);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('FROM public.store_locations sl'),
|
||||
[1],
|
||||
);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('INNER JOIN public.addresses a'),
|
||||
expect.any(Array),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return multiple locations when store has multiple addresses', async () => {
|
||||
// Arrange
|
||||
const secondLocation: StoreLocationWithAddress = {
|
||||
store_location_id: 2,
|
||||
store_id: 1,
|
||||
address_id: 2,
|
||||
created_at: '2025-01-02T00:00:00.000Z',
|
||||
updated_at: '2025-01-02T00:00:00.000Z',
|
||||
address: {
|
||||
...mockAddress,
|
||||
address_id: 2,
|
||||
address_line_1: '456 Other St',
|
||||
},
|
||||
};
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [mockStoreLocation, secondLocation],
|
||||
rowCount: 2,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getLocationsByStoreId(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].store_location_id).toBe(1);
|
||||
expect(result[1].store_location_id).toBe(2);
|
||||
});
|
||||
|
||||
it('should return an empty array when store has no locations', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getLocationsByStoreId(999, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.getLocationsByStoreId(1, mockLogger)).rejects.toThrow(
|
||||
'Failed to retrieve store locations.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, storeId: 1 },
|
||||
'Database error in getLocationsByStoreId',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getStoreWithLocations', () => {
|
||||
it('should return a store with all its locations', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [mockStoreWithLocations],
|
||||
rowCount: 1,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getStoreWithLocations(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual(mockStoreWithLocations);
|
||||
expect(result.store_id).toBe(1);
|
||||
expect(result.locations).toHaveLength(1);
|
||||
expect(result.locations[0].address.address_line_1).toBe('123 Main St');
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('FROM public.stores s'),
|
||||
[1],
|
||||
);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('LEFT JOIN public.store_locations sl'),
|
||||
expect.any(Array),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return a store with empty locations array when store has no locations', async () => {
|
||||
// Arrange
|
||||
const storeWithNoLocations: StoreWithLocations = {
|
||||
...mockStore,
|
||||
locations: [],
|
||||
};
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [storeWithNoLocations],
|
||||
rowCount: 1,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getStoreWithLocations(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result.locations).toEqual([]);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError when store does not exist', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.getStoreWithLocations(999, mockLogger)).rejects.toThrow(
|
||||
NotFoundError,
|
||||
);
|
||||
await expect(storeLocationRepo.getStoreWithLocations(999, mockLogger)).rejects.toThrow(
|
||||
'Store with ID 999 not found.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.getStoreWithLocations(1, mockLogger)).rejects.toThrow(
|
||||
'Failed to retrieve store with locations.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, storeId: 1 },
|
||||
'Database error in getStoreWithLocations',
|
||||
);
|
||||
});
|
||||
});
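// The fixture above returns the store row with its `locations` already nested as
// an array inside a single result row, which implies the query aggregates the
// joined locations into JSON on the database side. A sketch of the likely query
// shape (an assumption; the exact SQL lives in storeLocation.db.ts):
//
//   SELECT s.*,
//          COALESCE(
//            json_agg(
//              json_build_object(
//                'store_location_id', sl.store_location_id,
//                'store_id', sl.store_id,
//                'address_id', sl.address_id,
//                'created_at', sl.created_at,
//                'updated_at', sl.updated_at,
//                'address', to_jsonb(a)
//              )
//            ) FILTER (WHERE sl.store_location_id IS NOT NULL),
//            '[]'
//          ) AS locations
//   FROM public.stores s
//   LEFT JOIN public.store_locations sl ON sl.store_id = s.store_id
//   LEFT JOIN public.addresses a ON a.address_id = sl.address_id
//   WHERE s.store_id = $1
//   GROUP BY s.store_id;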
|
||||
|
||||
describe('getAllStoresWithLocations', () => {
|
||||
it('should return all stores with their locations', async () => {
|
||||
// Arrange
|
||||
const secondStore: StoreWithLocations = {
|
||||
store_id: 2,
|
||||
name: 'Another Store',
|
||||
logo_url: null,
|
||||
created_by: null,
|
||||
created_at: '2025-01-02T00:00:00.000Z',
|
||||
updated_at: '2025-01-02T00:00:00.000Z',
|
||||
locations: [],
|
||||
};
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [mockStoreWithLocations, secondStore],
|
||||
rowCount: 2,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getAllStoresWithLocations(mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].store_id).toBe(1);
|
||||
expect(result[0].locations).toHaveLength(1);
|
||||
expect(result[1].store_id).toBe(2);
|
||||
expect(result[1].locations).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return an empty array when no stores exist', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getAllStoresWithLocations(mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return stores ordered by name ASC', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });
|
||||
|
||||
// Act
|
||||
await storeLocationRepo.getAllStoresWithLocations(mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockDb.query).toHaveBeenCalledWith(expect.stringContaining('ORDER BY s.name ASC'));
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.getAllStoresWithLocations(mockLogger)).rejects.toThrow(
|
||||
'Failed to retrieve stores with locations.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError },
|
||||
'Database error in getAllStoresWithLocations',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteStoreLocation', () => {
|
||||
it('should delete a store location successfully', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rowCount: 1 });
|
||||
|
||||
// Act
|
||||
await storeLocationRepo.deleteStoreLocation(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
'DELETE FROM public.store_locations WHERE store_location_id = $1',
|
||||
[1],
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError when store location does not exist', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rowCount: 0 });
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.deleteStoreLocation(999, mockLogger)).rejects.toThrow(
|
||||
NotFoundError,
|
||||
);
|
||||
await expect(storeLocationRepo.deleteStoreLocation(999, mockLogger)).rejects.toThrow(
|
||||
'Store location with ID 999 not found.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.deleteStoreLocation(1, mockLogger)).rejects.toThrow(
|
||||
'Failed to delete store location.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, storeLocationId: 1 },
|
||||
'Database error in deleteStoreLocation',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateStoreLocation', () => {
|
||||
it('should update a store location to point to a new address', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rowCount: 1 });
|
||||
|
||||
// Act
|
||||
await storeLocationRepo.updateStoreLocation(1, 2, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('UPDATE public.store_locations'),
|
||||
[2, 1],
|
||||
);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('SET address_id = $1'),
|
||||
expect.any(Array),
|
||||
);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('WHERE store_location_id = $2'),
|
||||
expect.any(Array),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw NotFoundError when store location does not exist', async () => {
|
||||
// Arrange
|
||||
mockDb.query.mockResolvedValue({ rowCount: 0 });
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.updateStoreLocation(999, 1, mockLogger)).rejects.toThrow(
|
||||
NotFoundError,
|
||||
);
|
||||
await expect(storeLocationRepo.updateStoreLocation(999, 1, mockLogger)).rejects.toThrow(
|
||||
'Store location with ID 999 not found.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw ForeignKeyConstraintError when new address does not exist', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('insert or update on table violates foreign key constraint');
|
||||
(dbError as any).code = '23503';
|
||||
(dbError as any).constraint = 'store_locations_address_id_fkey';
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.updateStoreLocation(1, 999, mockLogger)).rejects.toThrow(
|
||||
ForeignKeyConstraintError,
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
err: dbError,
|
||||
storeLocationId: 1,
|
||||
newAddressId: 999,
|
||||
code: '23503',
|
||||
}),
|
||||
'Database error in updateStoreLocation',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw a generic error if the database query fails', async () => {
|
||||
// Arrange
|
||||
const dbError = new Error('DB Connection Error');
|
||||
mockDb.query.mockRejectedValue(dbError);
|
||||
|
||||
// Act & Assert
|
||||
await expect(storeLocationRepo.updateStoreLocation(1, 2, mockLogger)).rejects.toThrow(
|
||||
'Failed to update store location.',
|
||||
);
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, storeLocationId: 1, newAddressId: 2 },
|
||||
'Database error in updateStoreLocation',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Transaction Support', () => {
|
||||
it('should use provided pool client for transaction when passed to constructor', async () => {
|
||||
// Arrange
|
||||
const mockPoolClient = {
|
||||
query: vi.fn().mockResolvedValue({
|
||||
rows: [{ store_location_id: 1 }],
|
||||
rowCount: 1,
|
||||
}),
|
||||
};
|
||||
const transactionRepo = new StoreLocationRepository(mockPoolClient);
|
||||
|
||||
// Act
|
||||
await transactionRepo.createStoreLocation(1, 1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolClient.query).toHaveBeenCalled();
|
||||
expect(mockDb.query).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should allow multiple operations within the same transaction context', async () => {
|
||||
// Arrange
|
||||
const mockPoolClient = {
|
||||
query: vi.fn(),
|
||||
};
|
||||
mockPoolClient.query
|
||||
.mockResolvedValueOnce({ rows: [{ store_location_id: 1 }], rowCount: 1 }) // create
|
||||
.mockResolvedValueOnce({ rows: [mockStoreLocation], rowCount: 1 }) // get
|
||||
.mockResolvedValueOnce({ rowCount: 1 }); // delete
|
||||
|
||||
const transactionRepo = new StoreLocationRepository(mockPoolClient);
|
||||
|
||||
// Act - simulating a transaction with multiple operations
|
||||
const locationId = await transactionRepo.createStoreLocation(1, 1, mockLogger);
|
||||
const locations = await transactionRepo.getLocationsByStoreId(1, mockLogger);
|
||||
await transactionRepo.deleteStoreLocation(locationId, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(mockPoolClient.query).toHaveBeenCalledTimes(3);
|
||||
expect(locationId).toBe(1);
|
||||
expect(locations).toEqual([mockStoreLocation]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle store with many locations', async () => {
|
||||
// Arrange
|
||||
const manyLocations = Array.from({ length: 100 }, (_, i) => ({
|
||||
...mockStoreLocation,
|
||||
store_location_id: i + 1,
|
||||
address: {
|
||||
...mockAddress,
|
||||
address_id: i + 1,
|
||||
address_line_1: `${i + 1} Test St`,
|
||||
},
|
||||
}));
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [{ ...mockStore, locations: manyLocations }],
|
||||
rowCount: 1,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getStoreWithLocations(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result.locations).toHaveLength(100);
|
||||
});
|
||||
|
||||
it('should handle null values in optional address fields', async () => {
|
||||
// Arrange
|
||||
const locationWithNullFields: StoreLocationWithAddress = {
|
||||
...mockStoreLocation,
|
||||
address: {
|
||||
...mockAddress,
|
||||
address_line_2: null,
|
||||
latitude: null,
|
||||
longitude: null,
|
||||
},
|
||||
};
|
||||
mockDb.query.mockResolvedValue({
|
||||
rows: [locationWithNullFields],
|
||||
rowCount: 1,
|
||||
});
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getLocationsByStoreId(1, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result[0].address.address_line_2).toBeNull();
|
||||
expect(result[0].address.latitude).toBeNull();
|
||||
expect(result[0].address.longitude).toBeNull();
|
||||
});
|
||||
|
||||
it('should treat zero as a not-found store_location_id (invalid IDs are rejected by the database, not the app)', async () => {
|
||||
// Note: While 0 is technically invalid in PostgreSQL serial columns,
|
||||
// the database should reject it, not the application layer
|
||||
mockDb.query.mockResolvedValue({ rowCount: 0 });
|
||||
|
||||
await expect(storeLocationRepo.deleteStoreLocation(0, mockLogger)).rejects.toThrow(
|
||||
NotFoundError,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle large store IDs near integer limits', async () => {
|
||||
// Arrange
|
||||
const largeId = 2147483647; // Max 32-bit signed integer
|
||||
mockDb.query.mockResolvedValue({ rows: [], rowCount: 0 });
|
||||
|
||||
// Act
|
||||
const result = await storeLocationRepo.getLocationsByStoreId(largeId, mockLogger);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual([]);
|
||||
expect(mockDb.query).toHaveBeenCalledWith(expect.any(String), [largeId]);
|
||||
});
|
||||
});
|
||||
});
|
||||
465
src/services/featureFlags.server.test.ts
Normal file
@@ -0,0 +1,465 @@
|
||||
// src/services/featureFlags.server.test.ts
|
||||
/**
|
||||
* Unit tests for the Feature Flags Service (ADR-024).
|
||||
*
|
||||
* These tests verify:
|
||||
* - isFeatureEnabled() returns correct boolean for each flag
|
||||
* - isFeatureEnabled() handles all valid flag names
|
||||
* - getFeatureFlags() returns all flags and their states
|
||||
* - getEnabledFeatureFlags() returns only enabled flags
|
||||
* - Convenience exports return correct values
|
||||
* - Default behavior (all flags disabled when not set)
|
||||
* - Environment variable parsing for enabled/disabled states
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
|
||||
// Store original process.env
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
describe('featureFlags.server', () => {
|
||||
beforeEach(() => {
|
||||
// Reset modules before each test to allow re-importing with different env vars
|
||||
vi.resetModules();
|
||||
// Reset process.env to original state
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original process.env
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
/**
|
||||
* Helper to set up the minimum required environment variables for config to load.
|
||||
* This includes database, redis, and auth config that are required by Zod validation.
|
||||
*/
|
||||
const setMinimalValidEnv = (overrides: Record<string, string> = {}) => {
|
||||
process.env = {
|
||||
...process.env,
|
||||
// Required config
|
||||
NODE_ENV: 'test',
|
||||
DB_HOST: 'localhost',
|
||||
DB_USER: 'test',
|
||||
DB_PASSWORD: 'test',
|
||||
DB_NAME: 'test',
|
||||
REDIS_URL: 'redis://localhost:6379',
|
||||
JWT_SECRET: 'test-secret-must-be-at-least-32-characters-long',
|
||||
// Feature flags default to false, so we override as needed
|
||||
...overrides,
|
||||
};
|
||||
};
|
||||
|
||||
describe('isFeatureEnabled()', () => {
|
||||
it('should return false for all flags when no feature flags are set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('bugsinkSync')).toBe(false);
|
||||
expect(isFeatureEnabled('advancedRbac')).toBe(false);
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(false);
|
||||
expect(isFeatureEnabled('betaRecipes')).toBe(false);
|
||||
expect(isFeatureEnabled('experimentalAi')).toBe(false);
|
||||
expect(isFeatureEnabled('debugMode')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for bugsinkSync when FEATURE_BUGSINK_SYNC is set to "true"', async () => {
|
||||
setMinimalValidEnv({ FEATURE_BUGSINK_SYNC: 'true' });
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('bugsinkSync')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for advancedRbac when FEATURE_ADVANCED_RBAC is set to "true"', async () => {
|
||||
setMinimalValidEnv({ FEATURE_ADVANCED_RBAC: 'true' });
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('advancedRbac')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for newDashboard when FEATURE_NEW_DASHBOARD is set to "true"', async () => {
|
||||
setMinimalValidEnv({ FEATURE_NEW_DASHBOARD: 'true' });
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for betaRecipes when FEATURE_BETA_RECIPES is set to "true"', async () => {
|
||||
setMinimalValidEnv({ FEATURE_BETA_RECIPES: 'true' });
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('betaRecipes')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for experimentalAi when FEATURE_EXPERIMENTAL_AI is set to "true"', async () => {
|
||||
setMinimalValidEnv({ FEATURE_EXPERIMENTAL_AI: 'true' });
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('experimentalAi')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for debugMode when FEATURE_DEBUG_MODE is set to "true"', async () => {
|
||||
setMinimalValidEnv({ FEATURE_DEBUG_MODE: 'true' });
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('debugMode')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when flag is set to "false"', async () => {
|
||||
setMinimalValidEnv({
|
||||
FEATURE_NEW_DASHBOARD: 'false',
|
||||
});
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for non-"true" string values', async () => {
|
||||
setMinimalValidEnv({
|
||||
FEATURE_NEW_DASHBOARD: 'TRUE', // uppercase
|
||||
});
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
// The booleanString helper only checks for exact 'true' match
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for empty string value', async () => {
|
||||
setMinimalValidEnv({
|
||||
FEATURE_NEW_DASHBOARD: '',
|
||||
});
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle multiple flags enabled simultaneously', async () => {
|
||||
setMinimalValidEnv({
|
||||
FEATURE_NEW_DASHBOARD: 'true',
|
||||
FEATURE_BETA_RECIPES: 'true',
|
||||
FEATURE_DEBUG_MODE: 'true',
|
||||
});
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(true);
|
||||
expect(isFeatureEnabled('betaRecipes')).toBe(true);
|
||||
expect(isFeatureEnabled('debugMode')).toBe(true);
|
||||
// These should still be false
|
||||
expect(isFeatureEnabled('bugsinkSync')).toBe(false);
|
||||
expect(isFeatureEnabled('advancedRbac')).toBe(false);
|
||||
expect(isFeatureEnabled('experimentalAi')).toBe(false);
|
||||
});
|
||||
});
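// The 'TRUE', '' and unset cases above all resolve to false because the
// booleanString helper mentioned in the comments does an exact string match.
// A standalone sketch of what such a helper in the Zod-validated
// src/config/env.ts presumably looks like; the exact schema may differ.

import { z } from 'zod';

// Only the literal string 'true' enables a flag; 'TRUE', '1', '' and an
// unset variable all parse to false.
const booleanString = z
  .string()
  .optional()
  .transform((value) => value === 'true');

// Example: a slice of the feature-flag schema built from FEATURE_* variables.
const featureFlagEnvSchema = z.object({
  FEATURE_NEW_DASHBOARD: booleanString,
  FEATURE_DEBUG_MODE: booleanString,
});

// featureFlagEnvSchema.parse({ FEATURE_NEW_DASHBOARD: 'TRUE' })
//   => { FEATURE_NEW_DASHBOARD: false, FEATURE_DEBUG_MODE: false }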
|
||||
|
||||
describe('getFeatureFlags()', () => {
|
||||
it('should return all flags with their current states', async () => {
|
||||
setMinimalValidEnv({
|
||||
FEATURE_NEW_DASHBOARD: 'true',
|
||||
FEATURE_DEBUG_MODE: 'true',
|
||||
});
|
||||
const { getFeatureFlags } = await import('./featureFlags.server');
|
||||
|
||||
const flags = getFeatureFlags();
|
||||
|
||||
expect(flags).toEqual({
|
||||
bugsinkSync: false,
|
||||
advancedRbac: false,
|
||||
newDashboard: true,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return a copy of flags (not the original object)', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { getFeatureFlags, isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
const flags = getFeatureFlags();
|
||||
|
||||
// Modifying the returned object should not affect the original
|
||||
(flags as Record<string, boolean>).newDashboard = true;
|
||||
|
||||
// The original should still be false
|
||||
expect(isFeatureEnabled('newDashboard')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return all flags as false when no flags are set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { getFeatureFlags } = await import('./featureFlags.server');
|
||||
|
||||
const flags = getFeatureFlags();
|
||||
|
||||
// All values should be false
|
||||
Object.values(flags).forEach((value) => {
|
||||
expect(value).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
it('should include all expected flag names', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { getFeatureFlags } = await import('./featureFlags.server');
|
||||
|
||||
const flags = getFeatureFlags();
|
||||
const expectedFlags = [
|
||||
'bugsinkSync',
|
||||
'advancedRbac',
|
||||
'newDashboard',
|
||||
'betaRecipes',
|
||||
'experimentalAi',
|
||||
'debugMode',
|
||||
];
|
||||
|
||||
expect(Object.keys(flags).sort()).toEqual(expectedFlags.sort());
|
||||
});
|
||||
});
|
||||
|
||||
describe('getEnabledFeatureFlags()', () => {
|
||||
it('should return an empty array when no flags are enabled', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { getEnabledFeatureFlags } = await import('./featureFlags.server');
|
||||
|
||||
const enabledFlags = getEnabledFeatureFlags();
|
||||
|
||||
expect(enabledFlags).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return only enabled flag names', async () => {
|
||||
setMinimalValidEnv({
|
||||
FEATURE_NEW_DASHBOARD: 'true',
|
||||
FEATURE_DEBUG_MODE: 'true',
|
||||
});
|
||||
const { getEnabledFeatureFlags } = await import('./featureFlags.server');
|
||||
|
||||
const enabledFlags = getEnabledFeatureFlags();
|
||||
|
||||
expect(enabledFlags).toHaveLength(2);
|
||||
expect(enabledFlags).toContain('newDashboard');
|
||||
expect(enabledFlags).toContain('debugMode');
|
||||
expect(enabledFlags).not.toContain('bugsinkSync');
|
||||
expect(enabledFlags).not.toContain('advancedRbac');
|
||||
});
|
||||
|
||||
it('should return all flag names when all flags are enabled', async () => {
|
||||
setMinimalValidEnv({
|
||||
FEATURE_BUGSINK_SYNC: 'true',
|
||||
FEATURE_ADVANCED_RBAC: 'true',
|
||||
FEATURE_NEW_DASHBOARD: 'true',
|
||||
FEATURE_BETA_RECIPES: 'true',
|
||||
FEATURE_EXPERIMENTAL_AI: 'true',
|
||||
FEATURE_DEBUG_MODE: 'true',
|
||||
});
|
||||
const { getEnabledFeatureFlags } = await import('./featureFlags.server');
|
||||
|
||||
const enabledFlags = getEnabledFeatureFlags();
|
||||
|
||||
expect(enabledFlags).toHaveLength(6);
|
||||
expect(enabledFlags).toContain('bugsinkSync');
|
||||
expect(enabledFlags).toContain('advancedRbac');
|
||||
expect(enabledFlags).toContain('newDashboard');
|
||||
expect(enabledFlags).toContain('betaRecipes');
|
||||
expect(enabledFlags).toContain('experimentalAi');
|
||||
expect(enabledFlags).toContain('debugMode');
|
||||
});
|
||||
});
|
||||
|
||||
describe('convenience exports', () => {
|
||||
it('should export isBugsinkSyncEnabled as false when flag is not set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isBugsinkSyncEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isBugsinkSyncEnabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isBugsinkSyncEnabled as true when flag is set', async () => {
|
||||
setMinimalValidEnv({ FEATURE_BUGSINK_SYNC: 'true' });
|
||||
const { isBugsinkSyncEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isBugsinkSyncEnabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isAdvancedRbacEnabled as false when flag is not set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isAdvancedRbacEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isAdvancedRbacEnabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isAdvancedRbacEnabled as true when flag is set', async () => {
|
||||
setMinimalValidEnv({ FEATURE_ADVANCED_RBAC: 'true' });
|
||||
const { isAdvancedRbacEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isAdvancedRbacEnabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isNewDashboardEnabled as false when flag is not set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isNewDashboardEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isNewDashboardEnabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isNewDashboardEnabled as true when flag is set', async () => {
|
||||
setMinimalValidEnv({ FEATURE_NEW_DASHBOARD: 'true' });
|
||||
const { isNewDashboardEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isNewDashboardEnabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isBetaRecipesEnabled as false when flag is not set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isBetaRecipesEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isBetaRecipesEnabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isBetaRecipesEnabled as true when flag is set', async () => {
|
||||
setMinimalValidEnv({ FEATURE_BETA_RECIPES: 'true' });
|
||||
const { isBetaRecipesEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isBetaRecipesEnabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isExperimentalAiEnabled as false when flag is not set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isExperimentalAiEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isExperimentalAiEnabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isExperimentalAiEnabled as true when flag is set', async () => {
|
||||
setMinimalValidEnv({ FEATURE_EXPERIMENTAL_AI: 'true' });
|
||||
const { isExperimentalAiEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isExperimentalAiEnabled).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isDebugModeEnabled as false when flag is not set', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isDebugModeEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isDebugModeEnabled).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isDebugModeEnabled as true when flag is set', async () => {
|
||||
setMinimalValidEnv({ FEATURE_DEBUG_MODE: 'true' });
|
||||
const { isDebugModeEnabled } = await import('./featureFlags.server');
|
||||
|
||||
expect(isDebugModeEnabled).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('FeatureFlagName type', () => {
|
||||
it('should re-export the FeatureFlagName type from env.ts', async () => {
|
||||
setMinimalValidEnv();
|
||||
const featureFlagsModule = await import('./featureFlags.server');
|
||||
|
||||
// TypeScript will enforce that FeatureFlagName is properly exported
|
||||
// This test verifies the export exists at runtime
|
||||
expect(featureFlagsModule).toHaveProperty('isFeatureEnabled');
|
||||
|
||||
// The type export is verified by TypeScript compilation
|
||||
// This runtime test ensures the module loads correctly
|
||||
});
|
||||
});
|
||||
|
||||
describe('development mode logging', () => {
|
||||
it('should log feature flag checks in development mode', async () => {
|
||||
setMinimalValidEnv();
|
||||
// Override NODE_ENV to development for this test
|
||||
process.env.NODE_ENV = 'development';
|
||||
|
||||
// Mock the logger
|
||||
const mockLogger = {
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
child: vi.fn().mockReturnThis(),
|
||||
};
|
||||
|
||||
vi.doMock('./logger.server', () => ({
|
||||
logger: mockLogger,
|
||||
}));
|
||||
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
isFeatureEnabled('newDashboard');
|
||||
|
||||
// In development mode, the logger.debug should be called
|
||||
expect(mockLogger.debug).toHaveBeenCalledWith(
|
||||
{ flag: 'newDashboard', enabled: false },
|
||||
'Feature flag checked',
|
||||
);
|
||||
});
|
||||
|
||||
it('should not log in test mode', async () => {
|
||||
setMinimalValidEnv();
|
||||
|
||||
// Mock the logger
|
||||
const mockLogger = {
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
child: vi.fn().mockReturnThis(),
|
||||
};
|
||||
|
||||
vi.doMock('./logger.server', () => ({
|
||||
logger: mockLogger,
|
||||
}));
|
||||
|
||||
const { isFeatureEnabled } = await import('./featureFlags.server');
|
||||
|
||||
isFeatureEnabled('newDashboard');
|
||||
|
||||
// In test mode (NODE_ENV=test), the logger.debug should not be called
|
||||
expect(mockLogger.debug).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('isFeatureFlagEnabled in env.ts', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
const setMinimalValidEnv = (overrides: Record<string, string> = {}) => {
|
||||
process.env = {
|
||||
...process.env,
|
||||
NODE_ENV: 'test',
|
||||
DB_HOST: 'localhost',
|
||||
DB_USER: 'test',
|
||||
DB_PASSWORD: 'test',
|
||||
DB_NAME: 'test',
|
||||
REDIS_URL: 'redis://localhost:6379',
|
||||
JWT_SECRET: 'test-secret-must-be-at-least-32-characters-long',
|
||||
...overrides,
|
||||
};
|
||||
};
|
||||
|
||||
it('should return correct value from isFeatureFlagEnabled in env.ts', async () => {
|
||||
setMinimalValidEnv({ FEATURE_NEW_DASHBOARD: 'true' });
|
||||
const { isFeatureFlagEnabled } = await import('../config/env');
|
||||
|
||||
expect(isFeatureFlagEnabled('newDashboard')).toBe(true);
|
||||
expect(isFeatureFlagEnabled('betaRecipes')).toBe(false);
|
||||
});
|
||||
|
||||
it('should default to false for undefined flags', async () => {
|
||||
setMinimalValidEnv();
|
||||
const { isFeatureFlagEnabled } = await import('../config/env');
|
||||
|
||||
expect(isFeatureFlagEnabled('bugsinkSync')).toBe(false);
|
||||
expect(isFeatureFlagEnabled('advancedRbac')).toBe(false);
|
||||
expect(isFeatureFlagEnabled('newDashboard')).toBe(false);
|
||||
expect(isFeatureFlagEnabled('betaRecipes')).toBe(false);
|
||||
expect(isFeatureFlagEnabled('experimentalAi')).toBe(false);
|
||||
expect(isFeatureFlagEnabled('debugMode')).toBe(false);
|
||||
});
|
||||
});
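The last two tests exercise isFeatureFlagEnabled, which lives in src/config/env.ts and is not part of this diff. A minimal sketch of the assumed shape, derived only from what these tests and the service below rely on (the real Zod schema and config wiring may differ):

export type FeatureFlagName =
  | 'bugsinkSync'
  | 'advancedRbac'
  | 'newDashboard'
  | 'betaRecipes'
  | 'experimentalAi'
  | 'debugMode';

// Assumed: the validated config exposes one boolean per flag, each
// defaulting to false when its FEATURE_* variable is unset.
declare const config: { featureFlags: Record<FeatureFlagName, boolean> };

export function isFeatureFlagEnabled(flagName: FeatureFlagName): boolean {
  return config.featureFlags[flagName] === true;
}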
|
||||
169
src/services/featureFlags.server.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
// src/services/featureFlags.server.ts
|
||||
/**
|
||||
* Feature Flags Service (ADR-024)
|
||||
*
|
||||
* This module provides a centralized service for accessing feature flags
|
||||
* on the backend. It integrates with the Zod-validated configuration in
|
||||
* `src/config/env.ts` and provides type-safe access patterns.
|
||||
*
|
||||
* All feature flags default to `false` (disabled) following an opt-in model.
|
||||
* Set the corresponding `FEATURE_*` environment variable to 'true' to enable.
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { isFeatureEnabled, getFeatureFlags } from './services/featureFlags.server';
|
||||
*
|
||||
* // Check a specific flag
|
||||
* if (isFeatureEnabled('newDashboard')) {
|
||||
* // Use new dashboard logic
|
||||
* }
|
||||
*
|
||||
* // Get all flags (for admin endpoints)
|
||||
* const allFlags = getFeatureFlags();
|
||||
* ```
|
||||
*
|
||||
* @see docs/adr/0024-feature-flagging-strategy.md for architecture details
|
||||
*/
|
||||
|
||||
import { config, isDevelopment, FeatureFlagName } from '../config/env';
|
||||
import { logger } from './logger.server';
|
||||
|
||||
// Re-export FeatureFlagName for convenience
|
||||
export type { FeatureFlagName };
|
||||
|
||||
/**
|
||||
* Check if a feature flag is enabled.
|
||||
*
|
||||
* In development mode, this function logs the flag check for debugging purposes.
|
||||
* In production/test, logging is omitted to avoid performance overhead.
|
||||
*
|
||||
* @param flagName - The name of the feature flag to check (type-safe)
|
||||
* @returns boolean indicating if the feature is enabled
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { isFeatureEnabled } from '../services/featureFlags.server';
|
||||
*
|
||||
* // In a route handler
|
||||
* router.get('/dashboard', async (req, res) => {
|
||||
* if (isFeatureEnabled('newDashboard')) {
|
||||
* return sendSuccess(res, { version: 'v2', data: await getNewDashboardData() });
|
||||
* }
|
||||
* return sendSuccess(res, { version: 'v1', data: await getLegacyDashboardData() });
|
||||
* });
|
||||
*
|
||||
* // In a service
|
||||
* function processFlyer(flyer: Flyer): ProcessedFlyer {
|
||||
* if (isFeatureEnabled('experimentalAi')) {
|
||||
* return processWithExperimentalAi(flyer);
|
||||
* }
|
||||
* return processWithStandardAi(flyer);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function isFeatureEnabled(flagName: FeatureFlagName): boolean {
|
||||
const enabled = config.featureFlags[flagName];
|
||||
|
||||
if (isDevelopment) {
|
||||
logger.debug({ flag: flagName, enabled }, 'Feature flag checked');
|
||||
}
|
||||
|
||||
return enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all feature flags and their current states.
|
||||
*
|
||||
* This function returns a shallow copy of all feature flags,
|
||||
* useful for admin/debug endpoints and monitoring dashboards.
|
||||
*
|
||||
* @returns Record of all feature flag names to their boolean states
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { getFeatureFlags } from '../services/featureFlags.server';
|
||||
*
|
||||
* // In an admin route handler
|
||||
* router.get('/admin/feature-flags', requireAdmin, async (req, res) => {
|
||||
* const flags = getFeatureFlags();
|
||||
* sendSuccess(res, { flags });
|
||||
* });
|
||||
*
|
||||
* // Result:
|
||||
* // {
|
||||
* // "bugsinkSync": false,
|
||||
* // "advancedRbac": false,
|
||||
* // "newDashboard": true,
|
||||
* // "betaRecipes": false,
|
||||
* // "experimentalAi": false,
|
||||
* // "debugMode": false
|
||||
* // }
|
||||
* ```
|
||||
*/
|
||||
export function getFeatureFlags(): Record<FeatureFlagName, boolean> {
|
||||
// Return a shallow copy to prevent external mutation
|
||||
return { ...config.featureFlags };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of all enabled feature flags.
|
||||
*
|
||||
* Useful for logging and diagnostics to quickly see which features are active.
|
||||
*
|
||||
* @returns Array of feature flag names that are currently enabled
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* import { getEnabledFeatureFlags } from '../services/featureFlags.server';
|
||||
*
|
||||
* // Log enabled features at startup
|
||||
* const enabled = getEnabledFeatureFlags();
|
||||
* if (enabled.length > 0) {
|
||||
* logger.info({ enabledFlags: enabled }, 'Active feature flags');
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function getEnabledFeatureFlags(): FeatureFlagName[] {
|
||||
const flags = config.featureFlags;
|
||||
return (Object.keys(flags) as FeatureFlagName[]).filter((key) => flags[key]);
|
||||
}
|
||||
|
||||
// --- Convenience Exports ---
|
||||
// These are evaluated once at module load time (startup).
|
||||
// Use these for simple boolean checks when you don't need dynamic behavior.
|
||||
|
||||
/**
|
||||
* True if Bugsink error sync integration is enabled.
|
||||
* @see FEATURE_BUGSINK_SYNC environment variable
|
||||
*/
|
||||
export const isBugsinkSyncEnabled = config.featureFlags.bugsinkSync;
|
||||
|
||||
/**
|
||||
* True if advanced RBAC features are enabled.
|
||||
* @see FEATURE_ADVANCED_RBAC environment variable
|
||||
*/
|
||||
export const isAdvancedRbacEnabled = config.featureFlags.advancedRbac;
|
||||
|
||||
/**
|
||||
* True if new dashboard experience is enabled.
|
||||
* @see FEATURE_NEW_DASHBOARD environment variable
|
||||
*/
|
||||
export const isNewDashboardEnabled = config.featureFlags.newDashboard;
|
||||
|
||||
/**
|
||||
* True if beta recipe features are enabled.
|
||||
* @see FEATURE_BETA_RECIPES environment variable
|
||||
*/
|
||||
export const isBetaRecipesEnabled = config.featureFlags.betaRecipes;
|
||||
|
||||
/**
|
||||
* True if experimental AI features are enabled.
|
||||
* @see FEATURE_EXPERIMENTAL_AI environment variable
|
||||
*/
|
||||
export const isExperimentalAiEnabled = config.featureFlags.experimentalAi;
|
||||
|
||||
/**
|
||||
* True if debug mode is enabled.
|
||||
* @see FEATURE_DEBUG_MODE environment variable
|
||||
*/
|
||||
export const isDebugModeEnabled = config.featureFlags.debugMode;
|
||||
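Because the convenience exports above are evaluated once at module load, they are snapshots of the startup configuration, while isFeatureEnabled() consults the config object on every call. A short usage sketch built only from the functions defined in this file (the importing module's location, and therefore the relative import specifiers, are illustrative):

import {
  isFeatureEnabled,
  isDebugModeEnabled,
  getEnabledFeatureFlags,
} from './services/featureFlags.server';
import { logger } from './services/logger.server';

// Static check: fine for behaviour that only changes with a restart.
if (isDebugModeEnabled) {
  logger.info('Debug mode is enabled for this process');
}

// Per-call check: use inside request handlers and long-lived services.
export function pickDashboardVersion(): 'v1' | 'v2' {
  return isFeatureEnabled('newDashboard') ? 'v2' : 'v1';
}

// Startup diagnostics, as suggested in the getEnabledFeatureFlags() docs.
const enabledFlags = getEnabledFeatureFlags();
if (enabledFlags.length > 0) {
  logger.info({ enabledFlags }, 'Active feature flags');
}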
@@ -331,7 +331,9 @@ describe('FlyerAiProcessor', () => {
|
||||
|
||||
expect(result.needsReview).toBe(true);
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ qualityIssues: ['Missing both valid_from and valid_to dates'] }),
|
||||
expect.objectContaining({
|
||||
qualityIssues: ['Missing validity dates (valid_from or valid_to)'],
|
||||
}),
|
||||
expect.stringContaining('AI response has quality issues.'),
|
||||
);
|
||||
});
|
||||
@@ -358,10 +360,10 @@ describe('FlyerAiProcessor', () => {
|
||||
qualityIssues: [
|
||||
'Missing store name',
|
||||
'No items were extracted',
|
||||
'Missing both valid_from and valid_to dates',
|
||||
'Missing validity dates (valid_from or valid_to)',
|
||||
],
|
||||
},
|
||||
'AI response has quality issues. Flagging for review. Issues: Missing store name, No items were extracted, Missing both valid_from and valid_to dates',
|
||||
'AI response has quality issues. Flagging for review. Issues: Missing store name, No items were extracted, Missing validity dates (valid_from or valid_to)',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -99,8 +99,8 @@ export class FlyerAiProcessor {
|
||||
}
|
||||
|
||||
// 4. Check for flyer validity dates.
|
||||
if (!valid_from && !valid_to) {
|
||||
qualityIssues.push('Missing both valid_from and valid_to dates');
|
||||
if (!valid_from || !valid_to) {
|
||||
qualityIssues.push('Missing validity dates (valid_from or valid_to)');
|
||||
}
|
||||
|
||||
const needsReview = qualityIssues.length > 0;
|
||||
|
||||
@@ -1,4 +1,18 @@
|
||||
// src/services/receiptService.server.test.ts
|
||||
/**
|
||||
* @file Comprehensive unit tests for the Receipt Service
|
||||
* Tests receipt processing logic, OCR extraction, text parsing, and error handling.
|
||||
*
|
||||
* Coverage includes:
|
||||
* - Receipt CRUD operations (create, read, delete)
|
||||
* - Receipt processing workflow (OCR extraction, store detection, item parsing)
|
||||
* - Receipt item management
|
||||
* - Processing logs and statistics
|
||||
* - Store pattern management
|
||||
* - Job processing via BullMQ
|
||||
* - Edge cases and error handling
|
||||
* - Internal parsing logic patterns
|
||||
*/
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import type { Logger } from 'pino';
|
||||
import type { Job } from 'bullmq';
|
||||
@@ -11,6 +25,8 @@ import type {
|
||||
ReceiptProcessingStatus,
|
||||
OcrProvider,
|
||||
ReceiptProcessingLogRecord,
|
||||
ReceiptScan,
|
||||
ReceiptItem,
|
||||
} from '../types/expiry';
|
||||
|
||||
// Mock dependencies
|
||||
@@ -1035,4 +1051,391 @@ describe('receiptService.server', () => {
|
||||
expect(textLines).toEqual(['MILK 2% - $4.99', 'BREAD - $2.99']);
|
||||
});
|
||||
});
|
||||
|
||||
// ==========================================================================
|
||||
// ADDITIONAL EDGE CASES AND ERROR HANDLING TESTS
|
||||
// ==========================================================================
|
||||
|
||||
describe('Receipt Processing Edge Cases', () => {
|
||||
it('should handle empty receipt items array from AI', async () => {
|
||||
const mockReceipt = createMockReceiptScan({ receipt_id: 10 });
|
||||
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
|
||||
|
||||
const result = await processReceipt(10, mockLogger);
|
||||
|
||||
expect(result.items).toHaveLength(0);
|
||||
expect(result.receipt.status).toBe('completed');
|
||||
});
|
||||
|
||||
it('should handle receipts with discount items (negative prices)', async () => {
|
||||
const mockReceipt = createMockReceiptScan({ receipt_id: 11 });
|
||||
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
|
||||
|
||||
// Mock items including a discount
|
||||
const mockItems: ReceiptItem[] = [
|
||||
createMockReceiptItem({ receipt_item_id: 1, price_paid_cents: 500 }),
|
||||
createMockReceiptItem({
|
||||
receipt_item_id: 2,
|
||||
raw_item_description: 'COUPON DISCOUNT',
|
||||
price_paid_cents: -100,
|
||||
is_discount: true,
|
||||
}),
|
||||
];
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce(mockItems);
|
||||
|
||||
const result = await processReceipt(11, mockLogger);
|
||||
|
||||
expect(result.items).toHaveLength(2);
|
||||
expect(result.items.find((i) => i.is_discount)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should handle receipts with maximum retry count', async () => {
|
||||
const _mockReceipt = createMockReceiptScan({
|
||||
receipt_id: 12,
|
||||
retry_count: 2, // Already at 2 retries (one more allowed before MAX_RETRY_ATTEMPTS=3)
|
||||
});
|
||||
|
||||
vi.mocked(receiptRepo.updateReceipt).mockRejectedValue(new Error('Persistent failure'));
|
||||
vi.mocked(receiptRepo.incrementRetryCount).mockResolvedValueOnce(3); // Now at max
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
|
||||
await expect(processReceipt(12, mockLogger)).rejects.toThrow('Persistent failure');
|
||||
|
||||
expect(receiptRepo.incrementRetryCount).toHaveBeenCalledWith(12, expect.any(Object));
|
||||
});
|
||||
});
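// The max-retry test above only requires that processReceipt bumps the retry
// counter on failure and rethrows, leaving requeue/failure decisions to the
// BullMQ worker. A standalone sketch of that failure path; MAX_RETRY_ATTEMPTS
// comes from the test comment, the remaining names are illustrative.

import type { Logger } from 'pino';

declare const receiptRepo: {
  // Second argument shown as the logger; the test only requires an object here.
  incrementRetryCount(receiptId: number, logger: Logger): Promise<number>;
};

const MAX_RETRY_ATTEMPTS = 3;

async function handleProcessingFailure(
  receiptId: number,
  error: unknown,
  logger: Logger,
): Promise<never> {
  const retryCount = await receiptRepo.incrementRetryCount(receiptId, logger);
  if (retryCount >= MAX_RETRY_ATTEMPTS) {
    logger.warn({ receiptId, retryCount }, 'Receipt reached maximum retry attempts');
  }
  // Rethrow so the job runner records the attempt as failed.
  throw error;
}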
|
||||
|
||||
describe('Receipt Item Validation', () => {
|
||||
it('should handle items with zero quantity', () => {
|
||||
// Test the quantity extraction logic
|
||||
const item = {
|
||||
raw_item_description: 'FREE SAMPLE',
|
||||
quantity: 0,
|
||||
price_paid_cents: 0,
|
||||
};
|
||||
|
||||
expect(item.quantity).toBe(0);
|
||||
expect(item.price_paid_cents).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle items with very long descriptions', () => {
|
||||
const longDescription =
|
||||
'ORGANIC FREE-RANGE CHICKEN BREAST BONELESS SKINLESS FAMILY PACK 3.5LB AVG';
|
||||
|
||||
expect(longDescription.length).toBeGreaterThan(50);
|
||||
// The service should handle long descriptions without truncation at this level
|
||||
expect(longDescription).toContain('CHICKEN');
|
||||
});
|
||||
|
||||
it('should handle items with special characters in description', () => {
|
||||
const specialChars = ['BREAD & BUTTER', "ANNIE'S MAC", 'ITEM #1234', 'PRICE: $5.99'];
|
||||
|
||||
specialChars.forEach((desc) => {
|
||||
// These should all be valid descriptions
|
||||
expect(desc.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
it('should calculate discount status correctly for negative prices', () => {
|
||||
const isDiscount = (priceCents: number) => priceCents < 0;
|
||||
|
||||
expect(isDiscount(-100)).toBe(true);
|
||||
expect(isDiscount(-1)).toBe(true);
|
||||
expect(isDiscount(0)).toBe(false);
|
||||
expect(isDiscount(100)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Store Detection Logic', () => {
|
||||
it('should skip store detection when store_location_id is already set', async () => {
|
||||
const mockReceipt = createMockReceiptScan({
|
||||
receipt_id: 13,
|
||||
store_location_id: 5, // Already has a store
|
||||
});
|
||||
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
|
||||
|
||||
await processReceipt(13, mockLogger);
|
||||
|
||||
// Store detection should not be attempted
|
||||
expect(receiptRepo.detectStoreFromText).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle store detection returning null', async () => {
|
||||
const mockReceipt = createMockReceiptScan({
|
||||
receipt_id: 14,
|
||||
store_location_id: null,
|
||||
});
|
||||
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
|
||||
|
||||
const result = await processReceipt(14, mockLogger);
|
||||
|
||||
expect(result.receipt.status).toBe('completed');
|
||||
// Should log that no store was found
|
||||
expect(receiptRepo.logProcessingStep).toHaveBeenCalledWith(
|
||||
14,
|
||||
'store_detection',
|
||||
'completed',
|
||||
expect.any(Object),
|
||||
expect.objectContaining({
|
||||
outputData: expect.objectContaining({ storeId: null }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
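// Together these tests define the store-detection contract: detection is
// skipped when the receipt already has a store_location_id, and a null result
// is logged as a completed 'store_detection' step with storeId: null rather
// than treated as an error. A standalone sketch of that step under those
// assumptions; the argument order of logProcessingStep mirrors the assertion
// above, the repository signatures are otherwise illustrative.

import type { Logger } from 'pino';

declare const receiptRepo: {
  detectStoreFromText(rawText: string, logger: Logger): Promise<number | null>;
  logProcessingStep(
    receiptId: number,
    step: string,
    status: string,
    logger: Logger,
    details: { outputData?: Record<string, unknown> },
  ): Promise<unknown>;
};

async function runStoreDetection(
  receipt: { receipt_id: number; store_location_id: number | null; raw_text: string | null },
  logger: Logger,
): Promise<number | null> {
  // Already attached to a store: nothing to detect.
  if (receipt.store_location_id !== null) {
    return receipt.store_location_id;
  }

  const storeId = await receiptRepo.detectStoreFromText(receipt.raw_text ?? '', logger);

  await receiptRepo.logProcessingStep(receipt.receipt_id, 'store_detection', 'completed', logger, {
    outputData: { storeId },
  });

  return storeId;
}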
|
||||
|
||||
describe('Processing Log Consistency', () => {
|
||||
it('should log all processing steps in correct order', async () => {
|
||||
const mockReceipt = createMockReceiptScan({ receipt_id: 15 });
|
||||
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
|
||||
|
||||
await processReceipt(15, mockLogger);
|
||||
|
||||
// Verify processing steps are logged
|
||||
const logCalls = vi.mocked(receiptRepo.logProcessingStep).mock.calls;
|
||||
const loggedSteps = logCalls.map((call) => call[1]);
|
||||
|
||||
expect(loggedSteps).toContain('ocr_extraction');
|
||||
expect(loggedSteps).toContain('text_parsing');
|
||||
expect(loggedSteps).toContain('item_extraction');
|
||||
expect(loggedSteps).toContain('finalization');
|
||||
});
|
||||
|
||||
it('should log duration for finalization step', async () => {
|
||||
const mockReceipt = createMockReceiptScan({ receipt_id: 16 });
|
||||
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
|
||||
|
||||
await processReceipt(16, mockLogger);
|
||||
|
||||
// Find the finalization log call
|
||||
const logCalls = vi.mocked(receiptRepo.logProcessingStep).mock.calls;
|
||||
const finalizationCall = logCalls.find((call) => call[1] === 'finalization');
|
||||
|
||||
expect(finalizationCall).toBeDefined();
|
||||
expect(finalizationCall?.[4]).toHaveProperty('durationMs');
|
||||
});
|
||||
});
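// The duration assertion above implies the finalization step is timed and the
// elapsed milliseconds are passed in the fifth argument to logProcessingStep.
// A standalone sketch of that timing, with everything except the step name and
// the durationMs field treated as illustrative.

import type { Logger } from 'pino';

declare const receiptRepo: {
  logProcessingStep(
    receiptId: number,
    step: string,
    status: string,
    logger: Logger,
    details: { durationMs?: number },
  ): Promise<unknown>;
};

async function logFinalization(receiptId: number, logger: Logger, startedAt: number): Promise<void> {
  await receiptRepo.logProcessingStep(receiptId, 'finalization', 'completed', logger, {
    durationMs: Date.now() - startedAt,
  });
}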
|
||||
|
||||
describe('Job Processing Context Propagation', () => {
|
||||
it('should propagate request ID from job metadata', async () => {
|
||||
const mockReceipt = createMockReceiptScan();
|
||||
|
||||
vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
|
||||
|
||||
const mockJob = {
|
||||
id: 'job-ctx-1',
|
||||
data: {
|
||||
receiptId: 1,
|
||||
userId: 'user-1',
|
||||
meta: { requestId: 'req-context-test', userId: 'user-1', origin: 'api' },
|
||||
},
|
||||
attemptsMade: 0,
|
||||
} as Job<ReceiptJobData>;
|
||||
|
||||
await processReceiptJob(mockJob, mockLogger);
|
||||
|
||||
// Verify logger.child was called with context
|
||||
expect(mockLogger.child).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
requestId: 'req-context-test',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle missing job metadata gracefully', async () => {
|
||||
const mockReceipt = createMockReceiptScan();
|
||||
|
||||
vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
|
||||
vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
|
||||
...mockReceipt,
|
||||
status: 'completed' as ReceiptStatus,
|
||||
} as any);
|
||||
vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
|
||||
vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
|
||||
vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
|
||||
|
||||
const mockJob = {
|
||||
id: 'job-no-meta',
|
||||
data: {
|
||||
receiptId: 1,
|
||||
userId: 'user-1',
|
||||
// No meta property
|
||||
},
|
||||
attemptsMade: 0,
|
||||
} as Job<ReceiptJobData>;
|
||||
|
||||
// Should not throw
|
||||
const result = await processReceiptJob(mockJob, mockLogger);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
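// Both tests assume processReceiptJob derives a child logger from optional
// job metadata before delegating to processReceipt, and that a missing meta
// object is tolerated. A standalone sketch of that wiring; the ReceiptJobData
// shape follows the mock jobs above, the rest is illustrative.

import type { Job } from 'bullmq';
import type { Logger } from 'pino';

interface ReceiptJobDataSketch {
  receiptId: number;
  userId: string;
  meta?: { requestId?: string; userId?: string; origin?: string };
}

declare function processReceipt(receiptId: number, logger: Logger): Promise<unknown>;

async function processReceiptJobSketch(
  job: Job<ReceiptJobDataSketch>,
  logger: Logger,
): Promise<{ success: boolean }> {
  // Only bind fields that are actually present so missing metadata is harmless.
  const jobLogger = logger.child({
    jobId: job.id,
    ...(job.data.meta?.requestId ? { requestId: job.data.meta.requestId } : {}),
  });

  await processReceipt(job.data.receiptId, jobLogger);
  return { success: true };
}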
|
||||
|
||||
describe('Currency and Monetary Value Handling', () => {
|
||||
it('should correctly convert cents to dollars for display', () => {
|
||||
const formatCents = (cents: number) => `$${(cents / 100).toFixed(2)}`;
|
||||
|
||||
expect(formatCents(499)).toBe('$4.99');
|
||||
expect(formatCents(100)).toBe('$1.00');
|
||||
expect(formatCents(1)).toBe('$0.01');
|
||||
expect(formatCents(0)).toBe('$0.00');
|
||||
expect(formatCents(12345)).toBe('$123.45');
|
||||
});
|
||||
|
||||
it('should handle negative values for discounts', () => {
|
||||
const formatCents = (cents: number) => {
|
||||
const absValue = Math.abs(cents);
|
||||
const prefix = cents < 0 ? '-' : '';
|
||||
return `${prefix}$${(absValue / 100).toFixed(2)}`;
|
||||
};
|
||||
|
||||
expect(formatCents(-200)).toBe('-$2.00');
|
||||
expect(formatCents(-1)).toBe('-$0.01');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Date Parsing Edge Cases', () => {
|
||||
it('should handle various date formats', () => {
|
||||
const parseDate = (dateStr: string): Date | null => {
|
||||
// MM/DD/YYYY pattern
|
||||
const mdyPattern = /(\d{1,2})\/(\d{1,2})\/(\d{2,4})/;
|
||||
// YYYY-MM-DD pattern
|
||||
const isoPattern = /(\d{4})-(\d{2})-(\d{2})/;
|
||||
|
||||
let match = dateStr.match(isoPattern);
|
||||
if (match) {
|
||||
return new Date(parseInt(match[1]), parseInt(match[2]) - 1, parseInt(match[3]));
|
||||
}
|
||||
|
||||
match = dateStr.match(mdyPattern);
|
||||
if (match) {
|
||||
let year = parseInt(match[3]);
|
||||
if (year < 100) year += 2000;
|
||||
return new Date(year, parseInt(match[1]) - 1, parseInt(match[2]));
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
// ISO format
|
||||
const isoDate = parseDate('2024-01-15');
|
||||
expect(isoDate?.getFullYear()).toBe(2024);
|
||||
expect(isoDate?.getMonth()).toBe(0); // January
|
||||
expect(isoDate?.getDate()).toBe(15);
|
||||
|
||||
// US format with 4-digit year
|
||||
const usDate = parseDate('01/15/2024');
|
||||
expect(usDate?.getFullYear()).toBe(2024);
|
||||
|
||||
// US format with 2-digit year
|
||||
const shortYear = parseDate('1/5/24');
|
||||
expect(shortYear?.getFullYear()).toBe(2024);
|
||||
|
||||
// Invalid format
|
||||
expect(parseDate('invalid')).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// ==========================================================================
|
||||
// HELPER FUNCTIONS FOR ADDITIONAL TESTS
|
||||
// ==========================================================================
|
||||
|
||||
/**
|
||||
* Creates a mock receipt scan for testing.
|
||||
*/
|
||||
function createMockReceiptScan(overrides: Partial<ReceiptScan> = {}): ReceiptScan {
|
||||
return {
|
||||
receipt_id: 1,
|
||||
user_id: 'user-1',
|
||||
store_location_id: null,
|
||||
receipt_image_url: '/uploads/receipt.jpg',
|
||||
transaction_date: null,
|
||||
total_amount_cents: null,
|
||||
status: 'pending' as ReceiptStatus,
|
||||
raw_text: null,
|
||||
store_confidence: null,
|
||||
ocr_provider: null,
|
||||
error_details: null,
|
||||
retry_count: 0,
|
||||
ocr_confidence: null,
|
||||
currency: 'CAD',
|
||||
created_at: new Date().toISOString(),
|
||||
processed_at: null,
|
||||
updated_at: new Date().toISOString(),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock receipt item for testing.
|
||||
*/
|
||||
function createMockReceiptItem(overrides: Partial<ReceiptItem> = {}): ReceiptItem {
|
||||
return {
|
||||
receipt_item_id: 1,
|
||||
receipt_id: 1,
|
||||
raw_item_description: 'TEST ITEM',
|
||||
quantity: 1,
|
||||
price_paid_cents: 299,
|
||||
master_item_id: null,
|
||||
product_id: null,
|
||||
status: 'unmatched' as ReceiptItemStatus,
|
||||
line_number: 1,
|
||||
match_confidence: null,
|
||||
is_discount: false,
|
||||
unit_price_cents: null,
|
||||
unit_type: null,
|
||||
added_to_pantry: false,
|
||||
pantry_item_id: null,
|
||||
upc_code: null,
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,8 +1,28 @@
|
||||
// src/services/sentry.client.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
/**
|
||||
* Comprehensive unit tests for the Sentry client initialization and configuration.
|
||||
*
|
||||
* IMPORTANT: This test file needs to unmock the global sentry.client and config mocks
|
||||
* from tests-setup-unit.ts to test the actual implementation.
|
||||
*
|
||||
* Tests cover:
|
||||
* - Sentry.init() configuration with correct parameters
|
||||
* - Environment-based configuration (development vs production)
|
||||
* - DSN configuration handling
|
||||
* - Integration setup (breadcrumbs)
|
||||
* - Error handling when Sentry is not configured
|
||||
* - All exported functions (captureException, captureMessage, setUser, addBreadcrumb)
|
||||
*/
|
||||
import { describe, it, expect, vi, beforeEach, afterEach, type Mock } from 'vitest';
|
||||
|
||||
// Remove global mocks to test actual implementation
|
||||
vi.unmock('../../services/sentry.client');
|
||||
vi.unmock('../../config');
|
||||
vi.unmock('./sentry.client');
|
||||
vi.unmock('../config');
|
||||
|
||||
// Use vi.hoisted to define mocks that need to be available before vi.mock runs
|
||||
const { mockSentry, mockLogger } = vi.hoisted(() => ({
|
||||
const { mockSentry, mockLogger, mockConfig } = vi.hoisted(() => ({
|
||||
mockSentry: {
|
||||
init: vi.fn(),
|
||||
captureException: vi.fn(() => 'mock-event-id'),
|
||||
@@ -10,7 +30,7 @@ const { mockSentry, mockLogger } = vi.hoisted(() => ({
|
||||
setContext: vi.fn(),
|
||||
setUser: vi.fn(),
|
||||
addBreadcrumb: vi.fn(),
|
||||
breadcrumbsIntegration: vi.fn(() => ({})),
|
||||
breadcrumbsIntegration: vi.fn(() => ({ name: 'Breadcrumbs' })),
|
||||
ErrorBoundary: vi.fn(),
|
||||
},
|
||||
mockLogger: {
|
||||
@@ -19,6 +39,28 @@ const { mockSentry, mockLogger } = vi.hoisted(() => ({
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
mockConfig: {
|
||||
app: {
|
||||
version: '1.0.0-test',
|
||||
commitMessage: 'test commit',
|
||||
commitUrl: 'https://example.com',
|
||||
},
|
||||
google: {
|
||||
mapsEmbedApiKey: '',
|
||||
},
|
||||
sentry: {
|
||||
dsn: '',
|
||||
environment: 'test',
|
||||
debug: false,
|
||||
enabled: false,
|
||||
},
|
||||
featureFlags: {
|
||||
newDashboard: false,
|
||||
betaRecipes: false,
|
||||
experimentalAi: false,
|
||||
debugMode: false,
|
||||
},
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('@sentry/react', () => mockSentry);
|
||||
@@ -28,40 +70,69 @@ vi.mock('./logger.client', () => ({
|
||||
default: mockLogger,
|
||||
}));
|
||||
|
||||
// Mock the config module with our mutable config object
|
||||
vi.mock('../config', () => ({
|
||||
default: mockConfig,
|
||||
}));
|
||||
|
||||
describe('sentry.client', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
vi.resetModules();
|
||||
// Reset config to default disabled state
|
||||
mockConfig.sentry = {
|
||||
dsn: '',
|
||||
environment: 'test',
|
||||
debug: false,
|
||||
enabled: false,
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
describe('with Sentry disabled (default test environment)', () => {
|
||||
// The test environment has Sentry disabled by default (VITE_SENTRY_DSN not set)
|
||||
// Import the module fresh for each test
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
it('should have isSentryConfigured as false in test environment', async () => {
|
||||
// ============================================================================
|
||||
// Section 1: Tests with Sentry DISABLED (default test environment)
|
||||
// ============================================================================
|
||||
describe('with Sentry disabled (no DSN configured)', () => {
|
||||
it('should have isSentryConfigured as false when DSN is empty', async () => {
|
||||
const { isSentryConfigured } = await import('./sentry.client');
|
||||
expect(isSentryConfigured).toBe(false);
|
||||
});
|
||||
|
||||
it('should not initialize Sentry when not configured', async () => {
|
||||
const { initSentry, isSentryConfigured } = await import('./sentry.client');
|
||||
it('should have isSentryConfigured as false when enabled is false', async () => {
|
||||
mockConfig.sentry = {
|
||||
dsn: 'https://test@sentry.io/123',
|
||||
environment: 'test',
|
||||
debug: false,
|
||||
enabled: false,
|
||||
};
|
||||
vi.resetModules();
|
||||
|
||||
const { isSentryConfigured } = await import('./sentry.client');
|
||||
expect(isSentryConfigured).toBe(false);
|
||||
});
|
||||
|
||||
it('should not call Sentry.init when not configured', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
// When Sentry is not configured, Sentry.init should NOT be called
|
||||
if (!isSentryConfigured) {
|
||||
expect(mockSentry.init).not.toHaveBeenCalled();
|
||||
}
|
||||
expect(mockSentry.init).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return undefined from captureException when not configured', async () => {
|
||||
it('should log info message about Sentry being disabled', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||
'[Sentry] Error tracking disabled (VITE_SENTRY_DSN not configured)',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return undefined from captureException', async () => {
|
||||
const { captureException } = await import('./sentry.client');
|
||||
|
||||
const result = captureException(new Error('test error'));
|
||||
@@ -70,7 +141,17 @@ describe('sentry.client', () => {
|
||||
expect(mockSentry.captureException).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return undefined from captureMessage when not configured', async () => {
|
||||
it('should return undefined from captureException even with context', async () => {
|
||||
const { captureException } = await import('./sentry.client');
|
||||
|
||||
const result = captureException(new Error('test error'), { userId: '123' });
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockSentry.setContext).not.toHaveBeenCalled();
|
||||
expect(mockSentry.captureException).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return undefined from captureMessage', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
const result = captureMessage('test message');
|
||||
@@ -79,7 +160,16 @@ describe('sentry.client', () => {
|
||||
expect(mockSentry.captureMessage).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not set user when not configured', async () => {
|
||||
it('should return undefined from captureMessage with custom level', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
const result = captureMessage('warning message', 'warning');
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
expect(mockSentry.captureMessage).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not call setUser when not configured', async () => {
|
||||
const { setUser } = await import('./sentry.client');
|
||||
|
||||
setUser({ id: '123', email: 'test@example.com' });
|
||||
@@ -87,7 +177,15 @@ describe('sentry.client', () => {
|
||||
expect(mockSentry.setUser).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not add breadcrumb when not configured', async () => {
|
||||
it('should not call setUser with null when not configured', async () => {
|
||||
const { setUser } = await import('./sentry.client');
|
||||
|
||||
setUser(null);
|
||||
|
||||
expect(mockSentry.setUser).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not call addBreadcrumb when not configured', async () => {
|
||||
const { addBreadcrumb } = await import('./sentry.client');
|
||||
|
||||
addBreadcrumb({ message: 'test breadcrumb', category: 'test' });
|
||||
@@ -96,40 +194,374 @@ describe('sentry.client', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Sentry re-export', () => {
|
||||
it('should re-export Sentry object', async () => {
|
||||
const { Sentry } = await import('./sentry.client');
|
||||
// ============================================================================
|
||||
// Section 2: Tests with Sentry ENABLED
|
||||
// ============================================================================
|
||||
describe('with Sentry enabled (DSN configured)', () => {
|
||||
beforeEach(async () => {
|
||||
mockConfig.sentry = {
|
||||
dsn: 'https://abc123@bugsink.projectium.com/1',
|
||||
environment: 'development',
|
||||
debug: true,
|
||||
enabled: true,
|
||||
};
|
||||
vi.resetModules();
|
||||
// Clear mocks after resetModules to ensure clean state
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
expect(Sentry).toBeDefined();
|
||||
expect(Sentry.init).toBeDefined();
|
||||
expect(Sentry.captureException).toBeDefined();
|
||||
it('should have isSentryConfigured as true when DSN is set and enabled', async () => {
|
||||
const { isSentryConfigured } = await import('./sentry.client');
|
||||
expect(isSentryConfigured).toBe(true);
|
||||
});
|
||||
|
||||
it('should call Sentry.init with correct DSN', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
expect(mockSentry.init).toHaveBeenCalledTimes(1);
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.dsn).toBe('https://abc123@bugsink.projectium.com/1');
|
||||
});
|
||||
|
||||
it('should call Sentry.init with correct environment', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.environment).toBe('development');
|
||||
});
|
||||
|
||||
it('should call Sentry.init with debug flag', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.debug).toBe(true);
|
||||
});
|
||||
|
||||
it('should call Sentry.init with tracesSampleRate of 0', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.tracesSampleRate).toBe(0);
|
||||
});
|
||||
|
||||
it('should configure breadcrumbs integration with all options', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
expect(mockSentry.breadcrumbsIntegration).toHaveBeenCalledWith({
|
||||
console: true,
|
||||
dom: true,
|
||||
fetch: true,
|
||||
history: true,
|
||||
xhr: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should include breadcrumbs integration in init config', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.integrations).toBeDefined();
|
||||
expect(initConfig.integrations).toHaveLength(1);
|
||||
expect(initConfig.integrations[0]).toEqual({ name: 'Breadcrumbs' });
|
||||
});
|
||||
|
||||
it('should configure beforeSend filter function', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(typeof initConfig.beforeSend).toBe('function');
|
||||
});
|
||||
|
||||
it('should log success message with environment', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
|
||||
initSentry();
|
||||
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||
'[Sentry] Error tracking initialized (development)',
|
||||
);
|
||||
});
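// Taken together, the assertions in this section reconstruct the enabled init
// path almost completely. A condensed standalone sketch of the initSentry
// implementation they imply; the beforeSend body is not pinned down by these
// tests and is only a placeholder, and the real src/services/sentry.client.ts
// may differ in details.

import * as Sentry from '@sentry/react';
import logger from './logger.client';
import config from '../config';

export const isSentryConfigured = Boolean(config.sentry.dsn) && config.sentry.enabled;

export function initSentry(): void {
  if (!isSentryConfigured) {
    logger.info('[Sentry] Error tracking disabled (VITE_SENTRY_DSN not configured)');
    return;
  }

  Sentry.init({
    dsn: config.sentry.dsn,
    environment: config.sentry.environment,
    debug: config.sentry.debug,
    tracesSampleRate: 0, // error tracking only, no performance tracing
    integrations: [
      Sentry.breadcrumbsIntegration({ console: true, dom: true, fetch: true, history: true, xhr: true }),
    ],
    beforeSend: (event) => event, // placeholder filter
  });

  logger.info(`[Sentry] Error tracking initialized (${config.sentry.environment})`);
}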
|
||||
|
||||
describe('captureException', () => {
|
||||
it('should return event ID when capturing exception', async () => {
|
||||
const { captureException } = await import('./sentry.client');
|
||||
|
||||
const result = captureException(new Error('test error'));
|
||||
|
||||
expect(result).toBe('mock-event-id');
|
||||
});
|
||||
|
||||
it('should call Sentry.captureException with the error', async () => {
|
||||
const { captureException } = await import('./sentry.client');
|
||||
const error = new Error('test error');
|
||||
|
||||
captureException(error);
|
||||
|
||||
expect(mockSentry.captureException).toHaveBeenCalledWith(error);
|
||||
});
|
||||
|
||||
it('should set context when provided', async () => {
|
||||
const { captureException } = await import('./sentry.client');
|
||||
const error = new Error('test error');
|
||||
const context = { userId: '123', action: 'upload' };
|
||||
|
||||
captureException(error, context);
|
||||
|
||||
expect(mockSentry.setContext).toHaveBeenCalledWith('additional', context);
|
||||
expect(mockSentry.captureException).toHaveBeenCalledWith(error);
|
||||
});
|
||||
|
||||
it('should not set context when not provided', async () => {
|
||||
const { captureException } = await import('./sentry.client');
|
||||
|
||||
captureException(new Error('test error'));
|
||||
|
||||
expect(mockSentry.setContext).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle complex context objects', async () => {
|
||||
const { captureException } = await import('./sentry.client');
|
||||
const context = {
|
||||
userId: '123',
|
||||
nested: { data: [1, 2, 3] },
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
captureException(new Error('test'), context);
|
||||
|
||||
expect(mockSentry.setContext).toHaveBeenCalledWith('additional', context);
|
||||
});
|
||||
});
|
||||
|
||||
describe('captureMessage', () => {
|
||||
it('should return message ID when capturing message', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
const result = captureMessage('test message');
|
||||
|
||||
expect(result).toBe('mock-message-id');
|
||||
});
|
||||
|
||||
it('should call Sentry.captureMessage with message and default level', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
captureMessage('test message');
|
||||
|
||||
expect(mockSentry.captureMessage).toHaveBeenCalledWith('test message', 'info');
|
||||
});
|
||||
|
||||
it('should call Sentry.captureMessage with warning level', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
captureMessage('warning message', 'warning');
|
||||
|
||||
expect(mockSentry.captureMessage).toHaveBeenCalledWith('warning message', 'warning');
|
||||
});
|
||||
|
||||
it('should call Sentry.captureMessage with error level', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
captureMessage('error message', 'error');
|
||||
|
||||
expect(mockSentry.captureMessage).toHaveBeenCalledWith('error message', 'error');
|
||||
});
|
||||
|
||||
it('should call Sentry.captureMessage with debug level', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
captureMessage('debug message', 'debug');
|
||||
|
||||
expect(mockSentry.captureMessage).toHaveBeenCalledWith('debug message', 'debug');
|
||||
});
|
||||
|
||||
it('should call Sentry.captureMessage with fatal level', async () => {
|
||||
const { captureMessage } = await import('./sentry.client');
|
||||
|
||||
captureMessage('fatal message', 'fatal');
|
||||
|
||||
expect(mockSentry.captureMessage).toHaveBeenCalledWith('fatal message', 'fatal');
|
||||
});
|
||||
});
|
||||
|
||||
describe('setUser', () => {
|
||||
it('should call Sentry.setUser with user object', async () => {
|
||||
const { setUser } = await import('./sentry.client');
|
||||
const user = { id: '123', email: 'test@example.com', username: 'testuser' };
|
||||
|
||||
setUser(user);
|
||||
|
||||
expect(mockSentry.setUser).toHaveBeenCalledWith(user);
|
||||
});
|
||||
|
||||
it('should call Sentry.setUser with minimal user object (id only)', async () => {
|
||||
const { setUser } = await import('./sentry.client');
|
||||
const user = { id: '456' };
|
||||
|
||||
setUser(user);
|
||||
|
||||
expect(mockSentry.setUser).toHaveBeenCalledWith(user);
|
||||
});
|
||||
|
||||
it('should call Sentry.setUser with null to clear user', async () => {
|
||||
const { setUser } = await import('./sentry.client');
|
||||
|
||||
setUser(null);
|
||||
|
||||
expect(mockSentry.setUser).toHaveBeenCalledWith(null);
|
||||
});
|
||||
|
||||
it('should call Sentry.setUser with user having optional fields', async () => {
|
||||
const { setUser } = await import('./sentry.client');
|
||||
const user = { id: '789', email: 'user@example.com' };
|
||||
|
||||
setUser(user);
|
||||
|
||||
expect(mockSentry.setUser).toHaveBeenCalledWith(user);
|
||||
});
|
||||
});
|
||||
|
||||
describe('addBreadcrumb', () => {
|
||||
it('should call Sentry.addBreadcrumb with breadcrumb object', async () => {
|
||||
const { addBreadcrumb } = await import('./sentry.client');
|
||||
const breadcrumb = { message: 'User clicked button', category: 'ui' };
|
||||
|
||||
addBreadcrumb(breadcrumb);
|
||||
|
||||
expect(mockSentry.addBreadcrumb).toHaveBeenCalledWith(breadcrumb);
|
||||
});
|
||||
|
||||
it('should handle breadcrumb with all optional fields', async () => {
|
||||
const { addBreadcrumb } = await import('./sentry.client');
|
||||
const breadcrumb = {
|
||||
message: 'API request completed',
|
||||
category: 'http',
|
||||
type: 'http',
|
||||
level: 'info' as const,
|
||||
data: { url: '/api/flyers', status: 200 },
|
||||
timestamp: Date.now() / 1000,
|
||||
};
|
||||
|
||||
addBreadcrumb(breadcrumb);
|
||||
|
||||
expect(mockSentry.addBreadcrumb).toHaveBeenCalledWith(breadcrumb);
|
||||
});
|
||||
|
||||
it('should handle navigation breadcrumb', async () => {
|
||||
const { addBreadcrumb } = await import('./sentry.client');
|
||||
const breadcrumb = {
|
||||
message: 'Navigation',
|
||||
category: 'navigation',
|
||||
data: { from: '/home', to: '/flyers' },
|
||||
};
|
||||
|
||||
addBreadcrumb(breadcrumb);
|
||||
|
||||
expect(mockSentry.addBreadcrumb).toHaveBeenCalledWith(breadcrumb);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('initSentry beforeSend filter logic', () => {
|
||||
// Test the beforeSend filter function logic in isolation
|
||||
// This tests the filter that's passed to Sentry.init
|
||||
// ============================================================================
|
||||
// Section 3: Environment-specific configuration tests
|
||||
// ============================================================================
|
||||
describe('environment-specific configuration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should filter out browser extension errors', () => {
|
||||
// Simulate the beforeSend logic from the implementation
|
||||
const filterExtensionErrors = (event: {
|
||||
exception?: {
|
||||
values?: Array<{
|
||||
stacktrace?: {
|
||||
frames?: Array<{ filename?: string }>;
|
||||
};
|
||||
}>;
|
||||
};
|
||||
}) => {
|
||||
if (
|
||||
event.exception?.values?.[0]?.stacktrace?.frames?.some((frame) =>
|
||||
frame.filename?.includes('extension://'),
|
||||
)
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
return event;
|
||||
it('should use production environment', async () => {
|
||||
mockConfig.sentry = {
|
||||
dsn: 'https://prod@bugsink.projectium.com/1',
|
||||
environment: 'production',
|
||||
debug: false,
|
||||
enabled: true,
|
||||
};
|
||||
vi.resetModules();
|
||||
vi.clearAllMocks();
|
||||
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
initSentry();
|
||||
|
||||
expect(mockSentry.init).toHaveBeenCalledTimes(1);
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.environment).toBe('production');
|
||||
expect(initConfig.debug).toBe(false);
|
||||
});
|
||||
|
||||
it('should use development environment with debug enabled', async () => {
|
||||
mockConfig.sentry = {
|
||||
dsn: 'https://dev@bugsink.projectium.com/2',
|
||||
environment: 'development',
|
||||
debug: true,
|
||||
enabled: true,
|
||||
};
|
||||
vi.resetModules();
|
||||
vi.clearAllMocks();
|
||||
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
initSentry();
|
||||
|
||||
expect(mockSentry.init).toHaveBeenCalledTimes(1);
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.environment).toBe('development');
|
||||
expect(initConfig.debug).toBe(true);
|
||||
});
|
||||
|
||||
it('should use staging environment', async () => {
|
||||
mockConfig.sentry = {
|
||||
dsn: 'https://staging@bugsink.projectium.com/3',
|
||||
environment: 'staging',
|
||||
debug: false,
|
||||
enabled: true,
|
||||
};
|
||||
vi.resetModules();
|
||||
vi.clearAllMocks();
|
||||
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
initSentry();
|
||||
|
||||
expect(mockSentry.init).toHaveBeenCalledTimes(1);
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
expect(initConfig.environment).toBe('staging');
|
||||
});
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Section 4: beforeSend filter logic tests
|
||||
// ============================================================================
|
||||
describe('beforeSend filter logic', () => {
|
||||
beforeEach(async () => {
|
||||
mockConfig.sentry = {
|
||||
dsn: 'https://test@bugsink.projectium.com/1',
|
||||
environment: 'test',
|
||||
debug: false,
|
||||
enabled: true,
|
||||
};
|
||||
vi.resetModules();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should filter out Chrome extension errors', async () => {
|
||||
const { initSentry } = await import('./sentry.client');
|
||||
initSentry();
|
||||
|
||||
expect(mockSentry.init).toHaveBeenCalledTimes(1);
|
||||
const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
|
||||
const beforeSend = initConfig.beforeSend;
|
||||
|
||||
const extensionError = {
|
||||
exception: {
|
||||
@@ -143,158 +575,274 @@ describe('sentry.client', () => {
        },
      };

      expect(filterExtensionErrors(extensionError)).toBeNull();
      expect(beforeSend(extensionError)).toBeNull();
    });

    it('should allow normal errors through', () => {
      const filterExtensionErrors = (event: {
        exception?: {
          values?: Array<{
            stacktrace?: {
              frames?: Array<{ filename?: string }>;
            };
          }>;
        };
      }) => {
        if (
          event.exception?.values?.[0]?.stacktrace?.frames?.some((frame) =>
            frame.filename?.includes('extension://'),
          )
        ) {
          return null;
        }
        return event;
      };
    it('should filter out Firefox extension errors', async () => {
      const { initSentry } = await import('./sentry.client');
      initSentry();

      const normalError = {
        exception: {
          values: [
            {
              stacktrace: {
                frames: [{ filename: '/app/src/index.js' }],
              },
            },
          ],
        },
      };

      expect(filterExtensionErrors(normalError)).toBe(normalError);
    });

    it('should handle events without exception property', () => {
      const filterExtensionErrors = (event: {
        exception?: {
          values?: Array<{
            stacktrace?: {
              frames?: Array<{ filename?: string }>;
            };
          }>;
        };
      }) => {
        if (
          event.exception?.values?.[0]?.stacktrace?.frames?.some((frame) =>
            frame.filename?.includes('extension://'),
          )
        ) {
          return null;
        }
        return event;
      };

      const eventWithoutException = { message: 'test' };

      expect(filterExtensionErrors(eventWithoutException as any)).toBe(eventWithoutException);
    });

    it('should handle firefox extension URLs', () => {
      const filterExtensionErrors = (event: {
        exception?: {
          values?: Array<{
            stacktrace?: {
              frames?: Array<{ filename?: string }>;
            };
          }>;
        };
      }) => {
        if (
          event.exception?.values?.[0]?.stacktrace?.frames?.some((frame) =>
            frame.filename?.includes('extension://'),
          )
        ) {
          return null;
        }
        return event;
      };
      const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
      const beforeSend = initConfig.beforeSend;

      const firefoxExtensionError = {
        exception: {
          values: [
            {
              stacktrace: {
                frames: [{ filename: 'moz-extension://abc123/script.js' }],
                frames: [{ filename: 'moz-extension://def456/background.js' }],
              },
            },
          ],
        },
      };

      expect(filterExtensionErrors(firefoxExtensionError)).toBeNull();
      expect(beforeSend(firefoxExtensionError)).toBeNull();
    });

    it('should allow normal application errors through', async () => {
      const { initSentry } = await import('./sentry.client');
      initSentry();

      const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
      const beforeSend = initConfig.beforeSend;

      const normalError = {
        exception: {
          values: [
            {
              stacktrace: {
                frames: [
                  { filename: '/app/src/index.js' },
                  { filename: '/app/src/components/Button.js' },
                ],
              },
            },
          ],
        },
      };

      expect(beforeSend(normalError)).toBe(normalError);
    });

    it('should handle events without exception property', async () => {
      const { initSentry } = await import('./sentry.client');
      initSentry();

      const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
      const beforeSend = initConfig.beforeSend;

      const messageEvent = { message: 'test message', level: 'info' };

      expect(beforeSend(messageEvent)).toBe(messageEvent);
    });

    it('should handle events without stacktrace', async () => {
      const { initSentry } = await import('./sentry.client');
      initSentry();

      const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
      const beforeSend = initConfig.beforeSend;

      const errorWithoutStacktrace = {
        exception: {
          values: [{ value: 'Error message' }],
        },
      };

      expect(beforeSend(errorWithoutStacktrace)).toBe(errorWithoutStacktrace);
    });

    it('should handle events with empty frames array', async () => {
      const { initSentry } = await import('./sentry.client');
      initSentry();

      const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
      const beforeSend = initConfig.beforeSend;

      const errorWithEmptyFrames = {
        exception: {
          values: [
            {
              stacktrace: {
                frames: [],
              },
            },
          ],
        },
      };

      expect(beforeSend(errorWithEmptyFrames)).toBe(errorWithEmptyFrames);
    });

    it('should handle mixed frames with extension in later frame', async () => {
      const { initSentry } = await import('./sentry.client');
      initSentry();

      const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
      const beforeSend = initConfig.beforeSend;

      // Note: Implementation uses .some() so it filters if ANY frame is extension
      const mixedError = {
        exception: {
          values: [
            {
              stacktrace: {
                frames: [
                  { filename: '/app/src/index.js' },
                  { filename: 'chrome-extension://abc123/inject.js' },
                ],
              },
            },
          ],
        },
      };

      // The implementation uses .some() so it will filter if ANY frame is extension
      expect(beforeSend(mixedError)).toBeNull();
    });

    it('should handle frames with undefined filename', async () => {
      const { initSentry } = await import('./sentry.client');
      initSentry();

      const initConfig = (mockSentry.init as Mock).mock.calls[0][0];
      const beforeSend = initConfig.beforeSend;

      const errorWithUndefinedFilename = {
        exception: {
          values: [
            {
              stacktrace: {
                frames: [{ filename: undefined }, { filename: '/app/src/index.js' }],
              },
            },
          ],
        },
      };

      expect(beforeSend(errorWithUndefinedFilename)).toBe(errorWithUndefinedFilename);
    });
  });

  describe('isSentryConfigured logic', () => {
    // Test the logic that determines if Sentry is configured
    // This mirrors the implementation: !!config.sentry.dsn && config.sentry.enabled
  // ============================================================================
  // Section 5: Sentry re-export tests
  // ============================================================================
  describe('Sentry re-export', () => {
    it('should re-export Sentry object', async () => {
      const { Sentry } = await import('./sentry.client');

    it('should return false when DSN is empty', () => {
      const dsn = '';
      const enabled = true;
      const result = !!dsn && enabled;
      expect(result).toBe(false);
      expect(Sentry).toBeDefined();
    });

    it('should return false when enabled is false', () => {
      const dsn = 'https://test@sentry.io/123';
      const enabled = false;
      const result = !!dsn && enabled;
      expect(result).toBe(false);
    it('should have init method on re-exported Sentry', async () => {
      const { Sentry } = await import('./sentry.client');

      expect(Sentry.init).toBeDefined();
      expect(typeof Sentry.init).toBe('function');
    });

    it('should return true when DSN is set and enabled is true', () => {
      const dsn = 'https://test@sentry.io/123';
      const enabled = true;
      const result = !!dsn && enabled;
      expect(result).toBe(true);
    it('should have captureException method on re-exported Sentry', async () => {
      const { Sentry } = await import('./sentry.client');

      expect(Sentry.captureException).toBeDefined();
      expect(typeof Sentry.captureException).toBe('function');
    });

    it('should return false when DSN is undefined', () => {
      const dsn = undefined;
      const enabled = true;
      const result = !!dsn && enabled;
      expect(result).toBe(false);
    it('should have captureMessage method on re-exported Sentry', async () => {
      const { Sentry } = await import('./sentry.client');

      expect(Sentry.captureMessage).toBeDefined();
      expect(typeof Sentry.captureMessage).toBe('function');
    });

    it('should have setUser method on re-exported Sentry', async () => {
      const { Sentry } = await import('./sentry.client');

      expect(Sentry.setUser).toBeDefined();
      expect(typeof Sentry.setUser).toBe('function');
    });

    it('should have addBreadcrumb method on re-exported Sentry', async () => {
      const { Sentry } = await import('./sentry.client');

      expect(Sentry.addBreadcrumb).toBeDefined();
      expect(typeof Sentry.addBreadcrumb).toBe('function');
    });
  });

  describe('captureException logic', () => {
    it('should set context before capturing when context is provided', () => {
      // This tests the conditional context setting logic
      const context = { userId: '123' };
      const shouldSetContext = !!context;
      expect(shouldSetContext).toBe(true);
  // ============================================================================
  // Section 6: isSentryConfigured logic edge cases
  // ============================================================================
  describe('isSentryConfigured logic edge cases', () => {
    beforeEach(() => {
      vi.clearAllMocks();
    });

    it('should not set context when not provided', () => {
      const context = undefined;
      const shouldSetContext = !!context;
      expect(shouldSetContext).toBe(false);
    it('should return false when DSN is null-ish', async () => {
      mockConfig.sentry = {
        dsn: null as unknown as string,
        environment: 'test',
        debug: false,
        enabled: true,
      };
      vi.resetModules();

      const { isSentryConfigured } = await import('./sentry.client');
      expect(isSentryConfigured).toBe(false);
    });

    it('should return false when DSN is undefined', async () => {
      mockConfig.sentry = {
        dsn: undefined as unknown as string,
        environment: 'test',
        debug: false,
        enabled: true,
      };
      vi.resetModules();

      const { isSentryConfigured } = await import('./sentry.client');
      expect(isSentryConfigured).toBe(false);
    });

    it('should return true only when both DSN is truthy AND enabled is true', async () => {
      mockConfig.sentry = {
        dsn: 'https://valid@sentry.io/123',
        environment: 'test',
        debug: false,
        enabled: true,
      };
      vi.resetModules();

      const { isSentryConfigured } = await import('./sentry.client');
      expect(isSentryConfigured).toBe(true);
    });
  });

  describe('captureMessage default level', () => {
    it('should default to info level', () => {
      // Test the default parameter behavior
      const defaultLevel = 'info';
      expect(defaultLevel).toBe('info');
  // ============================================================================
  // Section 7: Multiple initialization calls
  // ============================================================================
  describe('multiple initialization handling', () => {
    beforeEach(async () => {
      mockConfig.sentry = {
        dsn: 'https://test@bugsink.projectium.com/1',
        environment: 'test',
        debug: false,
        enabled: true,
      };
      vi.resetModules();
      vi.clearAllMocks();
    });

    it('should call Sentry.init each time initSentry is called', async () => {
      const { initSentry } = await import('./sentry.client');

      initSentry();
      initSentry();

      // The implementation does not guard against multiple calls
      // This test documents current behavior - Sentry.init may be called multiple times
      expect(mockSentry.init).toHaveBeenCalledTimes(2);
    });
  });
});
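For context when reading these tests: the sentry.client module itself is not shown in this diff. The sketch below is a minimal reconstruction inferred from the assertions above, not the actual implementation; the '@sentry/react' package name and the './config' and './logger' import paths are assumptions.

// Hypothetical sketch of sentry.client.ts, inferred from the tests above (assumed imports).
import * as Sentry from '@sentry/react';
import { config } from './config'; // assumed shape: { sentry: { dsn, environment, debug, enabled } }
import { logger } from './logger'; // assumed logger exposing .info()

export const isSentryConfigured = !!config.sentry.dsn && config.sentry.enabled;

export function initSentry(): void {
  Sentry.init({
    dsn: config.sentry.dsn,
    environment: config.sentry.environment,
    debug: config.sentry.debug,
    integrations: [
      Sentry.breadcrumbsIntegration({ console: true, dom: true, fetch: true, history: true, xhr: true }),
    ],
    // Drop events whose stack traces point into browser extensions.
    beforeSend(event) {
      if (
        event.exception?.values?.[0]?.stacktrace?.frames?.some((frame) =>
          frame.filename?.includes('extension://'),
        )
      ) {
        return null;
      }
      return event;
    },
  });
  logger.info(`[Sentry] Error tracking initialized (${config.sentry.environment})`);
}

export function captureException(error: Error, context?: Record<string, unknown>): string {
  // Attach caller-supplied context under the 'additional' key before capturing.
  if (context) {
    Sentry.setContext('additional', context);
  }
  return Sentry.captureException(error);
}

export function captureMessage(message: string, level: Sentry.SeverityLevel = 'info'): string {
  return Sentry.captureMessage(message, level);
}

export const setUser = Sentry.setUser;
export const addBreadcrumb = Sentry.addBreadcrumb;
export { Sentry };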
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -296,6 +296,23 @@ describe('E2E Receipt Processing Journey', () => {
    expect(reprocessResponse.status).toBe(200);
    expect(reprocessResponse.body.data.message).toContain('reprocessing');

    // Wait for the reprocess job to complete before deleting
    await poll(
      async () => {
        const statusResponse = await getRequest()
          .get(`/api/v1/receipts/${receipt2Result.rows[0].receipt_id}`)
          .set('Authorization', `Bearer ${authToken}`);
        return statusResponse.status === 200
          ? statusResponse.body
          : { data: { receipt: { status: 'pending' } } };
      },
      (result) => {
        const status = result.data?.receipt?.status;
        return status === 'completed' || status === 'failed';
      },
      { timeout: 15000, interval: 1000, description: 'receipt reprocessing' },
    );

    // Step 17: Delete the failed receipt
    const deleteResponse = await getRequest()
      .delete(`/api/v1/receipts/${receipt2Result.rows[0].receipt_id}`)
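The poll helper used above is defined elsewhere and is not part of this diff. A minimal sketch of the shape implied by this call site (a producer function, a completion predicate, and timeout/interval options) might look like the following; the actual helper may differ.

// Hypothetical sketch of the poll() test helper implied by the call above.
async function poll<T>(
  produce: () => Promise<T>,
  isDone: (result: T) => boolean,
  opts: { timeout: number; interval: number; description: string },
): Promise<T> {
  const deadline = Date.now() + opts.timeout;
  // Re-run the producer until the predicate passes or the timeout expires.
  for (;;) {
    const result = await produce();
    if (isDone(result)) return result;
    if (Date.now() > deadline) {
      throw new Error(`Timed out waiting for ${opts.description}`);
    }
    await new Promise((resolve) => setTimeout(resolve, opts.interval));
  }
}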
@@ -107,7 +107,10 @@ export const MockMainLayout: React.FC<Partial<MainLayoutProps>> = () => (
    <Outlet />
  </div>
);
export const MockHomePage: React.FC<Partial<HomePageProps>> = ({ selectedFlyer, onOpenCorrectionTool }) => (
export const MockHomePage: React.FC<Partial<HomePageProps>> = ({
  selectedFlyer,
  onOpenCorrectionTool,
}) => (
  <div data-testid="home-page-mock" data-selected-flyer-id={selectedFlyer?.flyer_id}>
    Mock Home Page
    <button onClick={onOpenCorrectionTool}>Open Correction Tool</button>
@@ -192,3 +195,59 @@ export const MockBookOpenIcon: React.FC<React.SVGProps<SVGSVGElement>> = (props)
);

export const MockFooter: React.FC = () => <footer data-testid="footer-mock">Mock Footer</footer>;

// --- FlyerList and FlyerUploader Mocks ---
import type { Flyer, UserProfile } from '../../types';

interface MockFlyerListProps {
  flyers: Flyer[];
  onFlyerSelect: (flyer: Flyer) => void;
  selectedFlyerId: number | null;
  profile: UserProfile | null;
}

export const MockFlyerList: React.FC<MockFlyerListProps> = ({
  flyers,
  onFlyerSelect,
  selectedFlyerId,
  profile,
}) => (
  <div
    data-testid="flyer-list"
    data-selected-id={selectedFlyerId ?? 'none'}
    data-flyer-count={flyers.length}
    data-profile-role={profile?.role ?? 'none'}
  >
    <h3>Mock Flyer List</h3>
    {flyers.length === 0 ? (
      <p data-testid="no-flyers-message">No flyers available</p>
    ) : (
      <ul>
        {flyers.map((flyer) => (
          <li
            key={flyer.flyer_id}
            data-testid={`flyer-item-${flyer.flyer_id}`}
            data-selected={selectedFlyerId === flyer.flyer_id}
          >
            <button onClick={() => onFlyerSelect(flyer)}>
              {flyer.store?.name ?? 'Unknown Store'} - {flyer.item_count} items
            </button>
          </li>
        ))}
      </ul>
    )}
  </div>
);

interface MockFlyerUploaderProps {
  onProcessingComplete: () => void;
}

export const MockFlyerUploader: React.FC<MockFlyerUploaderProps> = ({ onProcessingComplete }) => (
  <div data-testid="flyer-uploader">
    <h3>Mock Flyer Uploader</h3>
    <button data-testid="mock-upload-complete-btn" onClick={onProcessingComplete}>
      Simulate Upload Complete
    </button>
  </div>
);
26
src/vite-env.d.ts
vendored
@@ -5,7 +5,31 @@ interface ImportMetaEnv {
  readonly VITE_APP_COMMIT_MESSAGE: string;
  readonly VITE_APP_COMMIT_URL: string;
  readonly VITE_GOOGLE_MAPS_EMBED_API_KEY: string;
  // Add any other environment variables you use here

  // Sentry/Bugsink Configuration (ADR-015)
  readonly VITE_SENTRY_DSN?: string;
  readonly VITE_SENTRY_ENVIRONMENT?: string;
  readonly VITE_SENTRY_DEBUG?: string;
  readonly VITE_SENTRY_ENABLED?: string;

  /**
   * Feature Flags (ADR-024)
   *
   * All feature flag environment variables are optional and default to disabled
   * when not set. Set to 'true' to enable a feature.
   *
   * Naming convention: VITE_FEATURE_SNAKE_CASE
   *
   * @see docs/adr/0024-feature-flagging-strategy.md
   */
  /** Enable the redesigned dashboard UI */
  readonly VITE_FEATURE_NEW_DASHBOARD?: string;
  /** Enable beta recipe features */
  readonly VITE_FEATURE_BETA_RECIPES?: string;
  /** Enable experimental AI features */
  readonly VITE_FEATURE_EXPERIMENTAL_AI?: string;
  /** Enable debug mode UI elements */
  readonly VITE_FEATURE_DEBUG_MODE?: string;
}

interface ImportMeta {
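As a usage note, and not part of this diff: because the flags are plain strings, client code is expected to compare against the literal 'true', as the JSDoc above states. A hypothetical accessor might look like this; the project may already expose its own flag helper instead.

// Hypothetical helper; names here are illustrative, not from the diff.
export function isFeatureEnabled(value: string | undefined): boolean {
  // Flags default to disabled unless explicitly set to the string 'true'.
  return value === 'true';
}

// Example: gate the redesigned dashboard behind VITE_FEATURE_NEW_DASHBOARD.
const newDashboardEnabled = isFeatureEnabled(import.meta.env.VITE_FEATURE_NEW_DASHBOARD);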