Compare commits
27 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5870e5c614 | ||
| 2e7ebbd9ed | |||
|
|
dc3fa21359 | ||
| 11aeac5edd | |||
|
|
f6c0c082bc | ||
| 4e22213cd1 | |||
|
|
9815eb3686 | ||
| 2bf4a7c1e6 | |||
|
|
5eed3f51f4 | ||
| d250932c05 | |||
|
|
7d1f964574 | ||
| 3b69e58de3 | |||
|
|
5211aadd22 | ||
| a997d1d0b0 | |||
| cf5f77c58e | |||
|
|
cf0f5bb820 | ||
| 503e7084da | |||
|
|
d8aa19ac40 | ||
| dcd9452b8c | |||
|
|
6d468544e2 | ||
| 2913c7aa09 | |||
|
|
77f9cb6081 | ||
| 2f1d73ca12 | |||
|
|
402e2617ca | ||
| e14c19c112 | |||
|
|
ea46f66c7a | ||
| a42ee5a461 |
16
.claude/hooks.json
Normal file
16
.claude/hooks.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"$schema": "https://claude.ai/schemas/hooks.json",
|
||||
"hooks": {
|
||||
"PreToolUse": [
|
||||
{
|
||||
"matcher": "Bash",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "node -e \"const cmd = process.argv[1] || ''; const isTest = /\\b(npm\\s+(run\\s+)?test|vitest|jest)\\b/i.test(cmd); const isWindows = process.platform === 'win32'; const inContainer = process.env.REMOTE_CONTAINERS === 'true' || process.env.DEVCONTAINER === 'true'; if (isTest && isWindows && !inContainer) { console.error('BLOCKED: Tests must run on Linux. Use Dev Container (Reopen in Container) or WSL.'); process.exit(1); }\" -- \"$CLAUDE_TOOL_INPUT\""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -18,11 +18,9 @@
|
||||
"Bash(PGPASSWORD=postgres psql:*)",
|
||||
"Bash(npm search:*)",
|
||||
"Bash(npx:*)",
|
||||
"Bash(curl -s -H \"Authorization: token c72bc0f14f623fec233d3c94b3a16397fe3649ef\" https://gitea.projectium.com/api/v1/user)",
|
||||
"Bash(curl:*)",
|
||||
"Bash(powershell:*)",
|
||||
"Bash(cmd.exe:*)",
|
||||
"Bash(export NODE_ENV=test DB_HOST=localhost DB_USER=postgres DB_PASSWORD=postgres DB_NAME=flyer_crawler_dev REDIS_URL=redis://localhost:6379 FRONTEND_URL=http://localhost:5173 JWT_SECRET=test-jwt-secret:*)",
|
||||
"Bash(npm run test:integration:*)",
|
||||
"Bash(grep:*)",
|
||||
"Bash(done)",
|
||||
@@ -79,7 +77,18 @@
|
||||
"Bash(npm run lint)",
|
||||
"Bash(npm run typecheck:*)",
|
||||
"Bash(npm run type-check:*)",
|
||||
"Bash(npm run test:unit:*)"
|
||||
"Bash(npm run test:unit:*)",
|
||||
"mcp__filesystem__move_file",
|
||||
"Bash(git checkout:*)",
|
||||
"Bash(podman image inspect:*)",
|
||||
"Bash(node -e:*)",
|
||||
"Bash(xargs -I {} sh -c 'if ! grep -q \"\"vi.mock.*apiClient\"\" \"\"{}\"\"; then echo \"\"{}\"\"; fi')",
|
||||
"Bash(MSYS_NO_PATHCONV=1 podman exec:*)",
|
||||
"Bash(docker ps:*)",
|
||||
"Bash(find:*)",
|
||||
"Bash(\"/c/Users/games3/.local/bin/uvx.exe\" markitdown-mcp --help)",
|
||||
"Bash(git stash:*)",
|
||||
"Bash(ping:*)"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
27
.env.example
27
.env.example
@@ -41,6 +41,14 @@ FRONTEND_URL=http://localhost:3000
|
||||
# REQUIRED: Secret key for signing JWT tokens (generate a random 64+ character string)
|
||||
JWT_SECRET=your-super-secret-jwt-key-change-this-in-production
|
||||
|
||||
# OAuth Providers (Optional - enable social login)
|
||||
# Google OAuth - https://console.cloud.google.com/apis/credentials
|
||||
GOOGLE_CLIENT_ID=
|
||||
GOOGLE_CLIENT_SECRET=
|
||||
# GitHub OAuth - https://github.com/settings/developers
|
||||
GITHUB_CLIENT_ID=
|
||||
GITHUB_CLIENT_SECRET=
|
||||
|
||||
# ===================
|
||||
# AI/ML Services
|
||||
# ===================
|
||||
@@ -75,3 +83,22 @@ CLEANUP_WORKER_CONCURRENCY=10
|
||||
|
||||
# Worker lock duration in milliseconds (default: 2 minutes)
|
||||
WORKER_LOCK_DURATION=120000
|
||||
|
||||
# ===================
|
||||
# Error Tracking (ADR-015)
|
||||
# ===================
|
||||
# Sentry-compatible error tracking via Bugsink (self-hosted)
|
||||
# DSNs are created in Bugsink UI at http://localhost:8000 (dev) or your production URL
|
||||
# Backend DSN - for Express/Node.js errors
|
||||
SENTRY_DSN=
|
||||
# Frontend DSN - for React/browser errors (uses VITE_ prefix)
|
||||
VITE_SENTRY_DSN=
|
||||
# Environment name for error grouping (defaults to NODE_ENV)
|
||||
SENTRY_ENVIRONMENT=development
|
||||
VITE_SENTRY_ENVIRONMENT=development
|
||||
# Enable/disable error tracking (default: true)
|
||||
SENTRY_ENABLED=true
|
||||
VITE_SENTRY_ENABLED=true
|
||||
# Enable debug mode for SDK troubleshooting (default: false)
|
||||
SENTRY_DEBUG=false
|
||||
VITE_SENTRY_DEBUG=false
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
{
|
||||
"mcpServers": {
|
||||
"gitea-projectium": {
|
||||
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
|
||||
"args": ["run", "-t", "stdio"],
|
||||
"env": {
|
||||
"GITEA_HOST": "https://gitea.projectium.com",
|
||||
"GITEA_ACCESS_TOKEN": "c72bc0f14f623fec233d3c94b3a16397fe3649ef"
|
||||
}
|
||||
},
|
||||
"gitea-torbonium": {
|
||||
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
|
||||
"args": ["run", "-t", "stdio"],
|
||||
"env": {
|
||||
"GITEA_HOST": "https://gitea.torbonium.com",
|
||||
"GITEA_ACCESS_TOKEN": "391c9ddbe113378bc87bb8184800ba954648fcf8"
|
||||
}
|
||||
},
|
||||
"gitea-lan": {
|
||||
"command": "d:\\gitea-mcp\\gitea-mcp.exe",
|
||||
"args": ["run", "-t", "stdio"],
|
||||
"env": {
|
||||
"GITEA_HOST": "https://gitea.torbolan.com",
|
||||
"GITEA_ACCESS_TOKEN": "YOUR_LAN_TOKEN_HERE"
|
||||
},
|
||||
"disabled": true
|
||||
},
|
||||
"podman": {
|
||||
"command": "D:\\nodejs\\npx.cmd",
|
||||
"args": ["-y", "podman-mcp-server@latest"],
|
||||
"env": {
|
||||
"DOCKER_HOST": "npipe:////./pipe/podman-machine-default"
|
||||
}
|
||||
},
|
||||
"filesystem": {
|
||||
"command": "d:\\nodejs\\node.exe",
|
||||
"args": [
|
||||
"c:\\Users\\games3\\AppData\\Roaming\\npm\\node_modules\\@modelcontextprotocol\\server-filesystem\\dist\\index.js",
|
||||
"d:\\gitea\\flyer-crawler.projectium.com\\flyer-crawler.projectium.com"
|
||||
]
|
||||
},
|
||||
"fetch": {
|
||||
"command": "D:\\nodejs\\npx.cmd",
|
||||
"args": ["-y", "@modelcontextprotocol/server-fetch"]
|
||||
},
|
||||
"io.github.ChromeDevTools/chrome-devtools-mcp": {
|
||||
"type": "stdio",
|
||||
"command": "npx",
|
||||
"args": ["chrome-devtools-mcp@0.12.1"],
|
||||
"gallery": "https://api.mcp.github.com",
|
||||
"version": "0.12.1"
|
||||
},
|
||||
"markitdown": {
|
||||
"command": "C:\\Users\\games3\\.local\\bin\\uvx.exe",
|
||||
"args": ["markitdown-mcp"]
|
||||
},
|
||||
"sequential-thinking": {
|
||||
"command": "D:\\nodejs\\npx.cmd",
|
||||
"args": ["-y", "@modelcontextprotocol/server-sequential-thinking"]
|
||||
},
|
||||
"memory": {
|
||||
"command": "D:\\nodejs\\npx.cmd",
|
||||
"args": ["-y", "@modelcontextprotocol/server-memory"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -130,6 +130,11 @@ jobs:
|
||||
SMTP_USER: ''
|
||||
SMTP_PASS: ''
|
||||
SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
|
||||
# OAuth Providers
|
||||
GOOGLE_CLIENT_ID: ${{ secrets.GOOGLE_CLIENT_ID }}
|
||||
GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }}
|
||||
GITHUB_CLIENT_ID: ${{ secrets.GH_CLIENT_ID }}
|
||||
GITHUB_CLIENT_SECRET: ${{ secrets.GH_CLIENT_SECRET }}
|
||||
run: |
|
||||
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
||||
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
|
||||
|
||||
@@ -198,8 +198,8 @@ jobs:
|
||||
--reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||
|
||||
echo "--- Running E2E Tests ---"
|
||||
# Run E2E tests using the dedicated E2E config which inherits from integration config.
|
||||
# We still pass --coverage to enable it, but directory and timeout are now in the config.
|
||||
# Run E2E tests using the dedicated E2E config.
|
||||
# E2E uses port 3098, integration uses 3099 to avoid conflicts.
|
||||
npx vitest run --config vitest.config.e2e.ts --coverage \
|
||||
--coverage.exclude='**/*.test.ts' \
|
||||
--coverage.exclude='**/tests/**' \
|
||||
@@ -240,7 +240,19 @@ jobs:
|
||||
# Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
|
||||
# We only generate the 'json' report here because it's all nyc needs for merging.
|
||||
echo "Server coverage report about to be generated..."
|
||||
npx c8 report --exclude='**/*.test.ts' --exclude='**/tests/**' --exclude='**/mocks/**' --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
|
||||
npx c8 report \
|
||||
--include='src/**' \
|
||||
--exclude='**/*.test.ts' \
|
||||
--exclude='**/*.test.tsx' \
|
||||
--exclude='**/tests/**' \
|
||||
--exclude='**/mocks/**' \
|
||||
--exclude='hostexecutor/**' \
|
||||
--exclude='scripts/**' \
|
||||
--exclude='*.config.js' \
|
||||
--exclude='*.config.ts' \
|
||||
--reporter=json \
|
||||
--temp-directory .coverage/tmp/integration-server \
|
||||
--reports-dir .coverage/integration-server
|
||||
echo "Server coverage report generated. Verifying existence:"
|
||||
ls -l .coverage/integration-server/coverage-final.json
|
||||
|
||||
@@ -280,12 +292,18 @@ jobs:
|
||||
--reporter=html \
|
||||
--report-dir .coverage/ \
|
||||
--temp-dir "$NYC_SOURCE_DIR" \
|
||||
--include "src/**" \
|
||||
--exclude "**/*.test.ts" \
|
||||
--exclude "**/*.test.tsx" \
|
||||
--exclude "**/tests/**" \
|
||||
--exclude "**/mocks/**" \
|
||||
--exclude "**/index.tsx" \
|
||||
--exclude "**/vite-env.d.ts" \
|
||||
--exclude "**/vitest.setup.ts"
|
||||
--exclude "**/vitest.setup.ts" \
|
||||
--exclude "hostexecutor/**" \
|
||||
--exclude "scripts/**" \
|
||||
--exclude "*.config.js" \
|
||||
--exclude "*.config.ts"
|
||||
|
||||
# Re-enable secret masking for subsequent steps.
|
||||
echo "::secret-masking::"
|
||||
|
||||
12
.gitignore
vendored
12
.gitignore
vendored
@@ -11,9 +11,18 @@ node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
.env
|
||||
*.tsbuildinfo
|
||||
|
||||
# Test coverage
|
||||
coverage
|
||||
.nyc_output
|
||||
.coverage
|
||||
|
||||
# Test artifacts - flyer-images/ is a runtime directory
|
||||
# Test fixtures are stored in src/tests/assets/ instead
|
||||
flyer-images/
|
||||
test-output.txt
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
@@ -25,3 +34,6 @@ coverage
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
Thumbs.db
|
||||
.claude
|
||||
nul
|
||||
|
||||
5
.nycrc.json
Normal file
5
.nycrc.json
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"text": {
|
||||
"maxCols": 200
|
||||
}
|
||||
}
|
||||
110
AUTHENTICATION.md
Normal file
110
AUTHENTICATION.md
Normal file
@@ -0,0 +1,110 @@
|
||||
# Authentication Setup
|
||||
|
||||
Flyer Crawler supports OAuth authentication via Google and GitHub. This guide walks through configuring both providers.
|
||||
|
||||
---
|
||||
|
||||
## Google OAuth
|
||||
|
||||
### Step 1: Create OAuth Credentials
|
||||
|
||||
1. Go to the [Google Cloud Console](https://console.cloud.google.com/)
|
||||
2. Create a new project (or select an existing one)
|
||||
3. Navigate to **APIs & Services > Credentials**
|
||||
4. Click **Create Credentials > OAuth client ID**
|
||||
5. Select **Web application** as the application type
|
||||
|
||||
### Step 2: Configure Authorized Redirect URIs
|
||||
|
||||
Add the callback URL where Google will redirect users after authentication:
|
||||
|
||||
| Environment | Redirect URI |
|
||||
| ----------- | -------------------------------------------------- |
|
||||
| Development | `http://localhost:3001/api/auth/google/callback` |
|
||||
| Production | `https://your-domain.com/api/auth/google/callback` |
|
||||
|
||||
### Step 3: Save Credentials
|
||||
|
||||
After clicking **Create**, you'll receive:
|
||||
|
||||
- **Client ID**
|
||||
- **Client Secret**
|
||||
|
||||
Store these securely as environment variables:
|
||||
|
||||
- `GOOGLE_CLIENT_ID`
|
||||
- `GOOGLE_CLIENT_SECRET`
|
||||
|
||||
---
|
||||
|
||||
## GitHub OAuth
|
||||
|
||||
### Step 1: Create OAuth App
|
||||
|
||||
1. Go to your [GitHub Developer Settings](https://github.com/settings/developers)
|
||||
2. Navigate to **OAuth Apps**
|
||||
3. Click **New OAuth App**
|
||||
|
||||
### Step 2: Fill in Application Details
|
||||
|
||||
| Field | Value |
|
||||
| -------------------------- | ---------------------------------------------------- |
|
||||
| Application name | Flyer Crawler (or your preferred name) |
|
||||
| Homepage URL | `http://localhost:5173` (dev) or your production URL |
|
||||
| Authorization callback URL | `http://localhost:3001/api/auth/github/callback` |
|
||||
|
||||
### Step 3: Save GitHub Credentials
|
||||
|
||||
After clicking **Register application**, you'll receive:
|
||||
|
||||
- **Client ID**
|
||||
- **Client Secret**
|
||||
|
||||
Store these securely as environment variables:
|
||||
|
||||
- `GITHUB_CLIENT_ID`
|
||||
- `GITHUB_CLIENT_SECRET`
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables Summary
|
||||
|
||||
| Variable | Description |
|
||||
| ---------------------- | ---------------------------------------- |
|
||||
| `GOOGLE_CLIENT_ID` | Google OAuth client ID |
|
||||
| `GOOGLE_CLIENT_SECRET` | Google OAuth client secret |
|
||||
| `GITHUB_CLIENT_ID` | GitHub OAuth client ID |
|
||||
| `GITHUB_CLIENT_SECRET` | GitHub OAuth client secret |
|
||||
| `JWT_SECRET` | Secret for signing authentication tokens |
|
||||
|
||||
---
|
||||
|
||||
## Production Considerations
|
||||
|
||||
When deploying to production:
|
||||
|
||||
1. **Update redirect URIs** in both Google Cloud Console and GitHub OAuth settings to use your production domain
|
||||
2. **Use HTTPS** for all callback URLs in production
|
||||
3. **Store secrets securely** using your CI/CD platform's secrets management (e.g., Gitea repository secrets)
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "redirect_uri_mismatch" Error
|
||||
|
||||
The callback URL in your OAuth provider settings doesn't match what the application is sending. Verify:
|
||||
|
||||
- The URL is exactly correct (no trailing slashes, correct port)
|
||||
- You're using the right environment (dev vs production URLs)
|
||||
|
||||
### "invalid_client" Error
|
||||
|
||||
The Client ID or Client Secret is incorrect. Double-check your environment variables.
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [Installation Guide](INSTALL.md) - Local development setup
|
||||
- [Deployment Guide](DEPLOYMENT.md) - Production deployment
|
||||
254
CLAUDE.md
Normal file
254
CLAUDE.md
Normal file
@@ -0,0 +1,254 @@
|
||||
# Claude Code Project Instructions
|
||||
|
||||
## Communication Style: Ask Before Assuming
|
||||
|
||||
**IMPORTANT**: When helping with tasks, **ask clarifying questions before making assumptions**. Do not assume:
|
||||
|
||||
- What steps the user has or hasn't completed
|
||||
- What the user already knows or has configured
|
||||
- What external services (OAuth providers, APIs, etc.) are already set up
|
||||
- What secrets or credentials have already been created
|
||||
|
||||
Instead, ask the user to confirm the current state before providing instructions or making recommendations. This prevents wasted effort and respects the user's existing work.
|
||||
|
||||
## Platform Requirement: Linux Only
|
||||
|
||||
**CRITICAL**: This application is designed to run **exclusively on Linux**. See [ADR-014](docs/adr/0014-containerization-and-deployment-strategy.md) for full details.
|
||||
|
||||
### Environment Terminology
|
||||
|
||||
- **Dev Container** (or just "dev"): The containerized Linux development environment (`flyer-crawler-dev`). This is where all development and testing should occur.
|
||||
- **Host**: The Windows machine running Podman/Docker and VS Code.
|
||||
|
||||
When instructions say "run in dev" or "run in the dev container", they mean executing commands inside the `flyer-crawler-dev` container.
|
||||
|
||||
### Test Execution Rules
|
||||
|
||||
1. **ALL tests MUST be executed in the dev container** - the Linux container environment
|
||||
2. **NEVER run tests directly on Windows host** - test results from Windows are unreliable
|
||||
3. **Always use the dev container for testing** when developing on Windows
|
||||
|
||||
### How to Run Tests Correctly
|
||||
|
||||
```bash
|
||||
# If on Windows, first open VS Code and "Reopen in Container"
|
||||
# Then run tests inside the dev container:
|
||||
npm test # Run all unit tests
|
||||
npm run test:unit # Run unit tests only
|
||||
npm run test:integration # Run integration tests (requires DB/Redis)
|
||||
```
|
||||
|
||||
### Running Tests via Podman (from Windows host)
|
||||
|
||||
The command to run unit tests in the dev container via podman:
|
||||
|
||||
```bash
|
||||
podman exec -it flyer-crawler-dev npm run test:unit
|
||||
```
|
||||
|
||||
The command to run integration tests in the dev container via podman:
|
||||
|
||||
```bash
|
||||
podman exec -it flyer-crawler-dev npm run test:integration
|
||||
```
|
||||
|
||||
For running specific test files:
|
||||
|
||||
```bash
|
||||
podman exec -it flyer-crawler-dev npm test -- --run src/hooks/useAuth.test.tsx
|
||||
```
|
||||
|
||||
### Why Linux Only?
|
||||
|
||||
- Path separators: Code uses POSIX-style paths (`/`) which may break on Windows
|
||||
- Shell scripts in `scripts/` directory are Linux-only
|
||||
- External dependencies like `pdftocairo` assume Linux installation paths
|
||||
- Unix-style file permissions are assumed throughout
|
||||
|
||||
### Test Result Interpretation
|
||||
|
||||
- Tests that **pass on Windows but fail on Linux** = **BROKEN tests** (must be fixed)
|
||||
- Tests that **fail on Windows but pass on Linux** = **PASSING tests** (acceptable)
|
||||
|
||||
## Development Workflow
|
||||
|
||||
1. Open project in VS Code
|
||||
2. Use "Reopen in Container" (Dev Containers extension required) to enter the dev environment
|
||||
3. Wait for dev container initialization to complete
|
||||
4. Run `npm test` to verify the dev environment is working
|
||||
5. Make changes and run tests inside the dev container
|
||||
|
||||
## Code Change Verification
|
||||
|
||||
After making any code changes, **always run a type-check** to catch TypeScript errors before committing:
|
||||
|
||||
```bash
|
||||
npm run type-check
|
||||
```
|
||||
|
||||
This prevents linting/type errors from being introduced into the codebase.
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Command | Description |
|
||||
| -------------------------- | ---------------------------- |
|
||||
| `npm test` | Run all unit tests |
|
||||
| `npm run test:unit` | Run unit tests only |
|
||||
| `npm run test:integration` | Run integration tests |
|
||||
| `npm run dev:container` | Start dev server (container) |
|
||||
| `npm run build` | Build for production |
|
||||
| `npm run type-check` | Run TypeScript type checking |
|
||||
|
||||
## Known Integration Test Issues and Solutions
|
||||
|
||||
This section documents common test issues encountered in integration tests, their root causes, and solutions. These patterns recur frequently.
|
||||
|
||||
### 1. Vitest globalSetup Runs in Separate Node.js Context
|
||||
|
||||
**Problem:** Vitest's `globalSetup` runs in a completely separate Node.js context from test files. This means:
|
||||
|
||||
- Singletons created in globalSetup are NOT the same instances as those in test files
|
||||
- `global`, `globalThis`, and `process` are all isolated between contexts
|
||||
- `vi.spyOn()` on module exports doesn't work cross-context
|
||||
- Dependency injection via setter methods fails across contexts
|
||||
|
||||
**Affected Tests:** Any test trying to inject mocks into BullMQ worker services (e.g., AI failure tests, DB failure tests)
|
||||
|
||||
**Solution Options:**
|
||||
|
||||
1. Mark tests as `.todo()` until an API-based mock injection mechanism is implemented
|
||||
2. Create test-only API endpoints that allow setting mock behaviors via HTTP
|
||||
3. Use file-based or Redis-based mock flags that services check at runtime
|
||||
|
||||
**Example of affected code pattern:**
|
||||
|
||||
```typescript
|
||||
// This DOES NOT work - different module instances
|
||||
const { flyerProcessingService } = await import('../../services/workers.server');
|
||||
flyerProcessingService._getAiProcessor()._setExtractAndValidateData(mockFn);
|
||||
// The worker uses a different flyerProcessingService instance!
|
||||
```
|
||||
|
||||
### 2. BullMQ Cleanup Queue Deleting Files Before Test Verification
|
||||
|
||||
**Problem:** The cleanup worker runs in the globalSetup context and processes cleanup jobs even when tests spy on `cleanupQueue.add()`. The spy intercepts calls in the test context, but jobs already queued run in the worker's context.
|
||||
|
||||
**Affected Tests:** EXIF/PNG metadata stripping tests that need to verify file contents before deletion
|
||||
|
||||
**Solution:** Drain and pause the cleanup queue before the test:
|
||||
|
||||
```typescript
|
||||
const { cleanupQueue } = await import('../../services/queues.server');
|
||||
await cleanupQueue.drain(); // Remove existing jobs
|
||||
await cleanupQueue.pause(); // Prevent new jobs from processing
|
||||
// ... run test ...
|
||||
await cleanupQueue.resume(); // Restore normal operation
|
||||
```
|
||||
|
||||
### 3. Cache Invalidation After Direct Database Inserts
|
||||
|
||||
**Problem:** Tests that insert data directly via SQL (bypassing the service layer) don't trigger cache invalidation. Subsequent API calls return stale cached data.
|
||||
|
||||
**Affected Tests:** Any test using `pool.query()` to insert flyers, stores, or other cached entities
|
||||
|
||||
**Solution:** Manually invalidate the cache after direct inserts:
|
||||
|
||||
```typescript
|
||||
await pool.query('INSERT INTO flyers ...');
|
||||
await cacheService.invalidateFlyers(); // Clear stale cache
|
||||
```
|
||||
|
||||
### 4. Unique Filenames Required for Test Isolation
|
||||
|
||||
**Problem:** Multer generates predictable filenames in test environments, causing race conditions when multiple tests upload files concurrently or in sequence.
|
||||
|
||||
**Affected Tests:** Flyer processing tests, file upload tests
|
||||
|
||||
**Solution:** Always use unique filenames with timestamps:
|
||||
|
||||
```typescript
|
||||
// In multer.middleware.ts
|
||||
const uniqueSuffix = `${Date.now()}-${Math.round(Math.random() * 1e9)}`;
|
||||
cb(null, `${file.fieldname}-${uniqueSuffix}-${sanitizedOriginalName}`);
|
||||
```
|
||||
|
||||
### 5. Response Format Mismatches
|
||||
|
||||
**Problem:** API response formats may change, causing tests to fail when expecting old formats.
|
||||
|
||||
**Common Issues:**
|
||||
|
||||
- `response.body.data.jobId` vs `response.body.data.job.id`
|
||||
- Nested objects vs flat response structures
|
||||
- Type coercion (string vs number for IDs)
|
||||
|
||||
**Solution:** Always log response bodies during debugging and update test assertions to match actual API contracts.
|
||||
|
||||
### 6. External Service Availability
|
||||
|
||||
**Problem:** Tests depending on external services (PM2, Redis health checks) fail when those services aren't available in the test environment.
|
||||
|
||||
**Solution:** Use try/catch with graceful degradation or mock the external service checks.
|
||||
|
||||
## MCP Servers
|
||||
|
||||
The following MCP servers are configured for this project:
|
||||
|
||||
| Server | Purpose |
|
||||
| --------------------- | ------------------------------------------- |
|
||||
| gitea-projectium | Gitea API for gitea.projectium.com |
|
||||
| gitea-torbonium | Gitea API for gitea.torbonium.com |
|
||||
| podman | Container management |
|
||||
| filesystem | File system access |
|
||||
| fetch | Web fetching |
|
||||
| markitdown | Convert documents to markdown |
|
||||
| sequential-thinking | Step-by-step reasoning |
|
||||
| memory | Knowledge graph persistence |
|
||||
| postgres | Direct database queries (localhost:5432) |
|
||||
| playwright | Browser automation and testing |
|
||||
| redis | Redis cache inspection (localhost:6379) |
|
||||
| sentry-selfhosted-mcp | Error tracking via Bugsink (localhost:8000) |
|
||||
|
||||
**Note:** MCP servers are currently only available in **Claude CLI**. Due to a bug in Claude VS Code extension, MCP servers do not work there yet.
|
||||
|
||||
### Sentry/Bugsink MCP Server Setup (ADR-015)
|
||||
|
||||
To enable Claude Code to query and analyze application errors from Bugsink:
|
||||
|
||||
1. **Install the MCP server**:
|
||||
|
||||
```bash
|
||||
# Clone the sentry-selfhosted-mcp repository
|
||||
git clone https://github.com/ddfourtwo/sentry-selfhosted-mcp.git
|
||||
cd sentry-selfhosted-mcp
|
||||
npm install
|
||||
```
|
||||
|
||||
2. **Configure Claude Code** (add to `.claude/mcp.json`):
|
||||
|
||||
```json
|
||||
{
|
||||
"sentry-selfhosted-mcp": {
|
||||
"command": "node",
|
||||
"args": ["/path/to/sentry-selfhosted-mcp/dist/index.js"],
|
||||
"env": {
|
||||
"SENTRY_URL": "http://localhost:8000",
|
||||
"SENTRY_AUTH_TOKEN": "<get-from-bugsink-ui>",
|
||||
"SENTRY_ORG_SLUG": "flyer-crawler"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
3. **Get the auth token**:
|
||||
- Navigate to Bugsink UI at `http://localhost:8000`
|
||||
- Log in with admin credentials
|
||||
- Go to Settings > API Keys
|
||||
- Create a new API key with read access
|
||||
|
||||
4. **Available capabilities**:
|
||||
- List projects and issues
|
||||
- View detailed error events
|
||||
- Search by error message or stack trace
|
||||
- Update issue status (resolve, ignore)
|
||||
- Add comments to issues
|
||||
188
DATABASE.md
Normal file
188
DATABASE.md
Normal file
@@ -0,0 +1,188 @@
|
||||
# Database Setup
|
||||
|
||||
Flyer Crawler uses PostgreSQL with several extensions for full-text search, geographic data, and UUID generation.
|
||||
|
||||
---
|
||||
|
||||
## Required Extensions
|
||||
|
||||
| Extension | Purpose |
|
||||
| ----------- | ------------------------------------------- |
|
||||
| `postgis` | Geographic/spatial data for store locations |
|
||||
| `pg_trgm` | Trigram matching for fuzzy text search |
|
||||
| `uuid-ossp` | UUID generation for primary keys |
|
||||
|
||||
---
|
||||
|
||||
## Production Database Setup
|
||||
|
||||
### Step 1: Install PostgreSQL
|
||||
|
||||
```bash
|
||||
sudo apt update
|
||||
sudo apt install postgresql postgresql-contrib
|
||||
```
|
||||
|
||||
### Step 2: Create Database and User
|
||||
|
||||
Switch to the postgres system user:
|
||||
|
||||
```bash
|
||||
sudo -u postgres psql
|
||||
```
|
||||
|
||||
Run the following SQL commands (replace `'a_very_strong_password'` with a secure password):
|
||||
|
||||
```sql
|
||||
-- Create a new role for your application
|
||||
CREATE ROLE flyer_crawler_user WITH LOGIN PASSWORD 'a_very_strong_password';
|
||||
|
||||
-- Create the production database
|
||||
CREATE DATABASE "flyer-crawler-prod" WITH OWNER = flyer_crawler_user;
|
||||
|
||||
-- Connect to the new database
|
||||
\c "flyer-crawler-prod"
|
||||
|
||||
-- Install required extensions (must be done as superuser)
|
||||
CREATE EXTENSION IF NOT EXISTS postgis;
|
||||
CREATE EXTENSION IF NOT EXISTS pg_trgm;
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- Exit
|
||||
\q
|
||||
```
|
||||
|
||||
### Step 3: Apply the Schema
|
||||
|
||||
Navigate to your project directory and run:
|
||||
|
||||
```bash
|
||||
psql -U flyer_crawler_user -d "flyer-crawler-prod" -f sql/master_schema_rollup.sql
|
||||
```
|
||||
|
||||
This creates all tables, functions, triggers, and seeds essential data (categories, master items).
|
||||
|
||||
### Step 4: Seed the Admin Account
|
||||
|
||||
Set the required environment variables and run the seed script:
|
||||
|
||||
```bash
|
||||
export DB_USER=flyer_crawler_user
|
||||
export DB_PASSWORD=your_password
|
||||
export DB_NAME="flyer-crawler-prod"
|
||||
export DB_HOST=localhost
|
||||
|
||||
npx tsx src/db/seed_admin_account.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Test Database Setup
|
||||
|
||||
The test database is used by CI/CD pipelines and local test runs.
|
||||
|
||||
### Step 1: Create the Test Database
|
||||
|
||||
```bash
|
||||
sudo -u postgres psql
|
||||
```
|
||||
|
||||
```sql
|
||||
-- Create the test database
|
||||
CREATE DATABASE "flyer-crawler-test" WITH OWNER = flyer_crawler_user;
|
||||
|
||||
-- Connect to the test database
|
||||
\c "flyer-crawler-test"
|
||||
|
||||
-- Install required extensions
|
||||
CREATE EXTENSION IF NOT EXISTS postgis;
|
||||
CREATE EXTENSION IF NOT EXISTS pg_trgm;
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- Grant schema ownership (required for test runner to reset schema)
|
||||
ALTER SCHEMA public OWNER TO flyer_crawler_user;
|
||||
|
||||
-- Exit
|
||||
\q
|
||||
```
|
||||
|
||||
### Step 2: Configure CI/CD Secrets
|
||||
|
||||
Ensure these secrets are set in your Gitea repository settings:
|
||||
|
||||
| Secret | Description |
|
||||
| ------------- | ------------------------------------------ |
|
||||
| `DB_HOST` | Database hostname (e.g., `localhost`) |
|
||||
| `DB_PORT` | Database port (e.g., `5432`) |
|
||||
| `DB_USER` | Database user (e.g., `flyer_crawler_user`) |
|
||||
| `DB_PASSWORD` | Database password |
|
||||
|
||||
---
|
||||
|
||||
## How the Test Pipeline Works
|
||||
|
||||
The CI pipeline uses a permanent test database that gets reset on each test run:
|
||||
|
||||
1. **Setup**: The vitest global setup script connects to `flyer-crawler-test`
|
||||
2. **Schema Reset**: Executes `sql/drop_tables.sql` (`DROP SCHEMA public CASCADE`)
|
||||
3. **Schema Application**: Runs `sql/master_schema_rollup.sql` to build a fresh schema
|
||||
4. **Test Execution**: Tests run against the clean database
|
||||
|
||||
This approach is faster than creating/destroying databases and doesn't require sudo access.
|
||||
|
||||
---
|
||||
|
||||
## Connecting to Production Database
|
||||
|
||||
```bash
|
||||
psql -h localhost -U flyer_crawler_user -d "flyer-crawler-prod" -W
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Checking PostGIS Version
|
||||
|
||||
```sql
|
||||
SELECT version();
|
||||
SELECT PostGIS_Full_Version();
|
||||
```
|
||||
|
||||
Example output:
|
||||
|
||||
```
|
||||
PostgreSQL 14.19 (Ubuntu 14.19-0ubuntu0.22.04.1)
|
||||
POSTGIS="3.2.0 c3e3cc0" GEOS="3.10.2-CAPI-1.16.0" PROJ="8.2.1"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Schema Files
|
||||
|
||||
| File | Purpose |
|
||||
| ------------------------------ | --------------------------------------------------------- |
|
||||
| `sql/master_schema_rollup.sql` | Complete schema with all tables, functions, and seed data |
|
||||
| `sql/drop_tables.sql` | Drops entire schema (used by test runner) |
|
||||
| `sql/schema.sql.txt` | Legacy schema file (reference only) |
|
||||
|
||||
---
|
||||
|
||||
## Backup and Restore
|
||||
|
||||
### Create a Backup
|
||||
|
||||
```bash
|
||||
pg_dump -U flyer_crawler_user -d "flyer-crawler-prod" -F c -f backup.dump
|
||||
```
|
||||
|
||||
### Restore from Backup
|
||||
|
||||
```bash
|
||||
pg_restore -U flyer_crawler_user -d "flyer-crawler-prod" -c backup.dump
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [Installation Guide](INSTALL.md) - Local development setup
|
||||
- [Deployment Guide](DEPLOYMENT.md) - Production deployment
|
||||
271
DEPLOYMENT.md
Normal file
271
DEPLOYMENT.md
Normal file
@@ -0,0 +1,271 @@
|
||||
# Deployment Guide
|
||||
|
||||
This guide covers deploying Flyer Crawler to a production server.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Ubuntu server (22.04 LTS recommended)
|
||||
- PostgreSQL 14+ with PostGIS extension
|
||||
- Redis
|
||||
- Node.js 20.x
|
||||
- NGINX (reverse proxy)
|
||||
- PM2 (process manager)
|
||||
|
||||
---
|
||||
|
||||
## Server Setup
|
||||
|
||||
### Install Node.js
|
||||
|
||||
```bash
|
||||
curl -sL https://deb.nodesource.com/setup_20.x | sudo bash -
|
||||
sudo apt-get install -y nodejs
|
||||
```
|
||||
|
||||
### Install PM2
|
||||
|
||||
```bash
|
||||
sudo npm install -g pm2
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Application Deployment
|
||||
|
||||
### Clone and Install
|
||||
|
||||
```bash
|
||||
git clone <repository-url>
|
||||
cd flyer-crawler.projectium.com
|
||||
npm install
|
||||
```
|
||||
|
||||
### Build for Production
|
||||
|
||||
```bash
|
||||
npm run build
|
||||
```
|
||||
|
||||
### Start with PM2
|
||||
|
||||
```bash
|
||||
npm run start:prod
|
||||
```
|
||||
|
||||
This starts three PM2 processes:
|
||||
|
||||
- `flyer-crawler-api` - Main API server
|
||||
- `flyer-crawler-worker` - Background job worker
|
||||
- `flyer-crawler-analytics-worker` - Analytics processing worker
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables (Gitea Secrets)
|
||||
|
||||
For deployments using Gitea CI/CD workflows, configure these as **repository secrets**:
|
||||
|
||||
| Secret | Description |
|
||||
| --------------------------- | ------------------------------------------- |
|
||||
| `DB_HOST` | PostgreSQL server hostname |
|
||||
| `DB_USER` | PostgreSQL username |
|
||||
| `DB_PASSWORD` | PostgreSQL password |
|
||||
| `DB_DATABASE_PROD` | Production database name |
|
||||
| `REDIS_PASSWORD_PROD` | Production Redis password |
|
||||
| `REDIS_PASSWORD_TEST` | Test Redis password |
|
||||
| `JWT_SECRET` | Long, random string for signing auth tokens |
|
||||
| `VITE_GOOGLE_GENAI_API_KEY` | Google Gemini API key |
|
||||
| `GOOGLE_MAPS_API_KEY` | Google Maps Geocoding API key |
|
||||
|
||||
---
|
||||
|
||||
## NGINX Configuration
|
||||
|
||||
### Reverse Proxy Setup
|
||||
|
||||
Create a site configuration at `/etc/nginx/sites-available/flyer-crawler.projectium.com`:
|
||||
|
||||
```nginx
|
||||
server {
|
||||
listen 80;
|
||||
server_name flyer-crawler.projectium.com;
|
||||
|
||||
location / {
|
||||
proxy_pass http://localhost:5173;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
}
|
||||
|
||||
location /api {
|
||||
proxy_pass http://localhost:3001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Enable the site:
|
||||
|
||||
```bash
|
||||
sudo ln -s /etc/nginx/sites-available/flyer-crawler.projectium.com /etc/nginx/sites-enabled/
|
||||
sudo nginx -t
|
||||
sudo systemctl reload nginx
|
||||
```
|
||||
|
||||
### MIME Types Fix for .mjs Files
|
||||
|
||||
If JavaScript modules (`.mjs` files) aren't loading correctly, add the proper MIME type.
|
||||
|
||||
**Option 1**: Edit the site configuration file directly:
|
||||
|
||||
```nginx
|
||||
# Add inside the server block
|
||||
types {
|
||||
application/javascript js mjs;
|
||||
}
|
||||
```
|
||||
|
||||
**Option 2**: Edit `/etc/nginx/mime.types` globally:
|
||||
|
||||
```
|
||||
# Change this line:
|
||||
application/javascript js;
|
||||
|
||||
# To:
|
||||
application/javascript js mjs;
|
||||
```
|
||||
|
||||
After changes:
|
||||
|
||||
```bash
|
||||
sudo nginx -t
|
||||
sudo systemctl reload nginx
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## PM2 Log Management
|
||||
|
||||
Install and configure pm2-logrotate to manage log files:
|
||||
|
||||
```bash
|
||||
pm2 install pm2-logrotate
|
||||
pm2 set pm2-logrotate:max_size 10M
|
||||
pm2 set pm2-logrotate:retain 14
|
||||
pm2 set pm2-logrotate:compress false
|
||||
pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Rate Limiting
|
||||
|
||||
The application respects the Gemini AI service's rate limits. You can adjust the `GEMINI_RPM` (requests per minute) environment variable in production as needed without changing the code.
|
||||
|
||||
---
|
||||
|
||||
## CI/CD Pipeline
|
||||
|
||||
The project includes Gitea workflows at `.gitea/workflows/deploy.yml` that:
|
||||
|
||||
1. Run tests against a test database
|
||||
2. Build the application
|
||||
3. Deploy to production on successful builds
|
||||
|
||||
The workflow automatically:
|
||||
|
||||
- Sets up the test database schema before tests
|
||||
- Tears down test data after tests complete
|
||||
- Deploys to the production server
|
||||
|
||||
---
|
||||
|
||||
## Monitoring
|
||||
|
||||
### Check PM2 Status
|
||||
|
||||
```bash
|
||||
pm2 status
|
||||
pm2 logs
|
||||
pm2 logs flyer-crawler-api --lines 100
|
||||
```
|
||||
|
||||
### Restart Services
|
||||
|
||||
```bash
|
||||
pm2 restart all
|
||||
pm2 restart flyer-crawler-api
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Error Tracking with Bugsink (ADR-015)
|
||||
|
||||
Bugsink is a self-hosted Sentry-compatible error tracking system. See [docs/adr/0015-application-performance-monitoring-and-error-tracking.md](docs/adr/0015-application-performance-monitoring-and-error-tracking.md) for the full architecture decision.
|
||||
|
||||
### Creating Bugsink Projects and DSNs
|
||||
|
||||
After Bugsink is installed and running, you need to create projects and obtain DSNs:
|
||||
|
||||
1. **Access Bugsink UI**: Navigate to `http://localhost:8000`
|
||||
|
||||
2. **Log in** with your admin credentials
|
||||
|
||||
3. **Create Backend Project**:
|
||||
- Click "Create Project"
|
||||
- Name: `flyer-crawler-backend`
|
||||
- Platform: Node.js
|
||||
- Copy the generated DSN (format: `http://<key>@localhost:8000/<project_id>`)
|
||||
|
||||
4. **Create Frontend Project**:
|
||||
- Click "Create Project"
|
||||
- Name: `flyer-crawler-frontend`
|
||||
- Platform: React
|
||||
- Copy the generated DSN
|
||||
|
||||
5. **Configure Environment Variables**:
|
||||
|
||||
```bash
|
||||
# Backend (server-side)
|
||||
export SENTRY_DSN=http://<backend-key>@localhost:8000/<backend-project-id>
|
||||
|
||||
# Frontend (client-side, exposed to browser)
|
||||
export VITE_SENTRY_DSN=http://<frontend-key>@localhost:8000/<frontend-project-id>
|
||||
|
||||
# Shared settings
|
||||
export SENTRY_ENVIRONMENT=production
|
||||
export VITE_SENTRY_ENVIRONMENT=production
|
||||
export SENTRY_ENABLED=true
|
||||
export VITE_SENTRY_ENABLED=true
|
||||
```
|
||||
|
||||
### Testing Error Tracking
|
||||
|
||||
Verify Bugsink is receiving events:
|
||||
|
||||
```bash
|
||||
npx tsx scripts/test-bugsink.ts
|
||||
```
|
||||
|
||||
This sends test error and info events. Check the Bugsink UI for:
|
||||
|
||||
- `BugsinkTestError` in the backend project
|
||||
- Info message "Test info message from test-bugsink.ts"
|
||||
|
||||
### Sentry SDK v10+ HTTP DSN Limitation
|
||||
|
||||
The Sentry SDK v10+ enforces HTTPS-only DSNs by default. Since Bugsink runs locally over HTTP, our implementation uses the Sentry Store API directly instead of the SDK's built-in transport. This is handled transparently by the `sentry.server.ts` and `sentry.client.ts` modules.
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [Database Setup](DATABASE.md) - PostgreSQL and PostGIS configuration
|
||||
- [Authentication Setup](AUTHENTICATION.md) - OAuth provider configuration
|
||||
- [Installation Guide](INSTALL.md) - Local development setup
|
||||
- [Bare-Metal Server Setup](docs/BARE-METAL-SETUP.md) - Manual server installation guide
|
||||
228
Dockerfile.dev
228
Dockerfile.dev
@@ -7,7 +7,7 @@
|
||||
#
|
||||
# Base: Ubuntu 22.04 (LTS) - matches production server
|
||||
# Node: v20.x (LTS) - matches production
|
||||
# Includes: PostgreSQL client, Redis CLI, build tools
|
||||
# Includes: PostgreSQL client, Redis CLI, build tools, Bugsink, Logstash
|
||||
# ============================================================================
|
||||
|
||||
FROM ubuntu:22.04
|
||||
@@ -21,16 +21,23 @@ ENV DEBIAN_FRONTEND=noninteractive
|
||||
# - curl: for downloading Node.js setup script and health checks
|
||||
# - git: for version control operations
|
||||
# - build-essential: for compiling native Node.js modules (node-gyp)
|
||||
# - python3: required by some Node.js build tools
|
||||
# - python3, python3-pip, python3-venv: for Bugsink
|
||||
# - postgresql-client: for psql CLI (database initialization)
|
||||
# - redis-tools: for redis-cli (health checks)
|
||||
# - gnupg, apt-transport-https: for Elastic APT repository (Logstash)
|
||||
# - openjdk-17-jre-headless: required by Logstash
|
||||
RUN apt-get update && apt-get install -y \
|
||||
curl \
|
||||
git \
|
||||
build-essential \
|
||||
python3 \
|
||||
python3-pip \
|
||||
python3-venv \
|
||||
postgresql-client \
|
||||
redis-tools \
|
||||
gnupg \
|
||||
apt-transport-https \
|
||||
openjdk-17-jre-headless \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# ============================================================================
|
||||
@@ -39,6 +46,204 @@ RUN apt-get update && apt-get install -y \
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
||||
&& apt-get install -y nodejs
|
||||
|
||||
# ============================================================================
|
||||
# Install Logstash (Elastic APT Repository)
|
||||
# ============================================================================
|
||||
# ADR-015: Log aggregation for Pino and Redis logs → Bugsink
|
||||
RUN curl -fsSL https://artifacts.elastic.co/GPG-KEY-elasticsearch | gpg --dearmor -o /usr/share/keyrings/elastic-keyring.gpg \
|
||||
&& echo "deb [signed-by=/usr/share/keyrings/elastic-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-8.x.list \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y logstash \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# ============================================================================
|
||||
# Install Bugsink (Python Package)
|
||||
# ============================================================================
|
||||
# ADR-015: Self-hosted Sentry-compatible error tracking
|
||||
# Create a virtual environment for Bugsink to avoid conflicts
|
||||
RUN python3 -m venv /opt/bugsink \
|
||||
&& /opt/bugsink/bin/pip install --upgrade pip \
|
||||
&& /opt/bugsink/bin/pip install bugsink gunicorn psycopg2-binary
|
||||
|
||||
# Create Bugsink directories and configuration
|
||||
RUN mkdir -p /var/log/bugsink /var/lib/bugsink /opt/bugsink/conf
|
||||
|
||||
# Create Bugsink configuration file (Django settings module)
|
||||
# This file is imported by bugsink-manage via DJANGO_SETTINGS_MODULE
|
||||
# Based on bugsink/conf_templates/docker.py.template but customized for our setup
|
||||
RUN echo 'import os\n\
|
||||
from urllib.parse import urlparse\n\
|
||||
\n\
|
||||
from bugsink.settings.default import *\n\
|
||||
from bugsink.settings.default import DATABASES, SILENCED_SYSTEM_CHECKS\n\
|
||||
from bugsink.conf_utils import deduce_allowed_hosts, deduce_script_name\n\
|
||||
\n\
|
||||
IS_DOCKER = True\n\
|
||||
\n\
|
||||
# Security settings\n\
|
||||
SECRET_KEY = os.getenv("SECRET_KEY")\n\
|
||||
DEBUG = os.getenv("DEBUG", "False").lower() in ("true", "1", "yes")\n\
|
||||
\n\
|
||||
# Silence cookie security warnings for dev (no HTTPS)\n\
|
||||
SILENCED_SYSTEM_CHECKS += ["security.W012", "security.W016"]\n\
|
||||
\n\
|
||||
# Database configuration from DATABASE_URL environment variable\n\
|
||||
if os.getenv("DATABASE_URL"):\n\
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")\n\
|
||||
parsed = urlparse(DATABASE_URL)\n\
|
||||
\n\
|
||||
if parsed.scheme in ["postgres", "postgresql"]:\n\
|
||||
DATABASES["default"] = {\n\
|
||||
"ENGINE": "django.db.backends.postgresql",\n\
|
||||
"NAME": parsed.path.lstrip("/"),\n\
|
||||
"USER": parsed.username,\n\
|
||||
"PASSWORD": parsed.password,\n\
|
||||
"HOST": parsed.hostname,\n\
|
||||
"PORT": parsed.port or "5432",\n\
|
||||
}\n\
|
||||
\n\
|
||||
# Snappea (background task runner) settings\n\
|
||||
SNAPPEA = {\n\
|
||||
"TASK_ALWAYS_EAGER": False,\n\
|
||||
"WORKAHOLIC": True,\n\
|
||||
"NUM_WORKERS": 2,\n\
|
||||
"PID_FILE": None,\n\
|
||||
}\n\
|
||||
DATABASES["snappea"]["NAME"] = "/tmp/snappea.sqlite3"\n\
|
||||
\n\
|
||||
# Site settings\n\
|
||||
_PORT = os.getenv("PORT", "8000")\n\
|
||||
BUGSINK = {\n\
|
||||
"BASE_URL": os.getenv("BASE_URL", f"http://localhost:{_PORT}"),\n\
|
||||
"SITE_TITLE": os.getenv("SITE_TITLE", "Flyer Crawler Error Tracking"),\n\
|
||||
"SINGLE_USER": os.getenv("SINGLE_USER", "True").lower() in ("true", "1", "yes"),\n\
|
||||
"SINGLE_TEAM": os.getenv("SINGLE_TEAM", "True").lower() in ("true", "1", "yes"),\n\
|
||||
"PHONEHOME": False,\n\
|
||||
}\n\
|
||||
\n\
|
||||
ALLOWED_HOSTS = deduce_allowed_hosts(BUGSINK["BASE_URL"])\n\
|
||||
\n\
|
||||
# Console email backend for dev\n\
|
||||
EMAIL_BACKEND = "bugsink.email_backends.QuietConsoleEmailBackend"\n\
|
||||
' > /opt/bugsink/conf/bugsink_conf.py
|
||||
|
||||
# Create Bugsink startup script
|
||||
# Uses DATABASE_URL environment variable (standard Docker approach per docs)
|
||||
RUN echo '#!/bin/bash\n\
|
||||
set -e\n\
|
||||
\n\
|
||||
# Build DATABASE_URL from individual env vars for flexibility\n\
|
||||
export DATABASE_URL="postgresql://${BUGSINK_DB_USER:-bugsink}:${BUGSINK_DB_PASSWORD:-bugsink_dev_password}@${BUGSINK_DB_HOST:-postgres}:${BUGSINK_DB_PORT:-5432}/${BUGSINK_DB_NAME:-bugsink}"\n\
|
||||
# SECRET_KEY is required by Bugsink/Django\n\
|
||||
export SECRET_KEY="${BUGSINK_SECRET_KEY:-dev-bugsink-secret-key-minimum-50-characters-for-security}"\n\
|
||||
\n\
|
||||
# Create superuser if not exists (for dev convenience)\n\
|
||||
if [ -n "$BUGSINK_ADMIN_EMAIL" ] && [ -n "$BUGSINK_ADMIN_PASSWORD" ]; then\n\
|
||||
export CREATE_SUPERUSER="${BUGSINK_ADMIN_EMAIL}:${BUGSINK_ADMIN_PASSWORD}"\n\
|
||||
fi\n\
|
||||
\n\
|
||||
# Wait for PostgreSQL to be ready\n\
|
||||
until pg_isready -h ${BUGSINK_DB_HOST:-postgres} -p ${BUGSINK_DB_PORT:-5432} -U ${BUGSINK_DB_USER:-bugsink}; do\n\
|
||||
echo "Waiting for PostgreSQL..."\n\
|
||||
sleep 2\n\
|
||||
done\n\
|
||||
\n\
|
||||
echo "PostgreSQL is ready. Starting Bugsink..."\n\
|
||||
echo "DATABASE_URL: postgresql://${BUGSINK_DB_USER}:***@${BUGSINK_DB_HOST}:${BUGSINK_DB_PORT}/${BUGSINK_DB_NAME}"\n\
|
||||
\n\
|
||||
# Change to config directory so bugsink_conf.py can be found\n\
|
||||
cd /opt/bugsink/conf\n\
|
||||
\n\
|
||||
# Run migrations\n\
|
||||
echo "Running database migrations..."\n\
|
||||
/opt/bugsink/bin/bugsink-manage migrate --noinput\n\
|
||||
\n\
|
||||
# Create superuser if CREATE_SUPERUSER is set (format: email:password)\n\
|
||||
if [ -n "$CREATE_SUPERUSER" ]; then\n\
|
||||
IFS=":" read -r ADMIN_EMAIL ADMIN_PASS <<< "$CREATE_SUPERUSER"\n\
|
||||
/opt/bugsink/bin/bugsink-manage shell -c "\n\
|
||||
from django.contrib.auth import get_user_model\n\
|
||||
User = get_user_model()\n\
|
||||
if not User.objects.filter(email='"'"'$ADMIN_EMAIL'"'"').exists():\n\
|
||||
User.objects.create_superuser('"'"'$ADMIN_EMAIL'"'"', '"'"'$ADMIN_PASS'"'"')\n\
|
||||
print('"'"'Superuser created'"'"')\n\
|
||||
else:\n\
|
||||
print('"'"'Superuser already exists'"'"')\n\
|
||||
" || true\n\
|
||||
fi\n\
|
||||
\n\
|
||||
# Start Bugsink with Gunicorn\n\
|
||||
echo "Starting Gunicorn on port ${BUGSINK_PORT:-8000}..."\n\
|
||||
exec /opt/bugsink/bin/gunicorn \\\n\
|
||||
--bind 0.0.0.0:${BUGSINK_PORT:-8000} \\\n\
|
||||
--workers ${BUGSINK_WORKERS:-2} \\\n\
|
||||
--access-logfile - \\\n\
|
||||
--error-logfile - \\\n\
|
||||
bugsink.wsgi:application\n\
|
||||
' > /usr/local/bin/start-bugsink.sh \
|
||||
&& chmod +x /usr/local/bin/start-bugsink.sh
|
||||
|
||||
# ============================================================================
|
||||
# Create Logstash Pipeline Configuration
|
||||
# ============================================================================
|
||||
# ADR-015: Pino and Redis logs → Bugsink
|
||||
RUN mkdir -p /etc/logstash/conf.d /app/logs
|
||||
|
||||
RUN echo 'input {\n\
|
||||
# Pino application logs\n\
|
||||
file {\n\
|
||||
path => "/app/logs/*.log"\n\
|
||||
codec => json\n\
|
||||
type => "pino"\n\
|
||||
tags => ["app"]\n\
|
||||
start_position => "beginning"\n\
|
||||
sincedb_path => "/var/lib/logstash/sincedb_pino"\n\
|
||||
}\n\
|
||||
\n\
|
||||
# Redis logs\n\
|
||||
file {\n\
|
||||
path => "/var/log/redis/*.log"\n\
|
||||
type => "redis"\n\
|
||||
tags => ["redis"]\n\
|
||||
start_position => "beginning"\n\
|
||||
sincedb_path => "/var/lib/logstash/sincedb_redis"\n\
|
||||
}\n\
|
||||
}\n\
|
||||
\n\
|
||||
filter {\n\
|
||||
# Pino error detection (level 50 = error, 60 = fatal)\n\
|
||||
if [type] == "pino" and [level] >= 50 {\n\
|
||||
mutate { add_tag => ["error"] }\n\
|
||||
}\n\
|
||||
\n\
|
||||
# Redis error detection\n\
|
||||
if [type] == "redis" {\n\
|
||||
grok {\n\
|
||||
match => { "message" => "%%{POSINT:pid}:%%{WORD:role} %%{MONTHDAY} %%{MONTH} %%{TIME} %%{WORD:loglevel} %%{GREEDYDATA:redis_message}" }\n\
|
||||
}\n\
|
||||
if [loglevel] in ["WARNING", "ERROR"] {\n\
|
||||
mutate { add_tag => ["error"] }\n\
|
||||
}\n\
|
||||
}\n\
|
||||
}\n\
|
||||
\n\
|
||||
output {\n\
|
||||
if "error" in [tags] {\n\
|
||||
http {\n\
|
||||
url => "http://localhost:8000/api/store/"\n\
|
||||
http_method => "post"\n\
|
||||
format => "json"\n\
|
||||
}\n\
|
||||
}\n\
|
||||
\n\
|
||||
# Debug output (comment out in production)\n\
|
||||
stdout { codec => rubydebug }\n\
|
||||
}\n\
|
||||
' > /etc/logstash/conf.d/bugsink.conf
|
||||
|
||||
# Create Logstash sincedb directory
|
||||
RUN mkdir -p /var/lib/logstash && chown -R logstash:logstash /var/lib/logstash
|
||||
|
||||
# ============================================================================
|
||||
# Set Working Directory
|
||||
# ============================================================================
|
||||
@@ -52,6 +257,25 @@ ENV NODE_ENV=development
|
||||
# Increase Node.js memory limit for large builds
|
||||
ENV NODE_OPTIONS='--max-old-space-size=8192'
|
||||
|
||||
# Bugsink defaults (ADR-015)
|
||||
ENV BUGSINK_DB_HOST=postgres
|
||||
ENV BUGSINK_DB_PORT=5432
|
||||
ENV BUGSINK_DB_NAME=bugsink
|
||||
ENV BUGSINK_DB_USER=bugsink
|
||||
ENV BUGSINK_DB_PASSWORD=bugsink_dev_password
|
||||
ENV BUGSINK_PORT=8000
|
||||
ENV BUGSINK_BASE_URL=http://localhost:8000
|
||||
ENV BUGSINK_ADMIN_EMAIL=admin@localhost
|
||||
ENV BUGSINK_ADMIN_PASSWORD=admin
|
||||
|
||||
# ============================================================================
|
||||
# Expose Ports
|
||||
# ============================================================================
|
||||
# 3000 - Vite frontend
|
||||
# 3001 - Express backend
|
||||
# 8000 - Bugsink error tracking
|
||||
EXPOSE 3000 3001 8000
|
||||
|
||||
# ============================================================================
|
||||
# Default Command
|
||||
# ============================================================================
|
||||
|
||||
168
INSTALL.md
Normal file
168
INSTALL.md
Normal file
@@ -0,0 +1,168 @@
|
||||
# Installation Guide
|
||||
|
||||
This guide covers setting up a local development environment for Flyer Crawler.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js 20.x or later
|
||||
- Access to a PostgreSQL database (local or remote)
|
||||
- Redis instance (for session management)
|
||||
- Google Gemini API key
|
||||
- Google Maps API key (for geocoding)
|
||||
|
||||
## Quick Start
|
||||
|
||||
If you already have PostgreSQL and Redis configured:
|
||||
|
||||
```bash
|
||||
# Install dependencies
|
||||
npm install
|
||||
|
||||
# Run in development mode
|
||||
npm run dev
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Development Environment with Podman (Recommended for Windows)
|
||||
|
||||
This approach uses Podman with an Ubuntu container for a consistent development environment.
|
||||
|
||||
### Step 1: Install Prerequisites on Windows
|
||||
|
||||
1. **Install WSL 2**: Podman on Windows relies on the Windows Subsystem for Linux.
|
||||
|
||||
```powershell
|
||||
wsl --install
|
||||
```
|
||||
|
||||
Run this in an administrator PowerShell.
|
||||
|
||||
2. **Install Podman Desktop**: Download and install [Podman Desktop for Windows](https://podman-desktop.io/).
|
||||
|
||||
### Step 2: Set Up Podman
|
||||
|
||||
1. **Initialize Podman**: Launch Podman Desktop. It will automatically set up its WSL 2 machine.
|
||||
2. **Start Podman**: Ensure the Podman machine is running from the Podman Desktop interface.
|
||||
|
||||
### Step 3: Set Up the Ubuntu Container
|
||||
|
||||
1. **Pull Ubuntu Image**:
|
||||
|
||||
```bash
|
||||
podman pull ubuntu:latest
|
||||
```
|
||||
|
||||
2. **Create a Podman Volume** (persists node_modules between container restarts):
|
||||
|
||||
```bash
|
||||
podman volume create node_modules_cache
|
||||
```
|
||||
|
||||
3. **Run the Ubuntu Container**:
|
||||
|
||||
Open a terminal in your project's root directory and run:
|
||||
|
||||
```bash
|
||||
podman run -it -p 3001:3001 -p 5173:5173 --name flyer-dev \
|
||||
-v "$(pwd):/app" \
|
||||
-v "node_modules_cache:/app/node_modules" \
|
||||
ubuntu:latest
|
||||
```
|
||||
|
||||
| Flag | Purpose |
|
||||
| ------------------------------------------- | ------------------------------------------------ |
|
||||
| `-p 3001:3001` | Forwards the backend server port |
|
||||
| `-p 5173:5173` | Forwards the Vite frontend server port |
|
||||
| `--name flyer-dev` | Names the container for easy reference |
|
||||
| `-v "...:/app"` | Mounts your project directory into the container |
|
||||
| `-v "node_modules_cache:/app/node_modules"` | Mounts the named volume for node_modules |
|
||||
|
||||
### Step 4: Configure the Ubuntu Environment
|
||||
|
||||
You are now inside the Ubuntu container's shell.
|
||||
|
||||
1. **Update Package Lists**:
|
||||
|
||||
```bash
|
||||
apt-get update
|
||||
```
|
||||
|
||||
2. **Install Dependencies**:
|
||||
|
||||
```bash
|
||||
apt-get install -y curl git
|
||||
curl -sL https://deb.nodesource.com/setup_20.x | bash -
|
||||
apt-get install -y nodejs
|
||||
```
|
||||
|
||||
3. **Navigate to Project Directory**:
|
||||
|
||||
```bash
|
||||
cd /app
|
||||
```
|
||||
|
||||
4. **Install Project Dependencies**:
|
||||
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
### Step 5: Run the Development Server
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### Step 6: Access the Application
|
||||
|
||||
- **Frontend**: http://localhost:5173
|
||||
- **Backend API**: http://localhost:3001
|
||||
|
||||
### Managing the Container
|
||||
|
||||
| Action | Command |
|
||||
| --------------------- | -------------------------------- |
|
||||
| Stop the container | Press `Ctrl+C`, then type `exit` |
|
||||
| Restart the container | `podman start -a -i flyer-dev` |
|
||||
| Remove the container | `podman rm flyer-dev` |
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables
|
||||
|
||||
This project is configured to run in a CI/CD environment and does not use `.env` files. All configuration must be provided as environment variables.
|
||||
|
||||
For local development, you can export these in your shell or use your IDE's environment configuration:
|
||||
|
||||
| Variable | Description |
|
||||
| --------------------------- | ------------------------------------- |
|
||||
| `DB_HOST` | PostgreSQL server hostname |
|
||||
| `DB_USER` | PostgreSQL username |
|
||||
| `DB_PASSWORD` | PostgreSQL password |
|
||||
| `DB_DATABASE_PROD` | Production database name |
|
||||
| `JWT_SECRET` | Secret string for signing auth tokens |
|
||||
| `VITE_GOOGLE_GENAI_API_KEY` | Google Gemini API key |
|
||||
| `GOOGLE_MAPS_API_KEY` | Google Maps Geocoding API key |
|
||||
| `REDIS_PASSWORD_PROD` | Production Redis password |
|
||||
| `REDIS_PASSWORD_TEST` | Test Redis password |
|
||||
|
||||
---
|
||||
|
||||
## Seeding Development Users
|
||||
|
||||
To create initial test accounts (`admin@example.com` and `user@example.com`):
|
||||
|
||||
```bash
|
||||
npm run seed
|
||||
```
|
||||
|
||||
After running, you may need to restart your IDE's TypeScript server to pick up any generated types.
|
||||
|
||||
---
|
||||
|
||||
## Next Steps
|
||||
|
||||
- [Database Setup](DATABASE.md) - Set up PostgreSQL with required extensions
|
||||
- [Authentication Setup](AUTHENTICATION.md) - Configure OAuth providers
|
||||
- [Deployment Guide](DEPLOYMENT.md) - Deploy to production
|
||||
451
README.md
451
README.md
@@ -1,424 +1,91 @@
|
||||
# Flyer Crawler - Grocery AI Analyzer
|
||||
|
||||
Flyer Crawler is a web application that uses the Google Gemini AI to extract, analyze, and manage data from grocery store flyers. Users can upload flyer images or PDFs, and the application will automatically identify items, prices, and sale dates, storing the structured data in a PostgreSQL database for historical analysis, price tracking, and personalized deal alerts.
|
||||
Flyer Crawler is a web application that uses Google Gemini AI to extract, analyze, and manage data from grocery store flyers. Users can upload flyer images or PDFs, and the application automatically identifies items, prices, and sale dates, storing structured data in a PostgreSQL database for historical analysis, price tracking, and personalized deal alerts.
|
||||
|
||||
We are working on an app to help people save money, by finding good deals that are only advertized in store flyers/ads. So, the primary purpose of the site is to make uploading flyers as easy as possible and as accurate as possible, and to store peoples needs, so sales can be matched to needs.
|
||||
**Our mission**: Help people save money by finding good deals that are only advertised in store flyers. The app makes uploading flyers as easy and accurate as possible, and matches sales to users' needs.
|
||||
|
||||
---
|
||||
|
||||
## Features
|
||||
|
||||
- **AI-Powered Data Extraction**: Upload PNG, JPG, or PDF flyers to automatically extract store names, sale dates, and a detailed list of items with prices and quantities.
|
||||
- **Bulk Import**: Process multiple flyers at once with a summary report of successes, skips (duplicates), and errors.
|
||||
- **Database Integration**: All extracted data is saved to a PostgreSQL database, enabling long-term persistence and analysis.
|
||||
- **Personalized Watchlist**: Authenticated users can create a "watchlist" of specific grocery items they want to track.
|
||||
- **Active Deal Alerts**: The app highlights current sales on your watched items from all valid flyers in the database.
|
||||
- **Price History Charts**: Visualize the price trends of your watched items over time.
|
||||
- **Shopping List Management**: Users can create multiple shopping lists, add items from flyers or their watchlist, and track purchased items.
|
||||
- **User Authentication & Management**: Secure user sign-up, login, and profile management, including a secure account deletion process.
|
||||
- **Dynamic UI**: A responsive interface with dark mode and a choice between metric/imperial unit systems.
|
||||
- **AI-Powered Data Extraction**: Upload PNG, JPG, or PDF flyers to automatically extract store names, sale dates, and detailed item lists with prices and quantities
|
||||
- **Bulk Import**: Process multiple flyers at once with summary reports of successes, skips (duplicates), and errors
|
||||
- **Personalized Watchlist**: Create a watchlist of specific grocery items you want to track
|
||||
- **Active Deal Alerts**: See current sales on your watched items from all valid flyers
|
||||
- **Price History Charts**: Visualize price trends of watched items over time
|
||||
- **Shopping List Management**: Create multiple shopping lists, add items from flyers or your watchlist, and track purchased items
|
||||
- **User Authentication**: Secure sign-up, login, profile management, and account deletion
|
||||
- **Dynamic UI**: Responsive interface with dark mode and metric/imperial unit systems
|
||||
|
||||
---
|
||||
|
||||
## Tech Stack
|
||||
|
||||
- **Frontend**: React, TypeScript, Tailwind CSS
|
||||
- **AI**: Google Gemini API (`@google/genai`)
|
||||
- **Backend**: Node.js with Express
|
||||
- **Database**: PostgreSQL
|
||||
- **Authentication**: Passport.js
|
||||
- **UI Components**: Recharts for charts
|
||||
| Layer | Technology |
|
||||
| -------------- | ----------------------------------- |
|
||||
| Frontend | React, TypeScript, Tailwind CSS |
|
||||
| AI | Google Gemini API (`@google/genai`) |
|
||||
| Backend | Node.js, Express |
|
||||
| Database | PostgreSQL with PostGIS |
|
||||
| Authentication | Passport.js (Google, GitHub OAuth) |
|
||||
| Charts | Recharts |
|
||||
|
||||
---
|
||||
|
||||
## Required Secrets & Configuration
|
||||
|
||||
This project is configured to run in a CI/CD environment and does not use `.env` files. All configuration and secrets must be provided as environment variables. For deployments using the included Gitea workflows, these must be configured as **repository secrets** in your Gitea instance.
|
||||
|
||||
- **`DB_HOST`, `DB_USER`, `DB_PASSWORD`**: Credentials for your PostgreSQL server. The port is assumed to be `5432`.
|
||||
- **`DB_DATABASE_PROD`**: The name of your production database.
|
||||
- **`REDIS_PASSWORD_PROD`**: The password for your production Redis instance.
|
||||
- **`REDIS_PASSWORD_TEST`**: The password for your test Redis instance.
|
||||
- **`JWT_SECRET`**: A long, random, and secret string for signing authentication tokens.
|
||||
- **`VITE_GOOGLE_GENAI_API_KEY`**: Your Google Gemini API key.
|
||||
- **`GOOGLE_MAPS_API_KEY`**: Your Google Maps Geocoding API key.
|
||||
|
||||
## Setup and Installation
|
||||
|
||||
### Step 1: Set Up PostgreSQL Database
|
||||
|
||||
1. **Set up a PostgreSQL database instance.**
|
||||
2. **Run the Database Schema**:
|
||||
- Connect to your database using a tool like `psql` or DBeaver.
|
||||
- Open `sql/schema.sql.txt`, copy its entire contents, and execute it against your database.
|
||||
- This will create all necessary tables, functions, and relationships.
|
||||
|
||||
### Step 2: Install Dependencies and Run the Application
|
||||
|
||||
1. **Install Dependencies**:
|
||||
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
2. **Run the Application**:
|
||||
|
||||
```bash
|
||||
npm run start:prod
|
||||
```
|
||||
|
||||
### Step 3: Seed Development Users (Optional)
|
||||
|
||||
To create the initial `admin@example.com` and `user@example.com` accounts, you can run the seed script:
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
npm run seed
|
||||
# Install dependencies
|
||||
npm install
|
||||
|
||||
# Run in development mode
|
||||
npm run dev
|
||||
```
|
||||
|
||||
After running, you may need to restart your IDE's TypeScript server to pick up the changes.
|
||||
|
||||
## NGINX mime types issue
|
||||
|
||||
sudo nano /etc/nginx/mime.types
|
||||
|
||||
change
|
||||
|
||||
application/javascript js;
|
||||
|
||||
TO
|
||||
|
||||
application/javascript js mjs;
|
||||
|
||||
RESTART NGINX
|
||||
|
||||
sudo nginx -t
|
||||
sudo systemctl reload nginx
|
||||
|
||||
actually the proper change was to do this in the /etc/nginx/sites-available/flyer-crawler.projectium.com file
|
||||
|
||||
## for OAuth
|
||||
|
||||
1. Get Google OAuth Credentials
|
||||
This is a crucial step that you must do outside the codebase:
|
||||
|
||||
Go to the Google Cloud Console.
|
||||
|
||||
Create a new project (or select an existing one).
|
||||
|
||||
In the navigation menu, go to APIs & Services > Credentials.
|
||||
|
||||
Click Create Credentials > OAuth client ID.
|
||||
|
||||
Select Web application as the application type.
|
||||
|
||||
Under Authorized redirect URIs, click ADD URI and enter the URL where Google will redirect users back to your server. For local development, this will be: http://localhost:3001/api/auth/google/callback.
|
||||
|
||||
Click Create. You will be given a Client ID and a Client Secret.
|
||||
|
||||
2. Get GitHub OAuth Credentials
|
||||
You'll need to obtain a Client ID and Client Secret from GitHub:
|
||||
|
||||
Go to your GitHub profile settings.
|
||||
|
||||
Navigate to Developer settings > OAuth Apps.
|
||||
|
||||
Click New OAuth App.
|
||||
|
||||
Fill in the required fields:
|
||||
|
||||
Application name: A descriptive name for your app (e.g., "Flyer Crawler").
|
||||
Homepage URL: The base URL of your application (e.g., http://localhost:5173 for local development).
|
||||
Authorization callback URL: This is where GitHub will redirect users after they authorize your app. For local development, this will be: <http://localhost:3001/api/auth/github/callback>.
|
||||
Click Register application.
|
||||
|
||||
You will be given a Client ID and a Client Secret.
|
||||
|
||||
## connect to postgres on projectium.com
|
||||
|
||||
psql -h localhost -U flyer_crawler_user -d "flyer-crawler-prod" -W
|
||||
|
||||
## postgis
|
||||
|
||||
flyer-crawler-prod=> SELECT version();
|
||||
version
|
||||
See [INSTALL.md](INSTALL.md) for detailed setup instructions.
|
||||
|
||||
---
|
||||
|
||||
PostgreSQL 14.19 (Ubuntu 14.19-0ubuntu0.22.04.1) on x86_64-pc-linux-gnu, compiled by gcc (Ubuntu 11.4.0-1ubuntu1~22.04.2) 11.4.0, 64-bit
|
||||
(1 row)
|
||||
## Documentation
|
||||
|
||||
flyer-crawler-prod=> SELECT PostGIS_Full_Version();
|
||||
postgis_full_version
|
||||
| Document | Description |
|
||||
| -------------------------------------- | ---------------------------------------- |
|
||||
| [INSTALL.md](INSTALL.md) | Local development setup with Podman |
|
||||
| [DATABASE.md](DATABASE.md) | PostgreSQL setup, schema, and extensions |
|
||||
| [AUTHENTICATION.md](AUTHENTICATION.md) | OAuth configuration (Google, GitHub) |
|
||||
| [DEPLOYMENT.md](DEPLOYMENT.md) | Production server setup, NGINX, PM2 |
|
||||
|
||||
---
|
||||
|
||||
POSTGIS="3.2.0 c3e3cc0" [EXTENSION] PGSQL="140" GEOS="3.10.2-CAPI-1.16.0" PROJ="8.2.1" LIBXML="2.9.12" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
|
||||
(1 row)
|
||||
## Environment Variables
|
||||
|
||||
## production postgres setup
|
||||
This project uses environment variables for configuration (no `.env` files). Key variables:
|
||||
|
||||
Part 1: Production Database Setup
|
||||
This database will be the live, persistent storage for your application.
|
||||
| Variable | Description |
|
||||
| ----------------------------------- | -------------------------------- |
|
||||
| `DB_HOST`, `DB_USER`, `DB_PASSWORD` | PostgreSQL credentials |
|
||||
| `DB_DATABASE_PROD` | Production database name |
|
||||
| `JWT_SECRET` | Authentication token signing key |
|
||||
| `VITE_GOOGLE_GENAI_API_KEY` | Google Gemini API key |
|
||||
| `GOOGLE_MAPS_API_KEY` | Google Maps Geocoding API key |
|
||||
| `REDIS_PASSWORD_PROD` | Redis password |
|
||||
|
||||
Step 1: Install PostgreSQL (if not already installed)
|
||||
First, ensure PostgreSQL is installed on your server.
|
||||
See [INSTALL.md](INSTALL.md) for the complete list.
|
||||
|
||||
bash
|
||||
sudo apt update
|
||||
sudo apt install postgresql postgresql-contrib
|
||||
Step 2: Create the Production Database and User
|
||||
It's best practice to create a dedicated, non-superuser role for your application to connect with.
|
||||
---
|
||||
|
||||
Switch to the postgres system user to get superuser access to the database.
|
||||
## Scripts
|
||||
|
||||
bash
|
||||
sudo -u postgres psql
|
||||
Inside the psql shell, run the following SQL commands. Remember to replace 'a_very_strong_password' with a secure password that you will manage with a secrets tool or in your .env file.
|
||||
| Command | Description |
|
||||
| -------------------- | -------------------------------- |
|
||||
| `npm run dev` | Start development server |
|
||||
| `npm run build` | Build for production |
|
||||
| `npm run start:prod` | Start production server with PM2 |
|
||||
| `npm run test` | Run test suite |
|
||||
| `npm run seed` | Seed development user accounts |
|
||||
|
||||
sql
|
||||
-- Create a new role (user) for your application
|
||||
CREATE ROLE flyer_crawler_user WITH LOGIN PASSWORD 'a_very_strong_password';
|
||||
---
|
||||
|
||||
-- Create the production database and assign ownership to the new user
|
||||
CREATE DATABASE "flyer-crawler-prod" WITH OWNER = flyer_crawler_user;
|
||||
## License
|
||||
|
||||
-- Connect to the new database to install extensions within it.
|
||||
\c "flyer-crawler-prod"
|
||||
|
||||
-- Install the required extensions as a superuser. This only needs to be done once.
|
||||
CREATE EXTENSION IF NOT EXISTS postgis;
|
||||
CREATE EXTENSION IF NOT EXISTS pg_trgm;
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- Exit the psql shell
|
||||
|
||||
Step 3: Apply the Master Schema
|
||||
Now, you'll populate your new database with all the tables, functions, and initial data. Your master_schema_rollup.sql file is perfect for this.
|
||||
|
||||
Navigate to your project's root directory on the server.
|
||||
|
||||
Run the following command to execute the master schema script against your new production database. You will be prompted for the password you created in the previous step.
|
||||
|
||||
bash
|
||||
psql -U flyer_crawler_user -d "flyer-crawler-prod" -f sql/master_schema_rollup.sql
|
||||
This single command creates all tables, extensions (pg_trgm, postgis), functions, and triggers, and seeds essential data like categories and master items.
|
||||
|
||||
Step 4: Seed the Admin Account (If Needed)
|
||||
Your application has a separate script to create the initial admin user. To run it, you must first set the required environment variables in your shell session.
|
||||
|
||||
bash
|
||||
|
||||
# Set variables for the current session
|
||||
|
||||
export DB_USER=flyer_crawler_user DB_PASSWORD=your_password DB_NAME="flyer-crawler-prod" ...
|
||||
|
||||
# Run the seeding script
|
||||
|
||||
npx tsx src/db/seed_admin_account.ts
|
||||
Your production database is now ready!
|
||||
|
||||
Part 2: Test Database Setup (for CI/CD)
|
||||
Your Gitea workflow (deploy.yml) already automates the creation and teardown of the test database during the pipeline run. The steps below are for understanding what the workflow does and for manual setup if you ever need to run tests outside the CI pipeline.
|
||||
|
||||
The process your CI pipeline follows is:
|
||||
|
||||
Setup (sql/test_setup.sql):
|
||||
|
||||
As the postgres superuser, it runs sql/test_setup.sql.
|
||||
This creates a temporary role named test_runner.
|
||||
It creates a separate database named "flyer-crawler-test" owned by test_runner.
|
||||
Schema Application (src/tests/setup/global-setup.ts):
|
||||
|
||||
The test runner (vitest) executes the global-setup.ts file.
|
||||
This script connects to the "flyer-crawler-test" database using the temporary credentials.
|
||||
It then runs the same sql/master_schema_rollup.sql file, ensuring your test database has the exact same structure as production.
|
||||
Test Execution:
|
||||
|
||||
Your tests run against this clean, isolated "flyer-crawler-test" database.
|
||||
Teardown (sql/test_teardown.sql):
|
||||
|
||||
After tests complete (whether they pass or fail), the if: always() step in your workflow ensures that sql/test_teardown.sql is executed.
|
||||
This script terminates any lingering connections to the test database, drops the "flyer-crawler-test" database completely, and drops the test_runner role.
|
||||
|
||||
Part 3: Test Database Setup (for CI/CD and Local Testing)
|
||||
Your Gitea workflow and local test runner rely on a permanent test database. This database needs to be created once on your server. The test runner will automatically reset the schema inside it before every test run.
|
||||
|
||||
Step 1: Create the Test Database
|
||||
On your server, switch to the postgres system user to get superuser access.
|
||||
|
||||
bash
|
||||
sudo -u postgres psql
|
||||
Inside the psql shell, create a new database. We will assign ownership to the same flyer_crawler_user that your application uses. This user needs to be the owner to have permission to drop and recreate the schema during testing.
|
||||
|
||||
sql
|
||||
-- Create the test database and assign ownership to your existing application user
|
||||
CREATE DATABASE "flyer-crawler-test" WITH OWNER = flyer_crawler_user;
|
||||
|
||||
-- Connect to the newly created test database
|
||||
\c "flyer-crawler-test"
|
||||
|
||||
-- Install the required extensions as a superuser. This only needs to be done once.
|
||||
CREATE EXTENSION IF NOT EXISTS postgis;
|
||||
CREATE EXTENSION IF NOT EXISTS pg_trgm;
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
|
||||
|
||||
-- Connect to the newly created test database
|
||||
\c "flyer-crawler-test"
|
||||
|
||||
-- Grant ownership of the public schema within this database to your application user.
|
||||
-- This is CRITICAL for allowing the test runner to drop and recreate the schema.
|
||||
ALTER SCHEMA public OWNER TO flyer_crawler_user;
|
||||
|
||||
-- Exit the psql shell
|
||||
\q
|
||||
|
||||
Step 2: Configure Gitea Secrets for Testing
|
||||
Your CI pipeline needs to know how to connect to this test database. Ensure the following secrets are set in your Gitea repository settings:
|
||||
|
||||
DB_HOST: The hostname of your database server (e.g., localhost).
|
||||
DB_PORT: The port for your database (e.g., 5432).
|
||||
DB_USER: The user for the database (e.g., flyer_crawler_user).
|
||||
DB_PASSWORD: The password for the database user.
|
||||
The workflow file (.gitea/workflows/deploy.yml) is configured to use these secrets and will automatically connect to the "flyer-crawler-test" database when it runs the npm test command.
|
||||
|
||||
How the Test Workflow Works
|
||||
The CI pipeline no longer uses sudo or creates/destroys the database on each run. Instead, the process is now:
|
||||
|
||||
Setup: The vitest global setup script (src/tests/setup/global-setup.ts) connects to the permanent "flyer-crawler-test" database.
|
||||
|
||||
Schema Reset: It executes sql/drop_tables.sql (which runs DROP SCHEMA public CASCADE) to completely wipe all tables, functions, and triggers.
|
||||
|
||||
Schema Application: It then immediately executes sql/master_schema_rollup.sql to build a fresh, clean schema and seed initial data.
|
||||
|
||||
Test Execution: Your tests run against this clean, isolated schema.
|
||||
|
||||
This approach is faster, more reliable, and removes the need for sudo access within the CI pipeline.
|
||||
|
||||
gitea-runner@projectium:~$ pm2 install pm2-logrotate
|
||||
[PM2][Module] Installing NPM pm2-logrotate module
|
||||
[PM2][Module] Calling [NPM] to install pm2-logrotate ...
|
||||
|
||||
added 161 packages in 5s
|
||||
|
||||
21 packages are looking for funding
|
||||
run `npm fund` for details
|
||||
npm notice
|
||||
npm notice New patch version of npm available! 11.6.3 -> 11.6.4
|
||||
npm notice Changelog: https://github.com/npm/cli/releases/tag/v11.6.4
|
||||
npm notice To update run: npm install -g npm@11.6.4
|
||||
npm notice
|
||||
[PM2][Module] Module downloaded
|
||||
[PM2][WARN] Applications pm2-logrotate not running, starting...
|
||||
[PM2] App [pm2-logrotate] launched (1 instances)
|
||||
Module: pm2-logrotate
|
||||
$ pm2 set pm2-logrotate:max_size 10M
|
||||
$ pm2 set pm2-logrotate:retain 30
|
||||
$ pm2 set pm2-logrotate:compress false
|
||||
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
||||
$ pm2 set pm2-logrotate:workerInterval 30
|
||||
$ pm2 set pm2-logrotate:rotateInterval 0 0 \* \* _
|
||||
$ pm2 set pm2-logrotate:rotateModule true
|
||||
Modules configuration. Copy/Paste line to edit values.
|
||||
[PM2][Module] Module successfully installed and launched
|
||||
[PM2][Module] Checkout module options: `$ pm2 conf`
|
||||
┌────┬───────────────────────────────────┬─────────────┬─────────┬─────────┬──────────┬────────┬──────┬───────────┬──────────┬──────────┬──────────┬──────────┐
|
||||
│ id │ name │ namespace │ version │ mode │ pid │ uptime │ ↺ │ status │ cpu │ mem │ user │ watching │
|
||||
├────┼───────────────────────────────────┼─────────────┼─────────┼─────────┼──────────┼────────┼──────┼───────────┼──────────┼──────────┼──────────┼──────────┤
|
||||
│ 2 │ flyer-crawler-analytics-worker │ default │ 0.0.0 │ fork │ 3846981 │ 7m │ 5 │ online │ 0% │ 55.8mb │ git… │ disabled │
|
||||
│ 11 │ flyer-crawler-api │ default │ 0.0.0 │ fork │ 3846987 │ 7m │ 0 │ online │ 0% │ 59.0mb │ git… │ disabled │
|
||||
│ 12 │ flyer-crawler-worker │ default │ 0.0.0 │ fork │ 3846988 │ 7m │ 0 │ online │ 0% │ 54.2mb │ git… │ disabled │
|
||||
└────┴───────────────────────────────────┴─────────────┴─────────┴─────────┴──────────┴────────┴──────┴───────────┴──────────┴──────────┴──────────┴──────────┘
|
||||
Module
|
||||
┌────┬──────────────────────────────┬───────────────┬──────────┬──────────┬──────┬──────────┬──────────┬──────────┐
|
||||
│ id │ module │ version │ pid │ status │ ↺ │ cpu │ mem │ user │
|
||||
├────┼──────────────────────────────┼───────────────┼──────────┼──────────┼──────┼──────────┼──────────┼──────────┤
|
||||
│ 13 │ pm2-logrotate │ 3.0.0 │ 3848878 │ online │ 0 │ 0% │ 20.1mb │ git… │
|
||||
└────┴──────────────────────────────┴───────────────┴──────────┴──────────┴──────┴──────────┴──────────┴──────────┘
|
||||
gitea-runner@projectium:~$ pm2 set pm2-logrotate:max_size 10M
|
||||
[PM2] Module pm2-logrotate restarted
|
||||
[PM2] Setting changed
|
||||
Module: pm2-logrotate
|
||||
$ pm2 set pm2-logrotate:max_size 10M
|
||||
$ pm2 set pm2-logrotate:retain 30
|
||||
$ pm2 set pm2-logrotate:compress false
|
||||
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
||||
$ pm2 set pm2-logrotate:workerInterval 30
|
||||
$ pm2 set pm2-logrotate:rotateInterval 0 0 _ \* _
|
||||
$ pm2 set pm2-logrotate:rotateModule true
|
||||
gitea-runner@projectium:~$ pm2 set pm2-logrotate:retain 14
|
||||
[PM2] Module pm2-logrotate restarted
|
||||
[PM2] Setting changed
|
||||
Module: pm2-logrotate
|
||||
$ pm2 set pm2-logrotate:max_size 10M
|
||||
$ pm2 set pm2-logrotate:retain 14
|
||||
$ pm2 set pm2-logrotate:compress false
|
||||
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
||||
$ pm2 set pm2-logrotate:workerInterval 30
|
||||
$ pm2 set pm2-logrotate:rotateInterval 0 0 _ \* \*
|
||||
$ pm2 set pm2-logrotate:rotateModule true
|
||||
gitea-runner@projectium:~$
|
||||
|
||||
## dev server setup:
|
||||
|
||||
Here are the steps to set up the development environment on Windows using Podman with an Ubuntu container:
|
||||
|
||||
1. Install Prerequisites on Windows
|
||||
Install WSL 2: Podman on Windows relies on the Windows Subsystem for Linux. Install it by running wsl --install in an administrator PowerShell.
|
||||
Install Podman Desktop: Download and install Podman Desktop for Windows.
|
||||
|
||||
2. Set Up Podman
|
||||
Initialize Podman: Launch Podman Desktop. It will automatically set up its WSL 2 machine.
|
||||
Start Podman: Ensure the Podman machine is running from the Podman Desktop interface.
|
||||
|
||||
3. Set Up the Ubuntu Container
|
||||
|
||||
- Pull Ubuntu Image: Open a PowerShell or command prompt and pull the latest Ubuntu image:
|
||||
podman pull ubuntu:latest
|
||||
- Create a Podman Volume: Create a volume to persist node_modules and avoid installing them every time the container starts.
|
||||
podman volume create node_modules_cache
|
||||
- Run the Ubuntu Container: Start a new container with the project directory mounted and the necessary ports forwarded.
|
||||
- Open a terminal in your project's root directory on Windows.
|
||||
- Run the following command, replacing D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com with the full path to your project:
|
||||
|
||||
podman run -it -p 3001:3001 -p 5173:5173 --name flyer-dev -v "D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com:/app" -v "node_modules_cache:/app/node_modules" ubuntu:latest
|
||||
|
||||
-p 3001:3001: Forwards the backend server port.
|
||||
-p 5173:5173: Forwards the Vite frontend server port.
|
||||
--name flyer-dev: Names the container for easy reference.
|
||||
-v "...:/app": Mounts your project directory into the container at /app.
|
||||
-v "node_modules_cache:/app/node_modules": Mounts the named volume for node_modules.
|
||||
|
||||
4. Configure the Ubuntu Environment
|
||||
You are now inside the Ubuntu container's shell.
|
||||
|
||||
- Update Package Lists:
|
||||
apt-get update
|
||||
- Install Dependencies: Install curl, git, and nodejs (which includes npm).
|
||||
apt-get install -y curl git
|
||||
curl -sL https://deb.nodesource.com/setup_20.x | bash -
|
||||
apt-get install -y nodejs
|
||||
- Navigate to Project Directory:
|
||||
cd /app
|
||||
|
||||
- Install Project Dependencies:
|
||||
npm install
|
||||
|
||||
5. Run the Development Server
|
||||
- Start the Application:
|
||||
npm run dev
|
||||
|
||||
6. Accessing the Application
|
||||
|
||||
- Frontend: Open your browser and go to http://localhost:5173.
|
||||
- Backend: The frontend will make API calls to http://localhost:3001.
|
||||
|
||||
Managing the Environment
|
||||
|
||||
- Stopping the Container: Press Ctrl+C in the container terminal, then type exit.
|
||||
- Restarting the Container:
|
||||
podman start -a -i flyer-dev
|
||||
|
||||
## for me:
|
||||
|
||||
cd /mnt/d/gitea/flyer-crawler.projectium.com/flyer-crawler.projectium.com
|
||||
podman run -it -p 3001:3001 -p 5173:5173 --name flyer-dev -v "$(pwd):/app" -v "node_modules_cache:/app/node_modules" ubuntu:latest
|
||||
|
||||
rate limiting
|
||||
|
||||
respect the AI service's rate limits, making it more stable and robust. You can adjust the GEMINI_RPM environment variable in your production environment as needed without changing the code.
|
||||
[Add license information here]
|
||||
|
||||
3
README.testing.md
Normal file
3
README.testing.md
Normal file
@@ -0,0 +1,3 @@
|
||||
using powershell on win10 use this command to run the integration tests only in the container
|
||||
|
||||
podman exec -i flyer-crawler-dev npm run test:integration 2>&1 | Tee-Object -FilePath test-output.txt
|
||||
@@ -5,7 +5,7 @@
|
||||
# This file defines the local development environment using Docker/Podman.
|
||||
#
|
||||
# Services:
|
||||
# - app: Node.js application (API + Frontend)
|
||||
# - app: Node.js application (API + Frontend + Bugsink + Logstash)
|
||||
# - postgres: PostgreSQL 15 with PostGIS extension
|
||||
# - redis: Redis for caching and job queues
|
||||
#
|
||||
@@ -18,6 +18,10 @@
|
||||
# VS Code Dev Containers:
|
||||
# This file is referenced by .devcontainer/devcontainer.json for seamless
|
||||
# VS Code integration. Open the project in VS Code and use "Reopen in Container".
|
||||
#
|
||||
# Bugsink (ADR-015):
|
||||
# Access error tracking UI at http://localhost:8000
|
||||
# Default login: admin@localhost / admin
|
||||
# ============================================================================
|
||||
|
||||
version: '3.8'
|
||||
@@ -43,6 +47,7 @@ services:
|
||||
ports:
|
||||
- '3000:3000' # Frontend (Vite default)
|
||||
- '3001:3001' # Backend API
|
||||
- '8000:8000' # Bugsink error tracking (ADR-015)
|
||||
environment:
|
||||
# Core settings
|
||||
- NODE_ENV=development
|
||||
@@ -62,6 +67,26 @@ services:
|
||||
- JWT_SECRET=dev-jwt-secret-change-in-production
|
||||
# Worker settings
|
||||
- WORKER_LOCK_DURATION=120000
|
||||
# Bugsink error tracking (ADR-015)
|
||||
- BUGSINK_DB_HOST=postgres
|
||||
- BUGSINK_DB_PORT=5432
|
||||
- BUGSINK_DB_NAME=bugsink
|
||||
- BUGSINK_DB_USER=bugsink
|
||||
- BUGSINK_DB_PASSWORD=bugsink_dev_password
|
||||
- BUGSINK_PORT=8000
|
||||
- BUGSINK_BASE_URL=http://localhost:8000
|
||||
- BUGSINK_ADMIN_EMAIL=admin@localhost
|
||||
- BUGSINK_ADMIN_PASSWORD=admin
|
||||
- BUGSINK_SECRET_KEY=dev-bugsink-secret-key-minimum-50-characters-for-security
|
||||
# Sentry SDK configuration (points to local Bugsink)
|
||||
- SENTRY_DSN=http://59a58583-e869-7697-f94a-cfa0337676a8@localhost:8000/1
|
||||
- VITE_SENTRY_DSN=http://d5fc5221-4266-ff2f-9af8-5689696072f3@localhost:8000/2
|
||||
- SENTRY_ENVIRONMENT=development
|
||||
- VITE_SENTRY_ENVIRONMENT=development
|
||||
- SENTRY_ENABLED=true
|
||||
- VITE_SENTRY_ENABLED=true
|
||||
- SENTRY_DEBUG=true
|
||||
- VITE_SENTRY_DEBUG=true
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
@@ -93,9 +118,10 @@ services:
|
||||
POSTGRES_INITDB_ARGS: '--encoding=UTF8 --locale=C'
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
# Mount the extensions init script to run on first database creation
|
||||
# The 00- prefix ensures it runs before any other init scripts
|
||||
# Mount init scripts to run on first database creation
|
||||
# Scripts run in alphabetical order: 00-extensions, 01-bugsink
|
||||
- ./sql/00-init-extensions.sql:/docker-entrypoint-initdb.d/00-init-extensions.sql:ro
|
||||
- ./sql/01-init-bugsink.sh:/docker-entrypoint-initdb.d/01-init-bugsink.sh:ro
|
||||
# Healthcheck ensures postgres is ready before app starts
|
||||
healthcheck:
|
||||
test: ['CMD-SHELL', 'pg_isready -U postgres -d flyer_crawler_dev']
|
||||
|
||||
637
docs/BARE-METAL-SETUP.md
Normal file
637
docs/BARE-METAL-SETUP.md
Normal file
@@ -0,0 +1,637 @@
|
||||
# Bare-Metal Server Setup Guide
|
||||
|
||||
This guide covers the manual installation of Flyer Crawler and its dependencies on a bare-metal Ubuntu server (e.g., a colocation server). This is the definitive reference for setting up a production environment without containers.
|
||||
|
||||
**Target Environment**: Ubuntu 22.04 LTS (or newer)
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [System Prerequisites](#system-prerequisites)
|
||||
2. [PostgreSQL Setup](#postgresql-setup)
|
||||
3. [Redis Setup](#redis-setup)
|
||||
4. [Node.js and Application Setup](#nodejs-and-application-setup)
|
||||
5. [PM2 Process Manager](#pm2-process-manager)
|
||||
6. [NGINX Reverse Proxy](#nginx-reverse-proxy)
|
||||
7. [Bugsink Error Tracking](#bugsink-error-tracking)
|
||||
8. [Logstash Log Aggregation](#logstash-log-aggregation)
|
||||
9. [SSL/TLS with Let's Encrypt](#ssltls-with-lets-encrypt)
|
||||
10. [Firewall Configuration](#firewall-configuration)
|
||||
11. [Maintenance Commands](#maintenance-commands)
|
||||
|
||||
---
|
||||
|
||||
## System Prerequisites
|
||||
|
||||
Update the system and install essential packages:
|
||||
|
||||
```bash
|
||||
sudo apt update && sudo apt upgrade -y
|
||||
sudo apt install -y curl git build-essential python3 python3-pip python3-venv
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## PostgreSQL Setup
|
||||
|
||||
### Install PostgreSQL 14+ with PostGIS
|
||||
|
||||
```bash
|
||||
# Add PostgreSQL APT repository
|
||||
sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
|
||||
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
||||
sudo apt update
|
||||
|
||||
# Install PostgreSQL and PostGIS
|
||||
sudo apt install -y postgresql-14 postgresql-14-postgis-3
|
||||
```
|
||||
|
||||
### Create Application Database and User
|
||||
|
||||
```bash
|
||||
sudo -u postgres psql
|
||||
```
|
||||
|
||||
```sql
|
||||
-- Create application user and database
|
||||
CREATE USER flyer_crawler WITH PASSWORD 'YOUR_SECURE_PASSWORD';
|
||||
CREATE DATABASE flyer_crawler OWNER flyer_crawler;
|
||||
|
||||
-- Connect to the database and enable extensions
|
||||
\c flyer_crawler
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS postgis;
|
||||
CREATE EXTENSION IF NOT EXISTS pg_trgm;
|
||||
CREATE EXTENSION IF NOT EXISTS pgcrypto;
|
||||
|
||||
-- Grant privileges
|
||||
GRANT ALL PRIVILEGES ON DATABASE flyer_crawler TO flyer_crawler;
|
||||
|
||||
\q
|
||||
```
|
||||
|
||||
### Create Bugsink Database (for error tracking)
|
||||
|
||||
```bash
|
||||
sudo -u postgres psql
|
||||
```
|
||||
|
||||
```sql
|
||||
-- Create dedicated Bugsink user and database
|
||||
CREATE USER bugsink WITH PASSWORD 'BUGSINK_SECURE_PASSWORD';
|
||||
CREATE DATABASE bugsink OWNER bugsink;
|
||||
GRANT ALL PRIVILEGES ON DATABASE bugsink TO bugsink;
|
||||
|
||||
\q
|
||||
```
|
||||
|
||||
### Configure PostgreSQL for Remote Access (if needed)
|
||||
|
||||
Edit `/etc/postgresql/14/main/postgresql.conf`:
|
||||
|
||||
```conf
|
||||
listen_addresses = 'localhost' # Change to '*' for remote access
|
||||
```
|
||||
|
||||
Edit `/etc/postgresql/14/main/pg_hba.conf` to add allowed hosts:
|
||||
|
||||
```conf
|
||||
# Local connections
|
||||
local all all peer
|
||||
host all all 127.0.0.1/32 scram-sha-256
|
||||
```
|
||||
|
||||
Restart PostgreSQL:
|
||||
|
||||
```bash
|
||||
sudo systemctl restart postgresql
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Redis Setup
|
||||
|
||||
### Install Redis
|
||||
|
||||
```bash
|
||||
sudo apt install -y redis-server
|
||||
```
|
||||
|
||||
### Configure Redis Password
|
||||
|
||||
Edit `/etc/redis/redis.conf`:
|
||||
|
||||
```conf
|
||||
requirepass YOUR_REDIS_PASSWORD
|
||||
```
|
||||
|
||||
Restart Redis:
|
||||
|
||||
```bash
|
||||
sudo systemctl restart redis-server
|
||||
sudo systemctl enable redis-server
|
||||
```
|
||||
|
||||
### Test Redis Connection
|
||||
|
||||
```bash
|
||||
redis-cli -a YOUR_REDIS_PASSWORD ping
|
||||
# Should output: PONG
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Node.js and Application Setup
|
||||
|
||||
### Install Node.js 20.x
|
||||
|
||||
```bash
|
||||
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
|
||||
sudo apt install -y nodejs
|
||||
```
|
||||
|
||||
Verify installation:
|
||||
|
||||
```bash
|
||||
node --version # Should output v20.x.x
|
||||
npm --version
|
||||
```
|
||||
|
||||
### Install System Dependencies for PDF Processing
|
||||
|
||||
```bash
|
||||
sudo apt install -y poppler-utils # For pdftocairo
|
||||
```
|
||||
|
||||
### Clone and Install Application
|
||||
|
||||
```bash
|
||||
# Create application directory
|
||||
sudo mkdir -p /opt/flyer-crawler
|
||||
sudo chown $USER:$USER /opt/flyer-crawler
|
||||
|
||||
# Clone repository
|
||||
cd /opt/flyer-crawler
|
||||
git clone https://gitea.projectium.com/flyer-crawler/flyer-crawler.projectium.com.git .
|
||||
|
||||
# Install dependencies
|
||||
npm install
|
||||
|
||||
# Build for production
|
||||
npm run build
|
||||
```
|
||||
|
||||
### Configure Environment Variables
|
||||
|
||||
Create a systemd environment file at `/etc/flyer-crawler/environment`:
|
||||
|
||||
```bash
|
||||
sudo mkdir -p /etc/flyer-crawler
|
||||
sudo nano /etc/flyer-crawler/environment
|
||||
```
|
||||
|
||||
Add the following (replace with actual values):
|
||||
|
||||
```bash
|
||||
# Database
|
||||
DB_HOST=localhost
|
||||
DB_USER=flyer_crawler
|
||||
DB_PASSWORD=YOUR_SECURE_PASSWORD
|
||||
DB_DATABASE_PROD=flyer_crawler
|
||||
|
||||
# Redis
|
||||
REDIS_HOST=localhost
|
||||
REDIS_PORT=6379
|
||||
REDIS_PASSWORD_PROD=YOUR_REDIS_PASSWORD
|
||||
|
||||
# Authentication
|
||||
JWT_SECRET=YOUR_LONG_RANDOM_JWT_SECRET
|
||||
|
||||
# Google APIs
|
||||
VITE_GOOGLE_GENAI_API_KEY=YOUR_GEMINI_API_KEY
|
||||
GOOGLE_MAPS_API_KEY=YOUR_MAPS_API_KEY
|
||||
|
||||
# Sentry/Bugsink Error Tracking (ADR-015)
|
||||
SENTRY_DSN=http://BACKEND_KEY@localhost:8000/1
|
||||
VITE_SENTRY_DSN=http://FRONTEND_KEY@localhost:8000/2
|
||||
SENTRY_ENVIRONMENT=production
|
||||
VITE_SENTRY_ENVIRONMENT=production
|
||||
SENTRY_ENABLED=true
|
||||
VITE_SENTRY_ENABLED=true
|
||||
SENTRY_DEBUG=false
|
||||
VITE_SENTRY_DEBUG=false
|
||||
|
||||
# Application
|
||||
NODE_ENV=production
|
||||
PORT=3001
|
||||
```
|
||||
|
||||
Secure the file:
|
||||
|
||||
```bash
|
||||
sudo chmod 600 /etc/flyer-crawler/environment
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## PM2 Process Manager
|
||||
|
||||
### Install PM2 Globally
|
||||
|
||||
```bash
|
||||
sudo npm install -g pm2
|
||||
```
|
||||
|
||||
### Start Application with PM2
|
||||
|
||||
```bash
|
||||
cd /opt/flyer-crawler
|
||||
npm run start:prod
|
||||
```
|
||||
|
||||
This starts three processes:
|
||||
|
||||
- `flyer-crawler-api` - Main API server (port 3001)
|
||||
- `flyer-crawler-worker` - Background job worker
|
||||
- `flyer-crawler-analytics-worker` - Analytics processing worker
|
||||
|
||||
### Configure PM2 Startup
|
||||
|
||||
```bash
|
||||
pm2 startup systemd
|
||||
# Follow the command output to enable PM2 on boot
|
||||
|
||||
pm2 save
|
||||
```
|
||||
|
||||
### PM2 Log Rotation
|
||||
|
||||
```bash
|
||||
pm2 install pm2-logrotate
|
||||
pm2 set pm2-logrotate:max_size 10M
|
||||
pm2 set pm2-logrotate:retain 14
|
||||
pm2 set pm2-logrotate:compress true
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## NGINX Reverse Proxy
|
||||
|
||||
### Install NGINX
|
||||
|
||||
```bash
|
||||
sudo apt install -y nginx
|
||||
```
|
||||
|
||||
### Create Site Configuration
|
||||
|
||||
Create `/etc/nginx/sites-available/flyer-crawler.projectium.com`:
|
||||
|
||||
```nginx
|
||||
server {
|
||||
listen 80;
|
||||
server_name flyer-crawler.projectium.com;
|
||||
|
||||
# Redirect HTTP to HTTPS (uncomment after SSL setup)
|
||||
# return 301 https://$server_name$request_uri;
|
||||
|
||||
location / {
|
||||
proxy_pass http://localhost:5173;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
}
|
||||
|
||||
location /api {
|
||||
proxy_pass http://localhost:3001;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
|
||||
# File upload size limit
|
||||
client_max_body_size 50M;
|
||||
}
|
||||
|
||||
# MIME type fix for .mjs files
|
||||
types {
|
||||
application/javascript js mjs;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Enable the Site
|
||||
|
||||
```bash
|
||||
sudo ln -s /etc/nginx/sites-available/flyer-crawler.projectium.com /etc/nginx/sites-enabled/
|
||||
sudo nginx -t
|
||||
sudo systemctl reload nginx
|
||||
sudo systemctl enable nginx
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Bugsink Error Tracking
|
||||
|
||||
Bugsink is a lightweight, self-hosted Sentry-compatible error tracking system. See [ADR-015](adr/0015-application-performance-monitoring-and-error-tracking.md) for architecture details.
|
||||
|
||||
### Install Bugsink
|
||||
|
||||
```bash
|
||||
# Create virtual environment
|
||||
sudo mkdir -p /opt/bugsink
|
||||
sudo python3 -m venv /opt/bugsink/venv
|
||||
|
||||
# Activate and install
|
||||
source /opt/bugsink/venv/bin/activate
|
||||
pip install bugsink
|
||||
|
||||
# Create wrapper scripts
|
||||
sudo tee /opt/bugsink/bin/bugsink-manage << 'EOF'
|
||||
#!/bin/bash
|
||||
source /opt/bugsink/venv/bin/activate
|
||||
exec python -m bugsink.manage "$@"
|
||||
EOF
|
||||
|
||||
sudo tee /opt/bugsink/bin/bugsink-runserver << 'EOF'
|
||||
#!/bin/bash
|
||||
source /opt/bugsink/venv/bin/activate
|
||||
exec python -m bugsink.runserver "$@"
|
||||
EOF
|
||||
|
||||
sudo chmod +x /opt/bugsink/bin/bugsink-manage /opt/bugsink/bin/bugsink-runserver
|
||||
```
|
||||
|
||||
### Configure Bugsink
|
||||
|
||||
Create `/etc/bugsink/environment`:
|
||||
|
||||
```bash
|
||||
sudo mkdir -p /etc/bugsink
|
||||
sudo nano /etc/bugsink/environment
|
||||
```
|
||||
|
||||
```bash
|
||||
SECRET_KEY=YOUR_RANDOM_50_CHAR_SECRET_KEY
|
||||
DATABASE_URL=postgresql://bugsink:BUGSINK_SECURE_PASSWORD@localhost:5432/bugsink
|
||||
BASE_URL=http://localhost:8000
|
||||
PORT=8000
|
||||
```
|
||||
|
||||
```bash
|
||||
sudo chmod 600 /etc/bugsink/environment
|
||||
```
|
||||
|
||||
### Initialize Bugsink Database
|
||||
|
||||
```bash
|
||||
source /etc/bugsink/environment
|
||||
/opt/bugsink/bin/bugsink-manage migrate
|
||||
/opt/bugsink/bin/bugsink-manage migrate --database=snappea
|
||||
```
|
||||
|
||||
### Create Bugsink Admin User
|
||||
|
||||
```bash
|
||||
/opt/bugsink/bin/bugsink-manage createsuperuser
|
||||
```
|
||||
|
||||
### Create Systemd Service
|
||||
|
||||
Create `/etc/systemd/system/bugsink.service`:
|
||||
|
||||
```ini
|
||||
[Unit]
|
||||
Description=Bugsink Error Tracking
|
||||
After=network.target postgresql.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=www-data
|
||||
Group=www-data
|
||||
EnvironmentFile=/etc/bugsink/environment
|
||||
ExecStart=/opt/bugsink/bin/bugsink-runserver 0.0.0.0:8000
|
||||
Restart=always
|
||||
RestartSec=5
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
```
|
||||
|
||||
```bash
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable bugsink
|
||||
sudo systemctl start bugsink
|
||||
```
|
||||
|
||||
### Create Bugsink Projects and Get DSNs
|
||||
|
||||
1. Access Bugsink UI at `http://localhost:8000`
|
||||
2. Log in with admin credentials
|
||||
3. Create projects:
|
||||
- **flyer-crawler-backend** (Platform: Node.js)
|
||||
- **flyer-crawler-frontend** (Platform: React)
|
||||
4. Copy the DSNs from each project's settings
|
||||
5. Update `/etc/flyer-crawler/environment` with the DSNs
|
||||
|
||||
### Test Error Tracking
|
||||
|
||||
```bash
|
||||
cd /opt/flyer-crawler
|
||||
npx tsx scripts/test-bugsink.ts
|
||||
```
|
||||
|
||||
Check Bugsink UI for test events.
|
||||
|
||||
---
|
||||
|
||||
## Logstash Log Aggregation
|
||||
|
||||
Logstash aggregates logs from the application and infrastructure, forwarding errors to Bugsink.
|
||||
|
||||
### Install Logstash
|
||||
|
||||
```bash
|
||||
# Add Elastic APT repository
|
||||
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo gpg --dearmor -o /usr/share/keyrings/elastic-keyring.gpg
|
||||
echo "deb [signed-by=/usr/share/keyrings/elastic-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" | sudo tee /etc/apt/sources.list.d/elastic-8.x.list
|
||||
|
||||
sudo apt update
|
||||
sudo apt install -y logstash
|
||||
```
|
||||
|
||||
### Configure Logstash Pipeline
|
||||
|
||||
Create `/etc/logstash/conf.d/bugsink.conf`:
|
||||
|
||||
```conf
|
||||
input {
|
||||
# Pino application logs
|
||||
file {
|
||||
path => "/opt/flyer-crawler/logs/*.log"
|
||||
codec => json
|
||||
type => "pino"
|
||||
tags => ["app"]
|
||||
}
|
||||
|
||||
# Redis logs
|
||||
file {
|
||||
path => "/var/log/redis/*.log"
|
||||
type => "redis"
|
||||
tags => ["redis"]
|
||||
}
|
||||
}
|
||||
|
||||
filter {
|
||||
# Pino error detection (level 50 = error, 60 = fatal)
|
||||
if [type] == "pino" and [level] >= 50 {
|
||||
mutate { add_tag => ["error"] }
|
||||
}
|
||||
|
||||
# Redis error detection
|
||||
if [type] == "redis" {
|
||||
grok {
|
||||
match => { "message" => "%{POSINT:pid}:%{WORD:role} %{MONTHDAY} %{MONTH} %{TIME} %{WORD:loglevel} %{GREEDYDATA:redis_message}" }
|
||||
}
|
||||
if [loglevel] in ["WARNING", "ERROR"] {
|
||||
mutate { add_tag => ["error"] }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output {
|
||||
if "error" in [tags] {
|
||||
http {
|
||||
url => "http://localhost:8000/api/1/store/"
|
||||
http_method => "post"
|
||||
format => "json"
|
||||
headers => {
|
||||
"X-Sentry-Auth" => "Sentry sentry_version=7, sentry_client=logstash/1.0, sentry_key=YOUR_BACKEND_DSN_KEY"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Replace `YOUR_BACKEND_DSN_KEY` with the key from your backend project DSN.
|
||||
|
||||
### Start Logstash
|
||||
|
||||
```bash
|
||||
sudo systemctl enable logstash
|
||||
sudo systemctl start logstash
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## SSL/TLS with Let's Encrypt
|
||||
|
||||
### Install Certbot
|
||||
|
||||
```bash
|
||||
sudo apt install -y certbot python3-certbot-nginx
|
||||
```
|
||||
|
||||
### Obtain Certificate
|
||||
|
||||
```bash
|
||||
sudo certbot --nginx -d flyer-crawler.projectium.com
|
||||
```
|
||||
|
||||
Certbot will automatically configure NGINX for HTTPS.
|
||||
|
||||
### Auto-Renewal
|
||||
|
||||
Certbot installs a systemd timer for automatic renewal. Verify:
|
||||
|
||||
```bash
|
||||
sudo systemctl status certbot.timer
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Firewall Configuration
|
||||
|
||||
### Configure UFW
|
||||
|
||||
```bash
|
||||
sudo ufw default deny incoming
|
||||
sudo ufw default allow outgoing
|
||||
|
||||
# Allow SSH
|
||||
sudo ufw allow ssh
|
||||
|
||||
# Allow HTTP and HTTPS
|
||||
sudo ufw allow 80/tcp
|
||||
sudo ufw allow 443/tcp
|
||||
|
||||
# Enable firewall
|
||||
sudo ufw enable
|
||||
```
|
||||
|
||||
**Important**: Bugsink (port 8000) should NOT be exposed externally. It listens on localhost only.
|
||||
|
||||
---
|
||||
|
||||
## Maintenance Commands
|
||||
|
||||
### Application Management
|
||||
|
||||
| Task | Command |
|
||||
| --------------------- | -------------------------------------------------------------------------------------- |
|
||||
| View PM2 status | `pm2 status` |
|
||||
| View application logs | `pm2 logs` |
|
||||
| Restart all processes | `pm2 restart all` |
|
||||
| Restart specific app | `pm2 restart flyer-crawler-api` |
|
||||
| Update application | `cd /opt/flyer-crawler && git pull && npm install && npm run build && pm2 restart all` |
|
||||
|
||||
### Service Management
|
||||
|
||||
| Service | Start | Stop | Status |
|
||||
| ---------- | ----------------------------------- | ---------------------------------- | ------------------------------------ |
|
||||
| PostgreSQL | `sudo systemctl start postgresql` | `sudo systemctl stop postgresql` | `sudo systemctl status postgresql` |
|
||||
| Redis | `sudo systemctl start redis-server` | `sudo systemctl stop redis-server` | `sudo systemctl status redis-server` |
|
||||
| NGINX | `sudo systemctl start nginx` | `sudo systemctl stop nginx` | `sudo systemctl status nginx` |
|
||||
| Bugsink | `sudo systemctl start bugsink` | `sudo systemctl stop bugsink` | `sudo systemctl status bugsink` |
|
||||
| Logstash | `sudo systemctl start logstash` | `sudo systemctl stop logstash` | `sudo systemctl status logstash` |
|
||||
|
||||
### Database Backup
|
||||
|
||||
```bash
|
||||
# Backup application database
|
||||
pg_dump -U flyer_crawler -h localhost flyer_crawler > backup_$(date +%Y%m%d).sql
|
||||
|
||||
# Backup Bugsink database
|
||||
pg_dump -U bugsink -h localhost bugsink > bugsink_backup_$(date +%Y%m%d).sql
|
||||
```
|
||||
|
||||
### Log Locations
|
||||
|
||||
| Log | Location |
|
||||
| ----------------- | --------------------------- |
|
||||
| Application (PM2) | `~/.pm2/logs/` |
|
||||
| NGINX access | `/var/log/nginx/access.log` |
|
||||
| NGINX error | `/var/log/nginx/error.log` |
|
||||
| PostgreSQL | `/var/log/postgresql/` |
|
||||
| Redis | `/var/log/redis/` |
|
||||
| Bugsink | `journalctl -u bugsink` |
|
||||
| Logstash | `/var/log/logstash/` |
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [DEPLOYMENT.md](../DEPLOYMENT.md) - Container-based deployment
|
||||
- [DATABASE.md](../DATABASE.md) - Database schema and extensions
|
||||
- [AUTHENTICATION.md](../AUTHENTICATION.md) - OAuth provider setup
|
||||
- [ADR-015](adr/0015-application-performance-monitoring-and-error-tracking.md) - Error tracking architecture
|
||||
@@ -3,7 +3,7 @@
|
||||
**Date**: 2025-12-12
|
||||
**Implementation Date**: 2026-01-08
|
||||
|
||||
**Status**: Accepted and Implemented (Phases 1-5 complete, user + admin features migrated)
|
||||
**Status**: Accepted and Fully Implemented (Phases 1-8 complete, 100% coverage)
|
||||
|
||||
## Context
|
||||
|
||||
@@ -23,18 +23,21 @@ We will adopt a dedicated library for managing server state, such as **TanStack
|
||||
### Phase 1: Infrastructure & Core Queries (✅ Complete - 2026-01-08)
|
||||
|
||||
**Files Created:**
|
||||
|
||||
- [src/config/queryClient.ts](../../src/config/queryClient.ts) - Global QueryClient configuration
|
||||
- [src/hooks/queries/useFlyersQuery.ts](../../src/hooks/queries/useFlyersQuery.ts) - Flyers data query
|
||||
- [src/hooks/queries/useWatchedItemsQuery.ts](../../src/hooks/queries/useWatchedItemsQuery.ts) - Watched items query
|
||||
- [src/hooks/queries/useShoppingListsQuery.ts](../../src/hooks/queries/useShoppingListsQuery.ts) - Shopping lists query
|
||||
|
||||
**Files Modified:**
|
||||
|
||||
- [src/providers/AppProviders.tsx](../../src/providers/AppProviders.tsx) - Added QueryClientProvider wrapper
|
||||
- [src/providers/FlyersProvider.tsx](../../src/providers/FlyersProvider.tsx) - Refactored to use TanStack Query
|
||||
- [src/providers/UserDataProvider.tsx](../../src/providers/UserDataProvider.tsx) - Refactored to use TanStack Query
|
||||
- [src/services/apiClient.ts](../../src/services/apiClient.ts) - Added pagination params to fetchFlyers
|
||||
|
||||
**Benefits Achieved:**
|
||||
|
||||
- ✅ Removed ~150 lines of custom state management code
|
||||
- ✅ Automatic caching of server data
|
||||
- ✅ Background refetching for stale data
|
||||
@@ -45,14 +48,17 @@ We will adopt a dedicated library for managing server state, such as **TanStack
|
||||
### Phase 2: Remaining Queries (✅ Complete - 2026-01-08)
|
||||
|
||||
**Files Created:**
|
||||
|
||||
- [src/hooks/queries/useMasterItemsQuery.ts](../../src/hooks/queries/useMasterItemsQuery.ts) - Master grocery items query
|
||||
- [src/hooks/queries/useFlyerItemsQuery.ts](../../src/hooks/queries/useFlyerItemsQuery.ts) - Flyer items query
|
||||
|
||||
**Files Modified:**
|
||||
|
||||
- [src/providers/MasterItemsProvider.tsx](../../src/providers/MasterItemsProvider.tsx) - Refactored to use TanStack Query
|
||||
- [src/hooks/useFlyerItems.ts](../../src/hooks/useFlyerItems.ts) - Refactored to use TanStack Query
|
||||
|
||||
**Benefits Achieved:**
|
||||
|
||||
- ✅ Removed additional ~50 lines of custom state management code
|
||||
- ✅ Per-flyer item caching (items cached separately for each flyer)
|
||||
- ✅ Longer cache times for infrequently changing data (master items)
|
||||
@@ -82,78 +88,154 @@ We will adopt a dedicated library for managing server state, such as **TanStack
|
||||
|
||||
**See**: [plans/adr-0005-phase-3-summary.md](../../plans/adr-0005-phase-3-summary.md) for detailed documentation
|
||||
|
||||
### Phase 4: Hook Refactoring (✅ Complete - 2026-01-08)
|
||||
### Phase 4: Hook Refactoring (✅ Complete)
|
||||
|
||||
**Goal:** Refactor user-facing hooks to use TanStack Query mutation hooks.
|
||||
|
||||
**Files Modified:**
|
||||
|
||||
- [src/hooks/useWatchedItems.tsx](../../src/hooks/useWatchedItems.tsx) - Refactored to use mutation hooks
|
||||
- [src/hooks/useShoppingLists.tsx](../../src/hooks/useShoppingLists.tsx) - Refactored to use mutation hooks
|
||||
- [src/contexts/UserDataContext.ts](../../src/contexts/UserDataContext.ts) - Removed deprecated setters
|
||||
- [src/providers/UserDataProvider.tsx](../../src/providers/UserDataProvider.tsx) - Removed setter stub implementations
|
||||
- [src/contexts/UserDataContext.ts](../../src/contexts/UserDataContext.ts) - Clean read-only interface (no setters)
|
||||
- [src/providers/UserDataProvider.tsx](../../src/providers/UserDataProvider.tsx) - Uses query hooks, no setter stubs
|
||||
|
||||
**Benefits Achieved:**
|
||||
|
||||
- ✅ Removed 52 lines of code from custom hooks (-17%)
|
||||
- ✅ Eliminated all `useApi` dependencies from user-facing hooks
|
||||
- ✅ Removed 150+ lines of manual state management
|
||||
- ✅ Simplified useShoppingLists by 21% (222 → 176 lines)
|
||||
- ✅ Maintained backward compatibility for hook consumers
|
||||
- ✅ Cleaner context interface (read-only server state)
|
||||
- ✅ Both hooks now use TanStack Query mutations
|
||||
- ✅ Automatic cache invalidation after mutations
|
||||
- ✅ Consistent error handling via mutation hooks
|
||||
- ✅ Clean context interface (read-only server state)
|
||||
- ✅ Backward compatible API for hook consumers
|
||||
|
||||
**See**: [plans/adr-0005-phase-4-summary.md](../../plans/adr-0005-phase-4-summary.md) for detailed documentation
|
||||
### Phase 5: Admin Features (✅ Complete)
|
||||
|
||||
### Phase 5: Admin Features (✅ Complete - 2026-01-08)
|
||||
**Goal:** Create query hooks for admin features.
|
||||
|
||||
**Files Created:**
|
||||
|
||||
- [src/hooks/queries/useActivityLogQuery.ts](../../src/hooks/queries/useActivityLogQuery.ts) - Activity log query with pagination
|
||||
- [src/hooks/queries/useApplicationStatsQuery.ts](../../src/hooks/queries/useApplicationStatsQuery.ts) - Application statistics query
|
||||
- [src/hooks/queries/useSuggestedCorrectionsQuery.ts](../../src/hooks/queries/useSuggestedCorrectionsQuery.ts) - Corrections query
|
||||
- [src/hooks/queries/useCategoriesQuery.ts](../../src/hooks/queries/useCategoriesQuery.ts) - Categories query (public endpoint)
|
||||
- [src/hooks/queries/useActivityLogQuery.ts](../../src/hooks/queries/useActivityLogQuery.ts) - Activity log with pagination
|
||||
- [src/hooks/queries/useApplicationStatsQuery.ts](../../src/hooks/queries/useApplicationStatsQuery.ts) - Application statistics
|
||||
- [src/hooks/queries/useSuggestedCorrectionsQuery.ts](../../src/hooks/queries/useSuggestedCorrectionsQuery.ts) - Corrections data
|
||||
- [src/hooks/queries/useCategoriesQuery.ts](../../src/hooks/queries/useCategoriesQuery.ts) - Categories (public endpoint)
|
||||
|
||||
**Files Modified:**
|
||||
**Components Migrated:**
|
||||
|
||||
- [src/pages/admin/ActivityLog.tsx](../../src/pages/admin/ActivityLog.tsx) - Refactored to use TanStack Query
|
||||
- [src/pages/admin/AdminStatsPage.tsx](../../src/pages/admin/AdminStatsPage.tsx) - Refactored to use TanStack Query
|
||||
- [src/pages/admin/CorrectionsPage.tsx](../../src/pages/admin/CorrectionsPage.tsx) - Refactored to use TanStack Query
|
||||
- [src/pages/admin/ActivityLog.tsx](../../src/pages/admin/ActivityLog.tsx) - Uses useActivityLogQuery
|
||||
- [src/pages/admin/AdminStatsPage.tsx](../../src/pages/admin/AdminStatsPage.tsx) - Uses useApplicationStatsQuery
|
||||
- [src/pages/admin/CorrectionsPage.tsx](../../src/pages/admin/CorrectionsPage.tsx) - Uses useSuggestedCorrectionsQuery, useMasterItemsQuery, useCategoriesQuery
|
||||
|
||||
**Benefits Achieved:**
|
||||
|
||||
- ✅ Removed 121 lines from admin components (-32%)
|
||||
- ✅ Eliminated manual state management from all admin queries
|
||||
- ✅ Automatic parallel fetching (CorrectionsPage fetches 3 queries simultaneously)
|
||||
- ✅ Consistent caching strategy across all admin features
|
||||
- ✅ Smart refetching with appropriate stale times (30s to 1 hour)
|
||||
- ✅ Automatic caching of admin data
|
||||
- ✅ Parallel fetching (CorrectionsPage fetches 3 queries simultaneously)
|
||||
- ✅ Consistent stale times (30s to 2 min based on data volatility)
|
||||
- ✅ Shared cache across components (useMasterItemsQuery reused)
|
||||
|
||||
**See**: [plans/adr-0005-phase-5-summary.md](../../plans/adr-0005-phase-5-summary.md) for detailed documentation
|
||||
### Phase 6: Analytics Features (✅ Complete - 2026-01-10)
|
||||
|
||||
### Phase 6: Cleanup (🔄 In Progress - 2026-01-08)
|
||||
**Goal:** Migrate analytics and deals features.
|
||||
|
||||
**Completed:**
|
||||
**Files Created:**
|
||||
|
||||
- ✅ Removed custom useInfiniteQuery hook (not used in production)
|
||||
- ✅ Analyzed remaining useApi/useApiOnMount usage
|
||||
- [src/hooks/queries/useBestSalePricesQuery.ts](../../src/hooks/queries/useBestSalePricesQuery.ts) - Best sale prices for watched items
|
||||
- [src/hooks/queries/useFlyerItemsForFlyersQuery.ts](../../src/hooks/queries/useFlyerItemsForFlyersQuery.ts) - Batch fetch items for multiple flyers
|
||||
- [src/hooks/queries/useFlyerItemCountQuery.ts](../../src/hooks/queries/useFlyerItemCountQuery.ts) - Count items across flyers
|
||||
|
||||
**Remaining:**
|
||||
**Files Modified:**
|
||||
|
||||
- ⏳ Migrate auth features (AuthProvider, AuthView, ProfileManager) from useApi to TanStack Query
|
||||
- ⏳ Migrate useActiveDeals from useApi to TanStack Query
|
||||
- ⏳ Migrate AdminBrandManager from useApiOnMount to TanStack Query
|
||||
- ⏳ Consider removal of useApi/useApiOnMount hooks once fully migrated
|
||||
- ⏳ Update all tests for migrated features
|
||||
- [src/pages/MyDealsPage.tsx](../../src/pages/MyDealsPage.tsx) - Now uses useBestSalePricesQuery
|
||||
- [src/hooks/useActiveDeals.tsx](../../src/hooks/useActiveDeals.tsx) - Refactored to use TanStack Query hooks
|
||||
|
||||
**Note**: `useApi` and `useApiOnMount` are still actively used in 6 production files for authentication, profile management, and some admin features. Full migration of these critical features requires careful planning and is documented as future work.
|
||||
**Benefits Achieved:**
|
||||
|
||||
- ✅ Removed useApi dependency from analytics features
|
||||
- ✅ Automatic caching of deal data (2-5 minute stale times)
|
||||
- ✅ Consistent error handling via TanStack Query
|
||||
- ✅ Batch fetching for flyer items (single query for multiple flyers)
|
||||
|
||||
### Phase 7: Cleanup (✅ Complete - 2026-01-10)
|
||||
|
||||
**Goal:** Remove legacy hooks once migration is complete.
|
||||
|
||||
**Files Created:**
|
||||
|
||||
- [src/hooks/queries/useUserAddressQuery.ts](../../src/hooks/queries/useUserAddressQuery.ts) - User address fetching
|
||||
- [src/hooks/queries/useAuthProfileQuery.ts](../../src/hooks/queries/useAuthProfileQuery.ts) - Auth profile fetching
|
||||
- [src/hooks/mutations/useGeocodeMutation.ts](../../src/hooks/mutations/useGeocodeMutation.ts) - Address geocoding
|
||||
|
||||
**Files Modified:**
|
||||
|
||||
- [src/hooks/useProfileAddress.ts](../../src/hooks/useProfileAddress.ts) - Refactored to use TanStack Query
|
||||
- [src/providers/AuthProvider.tsx](../../src/providers/AuthProvider.tsx) - Refactored to use TanStack Query
|
||||
|
||||
**Files Removed:**
|
||||
|
||||
- ~~src/hooks/useApi.ts~~ - Legacy hook removed
|
||||
- ~~src/hooks/useApi.test.ts~~ - Test file removed
|
||||
- ~~src/hooks/useApiOnMount.ts~~ - Legacy hook removed
|
||||
- ~~src/hooks/useApiOnMount.test.ts~~ - Test file removed
|
||||
|
||||
**Benefits Achieved:**
|
||||
|
||||
- ✅ Removed all legacy `useApi` and `useApiOnMount` hooks
|
||||
- ✅ Complete TanStack Query coverage for all data fetching
|
||||
- ✅ Consistent error handling across the entire application
|
||||
- ✅ Unified caching strategy for all server state
|
||||
|
||||
### Phase 8: Additional Component Migration (✅ Complete - 2026-01-10)
|
||||
|
||||
**Goal:** Migrate remaining components with manual data fetching to TanStack Query.
|
||||
|
||||
**Files Created:**
|
||||
|
||||
- [src/hooks/queries/useUserProfileDataQuery.ts](../../src/hooks/queries/useUserProfileDataQuery.ts) - Combined user profile + achievements query
|
||||
- [src/hooks/queries/useLeaderboardQuery.ts](../../src/hooks/queries/useLeaderboardQuery.ts) - Public leaderboard data
|
||||
- [src/hooks/queries/usePriceHistoryQuery.ts](../../src/hooks/queries/usePriceHistoryQuery.ts) - Historical price data for watched items
|
||||
|
||||
**Files Modified:**
|
||||
|
||||
- [src/hooks/useUserProfileData.ts](../../src/hooks/useUserProfileData.ts) - Refactored to use useUserProfileDataQuery
|
||||
- [src/components/Leaderboard.tsx](../../src/components/Leaderboard.tsx) - Refactored to use useLeaderboardQuery
|
||||
- [src/features/charts/PriceHistoryChart.tsx](../../src/features/charts/PriceHistoryChart.tsx) - Refactored to use usePriceHistoryQuery
|
||||
|
||||
**Benefits Achieved:**
|
||||
|
||||
- ✅ Parallel fetching for profile + achievements data
|
||||
- ✅ Public leaderboard cached with 2-minute stale time
|
||||
- ✅ Price history cached with 10-minute stale time (data changes infrequently)
|
||||
- ✅ Backward-compatible setProfile function via queryClient.setQueryData
|
||||
- ✅ Stable query keys with sorted IDs for price history
|
||||
|
||||
## Migration Status
|
||||
|
||||
Current Coverage: **85% complete**
|
||||
Current Coverage: **100% complete**
|
||||
|
||||
- ✅ **User Features: 100%** - All core user-facing features fully migrated (queries + mutations + hooks)
|
||||
- ✅ **Admin Features: 100%** - Activity log, stats, corrections now use TanStack Query
|
||||
- ⏳ **Auth/Profile Features: 0%** - Auth provider, profile manager still use useApi
|
||||
- ⏳ **Analytics Features: 0%** - Active Deals need migration
|
||||
- ⏳ **Brand Management: 0%** - AdminBrandManager still uses useApiOnMount
|
||||
| Category | Total | Migrated | Status |
|
||||
| ----------------------------- | ----- | -------- | ------- |
|
||||
| Query Hooks (User) | 7 | 7 | ✅ 100% |
|
||||
| Query Hooks (Admin) | 4 | 4 | ✅ 100% |
|
||||
| Query Hooks (Analytics) | 3 | 3 | ✅ 100% |
|
||||
| Query Hooks (Phase 8) | 3 | 3 | ✅ 100% |
|
||||
| Mutation Hooks | 8 | 8 | ✅ 100% |
|
||||
| User Hooks | 2 | 2 | ✅ 100% |
|
||||
| Analytics Features | 2 | 2 | ✅ 100% |
|
||||
| Component Migration (Phase 8) | 3 | 3 | ✅ 100% |
|
||||
| Legacy Hook Cleanup | 4 | 4 | ✅ 100% |
|
||||
|
||||
**Completed:**
|
||||
|
||||
- ✅ Core query hooks (flyers, flyerItems, masterItems, watchedItems, shoppingLists)
|
||||
- ✅ Admin query hooks (activityLog, applicationStats, suggestedCorrections, categories)
|
||||
- ✅ Analytics query hooks (bestSalePrices, flyerItemsForFlyers, flyerItemCount)
|
||||
- ✅ Auth/Profile query hooks (authProfile, userAddress)
|
||||
- ✅ Phase 8 query hooks (userProfileData, leaderboard, priceHistory)
|
||||
- ✅ All mutation hooks (watched items, shopping lists, geocode)
|
||||
- ✅ Provider refactoring (AppProviders, FlyersProvider, MasterItemsProvider, UserDataProvider, AuthProvider)
|
||||
- ✅ User hooks refactoring (useWatchedItems, useShoppingLists, useProfileAddress, useUserProfileData)
|
||||
- ✅ Admin component migration (ActivityLog, AdminStatsPage, CorrectionsPage)
|
||||
- ✅ Analytics features (MyDealsPage, useActiveDeals)
|
||||
- ✅ Component migration (Leaderboard, PriceHistoryChart)
|
||||
- ✅ Legacy hooks removed (useApi, useApiOnMount)
|
||||
|
||||
See [plans/adr-0005-master-migration-status.md](../../plans/adr-0005-master-migration-status.md) for complete tracking of all components.
|
||||
|
||||
|
||||
@@ -10,6 +10,41 @@
|
||||
|
||||
The project is currently run using `pm2`, and the `README.md` contains manual setup instructions. While functional, this lacks the portability, scalability, and consistency of modern deployment practices. Local development environments also suffered from inconsistency issues.
|
||||
|
||||
## Platform Requirement: Linux Only
|
||||
|
||||
**CRITICAL**: This application is designed and intended to run **exclusively on Linux**, either:
|
||||
|
||||
- **In a container** (Docker/Podman) - the recommended and primary development environment
|
||||
- **On bare-metal Linux** - for production deployments
|
||||
|
||||
### Windows Compatibility
|
||||
|
||||
**Windows is NOT a supported platform.** Any apparent Windows compatibility is:
|
||||
|
||||
- Coincidental and not guaranteed
|
||||
- Subject to break at any time without notice
|
||||
- Not a priority to fix or maintain
|
||||
|
||||
Specific issues that arise on Windows include:
|
||||
|
||||
- **Path separators**: The codebase uses POSIX-style paths (`/`) which work natively on Linux but may cause issues with `path.join()` on Windows producing backslash paths
|
||||
- **Shell scripts**: Bash scripts in `scripts/` directory are Linux-only
|
||||
- **External dependencies**: Tools like `pdftocairo` assume Linux installation paths
|
||||
- **File permissions**: Unix-style permissions are assumed throughout
|
||||
|
||||
### Test Execution Requirement
|
||||
|
||||
**ALL tests MUST be executed on Linux.** This includes:
|
||||
|
||||
- Unit tests
|
||||
- Integration tests
|
||||
- End-to-end tests
|
||||
- Any CI/CD pipeline tests
|
||||
|
||||
Tests that pass on Windows but fail on Linux are considered **broken tests**. Tests that fail on Windows but pass on Linux are considered **passing tests**.
|
||||
|
||||
**For Windows developers**: Always use the Dev Container (VS Code "Reopen in Container") to run tests. Never rely on test results from the Windows host machine.
|
||||
|
||||
## Decision
|
||||
|
||||
We will standardize the deployment process using a hybrid approach:
|
||||
@@ -283,7 +318,35 @@ podman-compose -f compose.dev.yml build app
|
||||
- `.gitea/workflows/deploy-to-prod.yml` - Production deployment pipeline
|
||||
- `.gitea/workflows/deploy-to-test.yml` - Test deployment pipeline
|
||||
|
||||
## Container Test Readiness Requirement
|
||||
|
||||
**CRITICAL**: The development container MUST be fully test-ready on startup. This means:
|
||||
|
||||
1. **Zero Manual Steps**: After running `podman-compose -f compose.dev.yml up -d` and entering the container, tests MUST run immediately with `npm test` without any additional setup steps.
|
||||
|
||||
2. **Complete Environment**: All environment variables, database connections, Redis connections, and seed data MUST be automatically initialized during container startup.
|
||||
|
||||
3. **Enforcement Checklist**:
|
||||
- [ ] `npm test` runs successfully immediately after container start
|
||||
- [ ] Database is seeded with test data (admin account, sample data)
|
||||
- [ ] Redis is connected and healthy
|
||||
- [ ] All environment variables are set via `compose.dev.yml` or `.env` files
|
||||
- [ ] No "database not ready" or "connection refused" errors on first test run
|
||||
|
||||
4. **Current Gaps (To Fix)**:
|
||||
- Integration tests require database seeding (`npm run db:reset:test`)
|
||||
- Environment variables from `.env.test` may not be loaded automatically
|
||||
- Some npm scripts use `NODE_ENV=` syntax which fails on Windows (use `cross-env`)
|
||||
|
||||
5. **Resolution Steps**:
|
||||
- The `docker-init.sh` script should seed the test database after seeding dev database
|
||||
- Add automatic `.env.test` loading or move all test env vars to `compose.dev.yml`
|
||||
- Update all npm scripts to use `cross-env` for cross-platform compatibility
|
||||
|
||||
**Rationale**: Developers and CI systems should never need to run manual setup commands to execute tests. If the container is running, tests should work. Any deviation from this principle indicates an incomplete container setup.
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-017](./0017-ci-cd-and-branching-strategy.md) - CI/CD Strategy
|
||||
- [ADR-038](./0038-graceful-shutdown-pattern.md) - Graceful Shutdown Pattern
|
||||
- [ADR-010](./0010-testing-strategy-and-standards.md) - Testing Strategy and Standards
|
||||
|
||||
@@ -2,17 +2,321 @@
|
||||
|
||||
**Date**: 2025-12-12
|
||||
|
||||
**Status**: Proposed
|
||||
**Status**: Accepted
|
||||
|
||||
**Updated**: 2026-01-11
|
||||
|
||||
## Context
|
||||
|
||||
While `ADR-004` established structured logging, the application lacks a high-level, aggregated view of its health, performance, and errors. It's difficult to spot trends, identify slow API endpoints, or be proactively notified of new types of errors.
|
||||
While `ADR-004` established structured logging with Pino, the application lacks a high-level, aggregated view of its health, performance, and errors. It's difficult to spot trends, identify slow API endpoints, or be proactively notified of new types of errors.
|
||||
|
||||
Key requirements:
|
||||
|
||||
1. **Self-hosted**: No external SaaS dependencies for error tracking
|
||||
2. **Sentry SDK compatible**: Leverage mature, well-documented SDKs
|
||||
3. **Lightweight**: Minimal resource overhead in the dev container
|
||||
4. **Production-ready**: Same architecture works on bare-metal production servers
|
||||
5. **AI-accessible**: MCP server integration for Claude Code and other AI tools
|
||||
|
||||
## Decision
|
||||
|
||||
We will integrate a dedicated Application Performance Monitoring (APM) and error tracking service like **Sentry**, **Datadog**, or **New Relic**. This will define how the service is integrated to automatically capture and report unhandled exceptions, performance data (e.g., transaction traces, database query times), and release health.
|
||||
We will implement a self-hosted error tracking stack using **Bugsink** as the Sentry-compatible backend, with the following components:
|
||||
|
||||
### 1. Error Tracking Backend: Bugsink
|
||||
|
||||
**Bugsink** is a lightweight, self-hosted Sentry alternative that:
|
||||
|
||||
- Runs as a single process (no Kafka, Redis, ClickHouse required)
|
||||
- Is fully compatible with Sentry SDKs
|
||||
- Supports ARM64 and AMD64 architectures
|
||||
- Can use SQLite (dev) or PostgreSQL (production)
|
||||
|
||||
**Deployment**:
|
||||
|
||||
- **Dev container**: Installed as a systemd service inside the container
|
||||
- **Production**: Runs as a systemd service on bare-metal, listening on localhost only
|
||||
- **Database**: Uses PostgreSQL with a dedicated `bugsink` user and `bugsink` database (same PostgreSQL instance as the main application)
|
||||
|
||||
### 2. Backend Integration: @sentry/node
|
||||
|
||||
The Express backend will integrate `@sentry/node` SDK to:
|
||||
|
||||
- Capture unhandled exceptions before PM2/process manager restarts
|
||||
- Report errors with full stack traces and context
|
||||
- Integrate with Pino logger for breadcrumbs
|
||||
- Track transaction performance (optional)
|
||||
|
||||
### 3. Frontend Integration: @sentry/react
|
||||
|
||||
The React frontend will integrate `@sentry/react` SDK to:
|
||||
|
||||
- Wrap the app in a Sentry Error Boundary
|
||||
- Capture unhandled JavaScript errors
|
||||
- Report errors with component stack traces
|
||||
- Track user session context
|
||||
- **Frontend Error Correlation**: The global API client (Axios/Fetch wrapper) MUST intercept 4xx/5xx responses. It MUST extract the `x-request-id` header (if present) and attach it to the Sentry scope as a tag `api_request_id` before re-throwing the error. This allows developers to copy the ID from Sentry and search for it in backend logs.
|
||||
|
||||
### 4. Log Aggregation: Logstash
|
||||
|
||||
**Logstash** parses application and infrastructure logs, forwarding error patterns to Bugsink:
|
||||
|
||||
- **Installation**: Installed inside the dev container (and on bare-metal prod servers)
|
||||
- **Inputs**:
|
||||
- Pino JSON logs from the Node.js application
|
||||
- Redis logs (connection errors, memory warnings, slow commands)
|
||||
- PostgreSQL function logs (future - see Implementation Steps)
|
||||
- **Filter**: Identifies error-level logs (5xx responses, unhandled exceptions, Redis errors)
|
||||
- **Output**: Sends to Bugsink via Sentry-compatible HTTP API
|
||||
|
||||
This provides a secondary error capture path for:
|
||||
|
||||
- Errors that occur before Sentry SDK initialization
|
||||
- Log-based errors that don't throw exceptions
|
||||
- Redis connection/performance issues
|
||||
- Database function errors and slow queries
|
||||
- Historical error analysis from log files
|
||||
|
||||
### 5. MCP Server Integration: sentry-selfhosted-mcp
|
||||
|
||||
For AI tool integration (Claude Code, Cursor, etc.), we use the open-source [sentry-selfhosted-mcp](https://github.com/ddfourtwo/sentry-selfhosted-mcp) server:
|
||||
|
||||
- **No code changes required**: Configurable via environment variables
|
||||
- **Capabilities**: List projects, get issues, view events, update status, add comments
|
||||
- **Configuration**:
|
||||
- `SENTRY_URL`: Points to Bugsink instance
|
||||
- `SENTRY_AUTH_TOKEN`: API token from Bugsink
|
||||
- `SENTRY_ORG_SLUG`: Organization identifier
|
||||
|
||||
## Architecture
|
||||
|
||||
```text
|
||||
┌─────────────────────────────────────────────────────────────────────────┐
|
||||
│ Dev Container / Production Server │
|
||||
├─────────────────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌──────────────────┐ ┌──────────────────┐ │
|
||||
│ │ Frontend │ │ Backend │ │
|
||||
│ │ (React) │ │ (Express) │ │
|
||||
│ │ @sentry/react │ │ @sentry/node │ │
|
||||
│ └────────┬─────────┘ └────────┬─────────┘ │
|
||||
│ │ │ │
|
||||
│ │ Sentry SDK Protocol │ │
|
||||
│ └───────────┬───────────────┘ │
|
||||
│ │ │
|
||||
│ ▼ │
|
||||
│ ┌──────────────────────┐ │
|
||||
│ │ Bugsink │ │
|
||||
│ │ (localhost:8000) │◄──────────────────┐ │
|
||||
│ │ │ │ │
|
||||
│ │ PostgreSQL backend │ │ │
|
||||
│ └──────────────────────┘ │ │
|
||||
│ │ │
|
||||
│ ┌──────────────────────┐ │ │
|
||||
│ │ Logstash │───────────────────┘ │
|
||||
│ │ (Log Aggregator) │ Sentry Output │
|
||||
│ │ │ │
|
||||
│ │ Inputs: │ │
|
||||
│ │ - Pino app logs │ │
|
||||
│ │ - Redis logs │ │
|
||||
│ │ - PostgreSQL (future) │
|
||||
│ └──────────────────────┘ │
|
||||
│ ▲ ▲ ▲ │
|
||||
│ │ │ │ │
|
||||
│ ┌───────────┘ │ └───────────┐ │
|
||||
│ │ │ │ │
|
||||
│ ┌────┴─────┐ ┌─────┴────┐ ┌──────┴─────┐ │
|
||||
│ │ Pino │ │ Redis │ │ PostgreSQL │ │
|
||||
│ │ Logs │ │ Logs │ │ Logs (TBD) │ │
|
||||
│ └──────────┘ └──────────┘ └────────────┘ │
|
||||
│ │
|
||||
│ ┌──────────────────────┐ │
|
||||
│ │ PostgreSQL │ │
|
||||
│ │ ┌────────────────┐ │ │
|
||||
│ │ │ flyer_crawler │ │ (main app database) │
|
||||
│ │ ├────────────────┤ │ │
|
||||
│ │ │ bugsink │ │ (error tracking database) │
|
||||
│ │ └────────────────┘ │ │
|
||||
│ └──────────────────────┘ │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────────────────┘
|
||||
|
||||
External (Developer Machine):
|
||||
┌──────────────────────────────────────┐
|
||||
│ Claude Code / Cursor / VS Code │
|
||||
│ ┌────────────────────────────────┐ │
|
||||
│ │ sentry-selfhosted-mcp │ │
|
||||
│ │ (MCP Server) │ │
|
||||
│ │ │ │
|
||||
│ │ SENTRY_URL=http://localhost:8000
|
||||
│ │ SENTRY_AUTH_TOKEN=... │ │
|
||||
│ │ SENTRY_ORG_SLUG=... │ │
|
||||
│ └────────────────────────────────┘ │
|
||||
└──────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
| Variable | Description | Default (Dev) |
|
||||
| ------------------ | ------------------------------ | -------------------------- |
|
||||
| `BUGSINK_DSN` | Sentry-compatible DSN for SDKs | Set after project creation |
|
||||
| `BUGSINK_ENABLED` | Enable/disable error reporting | `true` |
|
||||
| `BUGSINK_BASE_URL` | Bugsink web UI URL (internal) | `http://localhost:8000` |
|
||||
|
||||
### PostgreSQL Setup
|
||||
|
||||
```sql
|
||||
-- Create dedicated Bugsink database and user
|
||||
CREATE USER bugsink WITH PASSWORD 'bugsink_dev_password';
|
||||
CREATE DATABASE bugsink OWNER bugsink;
|
||||
GRANT ALL PRIVILEGES ON DATABASE bugsink TO bugsink;
|
||||
```
|
||||
|
||||
### Bugsink Configuration
|
||||
|
||||
```bash
|
||||
# Environment variables for Bugsink service
|
||||
SECRET_KEY=<random-50-char-string>
|
||||
DATABASE_URL=postgresql://bugsink:bugsink_dev_password@localhost:5432/bugsink
|
||||
BASE_URL=http://localhost:8000
|
||||
PORT=8000
|
||||
```
|
||||
|
||||
### Logstash Pipeline
|
||||
|
||||
```conf
|
||||
# /etc/logstash/conf.d/bugsink.conf
|
||||
|
||||
# === INPUTS ===
|
||||
input {
|
||||
# Pino application logs
|
||||
file {
|
||||
path => "/app/logs/*.log"
|
||||
codec => json
|
||||
type => "pino"
|
||||
tags => ["app"]
|
||||
}
|
||||
|
||||
# Redis logs
|
||||
file {
|
||||
path => "/var/log/redis/*.log"
|
||||
type => "redis"
|
||||
tags => ["redis"]
|
||||
}
|
||||
|
||||
# PostgreSQL logs (for function logging - future)
|
||||
# file {
|
||||
# path => "/var/log/postgresql/*.log"
|
||||
# type => "postgres"
|
||||
# tags => ["postgres"]
|
||||
# }
|
||||
}
|
||||
|
||||
# === FILTERS ===
|
||||
filter {
|
||||
# Pino error detection (level 50 = error, 60 = fatal)
|
||||
if [type] == "pino" and [level] >= 50 {
|
||||
mutate { add_tag => ["error"] }
|
||||
}
|
||||
|
||||
# Redis error detection
|
||||
if [type] == "redis" {
|
||||
grok {
|
||||
match => { "message" => "%{POSINT:pid}:%{WORD:role} %{MONTHDAY} %{MONTH} %{TIME} %{WORD:loglevel} %{GREEDYDATA:redis_message}" }
|
||||
}
|
||||
if [loglevel] in ["WARNING", "ERROR"] {
|
||||
mutate { add_tag => ["error"] }
|
||||
}
|
||||
}
|
||||
|
||||
# PostgreSQL function error detection (future)
|
||||
# if [type] == "postgres" {
|
||||
# # Parse PostgreSQL log format and detect ERROR/FATAL levels
|
||||
# }
|
||||
}
|
||||
|
||||
# === OUTPUT ===
|
||||
output {
|
||||
if "error" in [tags] {
|
||||
http {
|
||||
url => "http://localhost:8000/api/store/"
|
||||
http_method => "post"
|
||||
format => "json"
|
||||
# Sentry envelope format
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Implementation Steps
|
||||
|
||||
1. **Update Dockerfile.dev**:
|
||||
- Install Bugsink (pip package or binary)
|
||||
- Install Logstash (Elastic APT repository)
|
||||
- Add systemd service files for both
|
||||
|
||||
2. **PostgreSQL initialization**:
|
||||
- Add Bugsink user/database creation to `sql/00-init-extensions.sql`
|
||||
|
||||
3. **Backend SDK integration**:
|
||||
- Install `@sentry/node`
|
||||
- Initialize in `server.ts` before Express app
|
||||
- Configure error handler middleware integration
|
||||
|
||||
4. **Frontend SDK integration**:
|
||||
- Install `@sentry/react`
|
||||
- Wrap `App` component with `Sentry.ErrorBoundary`
|
||||
- Configure in `src/index.tsx`
|
||||
|
||||
5. **Environment configuration**:
|
||||
- Add Bugsink variables to `src/config/env.ts`
|
||||
- Update `.env.example` and `compose.dev.yml`
|
||||
|
||||
6. **Logstash configuration**:
|
||||
- Create pipeline config for Pino → Bugsink
|
||||
- Configure Pino to write to log file in addition to stdout
|
||||
- Configure Redis log monitoring (connection errors, slow commands)
|
||||
|
||||
7. **MCP server documentation**:
|
||||
- Document `sentry-selfhosted-mcp` setup in CLAUDE.md
|
||||
|
||||
8. **PostgreSQL function logging** (future):
|
||||
- Configure PostgreSQL to log function execution errors
|
||||
- Add Logstash input for PostgreSQL logs
|
||||
- Define filter rules for function-level error detection
|
||||
- _Note: Ask for implementation details when this step is reached_
|
||||
|
||||
## Consequences
|
||||
|
||||
**Positive**: Provides critical observability into the application's real-world behavior. Enables proactive identification and resolution of performance bottlenecks and errors. Improves overall application reliability and user experience.
|
||||
**Negative**: Introduces a new third-party dependency and potential subscription costs. Requires initial setup and configuration of the APM/error tracking agent.
|
||||
### Positive
|
||||
|
||||
- **Full observability**: Aggregated view of errors, trends, and performance
|
||||
- **Self-hosted**: No external SaaS dependencies or subscription costs
|
||||
- **SDK compatibility**: Leverages mature Sentry SDKs with excellent documentation
|
||||
- **AI integration**: MCP server enables Claude Code to query and analyze errors
|
||||
- **Unified architecture**: Same setup works in dev container and production
|
||||
- **Lightweight**: Bugsink runs in a single process, unlike full Sentry (16GB+ RAM)
|
||||
|
||||
### Negative
|
||||
|
||||
- **Additional services**: Bugsink and Logstash add complexity to the container
|
||||
- **PostgreSQL overhead**: Additional database for error tracking
|
||||
- **Initial setup**: Requires configuration of multiple components
|
||||
- **Logstash learning curve**: Pipeline configuration requires Logstash knowledge
|
||||
|
||||
## Alternatives Considered
|
||||
|
||||
1. **Full Sentry self-hosted**: Rejected due to complexity (Kafka, Redis, ClickHouse, 16GB+ RAM minimum)
|
||||
2. **GlitchTip**: Considered, but Bugsink is lighter weight and easier to deploy
|
||||
3. **Sentry SaaS**: Rejected due to self-hosted requirement
|
||||
4. **Custom error aggregation**: Rejected in favor of proven Sentry SDK ecosystem
|
||||
|
||||
## References
|
||||
|
||||
- [Bugsink Documentation](https://www.bugsink.com/docs/)
|
||||
- [Bugsink Docker Install](https://www.bugsink.com/docs/docker-install/)
|
||||
- [@sentry/node Documentation](https://docs.sentry.io/platforms/javascript/guides/node/)
|
||||
- [@sentry/react Documentation](https://docs.sentry.io/platforms/javascript/guides/react/)
|
||||
- [sentry-selfhosted-mcp](https://github.com/ddfourtwo/sentry-selfhosted-mcp)
|
||||
- [Logstash Reference](https://www.elastic.co/guide/en/logstash/current/index.html)
|
||||
|
||||
@@ -2,17 +2,265 @@
|
||||
|
||||
**Date**: 2025-12-12
|
||||
|
||||
**Status**: Proposed
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-11
|
||||
|
||||
## Context
|
||||
|
||||
As the API grows, it becomes increasingly difficult for frontend developers and other consumers to understand its endpoints, request formats, and response structures. There is no single source of truth for API documentation.
|
||||
|
||||
Key requirements:
|
||||
|
||||
1. **Developer Experience**: Developers need interactive documentation to explore and test API endpoints.
|
||||
2. **Code-Documentation Sync**: Documentation should stay in sync with the actual code to prevent drift.
|
||||
3. **Low Maintenance Overhead**: The documentation approach should be "fast and lite" - minimal additional work for developers.
|
||||
4. **Security**: Documentation should not expose sensitive information in production environments.
|
||||
|
||||
## Decision
|
||||
|
||||
We will adopt **OpenAPI (Swagger)** for API documentation. We will use tools (e.g., JSDoc annotations with `swagger-jsdoc`) to generate an `openapi.json` specification directly from the route handler source code. This specification will be served via a UI like Swagger UI for interactive exploration.
|
||||
We will adopt **OpenAPI 3.0 (Swagger)** for API documentation using the following approach:
|
||||
|
||||
1. **JSDoc Annotations**: Use `swagger-jsdoc` to generate OpenAPI specs from JSDoc comments in route files.
|
||||
2. **Swagger UI**: Use `swagger-ui-express` to serve interactive documentation at `/docs/api-docs`.
|
||||
3. **Environment Restriction**: Only expose the Swagger UI in development and test environments, not production.
|
||||
4. **Incremental Adoption**: Start with key public routes and progressively add annotations to all endpoints.
|
||||
|
||||
### Tooling Selection
|
||||
|
||||
| Tool | Purpose |
|
||||
| -------------------- | ---------------------------------------------- |
|
||||
| `swagger-jsdoc` | Generates OpenAPI 3.0 spec from JSDoc comments |
|
||||
| `swagger-ui-express` | Serves interactive Swagger UI |
|
||||
|
||||
**Why JSDoc over separate schema files?**
|
||||
|
||||
- Documentation lives with the code, reducing drift
|
||||
- No separate files to maintain
|
||||
- Developers see documentation when editing routes
|
||||
- Lower learning curve for the team
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### OpenAPI Configuration
|
||||
|
||||
Located in `src/config/swagger.ts`:
|
||||
|
||||
```typescript
|
||||
import swaggerJsdoc from 'swagger-jsdoc';
|
||||
|
||||
const options: swaggerJsdoc.Options = {
|
||||
definition: {
|
||||
openapi: '3.0.0',
|
||||
info: {
|
||||
title: 'Flyer Crawler API',
|
||||
version: '1.0.0',
|
||||
description: 'API for the Flyer Crawler application',
|
||||
contact: {
|
||||
name: 'API Support',
|
||||
},
|
||||
},
|
||||
servers: [
|
||||
{
|
||||
url: '/api',
|
||||
description: 'API server',
|
||||
},
|
||||
],
|
||||
components: {
|
||||
securitySchemes: {
|
||||
bearerAuth: {
|
||||
type: 'http',
|
||||
scheme: 'bearer',
|
||||
bearerFormat: 'JWT',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
apis: ['./src/routes/*.ts'],
|
||||
};
|
||||
|
||||
export const swaggerSpec = swaggerJsdoc(options);
|
||||
```
|
||||
|
||||
### JSDoc Annotation Pattern
|
||||
|
||||
Each route handler should include OpenAPI annotations using the `@openapi` tag:
|
||||
|
||||
```typescript
|
||||
/**
|
||||
* @openapi
|
||||
* /health/ping:
|
||||
* get:
|
||||
* summary: Simple ping endpoint
|
||||
* description: Returns a pong response to verify server is responsive
|
||||
* tags:
|
||||
* - Health
|
||||
* responses:
|
||||
* 200:
|
||||
* description: Server is responsive
|
||||
* content:
|
||||
* application/json:
|
||||
* schema:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* properties:
|
||||
* message:
|
||||
* type: string
|
||||
* example: pong
|
||||
*/
|
||||
router.get('/ping', validateRequest(emptySchema), (_req: Request, res: Response) => {
|
||||
return sendSuccess(res, { message: 'pong' });
|
||||
});
|
||||
```
|
||||
|
||||
### Route Documentation Priority
|
||||
|
||||
Document routes in this order of priority:
|
||||
|
||||
1. **Health Routes** - `/api/health/*` (public, critical for operations)
|
||||
2. **Auth Routes** - `/api/auth/*` (public, essential for integration)
|
||||
3. **Gamification Routes** - `/api/achievements/*` (simple, good example)
|
||||
4. **Flyer Routes** - `/api/flyers/*` (core functionality)
|
||||
5. **User Routes** - `/api/users/*` (common CRUD patterns)
|
||||
6. **Remaining Routes** - Budget, Recipe, Admin, etc.
|
||||
|
||||
### Swagger UI Setup
|
||||
|
||||
In `server.ts`, add the Swagger UI middleware (development/test only):
|
||||
|
||||
```typescript
|
||||
import swaggerUi from 'swagger-ui-express';
|
||||
import { swaggerSpec } from './src/config/swagger';
|
||||
|
||||
// Only serve Swagger UI in non-production environments
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
app.use('/docs/api-docs', swaggerUi.serve, swaggerUi.setup(swaggerSpec));
|
||||
|
||||
// Optionally expose raw JSON spec for tooling
|
||||
app.get('/docs/api-docs.json', (_req, res) => {
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
res.send(swaggerSpec);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Response Schema Standardization
|
||||
|
||||
All API responses follow the standardized format from [ADR-028](./0028-api-response-standardization.md):
|
||||
|
||||
```typescript
|
||||
// Success response
|
||||
{
|
||||
"success": true,
|
||||
"data": { ... }
|
||||
}
|
||||
|
||||
// Error response
|
||||
{
|
||||
"success": false,
|
||||
"error": {
|
||||
"code": "ERROR_CODE",
|
||||
"message": "Human-readable message"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Define reusable schema components for these patterns:
|
||||
|
||||
```typescript
|
||||
/**
|
||||
* @openapi
|
||||
* components:
|
||||
* schemas:
|
||||
* SuccessResponse:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: true
|
||||
* data:
|
||||
* type: object
|
||||
* ErrorResponse:
|
||||
* type: object
|
||||
* properties:
|
||||
* success:
|
||||
* type: boolean
|
||||
* example: false
|
||||
* error:
|
||||
* type: object
|
||||
* properties:
|
||||
* code:
|
||||
* type: string
|
||||
* message:
|
||||
* type: string
|
||||
*/
|
||||
```
|
||||
|
||||
### Security Considerations
|
||||
|
||||
1. **Production Disabled**: Swagger UI is not available in production to prevent information disclosure.
|
||||
2. **No Sensitive Data**: Never include actual secrets, tokens, or PII in example values.
|
||||
3. **Authentication Documented**: Clearly document which endpoints require authentication.
|
||||
|
||||
## API Route Tags
|
||||
|
||||
Organize endpoints using consistent tags:
|
||||
|
||||
| Tag | Description | Routes |
|
||||
| ------------ | ---------------------------------- | --------------------- |
|
||||
| Health | Server health and readiness checks | `/api/health/*` |
|
||||
| Auth | Authentication and authorization | `/api/auth/*` |
|
||||
| Users | User profile management | `/api/users/*` |
|
||||
| Flyers | Flyer uploads and retrieval | `/api/flyers/*` |
|
||||
| Achievements | Gamification and leaderboards | `/api/achievements/*` |
|
||||
| Budgets | Budget tracking | `/api/budgets/*` |
|
||||
| Recipes | Recipe management | `/api/recipes/*` |
|
||||
| Admin | Administrative operations | `/api/admin/*` |
|
||||
| System | System status and monitoring | `/api/system/*` |
|
||||
|
||||
## Testing
|
||||
|
||||
Verify API documentation is correct by:
|
||||
|
||||
1. **Manual Review**: Navigate to `/docs/api-docs` and test each endpoint.
|
||||
2. **Spec Validation**: Use OpenAPI validators to check the generated spec.
|
||||
3. **Integration Tests**: Existing integration tests serve as implicit documentation verification.
|
||||
|
||||
## Consequences
|
||||
|
||||
- **Positive**: Creates a single source of truth for API documentation that stays in sync with the code. Enables auto-generation of client SDKs and simplifies testing.
|
||||
- **Negative**: Requires developers to maintain JSDoc annotations on all routes. Adds a build step and new dependencies to the project.
|
||||
### Positive
|
||||
|
||||
- **Single Source of Truth**: Documentation lives with the code and stays in sync.
|
||||
- **Interactive Exploration**: Developers can try endpoints directly from the UI.
|
||||
- **SDK Generation**: OpenAPI spec enables automatic client SDK generation.
|
||||
- **Onboarding**: New developers can quickly understand the API surface.
|
||||
- **Low Overhead**: JSDoc annotations are minimal additions to existing code.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Maintenance Required**: Developers must update annotations when routes change.
|
||||
- **Build Dependency**: Adds `swagger-jsdoc` and `swagger-ui-express` packages.
|
||||
- **Initial Investment**: Existing routes need annotations added incrementally.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Include documentation checks in code review process.
|
||||
- Start with high-priority routes and expand coverage over time.
|
||||
- Use TypeScript types to reduce documentation duplication where possible.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/config/swagger.ts` - OpenAPI configuration
|
||||
- `src/routes/*.ts` - Route files with JSDoc annotations
|
||||
- `server.ts` - Swagger UI middleware setup
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-003](./0003-standardized-input-validation-using-middleware.md) - Input Validation (Zod schemas)
|
||||
- [ADR-028](./0028-api-response-standardization.md) - Response Standardization
|
||||
- [ADR-016](./0016-api-security-hardening.md) - Security Hardening
|
||||
|
||||
214
docs/adr/0040-testing-economics-and-priorities.md
Normal file
214
docs/adr/0040-testing-economics-and-priorities.md
Normal file
@@ -0,0 +1,214 @@
|
||||
# ADR-040: Testing Economics and Priorities
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
## Context
|
||||
|
||||
ADR-010 established the testing strategy and standards. However, it does not address the economic trade-offs of testing: when the cost of writing and maintaining tests exceeds their value. This document provides practical guidance on where to invest testing effort for maximum return.
|
||||
|
||||
## Decision
|
||||
|
||||
We adopt a **value-based testing approach** that prioritizes tests based on:
|
||||
|
||||
1. Risk of the code path (what breaks if this fails?)
|
||||
2. Stability of the code (how often does this change?)
|
||||
3. Complexity of the logic (can a human easily verify correctness?)
|
||||
4. Cost of the test (setup complexity, execution time, maintenance burden)
|
||||
|
||||
## Testing Investment Matrix
|
||||
|
||||
| Test Type | Investment Level | When to Write | When to Skip |
|
||||
| --------------- | ------------------- | ------------------------------- | --------------------------------- |
|
||||
| **E2E** | Minimal (5 tests) | Critical user flows only | Everything else |
|
||||
| **Integration** | Moderate (17 tests) | API contracts, auth, DB queries | Internal service wiring |
|
||||
| **Unit** | High (185+ tests) | Business logic, utilities | Defensive fallbacks, trivial code |
|
||||
|
||||
## High-Value Tests (Always Write)
|
||||
|
||||
### E2E Tests (Budget: 5-10 tests total)
|
||||
|
||||
Write E2E tests for flows where failure means:
|
||||
|
||||
- Users cannot sign up or log in
|
||||
- Users cannot complete the core value proposition (upload flyer → see deals)
|
||||
- Money or data is at risk
|
||||
|
||||
**Current E2E coverage is appropriate:**
|
||||
|
||||
- `auth.e2e.test.ts` - Registration, login, password reset
|
||||
- `flyer-upload.e2e.test.ts` - Complete upload pipeline
|
||||
- `user-journey.e2e.test.ts` - Full user workflow
|
||||
- `admin-authorization.e2e.test.ts` - Admin access control
|
||||
- `admin-dashboard.e2e.test.ts` - Admin operations
|
||||
|
||||
**Do NOT add E2E tests for:**
|
||||
|
||||
- UI variations or styling
|
||||
- Edge cases (handle in unit tests)
|
||||
- Features that can be tested faster at a lower level
|
||||
|
||||
### Integration Tests (Budget: 15-25 tests)
|
||||
|
||||
Write integration tests for:
|
||||
|
||||
- Every public API endpoint (contract testing)
|
||||
- Authentication and authorization flows
|
||||
- Database queries that involve joins or complex logic
|
||||
- Middleware behavior (rate limiting, validation)
|
||||
|
||||
**Current integration coverage is appropriate:**
|
||||
|
||||
- Auth, admin, user routes
|
||||
- Flyer processing pipeline
|
||||
- Shopping lists, budgets, recipes
|
||||
- Gamification and notifications
|
||||
|
||||
**Do NOT add integration tests for:**
|
||||
|
||||
- Internal service-to-service calls (mock at boundaries)
|
||||
- Simple CRUD operations (test the repository pattern once)
|
||||
- UI components (use unit tests)
|
||||
|
||||
### Unit Tests (Budget: Proportional to complexity)
|
||||
|
||||
Write unit tests for:
|
||||
|
||||
- **Pure functions and utilities** - High value, easy to test
|
||||
- **Business logic in services** - Medium-high value
|
||||
- **React components** - Rendering, user interactions, state changes
|
||||
- **Custom hooks** - Data transformation, side effects
|
||||
- **Validators and parsers** - Edge cases matter here
|
||||
|
||||
## Low-Value Tests (Skip or Defer)
|
||||
|
||||
### Tests That Cost More Than They're Worth
|
||||
|
||||
1. **Defensive fallback code protected by types**
|
||||
|
||||
```typescript
|
||||
// This fallback can never execute if types are correct
|
||||
const name = store.name || 'Unknown'; // store.name is required
|
||||
```
|
||||
|
||||
- If you need `as any` to test it, the type system already prevents it
|
||||
- Either remove the fallback or accept the coverage gap
|
||||
|
||||
2. **Switch/case default branches for exhaustive enums**
|
||||
|
||||
```typescript
|
||||
switch (status) {
|
||||
case 'pending':
|
||||
return 'yellow';
|
||||
case 'complete':
|
||||
return 'green';
|
||||
default:
|
||||
return ''; // TypeScript prevents this
|
||||
}
|
||||
```
|
||||
|
||||
- The default exists for safety, not for execution
|
||||
- Don't test impossible states
|
||||
|
||||
3. **Trivial component variations**
|
||||
- Testing every tab in a tab panel when they share logic
|
||||
- Testing loading states that just show a spinner
|
||||
- Testing disabled button states (test the logic that disables, not the disabled state)
|
||||
|
||||
4. **Tests requiring excessive mock setup**
|
||||
- If test setup is longer than test assertions, reconsider
|
||||
- Per ADR-010: "Excessive mock setup" is a code smell
|
||||
|
||||
5. **Framework behavior verification**
|
||||
- React rendering, React Query caching, Router navigation
|
||||
- Trust the framework; test your code
|
||||
|
||||
### Coverage Gaps to Accept
|
||||
|
||||
The following coverage gaps are acceptable and should NOT be closed with tests:
|
||||
|
||||
| Pattern | Reason | Alternative |
|
||||
| ------------------------------------------ | ------------------------- | ----------------------------- |
|
||||
| `value \|\| 'default'` for required fields | Type system prevents | Remove fallback or accept gap |
|
||||
| `catch (error) { ... }` for typed APIs | Error types are known | Test the expected error types |
|
||||
| `default:` in exhaustive switches | TypeScript exhaustiveness | Accept gap |
|
||||
| Logging statements | Observability, not logic | No test needed |
|
||||
| Feature flags / environment checks | Tested by deployment | Config tests if complex |
|
||||
|
||||
## Time Budget Guidelines
|
||||
|
||||
For a typical feature (new API endpoint + UI):
|
||||
|
||||
| Activity | Time Budget | Notes |
|
||||
| --------------------------------------- | ----------- | ------------------------------------- |
|
||||
| Unit tests (component + hook + utility) | 30-45 min | Write alongside code |
|
||||
| Integration test (API contract) | 15-20 min | One test per endpoint |
|
||||
| E2E test | 0 min | Only for critical paths |
|
||||
| Total testing overhead | ~1 hour | Should not exceed implementation time |
|
||||
|
||||
**Rule of thumb**: If testing takes longer than implementation, you're either:
|
||||
|
||||
1. Testing too much
|
||||
2. Writing tests that are too complex
|
||||
3. Testing code that should be refactored
|
||||
|
||||
## Coverage Targets
|
||||
|
||||
We explicitly reject arbitrary coverage percentage targets. Instead:
|
||||
|
||||
| Metric | Target | Rationale |
|
||||
| ---------------------- | --------------- | -------------------------------------- |
|
||||
| Statement coverage | No target | High coverage ≠ quality tests |
|
||||
| Branch coverage | No target | Many branches are defensive/impossible |
|
||||
| E2E test count | 5-10 | Critical paths only |
|
||||
| Integration test count | 15-25 | API contracts |
|
||||
| Unit test files | 1:1 with source | Colocated, proportional |
|
||||
|
||||
## When to Add Tests to Existing Code
|
||||
|
||||
Add tests when:
|
||||
|
||||
1. **Fixing a bug** - Add a test that would have caught it
|
||||
2. **Refactoring** - Add tests before changing behavior
|
||||
3. **Code review feedback** - Reviewer identifies risk
|
||||
4. **Production incident** - Prevent recurrence
|
||||
|
||||
Do NOT add tests:
|
||||
|
||||
1. To increase coverage percentages
|
||||
2. For code that hasn't changed in 6+ months
|
||||
3. For code scheduled for deletion/replacement
|
||||
|
||||
## Consequences
|
||||
|
||||
**Positive:**
|
||||
|
||||
- Testing effort focuses on high-risk, high-value code
|
||||
- Developers spend less time on low-value tests
|
||||
- Test suite runs faster (fewer unnecessary tests)
|
||||
- Maintenance burden decreases
|
||||
|
||||
**Negative:**
|
||||
|
||||
- Some defensive code paths remain untested
|
||||
- Coverage percentages may not satisfy external audits
|
||||
- Requires judgment calls that may be inconsistent
|
||||
|
||||
## Key Files
|
||||
|
||||
- `docs/adr/0010-testing-strategy-and-standards.md` - Testing mechanics
|
||||
- `vitest.config.ts` - Coverage configuration
|
||||
- `src/tests/` - Test utilities and setup
|
||||
|
||||
## Review Checklist
|
||||
|
||||
Before adding a new test, ask:
|
||||
|
||||
1. [ ] What user-visible behavior does this test protect?
|
||||
2. [ ] Can this be tested at a lower level (unit vs integration)?
|
||||
3. [ ] Does this test require `as any` or mock gymnastics?
|
||||
4. [ ] Will this test break when implementation changes (brittle)?
|
||||
5. [ ] Is the test setup simpler than the code being tested?
|
||||
|
||||
If any answer suggests low value, skip the test or simplify.
|
||||
291
docs/adr/0041-ai-gemini-integration-architecture.md
Normal file
291
docs/adr/0041-ai-gemini-integration-architecture.md
Normal file
@@ -0,0 +1,291 @@
|
||||
# ADR-041: AI/Gemini Integration Architecture
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The application relies heavily on Google Gemini AI for core functionality:
|
||||
|
||||
1. **Flyer Processing**: Extracting store names, dates, addresses, and individual sale items from uploaded flyer images.
|
||||
2. **Receipt Analysis**: Parsing purchased items and prices from receipt images.
|
||||
3. **Recipe Suggestions**: Generating recipe ideas based on available ingredients.
|
||||
4. **Text Extraction**: OCR-style extraction from cropped image regions.
|
||||
|
||||
These AI operations have unique challenges:
|
||||
|
||||
- **Rate Limits**: Google AI API enforces requests-per-minute (RPM) limits.
|
||||
- **Quota Buckets**: Different model families (stable, preview, experimental) have separate quotas.
|
||||
- **Model Availability**: Models may be unavailable due to regional restrictions, updates, or high load.
|
||||
- **Cost Variability**: Different models have different pricing (Flash-Lite vs Pro).
|
||||
- **Output Limits**: Some models have 8k token limits, others 65k.
|
||||
- **Testability**: Tests must not make real API calls.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a centralized `AIService` class with:
|
||||
|
||||
1. **Dependency Injection**: AI client and filesystem are injectable for testability.
|
||||
2. **Model Fallback Chain**: Automatic failover through prioritized model lists.
|
||||
3. **Rate Limiting**: Per-instance rate limiter using `p-ratelimit`.
|
||||
4. **Tiered Model Selection**: Different model lists for different task types.
|
||||
5. **Environment-Aware Mocking**: Automatic mock client in test environments.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Single Responsibility**: `AIService` handles all AI interactions.
|
||||
- **Fail-Safe Fallbacks**: If a model fails, try the next one in the chain.
|
||||
- **Cost Optimization**: Use cheaper "lite" models for simple text tasks.
|
||||
- **Structured Logging**: Log all AI interactions with timing and model info.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### AIService Class Structure
|
||||
|
||||
Located in `src/services/aiService.server.ts`:
|
||||
|
||||
```typescript
|
||||
interface IAiClient {
|
||||
generateContent(request: {
|
||||
contents: Content[];
|
||||
tools?: Tool[];
|
||||
useLiteModels?: boolean;
|
||||
}): Promise<GenerateContentResponse>;
|
||||
}
|
||||
|
||||
interface IFileSystem {
|
||||
readFile(path: string): Promise<Buffer>;
|
||||
}
|
||||
|
||||
export class AIService {
|
||||
private aiClient: IAiClient;
|
||||
private fs: IFileSystem;
|
||||
private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
|
||||
private logger: Logger;
|
||||
|
||||
constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
|
||||
// If aiClient provided: use it (unit test)
|
||||
// Else if test environment: use internal mock (integration test)
|
||||
// Else: create real GoogleGenAI client (production)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Tiered Model Lists
|
||||
|
||||
Models are organized by task complexity and quota bucket:
|
||||
|
||||
```typescript
|
||||
// For image processing (vision + long output)
|
||||
private readonly models = [
|
||||
// Tier A: Fast & Stable
|
||||
'gemini-2.5-flash', // Primary, 65k output
|
||||
'gemini-2.5-flash-lite', // Cost-saver, 65k output
|
||||
|
||||
// Tier B: Heavy Lifters
|
||||
'gemini-2.5-pro', // Complex layouts, 65k output
|
||||
|
||||
// Tier C: Preview Bucket (separate quota)
|
||||
'gemini-3-flash-preview',
|
||||
'gemini-3-pro-preview',
|
||||
|
||||
// Tier D: Experimental Bucket
|
||||
'gemini-exp-1206',
|
||||
|
||||
// Tier E: Last Resort
|
||||
'gemma-3-27b-it',
|
||||
'gemini-2.0-flash-exp', // WARNING: 8k limit
|
||||
];
|
||||
|
||||
// For simple text tasks (recipes, categorization)
|
||||
private readonly models_lite = [
|
||||
'gemini-2.5-flash-lite',
|
||||
'gemini-2.0-flash-lite-001',
|
||||
'gemini-2.0-flash-001',
|
||||
'gemma-3-12b-it',
|
||||
'gemma-3-4b-it',
|
||||
'gemini-2.0-flash-exp',
|
||||
];
|
||||
```
|
||||
|
||||
### Fallback with Retry Logic
|
||||
|
||||
```typescript
|
||||
private async _generateWithFallback(
|
||||
genAI: GoogleGenAI,
|
||||
request: { contents: Content[]; tools?: Tool[] },
|
||||
models: string[],
|
||||
): Promise<GenerateContentResponse> {
|
||||
let lastError: Error | null = null;
|
||||
|
||||
for (const modelName of models) {
|
||||
try {
|
||||
return await genAI.models.generateContent({ model: modelName, ...request });
|
||||
} catch (error: unknown) {
|
||||
const errorMsg = extractErrorMessage(error);
|
||||
const isRetriable = [
|
||||
'quota', '429', '503', 'resource_exhausted',
|
||||
'overloaded', 'unavailable', 'not found'
|
||||
].some(term => errorMsg.toLowerCase().includes(term));
|
||||
|
||||
if (isRetriable) {
|
||||
this.logger.warn(`Model '${modelName}' failed, trying next...`);
|
||||
lastError = new Error(errorMsg);
|
||||
continue;
|
||||
}
|
||||
throw error; // Non-retriable error
|
||||
}
|
||||
}
|
||||
throw lastError || new Error('All AI models failed.');
|
||||
}
|
||||
```
|
||||
|
||||
### Rate Limiting
|
||||
|
||||
```typescript
|
||||
const requestsPerMinute = parseInt(process.env.GEMINI_RPM || '5', 10);
|
||||
this.rateLimiter = pRateLimit({
|
||||
interval: 60 * 1000,
|
||||
rate: requestsPerMinute,
|
||||
concurrency: requestsPerMinute,
|
||||
});
|
||||
|
||||
// Usage:
|
||||
const result = await this.rateLimiter(() =>
|
||||
this.aiClient.generateContent({ contents: [...] })
|
||||
);
|
||||
```
|
||||
|
||||
### Test Environment Detection
|
||||
|
||||
```typescript
|
||||
const isTestEnvironment = process.env.NODE_ENV === 'test' || !!process.env.VITEST_POOL_ID;
|
||||
|
||||
if (aiClient) {
|
||||
// Unit test: use provided mock
|
||||
this.aiClient = aiClient;
|
||||
} else if (isTestEnvironment) {
|
||||
// Integration test: use internal mock
|
||||
this.aiClient = {
|
||||
generateContent: async () => ({
|
||||
text: JSON.stringify(this.getMockFlyerData()),
|
||||
}),
|
||||
};
|
||||
} else {
|
||||
// Production: use real client
|
||||
const genAI = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });
|
||||
this.aiClient = { generateContent: /* adapter */ };
|
||||
}
|
||||
```
|
||||
|
||||
### Prompt Engineering
|
||||
|
||||
Prompts are constructed with:
|
||||
|
||||
1. **Clear Task Definition**: What to extract and in what format.
|
||||
2. **Structured Output Requirements**: JSON schema with field descriptions.
|
||||
3. **Examples**: Concrete examples of expected output.
|
||||
4. **Context Hints**: User location for store address resolution.
|
||||
|
||||
```typescript
|
||||
private _buildFlyerExtractionPrompt(
|
||||
masterItems: MasterGroceryItem[],
|
||||
submitterIp?: string,
|
||||
userProfileAddress?: string,
|
||||
): string {
|
||||
// Location hint for address resolution
|
||||
let locationHint = '';
|
||||
if (userProfileAddress) {
|
||||
locationHint = `The user has profile address "${userProfileAddress}"...`;
|
||||
}
|
||||
|
||||
// Simplified master item list (reduce token usage)
|
||||
const simplifiedMasterList = masterItems.map(item => ({
|
||||
id: item.master_grocery_item_id,
|
||||
name: item.name,
|
||||
}));
|
||||
|
||||
return `
|
||||
# TASK
|
||||
Analyze the flyer image(s) and extract...
|
||||
|
||||
# RULES
|
||||
1. Extract store_name, valid_from, valid_to, store_address
|
||||
2. Extract items array with item, price_display, price_in_cents...
|
||||
|
||||
# EXAMPLES
|
||||
- { "item": "Red Grapes", "price_display": "$1.99 /lb", ... }
|
||||
|
||||
# MASTER LIST
|
||||
${JSON.stringify(simplifiedMasterList)}
|
||||
`;
|
||||
}
|
||||
```
|
||||
|
||||
### Response Parsing
|
||||
|
||||
AI responses may contain markdown, trailing text, or formatting issues:
|
||||
|
||||
````typescript
|
||||
private _parseJsonFromAiResponse<T>(responseText: string | undefined, logger: Logger): T | null {
|
||||
if (!responseText) return null;
|
||||
|
||||
// Try to extract from markdown code block
|
||||
const markdownMatch = responseText.match(/```(json)?\s*([\s\S]*?)\s*```/);
|
||||
let jsonString = markdownMatch?.[2]?.trim() || responseText;
|
||||
|
||||
// Find JSON boundaries
|
||||
const startIndex = Math.min(
|
||||
jsonString.indexOf('{') >= 0 ? jsonString.indexOf('{') : Infinity,
|
||||
jsonString.indexOf('[') >= 0 ? jsonString.indexOf('[') : Infinity
|
||||
);
|
||||
const endIndex = Math.max(jsonString.lastIndexOf('}'), jsonString.lastIndexOf(']'));
|
||||
|
||||
if (startIndex === Infinity || endIndex === -1) return null;
|
||||
|
||||
try {
|
||||
return JSON.parse(jsonString.substring(startIndex, endIndex + 1));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
````
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Resilience**: Automatic failover when models are unavailable or rate-limited.
|
||||
- **Cost Control**: Uses cheaper models for simple tasks.
|
||||
- **Testability**: Full mock support for unit and integration tests.
|
||||
- **Observability**: Detailed logging of all AI operations with timing.
|
||||
- **Maintainability**: Centralized AI logic in one service.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Model List Maintenance**: Must update model lists when new models release.
|
||||
- **Complexity**: Fallback logic adds complexity.
|
||||
- **Delayed Failures**: May take longer to fail if all models are down.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Monitor model deprecation announcements from Google.
|
||||
- Add health checks that validate AI connectivity on startup.
|
||||
- Consider caching successful model selections per task type.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/aiService.server.ts` - Main AIService class
|
||||
- `src/services/aiService.server.test.ts` - Unit tests with mocked AI client
|
||||
- `src/services/aiApiClient.ts` - Low-level API client wrapper
|
||||
- `src/services/aiAnalysisService.ts` - Higher-level analysis orchestration
|
||||
- `src/types/ai.ts` - Zod schemas for AI response validation
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md) - Naming Conventions for AI Types
|
||||
- [ADR-039](./0039-dependency-injection-pattern.md) - Dependency Injection Pattern
|
||||
- [ADR-001](./0001-standardized-error-handling.md) - Error Handling
|
||||
329
docs/adr/0042-email-and-notification-architecture.md
Normal file
329
docs/adr/0042-email-and-notification-architecture.md
Normal file
@@ -0,0 +1,329 @@
|
||||
# ADR-042: Email and Notification Architecture
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The application sends emails for multiple purposes:
|
||||
|
||||
1. **Transactional Emails**: Password reset, welcome emails, account verification.
|
||||
2. **Deal Notifications**: Alerting users when watched items go on sale.
|
||||
3. **Bulk Communications**: System announcements, marketing (future).
|
||||
|
||||
Email delivery has unique challenges:
|
||||
|
||||
- **Reliability**: Emails must be delivered even if the main request fails.
|
||||
- **Rate Limits**: SMTP servers enforce sending limits.
|
||||
- **Retry Logic**: Failed emails should be retried with backoff.
|
||||
- **Templating**: Emails need consistent branding and formatting.
|
||||
- **Testing**: Tests should not send real emails.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a queue-based email system using:
|
||||
|
||||
1. **Nodemailer**: For SMTP transport and email composition.
|
||||
2. **BullMQ**: For job queuing, retry logic, and rate limiting.
|
||||
3. **Dedicated Worker**: Background process for email delivery.
|
||||
4. **Structured Logging**: Job-scoped logging for debugging.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Asynchronous Delivery**: Queue emails immediately, deliver asynchronously.
|
||||
- **Idempotent Jobs**: Jobs can be retried safely.
|
||||
- **Separation of Concerns**: Email composition separate from delivery.
|
||||
- **Environment-Aware**: Disable real sending in test environments.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Email Service Structure
|
||||
|
||||
Located in `src/services/emailService.server.ts`:
|
||||
|
||||
```typescript
|
||||
import nodemailer from 'nodemailer';
|
||||
import type { Job } from 'bullmq';
|
||||
import type { Logger } from 'pino';
|
||||
|
||||
// SMTP transporter configured from environment
|
||||
const transporter = nodemailer.createTransport({
|
||||
host: process.env.SMTP_HOST,
|
||||
port: parseInt(process.env.SMTP_PORT || '587', 10),
|
||||
secure: process.env.SMTP_SECURE === 'true',
|
||||
auth: {
|
||||
user: process.env.SMTP_USER,
|
||||
pass: process.env.SMTP_PASS,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Email Job Data Structure
|
||||
|
||||
```typescript
|
||||
// src/types/job-data.ts
|
||||
export interface EmailJobData {
|
||||
to: string;
|
||||
subject: string;
|
||||
text: string;
|
||||
html: string;
|
||||
}
|
||||
```
|
||||
|
||||
### Core Send Function
|
||||
|
||||
```typescript
|
||||
export const sendEmail = async (options: EmailJobData, logger: Logger) => {
|
||||
const mailOptions = {
|
||||
from: `"Flyer Crawler" <${process.env.SMTP_FROM_EMAIL}>`,
|
||||
to: options.to,
|
||||
subject: options.subject,
|
||||
text: options.text,
|
||||
html: options.html,
|
||||
};
|
||||
|
||||
const info = await transporter.sendMail(mailOptions);
|
||||
logger.info(
|
||||
{ to: options.to, subject: options.subject, messageId: info.messageId },
|
||||
'Email sent successfully.',
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
### Job Processor
|
||||
|
||||
```typescript
|
||||
export const processEmailJob = async (job: Job<EmailJobData>) => {
|
||||
// Create child logger with job context
|
||||
const jobLogger = globalLogger.child({
|
||||
jobId: job.id,
|
||||
jobName: job.name,
|
||||
recipient: job.data.to,
|
||||
});
|
||||
|
||||
jobLogger.info('Picked up email job.');
|
||||
|
||||
try {
|
||||
await sendEmail(job.data, jobLogger);
|
||||
} catch (error) {
|
||||
const wrappedError = error instanceof Error ? error : new Error(String(error));
|
||||
jobLogger.error({ err: wrappedError, attemptsMade: job.attemptsMade }, 'Email job failed.');
|
||||
throw wrappedError; // BullMQ will retry
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Specialized Email Functions
|
||||
|
||||
#### Password Reset
|
||||
|
||||
```typescript
|
||||
export const sendPasswordResetEmail = async (to: string, token: string, logger: Logger) => {
|
||||
const resetUrl = `${process.env.FRONTEND_URL}/reset-password?token=${token}`;
|
||||
|
||||
const html = `
|
||||
<div style="font-family: sans-serif; padding: 20px;">
|
||||
<h2>Password Reset Request</h2>
|
||||
<p>Click the link below to set a new password. This link expires in 1 hour.</p>
|
||||
<a href="${resetUrl}" style="background-color: #007bff; color: white; padding: 14px 25px; ...">
|
||||
Reset Your Password
|
||||
</a>
|
||||
<p>If you did not request this, please ignore this email.</p>
|
||||
</div>
|
||||
`;
|
||||
|
||||
await sendEmail({ to, subject: 'Your Password Reset Request', text: '...', html }, logger);
|
||||
};
|
||||
```
|
||||
|
||||
#### Welcome Email
|
||||
|
||||
```typescript
|
||||
export const sendWelcomeEmail = async (to: string, name: string | null, logger: Logger) => {
|
||||
const recipientName = name || 'there';
|
||||
const html = `
|
||||
<div style="font-family: sans-serif; padding: 20px;">
|
||||
<h2>Welcome!</h2>
|
||||
<p>Hello ${recipientName},</p>
|
||||
<p>Thank you for joining Flyer Crawler.</p>
|
||||
<p>Start by uploading your first flyer to see how much you can save!</p>
|
||||
</div>
|
||||
`;
|
||||
|
||||
await sendEmail({ to, subject: 'Welcome to Flyer Crawler!', text: '...', html }, logger);
|
||||
};
|
||||
```
|
||||
|
||||
#### Deal Notifications
|
||||
|
||||
```typescript
|
||||
export const sendDealNotificationEmail = async (
|
||||
to: string,
|
||||
name: string | null,
|
||||
deals: WatchedItemDeal[],
|
||||
logger: Logger,
|
||||
) => {
|
||||
const dealsListHtml = deals
|
||||
.map(
|
||||
(deal) => `
|
||||
<li>
|
||||
<strong>${deal.item_name}</strong> is on sale for
|
||||
<strong>$${(deal.best_price_in_cents / 100).toFixed(2)}</strong>
|
||||
at ${deal.store_name}!
|
||||
</li>
|
||||
`,
|
||||
)
|
||||
.join('');
|
||||
|
||||
const html = `
|
||||
<h1>Hi ${name || 'there'},</h1>
|
||||
<p>We found great deals on items you're watching:</p>
|
||||
<ul>${dealsListHtml}</ul>
|
||||
<p>Check them out on the deals page!</p>
|
||||
`;
|
||||
|
||||
await sendEmail({ to, subject: 'New Deals Found!', text: '...', html }, logger);
|
||||
};
|
||||
```
|
||||
|
||||
### Queue Configuration
|
||||
|
||||
Located in `src/services/queueService.server.ts`:
|
||||
|
||||
```typescript
|
||||
import { Queue, Worker, Job } from 'bullmq';
|
||||
import { processEmailJob } from './emailService.server';
|
||||
|
||||
export const emailQueue = new Queue<EmailJobData>('email', {
|
||||
connection: redisConnection,
|
||||
defaultJobOptions: {
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 1000,
|
||||
},
|
||||
removeOnComplete: 100,
|
||||
removeOnFail: 500,
|
||||
},
|
||||
});
|
||||
|
||||
// Worker to process email jobs
|
||||
const emailWorker = new Worker('email', processEmailJob, {
|
||||
connection: redisConnection,
|
||||
concurrency: 5,
|
||||
});
|
||||
```
|
||||
|
||||
### Enqueueing Emails
|
||||
|
||||
```typescript
|
||||
// From backgroundJobService.ts
|
||||
await emailQueue.add('deal-notification', {
|
||||
to: user.email,
|
||||
subject: 'New Deals Found!',
|
||||
text: textContent,
|
||||
html: htmlContent,
|
||||
});
|
||||
```
|
||||
|
||||
### Background Job Integration
|
||||
|
||||
Located in `src/services/backgroundJobService.ts`:
|
||||
|
||||
```typescript
|
||||
export class BackgroundJobService {
|
||||
constructor(
|
||||
private personalizationRepo: PersonalizationRepository,
|
||||
private notificationRepo: NotificationRepository,
|
||||
private emailQueue: Queue<EmailJobData>,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async runDailyDealCheck(): Promise<void> {
|
||||
this.logger.info('Starting daily deal check...');
|
||||
|
||||
const deals = await this.personalizationRepo.getBestSalePricesForAllUsers(this.logger);
|
||||
|
||||
for (const userDeals of deals) {
|
||||
await this.emailQueue.add('deal-notification', {
|
||||
to: userDeals.email,
|
||||
subject: 'New Deals Found!',
|
||||
text: '...',
|
||||
html: '...',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
```bash
|
||||
# SMTP Configuration
|
||||
SMTP_HOST=smtp.example.com
|
||||
SMTP_PORT=587
|
||||
SMTP_SECURE=false
|
||||
SMTP_USER=user@example.com
|
||||
SMTP_PASS=secret
|
||||
SMTP_FROM_EMAIL=noreply@flyer-crawler.com
|
||||
|
||||
# Frontend URL for email links
|
||||
FRONTEND_URL=https://flyer-crawler.com
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Reliability**: Failed emails are automatically retried with exponential backoff.
|
||||
- **Scalability**: Queue can handle burst traffic without overwhelming SMTP.
|
||||
- **Observability**: Job-scoped logging enables easy debugging.
|
||||
- **Separation**: Email composition is decoupled from delivery timing.
|
||||
- **Testability**: Can mock the queue or use Ethereal for testing.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Complexity**: Adds queue infrastructure dependency (Redis).
|
||||
- **Delayed Delivery**: Emails are not instant (queued first).
|
||||
- **Monitoring Required**: Need to monitor queue depth and failure rates.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Use Bull Board UI for queue monitoring (already implemented).
|
||||
- Set up alerts for queue depth and failure rate thresholds.
|
||||
- Consider Ethereal or MailHog for development/testing.
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
```typescript
|
||||
// Unit test with mocked queue
|
||||
const mockEmailQueue = {
|
||||
add: vi.fn().mockResolvedValue({ id: 'job-1' }),
|
||||
};
|
||||
|
||||
const service = new BackgroundJobService(
|
||||
mockPersonalizationRepo,
|
||||
mockNotificationRepo,
|
||||
mockEmailQueue as any,
|
||||
mockLogger,
|
||||
);
|
||||
|
||||
await service.runDailyDealCheck();
|
||||
expect(mockEmailQueue.add).toHaveBeenCalledWith('deal-notification', expect.any(Object));
|
||||
```
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/services/emailService.server.ts` - Email composition and sending
|
||||
- `src/services/queueService.server.ts` - Queue configuration and workers
|
||||
- `src/services/backgroundJobService.ts` - Scheduled deal notifications
|
||||
- `src/types/job-data.ts` - Email job data types
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-006](./0006-background-job-processing-and-task-queues.md) - Background Job Processing
|
||||
- [ADR-004](./0004-standardized-application-wide-structured-logging.md) - Structured Logging
|
||||
- [ADR-039](./0039-dependency-injection-pattern.md) - Dependency Injection
|
||||
392
docs/adr/0043-express-middleware-pipeline.md
Normal file
392
docs/adr/0043-express-middleware-pipeline.md
Normal file
@@ -0,0 +1,392 @@
|
||||
# ADR-043: Express Middleware Pipeline Architecture
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The Express application uses a layered middleware pipeline to handle cross-cutting concerns:
|
||||
|
||||
1. **Security**: Helmet headers, CORS, rate limiting.
|
||||
2. **Parsing**: JSON body, URL-encoded, cookies.
|
||||
3. **Authentication**: Session management, JWT verification.
|
||||
4. **Validation**: Request body/params validation.
|
||||
5. **File Handling**: Multipart form data, file uploads.
|
||||
6. **Error Handling**: Centralized error responses.
|
||||
|
||||
Middleware ordering is critical - incorrect ordering can cause security vulnerabilities or broken functionality. This ADR documents the canonical middleware order and patterns.
|
||||
|
||||
## Decision
|
||||
|
||||
We will establish a strict middleware ordering convention:
|
||||
|
||||
1. **Security First**: Security headers and protections apply to all requests.
|
||||
2. **Parsing Before Logic**: Body/cookie parsing before route handlers.
|
||||
3. **Auth Before Routes**: Authentication middleware before protected routes.
|
||||
4. **Validation At Route Level**: Per-route validation middleware.
|
||||
5. **Error Handler Last**: Centralized error handling catches all errors.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Defense in Depth**: Multiple security layers.
|
||||
- **Fail-Fast**: Reject bad requests early in the pipeline.
|
||||
- **Explicit Ordering**: Document and enforce middleware order.
|
||||
- **Route-Level Flexibility**: Specific middleware per route as needed.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Global Middleware Order
|
||||
|
||||
Located in `src/server.ts`:
|
||||
|
||||
```typescript
|
||||
import express from 'express';
|
||||
import helmet from 'helmet';
|
||||
import cors from 'cors';
|
||||
import cookieParser from 'cookie-parser';
|
||||
import { requestTimeoutMiddleware } from './middleware/timeout.middleware';
|
||||
import { rateLimiter } from './middleware/rateLimit.middleware';
|
||||
import { errorHandler } from './middleware/errorHandler.middleware';
|
||||
|
||||
const app = express();
|
||||
|
||||
// ============================================
|
||||
// LAYER 1: Security Headers & Protections
|
||||
// ============================================
|
||||
app.use(
|
||||
helmet({
|
||||
contentSecurityPolicy: {
|
||||
directives: {
|
||||
defaultSrc: ["'self'"],
|
||||
scriptSrc: ["'self'", "'unsafe-inline'"],
|
||||
styleSrc: ["'self'", "'unsafe-inline'"],
|
||||
imgSrc: ["'self'", 'data:', 'blob:'],
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
app.use(
|
||||
cors({
|
||||
origin: process.env.FRONTEND_URL,
|
||||
credentials: true,
|
||||
}),
|
||||
);
|
||||
|
||||
// ============================================
|
||||
// LAYER 2: Request Limits & Timeouts
|
||||
// ============================================
|
||||
app.use(requestTimeoutMiddleware(30000)); // 30s default
|
||||
app.use(rateLimiter); // Rate limiting per IP
|
||||
|
||||
// ============================================
|
||||
// LAYER 3: Body & Cookie Parsing
|
||||
// ============================================
|
||||
app.use(express.json({ limit: '10mb' }));
|
||||
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
||||
app.use(cookieParser());
|
||||
|
||||
// ============================================
|
||||
// LAYER 4: Static Assets (before auth)
|
||||
// ============================================
|
||||
app.use('/flyer-images', express.static('flyer-images'));
|
||||
|
||||
// ============================================
|
||||
// LAYER 5: Authentication Setup
|
||||
// ============================================
|
||||
app.use(passport.initialize());
|
||||
app.use(passport.session());
|
||||
|
||||
// ============================================
|
||||
// LAYER 6: Routes (with per-route middleware)
|
||||
// ============================================
|
||||
app.use('/api/auth', authRoutes);
|
||||
app.use('/api/flyers', flyerRoutes);
|
||||
app.use('/api/admin', adminRoutes);
|
||||
// ... more routes
|
||||
|
||||
// ============================================
|
||||
// LAYER 7: Error Handling (must be last)
|
||||
// ============================================
|
||||
app.use(errorHandler);
|
||||
```
|
||||
|
||||
### Validation Middleware
|
||||
|
||||
Located in `src/middleware/validation.middleware.ts`:
|
||||
|
||||
```typescript
|
||||
import { z } from 'zod';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { ValidationError } from '../services/db/errors.db';
|
||||
|
||||
export const validate = <T extends z.ZodType>(schema: T) => {
|
||||
return (req: Request, res: Response, next: NextFunction) => {
|
||||
const result = schema.safeParse({
|
||||
body: req.body,
|
||||
query: req.query,
|
||||
params: req.params,
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
const errors = result.error.errors.map((err) => ({
|
||||
path: err.path.join('.'),
|
||||
message: err.message,
|
||||
}));
|
||||
return next(new ValidationError(errors));
|
||||
}
|
||||
|
||||
// Attach validated data to request
|
||||
req.validated = result.data;
|
||||
next();
|
||||
};
|
||||
};
|
||||
|
||||
// Usage in routes:
|
||||
router.post('/flyers', authenticate, validate(CreateFlyerSchema), flyerController.create);
|
||||
```
|
||||
|
||||
### File Upload Middleware
|
||||
|
||||
Located in `src/middleware/fileUpload.middleware.ts`:
|
||||
|
||||
```typescript
|
||||
import multer from 'multer';
|
||||
import path from 'path';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
const storage = multer.diskStorage({
|
||||
destination: (req, file, cb) => {
|
||||
cb(null, 'flyer-images/');
|
||||
},
|
||||
filename: (req, file, cb) => {
|
||||
const ext = path.extname(file.originalname);
|
||||
cb(null, `${uuidv4()}${ext}`);
|
||||
},
|
||||
});
|
||||
|
||||
const fileFilter = (req: Request, file: Express.Multer.File, cb: multer.FileFilterCallback) => {
|
||||
const allowedTypes = ['image/jpeg', 'image/png', 'image/webp', 'application/pdf'];
|
||||
if (allowedTypes.includes(file.mimetype)) {
|
||||
cb(null, true);
|
||||
} else {
|
||||
cb(new Error('Invalid file type'));
|
||||
}
|
||||
};
|
||||
|
||||
export const uploadFlyer = multer({
|
||||
storage,
|
||||
fileFilter,
|
||||
limits: {
|
||||
fileSize: 10 * 1024 * 1024, // 10MB
|
||||
files: 10, // Max 10 files per request
|
||||
},
|
||||
});
|
||||
|
||||
// Usage:
|
||||
router.post('/flyers/upload', uploadFlyer.array('files', 10), flyerController.upload);
|
||||
```
|
||||
|
||||
### Authentication Middleware
|
||||
|
||||
Located in `src/middleware/auth.middleware.ts`:
|
||||
|
||||
```typescript
|
||||
import passport from 'passport';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
|
||||
// Require authenticated user
|
||||
export const authenticate = (req: Request, res: Response, next: NextFunction) => {
|
||||
passport.authenticate('jwt', { session: false }, (err, user) => {
|
||||
if (err) return next(err);
|
||||
if (!user) {
|
||||
return res.status(401).json({ error: 'Unauthorized' });
|
||||
}
|
||||
req.user = user;
|
||||
next();
|
||||
})(req, res, next);
|
||||
};
|
||||
|
||||
// Require admin role
|
||||
export const requireAdmin = (req: Request, res: Response, next: NextFunction) => {
|
||||
if (!req.user?.role || req.user.role !== 'admin') {
|
||||
return res.status(403).json({ error: 'Forbidden' });
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
||||
// Optional auth (attach user if present, continue if not)
|
||||
export const optionalAuth = (req: Request, res: Response, next: NextFunction) => {
|
||||
passport.authenticate('jwt', { session: false }, (err, user) => {
|
||||
if (user) req.user = user;
|
||||
next();
|
||||
})(req, res, next);
|
||||
};
|
||||
```
|
||||
|
||||
### Error Handler Middleware
|
||||
|
||||
Located in `src/middleware/errorHandler.middleware.ts`:
|
||||
|
||||
```typescript
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { ValidationError, NotFoundError, UniqueConstraintError } from '../services/db/errors.db';
|
||||
|
||||
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
|
||||
const errorId = uuidv4();
|
||||
|
||||
// Log error with context
|
||||
logger.error(
|
||||
{
|
||||
errorId,
|
||||
err,
|
||||
path: req.path,
|
||||
method: req.method,
|
||||
userId: req.user?.user_id,
|
||||
},
|
||||
'Request error',
|
||||
);
|
||||
|
||||
// Map error types to HTTP responses
|
||||
if (err instanceof ValidationError) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: { code: 'VALIDATION_ERROR', message: err.message, details: err.errors },
|
||||
meta: { errorId },
|
||||
});
|
||||
}
|
||||
|
||||
if (err instanceof NotFoundError) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: { code: 'NOT_FOUND', message: err.message },
|
||||
meta: { errorId },
|
||||
});
|
||||
}
|
||||
|
||||
if (err instanceof UniqueConstraintError) {
|
||||
return res.status(409).json({
|
||||
success: false,
|
||||
error: { code: 'CONFLICT', message: err.message },
|
||||
meta: { errorId },
|
||||
});
|
||||
}
|
||||
|
||||
// Default: Internal Server Error
|
||||
return res.status(500).json({
|
||||
success: false,
|
||||
error: {
|
||||
code: 'INTERNAL_ERROR',
|
||||
message: process.env.NODE_ENV === 'production' ? 'An unexpected error occurred' : err.message,
|
||||
},
|
||||
meta: { errorId },
|
||||
});
|
||||
};
|
||||
```
|
||||
|
||||
### Request Timeout Middleware
|
||||
|
||||
```typescript
|
||||
export const requestTimeoutMiddleware = (timeout: number) => {
|
||||
return (req: Request, res: Response, next: NextFunction) => {
|
||||
res.setTimeout(timeout, () => {
|
||||
if (!res.headersSent) {
|
||||
res.status(503).json({
|
||||
success: false,
|
||||
error: { code: 'TIMEOUT', message: 'Request timed out' },
|
||||
});
|
||||
}
|
||||
});
|
||||
next();
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
## Route-Level Middleware Patterns
|
||||
|
||||
### Protected Route with Validation
|
||||
|
||||
```typescript
|
||||
router.put(
|
||||
'/flyers/:flyerId',
|
||||
authenticate, // 1. Auth check
|
||||
validate(UpdateFlyerSchema), // 2. Input validation
|
||||
flyerController.update, // 3. Handler
|
||||
);
|
||||
```
|
||||
|
||||
### Admin-Only Route
|
||||
|
||||
```typescript
|
||||
router.delete(
|
||||
'/admin/users/:userId',
|
||||
authenticate, // 1. Auth check
|
||||
requireAdmin, // 2. Role check
|
||||
validate(DeleteUserSchema), // 3. Input validation
|
||||
adminController.deleteUser, // 4. Handler
|
||||
);
|
||||
```
|
||||
|
||||
### File Upload Route
|
||||
|
||||
```typescript
|
||||
router.post(
|
||||
'/flyers/upload',
|
||||
authenticate, // 1. Auth check
|
||||
uploadFlyer.array('files', 10), // 2. File handling
|
||||
validate(UploadFlyerSchema), // 3. Metadata validation
|
||||
flyerController.upload, // 4. Handler
|
||||
);
|
||||
```
|
||||
|
||||
### Public Route with Optional Auth
|
||||
|
||||
```typescript
|
||||
router.get(
|
||||
'/flyers/:flyerId',
|
||||
optionalAuth, // 1. Attach user if present
|
||||
flyerController.getById, // 2. Handler (can check req.user)
|
||||
);
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Security**: Defense-in-depth with multiple security layers.
|
||||
- **Consistency**: Predictable request processing order.
|
||||
- **Maintainability**: Clear separation of concerns.
|
||||
- **Debuggability**: Errors caught and logged centrally.
|
||||
- **Flexibility**: Per-route middleware composition.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Order Sensitivity**: Middleware order bugs can be subtle.
|
||||
- **Performance**: Many middleware layers add latency.
|
||||
- **Complexity**: New developers must understand the pipeline.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Document middleware order in comments (as shown above).
|
||||
- Use integration tests that verify middleware chain behavior.
|
||||
- Profile middleware performance in production.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/server.ts` - Global middleware registration
|
||||
- `src/middleware/validation.middleware.ts` - Zod validation
|
||||
- `src/middleware/fileUpload.middleware.ts` - Multer configuration
|
||||
- `src/middleware/multer.middleware.ts` - File upload handling
|
||||
- `src/middleware/errorHandler.middleware.ts` - Error handling (implicit)
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-001](./0001-standardized-error-handling.md) - Error Handling
|
||||
- [ADR-003](./0003-standardized-input-validation-using-middleware.md) - Input Validation
|
||||
- [ADR-016](./0016-api-security-hardening.md) - API Security
|
||||
- [ADR-032](./0032-rate-limiting-strategy.md) - Rate Limiting
|
||||
- [ADR-033](./0033-file-upload-and-storage-strategy.md) - File Uploads
|
||||
275
docs/adr/0044-frontend-feature-organization.md
Normal file
275
docs/adr/0044-frontend-feature-organization.md
Normal file
@@ -0,0 +1,275 @@
|
||||
# ADR-044: Frontend Feature Organization Pattern
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The React frontend has grown to include multiple distinct features:
|
||||
|
||||
- Flyer viewing and management
|
||||
- Shopping list creation
|
||||
- Budget tracking and charts
|
||||
- Voice assistant
|
||||
- User personalization
|
||||
- Admin dashboard
|
||||
|
||||
Without clear organization, code becomes scattered across generic folders (`/components`, `/hooks`, `/utils`), making it hard to:
|
||||
|
||||
1. Understand feature boundaries
|
||||
2. Find related code
|
||||
3. Refactor or remove features
|
||||
4. Onboard new developers
|
||||
|
||||
## Decision
|
||||
|
||||
We will adopt a **feature-based folder structure** where each major feature is self-contained in its own directory under `/features`. Shared code lives in dedicated top-level folders.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Colocation**: Keep related code together (components, hooks, types, utils).
|
||||
- **Feature Independence**: Features should minimize cross-dependencies.
|
||||
- **Shared Extraction**: Only extract to shared folders when truly reused.
|
||||
- **Flat Within Features**: Avoid deep nesting within feature folders.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Directory Structure
|
||||
|
||||
```
|
||||
src/
|
||||
├── features/ # Feature modules
|
||||
│ ├── flyer/ # Flyer viewing/management
|
||||
│ │ ├── components/
|
||||
│ │ ├── hooks/
|
||||
│ │ ├── types.ts
|
||||
│ │ └── index.ts
|
||||
│ ├── shopping/ # Shopping lists
|
||||
│ │ ├── components/
|
||||
│ │ ├── hooks/
|
||||
│ │ └── index.ts
|
||||
│ ├── charts/ # Budget/analytics charts
|
||||
│ │ ├── components/
|
||||
│ │ └── index.ts
|
||||
│ ├── voice-assistant/ # Voice commands
|
||||
│ │ ├── components/
|
||||
│ │ └── index.ts
|
||||
│ └── admin/ # Admin dashboard
|
||||
│ ├── components/
|
||||
│ └── index.ts
|
||||
├── components/ # Shared UI components
|
||||
│ ├── ui/ # Primitive components (Button, Input, etc.)
|
||||
│ ├── layout/ # Layout components (Header, Footer, etc.)
|
||||
│ └── common/ # Shared composite components
|
||||
├── hooks/ # Shared hooks
|
||||
│ ├── queries/ # TanStack Query hooks
|
||||
│ ├── mutations/ # TanStack Mutation hooks
|
||||
│ └── utils/ # Utility hooks (useDebounce, etc.)
|
||||
├── providers/ # React context providers
|
||||
│ ├── AppProviders.tsx
|
||||
│ ├── UserDataProvider.tsx
|
||||
│ └── FlyersProvider.tsx
|
||||
├── pages/ # Route page components
|
||||
├── services/ # API clients, external services
|
||||
├── types/ # Shared TypeScript types
|
||||
├── utils/ # Shared utility functions
|
||||
└── lib/ # Third-party library wrappers
|
||||
```
|
||||
|
||||
### Feature Module Structure
|
||||
|
||||
Each feature follows a consistent internal structure:
|
||||
|
||||
```
|
||||
features/flyer/
|
||||
├── components/
|
||||
│ ├── FlyerCard.tsx
|
||||
│ ├── FlyerGrid.tsx
|
||||
│ ├── FlyerUploader.tsx
|
||||
│ ├── FlyerItemList.tsx
|
||||
│ └── index.ts # Re-exports all components
|
||||
├── hooks/
|
||||
│ ├── useFlyerDetails.ts
|
||||
│ ├── useFlyerUpload.ts
|
||||
│ └── index.ts # Re-exports all hooks
|
||||
├── types.ts # Feature-specific types
|
||||
├── utils.ts # Feature-specific utilities
|
||||
└── index.ts # Public API of the feature
|
||||
```
|
||||
|
||||
### Feature Index File
|
||||
|
||||
Each feature has an `index.ts` that defines its public API:
|
||||
|
||||
```typescript
|
||||
// features/flyer/index.ts
|
||||
export { FlyerCard, FlyerGrid, FlyerUploader } from './components';
|
||||
export { useFlyerDetails, useFlyerUpload } from './hooks';
|
||||
export type { FlyerViewProps, FlyerUploadState } from './types';
|
||||
```
|
||||
|
||||
### Import Patterns
|
||||
|
||||
```typescript
|
||||
// Importing from a feature (preferred)
|
||||
import { FlyerCard, useFlyerDetails } from '@/features/flyer';
|
||||
|
||||
// Importing shared components
|
||||
import { Button, Card } from '@/components/ui';
|
||||
import { useDebounce } from '@/hooks/utils';
|
||||
|
||||
// Avoid: reaching into feature internals
|
||||
// import { FlyerCard } from '@/features/flyer/components/FlyerCard';
|
||||
```
|
||||
|
||||
### Provider Organization
|
||||
|
||||
Located in `src/providers/`:
|
||||
|
||||
```typescript
|
||||
// AppProviders.tsx - Composes all providers
|
||||
export function AppProviders({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<AuthProvider>
|
||||
<UserDataProvider>
|
||||
<FlyersProvider>
|
||||
<ThemeProvider>
|
||||
{children}
|
||||
</ThemeProvider>
|
||||
</FlyersProvider>
|
||||
</UserDataProvider>
|
||||
</AuthProvider>
|
||||
</QueryClientProvider>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Query/Mutation Hook Organization
|
||||
|
||||
Located in `src/hooks/`:
|
||||
|
||||
```typescript
|
||||
// hooks/queries/useFlyersQuery.ts
|
||||
export function useFlyersQuery(options?: { storeId?: number }) {
|
||||
return useQuery({
|
||||
queryKey: ['flyers', options],
|
||||
queryFn: () => flyerService.getFlyers(options),
|
||||
staleTime: 5 * 60 * 1000,
|
||||
});
|
||||
}
|
||||
|
||||
// hooks/mutations/useFlyerUploadMutation.ts
|
||||
export function useFlyerUploadMutation() {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return useMutation({
|
||||
mutationFn: flyerService.uploadFlyer,
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: ['flyers'] });
|
||||
},
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Page Components
|
||||
|
||||
Pages are thin wrappers that compose feature components:
|
||||
|
||||
```typescript
|
||||
// pages/Flyers.tsx
|
||||
import { FlyerGrid, FlyerUploader } from '@/features/flyer';
|
||||
import { PageLayout } from '@/components/layout';
|
||||
|
||||
export function FliversPage() {
|
||||
return (
|
||||
<PageLayout title="My Flyers">
|
||||
<FlyerUploader />
|
||||
<FlyerGrid />
|
||||
</PageLayout>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Cross-Feature Communication
|
||||
|
||||
When features need to communicate, use:
|
||||
|
||||
1. **Shared State Providers**: For global state (user, theme).
|
||||
2. **Query Invalidation**: For data synchronization.
|
||||
3. **Event Bus**: For loose coupling (see ADR-036).
|
||||
|
||||
```typescript
|
||||
// Feature A triggers update
|
||||
const uploadMutation = useFlyerUploadMutation();
|
||||
await uploadMutation.mutateAsync(file);
|
||||
// Query invalidation automatically updates Feature B's flyer list
|
||||
```
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
| Item | Convention | Example |
|
||||
| -------------- | -------------------- | -------------------- |
|
||||
| Feature folder | kebab-case | `voice-assistant/` |
|
||||
| Component file | PascalCase | `FlyerCard.tsx` |
|
||||
| Hook file | camelCase with `use` | `useFlyerDetails.ts` |
|
||||
| Type file | lowercase | `types.ts` |
|
||||
| Utility file | lowercase | `utils.ts` |
|
||||
| Index file | lowercase | `index.ts` |
|
||||
|
||||
## When to Create a New Feature
|
||||
|
||||
Create a new feature folder when:
|
||||
|
||||
1. The functionality is distinct and self-contained.
|
||||
2. It has its own set of components, hooks, and potentially types.
|
||||
3. It could theoretically be extracted into a separate package.
|
||||
4. It has minimal dependencies on other features.
|
||||
|
||||
Do NOT create a feature folder for:
|
||||
|
||||
- A single reusable component (use `components/`).
|
||||
- A single utility function (use `utils/`).
|
||||
- A single hook (use `hooks/`).
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Discoverability**: Easy to find all code related to a feature.
|
||||
- **Encapsulation**: Features have clear boundaries and public APIs.
|
||||
- **Refactoring**: Can modify or remove features with confidence.
|
||||
- **Scalability**: Supports team growth with feature ownership.
|
||||
- **Testing**: Can test features in isolation.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Duplication Risk**: Similar utilities might be duplicated across features.
|
||||
- **Decision Overhead**: Must decide when to extract to shared folders.
|
||||
- **Import Verbosity**: Feature imports can be longer.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Regular refactoring sessions to extract shared code.
|
||||
- Lint rules to prevent importing from feature internals.
|
||||
- Code review focus on proper feature boundaries.
|
||||
|
||||
## Key Directories
|
||||
|
||||
- `src/features/flyer/` - Flyer viewing and management
|
||||
- `src/features/shopping/` - Shopping list functionality
|
||||
- `src/features/charts/` - Budget and analytics charts
|
||||
- `src/features/voice-assistant/` - Voice command interface
|
||||
- `src/features/admin/` - Admin dashboard
|
||||
- `src/components/ui/` - Shared primitive components
|
||||
- `src/hooks/queries/` - TanStack Query hooks
|
||||
- `src/providers/` - React context providers
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md) - State Management
|
||||
- [ADR-012](./0012-frontend-component-library-and-design-system.md) - Component Library
|
||||
- [ADR-026](./0026-standardized-client-side-structured-logging.md) - Client Logging
|
||||
350
docs/adr/0045-test-data-factories-and-fixtures.md
Normal file
350
docs/adr/0045-test-data-factories-and-fixtures.md
Normal file
@@ -0,0 +1,350 @@
|
||||
# ADR-045: Test Data Factories and Fixtures
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The application has a complex domain model with many entity types:
|
||||
|
||||
- Users, Profiles, Addresses
|
||||
- Flyers, FlyerItems, Stores
|
||||
- ShoppingLists, ShoppingListItems
|
||||
- Recipes, RecipeIngredients
|
||||
- Gamification (points, badges, leaderboards)
|
||||
- And more...
|
||||
|
||||
Testing requires realistic mock data that:
|
||||
|
||||
1. Satisfies TypeScript types.
|
||||
2. Has valid relationships between entities.
|
||||
3. Is customizable for specific test scenarios.
|
||||
4. Is consistent across test suites.
|
||||
5. Avoids boilerplate in test files.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a **factory function pattern** for test data generation:
|
||||
|
||||
1. **Centralized Mock Factories**: All factories in a single, organized file.
|
||||
2. **Sensible Defaults**: Each factory produces valid data with minimal input.
|
||||
3. **Override Support**: Factories accept partial overrides for customization.
|
||||
4. **Relationship Helpers**: Factories can generate related entities.
|
||||
5. **Type Safety**: Factories return properly typed objects.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Convention over Configuration**: Factories work with zero arguments.
|
||||
- **Composability**: Factories can call other factories.
|
||||
- **Immutability**: Each call returns a new object (no shared references).
|
||||
- **Predictability**: Deterministic output when seeded.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Factory File Structure
|
||||
|
||||
Located in `src/test/mockFactories.ts`:
|
||||
|
||||
```typescript
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import type {
|
||||
User,
|
||||
UserProfile,
|
||||
Flyer,
|
||||
FlyerItem,
|
||||
ShoppingList,
|
||||
// ... other types
|
||||
} from '../types';
|
||||
|
||||
// ============================================
|
||||
// PRIMITIVE HELPERS
|
||||
// ============================================
|
||||
let idCounter = 1;
|
||||
export const nextId = () => idCounter++;
|
||||
export const resetIdCounter = () => {
|
||||
idCounter = 1;
|
||||
};
|
||||
|
||||
export const randomEmail = () => `user-${uuidv4().slice(0, 8)}@test.com`;
|
||||
export const randomDate = (daysAgo = 0) => {
|
||||
const date = new Date();
|
||||
date.setDate(date.getDate() - daysAgo);
|
||||
return date.toISOString();
|
||||
};
|
||||
|
||||
// ============================================
|
||||
// USER FACTORIES
|
||||
// ============================================
|
||||
export const createMockUser = (overrides: Partial<User> = {}): User => ({
|
||||
user_id: nextId(),
|
||||
email: randomEmail(),
|
||||
name: 'Test User',
|
||||
role: 'user',
|
||||
created_at: randomDate(30),
|
||||
updated_at: randomDate(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockUserProfile = (overrides: Partial<UserProfile> = {}): UserProfile => {
|
||||
const user = createMockUser(overrides.user);
|
||||
return {
|
||||
user,
|
||||
profile: createMockProfile({ user_id: user.user_id, ...overrides.profile }),
|
||||
address: overrides.address ?? null,
|
||||
preferences: overrides.preferences ?? null,
|
||||
};
|
||||
};
|
||||
|
||||
// ============================================
|
||||
// FLYER FACTORIES
|
||||
// ============================================
|
||||
export const createMockFlyer = (overrides: Partial<Flyer> = {}): Flyer => ({
|
||||
flyer_id: nextId(),
|
||||
file_name: 'test-flyer.jpg',
|
||||
image_url: 'https://example.com/flyer.jpg',
|
||||
icon_url: 'https://example.com/flyer-icon.jpg',
|
||||
checksum: uuidv4(),
|
||||
store_name: 'Test Store',
|
||||
store_address: '123 Test St',
|
||||
valid_from: randomDate(7),
|
||||
valid_to: randomDate(-7), // 7 days in future
|
||||
item_count: 10,
|
||||
status: 'approved',
|
||||
uploaded_by: null,
|
||||
created_at: randomDate(7),
|
||||
updated_at: randomDate(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockFlyerItem = (overrides: Partial<FlyerItem> = {}): FlyerItem => ({
|
||||
flyer_item_id: nextId(),
|
||||
flyer_id: overrides.flyer_id ?? nextId(),
|
||||
item: 'Test Product',
|
||||
price_display: '$2.99',
|
||||
price_in_cents: 299,
|
||||
quantity: 'each',
|
||||
category_name: 'Groceries',
|
||||
master_item_id: null,
|
||||
view_count: 0,
|
||||
click_count: 0,
|
||||
created_at: randomDate(7),
|
||||
updated_at: randomDate(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
// ============================================
|
||||
// FLYER WITH ITEMS (COMPOSITE)
|
||||
// ============================================
|
||||
export const createMockFlyerWithItems = (
|
||||
flyerOverrides: Partial<Flyer> = {},
|
||||
itemCount = 5,
|
||||
): { flyer: Flyer; items: FlyerItem[] } => {
|
||||
const flyer = createMockFlyer(flyerOverrides);
|
||||
const items = Array.from({ length: itemCount }, (_, i) =>
|
||||
createMockFlyerItem({
|
||||
flyer_id: flyer.flyer_id,
|
||||
item: `Product ${i + 1}`,
|
||||
price_in_cents: 100 + i * 50,
|
||||
}),
|
||||
);
|
||||
flyer.item_count = items.length;
|
||||
return { flyer, items };
|
||||
};
|
||||
|
||||
// ============================================
|
||||
// SHOPPING LIST FACTORIES
|
||||
// ============================================
|
||||
export const createMockShoppingList = (overrides: Partial<ShoppingList> = {}): ShoppingList => ({
|
||||
shopping_list_id: nextId(),
|
||||
user_id: overrides.user_id ?? nextId(),
|
||||
name: 'Weekly Groceries',
|
||||
is_active: true,
|
||||
created_at: randomDate(14),
|
||||
updated_at: randomDate(),
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockShoppingListItem = (
|
||||
overrides: Partial<ShoppingListItem> = {},
|
||||
): ShoppingListItem => ({
|
||||
shopping_list_item_id: nextId(),
|
||||
shopping_list_id: overrides.shopping_list_id ?? nextId(),
|
||||
item_name: 'Milk',
|
||||
quantity: 1,
|
||||
is_purchased: false,
|
||||
created_at: randomDate(7),
|
||||
updated_at: randomDate(),
|
||||
...overrides,
|
||||
});
|
||||
```
|
||||
|
||||
### Usage in Tests
|
||||
|
||||
```typescript
|
||||
import {
|
||||
createMockUser,
|
||||
createMockFlyer,
|
||||
createMockFlyerWithItems,
|
||||
resetIdCounter,
|
||||
} from '../test/mockFactories';
|
||||
|
||||
describe('FlyerService', () => {
|
||||
beforeEach(() => {
|
||||
resetIdCounter(); // Consistent IDs across tests
|
||||
});
|
||||
|
||||
it('should get flyer by ID', async () => {
|
||||
const mockFlyer = createMockFlyer({ store_name: 'Walmart' });
|
||||
|
||||
mockDb.query.mockResolvedValue({ rows: [mockFlyer] });
|
||||
|
||||
const result = await flyerService.getFlyerById(mockFlyer.flyer_id);
|
||||
|
||||
expect(result.store_name).toBe('Walmart');
|
||||
});
|
||||
|
||||
it('should return flyer with items', async () => {
|
||||
const { flyer, items } = createMockFlyerWithItems(
|
||||
{ store_name: 'Costco' },
|
||||
10, // 10 items
|
||||
);
|
||||
|
||||
mockDb.query.mockResolvedValueOnce({ rows: [flyer] }).mockResolvedValueOnce({ rows: items });
|
||||
|
||||
const result = await flyerService.getFlyerWithItems(flyer.flyer_id);
|
||||
|
||||
expect(result.flyer.store_name).toBe('Costco');
|
||||
expect(result.items).toHaveLength(10);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Bulk Data Generation
|
||||
|
||||
For integration tests or seeding:
|
||||
|
||||
```typescript
|
||||
export const createMockDataset = () => {
|
||||
const users = Array.from({ length: 10 }, () => createMockUser());
|
||||
const flyers = Array.from({ length: 5 }, () => createMockFlyer());
|
||||
const flyersWithItems = flyers.map((flyer) => ({
|
||||
flyer,
|
||||
items: Array.from({ length: Math.floor(Math.random() * 20) + 5 }, () =>
|
||||
createMockFlyerItem({ flyer_id: flyer.flyer_id }),
|
||||
),
|
||||
}));
|
||||
|
||||
return { users, flyers, flyersWithItems };
|
||||
};
|
||||
```
|
||||
|
||||
### API Response Factories
|
||||
|
||||
For testing API handlers:
|
||||
|
||||
```typescript
|
||||
export const createMockApiResponse = <T>(
|
||||
data: T,
|
||||
overrides: Partial<ApiResponse<T>> = {},
|
||||
): ApiResponse<T> => ({
|
||||
success: true,
|
||||
data,
|
||||
meta: {
|
||||
timestamp: new Date().toISOString(),
|
||||
requestId: uuidv4(),
|
||||
...overrides.meta,
|
||||
},
|
||||
...overrides,
|
||||
});
|
||||
|
||||
export const createMockPaginatedResponse = <T>(
|
||||
items: T[],
|
||||
page = 1,
|
||||
pageSize = 20,
|
||||
): PaginatedApiResponse<T> => ({
|
||||
success: true,
|
||||
data: items,
|
||||
meta: {
|
||||
timestamp: new Date().toISOString(),
|
||||
requestId: uuidv4(),
|
||||
},
|
||||
pagination: {
|
||||
page,
|
||||
pageSize,
|
||||
totalItems: items.length,
|
||||
totalPages: Math.ceil(items.length / pageSize),
|
||||
hasMore: false,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Database Query Mock Helpers
|
||||
|
||||
```typescript
|
||||
export const mockQueryResult = <T>(rows: T[]) => ({
|
||||
rows,
|
||||
rowCount: rows.length,
|
||||
});
|
||||
|
||||
export const mockEmptyResult = () => ({
|
||||
rows: [],
|
||||
rowCount: 0,
|
||||
});
|
||||
|
||||
export const mockInsertResult = <T>(inserted: T) => ({
|
||||
rows: [inserted],
|
||||
rowCount: 1,
|
||||
});
|
||||
```
|
||||
|
||||
## Test Cleanup Utilities
|
||||
|
||||
```typescript
|
||||
// For integration tests with real database
|
||||
export const cleanupTestData = async (pool: Pool) => {
|
||||
await pool.query('DELETE FROM flyer_items WHERE flyer_id > 1000000');
|
||||
await pool.query('DELETE FROM flyers WHERE flyer_id > 1000000');
|
||||
await pool.query('DELETE FROM users WHERE user_id > 1000000');
|
||||
};
|
||||
|
||||
// Mark test data with high IDs
|
||||
export const createTestFlyer = (overrides: Partial<Flyer> = {}) =>
|
||||
createMockFlyer({ flyer_id: 1000000 + nextId(), ...overrides });
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Consistency**: All tests use the same factory patterns.
|
||||
- **Type Safety**: Factories return correctly typed objects.
|
||||
- **Reduced Boilerplate**: Tests focus on behavior, not data setup.
|
||||
- **Maintainability**: Update factory once, all tests benefit.
|
||||
- **Flexibility**: Easy to create edge case data.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Single Large File**: Factory file can become large.
|
||||
- **Learning Curve**: New developers must learn factory patterns.
|
||||
- **Maintenance**: Factories must be updated when types change.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Split factories into multiple files if needed (by domain).
|
||||
- Add JSDoc comments explaining each factory.
|
||||
- Use TypeScript to catch type mismatches automatically.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/test/mockFactories.ts` - All mock factory functions
|
||||
- `src/test/testUtils.ts` - Test helper utilities
|
||||
- `src/test/setup.ts` - Global test setup with factory reset
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-010](./0010-testing-strategy-and-standards.md) - Testing Strategy
|
||||
- [ADR-040](./0040-testing-economics-and-priorities.md) - Testing Economics
|
||||
- [ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md) - Type Naming
|
||||
363
docs/adr/0046-image-processing-pipeline.md
Normal file
363
docs/adr/0046-image-processing-pipeline.md
Normal file
@@ -0,0 +1,363 @@
|
||||
# ADR-046: Image Processing Pipeline
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-09
|
||||
|
||||
## Context
|
||||
|
||||
The application handles significant image processing for flyer uploads:
|
||||
|
||||
1. **Privacy Protection**: Strip EXIF metadata (location, device info).
|
||||
2. **Optimization**: Resize, compress, and convert images for web delivery.
|
||||
3. **Icon Generation**: Create thumbnails for listing views.
|
||||
4. **Format Support**: Handle JPEG, PNG, WebP, and PDF inputs.
|
||||
5. **Storage Management**: Organize processed images on disk.
|
||||
|
||||
These operations must be:
|
||||
|
||||
- **Performant**: Large images should not block the request.
|
||||
- **Secure**: Prevent malicious file uploads.
|
||||
- **Consistent**: Produce predictable output quality.
|
||||
- **Testable**: Support unit testing without real files.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a modular image processing pipeline using:
|
||||
|
||||
1. **Sharp**: For image resizing, compression, and format conversion.
|
||||
2. **EXIF Parsing**: For metadata extraction and stripping.
|
||||
3. **UUID Naming**: For unique, non-guessable file names.
|
||||
4. **Directory Structure**: Organized storage for originals and derivatives.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Pipeline Pattern**: Chain processing steps in a predictable order.
|
||||
- **Fail-Fast Validation**: Reject invalid files before processing.
|
||||
- **Idempotent Operations**: Same input produces same output.
|
||||
- **Resource Cleanup**: Delete temp files on error.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Image Processor Module
|
||||
|
||||
Located in `src/utils/imageProcessor.ts`:
|
||||
|
||||
```typescript
|
||||
import sharp from 'sharp';
|
||||
import path from 'path';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import fs from 'fs/promises';
|
||||
import type { Logger } from 'pino';
|
||||
|
||||
// ============================================
|
||||
// CONFIGURATION
|
||||
// ============================================
|
||||
const IMAGE_CONFIG = {
|
||||
maxWidth: 2048,
|
||||
maxHeight: 2048,
|
||||
quality: 85,
|
||||
iconSize: 200,
|
||||
allowedFormats: ['jpeg', 'png', 'webp', 'avif'],
|
||||
outputFormat: 'webp' as const,
|
||||
};
|
||||
|
||||
// ============================================
|
||||
// MAIN PROCESSING FUNCTION
|
||||
// ============================================
|
||||
export async function processAndSaveImage(
|
||||
inputPath: string,
|
||||
outputDir: string,
|
||||
originalFileName: string,
|
||||
logger: Logger,
|
||||
): Promise<string> {
|
||||
const outputFileName = `${uuidv4()}.${IMAGE_CONFIG.outputFormat}`;
|
||||
const outputPath = path.join(outputDir, outputFileName);
|
||||
|
||||
logger.info({ inputPath, outputPath }, 'Processing image');
|
||||
|
||||
try {
|
||||
// Create sharp instance and strip metadata
|
||||
await sharp(inputPath)
|
||||
.rotate() // Auto-rotate based on EXIF orientation
|
||||
.resize(IMAGE_CONFIG.maxWidth, IMAGE_CONFIG.maxHeight, {
|
||||
fit: 'inside',
|
||||
withoutEnlargement: true,
|
||||
})
|
||||
.webp({ quality: IMAGE_CONFIG.quality })
|
||||
.toFile(outputPath);
|
||||
|
||||
logger.info({ outputPath }, 'Image processed successfully');
|
||||
return outputFileName;
|
||||
} catch (error) {
|
||||
logger.error({ error, inputPath }, 'Image processing failed');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Icon Generation
|
||||
|
||||
```typescript
|
||||
export async function generateFlyerIcon(
|
||||
inputPath: string,
|
||||
iconsDir: string,
|
||||
logger: Logger,
|
||||
): Promise<string> {
|
||||
// Ensure icons directory exists
|
||||
await fs.mkdir(iconsDir, { recursive: true });
|
||||
|
||||
const iconFileName = `${uuidv4()}-icon.webp`;
|
||||
const iconPath = path.join(iconsDir, iconFileName);
|
||||
|
||||
logger.info({ inputPath, iconPath }, 'Generating icon');
|
||||
|
||||
await sharp(inputPath)
|
||||
.resize(IMAGE_CONFIG.iconSize, IMAGE_CONFIG.iconSize, {
|
||||
fit: 'cover',
|
||||
position: 'top', // Flyers usually have store name at top
|
||||
})
|
||||
.webp({ quality: 80 })
|
||||
.toFile(iconPath);
|
||||
|
||||
logger.info({ iconPath }, 'Icon generated successfully');
|
||||
return iconFileName;
|
||||
}
|
||||
```
|
||||
|
||||
### EXIF Metadata Extraction
|
||||
|
||||
For audit/logging purposes before stripping:
|
||||
|
||||
```typescript
|
||||
import ExifParser from 'exif-parser';
|
||||
|
||||
export async function extractExifMetadata(
|
||||
filePath: string,
|
||||
logger: Logger,
|
||||
): Promise<ExifMetadata | null> {
|
||||
try {
|
||||
const buffer = await fs.readFile(filePath);
|
||||
const parser = ExifParser.create(buffer);
|
||||
const result = parser.parse();
|
||||
|
||||
const metadata: ExifMetadata = {
|
||||
make: result.tags?.Make,
|
||||
model: result.tags?.Model,
|
||||
dateTime: result.tags?.DateTimeOriginal,
|
||||
gpsLatitude: result.tags?.GPSLatitude,
|
||||
gpsLongitude: result.tags?.GPSLongitude,
|
||||
orientation: result.tags?.Orientation,
|
||||
};
|
||||
|
||||
// Log if GPS data was present (privacy concern)
|
||||
if (metadata.gpsLatitude || metadata.gpsLongitude) {
|
||||
logger.info({ filePath }, 'GPS data found in image, will be stripped during processing');
|
||||
}
|
||||
|
||||
return metadata;
|
||||
} catch (error) {
|
||||
logger.debug({ error, filePath }, 'No EXIF data found or parsing failed');
|
||||
return null;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### PDF to Image Conversion
|
||||
|
||||
```typescript
|
||||
import * as pdfjs from 'pdfjs-dist';
|
||||
|
||||
export async function convertPdfToImages(
|
||||
pdfPath: string,
|
||||
outputDir: string,
|
||||
logger: Logger,
|
||||
): Promise<string[]> {
|
||||
const pdfData = await fs.readFile(pdfPath);
|
||||
const pdf = await pdfjs.getDocument({ data: pdfData }).promise;
|
||||
|
||||
const outputPaths: string[] = [];
|
||||
|
||||
for (let i = 1; i <= pdf.numPages; i++) {
|
||||
const page = await pdf.getPage(i);
|
||||
const viewport = page.getViewport({ scale: 2.0 }); // 2x for quality
|
||||
|
||||
// Create canvas and render
|
||||
const canvas = createCanvas(viewport.width, viewport.height);
|
||||
const context = canvas.getContext('2d');
|
||||
|
||||
await page.render({
|
||||
canvasContext: context,
|
||||
viewport: viewport,
|
||||
}).promise;
|
||||
|
||||
// Save as image
|
||||
const outputFileName = `${uuidv4()}-page-${i}.png`;
|
||||
const outputPath = path.join(outputDir, outputFileName);
|
||||
const buffer = canvas.toBuffer('image/png');
|
||||
await fs.writeFile(outputPath, buffer);
|
||||
|
||||
outputPaths.push(outputPath);
|
||||
logger.info({ page: i, outputPath }, 'PDF page converted to image');
|
||||
}
|
||||
|
||||
return outputPaths;
|
||||
}
|
||||
```
|
||||
|
||||
### File Validation
|
||||
|
||||
```typescript
|
||||
import { fileTypeFromBuffer } from 'file-type';
|
||||
|
||||
export async function validateImageFile(
|
||||
filePath: string,
|
||||
logger: Logger,
|
||||
): Promise<{ valid: boolean; mimeType: string | null; error?: string }> {
|
||||
try {
|
||||
const buffer = await fs.readFile(filePath, { length: 4100 }); // Read header only
|
||||
const type = await fileTypeFromBuffer(buffer);
|
||||
|
||||
if (!type) {
|
||||
return { valid: false, mimeType: null, error: 'Unknown file type' };
|
||||
}
|
||||
|
||||
const allowedMimes = ['image/jpeg', 'image/png', 'image/webp', 'image/avif', 'application/pdf'];
|
||||
|
||||
if (!allowedMimes.includes(type.mime)) {
|
||||
return {
|
||||
valid: false,
|
||||
mimeType: type.mime,
|
||||
error: `File type ${type.mime} not allowed`,
|
||||
};
|
||||
}
|
||||
|
||||
return { valid: true, mimeType: type.mime };
|
||||
} catch (error) {
|
||||
logger.error({ error, filePath }, 'File validation failed');
|
||||
return { valid: false, mimeType: null, error: 'Validation error' };
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Storage Organization
|
||||
|
||||
```
|
||||
flyer-images/
|
||||
├── originals/ # Uploaded files (if kept)
|
||||
│ └── {uuid}.{ext}
|
||||
├── processed/ # Optimized images (or root level)
|
||||
│ └── {uuid}.webp
|
||||
├── icons/ # Thumbnails
|
||||
│ └── {uuid}-icon.webp
|
||||
└── temp/ # Temporary processing files
|
||||
└── {uuid}.tmp
|
||||
```
|
||||
|
||||
### Cleanup Utilities
|
||||
|
||||
```typescript
|
||||
export async function cleanupTempFiles(
|
||||
tempDir: string,
|
||||
maxAgeMs: number,
|
||||
logger: Logger,
|
||||
): Promise<number> {
|
||||
const files = await fs.readdir(tempDir);
|
||||
const now = Date.now();
|
||||
let deletedCount = 0;
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(tempDir, file);
|
||||
const stats = await fs.stat(filePath);
|
||||
const age = now - stats.mtimeMs;
|
||||
|
||||
if (age > maxAgeMs) {
|
||||
await fs.unlink(filePath);
|
||||
deletedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info({ deletedCount, tempDir }, 'Cleaned up temp files');
|
||||
return deletedCount;
|
||||
}
|
||||
```
|
||||
|
||||
### Integration with Flyer Processing
|
||||
|
||||
```typescript
|
||||
// In flyerProcessingService.ts
|
||||
export async function processUploadedFlyer(
|
||||
file: Express.Multer.File,
|
||||
logger: Logger,
|
||||
): Promise<{ imageUrl: string; iconUrl: string }> {
|
||||
const flyerImageDir = 'flyer-images';
|
||||
const iconsDir = path.join(flyerImageDir, 'icons');
|
||||
|
||||
// 1. Validate file
|
||||
const validation = await validateImageFile(file.path, logger);
|
||||
if (!validation.valid) {
|
||||
throw new ValidationError([{ path: 'file', message: validation.error! }]);
|
||||
}
|
||||
|
||||
// 2. Extract and log EXIF before stripping
|
||||
await extractExifMetadata(file.path, logger);
|
||||
|
||||
// 3. Process and optimize image
|
||||
const processedFileName = await processAndSaveImage(
|
||||
file.path,
|
||||
flyerImageDir,
|
||||
file.originalname,
|
||||
logger,
|
||||
);
|
||||
|
||||
// 4. Generate icon
|
||||
const processedImagePath = path.join(flyerImageDir, processedFileName);
|
||||
const iconFileName = await generateFlyerIcon(processedImagePath, iconsDir, logger);
|
||||
|
||||
// 5. Construct URLs
|
||||
const baseUrl = process.env.BACKEND_URL || 'http://localhost:3001';
|
||||
const imageUrl = `${baseUrl}/flyer-images/${processedFileName}`;
|
||||
const iconUrl = `${baseUrl}/flyer-images/icons/${iconFileName}`;
|
||||
|
||||
// 6. Delete original upload (privacy)
|
||||
await fs.unlink(file.path);
|
||||
|
||||
return { imageUrl, iconUrl };
|
||||
}
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Privacy**: EXIF metadata (including GPS) is stripped automatically.
|
||||
- **Performance**: WebP output reduces file sizes by 25-35%.
|
||||
- **Consistency**: All images processed to standard format and dimensions.
|
||||
- **Security**: File type validation prevents malicious uploads.
|
||||
- **Organization**: Clear directory structure for storage management.
|
||||
|
||||
### Negative
|
||||
|
||||
- **CPU Intensive**: Image processing can be slow for large files.
|
||||
- **Storage**: Keeping originals doubles storage requirements.
|
||||
- **Dependency**: Sharp requires native binaries.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Process images in background jobs (BullMQ queue).
|
||||
- Configure whether to keep originals based on requirements.
|
||||
- Use pre-built Sharp binaries via npm.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `src/utils/imageProcessor.ts` - Core image processing functions
|
||||
- `src/services/flyer/flyerProcessingService.ts` - Integration with flyer workflow
|
||||
- `src/middleware/fileUpload.middleware.ts` - Multer configuration
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-033](./0033-file-upload-and-storage-strategy.md) - File Upload Strategy
|
||||
- [ADR-006](./0006-background-job-processing-and-task-queues.md) - Background Jobs
|
||||
- [ADR-041](./0041-ai-gemini-integration-architecture.md) - AI Integration (uses processed images)
|
||||
545
docs/adr/0047-project-file-and-folder-organization.md
Normal file
545
docs/adr/0047-project-file-and-folder-organization.md
Normal file
@@ -0,0 +1,545 @@
|
||||
# ADR-047: Project File and Folder Organization
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Proposed
|
||||
|
||||
**Effort**: XL (Major reorganization across entire codebase)
|
||||
|
||||
## Context
|
||||
|
||||
The project has grown organically with a mix of organizational patterns:
|
||||
|
||||
- **By Type**: Components, hooks, middleware, utilities, types all in flat directories
|
||||
- **By Feature**: Routes, database modules, and partial feature directories
|
||||
- **Mixed Concerns**: Frontend and backend code intermingled in `src/`
|
||||
|
||||
Current pain points:
|
||||
|
||||
1. **Flat services directory**: 75+ files with no subdirectory grouping
|
||||
2. **Monolithic types.ts**: 750+ lines, unclear when to add new types
|
||||
3. **Flat components directory**: 43+ components at root level
|
||||
4. **Incomplete feature modules**: Features contain only UI, not domain logic
|
||||
5. **No clear frontend/backend separation**: Both share `src/` root
|
||||
|
||||
As the project scales, these issues compound, making navigation, refactoring, and onboarding increasingly difficult.
|
||||
|
||||
## Decision
|
||||
|
||||
We will adopt a **domain-driven organization** with clear separation between:
|
||||
|
||||
1. **Client code** (React, browser-only)
|
||||
2. **Server code** (Express, Node-only)
|
||||
3. **Shared code** (Types, utilities used by both)
|
||||
|
||||
Within each layer, organize by **feature/domain** rather than by file type.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Colocation**: Related code lives together (components, hooks, types, tests)
|
||||
- **Explicit Boundaries**: Clear separation between client, server, and shared
|
||||
- **Feature Ownership**: Each domain owns its entire vertical slice
|
||||
- **Discoverability**: New developers can find code by thinking about features, not file types
|
||||
- **Incremental Migration**: Structure supports gradual transition from current layout
|
||||
|
||||
## Target Directory Structure
|
||||
|
||||
```
|
||||
src/
|
||||
├── client/ # React frontend (browser-only code)
|
||||
│ ├── app/ # App shell and routing
|
||||
│ │ ├── App.tsx
|
||||
│ │ ├── routes.tsx
|
||||
│ │ └── providers/ # React context providers
|
||||
│ │ ├── AppProviders.tsx
|
||||
│ │ ├── AuthProvider.tsx
|
||||
│ │ ├── FlyersProvider.tsx
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── features/ # Feature modules (UI + hooks + types)
|
||||
│ │ ├── auth/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ │ ├── LoginForm.tsx
|
||||
│ │ │ │ ├── RegisterForm.tsx
|
||||
│ │ │ │ └── index.ts
|
||||
│ │ │ ├── hooks/
|
||||
│ │ │ │ ├── useAuth.ts
|
||||
│ │ │ │ ├── useLogin.ts
|
||||
│ │ │ │ └── index.ts
|
||||
│ │ │ ├── types.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── flyer/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ │ ├── FlyerCard.tsx
|
||||
│ │ │ │ ├── FlyerGrid.tsx
|
||||
│ │ │ │ ├── FlyerUploader.tsx
|
||||
│ │ │ │ ├── BulkImporter.tsx
|
||||
│ │ │ │ └── index.ts
|
||||
│ │ │ ├── hooks/
|
||||
│ │ │ │ ├── useFlyersQuery.ts
|
||||
│ │ │ │ ├── useFlyerUploadMutation.ts
|
||||
│ │ │ │ └── index.ts
|
||||
│ │ │ ├── types.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── shopping/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ ├── hooks/
|
||||
│ │ │ ├── types.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── recipes/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ ├── hooks/
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── charts/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── voice-assistant/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── user/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ ├── hooks/
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── gamification/
|
||||
│ │ │ ├── components/
|
||||
│ │ │ ├── hooks/
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ └── admin/
|
||||
│ │ ├── components/
|
||||
│ │ ├── hooks/
|
||||
│ │ ├── pages/ # Admin-specific pages
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── pages/ # Route page components
|
||||
│ │ ├── HomePage.tsx
|
||||
│ │ ├── MyDealsPage.tsx
|
||||
│ │ ├── UserProfilePage.tsx
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── components/ # Shared UI components
|
||||
│ │ ├── ui/ # Primitive components (design system)
|
||||
│ │ │ ├── Button.tsx
|
||||
│ │ │ ├── Card.tsx
|
||||
│ │ │ ├── Input.tsx
|
||||
│ │ │ ├── Modal.tsx
|
||||
│ │ │ ├── Badge.tsx
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── layout/ # Layout components
|
||||
│ │ │ ├── Header.tsx
|
||||
│ │ │ ├── Footer.tsx
|
||||
│ │ │ ├── Sidebar.tsx
|
||||
│ │ │ ├── PageLayout.tsx
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── feedback/ # User feedback components
|
||||
│ │ │ ├── LoadingSpinner.tsx
|
||||
│ │ │ ├── ErrorMessage.tsx
|
||||
│ │ │ ├── Toast.tsx
|
||||
│ │ │ ├── ConfirmDialog.tsx
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── forms/ # Form components
|
||||
│ │ │ ├── FormField.tsx
|
||||
│ │ │ ├── SearchInput.tsx
|
||||
│ │ │ ├── DatePicker.tsx
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── icons/ # Icon components
|
||||
│ │ │ ├── ChevronIcon.tsx
|
||||
│ │ │ ├── UserIcon.tsx
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── hooks/ # Shared hooks (not feature-specific)
|
||||
│ │ ├── useDebounce.ts
|
||||
│ │ ├── useLocalStorage.ts
|
||||
│ │ ├── useMediaQuery.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── services/ # Client-side services (API clients)
|
||||
│ │ ├── apiClient.ts
|
||||
│ │ ├── logger.client.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── lib/ # Third-party library wrappers
|
||||
│ │ ├── queryClient.ts
|
||||
│ │ ├── toast.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ └── styles/ # Global styles
|
||||
│ ├── globals.css
|
||||
│ └── tailwind.css
|
||||
│
|
||||
├── server/ # Express backend (Node-only code)
|
||||
│ ├── app.ts # Express app setup
|
||||
│ ├── server.ts # Server entry point
|
||||
│ │
|
||||
│ ├── domains/ # Domain modules (business logic)
|
||||
│ │ ├── auth/
|
||||
│ │ │ ├── auth.service.ts
|
||||
│ │ │ ├── auth.routes.ts
|
||||
│ │ │ ├── auth.controller.ts
|
||||
│ │ │ ├── auth.repository.ts
|
||||
│ │ │ ├── auth.types.ts
|
||||
│ │ │ ├── auth.service.test.ts
|
||||
│ │ │ ├── auth.routes.test.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── flyer/
|
||||
│ │ │ ├── flyer.service.ts
|
||||
│ │ │ ├── flyer.routes.ts
|
||||
│ │ │ ├── flyer.controller.ts
|
||||
│ │ │ ├── flyer.repository.ts
|
||||
│ │ │ ├── flyer.types.ts
|
||||
│ │ │ ├── flyer.processing.ts # Flyer-specific processing logic
|
||||
│ │ │ ├── flyer.ai.ts # AI integration for flyers
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── user/
|
||||
│ │ │ ├── user.service.ts
|
||||
│ │ │ ├── user.routes.ts
|
||||
│ │ │ ├── user.controller.ts
|
||||
│ │ │ ├── user.repository.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── shopping/
|
||||
│ │ │ ├── shopping.service.ts
|
||||
│ │ │ ├── shopping.routes.ts
|
||||
│ │ │ ├── shopping.repository.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── recipe/
|
||||
│ │ │ ├── recipe.service.ts
|
||||
│ │ │ ├── recipe.routes.ts
|
||||
│ │ │ ├── recipe.repository.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── gamification/
|
||||
│ │ │ ├── gamification.service.ts
|
||||
│ │ │ ├── gamification.routes.ts
|
||||
│ │ │ ├── gamification.repository.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── notification/
|
||||
│ │ │ ├── notification.service.ts
|
||||
│ │ │ ├── email.service.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── ai/
|
||||
│ │ │ ├── ai.service.ts
|
||||
│ │ │ ├── ai.client.ts
|
||||
│ │ │ ├── ai.prompts.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ └── admin/
|
||||
│ │ ├── admin.routes.ts
|
||||
│ │ ├── admin.controller.ts
|
||||
│ │ ├── admin.service.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── middleware/ # Express middleware
|
||||
│ │ ├── auth.middleware.ts
|
||||
│ │ ├── validation.middleware.ts
|
||||
│ │ ├── errorHandler.middleware.ts
|
||||
│ │ ├── rateLimit.middleware.ts
|
||||
│ │ ├── fileUpload.middleware.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── infrastructure/ # Cross-cutting infrastructure
|
||||
│ │ ├── database/
|
||||
│ │ │ ├── pool.ts
|
||||
│ │ │ ├── migrations/
|
||||
│ │ │ └── seeds/
|
||||
│ │ │
|
||||
│ │ ├── cache/
|
||||
│ │ │ ├── redis.ts
|
||||
│ │ │ └── cacheService.ts
|
||||
│ │ │
|
||||
│ │ ├── queue/
|
||||
│ │ │ ├── queueService.ts
|
||||
│ │ │ ├── workers/
|
||||
│ │ │ │ ├── email.worker.ts
|
||||
│ │ │ │ ├── flyer.worker.ts
|
||||
│ │ │ │ └── index.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── jobs/
|
||||
│ │ │ ├── cronJobs.ts
|
||||
│ │ │ ├── dailyAnalytics.job.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ └── logging/
|
||||
│ │ ├── logger.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── config/ # Server configuration
|
||||
│ │ ├── database.config.ts
|
||||
│ │ ├── redis.config.ts
|
||||
│ │ ├── auth.config.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ └── utils/ # Server-only utilities
|
||||
│ ├── imageProcessor.ts
|
||||
│ ├── geocoding.ts
|
||||
│ └── index.ts
|
||||
│
|
||||
├── shared/ # Code shared between client and server
|
||||
│ ├── types/ # Shared TypeScript types
|
||||
│ │ ├── entities/ # Domain entities
|
||||
│ │ │ ├── flyer.types.ts
|
||||
│ │ │ ├── user.types.ts
|
||||
│ │ │ ├── shopping.types.ts
|
||||
│ │ │ ├── recipe.types.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ ├── api/ # API contract types
|
||||
│ │ │ ├── requests.ts
|
||||
│ │ │ ├── responses.ts
|
||||
│ │ │ ├── errors.ts
|
||||
│ │ │ └── index.ts
|
||||
│ │ │
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── schemas/ # Zod validation schemas
|
||||
│ │ ├── flyer.schema.ts
|
||||
│ │ ├── user.schema.ts
|
||||
│ │ ├── auth.schema.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── constants/ # Shared constants
|
||||
│ │ ├── categories.ts
|
||||
│ │ ├── errorCodes.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ └── utils/ # Isomorphic utilities
|
||||
│ ├── formatting.ts
|
||||
│ ├── validation.ts
|
||||
│ └── index.ts
|
||||
│
|
||||
├── tests/ # Test infrastructure
|
||||
│ ├── setup/
|
||||
│ │ ├── vitest.setup.ts
|
||||
│ │ └── testDb.setup.ts
|
||||
│ │
|
||||
│ ├── fixtures/
|
||||
│ │ ├── mockFactories.ts
|
||||
│ │ ├── sampleFlyers/
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── utils/
|
||||
│ │ ├── testHelpers.ts
|
||||
│ │ └── index.ts
|
||||
│ │
|
||||
│ ├── integration/ # Integration tests
|
||||
│ │ ├── api/
|
||||
│ │ └── database/
|
||||
│ │
|
||||
│ └── e2e/ # End-to-end tests
|
||||
│ └── flows/
|
||||
│
|
||||
├── scripts/ # Build and utility scripts
|
||||
│ ├── seed.ts
|
||||
│ ├── migrate.ts
|
||||
│ └── generateTypes.ts
|
||||
│
|
||||
└── index.tsx # Client entry point
|
||||
```
|
||||
|
||||
## Domain Module Structure
|
||||
|
||||
Each server domain follows a consistent structure:
|
||||
|
||||
```
|
||||
domains/flyer/
|
||||
├── flyer.service.ts # Business logic
|
||||
├── flyer.routes.ts # Express routes
|
||||
├── flyer.controller.ts # Route handlers
|
||||
├── flyer.repository.ts # Database access
|
||||
├── flyer.types.ts # Domain-specific types
|
||||
├── flyer.service.test.ts # Service tests
|
||||
├── flyer.routes.test.ts # Route tests
|
||||
└── index.ts # Public API
|
||||
```
|
||||
|
||||
### Domain Index Pattern
|
||||
|
||||
Each domain exports a clean public API:
|
||||
|
||||
```typescript
|
||||
// server/domains/flyer/index.ts
|
||||
export { FlyerService } from './flyer.service';
|
||||
export { flyerRoutes } from './flyer.routes';
|
||||
export type { FlyerWithItems, FlyerCreateInput } from './flyer.types';
|
||||
```
|
||||
|
||||
## Client Feature Module Structure
|
||||
|
||||
Each client feature follows a consistent structure:
|
||||
|
||||
```
|
||||
client/features/flyer/
|
||||
├── components/
|
||||
│ ├── FlyerCard.tsx
|
||||
│ ├── FlyerCard.test.tsx
|
||||
│ ├── FlyerGrid.tsx
|
||||
│ └── index.ts
|
||||
├── hooks/
|
||||
│ ├── useFlyersQuery.ts
|
||||
│ ├── useFlyerUploadMutation.ts
|
||||
│ └── index.ts
|
||||
├── types.ts # Feature-specific client types
|
||||
└── index.ts # Public API
|
||||
```
|
||||
|
||||
## Import Path Aliases
|
||||
|
||||
Configure TypeScript and bundler for clean imports:
|
||||
|
||||
```typescript
|
||||
// tsconfig.json paths
|
||||
{
|
||||
"paths": {
|
||||
"@/client/*": ["src/client/*"],
|
||||
"@/server/*": ["src/server/*"],
|
||||
"@/shared/*": ["src/shared/*"],
|
||||
"@/tests/*": ["src/tests/*"]
|
||||
}
|
||||
}
|
||||
|
||||
// Usage examples
|
||||
import { Button, Card } from '@/client/components/ui';
|
||||
import { useFlyersQuery } from '@/client/features/flyer';
|
||||
import { FlyerService } from '@/server/domains/flyer';
|
||||
import type { Flyer } from '@/shared/types/entities';
|
||||
```
|
||||
|
||||
## Migration Strategy
|
||||
|
||||
Given the scope of this reorganization, migrate incrementally:
|
||||
|
||||
### Phase 1: Create Directory Structure
|
||||
|
||||
1. Create `client/`, `server/`, `shared/` directories
|
||||
2. Set up path aliases in tsconfig.json
|
||||
3. Update build configuration (Vite)
|
||||
|
||||
### Phase 2: Migrate Shared Code
|
||||
|
||||
1. Move types to `shared/types/`
|
||||
2. Move schemas to `shared/schemas/`
|
||||
3. Move shared utils to `shared/utils/`
|
||||
4. Update imports across codebase
|
||||
|
||||
### Phase 3: Migrate Server Code
|
||||
|
||||
1. Create `server/domains/` structure
|
||||
2. Move one domain at a time (start with `auth` or `user`)
|
||||
3. Move each service + routes + repository together
|
||||
4. Update route registration in app.ts
|
||||
5. Run tests after each domain migration
|
||||
|
||||
### Phase 4: Migrate Client Code
|
||||
|
||||
1. Create `client/features/` structure
|
||||
2. Move components into features
|
||||
3. Move hooks into features or shared hooks
|
||||
4. Move pages to `client/pages/`
|
||||
5. Organize shared components into categories
|
||||
|
||||
### Phase 5: Cleanup
|
||||
|
||||
1. Remove empty old directories
|
||||
2. Update all remaining imports
|
||||
3. Update CI/CD paths if needed
|
||||
4. Update documentation
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
| Item | Convention | Example |
|
||||
| ----------------- | -------------------- | ----------------------- |
|
||||
| Domain directory | lowercase | `flyer/`, `shopping/` |
|
||||
| Feature directory | kebab-case | `voice-assistant/` |
|
||||
| Service file | domain.service.ts | `flyer.service.ts` |
|
||||
| Route file | domain.routes.ts | `flyer.routes.ts` |
|
||||
| Repository file | domain.repository.ts | `flyer.repository.ts` |
|
||||
| Component file | PascalCase.tsx | `FlyerCard.tsx` |
|
||||
| Hook file | camelCase.ts | `useFlyersQuery.ts` |
|
||||
| Type file | domain.types.ts | `flyer.types.ts` |
|
||||
| Test file | \*.test.ts(x) | `flyer.service.test.ts` |
|
||||
| Index file | index.ts | `index.ts` |
|
||||
|
||||
## File Placement Guidelines
|
||||
|
||||
**Where does this file go?**
|
||||
|
||||
| If the file is... | Place it in... |
|
||||
| ------------------------------------ | ------------------------------------------------ |
|
||||
| Used only by React | `client/` |
|
||||
| Used only by Express/Node | `server/` |
|
||||
| TypeScript types used by both | `shared/types/` |
|
||||
| Zod schemas | `shared/schemas/` |
|
||||
| React component for one feature | `client/features/{feature}/components/` |
|
||||
| React component used across features | `client/components/` |
|
||||
| React hook for one feature | `client/features/{feature}/hooks/` |
|
||||
| React hook used across features | `client/hooks/` |
|
||||
| Business logic for a domain | `server/domains/{domain}/` |
|
||||
| Database access for a domain | `server/domains/{domain}/{domain}.repository.ts` |
|
||||
| Express middleware | `server/middleware/` |
|
||||
| Background job worker | `server/infrastructure/queue/workers/` |
|
||||
| Cron job definition | `server/infrastructure/jobs/` |
|
||||
| Test factory/fixture | `tests/fixtures/` |
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Clear Boundaries**: Frontend, backend, and shared code are explicitly separated
|
||||
- **Feature Discoverability**: Find all code for a feature in one place
|
||||
- **Parallel Development**: Teams can work on domains independently
|
||||
- **Easier Refactoring**: Domain boundaries make changes localized
|
||||
- **Better Onboarding**: New developers navigate by feature, not file type
|
||||
- **Scalability**: Structure supports growth without becoming unwieldy
|
||||
|
||||
### Negative
|
||||
|
||||
- **Large Migration Effort**: Significant one-time cost (XL effort)
|
||||
- **Import Updates**: All imports need updating
|
||||
- **Learning Curve**: Team must learn new structure
|
||||
- **Merge Conflicts**: In-flight PRs will need rebasing
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Use automated tools (e.g., `ts-morph`) to update imports
|
||||
- Migrate one domain/feature at a time
|
||||
- Create a migration checklist and track progress
|
||||
- Coordinate with team to minimize in-flight work during migration phases
|
||||
- Consider using feature flags to ship incrementally
|
||||
|
||||
## Key Differences from Current Structure
|
||||
|
||||
| Aspect | Current | Target |
|
||||
| ---------------- | -------------------------- | ----------------------------------------- |
|
||||
| Frontend/Backend | Mixed in `src/` | Separated in `client/` and `server/` |
|
||||
| Services | Flat directory (75+ files) | Grouped by domain |
|
||||
| Components | Flat directory (43+ files) | Categorized (ui, layout, feedback, forms) |
|
||||
| Types | Monolithic `types.ts` | Split by entity in `shared/types/` |
|
||||
| Features | UI-only | Full vertical slice (UI + hooks + types) |
|
||||
| Routes | Separate from services | Co-located in domain |
|
||||
| Tests | Co-located + `tests/` | Co-located + `tests/` for fixtures |
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-034](./0034-repository-pattern-standards.md) - Repository Pattern (affects domain structure)
|
||||
- [ADR-035](./0035-service-layer-architecture.md) - Service Layer (affects domain structure)
|
||||
- [ADR-044](./0044-frontend-feature-organization.md) - Frontend Features (this ADR supersedes it)
|
||||
- [ADR-045](./0045-test-data-factories-and-fixtures.md) - Test Fixtures (affects tests/ directory)
|
||||
419
docs/adr/0048-authentication-strategy.md
Normal file
419
docs/adr/0048-authentication-strategy.md
Normal file
@@ -0,0 +1,419 @@
|
||||
# ADR-048: Authentication Strategy
|
||||
|
||||
**Date**: 2026-01-09
|
||||
|
||||
**Status**: Partially Implemented
|
||||
|
||||
**Implemented**: 2026-01-09 (Local auth only)
|
||||
|
||||
## Context
|
||||
|
||||
The application requires a secure authentication system that supports both traditional email/password login and social OAuth providers (Google, GitHub). The system must handle user sessions, token refresh, account security (lockout after failed attempts), and integrate seamlessly with the existing Express middleware pipeline.
|
||||
|
||||
Currently, **only local authentication is enabled**. OAuth strategies are fully implemented but commented out, pending configuration of OAuth provider credentials.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a stateless JWT-based authentication system with the following components:
|
||||
|
||||
1. **Local Authentication**: Email/password login with bcrypt hashing.
|
||||
2. **OAuth Authentication**: Google and GitHub OAuth 2.0 (currently disabled).
|
||||
3. **JWT Access Tokens**: Short-lived tokens (15 minutes) for API authentication.
|
||||
4. **Refresh Tokens**: Long-lived tokens (7 days) stored in HTTP-only cookies.
|
||||
5. **Account Security**: Lockout after 5 failed login attempts for 15 minutes.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Stateless Sessions**: No server-side session storage; JWT contains all auth state.
|
||||
- **Defense in Depth**: Multiple security layers (rate limiting, lockout, secure cookies).
|
||||
- **Graceful OAuth Degradation**: OAuth is optional; system works with local auth only.
|
||||
- **OAuth User Flexibility**: OAuth users have `password_hash = NULL` in database.
|
||||
|
||||
## Current Implementation Status
|
||||
|
||||
| Component | Status | Notes |
|
||||
| ------------------------ | ------- | ----------------------------------------------------------- |
|
||||
| **Local Authentication** | Enabled | Email/password with bcrypt (salt rounds = 10) |
|
||||
| **JWT Access Tokens** | Enabled | 15-minute expiry, `Authorization: Bearer` header |
|
||||
| **Refresh Tokens** | Enabled | 7-day expiry, HTTP-only cookie |
|
||||
| **Account Lockout** | Enabled | 5 failed attempts, 15-minute lockout |
|
||||
| **Password Reset** | Enabled | Email-based token flow |
|
||||
| **Google OAuth** | Enabled | Requires GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET env vars |
|
||||
| **GitHub OAuth** | Enabled | Requires GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET env vars |
|
||||
| **OAuth Routes** | Enabled | `/api/auth/google`, `/api/auth/github` + callbacks |
|
||||
| **OAuth Frontend UI** | Enabled | Login buttons in AuthView.tsx |
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Authentication Flow
|
||||
|
||||
```text
|
||||
┌─────────────────────────────────────────────────────────────────────┐
|
||||
│ AUTHENTICATION FLOW │
|
||||
├─────────────────────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │
|
||||
│ │ Login │───>│ Passport │───>│ JWT │───>│ Protected│ │
|
||||
│ │ Request │ │ Local │ │ Token │ │ Routes │ │
|
||||
│ └──────────┘ └──────────┘ └──────────┘ └──────────┘ │
|
||||
│ │ │ │ │
|
||||
│ │ ┌──────────┐ │ │ │
|
||||
│ └────────>│ OAuth │─────────────┘ │ │
|
||||
│ (disabled) │ Provider │ │ │
|
||||
│ └──────────┘ │ │
|
||||
│ │ │
|
||||
│ ┌──────────┐ ┌──────────┐ │ │
|
||||
│ │ Refresh │───>│ New │<─────────────────────────┘ │
|
||||
│ │ Token │ │ JWT │ (when access token expires) │
|
||||
│ └──────────┘ └──────────┘ │
|
||||
│ │
|
||||
└─────────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Local Strategy (Enabled)
|
||||
|
||||
Located in `src/routes/passport.routes.ts`:
|
||||
|
||||
```typescript
|
||||
passport.use(
|
||||
new LocalStrategy(
|
||||
{ usernameField: 'email', passReqToCallback: true },
|
||||
async (req, email, password, done) => {
|
||||
// 1. Find user with profile by email
|
||||
const userprofile = await db.userRepo.findUserWithProfileByEmail(email, req.log);
|
||||
|
||||
// 2. Check account lockout
|
||||
if (userprofile.failed_login_attempts >= MAX_FAILED_ATTEMPTS) {
|
||||
// Check if lockout period has passed
|
||||
}
|
||||
|
||||
// 3. Verify password with bcrypt
|
||||
const isMatch = await bcrypt.compare(password, userprofile.password_hash);
|
||||
|
||||
// 4. On success, reset failed attempts and return user
|
||||
// 5. On failure, increment failed attempts
|
||||
},
|
||||
),
|
||||
);
|
||||
```
|
||||
|
||||
**Security Features**:
|
||||
|
||||
- Bcrypt password hashing with salt rounds = 10
|
||||
- Account lockout after 5 failed attempts
|
||||
- 15-minute lockout duration
|
||||
- Failed attempt tracking persists across lockout refreshes
|
||||
- Activity logging for failed login attempts
|
||||
|
||||
### JWT Strategy (Enabled)
|
||||
|
||||
```typescript
|
||||
const jwtOptions = {
|
||||
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
|
||||
secretOrKey: JWT_SECRET,
|
||||
};
|
||||
|
||||
passport.use(
|
||||
new JwtStrategy(jwtOptions, async (jwt_payload, done) => {
|
||||
const userProfile = await db.userRepo.findUserProfileById(jwt_payload.user_id);
|
||||
if (userProfile) {
|
||||
return done(null, userProfile);
|
||||
}
|
||||
return done(null, false);
|
||||
}),
|
||||
);
|
||||
```
|
||||
|
||||
**Token Configuration**:
|
||||
|
||||
- Access token: 15 minutes expiry
|
||||
- Refresh token: 7 days expiry, 64-byte random hex
|
||||
- Refresh token stored in HTTP-only cookie with `secure` flag in production
|
||||
|
||||
### OAuth Strategies (Disabled)
|
||||
|
||||
#### Google OAuth
|
||||
|
||||
Located in `src/routes/passport.routes.ts` (lines 167-217, commented):
|
||||
|
||||
```typescript
|
||||
// passport.use(new GoogleStrategy({
|
||||
// clientID: process.env.GOOGLE_CLIENT_ID!,
|
||||
// clientSecret: process.env.GOOGLE_CLIENT_SECRET!,
|
||||
// callbackURL: '/api/auth/google/callback',
|
||||
// scope: ['profile', 'email']
|
||||
// },
|
||||
// async (accessToken, refreshToken, profile, done) => {
|
||||
// const email = profile.emails?.[0]?.value;
|
||||
// const user = await db.findUserByEmail(email);
|
||||
// if (user) {
|
||||
// return done(null, user);
|
||||
// }
|
||||
// // Create new user with null password_hash
|
||||
// const newUser = await db.createUser(email, null, {
|
||||
// full_name: profile.displayName,
|
||||
// avatar_url: profile.photos?.[0]?.value
|
||||
// });
|
||||
// return done(null, newUser);
|
||||
// }
|
||||
// ));
|
||||
```
|
||||
|
||||
#### GitHub OAuth
|
||||
|
||||
Located in `src/routes/passport.routes.ts` (lines 219-269, commented):
|
||||
|
||||
```typescript
|
||||
// passport.use(new GitHubStrategy({
|
||||
// clientID: process.env.GITHUB_CLIENT_ID!,
|
||||
// clientSecret: process.env.GITHUB_CLIENT_SECRET!,
|
||||
// callbackURL: '/api/auth/github/callback',
|
||||
// scope: ['user:email']
|
||||
// },
|
||||
// async (accessToken, refreshToken, profile, done) => {
|
||||
// const email = profile.emails?.[0]?.value;
|
||||
// // Similar flow to Google OAuth
|
||||
// }
|
||||
// ));
|
||||
```
|
||||
|
||||
#### OAuth Routes (Disabled)
|
||||
|
||||
Located in `src/routes/auth.routes.ts` (lines 289-315, commented):
|
||||
|
||||
```typescript
|
||||
// const handleOAuthCallback = (req, res) => {
|
||||
// const user = req.user;
|
||||
// const accessToken = jwt.sign(payload, JWT_SECRET, { expiresIn: '15m' });
|
||||
// const refreshToken = crypto.randomBytes(64).toString('hex');
|
||||
//
|
||||
// await db.saveRefreshToken(user.user_id, refreshToken);
|
||||
// res.cookie('refreshToken', refreshToken, { httpOnly: true, secure: true });
|
||||
// res.redirect(`${FRONTEND_URL}/auth/callback?token=${accessToken}`);
|
||||
// };
|
||||
|
||||
// router.get('/google', passport.authenticate('google', { session: false }));
|
||||
// router.get('/google/callback', passport.authenticate('google', { ... }), handleOAuthCallback);
|
||||
// router.get('/github', passport.authenticate('github', { session: false }));
|
||||
// router.get('/github/callback', passport.authenticate('github', { ... }), handleOAuthCallback);
|
||||
```
|
||||
|
||||
### Database Schema
|
||||
|
||||
**Users Table** (`sql/initial_schema.sql`):
|
||||
|
||||
```sql
|
||||
CREATE TABLE public.users (
|
||||
user_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
|
||||
email TEXT NOT NULL UNIQUE,
|
||||
password_hash TEXT, -- NULL for OAuth-only users
|
||||
refresh_token TEXT, -- Current refresh token
|
||||
failed_login_attempts INTEGER DEFAULT 0,
|
||||
last_failed_login TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT now(),
|
||||
updated_at TIMESTAMPTZ DEFAULT now()
|
||||
);
|
||||
```
|
||||
|
||||
**Note**: There is no separate OAuth provider mapping table. OAuth users are identified by `password_hash = NULL`. If a user signs up via OAuth and later wants to add a password, this would require schema changes.
|
||||
|
||||
### Authentication Middleware
|
||||
|
||||
Located in `src/routes/passport.routes.ts`:
|
||||
|
||||
```typescript
|
||||
// Require admin role
|
||||
export const isAdmin = (req, res, next) => {
|
||||
if (req.user?.role === 'admin') {
|
||||
next();
|
||||
} else {
|
||||
next(new ForbiddenError('Administrator access required.'));
|
||||
}
|
||||
};
|
||||
|
||||
// Optional auth - attach user if present, continue if not
|
||||
export const optionalAuth = (req, res, next) => {
|
||||
passport.authenticate('jwt', { session: false }, (err, user) => {
|
||||
if (user) req.user = user;
|
||||
next();
|
||||
})(req, res, next);
|
||||
};
|
||||
|
||||
// Mock auth for testing (only in NODE_ENV=test)
|
||||
export const mockAuth = (req, res, next) => {
|
||||
if (process.env.NODE_ENV === 'test') {
|
||||
req.user = createMockUserProfile({ role: 'admin' });
|
||||
}
|
||||
next();
|
||||
};
|
||||
```
|
||||
|
||||
## Enabling OAuth
|
||||
|
||||
### Step 1: Set Environment Variables
|
||||
|
||||
Add to `.env`:
|
||||
|
||||
```bash
|
||||
# Google OAuth
|
||||
GOOGLE_CLIENT_ID=your-google-client-id
|
||||
GOOGLE_CLIENT_SECRET=your-google-client-secret
|
||||
|
||||
# GitHub OAuth
|
||||
GITHUB_CLIENT_ID=your-github-client-id
|
||||
GITHUB_CLIENT_SECRET=your-github-client-secret
|
||||
```
|
||||
|
||||
### Step 2: Configure OAuth Providers
|
||||
|
||||
**Google Cloud Console**:
|
||||
|
||||
1. Create project at <https://console.cloud.google.com/>
|
||||
2. Enable Google+ API
|
||||
3. Create OAuth 2.0 credentials (Web Application)
|
||||
4. Add authorized redirect URI:
|
||||
- Development: `http://localhost:3001/api/auth/google/callback`
|
||||
- Production: `https://your-domain.com/api/auth/google/callback`
|
||||
|
||||
**GitHub Developer Settings**:
|
||||
|
||||
1. Go to <https://github.com/settings/developers>
|
||||
2. Create new OAuth App
|
||||
3. Set Authorization callback URL:
|
||||
- Development: `http://localhost:3001/api/auth/github/callback`
|
||||
- Production: `https://your-domain.com/api/auth/github/callback`
|
||||
|
||||
### Step 3: Uncomment Backend Code
|
||||
|
||||
**In `src/routes/passport.routes.ts`**:
|
||||
|
||||
1. Uncomment import statements (lines 5-6):
|
||||
|
||||
```typescript
|
||||
import { Strategy as GoogleStrategy } from 'passport-google-oauth20';
|
||||
import { Strategy as GitHubStrategy } from 'passport-github2';
|
||||
```
|
||||
|
||||
2. Uncomment Google strategy (lines 167-217)
|
||||
3. Uncomment GitHub strategy (lines 219-269)
|
||||
|
||||
**In `src/routes/auth.routes.ts`**:
|
||||
|
||||
1. Uncomment `handleOAuthCallback` function (lines 291-309)
|
||||
2. Uncomment OAuth routes (lines 311-315)
|
||||
|
||||
### Step 4: Add Frontend OAuth Buttons
|
||||
|
||||
Create login buttons that redirect to:
|
||||
|
||||
- Google: `GET /api/auth/google`
|
||||
- GitHub: `GET /api/auth/github`
|
||||
|
||||
Handle callback at `/auth/callback?token=<accessToken>`:
|
||||
|
||||
1. Extract token from URL
|
||||
2. Store in client-side token storage
|
||||
3. Redirect to dashboard
|
||||
|
||||
### Step 5: Handle OAuth Callback Page
|
||||
|
||||
Create `src/pages/AuthCallback.tsx`:
|
||||
|
||||
```typescript
|
||||
const AuthCallback = () => {
|
||||
const token = new URLSearchParams(location.search).get('token');
|
||||
if (token) {
|
||||
setToken(token);
|
||||
navigate('/dashboard');
|
||||
} else {
|
||||
navigate('/login?error=auth_failed');
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Known Limitations
|
||||
|
||||
1. **No OAuth Provider ID Mapping**: Users are identified by email only. If a user has accounts with different emails on Google and GitHub, they create separate accounts.
|
||||
|
||||
2. **No Account Linking**: Users cannot link multiple OAuth providers to one account.
|
||||
|
||||
3. **No Password Addition for OAuth Users**: OAuth-only users cannot add a password to enable local login.
|
||||
|
||||
4. **No PKCE Flow**: OAuth implementation uses standard flow, not PKCE (Proof Key for Code Exchange).
|
||||
|
||||
5. **No OAuth State Parameter Validation**: The commented code doesn't show explicit state parameter handling for CSRF protection (Passport may handle this internally).
|
||||
|
||||
6. **No Refresh Token from OAuth Providers**: Only email/profile data is extracted; OAuth refresh tokens are not stored for API access.
|
||||
|
||||
## Dependencies
|
||||
|
||||
**Installed** (all available):
|
||||
|
||||
- `passport` v0.7.0
|
||||
- `passport-local` v1.0.0
|
||||
- `passport-jwt` v4.0.1
|
||||
- `passport-google-oauth20` v2.0.0
|
||||
- `passport-github2` v0.1.12
|
||||
- `bcrypt` v5.x
|
||||
- `jsonwebtoken` v9.x
|
||||
|
||||
**Type Definitions**:
|
||||
|
||||
- `@types/passport`
|
||||
- `@types/passport-local`
|
||||
- `@types/passport-jwt`
|
||||
- `@types/passport-google-oauth20`
|
||||
- `@types/passport-github2`
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Stateless Architecture**: No session storage required; scales horizontally.
|
||||
- **Secure by Default**: HTTP-only cookies, short token expiry, bcrypt hashing.
|
||||
- **Account Protection**: Lockout prevents brute-force attacks.
|
||||
- **Flexible OAuth**: Can enable/disable OAuth without code changes (just env vars + uncommenting).
|
||||
- **Graceful Degradation**: System works with local auth only.
|
||||
|
||||
### Negative
|
||||
|
||||
- **OAuth Disabled by Default**: Requires manual uncommenting to enable.
|
||||
- **No Account Linking**: Multiple OAuth providers create separate accounts.
|
||||
- **Frontend Work Required**: OAuth login buttons don't exist yet.
|
||||
- **Token in URL**: OAuth callback passes token in URL (visible in browser history).
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Document OAuth enablement steps clearly (see AUTHENTICATION.md).
|
||||
- Consider adding OAuth provider ID columns for future account linking.
|
||||
- Use URL fragment (`#token=`) instead of query parameter for callback.
|
||||
|
||||
## Key Files
|
||||
|
||||
| File | Purpose |
|
||||
| ------------------------------- | ------------------------------------------------ |
|
||||
| `src/routes/passport.routes.ts` | Passport strategies (local, JWT, OAuth) |
|
||||
| `src/routes/auth.routes.ts` | Auth endpoints (login, register, refresh, OAuth) |
|
||||
| `src/services/authService.ts` | Auth business logic |
|
||||
| `src/services/db/user.db.ts` | User database operations |
|
||||
| `src/config/env.ts` | Environment variable validation |
|
||||
| `AUTHENTICATION.md` | OAuth setup guide |
|
||||
| `.env.example` | Environment variable template |
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-011](./0011-advanced-authorization-and-access-control-strategy.md) - Authorization and Access Control
|
||||
- [ADR-016](./0016-api-security-hardening.md) - API Security (rate limiting, headers)
|
||||
- [ADR-032](./0032-rate-limiting-strategy.md) - Rate Limiting
|
||||
- [ADR-043](./0043-express-middleware-pipeline.md) - Middleware Pipeline
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
1. **Enable OAuth**: Uncomment strategies and configure providers.
|
||||
2. **Add OAuth Provider Mapping Table**: Store `googleId`, `githubId` for account linking.
|
||||
3. **Implement Account Linking**: Allow users to connect multiple OAuth providers.
|
||||
4. **Add Password to OAuth Users**: Allow OAuth users to set a password.
|
||||
5. **Implement PKCE**: Add PKCE flow for enhanced OAuth security.
|
||||
6. **Token in Fragment**: Use URL fragment for OAuth callback token.
|
||||
7. **OAuth Token Storage**: Store OAuth refresh tokens for provider API access.
|
||||
8. **Magic Link Login**: Add passwordless email login option.
|
||||
299
docs/adr/0049-gamification-and-achievement-system.md
Normal file
299
docs/adr/0049-gamification-and-achievement-system.md
Normal file
@@ -0,0 +1,299 @@
|
||||
# ADR-049: Gamification and Achievement System
|
||||
|
||||
**Date**: 2026-01-11
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
**Implemented**: 2026-01-11
|
||||
|
||||
## Context
|
||||
|
||||
The application implements a gamification system to encourage user engagement through achievements and points. Users earn achievements for completing specific actions within the platform, and these achievements contribute to a points-based leaderboard.
|
||||
|
||||
Key requirements:
|
||||
|
||||
1. **User Engagement**: Reward users for meaningful actions (uploads, recipes, sharing).
|
||||
2. **Progress Tracking**: Show users their accomplishments and progress.
|
||||
3. **Social Competition**: Leaderboard to compare users by points.
|
||||
4. **Idempotent Awards**: Achievements should only be awarded once per user.
|
||||
5. **Transactional Safety**: Achievement awards must be atomic with the triggering action.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a database-driven gamification system with:
|
||||
|
||||
1. **Database Functions**: Core logic in PostgreSQL for atomicity and idempotency.
|
||||
2. **Database Triggers**: Automatic achievement awards on specific events.
|
||||
3. **Application-Level Awards**: Explicit calls from service layer when triggers aren't suitable.
|
||||
4. **Points Aggregation**: Stored in user profile for efficient leaderboard queries.
|
||||
|
||||
### Design Principles
|
||||
|
||||
- **Single Award**: Each achievement can only be earned once per user (enforced by unique constraint).
|
||||
- **Atomic Operations**: Achievement awards happen within the same transaction as the triggering action.
|
||||
- **Silent Failure**: If an achievement doesn't exist, the award function returns silently (no error).
|
||||
- **Points Sync**: Points are updated on the profile immediately when an achievement is awarded.
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Database Schema
|
||||
|
||||
```sql
|
||||
-- Achievements master table
|
||||
CREATE TABLE public.achievements (
|
||||
achievement_id BIGSERIAL PRIMARY KEY,
|
||||
name TEXT UNIQUE NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
icon TEXT NOT NULL,
|
||||
points_value INTEGER NOT NULL DEFAULT 0,
|
||||
created_at TIMESTAMPTZ DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- User achievements (junction table)
|
||||
CREATE TABLE public.user_achievements (
|
||||
user_id UUID REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
achievement_id BIGINT REFERENCES public.achievements(achievement_id) ON DELETE CASCADE,
|
||||
achieved_at TIMESTAMPTZ DEFAULT NOW(),
|
||||
PRIMARY KEY (user_id, achievement_id)
|
||||
);
|
||||
|
||||
-- Points stored on profile for efficient leaderboard
|
||||
ALTER TABLE public.profiles ADD COLUMN points INTEGER DEFAULT 0;
|
||||
```
|
||||
|
||||
### Award Achievement Function
|
||||
|
||||
Located in `sql/Initial_triggers_and_functions.sql`:
|
||||
|
||||
```sql
|
||||
CREATE OR REPLACE FUNCTION public.award_achievement(p_user_id UUID, p_achievement_name TEXT)
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
SECURITY DEFINER
|
||||
AS $$
|
||||
DECLARE
|
||||
v_achievement_id BIGINT;
|
||||
v_points_value INTEGER;
|
||||
BEGIN
|
||||
-- Find the achievement by name to get its ID and point value.
|
||||
SELECT achievement_id, points_value INTO v_achievement_id, v_points_value
|
||||
FROM public.achievements WHERE name = p_achievement_name;
|
||||
|
||||
-- If the achievement doesn't exist, do nothing.
|
||||
IF v_achievement_id IS NULL THEN
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
-- Insert the achievement for the user.
|
||||
-- ON CONFLICT DO NOTHING ensures idempotency.
|
||||
INSERT INTO public.user_achievements (user_id, achievement_id)
|
||||
VALUES (p_user_id, v_achievement_id)
|
||||
ON CONFLICT (user_id, achievement_id) DO NOTHING;
|
||||
|
||||
-- If the insert was successful (user didn't have it), update their points.
|
||||
IF FOUND THEN
|
||||
UPDATE public.profiles SET points = points + v_points_value WHERE user_id = p_user_id;
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
```
|
||||
|
||||
### Current Achievements
|
||||
|
||||
| Name | Description | Icon | Points |
|
||||
| -------------------- | ----------------------------------------------------------- | ------------ | ------ |
|
||||
| Welcome Aboard | Join the community by creating your account. | user-check | 5 |
|
||||
| First Recipe | Create your very first recipe. | chef-hat | 10 |
|
||||
| Recipe Sharer | Share a recipe with another user for the first time. | share-2 | 15 |
|
||||
| List Sharer | Share a shopping list with another user for the first time. | list | 20 |
|
||||
| First Favorite | Mark a recipe as one of your favorites. | heart | 5 |
|
||||
| First Fork | Make a personal copy of a public recipe. | git-fork | 10 |
|
||||
| First Budget Created | Create your first budget to track spending. | piggy-bank | 15 |
|
||||
| First-Upload | Upload your first flyer. | upload-cloud | 25 |
|
||||
|
||||
### Achievement Triggers
|
||||
|
||||
#### User Registration (Database Trigger)
|
||||
|
||||
Awards "Welcome Aboard" when a new user is created:
|
||||
|
||||
```sql
|
||||
-- In handle_new_user() function
|
||||
PERFORM public.award_achievement(new.user_id, 'Welcome Aboard');
|
||||
```
|
||||
|
||||
#### Flyer Upload (Database Trigger + Application Code)
|
||||
|
||||
Awards "First-Upload" when a flyer is inserted with an `uploaded_by` value:
|
||||
|
||||
```sql
|
||||
-- In log_new_flyer() trigger function
|
||||
IF NEW.uploaded_by IS NOT NULL THEN
|
||||
PERFORM public.award_achievement(NEW.uploaded_by, 'First-Upload');
|
||||
END IF;
|
||||
```
|
||||
|
||||
Additionally, the `FlyerPersistenceService.saveFlyer()` method explicitly awards the achievement within the transaction:
|
||||
|
||||
```typescript
|
||||
// In src/services/flyerPersistenceService.server.ts
|
||||
if (userId) {
|
||||
const gamificationRepo = new GamificationRepository(client);
|
||||
await gamificationRepo.awardAchievement(userId, 'First-Upload', logger);
|
||||
}
|
||||
```
|
||||
|
||||
### Repository Layer
|
||||
|
||||
Located in `src/services/db/gamification.db.ts`:
|
||||
|
||||
```typescript
|
||||
export class GamificationRepository {
|
||||
private db: Pick<Pool | PoolClient, 'query'>;
|
||||
|
||||
constructor(db: Pick<Pool | PoolClient, 'query'> = getPool()) {
|
||||
this.db = db;
|
||||
}
|
||||
|
||||
async getUserAchievements(
|
||||
userId: string,
|
||||
logger: Logger,
|
||||
): Promise<(UserAchievement & Achievement)[]> {
|
||||
const query = `
|
||||
SELECT ua.user_id, ua.achievement_id, ua.achieved_at,
|
||||
a.name, a.description, a.icon, a.points_value, a.created_at
|
||||
FROM public.user_achievements ua
|
||||
JOIN public.achievements a ON ua.achievement_id = a.achievement_id
|
||||
WHERE ua.user_id = $1
|
||||
ORDER BY ua.achieved_at DESC;
|
||||
`;
|
||||
const res = await this.db.query(query, [userId]);
|
||||
return res.rows;
|
||||
}
|
||||
|
||||
async awardAchievement(userId: string, achievementName: string, logger: Logger): Promise<void> {
|
||||
await this.db.query('SELECT public.award_achievement($1, $2)', [userId, achievementName]);
|
||||
}
|
||||
|
||||
async getLeaderboard(limit: number, logger: Logger): Promise<LeaderboardUser[]> {
|
||||
const query = `
|
||||
SELECT user_id, full_name, avatar_url, points,
|
||||
RANK() OVER (ORDER BY points DESC) as rank
|
||||
FROM public.profiles
|
||||
ORDER BY points DESC, full_name ASC
|
||||
LIMIT $1;
|
||||
`;
|
||||
const res = await this.db.query(query, [limit]);
|
||||
return res.rows;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### API Endpoints
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------------------- | ------------------------------- |
|
||||
| GET | `/api/achievements` | List all available achievements |
|
||||
| GET | `/api/achievements/me` | Get current user's achievements |
|
||||
| GET | `/api/achievements/leaderboard` | Get top users by points |
|
||||
|
||||
## Testing Considerations
|
||||
|
||||
### Critical Testing Requirements
|
||||
|
||||
When testing gamification features, be aware of the following:
|
||||
|
||||
1. **Database Seed Data**: Achievement definitions must exist in the database before tests run. The `award_achievement()` function silently returns if the achievement name doesn't exist.
|
||||
|
||||
2. **Transactional Context**: When awarding achievements from within a transaction:
|
||||
- The achievement is visible within the transaction immediately
|
||||
- External queries won't see the achievement until the transaction commits
|
||||
- Tests should wait for job completion before asserting achievement state
|
||||
|
||||
3. **Vitest Global Setup Context**: The integration test global setup runs in a separate Node.js context. Achievement verification must use direct database queries, not mocked services.
|
||||
|
||||
4. **Achievement Idempotency**: Calling `award_achievement()` multiple times for the same user/achievement combination is safe and expected. Only the first call actually inserts.
|
||||
|
||||
### Example Integration Test Pattern
|
||||
|
||||
```typescript
|
||||
it('should award the "First Upload" achievement after flyer processing', async () => {
|
||||
// 1. Create user (awards "Welcome Aboard" via database trigger)
|
||||
const { user: testUser, token } = await createAndLoginUser({...});
|
||||
|
||||
// 2. Upload flyer (triggers async job)
|
||||
const uploadResponse = await request
|
||||
.post('/api/flyers/upload')
|
||||
.set('Authorization', `Bearer ${token}`)
|
||||
.attach('flyerFile', testImagePath);
|
||||
expect(uploadResponse.status).toBe(202);
|
||||
|
||||
// 3. Wait for job to complete
|
||||
await poll(async () => {
|
||||
const status = await request.get(`/api/flyers/job/${jobId}/status`);
|
||||
return status.body.data.status === 'completed';
|
||||
}, { timeout: 15000 });
|
||||
|
||||
// 4. Wait for achievements to be visible (transaction committed)
|
||||
await vi.waitUntil(async () => {
|
||||
const achievements = await db.gamificationRepo.getUserAchievements(
|
||||
testUser.user.user_id,
|
||||
logger
|
||||
);
|
||||
return achievements.length >= 2; // Welcome Aboard + First-Upload
|
||||
}, { timeout: 15000, interval: 500 });
|
||||
|
||||
// 5. Assert specific achievements
|
||||
const userAchievements = await db.gamificationRepo.getUserAchievements(
|
||||
testUser.user.user_id,
|
||||
logger
|
||||
);
|
||||
expect(userAchievements.find(a => a.name === 'Welcome Aboard')).toBeDefined();
|
||||
expect(userAchievements.find(a => a.name === 'First-Upload')).toBeDefined();
|
||||
});
|
||||
```
|
||||
|
||||
### Common Test Pitfalls
|
||||
|
||||
1. **Missing Seed Data**: If tests fail with "achievement not found", ensure the test database has the achievements table populated.
|
||||
|
||||
2. **Race Conditions**: Achievement awards in async jobs may not be visible immediately. Always poll or use `vi.waitUntil()`.
|
||||
|
||||
3. **Wrong User ID**: Verify the user ID passed to `awardAchievement()` matches the user created in the test.
|
||||
|
||||
4. **Transaction Isolation**: When querying within a test, use the same database connection if checking mid-transaction state.
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Engagement**: Users have clear goals and rewards for platform activity.
|
||||
- **Scalability**: Points stored on profile enable O(1) leaderboard sorting.
|
||||
- **Reliability**: Database-level idempotency prevents duplicate awards.
|
||||
- **Flexibility**: New achievements can be added via SQL without code changes.
|
||||
|
||||
### Negative
|
||||
|
||||
- **Complexity**: Multiple award paths (triggers + application code) require careful coordination.
|
||||
- **Testing**: Async nature of some awards complicates integration testing.
|
||||
- **Coupling**: Achievement names are strings; typos fail silently.
|
||||
|
||||
### Mitigation
|
||||
|
||||
- Use constants for achievement names in application code.
|
||||
- Document all award trigger points clearly.
|
||||
- Test each achievement path independently.
|
||||
|
||||
## Key Files
|
||||
|
||||
- `sql/initial_data.sql` - Achievement definitions (seed data)
|
||||
- `sql/Initial_triggers_and_functions.sql` - `award_achievement()` function and triggers
|
||||
- `src/services/db/gamification.db.ts` - Repository layer
|
||||
- `src/routes/achievements.routes.ts` - API endpoints
|
||||
- `src/services/flyerPersistenceService.server.ts` - First-Upload award (application code)
|
||||
|
||||
## Related ADRs
|
||||
|
||||
- [ADR-002](./0002-standardized-transaction-management.md) - Transaction Management
|
||||
- [ADR-034](./0034-repository-pattern-standards.md) - Repository Pattern
|
||||
- [ADR-006](./0006-background-job-processing-and-task-queues.md) - Background Jobs (flyer processing)
|
||||
341
docs/adr/0050-postgresql-function-observability.md
Normal file
341
docs/adr/0050-postgresql-function-observability.md
Normal file
@@ -0,0 +1,341 @@
|
||||
# ADR-050: PostgreSQL Function Observability
|
||||
|
||||
**Date**: 2026-01-11
|
||||
|
||||
**Status**: Proposed
|
||||
|
||||
**Related**: [ADR-015](0015-application-performance-monitoring-and-error-tracking.md), [ADR-004](0004-standardized-application-wide-structured-logging.md)
|
||||
|
||||
## Context
|
||||
|
||||
The application uses 30+ PostgreSQL functions and 11+ triggers for business logic, including:
|
||||
|
||||
- Recipe recommendations and search
|
||||
- Shopping list generation from menu plans
|
||||
- Price history tracking
|
||||
- Achievement awards
|
||||
- Activity logging
|
||||
- User profile creation
|
||||
|
||||
**Current Problem**: These database functions can fail silently in several ways:
|
||||
|
||||
1. **`ON CONFLICT DO NOTHING`** - Swallows constraint violations without notification
|
||||
2. **`IF NOT FOUND THEN RETURN;`** - Silently exits when data is missing
|
||||
3. **Trigger functions returning `NULL`** - No indication of partial failures
|
||||
4. **No logging inside functions** - No visibility into function execution
|
||||
|
||||
When these silent failures occur:
|
||||
|
||||
- The application layer receives no error (function "succeeds" but does nothing)
|
||||
- No logs are generated for debugging
|
||||
- Issues are only discovered when users report missing data
|
||||
- Root cause analysis is extremely difficult
|
||||
|
||||
**Example of Silent Failure**:
|
||||
|
||||
```sql
|
||||
-- This function silently does nothing if achievement doesn't exist
|
||||
CREATE OR REPLACE FUNCTION public.award_achievement(p_user_id UUID, p_achievement_name TEXT)
|
||||
RETURNS void AS $$
|
||||
BEGIN
|
||||
SELECT achievement_id INTO v_achievement_id FROM achievements WHERE name = p_achievement_name;
|
||||
IF v_achievement_id IS NULL THEN
|
||||
RETURN; -- Silent failure - no log, no error
|
||||
END IF;
|
||||
-- ...
|
||||
END;
|
||||
$$;
|
||||
```
|
||||
|
||||
ADR-015 established Logstash + Bugsink for error tracking, with PostgreSQL log integration marked as "future". This ADR defines the implementation.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a standardized PostgreSQL function observability strategy with three tiers of logging severity:
|
||||
|
||||
### 1. Function Logging Helper
|
||||
|
||||
Create a reusable logging function that outputs structured JSON to PostgreSQL logs:
|
||||
|
||||
```sql
|
||||
-- Function to emit structured log messages from PL/pgSQL
|
||||
CREATE OR REPLACE FUNCTION public.fn_log(
|
||||
p_level TEXT, -- 'DEBUG', 'INFO', 'NOTICE', 'WARNING', 'ERROR'
|
||||
p_function_name TEXT, -- The calling function name
|
||||
p_message TEXT, -- Human-readable message
|
||||
p_context JSONB DEFAULT NULL -- Additional context (user_id, params, etc.)
|
||||
)
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
log_line TEXT;
|
||||
BEGIN
|
||||
-- Build structured JSON log line
|
||||
log_line := jsonb_build_object(
|
||||
'timestamp', now(),
|
||||
'level', p_level,
|
||||
'source', 'postgresql',
|
||||
'function', p_function_name,
|
||||
'message', p_message,
|
||||
'context', COALESCE(p_context, '{}'::jsonb)
|
||||
)::text;
|
||||
|
||||
-- Use appropriate RAISE level
|
||||
CASE p_level
|
||||
WHEN 'DEBUG' THEN RAISE DEBUG '%', log_line;
|
||||
WHEN 'INFO' THEN RAISE INFO '%', log_line;
|
||||
WHEN 'NOTICE' THEN RAISE NOTICE '%', log_line;
|
||||
WHEN 'WARNING' THEN RAISE WARNING '%', log_line;
|
||||
WHEN 'ERROR' THEN RAISE LOG '%', log_line; -- Use LOG for errors to ensure capture
|
||||
ELSE RAISE NOTICE '%', log_line;
|
||||
END CASE;
|
||||
END;
|
||||
$$;
|
||||
```
|
||||
|
||||
### 2. Logging Tiers
|
||||
|
||||
#### Tier 1: Critical Functions (Always Log)
|
||||
|
||||
Functions where silent failure causes data corruption or user-facing issues:
|
||||
|
||||
| Function | Log Events |
|
||||
| ---------------------------------- | --------------------------------------- |
|
||||
| `handle_new_user()` | User creation, profile creation, errors |
|
||||
| `award_achievement()` | Achievement not found, already awarded |
|
||||
| `approve_correction()` | Correction not found, permission denied |
|
||||
| `complete_shopping_list()` | List not found, permission denied |
|
||||
| `add_menu_plan_to_shopping_list()` | Permission denied, items added |
|
||||
| `fork_recipe()` | Original not found, fork created |
|
||||
|
||||
**Pattern**:
|
||||
|
||||
```sql
|
||||
CREATE OR REPLACE FUNCTION public.award_achievement(p_user_id UUID, p_achievement_name TEXT)
|
||||
RETURNS void AS $$
|
||||
DECLARE
|
||||
v_achievement_id BIGINT;
|
||||
v_points_value INTEGER;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
v_context := jsonb_build_object('user_id', p_user_id, 'achievement_name', p_achievement_name);
|
||||
|
||||
SELECT achievement_id, points_value INTO v_achievement_id, v_points_value
|
||||
FROM public.achievements WHERE name = p_achievement_name;
|
||||
|
||||
IF v_achievement_id IS NULL THEN
|
||||
-- Log the issue instead of silent return
|
||||
PERFORM fn_log('WARNING', 'award_achievement',
|
||||
'Achievement not found: ' || p_achievement_name, v_context);
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
INSERT INTO public.user_achievements (user_id, achievement_id)
|
||||
VALUES (p_user_id, v_achievement_id)
|
||||
ON CONFLICT (user_id, achievement_id) DO NOTHING;
|
||||
|
||||
IF FOUND THEN
|
||||
UPDATE public.profiles SET points = points + v_points_value WHERE user_id = p_user_id;
|
||||
PERFORM fn_log('INFO', 'award_achievement',
|
||||
'Achievement awarded: ' || p_achievement_name, v_context);
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
```
|
||||
|
||||
#### Tier 2: Business Logic Functions (Log on Anomalies)
|
||||
|
||||
Functions where unexpected conditions should be logged but aren't critical:
|
||||
|
||||
| Function | Log Events |
|
||||
| -------------------------------------- | ---------------------------------- |
|
||||
| `suggest_master_item_for_flyer_item()` | No match found (below threshold) |
|
||||
| `recommend_recipes_for_user()` | No recommendations generated |
|
||||
| `find_recipes_from_pantry()` | Empty pantry, no recipes found |
|
||||
| `get_best_sale_prices_for_user()` | No watched items, no current sales |
|
||||
|
||||
**Pattern**: Log when results are unexpectedly empty or inputs are invalid.
|
||||
|
||||
#### Tier 3: Triggers (Log Errors Only)
|
||||
|
||||
Triggers should be fast, so only log when something goes wrong:
|
||||
|
||||
| Trigger Function | Log Events |
|
||||
| --------------------------------------------- | ------------------------- |
|
||||
| `update_price_history_on_flyer_item_insert()` | Failed to update history |
|
||||
| `update_recipe_rating_aggregates()` | Rating calculation failed |
|
||||
| `log_new_recipe()` | Profile lookup failed |
|
||||
| `log_new_flyer()` | Store lookup failed |
|
||||
|
||||
### 3. PostgreSQL Configuration
|
||||
|
||||
Enable logging in `postgresql.conf`:
|
||||
|
||||
```ini
|
||||
# Log all function notices and above
|
||||
log_min_messages = notice
|
||||
|
||||
# Include function name in log prefix
|
||||
log_line_prefix = '%t [%p] %u@%d '
|
||||
|
||||
# Log to file for Logstash pickup
|
||||
logging_collector = on
|
||||
log_directory = '/var/log/postgresql'
|
||||
log_filename = 'postgresql-%Y-%m-%d.log'
|
||||
log_rotation_age = 1d
|
||||
log_rotation_size = 100MB
|
||||
|
||||
# Capture slow queries from functions
|
||||
log_min_duration_statement = 1000 # Log queries over 1 second
|
||||
```
|
||||
|
||||
### 4. Logstash Integration
|
||||
|
||||
Update the Logstash pipeline (extends ADR-015 configuration):
|
||||
|
||||
```conf
|
||||
# PostgreSQL function log input
|
||||
input {
|
||||
file {
|
||||
path => "/var/log/postgresql/*.log"
|
||||
type => "postgres"
|
||||
tags => ["postgres"]
|
||||
start_position => "beginning"
|
||||
sincedb_path => "/var/lib/logstash/sincedb_postgres"
|
||||
}
|
||||
}
|
||||
|
||||
filter {
|
||||
if [type] == "postgres" {
|
||||
# Extract timestamp and process ID from PostgreSQL log prefix
|
||||
grok {
|
||||
match => { "message" => "%{TIMESTAMP_ISO8601:pg_timestamp} \[%{POSINT:pg_pid}\] %{USER:pg_user}@%{WORD:pg_database} %{GREEDYDATA:pg_message}" }
|
||||
}
|
||||
|
||||
# Check if this is a structured JSON log from fn_log()
|
||||
if [pg_message] =~ /^\{.*"source":"postgresql".*\}$/ {
|
||||
json {
|
||||
source => "pg_message"
|
||||
target => "fn_log"
|
||||
}
|
||||
|
||||
# Mark as error if level is WARNING or ERROR
|
||||
if [fn_log][level] in ["WARNING", "ERROR"] {
|
||||
mutate { add_tag => ["error", "db_function"] }
|
||||
}
|
||||
}
|
||||
|
||||
# Also catch native PostgreSQL errors
|
||||
if [pg_message] =~ /^ERROR:/ or [pg_message] =~ /^FATAL:/ {
|
||||
mutate { add_tag => ["error", "postgres_native"] }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output {
|
||||
if "error" in [tags] and "postgres" in [tags] {
|
||||
http {
|
||||
url => "http://localhost:8000/api/store/"
|
||||
http_method => "post"
|
||||
format => "json"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Dual-File Update Requirement
|
||||
|
||||
**IMPORTANT**: All SQL function changes must be applied to BOTH files:
|
||||
|
||||
1. `sql/Initial_triggers_and_functions.sql` - Used for incremental updates
|
||||
2. `sql/master_schema_rollup.sql` - Used for fresh database setup
|
||||
|
||||
Both files must remain in sync for triggers and functions.
|
||||
|
||||
## Implementation Steps
|
||||
|
||||
1. **Create `fn_log()` helper function**:
|
||||
- Add to both `Initial_triggers_and_functions.sql` and `master_schema_rollup.sql`
|
||||
- Test with `SELECT fn_log('INFO', 'test', 'Test message', '{"key": "value"}'::jsonb);`
|
||||
|
||||
2. **Update Tier 1 critical functions** (highest priority):
|
||||
- `award_achievement()` - Log missing achievements, duplicate awards
|
||||
- `handle_new_user()` - Log user creation success/failure
|
||||
- `approve_correction()` - Log not found, permission denied
|
||||
- `complete_shopping_list()` - Log permission checks
|
||||
- `add_menu_plan_to_shopping_list()` - Log permission checks, items added
|
||||
- `fork_recipe()` - Log original not found
|
||||
|
||||
3. **Update Tier 2 business logic functions**:
|
||||
- Add anomaly logging to suggestion/recommendation functions
|
||||
- Log empty result sets with context
|
||||
|
||||
4. **Update Tier 3 trigger functions**:
|
||||
- Add error-only logging to critical triggers
|
||||
- Wrap complex trigger logic in exception handlers
|
||||
|
||||
5. **Configure PostgreSQL logging**:
|
||||
- Update `postgresql.conf` in dev container
|
||||
- Update production PostgreSQL configuration
|
||||
- Verify logs appear in expected location
|
||||
|
||||
6. **Update Logstash pipeline**:
|
||||
- Add PostgreSQL input to `bugsink.conf`
|
||||
- Add filter rules for structured JSON extraction
|
||||
- Test end-to-end: function log → Logstash → Bugsink
|
||||
|
||||
7. **Verify in Bugsink**:
|
||||
- Confirm database function errors appear as issues
|
||||
- Verify context (user_id, function name, params) is captured
|
||||
|
||||
## Consequences
|
||||
|
||||
### Positive
|
||||
|
||||
- **Visibility**: Silent failures become visible in error tracking
|
||||
- **Debugging**: Function execution context captured for root cause analysis
|
||||
- **Proactive detection**: Anomalies logged before users report issues
|
||||
- **Unified monitoring**: Database errors appear alongside application errors in Bugsink
|
||||
- **Structured logs**: JSON format enables filtering and aggregation
|
||||
|
||||
### Negative
|
||||
|
||||
- **Performance overhead**: Logging adds latency to function execution
|
||||
- **Log volume**: Tier 1/2 functions may generate significant log volume
|
||||
- **Maintenance**: Two SQL files must be kept in sync
|
||||
- **PostgreSQL configuration**: Requires access to `postgresql.conf`
|
||||
|
||||
### Mitigations
|
||||
|
||||
- **Performance**: Only log meaningful events, not every function call
|
||||
- **Log volume**: Use appropriate log levels; Logstash filters reduce noise
|
||||
- **Sync**: Add CI check to verify SQL files match for function definitions
|
||||
- **Configuration**: Document PostgreSQL settings in deployment runbook
|
||||
|
||||
## Examples
|
||||
|
||||
### Before (Silent Failure)
|
||||
|
||||
```sql
|
||||
-- User thinks achievement was awarded, but it silently failed
|
||||
SELECT award_achievement('user-uuid', 'Nonexistent Badge');
|
||||
-- Returns: void (no error, no log)
|
||||
-- Result: User never gets achievement, nobody knows why
|
||||
```
|
||||
|
||||
### After (Observable Failure)
|
||||
|
||||
```sql
|
||||
SELECT award_achievement('user-uuid', 'Nonexistent Badge');
|
||||
-- Returns: void
|
||||
-- PostgreSQL log: {"timestamp":"2026-01-11T10:30:00Z","level":"WARNING","source":"postgresql","function":"award_achievement","message":"Achievement not found: Nonexistent Badge","context":{"user_id":"user-uuid","achievement_name":"Nonexistent Badge"}}
|
||||
-- Bugsink: New issue created with full context
|
||||
```
|
||||
|
||||
## References
|
||||
|
||||
- [ADR-015: Application Performance Monitoring](0015-application-performance-monitoring-and-error-tracking.md)
|
||||
- [ADR-004: Standardized Structured Logging](0004-standardized-application-wide-structured-logging.md)
|
||||
- [PostgreSQL RAISE Documentation](https://www.postgresql.org/docs/current/plpgsql-errors-and-messages.html)
|
||||
- [PostgreSQL Logging Configuration](https://www.postgresql.org/docs/current/runtime-config-logging.html)
|
||||
54
docs/adr/0051-asynchronous-context-propagation.md
Normal file
54
docs/adr/0051-asynchronous-context-propagation.md
Normal file
@@ -0,0 +1,54 @@
|
||||
# ADR-051: Asynchronous Context Propagation
|
||||
|
||||
**Date**: 2026-01-11
|
||||
|
||||
**Status**: Accepted (Implemented)
|
||||
|
||||
## Context
|
||||
|
||||
Debugging asynchronous workflows is difficult because the `request_id` generated at the API layer is lost when a task is handed off to a background queue (BullMQ). Logs from the worker appear disconnected from the user action that triggered them.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a context propagation pattern for all background jobs:
|
||||
|
||||
1. **Job Data Payload**: All job data interfaces MUST include a `meta` object containing `requestId`, `userId`, and `origin`.
|
||||
2. **Worker Logger Initialization**: All BullMQ workers MUST initialize a child logger immediately upon processing a job, using the metadata passed in the payload.
|
||||
3. **Correlation**: The worker's logger must use the _same_ `request_id` as the initiating API request.
|
||||
|
||||
## Implementation
|
||||
|
||||
```typescript
|
||||
// 1. Enqueueing (API Layer)
|
||||
await queue.add('process-flyer', {
|
||||
...data,
|
||||
meta: {
|
||||
requestId: req.log.bindings().request_id, // Propagate ID
|
||||
userId: req.user.id,
|
||||
},
|
||||
});
|
||||
|
||||
// 2. Processing (Worker Layer)
|
||||
const worker = new Worker('queue', async (job) => {
|
||||
const { requestId, userId } = job.data.meta || {};
|
||||
|
||||
// Create context-aware logger for this specific job execution
|
||||
const jobLogger = logger.child({
|
||||
request_id: requestId || uuidv4(), // Use propagated ID or generate new
|
||||
user_id: userId,
|
||||
job_id: job.id,
|
||||
service: 'worker',
|
||||
});
|
||||
|
||||
try {
|
||||
await processJob(job.data, jobLogger); // Pass logger down
|
||||
} catch (err) {
|
||||
jobLogger.error({ err }, 'Job failed');
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
**Positive**: Complete traceability from API request -> Queue -> Worker execution. Drastically reduces time to find "what happened" to a specific user request.
|
||||
42
docs/adr/0052-granular-debug-logging-strategy.md
Normal file
42
docs/adr/0052-granular-debug-logging-strategy.md
Normal file
@@ -0,0 +1,42 @@
|
||||
# ADR-052: Granular Debug Logging Strategy
|
||||
|
||||
**Date**: 2026-01-11
|
||||
|
||||
**Status**: Proposed
|
||||
|
||||
## Context
|
||||
|
||||
Global log levels (INFO vs DEBUG) are too coarse. Developers need to inspect detailed debug information for specific subsystems (e.g., `ai-service`, `db-pool`) without being flooded by logs from the entire application.
|
||||
|
||||
## Decision
|
||||
|
||||
We will adopt a namespace-based debug filter pattern, similar to the `debug` npm package, but integrated into our Pino logger.
|
||||
|
||||
1. **Logger Namespaces**: Every service/module logger must be initialized with a `module` property (e.g., `logger.child({ module: 'ai-service' })`).
|
||||
2. **Environment Filter**: We will support a `DEBUG_MODULES` environment variable that overrides the log level for matching modules.
|
||||
|
||||
## Implementation
|
||||
|
||||
In `src/services/logger.server.ts`:
|
||||
|
||||
```typescript
|
||||
const debugModules = (process.env.DEBUG_MODULES || '').split(',').map((s) => s.trim());
|
||||
|
||||
export const createScopedLogger = (moduleName: string) => {
|
||||
// If DEBUG_MODULES contains "ai-service" or "*", force level to 'debug'
|
||||
const isDebugEnabled = debugModules.includes('*') || debugModules.includes(moduleName);
|
||||
|
||||
return logger.child({
|
||||
module: moduleName,
|
||||
level: isDebugEnabled ? 'debug' : logger.level,
|
||||
});
|
||||
};
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
To debug only AI and Database interactions:
|
||||
|
||||
```bash
|
||||
DEBUG_MODULES=ai-service,db-repo npm run dev
|
||||
```
|
||||
62
docs/adr/0053-worker-health-checks.md
Normal file
62
docs/adr/0053-worker-health-checks.md
Normal file
@@ -0,0 +1,62 @@
|
||||
# ADR-053: Worker Health Checks and Stalled Job Monitoring
|
||||
|
||||
**Date**: 2026-01-11
|
||||
|
||||
**Status**: Proposed
|
||||
|
||||
## Context
|
||||
|
||||
Our application relies heavily on background workers (BullMQ) for flyer processing, analytics, and emails. If a worker process crashes (e.g., Out of Memory) or hangs, jobs may remain in the 'active' state indefinitely ("stalled") until BullMQ's fail-safe triggers.
|
||||
|
||||
Currently, we lack:
|
||||
|
||||
1. Visibility into queue depths and worker status via HTTP endpoints (for uptime monitors).
|
||||
2. A mechanism to detect if the worker process itself is alive, beyond just queue statistics.
|
||||
3. Explicit configuration to ensure stalled jobs are recovered quickly.
|
||||
|
||||
## Decision
|
||||
|
||||
We will implement a multi-layered health check strategy for background workers:
|
||||
|
||||
1. **Queue Metrics Endpoint**: Expose a protected endpoint `GET /health/queues` that returns the counts (waiting, active, failed) for all critical queues.
|
||||
2. **Stalled Job Configuration**: Explicitly configure BullMQ workers with aggressive stall detection settings to recover quickly from crashes.
|
||||
3. **Worker Heartbeats**: Workers will periodically update a "heartbeat" key in Redis. The health endpoint will check if this timestamp is recent.
|
||||
|
||||
## Implementation
|
||||
|
||||
### 1. BullMQ Worker Settings
|
||||
|
||||
Workers must be initialized with specific options to handle stalls:
|
||||
|
||||
```typescript
|
||||
const workerOptions = {
|
||||
// Check for stalled jobs every 30 seconds
|
||||
stalledInterval: 30000,
|
||||
// Fail job after 3 stalls (prevents infinite loops causing infinite retries)
|
||||
maxStalledCount: 3,
|
||||
// Duration of the lock for the job in milliseconds.
|
||||
// If the worker doesn't renew this (e.g. crash), the job stalls.
|
||||
lockDuration: 30000,
|
||||
};
|
||||
```
|
||||
|
||||
### 2. Health Endpoint Logic
|
||||
|
||||
The `/health/queues` endpoint will:
|
||||
|
||||
1. Iterate through all defined queues (`flyerQueue`, `emailQueue`, etc.).
|
||||
2. Fetch job counts (`waiting`, `active`, `failed`, `delayed`).
|
||||
3. Return a 200 OK if queues are accessible, or 503 if Redis is unreachable.
|
||||
4. (Future) Return 500 if the `waiting` count exceeds a critical threshold for too long.
|
||||
|
||||
## Consequences
|
||||
|
||||
**Positive**:
|
||||
|
||||
- Early detection of stuck processing pipelines.
|
||||
- Automatic recovery of stalled jobs via BullMQ configuration.
|
||||
- Metrics available for external monitoring tools (e.g., UptimeRobot, Datadog).
|
||||
|
||||
**Negative**:
|
||||
|
||||
- Requires configuring external monitoring to poll the new endpoint.
|
||||
@@ -15,9 +15,9 @@ This document tracks the implementation status and estimated effort for all Arch
|
||||
|
||||
| Status | Count |
|
||||
| ---------------------------- | ----- |
|
||||
| Accepted (Fully Implemented) | 22 |
|
||||
| Accepted (Fully Implemented) | 30 |
|
||||
| Partially Implemented | 2 |
|
||||
| Proposed (Not Started) | 15 |
|
||||
| Proposed (Not Started) | 16 |
|
||||
|
||||
---
|
||||
|
||||
@@ -48,7 +48,7 @@ This document tracks the implementation status and estimated effort for all Arch
|
||||
| ------------------------------------------------------------------- | ------------------------ | ----------- | ------ | ------------------------------------- |
|
||||
| [ADR-003](./0003-standardized-input-validation-using-middleware.md) | Input Validation | Accepted | - | Fully implemented |
|
||||
| [ADR-008](./0008-api-versioning-strategy.md) | API Versioning | Proposed | L | Major URL/routing changes |
|
||||
| [ADR-018](./0018-api-documentation-strategy.md) | API Documentation | Proposed | M | OpenAPI/Swagger setup |
|
||||
| [ADR-018](./0018-api-documentation-strategy.md) | API Documentation | Accepted | - | OpenAPI/Swagger implemented |
|
||||
| [ADR-022](./0022-real-time-notification-system.md) | Real-time Notifications | Proposed | XL | WebSocket infrastructure |
|
||||
| [ADR-028](./0028-api-response-standardization.md) | Response Standardization | Implemented | L | Completed (routes, middleware, tests) |
|
||||
|
||||
@@ -65,10 +65,11 @@ This document tracks the implementation status and estimated effort for all Arch
|
||||
|
||||
### Category 5: Observability & Monitoring
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| -------------------------------------------------------------------------- | -------------------- | -------- | ------ | ----------------------- |
|
||||
| [ADR-004](./0004-standardized-application-wide-structured-logging.md) | Structured Logging | Accepted | - | Fully implemented |
|
||||
| [ADR-015](./0015-application-performance-monitoring-and-error-tracking.md) | APM & Error Tracking | Proposed | M | Third-party integration |
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| -------------------------------------------------------------------------- | --------------------------- | -------- | ------ | --------------------------------- |
|
||||
| [ADR-004](./0004-standardized-application-wide-structured-logging.md) | Structured Logging | Accepted | - | Fully implemented |
|
||||
| [ADR-015](./0015-application-performance-monitoring-and-error-tracking.md) | APM & Error Tracking | Proposed | M | Third-party integration |
|
||||
| [ADR-050](./0050-postgresql-function-observability.md) | PostgreSQL Fn Observability | Proposed | M | Depends on ADR-015 implementation |
|
||||
|
||||
### Category 6: Deployment & Operations
|
||||
|
||||
@@ -83,29 +84,37 @@ This document tracks the implementation status and estimated effort for all Arch
|
||||
|
||||
### Category 7: Frontend / User Interface
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ------------------------------------------------------------------------ | ------------------- | -------- | ------ | ------------------------------------------- |
|
||||
| [ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md) | State Management | Accepted | - | Fully implemented |
|
||||
| [ADR-012](./0012-frontend-component-library-and-design-system.md) | Component Library | Partial | L | Core components done, design tokens pending |
|
||||
| [ADR-025](./0025-internationalization-and-localization-strategy.md) | i18n & l10n | Proposed | XL | All UI strings need extraction |
|
||||
| [ADR-026](./0026-standardized-client-side-structured-logging.md) | Client-Side Logging | Accepted | - | Fully implemented |
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ------------------------------------------------------------------------ | -------------------- | -------- | ------ | ------------------------------------------- |
|
||||
| [ADR-005](./0005-frontend-state-management-and-server-cache-strategy.md) | State Management | Accepted | - | Fully implemented |
|
||||
| [ADR-012](./0012-frontend-component-library-and-design-system.md) | Component Library | Partial | L | Core components done, design tokens pending |
|
||||
| [ADR-025](./0025-internationalization-and-localization-strategy.md) | i18n & l10n | Proposed | XL | All UI strings need extraction |
|
||||
| [ADR-026](./0026-standardized-client-side-structured-logging.md) | Client-Side Logging | Accepted | - | Fully implemented |
|
||||
| [ADR-044](./0044-frontend-feature-organization.md) | Feature Organization | Accepted | - | Fully implemented |
|
||||
|
||||
### Category 8: Development Workflow & Quality
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ----------------------------------------------------------------------------- | -------------------- | -------- | ------ | ----------------- |
|
||||
| [ADR-010](./0010-testing-strategy-and-standards.md) | Testing Strategy | Accepted | - | Fully implemented |
|
||||
| [ADR-021](./0021-code-formatting-and-linting-unification.md) | Formatting & Linting | Accepted | - | Fully implemented |
|
||||
| [ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md) | Naming Conventions | Accepted | - | Fully implemented |
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| ----------------------------------------------------------------------------- | -------------------- | -------- | ------ | -------------------- |
|
||||
| [ADR-010](./0010-testing-strategy-and-standards.md) | Testing Strategy | Accepted | - | Fully implemented |
|
||||
| [ADR-021](./0021-code-formatting-and-linting-unification.md) | Formatting & Linting | Accepted | - | Fully implemented |
|
||||
| [ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md) | Naming Conventions | Accepted | - | Fully implemented |
|
||||
| [ADR-045](./0045-test-data-factories-and-fixtures.md) | Test Data Factories | Accepted | - | Fully implemented |
|
||||
| [ADR-047](./0047-project-file-and-folder-organization.md) | Project Organization | Proposed | XL | Major reorganization |
|
||||
|
||||
### Category 9: Architecture Patterns
|
||||
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| -------------------------------------------------- | -------------------- | -------- | ------ | ----------------- |
|
||||
| [ADR-034](./0034-repository-pattern-standards.md) | Repository Pattern | Accepted | - | Fully implemented |
|
||||
| [ADR-035](./0035-service-layer-architecture.md) | Service Layer | Accepted | - | Fully implemented |
|
||||
| [ADR-036](./0036-event-bus-and-pub-sub-pattern.md) | Event Bus | Accepted | - | Fully implemented |
|
||||
| [ADR-039](./0039-dependency-injection-pattern.md) | Dependency Injection | Accepted | - | Fully implemented |
|
||||
| ADR | Title | Status | Effort | Notes |
|
||||
| -------------------------------------------------------- | --------------------- | -------- | ------ | ----------------- |
|
||||
| [ADR-034](./0034-repository-pattern-standards.md) | Repository Pattern | Accepted | - | Fully implemented |
|
||||
| [ADR-035](./0035-service-layer-architecture.md) | Service Layer | Accepted | - | Fully implemented |
|
||||
| [ADR-036](./0036-event-bus-and-pub-sub-pattern.md) | Event Bus | Accepted | - | Fully implemented |
|
||||
| [ADR-039](./0039-dependency-injection-pattern.md) | Dependency Injection | Accepted | - | Fully implemented |
|
||||
| [ADR-041](./0041-ai-gemini-integration-architecture.md) | AI/Gemini Integration | Accepted | - | Fully implemented |
|
||||
| [ADR-042](./0042-email-and-notification-architecture.md) | Email & Notifications | Accepted | - | Fully implemented |
|
||||
| [ADR-043](./0043-express-middleware-pipeline.md) | Middleware Pipeline | Accepted | - | Fully implemented |
|
||||
| [ADR-046](./0046-image-processing-pipeline.md) | Image Processing | Accepted | - | Fully implemented |
|
||||
| [ADR-049](./0049-gamification-and-achievement-system.md) | Gamification System | Accepted | - | Fully implemented |
|
||||
|
||||
---
|
||||
|
||||
@@ -113,28 +122,38 @@ This document tracks the implementation status and estimated effort for all Arch
|
||||
|
||||
These ADRs are proposed but not yet implemented, ordered by suggested implementation priority:
|
||||
|
||||
| Priority | ADR | Title | Effort | Rationale |
|
||||
| -------- | ------- | ------------------------ | ------ | ----------------------------------------------------- |
|
||||
| 1 | ADR-018 | API Documentation | M | Improves developer experience, enables SDK generation |
|
||||
| 2 | ADR-015 | APM & Error Tracking | M | Production visibility, debugging |
|
||||
| 3 | ADR-024 | Feature Flags | M | Safer deployments, A/B testing |
|
||||
| 4 | ADR-023 | Schema Migrations v2 | L | Database evolution support |
|
||||
| 5 | ADR-029 | Secret Rotation | L | Security improvement |
|
||||
| 6 | ADR-008 | API Versioning | L | Future API evolution |
|
||||
| 7 | ADR-030 | Circuit Breaker | L | Resilience improvement |
|
||||
| 8 | ADR-022 | Real-time Notifications | XL | Major feature enhancement |
|
||||
| 9 | ADR-011 | Authorization & RBAC | XL | Advanced permission system |
|
||||
| 10 | ADR-025 | i18n & l10n | XL | Multi-language support |
|
||||
| 11 | ADR-031 | Data Retention & Privacy | XL | Compliance requirements |
|
||||
| Priority | ADR | Title | Effort | Rationale |
|
||||
| -------- | ------- | --------------------------- | ------ | ------------------------------------------------- |
|
||||
| 1 | ADR-015 | APM & Error Tracking | M | Production visibility, debugging |
|
||||
| 1b | ADR-050 | PostgreSQL Fn Observability | M | Database function visibility (depends on ADR-015) |
|
||||
| 2 | ADR-024 | Feature Flags | M | Safer deployments, A/B testing |
|
||||
| 3 | ADR-023 | Schema Migrations v2 | L | Database evolution support |
|
||||
| 4 | ADR-029 | Secret Rotation | L | Security improvement |
|
||||
| 5 | ADR-008 | API Versioning | L | Future API evolution |
|
||||
| 6 | ADR-030 | Circuit Breaker | L | Resilience improvement |
|
||||
| 7 | ADR-022 | Real-time Notifications | XL | Major feature enhancement |
|
||||
| 8 | ADR-011 | Authorization & RBAC | XL | Advanced permission system |
|
||||
| 9 | ADR-025 | i18n & l10n | XL | Multi-language support |
|
||||
| 10 | ADR-031 | Data Retention & Privacy | XL | Compliance requirements |
|
||||
|
||||
---
|
||||
|
||||
## Recent Implementation History
|
||||
|
||||
| Date | ADR | Change |
|
||||
| ---------- | ------- | --------------------------------------------------------------------------------------------- |
|
||||
| 2026-01-09 | ADR-026 | Fully implemented - all client-side components, hooks, and services now use structured logger |
|
||||
| 2026-01-09 | ADR-028 | Fully implemented - all routes, middleware, and tests updated |
|
||||
| Date | ADR | Change |
|
||||
| ---------- | ------- | ---------------------------------------------------------------------- |
|
||||
| 2026-01-11 | ADR-050 | Created - PostgreSQL function observability with fn_log() and Logstash |
|
||||
| 2026-01-11 | ADR-018 | Implemented - OpenAPI/Swagger documentation at /docs/api-docs |
|
||||
| 2026-01-11 | ADR-049 | Created - Gamification system, achievements, and testing requirements |
|
||||
| 2026-01-09 | ADR-047 | Created - Project file/folder organization with migration plan |
|
||||
| 2026-01-09 | ADR-041 | Created - AI/Gemini integration with model fallback and rate limiting |
|
||||
| 2026-01-09 | ADR-042 | Created - Email and notification architecture with BullMQ queuing |
|
||||
| 2026-01-09 | ADR-043 | Created - Express middleware pipeline ordering and patterns |
|
||||
| 2026-01-09 | ADR-044 | Created - Frontend feature-based folder organization |
|
||||
| 2026-01-09 | ADR-045 | Created - Test data factory pattern for mock generation |
|
||||
| 2026-01-09 | ADR-046 | Created - Image processing pipeline with Sharp and EXIF stripping |
|
||||
| 2026-01-09 | ADR-026 | Fully implemented - client-side structured logger |
|
||||
| 2026-01-09 | ADR-028 | Fully implemented - all routes, middleware, and tests updated |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -33,6 +33,7 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-029](./0029-secret-rotation-and-key-management.md)**: Secret Rotation and Key Management Strategy (Proposed)
|
||||
**[ADR-032](./0032-rate-limiting-strategy.md)**: Rate Limiting Strategy (Accepted)
|
||||
**[ADR-033](./0033-file-upload-and-storage-strategy.md)**: File Upload and Storage Strategy (Accepted)
|
||||
**[ADR-048](./0048-authentication-strategy.md)**: Authentication Strategy (Partially Implemented)
|
||||
|
||||
## 5. Observability & Monitoring
|
||||
|
||||
@@ -54,12 +55,16 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-012](./0012-frontend-component-library-and-design-system.md)**: Frontend Component Library and Design System (Partially Implemented)
|
||||
**[ADR-025](./0025-internationalization-and-localization-strategy.md)**: Internationalization (i18n) and Localization (l10n) Strategy (Proposed)
|
||||
**[ADR-026](./0026-standardized-client-side-structured-logging.md)**: Standardized Client-Side Structured Logging (Proposed)
|
||||
**[ADR-044](./0044-frontend-feature-organization.md)**: Frontend Feature Organization Pattern (Accepted)
|
||||
|
||||
## 8. Development Workflow & Quality
|
||||
|
||||
**[ADR-010](./0010-testing-strategy-and-standards.md)**: Testing Strategy and Standards (Accepted)
|
||||
**[ADR-021](./0021-code-formatting-and-linting-unification.md)**: Code Formatting and Linting Unification (Accepted)
|
||||
**[ADR-027](./0027-standardized-naming-convention-for-ai-and-database-types.md)**: Standardized Naming Convention for AI and Database Types (Accepted)
|
||||
**[ADR-040](./0040-testing-economics-and-priorities.md)**: Testing Economics and Priorities (Accepted)
|
||||
**[ADR-045](./0045-test-data-factories-and-fixtures.md)**: Test Data Factories and Fixtures (Accepted)
|
||||
**[ADR-047](./0047-project-file-and-folder-organization.md)**: Project File and Folder Organization (Proposed)
|
||||
|
||||
## 9. Architecture Patterns
|
||||
|
||||
@@ -67,3 +72,7 @@ This directory contains a log of the architectural decisions made for the Flyer
|
||||
**[ADR-035](./0035-service-layer-architecture.md)**: Service Layer Architecture (Accepted)
|
||||
**[ADR-036](./0036-event-bus-and-pub-sub-pattern.md)**: Event Bus and Pub/Sub Pattern (Accepted)
|
||||
**[ADR-039](./0039-dependency-injection-pattern.md)**: Dependency Injection Pattern (Accepted)
|
||||
**[ADR-041](./0041-ai-gemini-integration-architecture.md)**: AI/Gemini Integration Architecture (Accepted)
|
||||
**[ADR-042](./0042-email-and-notification-architecture.md)**: Email and Notification Architecture (Accepted)
|
||||
**[ADR-043](./0043-express-middleware-pipeline.md)**: Express Middleware Pipeline Architecture (Accepted)
|
||||
**[ADR-046](./0046-image-processing-pipeline.md)**: Image Processing Pipeline (Accepted)
|
||||
|
||||
1138
package-lock.json
generated
1138
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
17
package.json
17
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "flyer-crawler",
|
||||
"private": true,
|
||||
"version": "0.9.79",
|
||||
"version": "0.9.92",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
||||
@@ -9,11 +9,11 @@
|
||||
"start": "npm run start:prod",
|
||||
"build": "vite build",
|
||||
"preview": "vite preview",
|
||||
"test": "cross-env NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
|
||||
"test": "node scripts/check-linux.js && cross-env NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
|
||||
"test-wsl": "cross-env NODE_ENV=test vitest run",
|
||||
"test:coverage": "npm run clean && npm run test:unit -- --coverage && npm run test:integration -- --coverage",
|
||||
"test:unit": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
|
||||
"test:integration": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
|
||||
"test:unit": "node scripts/check-linux.js && cross-env NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
|
||||
"test:integration": "node scripts/check-linux.js && cross-env NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
|
||||
"format": "prettier --write .",
|
||||
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
|
||||
"type-check": "tsc --noEmit",
|
||||
@@ -31,6 +31,8 @@
|
||||
"@bull-board/api": "^6.14.2",
|
||||
"@bull-board/express": "^6.14.2",
|
||||
"@google/genai": "^1.30.0",
|
||||
"@sentry/node": "^10.32.1",
|
||||
"@sentry/react": "^10.32.1",
|
||||
"@tanstack/react-query": "^5.90.12",
|
||||
"@types/connect-timeout": "^1.9.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
@@ -65,9 +67,12 @@
|
||||
"react-router-dom": "^7.9.6",
|
||||
"recharts": "^3.4.1",
|
||||
"sharp": "^0.34.5",
|
||||
"swagger-jsdoc": "^6.2.8",
|
||||
"swagger-ui-express": "^5.0.1",
|
||||
"tsx": "^4.20.6",
|
||||
"zod": "^4.2.1",
|
||||
"zxcvbn": "^4.4.2"
|
||||
"zxcvbn": "^4.4.2",
|
||||
"zxing-wasm": "^2.2.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@tailwindcss/postcss": "4.1.17",
|
||||
@@ -96,6 +101,8 @@
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@types/sharp": "^0.31.1",
|
||||
"@types/supertest": "^6.0.3",
|
||||
"@types/swagger-jsdoc": "^6.0.4",
|
||||
"@types/swagger-ui-express": "^4.1.8",
|
||||
"@types/zxcvbn": "^4.4.5",
|
||||
"@typescript-eslint/eslint-plugin": "^8.47.0",
|
||||
"@typescript-eslint/parser": "^8.47.0",
|
||||
|
||||
@@ -1,123 +1,116 @@
|
||||
# ADR-0005 Master Migration Status
|
||||
|
||||
**Last Updated**: 2026-01-08
|
||||
**Last Updated**: 2026-01-10
|
||||
|
||||
This document tracks the complete migration status of all data fetching patterns in the application to TanStack Query (React Query) as specified in ADR-0005.
|
||||
|
||||
## Migration Overview
|
||||
|
||||
| Category | Total | Migrated | Remaining | % Complete |
|
||||
|----------|-------|----------|-----------|------------|
|
||||
| **User Features** | 5 queries + 7 mutations | 12/12 | 0 | ✅ 100% |
|
||||
| **Admin Features** | 3 queries | 0/3 | 3 | ❌ 0% |
|
||||
| **Analytics Features** | 2 queries | 0/2 | 2 | ❌ 0% |
|
||||
| **Legacy Hooks** | 3 hooks | 0/3 | 3 | ❌ 0% |
|
||||
| **TOTAL** | 20 items | 12/20 | 8 | 🟡 60% |
|
||||
| Category | Total | Migrated | Remaining | % Complete |
|
||||
| ---------------------- | ------------------------ | -------- | --------- | ---------- |
|
||||
| **User Features** | 7 queries + 8 mutations | 15/15 | 0 | ✅ 100% |
|
||||
| **User Hooks** | 3 hooks | 3/3 | 0 | ✅ 100% |
|
||||
| **Admin Features** | 4 queries + 3 components | 7/7 | 0 | ✅ 100% |
|
||||
| **Analytics Features** | 3 queries + 2 components | 5/5 | 0 | ✅ 100% |
|
||||
| **Legacy Hooks** | 4 items | 4/4 | 0 | ✅ 100% |
|
||||
| **Phase 8 Queries** | 3 queries | 3/3 | 0 | ✅ 100% |
|
||||
| **Phase 8 Components** | 3 components | 3/3 | 0 | ✅ 100% |
|
||||
| **TOTAL** | 40 items | 40/40 | 0 | ✅ 100% |
|
||||
|
||||
---
|
||||
|
||||
## ✅ COMPLETED: User-Facing Features (Phase 1-3)
|
||||
|
||||
### Query Hooks (5)
|
||||
### Query Hooks (7)
|
||||
|
||||
| Hook | File | Query Key | Status | Phase |
|
||||
|------|------|-----------|--------|-------|
|
||||
| useFlyersQuery | [src/hooks/queries/useFlyersQuery.ts](../src/hooks/queries/useFlyersQuery.ts) | `['flyers', { limit, offset }]` | ✅ Done | 1 |
|
||||
| useFlyerItemsQuery | [src/hooks/queries/useFlyerItemsQuery.ts](../src/hooks/queries/useFlyerItemsQuery.ts) | `['flyer-items', flyerId]` | ✅ Done | 2 |
|
||||
| useMasterItemsQuery | [src/hooks/queries/useMasterItemsQuery.ts](../src/hooks/queries/useMasterItemsQuery.ts) | `['master-items']` | ✅ Done | 2 |
|
||||
| useWatchedItemsQuery | [src/hooks/queries/useWatchedItemsQuery.ts](../src/hooks/queries/useWatchedItemsQuery.ts) | `['watched-items']` | ✅ Done | 1 |
|
||||
| useShoppingListsQuery | [src/hooks/queries/useShoppingListsQuery.ts](../src/hooks/queries/useShoppingListsQuery.ts) | `['shopping-lists']` | ✅ Done | 1 |
|
||||
| Hook | File | Query Key | Status | Phase |
|
||||
| --------------------- | ------------------------------------------------------------------------------------------- | ------------------------------- | ------- | ----- |
|
||||
| useFlyersQuery | [src/hooks/queries/useFlyersQuery.ts](../src/hooks/queries/useFlyersQuery.ts) | `['flyers', { limit, offset }]` | ✅ Done | 1 |
|
||||
| useFlyerItemsQuery | [src/hooks/queries/useFlyerItemsQuery.ts](../src/hooks/queries/useFlyerItemsQuery.ts) | `['flyer-items', flyerId]` | ✅ Done | 2 |
|
||||
| useMasterItemsQuery | [src/hooks/queries/useMasterItemsQuery.ts](../src/hooks/queries/useMasterItemsQuery.ts) | `['master-items']` | ✅ Done | 2 |
|
||||
| useWatchedItemsQuery | [src/hooks/queries/useWatchedItemsQuery.ts](../src/hooks/queries/useWatchedItemsQuery.ts) | `['watched-items']` | ✅ Done | 1 |
|
||||
| useShoppingListsQuery | [src/hooks/queries/useShoppingListsQuery.ts](../src/hooks/queries/useShoppingListsQuery.ts) | `['shopping-lists']` | ✅ Done | 1 |
|
||||
| useUserAddressQuery | [src/hooks/queries/useUserAddressQuery.ts](../src/hooks/queries/useUserAddressQuery.ts) | `['user-address', addressId]` | ✅ Done | 7 |
|
||||
| useAuthProfileQuery | [src/hooks/queries/useAuthProfileQuery.ts](../src/hooks/queries/useAuthProfileQuery.ts) | `['auth-profile']` | ✅ Done | 7 |
|
||||
|
||||
### Mutation Hooks (7)
|
||||
### Mutation Hooks (8)
|
||||
|
||||
| Hook | File | Invalidates | Status | Phase |
|
||||
|------|------|-------------|--------|-------|
|
||||
| useAddWatchedItemMutation | [src/hooks/mutations/useAddWatchedItemMutation.ts](../src/hooks/mutations/useAddWatchedItemMutation.ts) | `['watched-items']` | ✅ Done | 3 |
|
||||
| useRemoveWatchedItemMutation | [src/hooks/mutations/useRemoveWatchedItemMutation.ts](../src/hooks/mutations/useRemoveWatchedItemMutation.ts) | `['watched-items']` | ✅ Done | 3 |
|
||||
| useCreateShoppingListMutation | [src/hooks/mutations/useCreateShoppingListMutation.ts](../src/hooks/mutations/useCreateShoppingListMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useDeleteShoppingListMutation | [src/hooks/mutations/useDeleteShoppingListMutation.ts](../src/hooks/mutations/useDeleteShoppingListMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useAddShoppingListItemMutation | [src/hooks/mutations/useAddShoppingListItemMutation.ts](../src/hooks/mutations/useAddShoppingListItemMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useUpdateShoppingListItemMutation | [src/hooks/mutations/useUpdateShoppingListItemMutation.ts](../src/hooks/mutations/useUpdateShoppingListItemMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useRemoveShoppingListItemMutation | [src/hooks/mutations/useRemoveShoppingListItemMutation.ts](../src/hooks/mutations/useRemoveShoppingListItemMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| Hook | File | Invalidates | Status | Phase |
|
||||
| --------------------------------- | ----------------------------------------------------------------------------------------------------------------------- | -------------------- | ------- | ----- |
|
||||
| useAddWatchedItemMutation | [src/hooks/mutations/useAddWatchedItemMutation.ts](../src/hooks/mutations/useAddWatchedItemMutation.ts) | `['watched-items']` | ✅ Done | 3 |
|
||||
| useRemoveWatchedItemMutation | [src/hooks/mutations/useRemoveWatchedItemMutation.ts](../src/hooks/mutations/useRemoveWatchedItemMutation.ts) | `['watched-items']` | ✅ Done | 3 |
|
||||
| useCreateShoppingListMutation | [src/hooks/mutations/useCreateShoppingListMutation.ts](../src/hooks/mutations/useCreateShoppingListMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useDeleteShoppingListMutation | [src/hooks/mutations/useDeleteShoppingListMutation.ts](../src/hooks/mutations/useDeleteShoppingListMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useAddShoppingListItemMutation | [src/hooks/mutations/useAddShoppingListItemMutation.ts](../src/hooks/mutations/useAddShoppingListItemMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useUpdateShoppingListItemMutation | [src/hooks/mutations/useUpdateShoppingListItemMutation.ts](../src/hooks/mutations/useUpdateShoppingListItemMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useRemoveShoppingListItemMutation | [src/hooks/mutations/useRemoveShoppingListItemMutation.ts](../src/hooks/mutations/useRemoveShoppingListItemMutation.ts) | `['shopping-lists']` | ✅ Done | 3 |
|
||||
| useGeocodeMutation | [src/hooks/mutations/useGeocodeMutation.ts](../src/hooks/mutations/useGeocodeMutation.ts) | N/A | ✅ Done | 7 |
|
||||
|
||||
### Providers Migrated (4)
|
||||
### Providers Migrated (5)
|
||||
|
||||
| Provider | Uses | Status |
|
||||
|----------|------|--------|
|
||||
| [AppProviders.tsx](../src/providers/AppProviders.tsx) | QueryClientProvider wrapper | ✅ Done |
|
||||
| [FlyersProvider.tsx](../src/providers/FlyersProvider.tsx) | useFlyersQuery | ✅ Done |
|
||||
| [MasterItemsProvider.tsx](../src/providers/MasterItemsProvider.tsx) | useMasterItemsQuery | ✅ Done |
|
||||
| [UserDataProvider.tsx](../src/providers/UserDataProvider.tsx) | useWatchedItemsQuery + useShoppingListsQuery | ✅ Done |
|
||||
| Provider | Uses | Status |
|
||||
| ------------------------------------------------------------------- | -------------------------------------------- | ------- |
|
||||
| [AppProviders.tsx](../src/providers/AppProviders.tsx) | QueryClientProvider wrapper | ✅ Done |
|
||||
| [FlyersProvider.tsx](../src/providers/FlyersProvider.tsx) | useFlyersQuery | ✅ Done |
|
||||
| [MasterItemsProvider.tsx](../src/providers/MasterItemsProvider.tsx) | useMasterItemsQuery | ✅ Done |
|
||||
| [UserDataProvider.tsx](../src/providers/UserDataProvider.tsx) | useWatchedItemsQuery + useShoppingListsQuery | ✅ Done |
|
||||
| [AuthProvider.tsx](../src/providers/AuthProvider.tsx) | useAuthProfileQuery | ✅ Done |
|
||||
|
||||
---
|
||||
|
||||
## ❌ NOT MIGRATED: Admin & Analytics Features
|
||||
## ✅ COMPLETED: Admin Features (Phase 5)
|
||||
|
||||
### High Priority - Admin Features
|
||||
### Admin Query Hooks (4)
|
||||
|
||||
| Feature | Component/Hook | Current Pattern | API Calls | Priority |
|
||||
|---------|----------------|-----------------|-----------|----------|
|
||||
| **Activity Log** | [ActivityLog.tsx](../src/components/ActivityLog.tsx) | useState + useEffect | `fetchActivityLog(20, 0)` | 🔴 HIGH |
|
||||
| **Admin Stats** | [AdminStatsPage.tsx](../src/pages/AdminStatsPage.tsx) | useState + useEffect | `getApplicationStats()` | 🔴 HIGH |
|
||||
| **Corrections** | [CorrectionsPage.tsx](../src/pages/CorrectionsPage.tsx) | useState + useEffect + Promise.all | `getSuggestedCorrections()`, `fetchMasterItems()`, `fetchCategories()` | 🔴 HIGH |
|
||||
| Hook | File | Query Key | Status | Phase |
|
||||
| ---------------------------- | --------------------------------------------------------------------------------------------------------- | ------------------------------------- | ------- | ----- |
|
||||
| useActivityLogQuery | [src/hooks/queries/useActivityLogQuery.ts](../src/hooks/queries/useActivityLogQuery.ts) | `['activity-log', { limit, offset }]` | ✅ Done | 5 |
|
||||
| useApplicationStatsQuery | [src/hooks/queries/useApplicationStatsQuery.ts](../src/hooks/queries/useApplicationStatsQuery.ts) | `['application-stats']` | ✅ Done | 5 |
|
||||
| useSuggestedCorrectionsQuery | [src/hooks/queries/useSuggestedCorrectionsQuery.ts](../src/hooks/queries/useSuggestedCorrectionsQuery.ts) | `['suggested-corrections']` | ✅ Done | 5 |
|
||||
| useCategoriesQuery | [src/hooks/queries/useCategoriesQuery.ts](../src/hooks/queries/useCategoriesQuery.ts) | `['categories']` | ✅ Done | 5 |
|
||||
|
||||
**Issues:**
|
||||
- Manual state management with useState/useEffect
|
||||
- No caching - data refetches on every mount
|
||||
- No automatic refetching or background updates
|
||||
- Manual loading/error state handling
|
||||
- Duplicate API calls (CorrectionsPage fetches master items separately)
|
||||
### Admin Components Migrated (3)
|
||||
|
||||
**Recommended Query Hooks to Create:**
|
||||
```typescript
|
||||
// src/hooks/queries/useActivityLogQuery.ts
|
||||
queryKey: ['activity-log', { limit, offset }]
|
||||
staleTime: 30 seconds (frequently updated)
|
||||
| Component | Uses | Status |
|
||||
| ------------------------------------------------------------- | --------------------------------------------------------------------- | ------- |
|
||||
| [ActivityLog.tsx](../src/pages/admin/ActivityLog.tsx) | useActivityLogQuery | ✅ Done |
|
||||
| [AdminStatsPage.tsx](../src/pages/admin/AdminStatsPage.tsx) | useApplicationStatsQuery | ✅ Done |
|
||||
| [CorrectionsPage.tsx](../src/pages/admin/CorrectionsPage.tsx) | useSuggestedCorrectionsQuery, useMasterItemsQuery, useCategoriesQuery | ✅ Done |
|
||||
|
||||
// src/hooks/queries/useApplicationStatsQuery.ts
|
||||
queryKey: ['application-stats']
|
||||
staleTime: 2 minutes (changes moderately)
|
||||
---
|
||||
|
||||
// src/hooks/queries/useSuggestedCorrectionsQuery.ts
|
||||
queryKey: ['suggested-corrections']
|
||||
staleTime: 1 minute
|
||||
## ✅ COMPLETED: Analytics Features (Phase 6)
|
||||
|
||||
// src/hooks/queries/useCategoriesQuery.ts
|
||||
queryKey: ['categories']
|
||||
staleTime: 10 minutes (rarely changes)
|
||||
```
|
||||
### Analytics Query Hooks (3)
|
||||
|
||||
### Medium Priority - Analytics Features
|
||||
| Hook | File | Query Key | Status | Phase |
|
||||
| --------------------------- | ------------------------------------------------------------------------------------------------------- | --------------------------------- | ------- | ----- |
|
||||
| useBestSalePricesQuery | [src/hooks/queries/useBestSalePricesQuery.ts](../src/hooks/queries/useBestSalePricesQuery.ts) | `['best-sale-prices']` | ✅ Done | 6 |
|
||||
| useFlyerItemsForFlyersQuery | [src/hooks/queries/useFlyerItemsForFlyersQuery.ts](../src/hooks/queries/useFlyerItemsForFlyersQuery.ts) | `['flyer-items-batch', flyerIds]` | ✅ Done | 6 |
|
||||
| useFlyerItemCountQuery | [src/hooks/queries/useFlyerItemCountQuery.ts](../src/hooks/queries/useFlyerItemCountQuery.ts) | `['flyer-item-count', flyerIds]` | ✅ Done | 6 |
|
||||
|
||||
| Feature | Component/Hook | Current Pattern | API Calls | Priority |
|
||||
|---------|----------------|-----------------|-----------|----------|
|
||||
| **My Deals** | [MyDealsPage.tsx](../src/pages/MyDealsPage.tsx) | useState + useEffect | `fetchBestSalePrices()` | 🟡 MEDIUM |
|
||||
| **Active Deals** | [useActiveDeals.tsx](../src/hooks/useActiveDeals.tsx) | useApi hook | `countFlyerItemsForFlyers()`, `fetchFlyerItemsForFlyers()` | 🟡 MEDIUM |
|
||||
### Analytics Components/Hooks Migrated (2)
|
||||
|
||||
**Issues:**
|
||||
- useActiveDeals uses old `useApi` hook pattern
|
||||
- MyDealsPage has manual state management
|
||||
- No caching for best sale prices
|
||||
- No relationship to watched-items cache (could be optimized)
|
||||
| Component/Hook | Uses | Status |
|
||||
| ----------------------------------------------------- | --------------------------------------------------- | ------- |
|
||||
| [MyDealsPage.tsx](../src/pages/MyDealsPage.tsx) | useBestSalePricesQuery | ✅ Done |
|
||||
| [useActiveDeals.tsx](../src/hooks/useActiveDeals.tsx) | useFlyerItemsForFlyersQuery, useFlyerItemCountQuery | ✅ Done |
|
||||
|
||||
**Recommended Query Hooks to Create:**
|
||||
```typescript
|
||||
// src/hooks/queries/useBestSalePricesQuery.ts
|
||||
queryKey: ['best-sale-prices', watchedItemIds]
|
||||
staleTime: 2 minutes
|
||||
// Should invalidate when flyers or flyer-items update
|
||||
**Benefits Achieved:**
|
||||
|
||||
// Refactor useActiveDeals to use TanStack Query
|
||||
// Could share cache with flyer-items query
|
||||
```
|
||||
- ✅ Removed useApi dependency from analytics features
|
||||
- ✅ Automatic caching of deal data (2-5 minute stale times)
|
||||
- ✅ Consistent error handling via TanStack Query
|
||||
- ✅ Batch fetching for flyer items (single query for multiple flyers)
|
||||
|
||||
### Low Priority - Voice Lab
|
||||
|
||||
| Feature | Component | Current Pattern | Priority |
|
||||
|---------|-----------|-----------------|----------|
|
||||
| **Voice Lab** | [VoiceLabPage.tsx](../src/pages/VoiceLabPage.tsx) | Direct async/await | 🟢 LOW |
|
||||
| Feature | Component | Current Pattern | Priority |
|
||||
| ------------- | ------------------------------------------------- | ------------------ | -------- |
|
||||
| **Voice Lab** | [VoiceLabPage.tsx](../src/pages/VoiceLabPage.tsx) | Direct async/await | 🟢 LOW |
|
||||
|
||||
**Notes:**
|
||||
|
||||
- Event-driven API calls (not data fetching)
|
||||
- Speech generation and voice sessions
|
||||
- Mutation-like operations, not query-like
|
||||
@@ -125,107 +118,113 @@ staleTime: 2 minutes
|
||||
|
||||
---
|
||||
|
||||
## ⚠️ LEGACY HOOKS STILL IN USE
|
||||
## ✅ COMPLETED: Legacy Hook Cleanup (Phase 7)
|
||||
|
||||
### Hooks to Deprecate/Remove
|
||||
### Hooks Removed
|
||||
|
||||
| Hook | File | Used By | Status |
|
||||
|------|------|---------|--------|
|
||||
| **useApi** | [src/hooks/useApi.ts](../src/hooks/useApi.ts) | useActiveDeals, useWatchedItems, useShoppingLists | ⚠️ Active |
|
||||
| **useApiOnMount** | [src/hooks/useApiOnMount.ts](../src/hooks/useApiOnMount.ts) | None (deprecated) | ⚠️ Remove |
|
||||
| **useInfiniteQuery** | [src/hooks/useInfiniteQuery.ts](../src/hooks/useInfiniteQuery.ts) | None (deprecated) | ⚠️ Remove |
|
||||
| Hook | Former File | Replaced By | Status |
|
||||
| ----------------- | ------------------------------ | -------------------- | ---------- |
|
||||
| **useApi** | ~~src/hooks/useApi.ts~~ | TanStack Query hooks | ✅ Removed |
|
||||
| **useApiOnMount** | ~~src/hooks/useApiOnMount.ts~~ | TanStack Query hooks | ✅ Removed |
|
||||
|
||||
**Plan:**
|
||||
- Phase 4: Refactor useWatchedItems/useShoppingLists to use TanStack Query mutations
|
||||
- Phase 5: Refactor useActiveDeals to use TanStack Query
|
||||
- Phase 6: Remove useApi, useApiOnMount, custom useInfiniteQuery
|
||||
### Additional Hooks Created (Phase 7)
|
||||
|
||||
| Hook | File | Purpose |
|
||||
| ------------------- | ----------------------------------------------------------------------------------------- | -------------------------------- |
|
||||
| useUserAddressQuery | [src/hooks/queries/useUserAddressQuery.ts](../src/hooks/queries/useUserAddressQuery.ts) | Fetch user address by ID |
|
||||
| useAuthProfileQuery | [src/hooks/queries/useAuthProfileQuery.ts](../src/hooks/queries/useAuthProfileQuery.ts) | Fetch authenticated user profile |
|
||||
| useGeocodeMutation | [src/hooks/mutations/useGeocodeMutation.ts](../src/hooks/mutations/useGeocodeMutation.ts) | Geocode address strings |
|
||||
|
||||
### Files Modified (Phase 7)
|
||||
|
||||
| File | Change |
|
||||
| --------------------------------------------------------- | ---------------------------------------------------------- |
|
||||
| [useProfileAddress.ts](../src/hooks/useProfileAddress.ts) | Refactored to use useUserAddressQuery + useGeocodeMutation |
|
||||
| [AuthProvider.tsx](../src/providers/AuthProvider.tsx) | Refactored to use useAuthProfileQuery |
|
||||
|
||||
---
|
||||
|
||||
## 📊 MIGRATION PHASES
|
||||
|
||||
### ✅ Phase 1: Core Queries (Complete)
|
||||
|
||||
- Infrastructure setup (QueryClientProvider)
|
||||
- Flyers, Watched Items, Shopping Lists queries
|
||||
- Providers refactored
|
||||
|
||||
### ✅ Phase 2: Additional Queries (Complete)
|
||||
|
||||
- Master Items query
|
||||
- Flyer Items query
|
||||
- Per-resource caching strategies
|
||||
|
||||
### ✅ Phase 3: Mutations (Complete)
|
||||
|
||||
- All watched items mutations
|
||||
- All shopping list mutations
|
||||
- Automatic cache invalidation
|
||||
|
||||
### 🔄 Phase 4: Hook Refactoring (Planned)
|
||||
- [ ] Refactor useWatchedItems to use mutation hooks
|
||||
- [ ] Refactor useShoppingLists to use mutation hooks
|
||||
- [ ] Remove deprecated setters from context
|
||||
### ✅ Phase 4: Hook Refactoring (Complete)
|
||||
|
||||
### ⏳ Phase 5: Admin Features (Not Started)
|
||||
- [ ] Create useActivityLogQuery
|
||||
- [ ] Create useApplicationStatsQuery
|
||||
- [ ] Create useSuggestedCorrectionsQuery
|
||||
- [ ] Create useCategoriesQuery
|
||||
- [ ] Migrate ActivityLog.tsx
|
||||
- [ ] Migrate AdminStatsPage.tsx
|
||||
- [ ] Migrate CorrectionsPage.tsx
|
||||
- [x] Refactor useWatchedItems to use mutation hooks
|
||||
- [x] Refactor useShoppingLists to use mutation hooks
|
||||
- [x] Remove deprecated setters from context
|
||||
|
||||
### ⏳ Phase 6: Analytics Features (Not Started)
|
||||
- [ ] Create useBestSalePricesQuery
|
||||
- [ ] Migrate MyDealsPage.tsx
|
||||
- [ ] Refactor useActiveDeals to use TanStack Query
|
||||
### ✅ Phase 5: Admin Features (Complete)
|
||||
|
||||
### ⏳ Phase 7: Cleanup (Not Started)
|
||||
- [ ] Remove useApi hook
|
||||
- [ ] Remove useApiOnMount hook
|
||||
- [ ] Remove custom useInfiniteQuery hook
|
||||
- [ ] Remove all stub implementations
|
||||
- [ ] Update all tests
|
||||
- [x] Create useActivityLogQuery
|
||||
- [x] Create useApplicationStatsQuery
|
||||
- [x] Create useSuggestedCorrectionsQuery
|
||||
- [x] Create useCategoriesQuery
|
||||
- [x] Migrate ActivityLog.tsx
|
||||
- [x] Migrate AdminStatsPage.tsx
|
||||
- [x] Migrate CorrectionsPage.tsx
|
||||
|
||||
### ✅ Phase 6: Analytics Features (Complete - 2026-01-10)
|
||||
|
||||
- [x] Create useBestSalePricesQuery
|
||||
- [x] Create useFlyerItemsForFlyersQuery
|
||||
- [x] Create useFlyerItemCountQuery
|
||||
- [x] Migrate MyDealsPage.tsx
|
||||
- [x] Refactor useActiveDeals to use TanStack Query
|
||||
|
||||
### ✅ Phase 7: Cleanup (Complete - 2026-01-10)
|
||||
|
||||
- [x] Create useUserAddressQuery
|
||||
- [x] Create useAuthProfileQuery
|
||||
- [x] Create useGeocodeMutation
|
||||
- [x] Migrate useProfileAddress from useApi to TanStack Query
|
||||
- [x] Migrate AuthProvider from useApi to TanStack Query
|
||||
- [x] Remove useApi hook
|
||||
- [x] Remove useApiOnMount hook
|
||||
|
||||
### ✅ Phase 8: Additional Component Migration (Complete - 2026-01-10)
|
||||
|
||||
- [x] Create useUserProfileDataQuery (combined profile + achievements)
|
||||
- [x] Create useLeaderboardQuery (public leaderboard data)
|
||||
- [x] Create usePriceHistoryQuery (historical price data for watched items)
|
||||
- [x] Refactor useUserProfileData to use TanStack Query
|
||||
- [x] Refactor Leaderboard.tsx to use useLeaderboardQuery
|
||||
- [x] Refactor PriceHistoryChart.tsx to use usePriceHistoryQuery
|
||||
|
||||
---
|
||||
|
||||
## 🎯 RECOMMENDED NEXT STEPS
|
||||
## 🎉 MIGRATION COMPLETE
|
||||
|
||||
### Option A: Complete User Features First (Phase 4)
|
||||
Focus on finishing the user-facing feature migration by refactoring the remaining custom hooks. This provides a complete, polished user experience.
|
||||
The TanStack Query migration is **100% complete**. All data fetching in the application now uses TanStack Query for:
|
||||
|
||||
**Pros:**
|
||||
- Completes the user-facing story
|
||||
- Simplifies codebase for user features
|
||||
- Sets pattern for admin features
|
||||
|
||||
**Cons:**
|
||||
- Admin features still use old patterns
|
||||
|
||||
### Option B: Migrate Admin Features (Phase 5)
|
||||
Create query hooks for admin features to improve admin user experience and establish complete ADR-0005 coverage.
|
||||
|
||||
**Pros:**
|
||||
- Faster admin pages with caching
|
||||
- Consistent patterns across entire app
|
||||
- Better for admin users
|
||||
|
||||
**Cons:**
|
||||
- User-facing hooks still partially old pattern
|
||||
|
||||
### Option C: Parallel Migration (Phase 4 + 5)
|
||||
Work on both user hook refactoring and admin feature migration simultaneously.
|
||||
|
||||
**Pros:**
|
||||
- Fastest path to complete migration
|
||||
- Comprehensive coverage quickly
|
||||
|
||||
**Cons:**
|
||||
- Larger scope, more testing needed
|
||||
- **Automatic caching** - Server data is cached and shared across components
|
||||
- **Background refetching** - Stale data is automatically refreshed
|
||||
- **Loading/error states** - Consistent handling across the entire application
|
||||
- **Cache invalidation** - Mutations automatically invalidate related queries
|
||||
- **DevTools** - React Query DevTools available in development mode
|
||||
|
||||
---
|
||||
|
||||
## 📝 NOTES
|
||||
|
||||
### Query Key Organization
|
||||
|
||||
Currently using literal strings for query keys. Consider creating a centralized query keys file:
|
||||
|
||||
```typescript
|
||||
@@ -246,24 +245,29 @@ export const queryKeys = {
|
||||
```
|
||||
|
||||
### Cache Invalidation Strategy
|
||||
|
||||
Admin features may need different invalidation strategies:
|
||||
|
||||
- Activity log should refetch after mutations
|
||||
- Stats should refetch after significant operations
|
||||
- Corrections should refetch after approving/rejecting
|
||||
|
||||
### Stale Time Recommendations
|
||||
|
||||
| Data Type | Stale Time | Reasoning |
|
||||
|-----------|------------|-----------|
|
||||
| Master Items | 10 minutes | Rarely changes |
|
||||
| Categories | 10 minutes | Rarely changes |
|
||||
| Flyers | 2 minutes | Moderate changes |
|
||||
| Flyer Items | 5 minutes | Static once created |
|
||||
| User Lists | 1 minute | Frequent changes |
|
||||
| Admin Stats | 2 minutes | Moderate changes |
|
||||
| Activity Log | 30 seconds | Frequently updated |
|
||||
| Corrections | 1 minute | Moderate changes |
|
||||
| Best Prices | 2 minutes | Recalculated periodically |
|
||||
| Data Type | Stale Time | Reasoning |
|
||||
| ----------------- | ---------- | ----------------------------------- |
|
||||
| Master Items | 10 minutes | Rarely changes |
|
||||
| Categories | 10 minutes | Rarely changes |
|
||||
| Flyers | 2 minutes | Moderate changes |
|
||||
| Flyer Items | 5 minutes | Static once created |
|
||||
| User Lists | 1 minute | Frequent changes |
|
||||
| Admin Stats | 2 minutes | Moderate changes |
|
||||
| Activity Log | 30 seconds | Frequently updated |
|
||||
| Corrections | 1 minute | Moderate changes |
|
||||
| Best Prices | 2 minutes | Recalculated periodically |
|
||||
| User Profile Data | 5 minutes | User-specific, changes infrequently |
|
||||
| Leaderboard | 2 minutes | Public data, moderate updates |
|
||||
| Price History | 10 minutes | Historical data, rarely changes |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,88 +0,0 @@
|
||||
# PowerShell script to run integration tests with containerized infrastructure
|
||||
# Sets up environment variables and runs the integration test suite
|
||||
|
||||
Write-Host "=== Flyer Crawler Integration Test Runner ===" -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
# Check if containers are running
|
||||
Write-Host "Checking container status..." -ForegroundColor Yellow
|
||||
$postgresRunning = podman ps --filter "name=flyer-crawler-postgres" --format "{{.Names}}" 2>$null
|
||||
$redisRunning = podman ps --filter "name=flyer-crawler-redis" --format "{{.Names}}" 2>$null
|
||||
|
||||
if (-not $postgresRunning) {
|
||||
Write-Host "ERROR: PostgreSQL container is not running!" -ForegroundColor Red
|
||||
Write-Host "Start it with: podman start flyer-crawler-postgres" -ForegroundColor Yellow
|
||||
exit 1
|
||||
}
|
||||
|
||||
if (-not $redisRunning) {
|
||||
Write-Host "ERROR: Redis container is not running!" -ForegroundColor Red
|
||||
Write-Host "Start it with: podman start flyer-crawler-redis" -ForegroundColor Yellow
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Host "✓ PostgreSQL container: $postgresRunning" -ForegroundColor Green
|
||||
Write-Host "✓ Redis container: $redisRunning" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
|
||||
# Set environment variables for integration tests
|
||||
Write-Host "Setting environment variables..." -ForegroundColor Yellow
|
||||
|
||||
$env:NODE_ENV = "test"
|
||||
$env:DB_HOST = "localhost"
|
||||
$env:DB_USER = "postgres"
|
||||
$env:DB_PASSWORD = "postgres"
|
||||
$env:DB_NAME = "flyer_crawler_dev"
|
||||
$env:DB_PORT = "5432"
|
||||
$env:REDIS_URL = "redis://localhost:6379"
|
||||
$env:REDIS_PASSWORD = ""
|
||||
$env:FRONTEND_URL = "http://localhost:5173"
|
||||
$env:VITE_API_BASE_URL = "http://localhost:3001/api"
|
||||
$env:JWT_SECRET = "test-jwt-secret-for-integration-tests"
|
||||
$env:NODE_OPTIONS = "--max-old-space-size=8192"
|
||||
|
||||
Write-Host "✓ Environment configured" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
|
||||
# Display configuration
|
||||
Write-Host "Test Configuration:" -ForegroundColor Cyan
|
||||
Write-Host " NODE_ENV: $env:NODE_ENV"
|
||||
Write-Host " Database: $env:DB_HOST`:$env:DB_PORT/$env:DB_NAME"
|
||||
Write-Host " Redis: $env:REDIS_URL"
|
||||
Write-Host " Frontend URL: $env:FRONTEND_URL"
|
||||
Write-Host ""
|
||||
|
||||
# Check database connectivity
|
||||
Write-Host "Verifying database connection..." -ForegroundColor Yellow
|
||||
$dbCheck = podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT 1;" 2>&1
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Host "ERROR: Cannot connect to database!" -ForegroundColor Red
|
||||
Write-Host $dbCheck
|
||||
exit 1
|
||||
}
|
||||
Write-Host "✓ Database connection successful" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
|
||||
# Check URL constraints are enabled
|
||||
Write-Host "Verifying URL constraints..." -ForegroundColor Yellow
|
||||
$constraints = podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -t -A -c "SELECT COUNT(*) FROM pg_constraint WHERE conname LIKE '%url_check';"
|
||||
Write-Host "✓ Found $constraints URL constraint(s)" -ForegroundColor Green
|
||||
Write-Host ""
|
||||
|
||||
# Run integration tests
|
||||
Write-Host "=== Running Integration Tests ===" -ForegroundColor Cyan
|
||||
Write-Host ""
|
||||
|
||||
npm run test:integration
|
||||
|
||||
$exitCode = $LASTEXITCODE
|
||||
|
||||
Write-Host ""
|
||||
if ($exitCode -eq 0) {
|
||||
Write-Host "=== Integration Tests PASSED ===" -ForegroundColor Green
|
||||
} else {
|
||||
Write-Host "=== Integration Tests FAILED ===" -ForegroundColor Red
|
||||
Write-Host "Exit code: $exitCode" -ForegroundColor Red
|
||||
}
|
||||
|
||||
exit $exitCode
|
||||
@@ -1,80 +0,0 @@
|
||||
@echo off
|
||||
REM Simple batch script to run integration tests with container infrastructure
|
||||
|
||||
echo === Flyer Crawler Integration Test Runner ===
|
||||
echo.
|
||||
|
||||
REM Check containers
|
||||
echo Checking container status...
|
||||
podman ps --filter "name=flyer-crawler-postgres" --format "{{.Names}}" >nul 2>&1
|
||||
if errorlevel 1 (
|
||||
echo ERROR: PostgreSQL container is not running!
|
||||
echo Start it with: podman start flyer-crawler-postgres
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
podman ps --filter "name=flyer-crawler-redis" --format "{{.Names}}" >nul 2>&1
|
||||
if errorlevel 1 (
|
||||
echo ERROR: Redis container is not running!
|
||||
echo Start it with: podman start flyer-crawler-redis
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
echo [OK] Containers are running
|
||||
echo.
|
||||
|
||||
REM Set environment variables
|
||||
echo Setting environment variables...
|
||||
set NODE_ENV=test
|
||||
set DB_HOST=localhost
|
||||
set DB_USER=postgres
|
||||
set DB_PASSWORD=postgres
|
||||
set DB_NAME=flyer_crawler_dev
|
||||
set DB_PORT=5432
|
||||
set REDIS_URL=redis://localhost:6379
|
||||
set REDIS_PASSWORD=
|
||||
set FRONTEND_URL=http://localhost:5173
|
||||
set VITE_API_BASE_URL=http://localhost:3001/api
|
||||
set JWT_SECRET=test-jwt-secret-for-integration-tests
|
||||
set NODE_OPTIONS=--max-old-space-size=8192
|
||||
|
||||
echo [OK] Environment configured
|
||||
echo.
|
||||
|
||||
echo Test Configuration:
|
||||
echo NODE_ENV: %NODE_ENV%
|
||||
echo Database: %DB_HOST%:%DB_PORT%/%DB_NAME%
|
||||
echo Redis: %REDIS_URL%
|
||||
echo Frontend URL: %FRONTEND_URL%
|
||||
echo.
|
||||
|
||||
REM Verify database
|
||||
echo Verifying database connection...
|
||||
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -c "SELECT 1;" >nul 2>&1
|
||||
if errorlevel 1 (
|
||||
echo ERROR: Cannot connect to database!
|
||||
exit /b 1
|
||||
)
|
||||
echo [OK] Database connection successful
|
||||
echo.
|
||||
|
||||
REM Check URL constraints
|
||||
echo Verifying URL constraints...
|
||||
podman exec flyer-crawler-postgres psql -U postgres -d flyer_crawler_dev -t -A -c "SELECT COUNT(*) FROM pg_constraint WHERE conname LIKE '%%url_check';"
|
||||
echo.
|
||||
|
||||
REM Run tests
|
||||
echo === Running Integration Tests ===
|
||||
echo.
|
||||
|
||||
npm run test:integration
|
||||
|
||||
if errorlevel 1 (
|
||||
echo.
|
||||
echo === Integration Tests FAILED ===
|
||||
exit /b 1
|
||||
) else (
|
||||
echo.
|
||||
echo === Integration Tests PASSED ===
|
||||
exit /b 0
|
||||
)
|
||||
31
scripts/check-linux.js
Normal file
31
scripts/check-linux.js
Normal file
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Platform check script for test execution.
|
||||
* Warns (but doesn't block) when running tests on Windows outside a container.
|
||||
*
|
||||
* See ADR-014 for details on Linux-only requirement.
|
||||
*/
|
||||
|
||||
const isWindows = process.platform === 'win32';
|
||||
const inContainer =
|
||||
process.env.REMOTE_CONTAINERS === 'true' ||
|
||||
process.env.DEVCONTAINER === 'true' ||
|
||||
process.env.container === 'podman' ||
|
||||
process.env.container === 'docker';
|
||||
|
||||
if (isWindows && !inContainer) {
|
||||
console.warn('\n' + '='.repeat(70));
|
||||
console.warn('⚠️ WARNING: Running tests on Windows outside a container');
|
||||
console.warn('='.repeat(70));
|
||||
console.warn('');
|
||||
console.warn('This application is designed for Linux only. Test results on Windows');
|
||||
console.warn('may be unreliable due to path separator differences and other issues.');
|
||||
console.warn('');
|
||||
console.warn('For accurate test results, please use:');
|
||||
console.warn(' - VS Code Dev Container ("Reopen in Container")');
|
||||
console.warn(' - WSL (Windows Subsystem for Linux)');
|
||||
console.warn(' - A Linux VM or bare-metal Linux');
|
||||
console.warn('');
|
||||
console.warn('See docs/adr/0014-containerization-and-deployment-strategy.md');
|
||||
console.warn('='.repeat(70) + '\n');
|
||||
}
|
||||
164
scripts/test-bugsink.ts
Normal file
164
scripts/test-bugsink.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env npx tsx
|
||||
/**
|
||||
* Test script to verify Bugsink error tracking is working.
|
||||
*
|
||||
* This script sends test events directly to Bugsink using the Sentry store API.
|
||||
* We use curl/fetch instead of the Sentry SDK because SDK v8+ has strict DSN
|
||||
* validation that rejects HTTP URLs (Bugsink uses HTTP locally).
|
||||
*
|
||||
* Usage:
|
||||
* npx tsx scripts/test-bugsink.ts
|
||||
*
|
||||
* Or with environment override:
|
||||
* SENTRY_DSN=http://...@localhost:8000/1 npx tsx scripts/test-bugsink.ts
|
||||
*/
|
||||
|
||||
// Configuration - parse DSN to extract components
|
||||
const DSN =
|
||||
process.env.SENTRY_DSN || 'http://59a58583-e869-7697-f94a-cfa0337676a8@localhost:8000/1';
|
||||
const ENVIRONMENT = process.env.SENTRY_ENVIRONMENT || 'test';
|
||||
|
||||
// Parse DSN: http://<key>@<host>/<project_id>
|
||||
function parseDsn(dsn: string) {
|
||||
const match = dsn.match(/^(https?):\/\/([^@]+)@([^/]+)\/(.+)$/);
|
||||
if (!match) {
|
||||
throw new Error(`Invalid DSN format: ${dsn}`);
|
||||
}
|
||||
return {
|
||||
protocol: match[1],
|
||||
publicKey: match[2],
|
||||
host: match[3],
|
||||
projectId: match[4],
|
||||
};
|
||||
}
|
||||
|
||||
const dsnParts = parseDsn(DSN);
|
||||
const STORE_URL = `${dsnParts.protocol}://${dsnParts.host}/api/${dsnParts.projectId}/store/`;
|
||||
|
||||
console.log('='.repeat(60));
|
||||
console.log('Bugsink/Sentry Test Script');
|
||||
console.log('='.repeat(60));
|
||||
console.log(`DSN: ${DSN}`);
|
||||
console.log(`Store URL: ${STORE_URL}`);
|
||||
console.log(`Public Key: ${dsnParts.publicKey}`);
|
||||
console.log(`Environment: ${ENVIRONMENT}`);
|
||||
console.log('');
|
||||
|
||||
// Generate a UUID for event_id
|
||||
function generateEventId(): string {
|
||||
return 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'.replace(/x/g, () =>
|
||||
Math.floor(Math.random() * 16).toString(16),
|
||||
);
|
||||
}
|
||||
|
||||
// Send an event to Bugsink via the Sentry store API
|
||||
async function sendEvent(
|
||||
event: Record<string, unknown>,
|
||||
): Promise<{ success: boolean; status: number }> {
|
||||
const response = await fetch(STORE_URL, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'X-Sentry-Auth': `Sentry sentry_version=7, sentry_client=test-bugsink/1.0, sentry_key=${dsnParts.publicKey}`,
|
||||
},
|
||||
body: JSON.stringify(event),
|
||||
});
|
||||
|
||||
return {
|
||||
success: response.ok,
|
||||
status: response.status,
|
||||
};
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log('[Test] Sending test events to Bugsink...\n');
|
||||
|
||||
try {
|
||||
// Test 1: Send an error event
|
||||
const errorEventId = generateEventId();
|
||||
console.log(`[Test 1] Sending error event (ID: ${errorEventId})...`);
|
||||
const errorEvent = {
|
||||
event_id: errorEventId,
|
||||
timestamp: new Date().toISOString(),
|
||||
platform: 'node',
|
||||
level: 'error',
|
||||
logger: 'test-bugsink.ts',
|
||||
environment: ENVIRONMENT,
|
||||
server_name: 'flyer-crawler-dev',
|
||||
message: 'BugsinkTestError: This is a test error from test-bugsink.ts script',
|
||||
exception: {
|
||||
values: [
|
||||
{
|
||||
type: 'BugsinkTestError',
|
||||
value: 'This is a test error from test-bugsink.ts script',
|
||||
stacktrace: {
|
||||
frames: [
|
||||
{
|
||||
filename: 'scripts/test-bugsink.ts',
|
||||
function: 'main',
|
||||
lineno: 42,
|
||||
colno: 10,
|
||||
in_app: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
tags: {
|
||||
test: 'true',
|
||||
source: 'test-bugsink.ts',
|
||||
},
|
||||
};
|
||||
|
||||
const errorResult = await sendEvent(errorEvent);
|
||||
console.log(
|
||||
` Result: ${errorResult.success ? 'SUCCESS' : 'FAILED'} (HTTP ${errorResult.status})`,
|
||||
);
|
||||
|
||||
// Test 2: Send an info message
|
||||
const messageEventId = generateEventId();
|
||||
console.log(`[Test 2] Sending info message (ID: ${messageEventId})...`);
|
||||
const messageEvent = {
|
||||
event_id: messageEventId,
|
||||
timestamp: new Date().toISOString(),
|
||||
platform: 'node',
|
||||
level: 'info',
|
||||
logger: 'test-bugsink.ts',
|
||||
environment: ENVIRONMENT,
|
||||
server_name: 'flyer-crawler-dev',
|
||||
message: 'Test info message from test-bugsink.ts - Bugsink is working!',
|
||||
tags: {
|
||||
test: 'true',
|
||||
source: 'test-bugsink.ts',
|
||||
},
|
||||
};
|
||||
|
||||
const messageResult = await sendEvent(messageEvent);
|
||||
console.log(
|
||||
` Result: ${messageResult.success ? 'SUCCESS' : 'FAILED'} (HTTP ${messageResult.status})`,
|
||||
);
|
||||
|
||||
// Summary
|
||||
console.log('');
|
||||
console.log('='.repeat(60));
|
||||
if (errorResult.success && messageResult.success) {
|
||||
console.log('SUCCESS! Both test events were accepted by Bugsink.');
|
||||
console.log('');
|
||||
console.log('Check Bugsink UI at http://localhost:8000');
|
||||
console.log('Look for:');
|
||||
console.log(' - BugsinkTestError: "This is a test error..."');
|
||||
console.log(' - Info message: "Test info message from test-bugsink.ts"');
|
||||
} else {
|
||||
console.log('WARNING: Some events may not have been accepted.');
|
||||
console.log('Check that Bugsink is running and the DSN is correct.');
|
||||
process.exit(1);
|
||||
}
|
||||
console.log('='.repeat(60));
|
||||
} catch (error) {
|
||||
console.error('[Test] Failed to send events:', error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
70
server.ts
70
server.ts
@@ -1,4 +1,12 @@
|
||||
// server.ts
|
||||
/**
|
||||
* IMPORTANT: Sentry initialization MUST happen before any other imports
|
||||
* to ensure all errors are captured, including those in imported modules.
|
||||
* See ADR-015: Application Performance Monitoring and Error Tracking.
|
||||
*/
|
||||
import { initSentry, getSentryMiddleware } from './src/services/sentry.server';
|
||||
initSentry();
|
||||
|
||||
import express, { Request, Response, NextFunction } from 'express';
|
||||
import { randomUUID } from 'crypto';
|
||||
import helmet from 'helmet';
|
||||
@@ -7,7 +15,7 @@ import cookieParser from 'cookie-parser';
|
||||
import listEndpoints from 'express-list-endpoints';
|
||||
import { getPool } from './src/services/db/connection.db';
|
||||
|
||||
import passport from './src/routes/passport.routes';
|
||||
import passport from './src/config/passport';
|
||||
import { logger } from './src/services/logger.server';
|
||||
|
||||
// Import routers
|
||||
@@ -24,15 +32,23 @@ import statsRouter from './src/routes/stats.routes';
|
||||
import gamificationRouter from './src/routes/gamification.routes';
|
||||
import systemRouter from './src/routes/system.routes';
|
||||
import healthRouter from './src/routes/health.routes';
|
||||
import upcRouter from './src/routes/upc.routes';
|
||||
import inventoryRouter from './src/routes/inventory.routes';
|
||||
import receiptRouter from './src/routes/receipt.routes';
|
||||
import { errorHandler } from './src/middleware/errorHandler';
|
||||
import { backgroundJobService, startBackgroundJobs } from './src/services/backgroundJobService';
|
||||
import type { UserProfile } from './src/types';
|
||||
|
||||
// API Documentation (ADR-018)
|
||||
import swaggerUi from 'swagger-ui-express';
|
||||
import { swaggerSpec } from './src/config/swagger';
|
||||
import {
|
||||
analyticsQueue,
|
||||
weeklyAnalyticsQueue,
|
||||
gracefulShutdown,
|
||||
tokenCleanupQueue,
|
||||
} from './src/services/queueService.server';
|
||||
import { monitoringService } from './src/services/monitoringService.server';
|
||||
|
||||
// --- START DEBUG LOGGING ---
|
||||
// Log the database connection details as seen by the SERVER PROCESS.
|
||||
@@ -104,10 +120,15 @@ app.use(express.urlencoded({ limit: '100mb', extended: true }));
|
||||
app.use(cookieParser()); // Middleware to parse cookies
|
||||
app.use(passport.initialize()); // Initialize Passport
|
||||
|
||||
// --- Sentry Request Handler (ADR-015) ---
|
||||
// Must be the first middleware after body parsers to capture request data for errors.
|
||||
const sentryMiddleware = getSentryMiddleware();
|
||||
app.use(sentryMiddleware.requestHandler);
|
||||
|
||||
// --- MOCK AUTH FOR TESTING ---
|
||||
// This MUST come after passport.initialize() and BEFORE any of the API routes.
|
||||
import { mockAuth } from './src/routes/passport.routes';
|
||||
app.use(mockAuth);
|
||||
import { mockAuth } from './src/config/passport';
|
||||
app.use(mockAuth);
|
||||
|
||||
// Add a request timeout middleware. This will help prevent requests from hanging indefinitely.
|
||||
// We set a generous 5-minute timeout to accommodate slow AI processing for large flyers.
|
||||
@@ -188,8 +209,41 @@ if (!process.env.JWT_SECRET) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// --- API Documentation (ADR-018) ---
|
||||
// Only serve Swagger UI in non-production environments to prevent information disclosure.
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
app.use(
|
||||
'/docs/api-docs',
|
||||
swaggerUi.serve,
|
||||
swaggerUi.setup(swaggerSpec, {
|
||||
customCss: '.swagger-ui .topbar { display: none }',
|
||||
customSiteTitle: 'Flyer Crawler API Documentation',
|
||||
}),
|
||||
);
|
||||
|
||||
// Expose raw OpenAPI JSON spec for tooling (SDK generation, testing, etc.)
|
||||
app.get('/docs/api-docs.json', (_req, res) => {
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
res.send(swaggerSpec);
|
||||
});
|
||||
|
||||
logger.info('API Documentation available at /docs/api-docs');
|
||||
}
|
||||
|
||||
// --- API Routes ---
|
||||
|
||||
// ADR-053: Worker Health Checks
|
||||
// Expose queue metrics for monitoring.
|
||||
app.get('/api/health/queues', async (req, res) => {
|
||||
try {
|
||||
const statuses = await monitoringService.getQueueStatuses();
|
||||
res.json(statuses);
|
||||
} catch (error) {
|
||||
logger.error({ err: error }, 'Failed to fetch queue statuses');
|
||||
res.status(503).json({ error: 'Failed to fetch queue statuses' });
|
||||
}
|
||||
});
|
||||
|
||||
// The order of route registration is critical.
|
||||
// More specific routes should be registered before more general ones.
|
||||
// 1. Authentication routes for login, registration, etc.
|
||||
@@ -218,9 +272,19 @@ app.use('/api/personalization', personalizationRouter);
|
||||
app.use('/api/price-history', priceRouter);
|
||||
// 10. Public statistics routes.
|
||||
app.use('/api/stats', statsRouter);
|
||||
// 11. UPC barcode scanning routes.
|
||||
app.use('/api/upc', upcRouter);
|
||||
// 12. Inventory and expiry tracking routes.
|
||||
app.use('/api/inventory', inventoryRouter);
|
||||
// 13. Receipt scanning routes.
|
||||
app.use('/api/receipts', receiptRouter);
|
||||
|
||||
// --- Error Handling and Server Startup ---
|
||||
|
||||
// Sentry Error Handler (ADR-015) - captures errors and sends to Bugsink.
|
||||
// Must come BEFORE the custom error handler but AFTER all routes.
|
||||
app.use(sentryMiddleware.errorHandler);
|
||||
|
||||
// Global error handling middleware. This must be the last `app.use()` call.
|
||||
app.use(errorHandler);
|
||||
|
||||
|
||||
40
sql/01-init-bugsink.sh
Normal file
40
sql/01-init-bugsink.sh
Normal file
@@ -0,0 +1,40 @@
|
||||
#!/bin/bash
|
||||
# sql/01-init-bugsink.sh
|
||||
# ============================================================================
|
||||
# BUGSINK DATABASE INITIALIZATION (ADR-015)
|
||||
# ============================================================================
|
||||
# This script creates the Bugsink database and user for error tracking.
|
||||
# It runs after 00-init-extensions.sql due to alphabetical ordering.
|
||||
#
|
||||
# Note: Shell scripts in docker-entrypoint-initdb.d/ can execute multiple
|
||||
# SQL commands including CREATE DATABASE (which requires a separate transaction).
|
||||
# ============================================================================
|
||||
|
||||
set -e
|
||||
|
||||
# Use the postgres superuser to create the bugsink user and database
|
||||
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
|
||||
-- Create Bugsink user (if not exists)
|
||||
DO \$\$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT FROM pg_catalog.pg_roles WHERE rolname = 'bugsink') THEN
|
||||
CREATE USER bugsink WITH PASSWORD 'bugsink_dev_password';
|
||||
RAISE NOTICE 'Created bugsink user';
|
||||
ELSE
|
||||
RAISE NOTICE 'Bugsink user already exists';
|
||||
END IF;
|
||||
END \$\$;
|
||||
EOSQL
|
||||
|
||||
# Check if bugsink database exists, create if not
|
||||
if psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" -lqt | cut -d \| -f 1 | grep -qw bugsink; then
|
||||
echo "Bugsink database already exists"
|
||||
else
|
||||
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
|
||||
CREATE DATABASE bugsink OWNER bugsink;
|
||||
GRANT ALL PRIVILEGES ON DATABASE bugsink TO bugsink;
|
||||
EOSQL
|
||||
echo "Created bugsink database"
|
||||
fi
|
||||
|
||||
echo "✅ Bugsink database and user have been configured (ADR-015)"
|
||||
@@ -1,6 +1,55 @@
|
||||
-- sql/Initial_triggers_and_functions.sql
|
||||
-- This file contains all trigger functions and trigger definitions for the database.
|
||||
|
||||
-- ============================================================================
|
||||
-- PART 0: OBSERVABILITY HELPERS (ADR-050)
|
||||
-- ============================================================================
|
||||
-- These functions provide structured logging capabilities for database functions.
|
||||
-- Logs are emitted via RAISE statements and can be captured by Logstash for
|
||||
-- forwarding to error tracking systems (see ADR-015).
|
||||
|
||||
-- Function to emit structured log messages from PL/pgSQL functions.
|
||||
-- This enables observability for database operations that might otherwise fail silently.
|
||||
DROP FUNCTION IF EXISTS public.fn_log(TEXT, TEXT, TEXT, JSONB);
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.fn_log(
|
||||
p_level TEXT, -- 'DEBUG', 'INFO', 'NOTICE', 'WARNING', 'ERROR'
|
||||
p_function_name TEXT, -- The calling function name
|
||||
p_message TEXT, -- Human-readable message
|
||||
p_context JSONB DEFAULT NULL -- Additional context (user_id, params, etc.)
|
||||
)
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
log_line TEXT;
|
||||
BEGIN
|
||||
-- Build structured JSON log line for Logstash parsing
|
||||
log_line := jsonb_build_object(
|
||||
'timestamp', now(),
|
||||
'level', p_level,
|
||||
'source', 'postgresql',
|
||||
'function', p_function_name,
|
||||
'message', p_message,
|
||||
'context', COALESCE(p_context, '{}'::jsonb)
|
||||
)::text;
|
||||
|
||||
-- Use appropriate RAISE level based on severity
|
||||
-- Note: We use RAISE LOG for errors to ensure they're always captured
|
||||
-- regardless of client_min_messages setting
|
||||
CASE UPPER(p_level)
|
||||
WHEN 'DEBUG' THEN RAISE DEBUG '%', log_line;
|
||||
WHEN 'INFO' THEN RAISE INFO '%', log_line;
|
||||
WHEN 'NOTICE' THEN RAISE NOTICE '%', log_line;
|
||||
WHEN 'WARNING' THEN RAISE WARNING '%', log_line;
|
||||
WHEN 'ERROR' THEN RAISE LOG '%', log_line;
|
||||
ELSE RAISE NOTICE '%', log_line;
|
||||
END CASE;
|
||||
END;
|
||||
$$;
|
||||
|
||||
COMMENT ON FUNCTION public.fn_log IS 'Emits structured JSON log messages for database function observability (ADR-050)';
|
||||
|
||||
-- ============================================================================
|
||||
-- PART 3: DATABASE FUNCTIONS
|
||||
-- ============================================================================
|
||||
@@ -223,13 +272,32 @@ AS $$
|
||||
DECLARE
|
||||
list_owner_id UUID;
|
||||
item_to_add RECORD;
|
||||
v_items_added INTEGER := 0;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object(
|
||||
'user_id', p_user_id,
|
||||
'menu_plan_id', p_menu_plan_id,
|
||||
'shopping_list_id', p_shopping_list_id
|
||||
);
|
||||
|
||||
-- Security Check: Ensure the user calling this function owns the target shopping list.
|
||||
SELECT user_id INTO list_owner_id
|
||||
FROM public.shopping_lists
|
||||
WHERE shopping_list_id = p_shopping_list_id;
|
||||
|
||||
IF list_owner_id IS NULL OR list_owner_id <> p_user_id THEN
|
||||
IF list_owner_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'add_menu_plan_to_shopping_list',
|
||||
'Shopping list not found',
|
||||
v_context);
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
IF list_owner_id <> p_user_id THEN
|
||||
PERFORM fn_log('WARNING', 'add_menu_plan_to_shopping_list',
|
||||
'Permission denied: user does not own list',
|
||||
v_context || jsonb_build_object('list_owner_id', list_owner_id));
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
@@ -244,9 +312,16 @@ BEGIN
|
||||
DO UPDATE SET
|
||||
quantity = shopping_list_items.quantity + EXCLUDED.quantity;
|
||||
|
||||
v_items_added := v_items_added + 1;
|
||||
|
||||
-- Return the details of the item that was added/updated.
|
||||
RETURN QUERY SELECT item_to_add.master_item_id, item_to_add.item_name, item_to_add.shopping_list_quantity;
|
||||
END LOOP;
|
||||
|
||||
-- Log completion (items_added = 0 is normal if pantry has everything)
|
||||
PERFORM fn_log('INFO', 'add_menu_plan_to_shopping_list',
|
||||
'Menu plan items added to shopping list',
|
||||
v_context || jsonb_build_object('items_added', v_items_added));
|
||||
END;
|
||||
$$;
|
||||
|
||||
@@ -520,16 +595,30 @@ SECURITY DEFINER
|
||||
AS $$
|
||||
DECLARE
|
||||
correction_record RECORD;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object('correction_id', p_correction_id);
|
||||
|
||||
-- 1. Fetch the correction details, ensuring it's still pending.
|
||||
SELECT * INTO correction_record
|
||||
FROM public.suggested_corrections
|
||||
WHERE suggested_correction_id = p_correction_id AND status = 'pending';
|
||||
|
||||
IF NOT FOUND THEN
|
||||
PERFORM fn_log('WARNING', 'approve_correction',
|
||||
'Correction not found or already processed',
|
||||
v_context);
|
||||
RAISE EXCEPTION 'Correction with ID % not found or already processed.', p_correction_id;
|
||||
END IF;
|
||||
|
||||
-- Add correction details to context
|
||||
v_context := v_context || jsonb_build_object(
|
||||
'correction_type', correction_record.correction_type,
|
||||
'flyer_item_id', correction_record.flyer_item_id,
|
||||
'suggested_value', correction_record.suggested_value
|
||||
);
|
||||
|
||||
-- 2. Apply the correction based on its type.
|
||||
IF correction_record.correction_type = 'INCORRECT_ITEM_LINK' THEN
|
||||
UPDATE public.flyer_items
|
||||
@@ -545,6 +634,11 @@ BEGIN
|
||||
UPDATE public.suggested_corrections
|
||||
SET status = 'approved', reviewed_at = now()
|
||||
WHERE suggested_correction_id = p_correction_id;
|
||||
|
||||
-- Log successful correction approval
|
||||
PERFORM fn_log('INFO', 'approve_correction',
|
||||
'Correction approved and applied',
|
||||
v_context);
|
||||
END;
|
||||
$$;
|
||||
|
||||
@@ -566,7 +660,14 @@ SECURITY INVOKER
|
||||
AS $$
|
||||
DECLARE
|
||||
new_recipe_id BIGINT;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object(
|
||||
'user_id', p_user_id,
|
||||
'original_recipe_id', p_original_recipe_id
|
||||
);
|
||||
|
||||
-- 1. Create a copy of the recipe, linking it to the new user and the original recipe.
|
||||
INSERT INTO public.recipes (
|
||||
user_id,
|
||||
@@ -605,6 +706,9 @@ BEGIN
|
||||
|
||||
-- If the original recipe didn't exist, new_recipe_id will be null.
|
||||
IF new_recipe_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'fork_recipe',
|
||||
'Original recipe not found',
|
||||
v_context);
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
@@ -613,6 +717,11 @@ BEGIN
|
||||
INSERT INTO public.recipe_tags (recipe_id, tag_id) SELECT new_recipe_id, tag_id FROM public.recipe_tags WHERE recipe_id = p_original_recipe_id;
|
||||
INSERT INTO public.recipe_appliances (recipe_id, appliance_id) SELECT new_recipe_id, appliance_id FROM public.recipe_appliances WHERE recipe_id = p_original_recipe_id;
|
||||
|
||||
-- Log successful fork
|
||||
PERFORM fn_log('INFO', 'fork_recipe',
|
||||
'Recipe forked successfully',
|
||||
v_context || jsonb_build_object('new_recipe_id', new_recipe_id));
|
||||
|
||||
-- 3. Return the newly created recipe record.
|
||||
RETURN QUERY SELECT * FROM public.recipes WHERE recipe_id = new_recipe_id;
|
||||
END;
|
||||
@@ -889,13 +998,32 @@ AS $$
|
||||
DECLARE
|
||||
list_owner_id UUID;
|
||||
new_trip_id BIGINT;
|
||||
v_items_count INTEGER;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object(
|
||||
'user_id', p_user_id,
|
||||
'shopping_list_id', p_shopping_list_id,
|
||||
'total_spent_cents', p_total_spent_cents
|
||||
);
|
||||
|
||||
-- Security Check: Ensure the user calling this function owns the target shopping list.
|
||||
SELECT user_id INTO list_owner_id
|
||||
FROM public.shopping_lists
|
||||
WHERE shopping_list_id = p_shopping_list_id;
|
||||
|
||||
IF list_owner_id IS NULL OR list_owner_id <> p_user_id THEN
|
||||
IF list_owner_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'complete_shopping_list',
|
||||
'Shopping list not found',
|
||||
v_context);
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
IF list_owner_id <> p_user_id THEN
|
||||
PERFORM fn_log('WARNING', 'complete_shopping_list',
|
||||
'Permission denied: user does not own list',
|
||||
v_context || jsonb_build_object('list_owner_id', list_owner_id));
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
@@ -910,10 +1038,17 @@ BEGIN
|
||||
FROM public.shopping_list_items
|
||||
WHERE shopping_list_id = p_shopping_list_id AND is_purchased = true;
|
||||
|
||||
GET DIAGNOSTICS v_items_count = ROW_COUNT;
|
||||
|
||||
-- 3. Delete the purchased items from the original shopping list.
|
||||
DELETE FROM public.shopping_list_items
|
||||
WHERE shopping_list_id = p_shopping_list_id AND is_purchased = true;
|
||||
|
||||
-- Log successful completion
|
||||
PERFORM fn_log('INFO', 'complete_shopping_list',
|
||||
'Shopping list completed successfully',
|
||||
v_context || jsonb_build_object('trip_id', new_trip_id, 'items_archived', v_items_count));
|
||||
|
||||
RETURN new_trip_id;
|
||||
END;
|
||||
$$;
|
||||
@@ -1047,13 +1182,19 @@ AS $$
|
||||
DECLARE
|
||||
v_achievement_id BIGINT;
|
||||
v_points_value INTEGER;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object('user_id', p_user_id, 'achievement_name', p_achievement_name);
|
||||
|
||||
-- Find the achievement by name to get its ID and point value.
|
||||
SELECT achievement_id, points_value INTO v_achievement_id, v_points_value
|
||||
FROM public.achievements WHERE name = p_achievement_name;
|
||||
|
||||
-- If the achievement doesn't exist, do nothing.
|
||||
-- If the achievement doesn't exist, log warning and return.
|
||||
IF v_achievement_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'award_achievement',
|
||||
'Achievement not found: ' || p_achievement_name, v_context);
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
@@ -1065,9 +1206,12 @@ BEGIN
|
||||
ON CONFLICT (user_id, achievement_id) DO NOTHING;
|
||||
|
||||
-- If the insert was successful (i.e., the user didn't have the achievement),
|
||||
-- update their total points. The `GET DIAGNOSTICS` command checks the row count of the last query.
|
||||
-- update their total points and log success.
|
||||
IF FOUND THEN
|
||||
UPDATE public.profiles SET points = points + v_points_value WHERE user_id = p_user_id;
|
||||
PERFORM fn_log('INFO', 'award_achievement',
|
||||
'Achievement awarded: ' || p_achievement_name,
|
||||
v_context || jsonb_build_object('points_awarded', v_points_value));
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
@@ -1165,13 +1309,25 @@ RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
new_profile_id UUID;
|
||||
user_meta_data JSONB;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object('user_id', new.user_id, 'email', new.email);
|
||||
|
||||
-- The user's metadata (full_name, avatar_url) is passed via a temporary session variable.
|
||||
user_meta_data := current_setting('my_app.user_metadata', true)::JSONB;
|
||||
|
||||
INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
|
||||
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
|
||||
RETURNING user_id INTO new_profile_id;
|
||||
-- Create the user profile
|
||||
BEGIN
|
||||
INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
|
||||
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
|
||||
RETURNING user_id INTO new_profile_id;
|
||||
EXCEPTION WHEN OTHERS THEN
|
||||
PERFORM fn_log('ERROR', 'handle_new_user',
|
||||
'Failed to create profile: ' || SQLERRM,
|
||||
v_context || jsonb_build_object('sqlstate', SQLSTATE));
|
||||
RAISE;
|
||||
END;
|
||||
|
||||
-- Also create a default shopping list for the new user.
|
||||
INSERT INTO public.shopping_lists (user_id, name)
|
||||
@@ -1179,12 +1335,20 @@ BEGIN
|
||||
|
||||
-- Log the new user event
|
||||
INSERT INTO public.activity_log (user_id, action, display_text, icon, details)
|
||||
VALUES (new.user_id, 'user_registered',
|
||||
VALUES (new.user_id, 'user_registered',
|
||||
COALESCE(user_meta_data->>'full_name', new.email) || ' has registered.',
|
||||
'user-plus',
|
||||
'user-plus',
|
||||
jsonb_build_object('email', new.email)
|
||||
);
|
||||
|
||||
|
||||
-- Award the 'Welcome Aboard' achievement for new user registration
|
||||
PERFORM public.award_achievement(new.user_id, 'Welcome Aboard');
|
||||
|
||||
-- Log successful user creation
|
||||
PERFORM fn_log('INFO', 'handle_new_user',
|
||||
'New user created successfully',
|
||||
v_context || jsonb_build_object('full_name', user_meta_data->>'full_name'));
|
||||
|
||||
RETURN new;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
@@ -260,6 +260,7 @@ ON CONFLICT (name) DO NOTHING;
|
||||
|
||||
-- 9. Pre-populate the achievements table.
|
||||
INSERT INTO public.achievements (name, description, icon, points_value) VALUES
|
||||
('Welcome Aboard', 'Join the community by creating your account.', 'user-check', 5),
|
||||
('First Recipe', 'Create your very first recipe.', 'chef-hat', 10),
|
||||
('Recipe Sharer', 'Share a recipe with another user for the first time.', 'share-2', 15),
|
||||
('List Sharer', 'Share a shopping list with another user for the first time.', 'list', 20),
|
||||
|
||||
@@ -1012,3 +1012,232 @@ CREATE INDEX IF NOT EXISTS idx_user_achievements_user_id ON public.user_achievem
|
||||
CREATE INDEX IF NOT EXISTS idx_user_achievements_achievement_id ON public.user_achievements(achievement_id);
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
-- UPC SCANNING FEATURE TABLES (59-60)
|
||||
-- ============================================================================
|
||||
|
||||
-- 59. UPC Scan History - tracks all UPC scans performed by users
|
||||
-- This table provides an audit trail and allows users to see their scan history
|
||||
CREATE TABLE IF NOT EXISTS public.upc_scan_history (
|
||||
scan_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
upc_code TEXT NOT NULL,
|
||||
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||
scan_source TEXT NOT NULL,
|
||||
scan_confidence NUMERIC(5,4),
|
||||
raw_image_path TEXT,
|
||||
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT upc_scan_history_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
|
||||
CONSTRAINT upc_scan_history_scan_source_check CHECK (scan_source IN ('image_upload', 'manual_entry', 'phone_app', 'camera_scan')),
|
||||
CONSTRAINT upc_scan_history_scan_confidence_check CHECK (scan_confidence IS NULL OR (scan_confidence >= 0 AND scan_confidence <= 1))
|
||||
);
|
||||
COMMENT ON TABLE public.upc_scan_history IS 'Audit trail of all UPC barcode scans performed by users, tracking scan source and results.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.upc_code IS 'The scanned UPC/EAN barcode (8-14 digits).';
|
||||
COMMENT ON COLUMN public.upc_scan_history.product_id IS 'Reference to the matched product, if found in our database.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.scan_source IS 'How the scan was performed: image_upload, manual_entry, phone_app, or camera_scan.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.scan_confidence IS 'Confidence score from barcode detection (0.0-1.0), null for manual entry.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.raw_image_path IS 'Path to the uploaded barcode image, if applicable.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.lookup_successful IS 'Whether the UPC was successfully matched to a product (internal or external).';
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_user_id ON public.upc_scan_history(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_upc_code ON public.upc_scan_history(upc_code);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_created_at ON public.upc_scan_history(created_at DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_product_id ON public.upc_scan_history(product_id) WHERE product_id IS NOT NULL;
|
||||
|
||||
-- 60. UPC External Lookups - cache for external UPC database API responses
|
||||
CREATE TABLE IF NOT EXISTS public.upc_external_lookups (
|
||||
lookup_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
upc_code TEXT NOT NULL UNIQUE,
|
||||
product_name TEXT,
|
||||
brand_name TEXT,
|
||||
category TEXT,
|
||||
description TEXT,
|
||||
image_url TEXT,
|
||||
external_source TEXT NOT NULL,
|
||||
lookup_data JSONB,
|
||||
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT upc_external_lookups_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
|
||||
CONSTRAINT upc_external_lookups_external_source_check CHECK (external_source IN ('openfoodfacts', 'upcitemdb', 'manual', 'unknown')),
|
||||
CONSTRAINT upc_external_lookups_name_check CHECK (NOT lookup_successful OR product_name IS NOT NULL)
|
||||
);
|
||||
COMMENT ON TABLE public.upc_external_lookups IS 'Cache for external UPC database API responses to reduce API calls and improve lookup speed.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.upc_code IS 'The UPC/EAN barcode that was looked up.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.product_name IS 'Product name returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.brand_name IS 'Brand name returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.category IS 'Product category returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.description IS 'Product description returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.image_url IS 'Product image URL returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.external_source IS 'Which external API provided this data: openfoodfacts, upcitemdb, manual, unknown.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.lookup_data IS 'Full raw JSON response from the external API for reference.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.lookup_successful IS 'Whether the external lookup found product information.';
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_upc_code ON public.upc_external_lookups(upc_code);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_external_source ON public.upc_external_lookups(external_source);
|
||||
|
||||
-- Add index to existing products.upc_code for faster lookups
|
||||
CREATE INDEX IF NOT EXISTS idx_products_upc_code ON public.products(upc_code) WHERE upc_code IS NOT NULL;
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
-- EXPIRY DATE TRACKING FEATURE TABLES (61-63)
|
||||
-- ============================================================================
|
||||
|
||||
-- 61. Expiry Date Ranges - reference table for typical shelf life
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_date_ranges (
|
||||
expiry_range_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE CASCADE,
|
||||
item_pattern TEXT,
|
||||
storage_location TEXT NOT NULL,
|
||||
min_days INTEGER NOT NULL,
|
||||
max_days INTEGER NOT NULL,
|
||||
typical_days INTEGER NOT NULL,
|
||||
notes TEXT,
|
||||
source TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT expiry_date_ranges_storage_location_check CHECK (storage_location IN ('fridge', 'freezer', 'pantry', 'room_temp')),
|
||||
CONSTRAINT expiry_date_ranges_min_days_check CHECK (min_days >= 0),
|
||||
CONSTRAINT expiry_date_ranges_max_days_check CHECK (max_days >= min_days),
|
||||
CONSTRAINT expiry_date_ranges_typical_days_check CHECK (typical_days >= min_days AND typical_days <= max_days),
|
||||
CONSTRAINT expiry_date_ranges_identifier_check CHECK (
|
||||
master_item_id IS NOT NULL OR category_id IS NOT NULL OR item_pattern IS NOT NULL
|
||||
),
|
||||
CONSTRAINT expiry_date_ranges_source_check CHECK (source IS NULL OR source IN ('usda', 'fda', 'manual', 'community'))
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_date_ranges IS 'Reference table storing typical shelf life for grocery items based on storage location.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.master_item_id IS 'Specific item this range applies to (most specific).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.category_id IS 'Category this range applies to (fallback if no item match).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.item_pattern IS 'Regex pattern to match item names (fallback if no item/category match).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.storage_location IS 'Where the item is stored: fridge, freezer, pantry, or room_temp.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.min_days IS 'Minimum shelf life in days under proper storage.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.max_days IS 'Maximum shelf life in days under proper storage.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.typical_days IS 'Most common/recommended shelf life in days.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.notes IS 'Additional storage tips or warnings.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.source IS 'Data source: usda, fda, manual, or community.';
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_master_item_id ON public.expiry_date_ranges(master_item_id) WHERE master_item_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_category_id ON public.expiry_date_ranges(category_id) WHERE category_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_storage_location ON public.expiry_date_ranges(storage_location);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_item_location
|
||||
ON public.expiry_date_ranges(master_item_id, storage_location)
|
||||
WHERE master_item_id IS NOT NULL;
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_category_location
|
||||
ON public.expiry_date_ranges(category_id, storage_location)
|
||||
WHERE category_id IS NOT NULL AND master_item_id IS NULL;
|
||||
|
||||
-- 62. Expiry Alerts - user notification preferences for expiry warnings
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_alerts (
|
||||
expiry_alert_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
days_before_expiry INTEGER NOT NULL DEFAULT 3,
|
||||
alert_method TEXT NOT NULL,
|
||||
is_enabled BOOLEAN DEFAULT TRUE NOT NULL,
|
||||
last_alert_sent_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT expiry_alerts_days_before_check CHECK (days_before_expiry >= 0 AND days_before_expiry <= 30),
|
||||
CONSTRAINT expiry_alerts_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
|
||||
UNIQUE(user_id, alert_method)
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_alerts IS 'User preferences for expiry date notifications and alerts.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.days_before_expiry IS 'How many days before expiry to send alert (0-30).';
|
||||
COMMENT ON COLUMN public.expiry_alerts.alert_method IS 'How to notify: email, push, or in_app.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.is_enabled IS 'Whether this alert type is currently enabled.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.last_alert_sent_at IS 'Timestamp of the last alert sent to prevent duplicate notifications.';
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_user_id ON public.expiry_alerts(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_enabled ON public.expiry_alerts(user_id, is_enabled) WHERE is_enabled = TRUE;
|
||||
|
||||
-- 63. Expiry Alert Log - tracks sent notifications
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_alert_log (
|
||||
alert_log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL,
|
||||
alert_type TEXT NOT NULL,
|
||||
alert_method TEXT NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
expiry_date DATE,
|
||||
days_until_expiry INTEGER,
|
||||
sent_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT expiry_alert_log_type_check CHECK (alert_type IN ('expiring_soon', 'expired', 'expiry_reminder')),
|
||||
CONSTRAINT expiry_alert_log_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
|
||||
CONSTRAINT expiry_alert_log_item_name_check CHECK (TRIM(item_name) <> '')
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_alert_log IS 'Log of all expiry notifications sent to users for auditing and duplicate prevention.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.pantry_item_id IS 'The pantry item that triggered the alert (may be null if item deleted).';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.alert_type IS 'Type of alert: expiring_soon, expired, or expiry_reminder.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.alert_method IS 'How the alert was sent: email, push, or in_app.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.item_name IS 'Snapshot of item name at time of alert (in case item is deleted).';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.expiry_date IS 'The expiry date that triggered the alert.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.days_until_expiry IS 'Days until expiry at time alert was sent (negative = expired).';
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_user_id ON public.expiry_alert_log(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_pantry_item_id ON public.expiry_alert_log(pantry_item_id) WHERE pantry_item_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_sent_at ON public.expiry_alert_log(sent_at DESC);
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
-- RECEIPT SCANNING ENHANCEMENT TABLES (64-65)
|
||||
-- ============================================================================
|
||||
|
||||
-- 64. Receipt Processing Log - track OCR/AI processing attempts
|
||||
CREATE TABLE IF NOT EXISTS public.receipt_processing_log (
|
||||
log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
receipt_id BIGINT NOT NULL REFERENCES public.receipts(receipt_id) ON DELETE CASCADE,
|
||||
processing_step TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
provider TEXT,
|
||||
duration_ms INTEGER,
|
||||
tokens_used INTEGER,
|
||||
cost_cents INTEGER,
|
||||
input_data JSONB,
|
||||
output_data JSONB,
|
||||
error_message TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT receipt_processing_log_step_check CHECK (processing_step IN (
|
||||
'upload', 'ocr_extraction', 'text_parsing', 'store_detection',
|
||||
'item_extraction', 'item_matching', 'price_parsing', 'finalization'
|
||||
)),
|
||||
CONSTRAINT receipt_processing_log_status_check CHECK (status IN ('started', 'completed', 'failed', 'skipped')),
|
||||
CONSTRAINT receipt_processing_log_provider_check CHECK (provider IS NULL OR provider IN (
|
||||
'tesseract', 'openai', 'anthropic', 'google_vision', 'aws_textract', 'internal'
|
||||
))
|
||||
);
|
||||
COMMENT ON TABLE public.receipt_processing_log IS 'Detailed log of each processing step for receipts, useful for debugging and cost tracking.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.processing_step IS 'Which processing step this log entry is for.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.status IS 'Status of this step: started, completed, failed, skipped.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.provider IS 'External service used: tesseract, openai, anthropic, etc.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.duration_ms IS 'How long this step took in milliseconds.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.tokens_used IS 'Number of API tokens used (for LLM providers).';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.cost_cents IS 'Estimated cost in cents for this processing step.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.input_data IS 'Input data sent to the processing step (for debugging).';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.output_data IS 'Output data received from the processing step.';
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_receipt_id ON public.receipt_processing_log(receipt_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_step_status ON public.receipt_processing_log(processing_step, status);
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_created_at ON public.receipt_processing_log(created_at DESC);
|
||||
|
||||
-- 65. Store-specific receipt patterns - help identify stores from receipt text
|
||||
CREATE TABLE IF NOT EXISTS public.store_receipt_patterns (
|
||||
pattern_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||
pattern_type TEXT NOT NULL,
|
||||
pattern_value TEXT NOT NULL,
|
||||
priority INTEGER DEFAULT 0,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT store_receipt_patterns_type_check CHECK (pattern_type IN (
|
||||
'header_regex', 'footer_regex', 'phone_number', 'address_fragment', 'store_number_format'
|
||||
)),
|
||||
CONSTRAINT store_receipt_patterns_value_check CHECK (TRIM(pattern_value) <> ''),
|
||||
UNIQUE(store_id, pattern_type, pattern_value)
|
||||
);
|
||||
COMMENT ON TABLE public.store_receipt_patterns IS 'Patterns to help identify stores from receipt text and format.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.pattern_type IS 'Type of pattern: header_regex, footer_regex, phone_number, etc.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.pattern_value IS 'The actual pattern (regex or literal text).';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.priority IS 'Higher priority patterns are checked first.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.is_active IS 'Whether this pattern is currently in use.';
|
||||
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_store_id ON public.store_receipt_patterns(store_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_active ON public.store_receipt_patterns(pattern_type, is_active, priority DESC)
|
||||
WHERE is_active = TRUE;
|
||||
|
||||
|
||||
@@ -1033,6 +1033,235 @@ CREATE INDEX IF NOT EXISTS idx_user_achievements_user_id ON public.user_achievem
|
||||
CREATE INDEX IF NOT EXISTS idx_user_achievements_achievement_id ON public.user_achievements(achievement_id);
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
-- UPC SCANNING FEATURE TABLES (59-60)
|
||||
-- ============================================================================
|
||||
|
||||
-- 59. UPC Scan History - tracks all UPC scans performed by users
|
||||
-- This table provides an audit trail and allows users to see their scan history
|
||||
CREATE TABLE IF NOT EXISTS public.upc_scan_history (
|
||||
scan_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
upc_code TEXT NOT NULL,
|
||||
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||
scan_source TEXT NOT NULL,
|
||||
scan_confidence NUMERIC(5,4),
|
||||
raw_image_path TEXT,
|
||||
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT upc_scan_history_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
|
||||
CONSTRAINT upc_scan_history_scan_source_check CHECK (scan_source IN ('image_upload', 'manual_entry', 'phone_app', 'camera_scan')),
|
||||
CONSTRAINT upc_scan_history_scan_confidence_check CHECK (scan_confidence IS NULL OR (scan_confidence >= 0 AND scan_confidence <= 1))
|
||||
);
|
||||
COMMENT ON TABLE public.upc_scan_history IS 'Audit trail of all UPC barcode scans performed by users, tracking scan source and results.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.upc_code IS 'The scanned UPC/EAN barcode (8-14 digits).';
|
||||
COMMENT ON COLUMN public.upc_scan_history.product_id IS 'Reference to the matched product, if found in our database.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.scan_source IS 'How the scan was performed: image_upload, manual_entry, phone_app, or camera_scan.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.scan_confidence IS 'Confidence score from barcode detection (0.0-1.0), null for manual entry.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.raw_image_path IS 'Path to the uploaded barcode image, if applicable.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.lookup_successful IS 'Whether the UPC was successfully matched to a product (internal or external).';
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_user_id ON public.upc_scan_history(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_upc_code ON public.upc_scan_history(upc_code);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_created_at ON public.upc_scan_history(created_at DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_product_id ON public.upc_scan_history(product_id) WHERE product_id IS NOT NULL;
|
||||
|
||||
-- 60. UPC External Lookups - cache for external UPC database API responses
|
||||
CREATE TABLE IF NOT EXISTS public.upc_external_lookups (
|
||||
lookup_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
upc_code TEXT NOT NULL UNIQUE,
|
||||
product_name TEXT,
|
||||
brand_name TEXT,
|
||||
category TEXT,
|
||||
description TEXT,
|
||||
image_url TEXT,
|
||||
external_source TEXT NOT NULL,
|
||||
lookup_data JSONB,
|
||||
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT upc_external_lookups_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
|
||||
CONSTRAINT upc_external_lookups_external_source_check CHECK (external_source IN ('openfoodfacts', 'upcitemdb', 'manual', 'unknown')),
|
||||
CONSTRAINT upc_external_lookups_name_check CHECK (NOT lookup_successful OR product_name IS NOT NULL)
|
||||
);
|
||||
COMMENT ON TABLE public.upc_external_lookups IS 'Cache for external UPC database API responses to reduce API calls and improve lookup speed.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.upc_code IS 'The UPC/EAN barcode that was looked up.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.product_name IS 'Product name returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.brand_name IS 'Brand name returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.category IS 'Product category returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.description IS 'Product description returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.image_url IS 'Product image URL returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.external_source IS 'Which external API provided this data: openfoodfacts, upcitemdb, manual, unknown.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.lookup_data IS 'Full raw JSON response from the external API for reference.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.lookup_successful IS 'Whether the external lookup found product information.';
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_upc_code ON public.upc_external_lookups(upc_code);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_external_source ON public.upc_external_lookups(external_source);
|
||||
|
||||
-- Add index to existing products.upc_code for faster lookups
|
||||
CREATE INDEX IF NOT EXISTS idx_products_upc_code ON public.products(upc_code) WHERE upc_code IS NOT NULL;
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
-- EXPIRY DATE TRACKING FEATURE TABLES (61-63)
|
||||
-- ============================================================================
|
||||
|
||||
-- 61. Expiry Date Ranges - reference table for typical shelf life
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_date_ranges (
|
||||
expiry_range_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE CASCADE,
|
||||
item_pattern TEXT,
|
||||
storage_location TEXT NOT NULL,
|
||||
min_days INTEGER NOT NULL,
|
||||
max_days INTEGER NOT NULL,
|
||||
typical_days INTEGER NOT NULL,
|
||||
notes TEXT,
|
||||
source TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT expiry_date_ranges_storage_location_check CHECK (storage_location IN ('fridge', 'freezer', 'pantry', 'room_temp')),
|
||||
CONSTRAINT expiry_date_ranges_min_days_check CHECK (min_days >= 0),
|
||||
CONSTRAINT expiry_date_ranges_max_days_check CHECK (max_days >= min_days),
|
||||
CONSTRAINT expiry_date_ranges_typical_days_check CHECK (typical_days >= min_days AND typical_days <= max_days),
|
||||
CONSTRAINT expiry_date_ranges_identifier_check CHECK (
|
||||
master_item_id IS NOT NULL OR category_id IS NOT NULL OR item_pattern IS NOT NULL
|
||||
),
|
||||
CONSTRAINT expiry_date_ranges_source_check CHECK (source IS NULL OR source IN ('usda', 'fda', 'manual', 'community'))
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_date_ranges IS 'Reference table storing typical shelf life for grocery items based on storage location.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.master_item_id IS 'Specific item this range applies to (most specific).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.category_id IS 'Category this range applies to (fallback if no item match).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.item_pattern IS 'Regex pattern to match item names (fallback if no item/category match).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.storage_location IS 'Where the item is stored: fridge, freezer, pantry, or room_temp.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.min_days IS 'Minimum shelf life in days under proper storage.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.max_days IS 'Maximum shelf life in days under proper storage.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.typical_days IS 'Most common/recommended shelf life in days.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.notes IS 'Additional storage tips or warnings.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.source IS 'Data source: usda, fda, manual, or community.';
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_master_item_id ON public.expiry_date_ranges(master_item_id) WHERE master_item_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_category_id ON public.expiry_date_ranges(category_id) WHERE category_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_storage_location ON public.expiry_date_ranges(storage_location);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_item_location
|
||||
ON public.expiry_date_ranges(master_item_id, storage_location)
|
||||
WHERE master_item_id IS NOT NULL;
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_category_location
|
||||
ON public.expiry_date_ranges(category_id, storage_location)
|
||||
WHERE category_id IS NOT NULL AND master_item_id IS NULL;
|
||||
|
||||
-- 62. Expiry Alerts - user notification preferences for expiry warnings
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_alerts (
|
||||
expiry_alert_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
days_before_expiry INTEGER NOT NULL DEFAULT 3,
|
||||
alert_method TEXT NOT NULL,
|
||||
is_enabled BOOLEAN DEFAULT TRUE NOT NULL,
|
||||
last_alert_sent_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT expiry_alerts_days_before_check CHECK (days_before_expiry >= 0 AND days_before_expiry <= 30),
|
||||
CONSTRAINT expiry_alerts_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
|
||||
UNIQUE(user_id, alert_method)
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_alerts IS 'User preferences for expiry date notifications and alerts.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.days_before_expiry IS 'How many days before expiry to send alert (0-30).';
|
||||
COMMENT ON COLUMN public.expiry_alerts.alert_method IS 'How to notify: email, push, or in_app.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.is_enabled IS 'Whether this alert type is currently enabled.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.last_alert_sent_at IS 'Timestamp of the last alert sent to prevent duplicate notifications.';
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_user_id ON public.expiry_alerts(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_enabled ON public.expiry_alerts(user_id, is_enabled) WHERE is_enabled = TRUE;
|
||||
|
||||
-- 63. Expiry Alert Log - tracks sent notifications
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_alert_log (
|
||||
alert_log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL,
|
||||
alert_type TEXT NOT NULL,
|
||||
alert_method TEXT NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
expiry_date DATE,
|
||||
days_until_expiry INTEGER,
|
||||
sent_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT expiry_alert_log_type_check CHECK (alert_type IN ('expiring_soon', 'expired', 'expiry_reminder')),
|
||||
CONSTRAINT expiry_alert_log_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
|
||||
CONSTRAINT expiry_alert_log_item_name_check CHECK (TRIM(item_name) <> '')
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_alert_log IS 'Log of all expiry notifications sent to users for auditing and duplicate prevention.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.pantry_item_id IS 'The pantry item that triggered the alert (may be null if item deleted).';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.alert_type IS 'Type of alert: expiring_soon, expired, or expiry_reminder.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.alert_method IS 'How the alert was sent: email, push, or in_app.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.item_name IS 'Snapshot of item name at time of alert (in case item is deleted).';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.expiry_date IS 'The expiry date that triggered the alert.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.days_until_expiry IS 'Days until expiry at time alert was sent (negative = expired).';
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_user_id ON public.expiry_alert_log(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_pantry_item_id ON public.expiry_alert_log(pantry_item_id) WHERE pantry_item_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_sent_at ON public.expiry_alert_log(sent_at DESC);
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
-- RECEIPT SCANNING ENHANCEMENT TABLES (64-65)
|
||||
-- ============================================================================
|
||||
|
||||
-- 64. Receipt Processing Log - track OCR/AI processing attempts
|
||||
CREATE TABLE IF NOT EXISTS public.receipt_processing_log (
|
||||
log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
receipt_id BIGINT NOT NULL REFERENCES public.receipts(receipt_id) ON DELETE CASCADE,
|
||||
processing_step TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
provider TEXT,
|
||||
duration_ms INTEGER,
|
||||
tokens_used INTEGER,
|
||||
cost_cents INTEGER,
|
||||
input_data JSONB,
|
||||
output_data JSONB,
|
||||
error_message TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT receipt_processing_log_step_check CHECK (processing_step IN (
|
||||
'upload', 'ocr_extraction', 'text_parsing', 'store_detection',
|
||||
'item_extraction', 'item_matching', 'price_parsing', 'finalization'
|
||||
)),
|
||||
CONSTRAINT receipt_processing_log_status_check CHECK (status IN ('started', 'completed', 'failed', 'skipped')),
|
||||
CONSTRAINT receipt_processing_log_provider_check CHECK (provider IS NULL OR provider IN (
|
||||
'tesseract', 'openai', 'anthropic', 'google_vision', 'aws_textract', 'internal'
|
||||
))
|
||||
);
|
||||
COMMENT ON TABLE public.receipt_processing_log IS 'Detailed log of each processing step for receipts, useful for debugging and cost tracking.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.processing_step IS 'Which processing step this log entry is for.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.status IS 'Status of this step: started, completed, failed, skipped.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.provider IS 'External service used: tesseract, openai, anthropic, etc.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.duration_ms IS 'How long this step took in milliseconds.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.tokens_used IS 'Number of API tokens used (for LLM providers).';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.cost_cents IS 'Estimated cost in cents for this processing step.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.input_data IS 'Input data sent to the processing step (for debugging).';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.output_data IS 'Output data received from the processing step.';
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_receipt_id ON public.receipt_processing_log(receipt_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_step_status ON public.receipt_processing_log(processing_step, status);
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_created_at ON public.receipt_processing_log(created_at DESC);
|
||||
|
||||
-- 65. Store-specific receipt patterns - help identify stores from receipt text
|
||||
CREATE TABLE IF NOT EXISTS public.store_receipt_patterns (
|
||||
pattern_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||
pattern_type TEXT NOT NULL,
|
||||
pattern_value TEXT NOT NULL,
|
||||
priority INTEGER DEFAULT 0,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
CONSTRAINT store_receipt_patterns_type_check CHECK (pattern_type IN (
|
||||
'header_regex', 'footer_regex', 'phone_number', 'address_fragment', 'store_number_format'
|
||||
)),
|
||||
CONSTRAINT store_receipt_patterns_value_check CHECK (TRIM(pattern_value) <> ''),
|
||||
UNIQUE(store_id, pattern_type, pattern_value)
|
||||
);
|
||||
COMMENT ON TABLE public.store_receipt_patterns IS 'Patterns to help identify stores from receipt text and format.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.pattern_type IS 'Type of pattern: header_regex, footer_regex, phone_number, etc.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.pattern_value IS 'The actual pattern (regex or literal text).';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.priority IS 'Higher priority patterns are checked first.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.is_active IS 'Whether this pattern is currently in use.';
|
||||
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_store_id ON public.store_receipt_patterns(store_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_active ON public.store_receipt_patterns(pattern_type, is_active, priority DESC)
|
||||
WHERE is_active = TRUE;
|
||||
|
||||
|
||||
-- ============================================================================
|
||||
-- PART 2: DATA SEEDING
|
||||
@@ -1258,6 +1487,7 @@ ON CONFLICT (name) DO NOTHING;
|
||||
|
||||
-- Pre-populate the achievements table.
|
||||
INSERT INTO public.achievements (name, description, icon, points_value) VALUES
|
||||
('Welcome Aboard', 'Join the community by creating your account.', 'user-check', 5),
|
||||
('First Recipe', 'Create your very first recipe.', 'chef-hat', 10),
|
||||
('Recipe Sharer', 'Share a recipe with another user for the first time.', 'share-2', 15),
|
||||
('List Sharer', 'Share a shopping list with another user for the first time.', 'list', 20),
|
||||
@@ -1267,6 +1497,55 @@ INSERT INTO public.achievements (name, description, icon, points_value) VALUES
|
||||
('First-Upload', 'Upload your first flyer.', 'upload-cloud', 25)
|
||||
ON CONFLICT (name) DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- PART 0: OBSERVABILITY HELPERS (ADR-050)
|
||||
-- ============================================================================
|
||||
-- These functions provide structured logging capabilities for database functions.
|
||||
-- Logs are emitted via RAISE statements and can be captured by Logstash for
|
||||
-- forwarding to error tracking systems (see ADR-015).
|
||||
|
||||
-- Function to emit structured log messages from PL/pgSQL functions.
|
||||
-- This enables observability for database operations that might otherwise fail silently.
|
||||
DROP FUNCTION IF EXISTS public.fn_log(TEXT, TEXT, TEXT, JSONB);
|
||||
|
||||
CREATE OR REPLACE FUNCTION public.fn_log(
|
||||
p_level TEXT, -- 'DEBUG', 'INFO', 'NOTICE', 'WARNING', 'ERROR'
|
||||
p_function_name TEXT, -- The calling function name
|
||||
p_message TEXT, -- Human-readable message
|
||||
p_context JSONB DEFAULT NULL -- Additional context (user_id, params, etc.)
|
||||
)
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
AS $$
|
||||
DECLARE
|
||||
log_line TEXT;
|
||||
BEGIN
|
||||
-- Build structured JSON log line for Logstash parsing
|
||||
log_line := jsonb_build_object(
|
||||
'timestamp', now(),
|
||||
'level', p_level,
|
||||
'source', 'postgresql',
|
||||
'function', p_function_name,
|
||||
'message', p_message,
|
||||
'context', COALESCE(p_context, '{}'::jsonb)
|
||||
)::text;
|
||||
|
||||
-- Use appropriate RAISE level based on severity
|
||||
-- Note: We use RAISE LOG for errors to ensure they're always captured
|
||||
-- regardless of client_min_messages setting
|
||||
CASE UPPER(p_level)
|
||||
WHEN 'DEBUG' THEN RAISE DEBUG '%', log_line;
|
||||
WHEN 'INFO' THEN RAISE INFO '%', log_line;
|
||||
WHEN 'NOTICE' THEN RAISE NOTICE '%', log_line;
|
||||
WHEN 'WARNING' THEN RAISE WARNING '%', log_line;
|
||||
WHEN 'ERROR' THEN RAISE LOG '%', log_line;
|
||||
ELSE RAISE NOTICE '%', log_line;
|
||||
END CASE;
|
||||
END;
|
||||
$$;
|
||||
|
||||
COMMENT ON FUNCTION public.fn_log IS 'Emits structured JSON log messages for database function observability (ADR-050)';
|
||||
|
||||
-- ============================================================================
|
||||
-- PART 3: DATABASE FUNCTIONS
|
||||
-- ============================================================================
|
||||
@@ -1487,13 +1766,32 @@ AS $$
|
||||
DECLARE
|
||||
list_owner_id UUID;
|
||||
item_to_add RECORD;
|
||||
v_items_added INTEGER := 0;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object(
|
||||
'user_id', p_user_id,
|
||||
'menu_plan_id', p_menu_plan_id,
|
||||
'shopping_list_id', p_shopping_list_id
|
||||
);
|
||||
|
||||
-- Security Check: Ensure the user calling this function owns the target shopping list.
|
||||
SELECT user_id INTO list_owner_id
|
||||
FROM public.shopping_lists
|
||||
WHERE shopping_list_id = p_shopping_list_id;
|
||||
|
||||
IF list_owner_id IS NULL OR list_owner_id <> p_user_id THEN
|
||||
IF list_owner_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'add_menu_plan_to_shopping_list',
|
||||
'Shopping list not found',
|
||||
v_context);
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
IF list_owner_id <> p_user_id THEN
|
||||
PERFORM fn_log('WARNING', 'add_menu_plan_to_shopping_list',
|
||||
'Permission denied: user does not own list',
|
||||
v_context || jsonb_build_object('list_owner_id', list_owner_id));
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
@@ -1508,9 +1806,16 @@ BEGIN
|
||||
DO UPDATE SET
|
||||
quantity = shopping_list_items.quantity + EXCLUDED.quantity;
|
||||
|
||||
v_items_added := v_items_added + 1;
|
||||
|
||||
-- Return the details of the item that was added/updated.
|
||||
RETURN QUERY SELECT item_to_add.master_item_id, item_to_add.item_name, item_to_add.shopping_list_quantity;
|
||||
END LOOP;
|
||||
|
||||
-- Log completion (items_added = 0 is normal if pantry has everything)
|
||||
PERFORM fn_log('INFO', 'add_menu_plan_to_shopping_list',
|
||||
'Menu plan items added to shopping list',
|
||||
v_context || jsonb_build_object('items_added', v_items_added));
|
||||
END;
|
||||
$$;
|
||||
|
||||
@@ -2038,13 +2343,32 @@ AS $$
|
||||
DECLARE
|
||||
list_owner_id UUID;
|
||||
new_trip_id BIGINT;
|
||||
v_items_count INTEGER;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object(
|
||||
'user_id', p_user_id,
|
||||
'shopping_list_id', p_shopping_list_id,
|
||||
'total_spent_cents', p_total_spent_cents
|
||||
);
|
||||
|
||||
-- Security Check: Ensure the user calling this function owns the target shopping list.
|
||||
SELECT user_id INTO list_owner_id
|
||||
FROM public.shopping_lists
|
||||
WHERE shopping_list_id = p_shopping_list_id;
|
||||
|
||||
IF list_owner_id IS NULL OR list_owner_id <> p_user_id THEN
|
||||
IF list_owner_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'complete_shopping_list',
|
||||
'Shopping list not found',
|
||||
v_context);
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
IF list_owner_id <> p_user_id THEN
|
||||
PERFORM fn_log('WARNING', 'complete_shopping_list',
|
||||
'Permission denied: user does not own list',
|
||||
v_context || jsonb_build_object('list_owner_id', list_owner_id));
|
||||
RAISE EXCEPTION 'Permission denied: You do not own shopping list %', p_shopping_list_id;
|
||||
END IF;
|
||||
|
||||
@@ -2059,10 +2383,17 @@ BEGIN
|
||||
FROM public.shopping_list_items
|
||||
WHERE shopping_list_id = p_shopping_list_id AND is_purchased = true;
|
||||
|
||||
GET DIAGNOSTICS v_items_count = ROW_COUNT;
|
||||
|
||||
-- 3. Delete the purchased items from the original shopping list.
|
||||
DELETE FROM public.shopping_list_items
|
||||
WHERE shopping_list_id = p_shopping_list_id AND is_purchased = true;
|
||||
|
||||
-- Log successful completion
|
||||
PERFORM fn_log('INFO', 'complete_shopping_list',
|
||||
'Shopping list completed successfully',
|
||||
v_context || jsonb_build_object('trip_id', new_trip_id, 'items_archived', v_items_count));
|
||||
|
||||
RETURN new_trip_id;
|
||||
END;
|
||||
$$;
|
||||
@@ -2197,16 +2528,30 @@ SECURITY DEFINER
|
||||
AS $$
|
||||
DECLARE
|
||||
correction_record RECORD;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object('correction_id', p_correction_id);
|
||||
|
||||
-- 1. Fetch the correction details, ensuring it's still pending.
|
||||
SELECT * INTO correction_record
|
||||
FROM public.suggested_corrections
|
||||
WHERE suggested_correction_id = p_correction_id AND status = 'pending';
|
||||
|
||||
IF NOT FOUND THEN
|
||||
PERFORM fn_log('WARNING', 'approve_correction',
|
||||
'Correction not found or already processed',
|
||||
v_context);
|
||||
RAISE EXCEPTION 'Correction with ID % not found or already processed.', p_correction_id;
|
||||
END IF;
|
||||
|
||||
-- Add correction details to context
|
||||
v_context := v_context || jsonb_build_object(
|
||||
'correction_type', correction_record.correction_type,
|
||||
'flyer_item_id', correction_record.flyer_item_id,
|
||||
'suggested_value', correction_record.suggested_value
|
||||
);
|
||||
|
||||
-- 2. Apply the correction based on its type.
|
||||
IF correction_record.correction_type = 'INCORRECT_ITEM_LINK' THEN
|
||||
UPDATE public.flyer_items
|
||||
@@ -2222,6 +2567,11 @@ BEGIN
|
||||
UPDATE public.suggested_corrections
|
||||
SET status = 'approved', reviewed_at = now()
|
||||
WHERE suggested_correction_id = p_correction_id;
|
||||
|
||||
-- Log successful correction approval
|
||||
PERFORM fn_log('INFO', 'approve_correction',
|
||||
'Correction approved and applied',
|
||||
v_context);
|
||||
END;
|
||||
$$;
|
||||
|
||||
@@ -2236,13 +2586,19 @@ AS $$
|
||||
DECLARE
|
||||
v_achievement_id BIGINT;
|
||||
v_points_value INTEGER;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object('user_id', p_user_id, 'achievement_name', p_achievement_name);
|
||||
|
||||
-- Find the achievement by name to get its ID and point value.
|
||||
SELECT achievement_id, points_value INTO v_achievement_id, v_points_value
|
||||
FROM public.achievements WHERE name = p_achievement_name;
|
||||
|
||||
-- If the achievement doesn't exist, do nothing.
|
||||
-- If the achievement doesn't exist, log warning and return.
|
||||
IF v_achievement_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'award_achievement',
|
||||
'Achievement not found: ' || p_achievement_name, v_context);
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
@@ -2254,9 +2610,12 @@ BEGIN
|
||||
ON CONFLICT (user_id, achievement_id) DO NOTHING;
|
||||
|
||||
-- If the insert was successful (i.e., the user didn't have the achievement),
|
||||
-- update their total points. The `GET DIAGNOSTICS` command checks the row count of the last query.
|
||||
-- update their total points and log success.
|
||||
IF FOUND THEN
|
||||
UPDATE public.profiles SET points = points + v_points_value WHERE user_id = p_user_id;
|
||||
PERFORM fn_log('INFO', 'award_achievement',
|
||||
'Achievement awarded: ' || p_achievement_name,
|
||||
v_context || jsonb_build_object('points_awarded', v_points_value));
|
||||
END IF;
|
||||
END;
|
||||
$$;
|
||||
@@ -2279,7 +2638,14 @@ SECURITY INVOKER
|
||||
AS $$
|
||||
DECLARE
|
||||
new_recipe_id BIGINT;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object(
|
||||
'user_id', p_user_id,
|
||||
'original_recipe_id', p_original_recipe_id
|
||||
);
|
||||
|
||||
-- 1. Create a copy of the recipe, linking it to the new user and the original recipe.
|
||||
INSERT INTO public.recipes (
|
||||
user_id,
|
||||
@@ -2318,6 +2684,9 @@ BEGIN
|
||||
|
||||
-- If the original recipe didn't exist, new_recipe_id will be null.
|
||||
IF new_recipe_id IS NULL THEN
|
||||
PERFORM fn_log('WARNING', 'fork_recipe',
|
||||
'Original recipe not found',
|
||||
v_context);
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
@@ -2326,6 +2695,11 @@ BEGIN
|
||||
INSERT INTO public.recipe_tags (recipe_id, tag_id) SELECT new_recipe_id, tag_id FROM public.recipe_tags WHERE recipe_id = p_original_recipe_id;
|
||||
INSERT INTO public.recipe_appliances (recipe_id, appliance_id) SELECT new_recipe_id, appliance_id FROM public.recipe_appliances WHERE recipe_id = p_original_recipe_id;
|
||||
|
||||
-- Log successful fork
|
||||
PERFORM fn_log('INFO', 'fork_recipe',
|
||||
'Recipe forked successfully',
|
||||
v_context || jsonb_build_object('new_recipe_id', new_recipe_id));
|
||||
|
||||
-- 3. Return the newly created recipe record.
|
||||
RETURN QUERY SELECT * FROM public.recipes WHERE recipe_id = new_recipe_id;
|
||||
END;
|
||||
@@ -2346,13 +2720,25 @@ RETURNS TRIGGER AS $$
|
||||
DECLARE
|
||||
new_profile_id UUID;
|
||||
user_meta_data JSONB;
|
||||
v_context JSONB;
|
||||
BEGIN
|
||||
-- Build context for logging
|
||||
v_context := jsonb_build_object('user_id', new.user_id, 'email', new.email);
|
||||
|
||||
-- The user's metadata (full_name, avatar_url) is passed via a temporary session variable.
|
||||
user_meta_data := current_setting('my_app.user_metadata', true)::JSONB;
|
||||
|
||||
INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
|
||||
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
|
||||
RETURNING user_id INTO new_profile_id;
|
||||
-- Create the user profile
|
||||
BEGIN
|
||||
INSERT INTO public.profiles (user_id, role, full_name, avatar_url)
|
||||
VALUES (new.user_id, 'user', user_meta_data->>'full_name', user_meta_data->>'avatar_url')
|
||||
RETURNING user_id INTO new_profile_id;
|
||||
EXCEPTION WHEN OTHERS THEN
|
||||
PERFORM fn_log('ERROR', 'handle_new_user',
|
||||
'Failed to create profile: ' || SQLERRM,
|
||||
v_context || jsonb_build_object('sqlstate', SQLSTATE));
|
||||
RAISE;
|
||||
END;
|
||||
|
||||
-- Also create a default shopping list for the new user.
|
||||
INSERT INTO public.shopping_lists (user_id, name)
|
||||
@@ -2365,6 +2751,15 @@ BEGIN
|
||||
'user-plus',
|
||||
jsonb_build_object('email', new.email)
|
||||
);
|
||||
|
||||
-- Award the 'Welcome Aboard' achievement for new user registration
|
||||
PERFORM public.award_achievement(new.user_id, 'Welcome Aboard');
|
||||
|
||||
-- Log successful user creation
|
||||
PERFORM fn_log('INFO', 'handle_new_user',
|
||||
'New user created successfully',
|
||||
v_context || jsonb_build_object('full_name', user_meta_data->>'full_name'));
|
||||
|
||||
RETURN new;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
90
sql/migrations/001_upc_scanning.sql
Normal file
90
sql/migrations/001_upc_scanning.sql
Normal file
@@ -0,0 +1,90 @@
|
||||
-- sql/migrations/001_upc_scanning.sql
|
||||
-- ============================================================================
|
||||
-- UPC SCANNING FEATURE MIGRATION
|
||||
-- ============================================================================
|
||||
-- Purpose:
|
||||
-- This migration adds tables to support UPC barcode scanning functionality:
|
||||
-- 1. upc_scan_history - Audit trail of all UPC scans performed by users
|
||||
-- 2. upc_external_lookups - Cache for external UPC database API responses
|
||||
--
|
||||
-- The products.upc_code column already exists in the schema.
|
||||
-- These tables extend the functionality to track scans and cache lookups.
|
||||
-- ============================================================================
|
||||
|
||||
-- 1. UPC Scan History - tracks all UPC scans performed by users
|
||||
-- This table provides an audit trail and allows users to see their scan history
|
||||
CREATE TABLE IF NOT EXISTS public.upc_scan_history (
|
||||
scan_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
upc_code TEXT NOT NULL,
|
||||
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||
scan_source TEXT NOT NULL,
|
||||
scan_confidence NUMERIC(5,4),
|
||||
raw_image_path TEXT,
|
||||
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-- Validate UPC code format (8-14 digits for UPC-A, UPC-E, EAN-8, EAN-13, etc.)
|
||||
CONSTRAINT upc_scan_history_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
|
||||
-- Validate scan source is one of the allowed values
|
||||
CONSTRAINT upc_scan_history_scan_source_check CHECK (scan_source IN ('image_upload', 'manual_entry', 'phone_app', 'camera_scan')),
|
||||
-- Confidence score must be between 0 and 1 if provided
|
||||
CONSTRAINT upc_scan_history_scan_confidence_check CHECK (scan_confidence IS NULL OR (scan_confidence >= 0 AND scan_confidence <= 1))
|
||||
);
|
||||
COMMENT ON TABLE public.upc_scan_history IS 'Audit trail of all UPC barcode scans performed by users, tracking scan source and results.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.upc_code IS 'The scanned UPC/EAN barcode (8-14 digits).';
|
||||
COMMENT ON COLUMN public.upc_scan_history.product_id IS 'Reference to the matched product, if found in our database.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.scan_source IS 'How the scan was performed: image_upload, manual_entry, phone_app, or camera_scan.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.scan_confidence IS 'Confidence score from barcode detection (0.0-1.0), null for manual entry.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.raw_image_path IS 'Path to the uploaded barcode image, if applicable.';
|
||||
COMMENT ON COLUMN public.upc_scan_history.lookup_successful IS 'Whether the UPC was successfully matched to a product (internal or external).';
|
||||
|
||||
-- Indexes for upc_scan_history
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_user_id ON public.upc_scan_history(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_upc_code ON public.upc_scan_history(upc_code);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_created_at ON public.upc_scan_history(created_at DESC);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_product_id ON public.upc_scan_history(product_id) WHERE product_id IS NOT NULL;
|
||||
|
||||
|
||||
-- 2. UPC External Lookups - cache for external UPC database API responses
|
||||
-- This table caches results from external UPC databases (OpenFoodFacts, UPC Item DB, etc.)
|
||||
-- to reduce API calls and improve response times for repeated lookups
|
||||
CREATE TABLE IF NOT EXISTS public.upc_external_lookups (
|
||||
lookup_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
upc_code TEXT NOT NULL UNIQUE,
|
||||
product_name TEXT,
|
||||
brand_name TEXT,
|
||||
category TEXT,
|
||||
description TEXT,
|
||||
image_url TEXT,
|
||||
external_source TEXT NOT NULL,
|
||||
lookup_data JSONB,
|
||||
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-- Validate UPC code format
|
||||
CONSTRAINT upc_external_lookups_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
|
||||
-- Validate external source is one of the supported APIs
|
||||
CONSTRAINT upc_external_lookups_external_source_check CHECK (external_source IN ('openfoodfacts', 'upcitemdb', 'manual', 'unknown')),
|
||||
-- If lookup was successful, product_name should be present
|
||||
CONSTRAINT upc_external_lookups_name_check CHECK (NOT lookup_successful OR product_name IS NOT NULL)
|
||||
);
|
||||
COMMENT ON TABLE public.upc_external_lookups IS 'Cache for external UPC database API responses to reduce API calls and improve lookup speed.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.upc_code IS 'The UPC/EAN barcode that was looked up.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.product_name IS 'Product name returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.brand_name IS 'Brand name returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.category IS 'Product category returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.description IS 'Product description returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.image_url IS 'Product image URL returned from external API.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.external_source IS 'Which external API provided this data: openfoodfacts, upcitemdb, manual, unknown.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.lookup_data IS 'Full raw JSON response from the external API for reference.';
|
||||
COMMENT ON COLUMN public.upc_external_lookups.lookup_successful IS 'Whether the external lookup found product information.';
|
||||
|
||||
-- Index for upc_external_lookups
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_upc_code ON public.upc_external_lookups(upc_code);
|
||||
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_external_source ON public.upc_external_lookups(external_source);
|
||||
|
||||
|
||||
-- 3. Add index to existing products.upc_code if not exists
|
||||
-- This speeds up lookups when matching scanned UPCs to existing products
|
||||
CREATE INDEX IF NOT EXISTS idx_products_upc_code ON public.products(upc_code) WHERE upc_code IS NOT NULL;
|
||||
189
sql/migrations/002_expiry_tracking.sql
Normal file
189
sql/migrations/002_expiry_tracking.sql
Normal file
@@ -0,0 +1,189 @@
|
||||
-- sql/migrations/002_expiry_tracking.sql
|
||||
-- ============================================================================
|
||||
-- EXPIRY DATE TRACKING FEATURE MIGRATION
|
||||
-- ============================================================================
|
||||
-- Purpose:
|
||||
-- This migration adds tables and enhancements for expiry date tracking:
|
||||
-- 1. expiry_date_ranges - Reference table for typical shelf life by item/category
|
||||
-- 2. expiry_alerts - User notification preferences for expiry warnings
|
||||
-- 3. Enhancements to pantry_items for better expiry tracking
|
||||
--
|
||||
-- Existing tables used:
|
||||
-- - pantry_items (already has best_before_date)
|
||||
-- - pantry_locations (already exists for fridge/freezer/pantry)
|
||||
-- - receipts and receipt_items (already exist for receipt scanning)
|
||||
-- ============================================================================
|
||||
|
||||
-- 1. Expiry Date Ranges - reference table for typical shelf life
|
||||
-- This table stores expected shelf life for items based on storage location
|
||||
-- Used to auto-calculate expiry dates when users add items to inventory
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_date_ranges (
|
||||
expiry_range_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE CASCADE,
|
||||
item_pattern TEXT,
|
||||
storage_location TEXT NOT NULL,
|
||||
min_days INTEGER NOT NULL,
|
||||
max_days INTEGER NOT NULL,
|
||||
typical_days INTEGER NOT NULL,
|
||||
notes TEXT,
|
||||
source TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-- Validate storage location is one of the allowed values
|
||||
CONSTRAINT expiry_date_ranges_storage_location_check CHECK (storage_location IN ('fridge', 'freezer', 'pantry', 'room_temp')),
|
||||
-- Validate day ranges are logical
|
||||
CONSTRAINT expiry_date_ranges_min_days_check CHECK (min_days >= 0),
|
||||
CONSTRAINT expiry_date_ranges_max_days_check CHECK (max_days >= min_days),
|
||||
CONSTRAINT expiry_date_ranges_typical_days_check CHECK (typical_days >= min_days AND typical_days <= max_days),
|
||||
-- At least one identifier must be present
|
||||
CONSTRAINT expiry_date_ranges_identifier_check CHECK (
|
||||
master_item_id IS NOT NULL OR category_id IS NOT NULL OR item_pattern IS NOT NULL
|
||||
),
|
||||
-- Validate source is one of the known sources
|
||||
CONSTRAINT expiry_date_ranges_source_check CHECK (source IS NULL OR source IN ('usda', 'fda', 'manual', 'community'))
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_date_ranges IS 'Reference table storing typical shelf life for grocery items based on storage location.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.master_item_id IS 'Specific item this range applies to (most specific).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.category_id IS 'Category this range applies to (fallback if no item match).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.item_pattern IS 'Regex pattern to match item names (fallback if no item/category match).';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.storage_location IS 'Where the item is stored: fridge, freezer, pantry, or room_temp.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.min_days IS 'Minimum shelf life in days under proper storage.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.max_days IS 'Maximum shelf life in days under proper storage.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.typical_days IS 'Most common/recommended shelf life in days.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.notes IS 'Additional storage tips or warnings.';
|
||||
COMMENT ON COLUMN public.expiry_date_ranges.source IS 'Data source: usda, fda, manual, or community.';
|
||||
|
||||
-- Indexes for expiry_date_ranges
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_master_item_id ON public.expiry_date_ranges(master_item_id) WHERE master_item_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_category_id ON public.expiry_date_ranges(category_id) WHERE category_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_storage_location ON public.expiry_date_ranges(storage_location);
|
||||
|
||||
-- Unique constraint to prevent duplicate entries for same item/location combo
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_item_location
|
||||
ON public.expiry_date_ranges(master_item_id, storage_location)
|
||||
WHERE master_item_id IS NOT NULL;
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_category_location
|
||||
ON public.expiry_date_ranges(category_id, storage_location)
|
||||
WHERE category_id IS NOT NULL AND master_item_id IS NULL;
|
||||
|
||||
|
||||
-- 2. Expiry Alerts - user notification preferences for expiry warnings
|
||||
-- This table stores user preferences for when and how to receive expiry notifications
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_alerts (
|
||||
expiry_alert_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
days_before_expiry INTEGER NOT NULL DEFAULT 3,
|
||||
alert_method TEXT NOT NULL,
|
||||
is_enabled BOOLEAN DEFAULT TRUE NOT NULL,
|
||||
last_alert_sent_at TIMESTAMPTZ,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-- Validate days before expiry is reasonable
|
||||
CONSTRAINT expiry_alerts_days_before_check CHECK (days_before_expiry >= 0 AND days_before_expiry <= 30),
|
||||
-- Validate alert method is one of the allowed values
|
||||
CONSTRAINT expiry_alerts_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
|
||||
-- Each user can only have one setting per alert method
|
||||
UNIQUE(user_id, alert_method)
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_alerts IS 'User preferences for expiry date notifications and alerts.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.days_before_expiry IS 'How many days before expiry to send alert (0-30).';
|
||||
COMMENT ON COLUMN public.expiry_alerts.alert_method IS 'How to notify: email, push, or in_app.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.is_enabled IS 'Whether this alert type is currently enabled.';
|
||||
COMMENT ON COLUMN public.expiry_alerts.last_alert_sent_at IS 'Timestamp of the last alert sent to prevent duplicate notifications.';
|
||||
|
||||
-- Indexes for expiry_alerts
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_user_id ON public.expiry_alerts(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_enabled ON public.expiry_alerts(user_id, is_enabled) WHERE is_enabled = TRUE;
|
||||
|
||||
|
||||
-- 3. Expiry Alert Log - tracks sent notifications (for auditing and preventing duplicates)
|
||||
CREATE TABLE IF NOT EXISTS public.expiry_alert_log (
|
||||
alert_log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL,
|
||||
alert_type TEXT NOT NULL,
|
||||
alert_method TEXT NOT NULL,
|
||||
item_name TEXT NOT NULL,
|
||||
expiry_date DATE,
|
||||
days_until_expiry INTEGER,
|
||||
sent_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-- Validate alert type
|
||||
CONSTRAINT expiry_alert_log_type_check CHECK (alert_type IN ('expiring_soon', 'expired', 'expiry_reminder')),
|
||||
-- Validate alert method
|
||||
CONSTRAINT expiry_alert_log_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
|
||||
-- Validate item_name is not empty
|
||||
CONSTRAINT expiry_alert_log_item_name_check CHECK (TRIM(item_name) <> '')
|
||||
);
|
||||
COMMENT ON TABLE public.expiry_alert_log IS 'Log of all expiry notifications sent to users for auditing and duplicate prevention.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.pantry_item_id IS 'The pantry item that triggered the alert (may be null if item deleted).';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.alert_type IS 'Type of alert: expiring_soon, expired, or expiry_reminder.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.alert_method IS 'How the alert was sent: email, push, or in_app.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.item_name IS 'Snapshot of item name at time of alert (in case item is deleted).';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.expiry_date IS 'The expiry date that triggered the alert.';
|
||||
COMMENT ON COLUMN public.expiry_alert_log.days_until_expiry IS 'Days until expiry at time alert was sent (negative = expired).';
|
||||
|
||||
-- Indexes for expiry_alert_log
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_user_id ON public.expiry_alert_log(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_pantry_item_id ON public.expiry_alert_log(pantry_item_id) WHERE pantry_item_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_sent_at ON public.expiry_alert_log(sent_at DESC);
|
||||
|
||||
|
||||
-- 4. Enhancements to pantry_items table
|
||||
-- Add columns to better support expiry tracking from receipts and UPC scans
|
||||
|
||||
-- Add purchase_date column to track when item was bought
|
||||
ALTER TABLE public.pantry_items
|
||||
ADD COLUMN IF NOT EXISTS purchase_date DATE;
|
||||
COMMENT ON COLUMN public.pantry_items.purchase_date IS 'Date the item was purchased (from receipt or manual entry).';
|
||||
|
||||
-- Add source column to track how item was added
|
||||
ALTER TABLE public.pantry_items
|
||||
ADD COLUMN IF NOT EXISTS source TEXT DEFAULT 'manual';
|
||||
-- Note: Cannot add CHECK constraint via ALTER in PostgreSQL, will validate in application
|
||||
|
||||
-- Add receipt_item_id to link back to receipt if added from receipt scan
|
||||
ALTER TABLE public.pantry_items
|
||||
ADD COLUMN IF NOT EXISTS receipt_item_id BIGINT REFERENCES public.receipt_items(receipt_item_id) ON DELETE SET NULL;
|
||||
COMMENT ON COLUMN public.pantry_items.receipt_item_id IS 'Link to receipt_items if this pantry item was created from a receipt scan.';
|
||||
|
||||
-- Add product_id to link to specific product if known from UPC scan
|
||||
ALTER TABLE public.pantry_items
|
||||
ADD COLUMN IF NOT EXISTS product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL;
|
||||
COMMENT ON COLUMN public.pantry_items.product_id IS 'Link to products if this pantry item was created from a UPC scan.';
|
||||
|
||||
-- Add expiry_source to track how expiry date was determined
|
||||
ALTER TABLE public.pantry_items
|
||||
ADD COLUMN IF NOT EXISTS expiry_source TEXT;
|
||||
COMMENT ON COLUMN public.pantry_items.expiry_source IS 'How expiry was determined: manual, calculated, package, receipt.';
|
||||
|
||||
-- Add is_consumed column if not exists (check for existing)
|
||||
ALTER TABLE public.pantry_items
|
||||
ADD COLUMN IF NOT EXISTS is_consumed BOOLEAN DEFAULT FALSE;
|
||||
COMMENT ON COLUMN public.pantry_items.is_consumed IS 'Whether the item has been fully consumed.';
|
||||
|
||||
-- Add consumed_at timestamp
|
||||
ALTER TABLE public.pantry_items
|
||||
ADD COLUMN IF NOT EXISTS consumed_at TIMESTAMPTZ;
|
||||
COMMENT ON COLUMN public.pantry_items.consumed_at IS 'When the item was marked as consumed.';
|
||||
|
||||
-- New indexes for pantry_items expiry queries
|
||||
CREATE INDEX IF NOT EXISTS idx_pantry_items_best_before_date ON public.pantry_items(best_before_date)
|
||||
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
|
||||
CREATE INDEX IF NOT EXISTS idx_pantry_items_expiring_soon ON public.pantry_items(user_id, best_before_date)
|
||||
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
|
||||
CREATE INDEX IF NOT EXISTS idx_pantry_items_receipt_item_id ON public.pantry_items(receipt_item_id)
|
||||
WHERE receipt_item_id IS NOT NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_pantry_items_product_id ON public.pantry_items(product_id)
|
||||
WHERE product_id IS NOT NULL;
|
||||
|
||||
|
||||
-- 5. Add UPC scan support to receipt_items table
|
||||
-- When receipt items are matched via UPC, store the reference
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS upc_code TEXT;
|
||||
COMMENT ON COLUMN public.receipt_items.upc_code IS 'UPC code if extracted from receipt or matched during processing.';
|
||||
|
||||
-- Add constraint for upc_code format (cannot add via ALTER, will validate in app)
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_items_upc_code ON public.receipt_items(upc_code)
|
||||
WHERE upc_code IS NOT NULL;
|
||||
169
sql/migrations/003_receipt_scanning_enhancements.sql
Normal file
169
sql/migrations/003_receipt_scanning_enhancements.sql
Normal file
@@ -0,0 +1,169 @@
|
||||
-- sql/migrations/003_receipt_scanning_enhancements.sql
|
||||
-- ============================================================================
|
||||
-- RECEIPT SCANNING ENHANCEMENTS MIGRATION
|
||||
-- ============================================================================
|
||||
-- Purpose:
|
||||
-- This migration adds enhancements to the existing receipt scanning tables:
|
||||
-- 1. Enhancements to receipts table for better OCR processing
|
||||
-- 2. Enhancements to receipt_items for better item matching
|
||||
-- 3. receipt_processing_log for tracking OCR/AI processing attempts
|
||||
--
|
||||
-- Existing tables:
|
||||
-- - receipts (lines 932-948 in master_schema_rollup.sql)
|
||||
-- - receipt_items (lines 951-966 in master_schema_rollup.sql)
|
||||
-- ============================================================================
|
||||
|
||||
-- 1. Enhancements to receipts table
|
||||
|
||||
-- Add store detection confidence
|
||||
ALTER TABLE public.receipts
|
||||
ADD COLUMN IF NOT EXISTS store_confidence NUMERIC(5,4);
|
||||
COMMENT ON COLUMN public.receipts.store_confidence IS 'Confidence score for store detection (0.0-1.0).';
|
||||
|
||||
-- Add OCR provider used
|
||||
ALTER TABLE public.receipts
|
||||
ADD COLUMN IF NOT EXISTS ocr_provider TEXT;
|
||||
COMMENT ON COLUMN public.receipts.ocr_provider IS 'Which OCR service processed this receipt: tesseract, openai, anthropic.';
|
||||
|
||||
-- Add error details for failed processing
|
||||
ALTER TABLE public.receipts
|
||||
ADD COLUMN IF NOT EXISTS error_details JSONB;
|
||||
COMMENT ON COLUMN public.receipts.error_details IS 'Detailed error information if processing failed.';
|
||||
|
||||
-- Add retry count for failed processing
|
||||
ALTER TABLE public.receipts
|
||||
ADD COLUMN IF NOT EXISTS retry_count INTEGER DEFAULT 0;
|
||||
COMMENT ON COLUMN public.receipts.retry_count IS 'Number of processing retry attempts.';
|
||||
|
||||
-- Add extracted text confidence
|
||||
ALTER TABLE public.receipts
|
||||
ADD COLUMN IF NOT EXISTS ocr_confidence NUMERIC(5,4);
|
||||
COMMENT ON COLUMN public.receipts.ocr_confidence IS 'Overall OCR text extraction confidence score.';
|
||||
|
||||
-- Add currency detection
|
||||
ALTER TABLE public.receipts
|
||||
ADD COLUMN IF NOT EXISTS currency TEXT DEFAULT 'CAD';
|
||||
COMMENT ON COLUMN public.receipts.currency IS 'Detected currency: CAD, USD, etc.';
|
||||
|
||||
-- New indexes for receipt processing
|
||||
CREATE INDEX IF NOT EXISTS idx_receipts_status_retry ON public.receipts(status, retry_count)
|
||||
WHERE status IN ('pending', 'failed') AND retry_count < 3;
|
||||
|
||||
|
||||
-- 2. Enhancements to receipt_items table
|
||||
|
||||
-- Add line number from receipt for ordering
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS line_number INTEGER;
|
||||
COMMENT ON COLUMN public.receipt_items.line_number IS 'Original line number on the receipt for display ordering.';
|
||||
|
||||
-- Add match confidence score
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS match_confidence NUMERIC(5,4);
|
||||
COMMENT ON COLUMN public.receipt_items.match_confidence IS 'Confidence score for item matching (0.0-1.0).';
|
||||
|
||||
-- Add is_discount flag for discount/coupon lines
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS is_discount BOOLEAN DEFAULT FALSE;
|
||||
COMMENT ON COLUMN public.receipt_items.is_discount IS 'Whether this line is a discount/coupon (negative price).';
|
||||
|
||||
-- Add unit_price if per-unit pricing detected
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS unit_price_cents INTEGER;
|
||||
COMMENT ON COLUMN public.receipt_items.unit_price_cents IS 'Per-unit price if detected (e.g., price per kg).';
|
||||
|
||||
-- Add unit type if detected
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS unit_type TEXT;
|
||||
COMMENT ON COLUMN public.receipt_items.unit_type IS 'Unit type if detected: kg, lb, each, etc.';
|
||||
|
||||
-- Add added_to_pantry flag
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS added_to_pantry BOOLEAN DEFAULT FALSE;
|
||||
COMMENT ON COLUMN public.receipt_items.added_to_pantry IS 'Whether this item has been added to user pantry.';
|
||||
|
||||
-- Add pantry_item_id link
|
||||
ALTER TABLE public.receipt_items
|
||||
ADD COLUMN IF NOT EXISTS pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL;
|
||||
COMMENT ON COLUMN public.receipt_items.pantry_item_id IS 'Link to pantry_items if this receipt item was added to pantry.';
|
||||
|
||||
-- New indexes for receipt_items
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_items_status ON public.receipt_items(status);
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_items_added_to_pantry ON public.receipt_items(receipt_id, added_to_pantry)
|
||||
WHERE added_to_pantry = FALSE;
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_items_pantry_item_id ON public.receipt_items(pantry_item_id)
|
||||
WHERE pantry_item_id IS NOT NULL;
|
||||
|
||||
|
||||
-- 3. Receipt Processing Log - track OCR/AI processing attempts
|
||||
-- Useful for debugging, monitoring costs, and improving processing
|
||||
CREATE TABLE IF NOT EXISTS public.receipt_processing_log (
|
||||
log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
receipt_id BIGINT NOT NULL REFERENCES public.receipts(receipt_id) ON DELETE CASCADE,
|
||||
processing_step TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
provider TEXT,
|
||||
duration_ms INTEGER,
|
||||
tokens_used INTEGER,
|
||||
cost_cents INTEGER,
|
||||
input_data JSONB,
|
||||
output_data JSONB,
|
||||
error_message TEXT,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-- Validate processing step
|
||||
CONSTRAINT receipt_processing_log_step_check CHECK (processing_step IN (
|
||||
'upload', 'ocr_extraction', 'text_parsing', 'store_detection',
|
||||
'item_extraction', 'item_matching', 'price_parsing', 'finalization'
|
||||
)),
|
||||
-- Validate status
|
||||
CONSTRAINT receipt_processing_log_status_check CHECK (status IN ('started', 'completed', 'failed', 'skipped')),
|
||||
-- Validate provider if specified
|
||||
CONSTRAINT receipt_processing_log_provider_check CHECK (provider IS NULL OR provider IN (
|
||||
'tesseract', 'openai', 'anthropic', 'google_vision', 'aws_textract', 'internal'
|
||||
))
|
||||
);
|
||||
COMMENT ON TABLE public.receipt_processing_log IS 'Detailed log of each processing step for receipts, useful for debugging and cost tracking.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.processing_step IS 'Which processing step this log entry is for.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.status IS 'Status of this step: started, completed, failed, skipped.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.provider IS 'External service used: tesseract, openai, anthropic, etc.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.duration_ms IS 'How long this step took in milliseconds.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.tokens_used IS 'Number of API tokens used (for LLM providers).';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.cost_cents IS 'Estimated cost in cents for this processing step.';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.input_data IS 'Input data sent to the processing step (for debugging).';
|
||||
COMMENT ON COLUMN public.receipt_processing_log.output_data IS 'Output data received from the processing step.';
|
||||
|
||||
-- Indexes for receipt_processing_log
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_receipt_id ON public.receipt_processing_log(receipt_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_step_status ON public.receipt_processing_log(processing_step, status);
|
||||
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_created_at ON public.receipt_processing_log(created_at DESC);
|
||||
|
||||
|
||||
-- 4. Store-specific receipt patterns - help identify stores from receipt text
|
||||
CREATE TABLE IF NOT EXISTS public.store_receipt_patterns (
|
||||
pattern_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||
pattern_type TEXT NOT NULL,
|
||||
pattern_value TEXT NOT NULL,
|
||||
priority INTEGER DEFAULT 0,
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
-- Validate pattern type
|
||||
CONSTRAINT store_receipt_patterns_type_check CHECK (pattern_type IN (
|
||||
'header_regex', 'footer_regex', 'phone_number', 'address_fragment', 'store_number_format'
|
||||
)),
|
||||
-- Validate pattern is not empty
|
||||
CONSTRAINT store_receipt_patterns_value_check CHECK (TRIM(pattern_value) <> ''),
|
||||
-- Unique constraint per store/type/value
|
||||
UNIQUE(store_id, pattern_type, pattern_value)
|
||||
);
|
||||
COMMENT ON TABLE public.store_receipt_patterns IS 'Patterns to help identify stores from receipt text and format.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.pattern_type IS 'Type of pattern: header_regex, footer_regex, phone_number, etc.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.pattern_value IS 'The actual pattern (regex or literal text).';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.priority IS 'Higher priority patterns are checked first.';
|
||||
COMMENT ON COLUMN public.store_receipt_patterns.is_active IS 'Whether this pattern is currently in use.';
|
||||
|
||||
-- Indexes for store_receipt_patterns
|
||||
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_store_id ON public.store_receipt_patterns(store_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_active ON public.store_receipt_patterns(pattern_type, is_active, priority DESC)
|
||||
WHERE is_active = TRUE;
|
||||
@@ -8,8 +8,8 @@ import * as apiClient from '../services/apiClient';
|
||||
import { useModal } from '../hooks/useModal';
|
||||
import { renderWithProviders } from '../tests/utils/renderWithProviders';
|
||||
|
||||
// Mock dependencies
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
// Must explicitly call vi.mock() for apiClient
|
||||
vi.mock('../services/apiClient');
|
||||
vi.mock('../hooks/useAppInitialization');
|
||||
vi.mock('../hooks/useModal');
|
||||
vi.mock('./WhatsNewModal', () => ({
|
||||
|
||||
@@ -22,7 +22,9 @@ describe('ConfirmationModal (in components)', () => {
|
||||
});
|
||||
|
||||
it('should not render when isOpen is false', () => {
|
||||
const { container } = renderWithProviders(<ConfirmationModal {...defaultProps} isOpen={false} />);
|
||||
const { container } = renderWithProviders(
|
||||
<ConfirmationModal {...defaultProps} isOpen={false} />,
|
||||
);
|
||||
expect(container.firstChild).toBeNull();
|
||||
});
|
||||
|
||||
|
||||
@@ -64,4 +64,4 @@ describe('Dashboard Component', () => {
|
||||
expect(gridContainer).toHaveClass('lg:grid-cols-3');
|
||||
expect(gridContainer).toHaveClass('gap-6');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,7 +7,7 @@ export const Dashboard: React.FC = () => {
|
||||
return (
|
||||
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
|
||||
<h1 className="text-2xl font-bold text-gray-900 dark:text-white mb-6">Dashboard</h1>
|
||||
|
||||
|
||||
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||
{/* Main Content Area */}
|
||||
<div className="lg:col-span-2 space-y-6">
|
||||
@@ -30,4 +30,4 @@ export const Dashboard: React.FC = () => {
|
||||
);
|
||||
};
|
||||
|
||||
export default Dashboard;
|
||||
export default Dashboard;
|
||||
|
||||
152
src/components/ErrorBoundary.tsx
Normal file
152
src/components/ErrorBoundary.tsx
Normal file
@@ -0,0 +1,152 @@
|
||||
// src/components/ErrorBoundary.tsx
|
||||
/**
|
||||
* React Error Boundary with Sentry integration.
|
||||
* Implements ADR-015: Application Performance Monitoring and Error Tracking.
|
||||
*
|
||||
* This component catches JavaScript errors anywhere in the child component tree,
|
||||
* logs them to Sentry/Bugsink, and displays a fallback UI instead of crashing.
|
||||
*/
|
||||
import { Component, ReactNode } from 'react';
|
||||
import { Sentry, captureException, isSentryConfigured } from '../services/sentry.client';
|
||||
|
||||
interface ErrorBoundaryProps {
|
||||
/** Child components to render */
|
||||
children: ReactNode;
|
||||
/** Optional custom fallback UI. If not provided, uses default error message. */
|
||||
fallback?: ReactNode;
|
||||
/** Optional callback when an error is caught */
|
||||
onError?: (error: Error, errorInfo: React.ErrorInfo) => void;
|
||||
}
|
||||
|
||||
interface ErrorBoundaryState {
|
||||
hasError: boolean;
|
||||
error: Error | null;
|
||||
eventId: string | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Error Boundary component that catches React component errors
|
||||
* and reports them to Sentry/Bugsink.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* <ErrorBoundary fallback={<p>Something went wrong.</p>}>
|
||||
* <MyComponent />
|
||||
* </ErrorBoundary>
|
||||
* ```
|
||||
*/
|
||||
export class ErrorBoundary extends Component<ErrorBoundaryProps, ErrorBoundaryState> {
|
||||
constructor(props: ErrorBoundaryProps) {
|
||||
super(props);
|
||||
this.state = {
|
||||
hasError: false,
|
||||
error: null,
|
||||
eventId: null,
|
||||
};
|
||||
}
|
||||
|
||||
static getDerivedStateFromError(error: Error): Partial<ErrorBoundaryState> {
|
||||
return { hasError: true, error };
|
||||
}
|
||||
|
||||
componentDidCatch(error: Error, errorInfo: React.ErrorInfo): void {
|
||||
// Log to console in development
|
||||
console.error('ErrorBoundary caught an error:', error, errorInfo);
|
||||
|
||||
// Report to Sentry with component stack
|
||||
const eventId = captureException(error, {
|
||||
componentStack: errorInfo.componentStack,
|
||||
});
|
||||
|
||||
this.setState({ eventId: eventId ?? null });
|
||||
|
||||
// Call optional onError callback
|
||||
this.props.onError?.(error, errorInfo);
|
||||
}
|
||||
|
||||
handleReload = (): void => {
|
||||
window.location.reload();
|
||||
};
|
||||
|
||||
handleReportFeedback = (): void => {
|
||||
if (isSentryConfigured && this.state.eventId) {
|
||||
// Open Sentry feedback dialog if available
|
||||
Sentry.showReportDialog({ eventId: this.state.eventId });
|
||||
}
|
||||
};
|
||||
|
||||
render(): ReactNode {
|
||||
if (this.state.hasError) {
|
||||
// Custom fallback UI if provided
|
||||
if (this.props.fallback) {
|
||||
return this.props.fallback;
|
||||
}
|
||||
|
||||
// Default fallback UI
|
||||
return (
|
||||
<div className="flex min-h-screen items-center justify-center bg-gray-50 dark:bg-gray-900 p-4">
|
||||
<div className="max-w-md w-full bg-white dark:bg-gray-800 rounded-lg shadow-lg p-6 text-center">
|
||||
<div className="text-red-500 dark:text-red-400 mb-4">
|
||||
<svg
|
||||
className="w-16 h-16 mx-auto"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
aria-hidden="true"
|
||||
>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
strokeWidth={2}
|
||||
d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<h1 className="text-xl font-semibold text-gray-900 dark:text-white mb-2">
|
||||
Something went wrong
|
||||
</h1>
|
||||
<p className="text-gray-600 dark:text-gray-400 mb-6">
|
||||
We're sorry, but an unexpected error occurred. Our team has been notified.
|
||||
</p>
|
||||
<div className="flex flex-col sm:flex-row gap-3 justify-center">
|
||||
<button
|
||||
onClick={this.handleReload}
|
||||
className="px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||
>
|
||||
Reload Page
|
||||
</button>
|
||||
{isSentryConfigured && this.state.eventId && (
|
||||
<button
|
||||
onClick={this.handleReportFeedback}
|
||||
className="px-4 py-2 bg-gray-200 dark:bg-gray-700 text-gray-800 dark:text-gray-200 rounded-md hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors"
|
||||
>
|
||||
Report Feedback
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
{this.state.error && process.env.NODE_ENV === 'development' && (
|
||||
<details className="mt-6 text-left">
|
||||
<summary className="cursor-pointer text-sm text-gray-500 dark:text-gray-400">
|
||||
Error Details (Development Only)
|
||||
</summary>
|
||||
<pre className="mt-2 p-3 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-auto max-h-48 text-red-600 dark:text-red-400">
|
||||
{this.state.error.message}
|
||||
{'\n\n'}
|
||||
{this.state.error.stack}
|
||||
</pre>
|
||||
</details>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return this.props.children;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pre-configured Sentry ErrorBoundary from @sentry/react.
|
||||
* Use this for simpler integration when you don't need custom UI.
|
||||
*/
|
||||
export const SentryErrorBoundary = Sentry.ErrorBoundary;
|
||||
@@ -48,7 +48,9 @@ describe('FlyerCorrectionTool', () => {
|
||||
});
|
||||
|
||||
it('should not render when isOpen is false', () => {
|
||||
const { container } = renderWithProviders(<FlyerCorrectionTool {...defaultProps} isOpen={false} />);
|
||||
const { container } = renderWithProviders(
|
||||
<FlyerCorrectionTool {...defaultProps} isOpen={false} />,
|
||||
);
|
||||
expect(container.firstChild).toBeNull();
|
||||
});
|
||||
|
||||
@@ -302,4 +304,45 @@ describe('FlyerCorrectionTool', () => {
|
||||
|
||||
expect(clearRectSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should call rescanImageArea with "dates" type when Extract Sale Dates is clicked', async () => {
|
||||
mockedAiApiClient.rescanImageArea.mockResolvedValue(
|
||||
new Response(JSON.stringify({ text: 'Jan 1 - Jan 7' })),
|
||||
);
|
||||
|
||||
renderWithProviders(<FlyerCorrectionTool {...defaultProps} />);
|
||||
|
||||
// Wait for image fetch to complete
|
||||
await waitFor(() => expect(global.fetch).toHaveBeenCalledWith(defaultProps.imageUrl));
|
||||
|
||||
const canvas = screen.getByRole('dialog').querySelector('canvas')!;
|
||||
const image = screen.getByAltText('Flyer for correction');
|
||||
|
||||
// Mock image dimensions
|
||||
Object.defineProperty(image, 'naturalWidth', { value: 1000, configurable: true });
|
||||
Object.defineProperty(image, 'naturalHeight', { value: 800, configurable: true });
|
||||
Object.defineProperty(image, 'clientWidth', { value: 500, configurable: true });
|
||||
Object.defineProperty(image, 'clientHeight', { value: 400, configurable: true });
|
||||
|
||||
// Draw a selection
|
||||
fireEvent.mouseDown(canvas, { clientX: 10, clientY: 10 });
|
||||
fireEvent.mouseMove(canvas, { clientX: 60, clientY: 30 });
|
||||
fireEvent.mouseUp(canvas);
|
||||
|
||||
// Click the "Extract Sale Dates" button instead of "Extract Store Name"
|
||||
fireEvent.click(screen.getByRole('button', { name: /extract sale dates/i }));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedAiApiClient.rescanImageArea).toHaveBeenCalledWith(
|
||||
expect.any(File),
|
||||
expect.objectContaining({ x: 20, y: 20, width: 100, height: 40 }),
|
||||
'dates', // This is the key difference - testing the 'dates' extraction type
|
||||
);
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedNotifySuccess).toHaveBeenCalledWith('Extracted: Jan 1 - Jan 7');
|
||||
expect(defaultProps.onDataExtracted).toHaveBeenCalledWith('dates', 'Jan 1 - Jan 7');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -27,10 +27,4 @@ describe('Footer', () => {
|
||||
// Assert: Check that the rendered text includes the mocked year
|
||||
expect(screen.getByText('Copyright 2025-2025')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should display the correct year when it changes', () => {
|
||||
vi.setSystemTime(new Date('2030-01-01T00:00:00Z'));
|
||||
renderWithProviders(<Footer />);
|
||||
expect(screen.getByText('Copyright 2025-2030')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,8 +8,9 @@ import { LeaderboardUser } from '../types';
|
||||
import { createMockLeaderboardUser } from '../tests/utils/mockFactories';
|
||||
import { renderWithProviders } from '../tests/utils/renderWithProviders';
|
||||
|
||||
// The apiClient and logger are mocked globally.
|
||||
// We can get a typed reference to the apiClient for individual test overrides.
|
||||
// Must explicitly call vi.mock() for apiClient
|
||||
vi.mock('../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
// Mock lucide-react icons to prevent rendering errors in the test environment
|
||||
@@ -50,18 +51,19 @@ describe('Leaderboard', () => {
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('alert')).toBeInTheDocument();
|
||||
expect(screen.getByText('Error: Failed to fetch leaderboard data.')).toBeInTheDocument();
|
||||
// The query hook throws an error with the status code when JSON parsing fails
|
||||
expect(screen.getByText('Error: Request failed with status 500')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should display a generic error for unknown error types', async () => {
|
||||
const unknownError = 'A string error';
|
||||
mockedApiClient.fetchLeaderboard.mockRejectedValue(unknownError);
|
||||
// Use an actual Error object since the component displays error.message
|
||||
mockedApiClient.fetchLeaderboard.mockRejectedValue(new Error('A string error'));
|
||||
renderWithProviders(<Leaderboard />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('alert')).toBeInTheDocument();
|
||||
expect(screen.getByText('Error: An unknown error occurred.')).toBeInTheDocument();
|
||||
expect(screen.getByText('Error: A string error')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,36 +1,15 @@
|
||||
// src/components/Leaderboard.tsx
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import * as apiClient from '../services/apiClient';
|
||||
import { LeaderboardUser } from '../types';
|
||||
import { logger } from '../services/logger.client';
|
||||
import React from 'react';
|
||||
import { useLeaderboardQuery } from '../hooks/queries/useLeaderboardQuery';
|
||||
import { Award, Crown, ShieldAlert } from 'lucide-react';
|
||||
|
||||
/**
|
||||
* Leaderboard component displaying top users by points.
|
||||
*
|
||||
* Refactored to use TanStack Query (ADR-0005 Phase 8).
|
||||
*/
|
||||
export const Leaderboard: React.FC = () => {
|
||||
const [leaderboard, setLeaderboard] = useState<LeaderboardUser[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const loadLeaderboard = async () => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const response = await apiClient.fetchLeaderboard(10); // Fetch top 10 users
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch leaderboard data.');
|
||||
}
|
||||
const data: LeaderboardUser[] = await response.json();
|
||||
setLeaderboard(data);
|
||||
} catch (err) {
|
||||
const errorMessage = err instanceof Error ? err.message : 'An unknown error occurred.';
|
||||
logger.error('Error fetching leaderboard:', { error: err });
|
||||
setError(errorMessage);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
loadLeaderboard();
|
||||
}, []);
|
||||
const { data: leaderboard = [], isLoading, error } = useLeaderboardQuery(10);
|
||||
|
||||
const getRankIcon = (rank: string) => {
|
||||
switch (rank) {
|
||||
@@ -57,7 +36,7 @@ export const Leaderboard: React.FC = () => {
|
||||
>
|
||||
<div className="flex items-center">
|
||||
<ShieldAlert className="h-6 w-6 mr-3" />
|
||||
<p className="font-bold">Error: {error}</p>
|
||||
<p className="font-bold">Error: {error.message}</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -11,7 +11,10 @@ vi.mock('zxcvbn');
|
||||
|
||||
describe('PasswordStrengthIndicator', () => {
|
||||
it('should render 5 gray bars when no password is provided', () => {
|
||||
(zxcvbn as Mock).mockReturnValue({ score: -1, feedback: { warning: '', suggestions: [] } });
|
||||
(zxcvbn as Mock).mockReturnValue({
|
||||
score: -1,
|
||||
feedback: { warning: '', suggestions: [] },
|
||||
});
|
||||
const { container } = renderWithProviders(<PasswordStrengthIndicator password="" />);
|
||||
const bars = container.querySelectorAll('.h-1\\.5');
|
||||
expect(bars).toHaveLength(5);
|
||||
@@ -28,8 +31,13 @@ describe('PasswordStrengthIndicator', () => {
|
||||
{ score: 3, label: 'Good', color: 'bg-yellow-500', bars: 4 },
|
||||
{ score: 4, label: 'Strong', color: 'bg-green-500', bars: 5 },
|
||||
])('should render correctly for score $score ($label)', ({ score, label, color, bars }) => {
|
||||
(zxcvbn as Mock).mockReturnValue({ score, feedback: { warning: '', suggestions: [] } });
|
||||
const { container } = renderWithProviders(<PasswordStrengthIndicator password="some-password" />);
|
||||
(zxcvbn as Mock).mockReturnValue({
|
||||
score,
|
||||
feedback: { warning: '', suggestions: [] },
|
||||
});
|
||||
const { container } = renderWithProviders(
|
||||
<PasswordStrengthIndicator password="some-password" />,
|
||||
);
|
||||
|
||||
// Check the label
|
||||
expect(screen.getByText(label)).toBeInTheDocument();
|
||||
@@ -82,7 +90,10 @@ describe('PasswordStrengthIndicator', () => {
|
||||
});
|
||||
|
||||
it('should use default empty string if password prop is undefined', () => {
|
||||
(zxcvbn as Mock).mockReturnValue({ score: 0, feedback: { warning: '', suggestions: [] } });
|
||||
(zxcvbn as Mock).mockReturnValue({
|
||||
score: 0,
|
||||
feedback: { warning: '', suggestions: [] },
|
||||
});
|
||||
const { container } = renderWithProviders(<PasswordStrengthIndicator />);
|
||||
const bars = container.querySelectorAll('.h-1\\.5');
|
||||
expect(bars).toHaveLength(5);
|
||||
@@ -94,7 +105,10 @@ describe('PasswordStrengthIndicator', () => {
|
||||
|
||||
it('should handle out-of-range scores gracefully (defensive)', () => {
|
||||
// Mock a score that isn't 0-4 to hit default switch cases
|
||||
(zxcvbn as Mock).mockReturnValue({ score: 99, feedback: { warning: '', suggestions: [] } });
|
||||
(zxcvbn as Mock).mockReturnValue({
|
||||
score: 99,
|
||||
feedback: { warning: '', suggestions: [] },
|
||||
});
|
||||
const { container } = renderWithProviders(<PasswordStrengthIndicator password="test" />);
|
||||
|
||||
// Check bars - should hit default case in getBarColor which returns gray
|
||||
|
||||
@@ -8,8 +8,9 @@ import { logger } from '../services/logger.client';
|
||||
import { renderWithProviders } from '../tests/utils/renderWithProviders';
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
// The apiClient is mocked globally in `src/tests/setup/globalApiMock.ts`.
|
||||
// We can get a typed reference to it for individual test overrides.
|
||||
// Must explicitly call vi.mock() for apiClient
|
||||
vi.mock('../services/apiClient');
|
||||
|
||||
const mockedApiClient = vi.mocked(apiClient);
|
||||
|
||||
describe('RecipeSuggester Component', () => {
|
||||
@@ -54,7 +55,10 @@ describe('RecipeSuggester Component', () => {
|
||||
// Add a delay to ensure the loading state is visible during the test
|
||||
mockedApiClient.suggestRecipe.mockImplementation(async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
return { ok: true, json: async () => ({ suggestion: mockSuggestion }) } as Response;
|
||||
return {
|
||||
ok: true,
|
||||
json: async () => ({ suggestion: mockSuggestion }),
|
||||
} as Response;
|
||||
});
|
||||
|
||||
const button = screen.getByRole('button', { name: /Suggest a Recipe/i });
|
||||
@@ -120,7 +124,7 @@ describe('RecipeSuggester Component', () => {
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
{ error: networkError },
|
||||
'Failed to fetch recipe suggestion.'
|
||||
'Failed to fetch recipe suggestion.',
|
||||
);
|
||||
console.log('TEST: Network error caught and logged');
|
||||
});
|
||||
@@ -196,7 +200,7 @@ describe('RecipeSuggester Component', () => {
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
{ error: 'Something weird happened' },
|
||||
'Failed to fetch recipe suggestion.'
|
||||
'Failed to fetch recipe suggestion.',
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -9,45 +9,60 @@ export const RecipeSuggester: React.FC = () => {
|
||||
const [isLoading, setIsLoading] = useState<boolean>(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const handleSubmit = useCallback(async (event: React.FormEvent<HTMLFormElement>) => {
|
||||
event.preventDefault();
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
setSuggestion(null);
|
||||
const handleSubmit = useCallback(
|
||||
async (event: React.FormEvent<HTMLFormElement>) => {
|
||||
event.preventDefault();
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
setSuggestion(null);
|
||||
|
||||
const ingredientList = ingredients.split(',').map(item => item.trim()).filter(Boolean);
|
||||
const ingredientList = ingredients
|
||||
.split(',')
|
||||
.map((item) => item.trim())
|
||||
.filter(Boolean);
|
||||
|
||||
if (ingredientList.length === 0) {
|
||||
setError('Please enter at least one ingredient.');
|
||||
setIsLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await suggestRecipe(ingredientList);
|
||||
const data = await response.json();
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.message || 'Failed to get suggestion.');
|
||||
if (ingredientList.length === 0) {
|
||||
setError('Please enter at least one ingredient.');
|
||||
setIsLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
setSuggestion(data.suggestion);
|
||||
} catch (err) {
|
||||
const errorMessage = err instanceof Error ? err.message : 'An unknown error occurred.';
|
||||
logger.error({ error: err }, 'Failed to fetch recipe suggestion.');
|
||||
setError(errorMessage);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [ingredients]);
|
||||
try {
|
||||
const response = await suggestRecipe(ingredientList);
|
||||
const data = await response.json();
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(data.message || 'Failed to get suggestion.');
|
||||
}
|
||||
|
||||
setSuggestion(data.suggestion);
|
||||
} catch (err) {
|
||||
const errorMessage = err instanceof Error ? err.message : 'An unknown error occurred.';
|
||||
logger.error({ error: err }, 'Failed to fetch recipe suggestion.');
|
||||
setError(errorMessage);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
},
|
||||
[ingredients],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="bg-white dark:bg-gray-800 shadow rounded-lg p-6">
|
||||
<h2 className="text-xl font-semibold text-gray-900 dark:text-white mb-2">Get a Recipe Suggestion</h2>
|
||||
<p className="text-gray-600 dark:text-gray-400 mb-4">Enter some ingredients you have, separated by commas.</p>
|
||||
<h2 className="text-xl font-semibold text-gray-900 dark:text-white mb-2">
|
||||
Get a Recipe Suggestion
|
||||
</h2>
|
||||
<p className="text-gray-600 dark:text-gray-400 mb-4">
|
||||
Enter some ingredients you have, separated by commas.
|
||||
</p>
|
||||
<form onSubmit={handleSubmit}>
|
||||
<div className="mb-4">
|
||||
<label htmlFor="ingredients-input" className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Ingredients:</label>
|
||||
<label
|
||||
htmlFor="ingredients-input"
|
||||
className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1"
|
||||
>
|
||||
Ingredients:
|
||||
</label>
|
||||
<input
|
||||
id="ingredients-input"
|
||||
type="text"
|
||||
@@ -58,23 +73,31 @@ export const RecipeSuggester: React.FC = () => {
|
||||
className="block w-full rounded-md border-gray-300 shadow-sm focus:border-blue-500 focus:ring-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white sm:text-sm p-2 border"
|
||||
/>
|
||||
</div>
|
||||
<button type="submit" disabled={isLoading} className="w-full flex justify-center py-2 px-4 border border-transparent rounded-md shadow-sm text-sm font-medium text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 disabled:opacity-50 transition-colors">
|
||||
<button
|
||||
type="submit"
|
||||
disabled={isLoading}
|
||||
className="w-full flex justify-center py-2 px-4 border border-transparent rounded-md shadow-sm text-sm font-medium text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 disabled:opacity-50 transition-colors"
|
||||
>
|
||||
{isLoading ? 'Getting suggestion...' : 'Suggest a Recipe'}
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{error && (
|
||||
<div className="mt-4 p-4 bg-red-50 dark:bg-red-900/50 text-red-700 dark:text-red-200 rounded-md text-sm">{error}</div>
|
||||
<div className="mt-4 p-4 bg-red-50 dark:bg-red-900/50 text-red-700 dark:text-red-200 rounded-md text-sm">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{suggestion && (
|
||||
<div className="mt-6 bg-gray-50 dark:bg-gray-700/50 rounded-lg p-4 border border-gray-200 dark:border-gray-600">
|
||||
<div className="prose dark:prose-invert max-w-none">
|
||||
<h5 className="text-lg font-medium text-gray-900 dark:text-white mb-2">Recipe Suggestion</h5>
|
||||
<h5 className="text-lg font-medium text-gray-900 dark:text-white mb-2">
|
||||
Recipe Suggestion
|
||||
</h5>
|
||||
<p className="text-gray-700 dark:text-gray-300 whitespace-pre-wrap">{suggestion}</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
@@ -19,7 +19,9 @@ export const StatCard: React.FC<StatCardProps> = ({ title, value, icon }) => {
|
||||
</div>
|
||||
<div className="ml-5 w-0 flex-1">
|
||||
<dl>
|
||||
<dt className="text-sm font-medium text-gray-500 dark:text-gray-400 truncate">{title}</dt>
|
||||
<dt className="text-sm font-medium text-gray-500 dark:text-gray-400 truncate">
|
||||
{title}
|
||||
</dt>
|
||||
<dd>
|
||||
<div className="text-lg font-medium text-gray-900 dark:text-white">{value}</div>
|
||||
</dd>
|
||||
@@ -29,4 +31,4 @@ export const StatCard: React.FC<StatCardProps> = ({ title, value, icon }) => {
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
@@ -15,4 +15,4 @@ export const DocumentMagnifyingGlassIcon: React.FC<React.SVGProps<SVGSVGElement>
|
||||
d="M19.5 14.25v-2.625a3.375 3.375 0 0 0-3.375-3.375h-1.5A1.125 1.125 0 0 1 13.5 7.125v-1.5a3.375 3.375 0 0 0-3.375-3.375H8.25m5.231 13.481L15 17.25m-4.5 4.5L6.75 21.75m0 0L2.25 17.25m4.5 4.5v-4.5m13.5-3V9A2.25 2.25 0 0 0 16.5 6.75h-9A2.25 2.25 0 0 0 5.25 9v9.75m14.25-10.5a2.25 2.25 0 0 0-2.25-2.25H5.25a2.25 2.25 0 0 0-2.25 2.25v10.5a2.25 2.25 0 0 0 2.25 225h5.25"
|
||||
/>
|
||||
</svg>
|
||||
);
|
||||
);
|
||||
|
||||
@@ -14,6 +14,16 @@ const config = {
|
||||
google: {
|
||||
mapsEmbedApiKey: import.meta.env.VITE_GOOGLE_MAPS_EMBED_API_KEY,
|
||||
},
|
||||
/**
|
||||
* Sentry/Bugsink error tracking configuration (ADR-015).
|
||||
* Uses VITE_ prefix for client-side environment variables.
|
||||
*/
|
||||
sentry: {
|
||||
dsn: import.meta.env.VITE_SENTRY_DSN,
|
||||
environment: import.meta.env.VITE_SENTRY_ENVIRONMENT || import.meta.env.MODE,
|
||||
debug: import.meta.env.VITE_SENTRY_DEBUG === 'true',
|
||||
enabled: import.meta.env.VITE_SENTRY_ENABLED !== 'false',
|
||||
},
|
||||
};
|
||||
|
||||
export default config;
|
||||
|
||||
432
src/config/env.test.ts
Normal file
432
src/config/env.test.ts
Normal file
@@ -0,0 +1,432 @@
|
||||
// src/config/env.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
|
||||
describe('env config', () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
/**
|
||||
* Sets up minimal valid environment variables for config parsing.
|
||||
*/
|
||||
function setValidEnv(overrides: Record<string, string> = {}) {
|
||||
process.env = {
|
||||
NODE_ENV: 'test',
|
||||
// Database (required)
|
||||
DB_HOST: 'localhost',
|
||||
DB_PORT: '5432',
|
||||
DB_USER: 'testuser',
|
||||
DB_PASSWORD: 'testpass',
|
||||
DB_NAME: 'testdb',
|
||||
// Redis (required)
|
||||
REDIS_URL: 'redis://localhost:6379',
|
||||
// Auth (required - min 32 chars)
|
||||
JWT_SECRET: 'this-is-a-test-secret-that-is-at-least-32-characters-long',
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe('successful config parsing', () => {
|
||||
it('should parse valid configuration with all required fields', async () => {
|
||||
setValidEnv();
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.database.host).toBe('localhost');
|
||||
expect(config.database.port).toBe(5432);
|
||||
expect(config.database.user).toBe('testuser');
|
||||
expect(config.database.password).toBe('testpass');
|
||||
expect(config.database.name).toBe('testdb');
|
||||
expect(config.redis.url).toBe('redis://localhost:6379');
|
||||
expect(config.auth.jwtSecret).toBe(
|
||||
'this-is-a-test-secret-that-is-at-least-32-characters-long',
|
||||
);
|
||||
});
|
||||
|
||||
it('should use default values for optional fields', async () => {
|
||||
setValidEnv();
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
// Worker defaults
|
||||
expect(config.worker.concurrency).toBe(1);
|
||||
expect(config.worker.lockDuration).toBe(30000);
|
||||
expect(config.worker.emailConcurrency).toBe(10);
|
||||
expect(config.worker.analyticsConcurrency).toBe(1);
|
||||
expect(config.worker.cleanupConcurrency).toBe(10);
|
||||
expect(config.worker.weeklyAnalyticsConcurrency).toBe(1);
|
||||
|
||||
// Server defaults
|
||||
expect(config.server.port).toBe(3001);
|
||||
expect(config.server.nodeEnv).toBe('test');
|
||||
expect(config.server.storagePath).toBe('/var/www/flyer-crawler.projectium.com/flyer-images');
|
||||
|
||||
// AI defaults
|
||||
expect(config.ai.geminiRpm).toBe(5);
|
||||
expect(config.ai.priceQualityThreshold).toBe(0.5);
|
||||
|
||||
// SMTP defaults
|
||||
expect(config.smtp.port).toBe(587);
|
||||
expect(config.smtp.secure).toBe(false);
|
||||
});
|
||||
|
||||
it('should parse custom port values', async () => {
|
||||
setValidEnv({
|
||||
DB_PORT: '5433',
|
||||
PORT: '4000',
|
||||
SMTP_PORT: '465',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.database.port).toBe(5433);
|
||||
expect(config.server.port).toBe(4000);
|
||||
expect(config.smtp.port).toBe(465);
|
||||
});
|
||||
|
||||
it('should parse boolean SMTP_SECURE correctly', async () => {
|
||||
setValidEnv({
|
||||
SMTP_SECURE: 'true',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.smtp.secure).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse false for SMTP_SECURE when set to false', async () => {
|
||||
setValidEnv({
|
||||
SMTP_SECURE: 'false',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.smtp.secure).toBe(false);
|
||||
});
|
||||
|
||||
it('should parse worker concurrency values', async () => {
|
||||
setValidEnv({
|
||||
WORKER_CONCURRENCY: '5',
|
||||
WORKER_LOCK_DURATION: '60000',
|
||||
EMAIL_WORKER_CONCURRENCY: '20',
|
||||
ANALYTICS_WORKER_CONCURRENCY: '3',
|
||||
CLEANUP_WORKER_CONCURRENCY: '15',
|
||||
WEEKLY_ANALYTICS_WORKER_CONCURRENCY: '2',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.worker.concurrency).toBe(5);
|
||||
expect(config.worker.lockDuration).toBe(60000);
|
||||
expect(config.worker.emailConcurrency).toBe(20);
|
||||
expect(config.worker.analyticsConcurrency).toBe(3);
|
||||
expect(config.worker.cleanupConcurrency).toBe(15);
|
||||
expect(config.worker.weeklyAnalyticsConcurrency).toBe(2);
|
||||
});
|
||||
|
||||
it('should parse AI configuration values', async () => {
|
||||
setValidEnv({
|
||||
GEMINI_API_KEY: 'test-gemini-key',
|
||||
GEMINI_RPM: '10',
|
||||
AI_PRICE_QUALITY_THRESHOLD: '0.75',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.ai.geminiApiKey).toBe('test-gemini-key');
|
||||
expect(config.ai.geminiRpm).toBe(10);
|
||||
expect(config.ai.priceQualityThreshold).toBe(0.75);
|
||||
});
|
||||
|
||||
it('should parse Google configuration values', async () => {
|
||||
setValidEnv({
|
||||
GOOGLE_MAPS_API_KEY: 'test-maps-key',
|
||||
GOOGLE_CLIENT_ID: 'test-client-id',
|
||||
GOOGLE_CLIENT_SECRET: 'test-client-secret',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.google.mapsApiKey).toBe('test-maps-key');
|
||||
expect(config.google.clientId).toBe('test-client-id');
|
||||
expect(config.google.clientSecret).toBe('test-client-secret');
|
||||
});
|
||||
|
||||
it('should parse optional SMTP configuration', async () => {
|
||||
setValidEnv({
|
||||
SMTP_HOST: 'smtp.example.com',
|
||||
SMTP_USER: 'smtp-user',
|
||||
SMTP_PASS: 'smtp-pass',
|
||||
SMTP_FROM_EMAIL: 'noreply@example.com',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.smtp.host).toBe('smtp.example.com');
|
||||
expect(config.smtp.user).toBe('smtp-user');
|
||||
expect(config.smtp.pass).toBe('smtp-pass');
|
||||
expect(config.smtp.fromEmail).toBe('noreply@example.com');
|
||||
});
|
||||
|
||||
it('should parse optional JWT_SECRET_PREVIOUS for rotation', async () => {
|
||||
setValidEnv({
|
||||
JWT_SECRET_PREVIOUS: 'old-secret-that-is-at-least-32-characters-long',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.auth.jwtSecretPrevious).toBe('old-secret-that-is-at-least-32-characters-long');
|
||||
});
|
||||
|
||||
it('should handle empty string values as undefined for optional int fields', async () => {
|
||||
setValidEnv({
|
||||
GEMINI_RPM: '',
|
||||
AI_PRICE_QUALITY_THRESHOLD: ' ',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
// Should use defaults when empty
|
||||
expect(config.ai.geminiRpm).toBe(5);
|
||||
expect(config.ai.priceQualityThreshold).toBe(0.5);
|
||||
});
|
||||
});
|
||||
|
||||
describe('convenience helpers', () => {
|
||||
it('should export isProduction as false in test env', async () => {
|
||||
setValidEnv({ NODE_ENV: 'test' });
|
||||
|
||||
const { isProduction } = await import('./env');
|
||||
|
||||
expect(isProduction).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isTest as true in test env', async () => {
|
||||
setValidEnv({ NODE_ENV: 'test' });
|
||||
|
||||
const { isTest } = await import('./env');
|
||||
|
||||
expect(isTest).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isDevelopment as false in test env', async () => {
|
||||
setValidEnv({ NODE_ENV: 'test' });
|
||||
|
||||
const { isDevelopment } = await import('./env');
|
||||
|
||||
expect(isDevelopment).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isSmtpConfigured as false when SMTP not configured', async () => {
|
||||
setValidEnv();
|
||||
|
||||
const { isSmtpConfigured } = await import('./env');
|
||||
|
||||
expect(isSmtpConfigured).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isSmtpConfigured as true when all SMTP fields present', async () => {
|
||||
setValidEnv({
|
||||
SMTP_HOST: 'smtp.example.com',
|
||||
SMTP_USER: 'user',
|
||||
SMTP_PASS: 'pass',
|
||||
SMTP_FROM_EMAIL: 'noreply@example.com',
|
||||
});
|
||||
|
||||
const { isSmtpConfigured } = await import('./env');
|
||||
|
||||
expect(isSmtpConfigured).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isAiConfigured as false when Gemini not configured', async () => {
|
||||
setValidEnv();
|
||||
|
||||
const { isAiConfigured } = await import('./env');
|
||||
|
||||
expect(isAiConfigured).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isAiConfigured as true when Gemini key present', async () => {
|
||||
setValidEnv({
|
||||
GEMINI_API_KEY: 'test-key',
|
||||
});
|
||||
|
||||
const { isAiConfigured } = await import('./env');
|
||||
|
||||
expect(isAiConfigured).toBe(true);
|
||||
});
|
||||
|
||||
it('should export isGoogleMapsConfigured as false when not configured', async () => {
|
||||
setValidEnv();
|
||||
|
||||
const { isGoogleMapsConfigured } = await import('./env');
|
||||
|
||||
expect(isGoogleMapsConfigured).toBe(false);
|
||||
});
|
||||
|
||||
it('should export isGoogleMapsConfigured as true when Maps key present', async () => {
|
||||
setValidEnv({
|
||||
GOOGLE_MAPS_API_KEY: 'test-maps-key',
|
||||
});
|
||||
|
||||
const { isGoogleMapsConfigured } = await import('./env');
|
||||
|
||||
expect(isGoogleMapsConfigured).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validation errors', () => {
|
||||
it('should throw error when DB_HOST is missing', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.DB_HOST;
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should throw error when DB_USER is missing', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.DB_USER;
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should throw error when DB_PASSWORD is missing', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.DB_PASSWORD;
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should throw error when DB_NAME is missing', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.DB_NAME;
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should throw error when REDIS_URL is missing', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.REDIS_URL;
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should throw error when REDIS_URL is invalid', async () => {
|
||||
setValidEnv({
|
||||
REDIS_URL: 'not-a-valid-url',
|
||||
});
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should throw error when JWT_SECRET is missing', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.JWT_SECRET;
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should throw error when JWT_SECRET is too short', async () => {
|
||||
setValidEnv({
|
||||
JWT_SECRET: 'short',
|
||||
});
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('CONFIGURATION ERROR');
|
||||
});
|
||||
|
||||
it('should include field path in error message', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.DB_HOST;
|
||||
|
||||
await expect(import('./env')).rejects.toThrow('database.host');
|
||||
});
|
||||
});
|
||||
|
||||
describe('environment modes', () => {
|
||||
it('should set nodeEnv to development by default', async () => {
|
||||
setValidEnv();
|
||||
delete process.env.NODE_ENV;
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.server.nodeEnv).toBe('development');
|
||||
});
|
||||
|
||||
it('should accept production as NODE_ENV', async () => {
|
||||
setValidEnv({
|
||||
NODE_ENV: 'production',
|
||||
});
|
||||
|
||||
const { config, isProduction, isDevelopment, isTest } = await import('./env');
|
||||
|
||||
expect(config.server.nodeEnv).toBe('production');
|
||||
expect(isProduction).toBe(true);
|
||||
expect(isDevelopment).toBe(false);
|
||||
expect(isTest).toBe(false);
|
||||
});
|
||||
|
||||
it('should accept development as NODE_ENV', async () => {
|
||||
setValidEnv({
|
||||
NODE_ENV: 'development',
|
||||
});
|
||||
|
||||
const { config, isProduction, isDevelopment, isTest } = await import('./env');
|
||||
|
||||
expect(config.server.nodeEnv).toBe('development');
|
||||
expect(isProduction).toBe(false);
|
||||
expect(isDevelopment).toBe(true);
|
||||
expect(isTest).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('server configuration', () => {
|
||||
it('should parse FRONTEND_URL when provided', async () => {
|
||||
setValidEnv({
|
||||
FRONTEND_URL: 'https://example.com',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.server.frontendUrl).toBe('https://example.com');
|
||||
});
|
||||
|
||||
it('should parse BASE_URL when provided', async () => {
|
||||
setValidEnv({
|
||||
BASE_URL: '/api/v1',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.server.baseUrl).toBe('/api/v1');
|
||||
});
|
||||
|
||||
it('should parse STORAGE_PATH when provided', async () => {
|
||||
setValidEnv({
|
||||
STORAGE_PATH: '/custom/storage/path',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.server.storagePath).toBe('/custom/storage/path');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Redis configuration', () => {
|
||||
it('should parse REDIS_PASSWORD when provided', async () => {
|
||||
setValidEnv({
|
||||
REDIS_PASSWORD: 'redis-secret',
|
||||
});
|
||||
|
||||
const { config } = await import('./env');
|
||||
|
||||
expect(config.redis.password).toBe('redis-secret');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -94,6 +94,15 @@ const aiSchema = z.object({
|
||||
priceQualityThreshold: floatWithDefault(0.5),
|
||||
});
|
||||
|
||||
/**
|
||||
* UPC API configuration schema.
|
||||
* External APIs for product lookup by barcode.
|
||||
*/
|
||||
const upcSchema = z.object({
|
||||
upcItemDbApiKey: z.string().optional(), // UPC Item DB API key (upcitemdb.com)
|
||||
barcodeLookupApiKey: z.string().optional(), // Barcode Lookup API key (barcodelookup.com)
|
||||
});
|
||||
|
||||
/**
|
||||
* Google services configuration schema.
|
||||
*/
|
||||
@@ -126,6 +135,17 @@ const serverSchema = z.object({
|
||||
storagePath: z.string().default('/var/www/flyer-crawler.projectium.com/flyer-images'),
|
||||
});
|
||||
|
||||
/**
|
||||
* Error tracking configuration schema (ADR-015).
|
||||
* Uses Bugsink (Sentry-compatible self-hosted error tracking).
|
||||
*/
|
||||
const sentrySchema = z.object({
|
||||
dsn: z.string().optional(), // Sentry DSN for backend
|
||||
enabled: booleanString(true),
|
||||
environment: z.string().optional(),
|
||||
debug: booleanString(false),
|
||||
});
|
||||
|
||||
/**
|
||||
* Complete environment configuration schema.
|
||||
*/
|
||||
@@ -135,9 +155,11 @@ const envSchema = z.object({
|
||||
auth: authSchema,
|
||||
smtp: smtpSchema,
|
||||
ai: aiSchema,
|
||||
upc: upcSchema,
|
||||
google: googleSchema,
|
||||
worker: workerSchema,
|
||||
server: serverSchema,
|
||||
sentry: sentrySchema,
|
||||
});
|
||||
|
||||
export type EnvConfig = z.infer<typeof envSchema>;
|
||||
@@ -178,6 +200,10 @@ function loadEnvVars(): unknown {
|
||||
geminiRpm: process.env.GEMINI_RPM,
|
||||
priceQualityThreshold: process.env.AI_PRICE_QUALITY_THRESHOLD,
|
||||
},
|
||||
upc: {
|
||||
upcItemDbApiKey: process.env.UPC_ITEM_DB_API_KEY,
|
||||
barcodeLookupApiKey: process.env.BARCODE_LOOKUP_API_KEY,
|
||||
},
|
||||
google: {
|
||||
mapsApiKey: process.env.GOOGLE_MAPS_API_KEY,
|
||||
clientId: process.env.GOOGLE_CLIENT_ID,
|
||||
@@ -198,6 +224,12 @@ function loadEnvVars(): unknown {
|
||||
baseUrl: process.env.BASE_URL,
|
||||
storagePath: process.env.STORAGE_PATH,
|
||||
},
|
||||
sentry: {
|
||||
dsn: process.env.SENTRY_DSN,
|
||||
enabled: process.env.SENTRY_ENABLED,
|
||||
environment: process.env.SENTRY_ENVIRONMENT || process.env.NODE_ENV,
|
||||
debug: process.env.SENTRY_DEBUG,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -301,3 +333,18 @@ export const isAiConfigured = !!config.ai.geminiApiKey;
|
||||
* Returns true if Google Maps is configured.
|
||||
*/
|
||||
export const isGoogleMapsConfigured = !!config.google.mapsApiKey;
|
||||
|
||||
/**
|
||||
* Returns true if Sentry/Bugsink error tracking is configured and enabled.
|
||||
*/
|
||||
export const isSentryConfigured = !!config.sentry.dsn && config.sentry.enabled;
|
||||
|
||||
/**
|
||||
* Returns true if UPC Item DB API is configured.
|
||||
*/
|
||||
export const isUpcItemDbConfigured = !!config.upc.upcItemDbApiKey;
|
||||
|
||||
/**
|
||||
* Returns true if Barcode Lookup API is configured.
|
||||
*/
|
||||
export const isBarcodeLookupConfigured = !!config.upc.barcodeLookupApiKey;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// src/routes/passport.routes.test.ts
|
||||
// src/config/passport.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
@@ -101,7 +101,7 @@ vi.mock('passport', () => {
|
||||
});
|
||||
|
||||
// Now, import the passport configuration which will use our mocks
|
||||
import passport, { isAdmin, optionalAuth, mockAuth } from './passport.routes';
|
||||
import passport, { isAdmin, optionalAuth, mockAuth } from './passport';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { ForbiddenError } from '../services/db/errors.db';
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
// src/routes/passport.routes.ts
|
||||
// src/config/passport.ts
|
||||
import passport from 'passport';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { Strategy as LocalStrategy } from 'passport-local';
|
||||
//import { Strategy as GoogleStrategy } from 'passport-google-oauth20';
|
||||
//import { Strategy as GitHubStrategy } from 'passport-github2';
|
||||
import { Strategy as GoogleStrategy, Profile as GoogleProfile } from 'passport-google-oauth20';
|
||||
import { Strategy as GitHubStrategy, Profile as GitHubProfile } from 'passport-github2';
|
||||
// All route handlers now use req.log (request-scoped logger) as per ADR-004
|
||||
import { Strategy as JwtStrategy, ExtractJwt } from 'passport-jwt';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
@@ -165,108 +165,149 @@ passport.use(
|
||||
);
|
||||
|
||||
// --- Passport Google OAuth 2.0 Strategy ---
|
||||
// passport.use(new GoogleStrategy({
|
||||
// clientID: process.env.GOOGLE_CLIENT_ID!,
|
||||
// clientSecret: process.env.GOOGLE_CLIENT_SECRET!,
|
||||
// callbackURL: '/api/auth/google/callback', // Must match the one in Google Cloud Console
|
||||
// scope: ['profile', 'email']
|
||||
// },
|
||||
// async (accessToken, refreshToken, profile, done) => {
|
||||
// try {
|
||||
// const email = profile.emails?.[0]?.value;
|
||||
// if (!email) {
|
||||
// return done(new Error("No email found in Google profile."), false);
|
||||
// }
|
||||
// Only register the strategy if the required environment variables are set.
|
||||
if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
|
||||
passport.use(
|
||||
new GoogleStrategy(
|
||||
{
|
||||
clientID: process.env.GOOGLE_CLIENT_ID,
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
|
||||
callbackURL: '/api/auth/google/callback',
|
||||
scope: ['profile', 'email'],
|
||||
},
|
||||
async (
|
||||
_accessToken: string,
|
||||
_refreshToken: string,
|
||||
profile: GoogleProfile,
|
||||
done: (error: Error | null, user?: UserProfile | false) => void,
|
||||
) => {
|
||||
try {
|
||||
const email = profile.emails?.[0]?.value;
|
||||
if (!email) {
|
||||
return done(new Error('No email found in Google profile.'), false);
|
||||
}
|
||||
|
||||
// // Check if user already exists in our database
|
||||
// const user = await db.findUserByEmail(email); // Changed to const as 'user' is not reassigned
|
||||
// Check if user already exists in our database
|
||||
const existingUserProfile = await db.userRepo.findUserWithProfileByEmail(email, logger);
|
||||
|
||||
// if (user) {
|
||||
// // User exists, proceed to log them in.
|
||||
// req.log.info(`Google OAuth successful for existing user: ${email}`);
|
||||
// // The password_hash is intentionally destructured and discarded for security.
|
||||
// const { password_hash, ...userWithoutHash } = user;
|
||||
// return done(null, userWithoutHash);
|
||||
// } else {
|
||||
// // User does not exist, create a new account for them.
|
||||
// req.log.info(`Google OAuth: creating new user for email: ${email}`);
|
||||
if (existingUserProfile) {
|
||||
// User exists, proceed to log them in.
|
||||
logger.info(`Google OAuth successful for existing user: ${email}`);
|
||||
// Strip sensitive fields before returning
|
||||
const {
|
||||
password_hash: _password_hash,
|
||||
failed_login_attempts: _failed_login_attempts,
|
||||
last_failed_login: _last_failed_login,
|
||||
refresh_token: _refresh_token,
|
||||
...cleanUserProfile
|
||||
} = existingUserProfile;
|
||||
return done(null, cleanUserProfile);
|
||||
} else {
|
||||
// User does not exist, create a new account for them.
|
||||
logger.info(`Google OAuth: creating new user for email: ${email}`);
|
||||
|
||||
// // Since this is an OAuth user, they don't have a password.
|
||||
// // We pass `null` for the password hash.
|
||||
// const newUser = await db.createUser(email, null, {
|
||||
// full_name: profile.displayName,
|
||||
// avatar_url: profile.photos?.[0]?.value
|
||||
// });
|
||||
// Since this is an OAuth user, they don't have a password.
|
||||
// We pass `null` for the password hash.
|
||||
const newUserProfile = await db.userRepo.createUser(
|
||||
email,
|
||||
null, // No password for OAuth users
|
||||
{
|
||||
full_name: profile.displayName,
|
||||
avatar_url: profile.photos?.[0]?.value,
|
||||
},
|
||||
logger,
|
||||
);
|
||||
|
||||
// // Send a welcome email to the new user
|
||||
// try {
|
||||
// await sendWelcomeEmail(email, profile.displayName);
|
||||
// } catch (emailError) {
|
||||
// req.log.error(`Failed to send welcome email to new Google user ${email}`, { error: emailError });
|
||||
// // Don't block the login flow if email fails.
|
||||
// }
|
||||
|
||||
// // The `createUser` function returns the user object without the password hash.
|
||||
// return done(null, newUser);
|
||||
// }
|
||||
// } catch (err) {
|
||||
// req.log.error('Error during Google authentication strategy:', { error: err });
|
||||
// return done(err, false);
|
||||
// }
|
||||
// }
|
||||
// ));
|
||||
return done(null, newUserProfile);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error({ error: err }, 'Error during Google authentication strategy');
|
||||
return done(err as Error, false);
|
||||
}
|
||||
},
|
||||
),
|
||||
);
|
||||
logger.info('[Passport] Google OAuth strategy registered.');
|
||||
} else {
|
||||
logger.warn(
|
||||
'[Passport] Google OAuth strategy NOT registered: GOOGLE_CLIENT_ID or GOOGLE_CLIENT_SECRET not set.',
|
||||
);
|
||||
}
|
||||
|
||||
// --- Passport GitHub OAuth 2.0 Strategy ---
|
||||
// passport.use(new GitHubStrategy({
|
||||
// clientID: process.env.GITHUB_CLIENT_ID!,
|
||||
// clientSecret: process.env.GITHUB_CLIENT_SECRET!,
|
||||
// callbackURL: '/api/auth/github/callback', // Must match the one in GitHub OAuth App settings
|
||||
// scope: ['user:email'] // Request email access
|
||||
// },
|
||||
// async (accessToken, refreshToken, profile, done) => {
|
||||
// try {
|
||||
// const email = profile.emails?.[0]?.value;
|
||||
// if (!email) {
|
||||
// return done(new Error("No public email found in GitHub profile. Please ensure your primary email is public or add one."), false);
|
||||
// }
|
||||
// Only register the strategy if the required environment variables are set.
|
||||
if (process.env.GITHUB_CLIENT_ID && process.env.GITHUB_CLIENT_SECRET) {
|
||||
passport.use(
|
||||
new GitHubStrategy(
|
||||
{
|
||||
clientID: process.env.GITHUB_CLIENT_ID,
|
||||
clientSecret: process.env.GITHUB_CLIENT_SECRET,
|
||||
callbackURL: '/api/auth/github/callback',
|
||||
scope: ['user:email'],
|
||||
},
|
||||
async (
|
||||
_accessToken: string,
|
||||
_refreshToken: string,
|
||||
profile: GitHubProfile,
|
||||
done: (error: Error | null, user?: UserProfile | false) => void,
|
||||
) => {
|
||||
try {
|
||||
const email = profile.emails?.[0]?.value;
|
||||
if (!email) {
|
||||
return done(
|
||||
new Error(
|
||||
'No public email found in GitHub profile. Please ensure your primary email is public or add one.',
|
||||
),
|
||||
false,
|
||||
);
|
||||
}
|
||||
|
||||
// // Check if user already exists in our database
|
||||
// const user = await db.findUserByEmail(email); // Changed to const as 'user' is not reassigned
|
||||
// Check if user already exists in our database
|
||||
const existingUserProfile = await db.userRepo.findUserWithProfileByEmail(email, logger);
|
||||
|
||||
// if (user) {
|
||||
// // User exists, proceed to log them in.
|
||||
// req.log.info(`GitHub OAuth successful for existing user: ${email}`);
|
||||
// // The password_hash is intentionally destructured and discarded for security.
|
||||
// const { password_hash, ...userWithoutHash } = user;
|
||||
// return done(null, userWithoutHash);
|
||||
// } else {
|
||||
// // User does not exist, create a new account for them.
|
||||
// req.log.info(`GitHub OAuth: creating new user for email: ${email}`);
|
||||
if (existingUserProfile) {
|
||||
// User exists, proceed to log them in.
|
||||
logger.info(`GitHub OAuth successful for existing user: ${email}`);
|
||||
// Strip sensitive fields before returning
|
||||
const {
|
||||
password_hash: _password_hash,
|
||||
failed_login_attempts: _failed_login_attempts,
|
||||
last_failed_login: _last_failed_login,
|
||||
refresh_token: _refresh_token,
|
||||
...cleanUserProfile
|
||||
} = existingUserProfile;
|
||||
return done(null, cleanUserProfile);
|
||||
} else {
|
||||
// User does not exist, create a new account for them.
|
||||
logger.info(`GitHub OAuth: creating new user for email: ${email}`);
|
||||
|
||||
// // Since this is an OAuth user, they don't have a password.
|
||||
// // We pass `null` for the password hash.
|
||||
// const newUser = await db.createUser(email, null, {
|
||||
// full_name: profile.displayName || profile.username, // GitHub profile might not have displayName
|
||||
// avatar_url: profile.photos?.[0]?.value
|
||||
// });
|
||||
// Since this is an OAuth user, they don't have a password.
|
||||
// We pass `null` for the password hash.
|
||||
const newUserProfile = await db.userRepo.createUser(
|
||||
email,
|
||||
null, // No password for OAuth users
|
||||
{
|
||||
full_name: profile.displayName || profile.username, // GitHub profile might not have displayName
|
||||
avatar_url: profile.photos?.[0]?.value,
|
||||
},
|
||||
logger,
|
||||
);
|
||||
|
||||
// // Send a welcome email to the new user
|
||||
// try {
|
||||
// await sendWelcomeEmail(email, profile.displayName || profile.username);
|
||||
// } catch (emailError) {
|
||||
// req.log.error(`Failed to send welcome email to new GitHub user ${email}`, { error: emailError });
|
||||
// // Don't block the login flow if email fails.
|
||||
// }
|
||||
|
||||
// // The `createUser` function returns the user object without the password hash.
|
||||
// return done(null, newUser);
|
||||
// }
|
||||
// } catch (err) {
|
||||
// req.log.error('Error during GitHub authentication strategy:', { error: err });
|
||||
// return done(err, false);
|
||||
// }
|
||||
// }
|
||||
// ));
|
||||
return done(null, newUserProfile);
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error({ error: err }, 'Error during GitHub authentication strategy');
|
||||
return done(err as Error, false);
|
||||
}
|
||||
},
|
||||
),
|
||||
);
|
||||
logger.info('[Passport] GitHub OAuth strategy registered.');
|
||||
} else {
|
||||
logger.warn(
|
||||
'[Passport] GitHub OAuth strategy NOT registered: GITHUB_CLIENT_ID or GITHUB_CLIENT_SECRET not set.',
|
||||
);
|
||||
}
|
||||
|
||||
// --- Passport JWT Strategy (for protecting API routes) ---
|
||||
const jwtOptions = {
|
||||
98
src/config/queryClient.test.tsx
Normal file
98
src/config/queryClient.test.tsx
Normal file
@@ -0,0 +1,98 @@
|
||||
// src/config/queryClient.test.ts
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { QueryClientProvider } from '@tanstack/react-query';
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { useMutation } from '@tanstack/react-query';
|
||||
import type { ReactNode } from 'react';
|
||||
import { queryClient } from './queryClient';
|
||||
import * as loggerModule from '../services/logger.client';
|
||||
|
||||
vi.mock('../services/logger.client', () => ({
|
||||
logger: {
|
||||
error: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
debug: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
const mockedLogger = vi.mocked(loggerModule.logger);
|
||||
|
||||
describe('queryClient', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
queryClient.clear();
|
||||
});
|
||||
|
||||
describe('configuration', () => {
|
||||
it('should have correct default query options', () => {
|
||||
const defaultOptions = queryClient.getDefaultOptions();
|
||||
|
||||
expect(defaultOptions.queries?.staleTime).toBe(1000 * 60 * 5); // 5 minutes
|
||||
expect(defaultOptions.queries?.gcTime).toBe(1000 * 60 * 30); // 30 minutes
|
||||
expect(defaultOptions.queries?.retry).toBe(1);
|
||||
expect(defaultOptions.queries?.refetchOnWindowFocus).toBe(false);
|
||||
expect(defaultOptions.queries?.refetchOnMount).toBe(true);
|
||||
expect(defaultOptions.queries?.refetchOnReconnect).toBe(false);
|
||||
});
|
||||
|
||||
it('should have correct default mutation options', () => {
|
||||
const defaultOptions = queryClient.getDefaultOptions();
|
||||
|
||||
expect(defaultOptions.mutations?.retry).toBe(0);
|
||||
expect(defaultOptions.mutations?.onError).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('mutation onError callback', () => {
|
||||
const wrapper = ({ children }: { children: ReactNode }) => (
|
||||
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||
);
|
||||
|
||||
it('should log Error instance message on mutation error', async () => {
|
||||
const testError = new Error('Test mutation error');
|
||||
|
||||
const { result } = renderHook(
|
||||
() =>
|
||||
useMutation({
|
||||
mutationFn: async () => {
|
||||
throw testError;
|
||||
},
|
||||
}),
|
||||
{ wrapper },
|
||||
);
|
||||
|
||||
result.current.mutate();
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(mockedLogger.error).toHaveBeenCalledWith('Mutation error', {
|
||||
error: 'Test mutation error',
|
||||
});
|
||||
});
|
||||
|
||||
it('should log "Unknown error" for non-Error objects', async () => {
|
||||
const { result } = renderHook(
|
||||
() =>
|
||||
useMutation({
|
||||
mutationFn: async () => {
|
||||
throw 'string error';
|
||||
},
|
||||
}),
|
||||
{ wrapper },
|
||||
);
|
||||
|
||||
result.current.mutate();
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(mockedLogger.error).toHaveBeenCalledWith('Mutation error', {
|
||||
error: 'Unknown error',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
84
src/config/queryKeys.ts
Normal file
84
src/config/queryKeys.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
// src/config/queryKeys.ts
|
||||
/**
|
||||
* Centralized query keys for TanStack Query.
|
||||
*
|
||||
* This file provides a single source of truth for all query keys used
|
||||
* throughout the application. Using these factory functions ensures
|
||||
* consistent key naming and proper cache invalidation.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* // In a query hook
|
||||
* useQuery({
|
||||
* queryKey: queryKeys.flyers(10, 0),
|
||||
* queryFn: fetchFlyers,
|
||||
* });
|
||||
*
|
||||
* // For cache invalidation
|
||||
* queryClient.invalidateQueries({ queryKey: queryKeys.watchedItems() });
|
||||
* ```
|
||||
*/
|
||||
export const queryKeys = {
|
||||
// User Features
|
||||
flyers: (limit: number, offset: number) => ['flyers', { limit, offset }] as const,
|
||||
flyerItems: (flyerId: number) => ['flyer-items', flyerId] as const,
|
||||
flyerItemsBatch: (flyerIds: number[]) =>
|
||||
['flyer-items-batch', flyerIds.sort().join(',')] as const,
|
||||
flyerItemsCount: (flyerIds: number[]) =>
|
||||
['flyer-items-count', flyerIds.sort().join(',')] as const,
|
||||
masterItems: () => ['master-items'] as const,
|
||||
watchedItems: () => ['watched-items'] as const,
|
||||
shoppingLists: () => ['shopping-lists'] as const,
|
||||
|
||||
// Auth & Profile
|
||||
authProfile: () => ['auth-profile'] as const,
|
||||
userAddress: (addressId: number | null) => ['user-address', addressId] as const,
|
||||
userProfileData: () => ['user-profile-data'] as const,
|
||||
|
||||
// Admin Features
|
||||
activityLog: (limit: number, offset: number) => ['activity-log', { limit, offset }] as const,
|
||||
applicationStats: () => ['application-stats'] as const,
|
||||
suggestedCorrections: () => ['suggested-corrections'] as const,
|
||||
categories: () => ['categories'] as const,
|
||||
|
||||
// Analytics
|
||||
bestSalePrices: () => ['best-sale-prices'] as const,
|
||||
priceHistory: (masterItemIds: number[]) =>
|
||||
['price-history', [...masterItemIds].sort((a, b) => a - b).join(',')] as const,
|
||||
leaderboard: (limit: number) => ['leaderboard', limit] as const,
|
||||
} as const;
|
||||
|
||||
/**
|
||||
* Base keys for partial matching in cache invalidation.
|
||||
*
|
||||
* Use these when you need to invalidate all queries of a certain type
|
||||
* regardless of their parameters.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* // Invalidate all flyer-related queries
|
||||
* queryClient.invalidateQueries({ queryKey: queryKeyBases.flyers });
|
||||
* ```
|
||||
*/
|
||||
export const queryKeyBases = {
|
||||
flyers: ['flyers'] as const,
|
||||
flyerItems: ['flyer-items'] as const,
|
||||
flyerItemsBatch: ['flyer-items-batch'] as const,
|
||||
flyerItemsCount: ['flyer-items-count'] as const,
|
||||
masterItems: ['master-items'] as const,
|
||||
watchedItems: ['watched-items'] as const,
|
||||
shoppingLists: ['shopping-lists'] as const,
|
||||
authProfile: ['auth-profile'] as const,
|
||||
userAddress: ['user-address'] as const,
|
||||
userProfileData: ['user-profile-data'] as const,
|
||||
activityLog: ['activity-log'] as const,
|
||||
applicationStats: ['application-stats'] as const,
|
||||
suggestedCorrections: ['suggested-corrections'] as const,
|
||||
categories: ['categories'] as const,
|
||||
bestSalePrices: ['best-sale-prices'] as const,
|
||||
priceHistory: ['price-history'] as const,
|
||||
leaderboard: ['leaderboard'] as const,
|
||||
} as const;
|
||||
|
||||
export type QueryKeys = typeof queryKeys;
|
||||
export type QueryKeyBases = typeof queryKeyBases;
|
||||
228
src/config/swagger.ts
Normal file
228
src/config/swagger.ts
Normal file
@@ -0,0 +1,228 @@
|
||||
// src/config/swagger.ts
|
||||
/**
|
||||
* @file OpenAPI/Swagger configuration for API documentation.
|
||||
* Implements ADR-018: API Documentation Strategy.
|
||||
*
|
||||
* This file configures swagger-jsdoc to generate an OpenAPI 3.0 specification
|
||||
* from JSDoc annotations in route files. The specification is used by
|
||||
* swagger-ui-express to serve interactive API documentation.
|
||||
*/
|
||||
import swaggerJsdoc from 'swagger-jsdoc';
|
||||
|
||||
const options: swaggerJsdoc.Options = {
|
||||
definition: {
|
||||
openapi: '3.0.0',
|
||||
info: {
|
||||
title: 'Flyer Crawler API',
|
||||
version: '1.0.0',
|
||||
description:
|
||||
'API for the Flyer Crawler application - a platform for discovering grocery deals, managing recipes, and tracking budgets.',
|
||||
contact: {
|
||||
name: 'API Support',
|
||||
},
|
||||
license: {
|
||||
name: 'Private',
|
||||
},
|
||||
},
|
||||
servers: [
|
||||
{
|
||||
url: '/api',
|
||||
description: 'API server',
|
||||
},
|
||||
],
|
||||
components: {
|
||||
securitySchemes: {
|
||||
bearerAuth: {
|
||||
type: 'http',
|
||||
scheme: 'bearer',
|
||||
bearerFormat: 'JWT',
|
||||
description: 'JWT token obtained from /auth/login or /auth/register',
|
||||
},
|
||||
},
|
||||
schemas: {
|
||||
// Standard success response wrapper (ADR-028)
|
||||
SuccessResponse: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
success: {
|
||||
type: 'boolean',
|
||||
example: true,
|
||||
},
|
||||
data: {
|
||||
type: 'object',
|
||||
description: 'Response payload - structure varies by endpoint',
|
||||
},
|
||||
},
|
||||
required: ['success', 'data'],
|
||||
},
|
||||
// Standard error response wrapper (ADR-028)
|
||||
ErrorResponse: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
success: {
|
||||
type: 'boolean',
|
||||
example: false,
|
||||
},
|
||||
error: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
code: {
|
||||
type: 'string',
|
||||
description: 'Machine-readable error code',
|
||||
example: 'VALIDATION_ERROR',
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Human-readable error message',
|
||||
example: 'Invalid request parameters',
|
||||
},
|
||||
},
|
||||
required: ['code', 'message'],
|
||||
},
|
||||
},
|
||||
required: ['success', 'error'],
|
||||
},
|
||||
// Common service health status
|
||||
ServiceHealth: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
status: {
|
||||
type: 'string',
|
||||
enum: ['healthy', 'degraded', 'unhealthy'],
|
||||
},
|
||||
latency: {
|
||||
type: 'number',
|
||||
description: 'Response time in milliseconds',
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Additional status information',
|
||||
},
|
||||
details: {
|
||||
type: 'object',
|
||||
description: 'Service-specific details',
|
||||
},
|
||||
},
|
||||
required: ['status'],
|
||||
},
|
||||
// Achievement schema
|
||||
Achievement: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
achievement_id: {
|
||||
type: 'integer',
|
||||
example: 1,
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
example: 'First-Upload',
|
||||
},
|
||||
description: {
|
||||
type: 'string',
|
||||
example: 'Upload your first flyer',
|
||||
},
|
||||
icon: {
|
||||
type: 'string',
|
||||
example: 'upload-cloud',
|
||||
},
|
||||
points_value: {
|
||||
type: 'integer',
|
||||
example: 25,
|
||||
},
|
||||
created_at: {
|
||||
type: 'string',
|
||||
format: 'date-time',
|
||||
},
|
||||
},
|
||||
},
|
||||
// User achievement (with achieved_at)
|
||||
UserAchievement: {
|
||||
allOf: [
|
||||
{ $ref: '#/components/schemas/Achievement' },
|
||||
{
|
||||
type: 'object',
|
||||
properties: {
|
||||
user_id: {
|
||||
type: 'string',
|
||||
format: 'uuid',
|
||||
},
|
||||
achieved_at: {
|
||||
type: 'string',
|
||||
format: 'date-time',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
// Leaderboard entry
|
||||
LeaderboardUser: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
user_id: {
|
||||
type: 'string',
|
||||
format: 'uuid',
|
||||
},
|
||||
full_name: {
|
||||
type: 'string',
|
||||
example: 'John Doe',
|
||||
},
|
||||
avatar_url: {
|
||||
type: 'string',
|
||||
nullable: true,
|
||||
},
|
||||
points: {
|
||||
type: 'integer',
|
||||
example: 150,
|
||||
},
|
||||
rank: {
|
||||
type: 'integer',
|
||||
example: 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
tags: [
|
||||
{
|
||||
name: 'Health',
|
||||
description: 'Server health and readiness checks',
|
||||
},
|
||||
{
|
||||
name: 'Auth',
|
||||
description: 'Authentication and authorization',
|
||||
},
|
||||
{
|
||||
name: 'Users',
|
||||
description: 'User profile management',
|
||||
},
|
||||
{
|
||||
name: 'Achievements',
|
||||
description: 'Gamification and leaderboards',
|
||||
},
|
||||
{
|
||||
name: 'Flyers',
|
||||
description: 'Flyer uploads and retrieval',
|
||||
},
|
||||
{
|
||||
name: 'Recipes',
|
||||
description: 'Recipe management',
|
||||
},
|
||||
{
|
||||
name: 'Budgets',
|
||||
description: 'Budget tracking and analysis',
|
||||
},
|
||||
{
|
||||
name: 'Admin',
|
||||
description: 'Administrative operations (requires admin role)',
|
||||
},
|
||||
{
|
||||
name: 'System',
|
||||
description: 'System status and monitoring',
|
||||
},
|
||||
],
|
||||
},
|
||||
// Path to the API routes files with JSDoc annotations
|
||||
apis: ['./src/routes/*.ts'],
|
||||
};
|
||||
|
||||
export const swaggerSpec = swaggerJsdoc(options);
|
||||
18
src/config/workerOptions.ts
Normal file
18
src/config/workerOptions.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { WorkerOptions } from 'bullmq';
|
||||
|
||||
/**
|
||||
* Standard worker options for stall detection and recovery.
|
||||
* Defined in ADR-053.
|
||||
*
|
||||
* Note: This is a partial configuration that must be spread into a full
|
||||
* WorkerOptions object along with a `connection` property when creating workers.
|
||||
*/
|
||||
export const defaultWorkerOptions: Omit<WorkerOptions, 'connection'> = {
|
||||
// Check for stalled jobs every 30 seconds
|
||||
stalledInterval: 30000,
|
||||
// Fail job after 3 stalls (prevents infinite loops causing infinite retries)
|
||||
maxStalledCount: 3,
|
||||
// Duration of the lock for the job in milliseconds.
|
||||
// If the worker doesn't renew this (e.g. crash), the job stalls.
|
||||
lockDuration: 30000,
|
||||
};
|
||||
@@ -124,4 +124,59 @@ describe('PriceChart', () => {
|
||||
// Milk: $1.13/L (already metric)
|
||||
expect(screen.getByText('$1.13/L')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should display N/A when unit_price is null or undefined', () => {
|
||||
const dealsWithoutUnitPrice: DealItem[] = [
|
||||
{
|
||||
item: 'Mystery Item',
|
||||
master_item_name: null,
|
||||
price_display: '$9.99',
|
||||
price_in_cents: 999,
|
||||
quantity: '1 pack',
|
||||
storeName: 'Test Store',
|
||||
unit_price: null, // No unit price available
|
||||
},
|
||||
];
|
||||
|
||||
mockedUseActiveDeals.mockReturnValue({
|
||||
activeDeals: dealsWithoutUnitPrice,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
totalActiveItems: dealsWithoutUnitPrice.length,
|
||||
});
|
||||
|
||||
render(<PriceChart {...defaultProps} />);
|
||||
|
||||
expect(screen.getByText('Mystery Item')).toBeInTheDocument();
|
||||
expect(screen.getByText('$9.99')).toBeInTheDocument();
|
||||
expect(screen.getByText('N/A')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should not show master item name when it matches the item name (case insensitive)', () => {
|
||||
const dealWithSameMasterName: DealItem[] = [
|
||||
{
|
||||
item: 'Apples',
|
||||
master_item_name: 'APPLES', // Same as item name, different case
|
||||
price_display: '$2.99',
|
||||
price_in_cents: 299,
|
||||
quantity: 'per lb',
|
||||
storeName: 'Fresh Mart',
|
||||
unit_price: { value: 299, unit: 'lb' },
|
||||
},
|
||||
];
|
||||
|
||||
mockedUseActiveDeals.mockReturnValue({
|
||||
activeDeals: dealWithSameMasterName,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
totalActiveItems: dealWithSameMasterName.length,
|
||||
});
|
||||
|
||||
render(<PriceChart {...defaultProps} />);
|
||||
|
||||
expect(screen.getByText('Apples')).toBeInTheDocument();
|
||||
// The master item name should NOT be shown since it matches the item name
|
||||
expect(screen.queryByText('(APPLES)')).not.toBeInTheDocument();
|
||||
expect(screen.queryByText('(Apples)')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
createMockMasterGroceryItem,
|
||||
createMockHistoricalPriceDataPoint,
|
||||
} from '../../tests/utils/mockFactories';
|
||||
import { QueryWrapper } from '../../tests/utils/renderWithProviders';
|
||||
|
||||
// Mock the apiClient
|
||||
vi.mock('../../services/apiClient');
|
||||
@@ -18,6 +19,8 @@ vi.mock('../../services/apiClient');
|
||||
vi.mock('../../hooks/useUserData');
|
||||
const mockedUseUserData = useUserData as Mock;
|
||||
|
||||
const renderWithQuery = (ui: React.ReactElement) => render(ui, { wrapper: QueryWrapper });
|
||||
|
||||
// Mock the logger
|
||||
vi.mock('../../services/logger', () => ({
|
||||
logger: {
|
||||
@@ -116,7 +119,7 @@ describe('PriceHistoryChart', () => {
|
||||
isLoading: false,
|
||||
error: null,
|
||||
});
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
expect(
|
||||
screen.getByText('Add items to your watchlist to see their price trends over time.'),
|
||||
).toBeInTheDocument();
|
||||
@@ -124,13 +127,13 @@ describe('PriceHistoryChart', () => {
|
||||
|
||||
it('should display a loading state while fetching data', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockReturnValue(new Promise(() => {}));
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
expect(screen.getByText('Loading Price History...')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should display an error message if the API call fails', async () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockRejectedValue(new Error('API is down'));
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
// Use regex to match the error message text which might be split across elements
|
||||
@@ -142,7 +145,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify([])),
|
||||
);
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
@@ -157,7 +160,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify(mockPriceHistory)),
|
||||
);
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
// Check that the API was called with the correct item IDs
|
||||
@@ -186,7 +189,7 @@ describe('PriceHistoryChart', () => {
|
||||
error: null,
|
||||
});
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockReturnValue(new Promise(() => {}));
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
expect(screen.getByText('Loading Price History...')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
@@ -194,7 +197,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify(mockPriceHistory)),
|
||||
);
|
||||
const { rerender } = render(<PriceHistoryChart />);
|
||||
const { rerender } = renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
// Initial render with items
|
||||
await waitFor(() => {
|
||||
@@ -242,7 +245,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify(dataWithSinglePoint)),
|
||||
);
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('line-Organic Bananas')).toBeInTheDocument();
|
||||
@@ -271,7 +274,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify(dataWithDuplicateDate)),
|
||||
);
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
const chart = screen.getByTestId('line-chart');
|
||||
@@ -305,7 +308,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify(dataWithZeroPrice)),
|
||||
);
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
const chart = screen.getByTestId('line-chart');
|
||||
@@ -330,7 +333,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify(malformedData)),
|
||||
);
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
// Should show "Not enough historical data" because all points are invalid or filtered
|
||||
@@ -363,7 +366,7 @@ describe('PriceHistoryChart', () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||
new Response(JSON.stringify(dataWithHigherPrice)),
|
||||
);
|
||||
render(<PriceHistoryChart />);
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
const chart = screen.getByTestId('line-chart');
|
||||
@@ -374,11 +377,12 @@ describe('PriceHistoryChart', () => {
|
||||
});
|
||||
|
||||
it('should handle non-Error objects thrown during fetch', async () => {
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockRejectedValue('String Error');
|
||||
render(<PriceHistoryChart />);
|
||||
// Use an actual Error object since the component displays error.message
|
||||
vi.mocked(apiClient.fetchHistoricalPriceData).mockRejectedValue(new Error('Fetch failed'));
|
||||
renderWithQuery(<PriceHistoryChart />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Failed to load price history.')).toBeInTheDocument();
|
||||
expect(screen.getByText(/Fetch failed/)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// src/features/charts/PriceHistoryChart.tsx
|
||||
import React, { useState, useEffect, useMemo } from 'react';
|
||||
import React, { useMemo } from 'react';
|
||||
import {
|
||||
LineChart,
|
||||
Line,
|
||||
@@ -10,9 +10,9 @@ import {
|
||||
Legend,
|
||||
ResponsiveContainer,
|
||||
} from 'recharts';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { LoadingSpinner } from '../../components/LoadingSpinner'; // This path is correct
|
||||
import { LoadingSpinner } from '../../components/LoadingSpinner';
|
||||
import { useUserData } from '../../hooks/useUserData';
|
||||
import { usePriceHistoryQuery } from '../../hooks/queries/usePriceHistoryQuery';
|
||||
import type { HistoricalPriceDataPoint } from '../../types';
|
||||
|
||||
type HistoricalData = Record<string, { date: string; price: number }[]>;
|
||||
@@ -20,101 +20,80 @@ type ChartData = { date: string; [itemName: string]: number | string };
|
||||
|
||||
const COLORS = ['#10B981', '#3B82F6', '#F59E0B', '#EF4444', '#8B5CF6', '#EC4899'];
|
||||
|
||||
/**
|
||||
* Chart component displaying historical price trends for watched items.
|
||||
*
|
||||
* Refactored to use TanStack Query (ADR-0005 Phase 8).
|
||||
*/
|
||||
export const PriceHistoryChart: React.FC = () => {
|
||||
const { watchedItems, isLoading: isLoadingUserData } = useUserData();
|
||||
const [historicalData, setHistoricalData] = useState<HistoricalData>({});
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const watchedItemsMap = useMemo(
|
||||
() => new Map(watchedItems.map((item) => [item.master_grocery_item_id, item.name])),
|
||||
[watchedItems],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (watchedItems.length === 0) {
|
||||
setIsLoading(false);
|
||||
setHistoricalData({}); // Clear data if watchlist becomes empty
|
||||
return;
|
||||
}
|
||||
const watchedItemIds = useMemo(
|
||||
() =>
|
||||
watchedItems
|
||||
.map((item) => item.master_grocery_item_id)
|
||||
.filter((id): id is number => id !== undefined),
|
||||
[watchedItems],
|
||||
);
|
||||
|
||||
const fetchData = async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
try {
|
||||
const watchedItemIds = watchedItems
|
||||
.map((item) => item.master_grocery_item_id)
|
||||
.filter((id): id is number => id !== undefined); // Ensure only numbers are passed
|
||||
const response = await apiClient.fetchHistoricalPriceData(watchedItemIds);
|
||||
const rawData: HistoricalPriceDataPoint[] = await response.json();
|
||||
if (rawData.length === 0) {
|
||||
setHistoricalData({});
|
||||
return;
|
||||
const {
|
||||
data: rawData = [],
|
||||
isLoading,
|
||||
error,
|
||||
} = usePriceHistoryQuery(watchedItemIds, watchedItemIds.length > 0);
|
||||
|
||||
// Process raw data into chart-friendly format
|
||||
const historicalData = useMemo<HistoricalData>(() => {
|
||||
if (rawData.length === 0) return {};
|
||||
|
||||
const processedData = rawData.reduce<HistoricalData>(
|
||||
(acc, record: HistoricalPriceDataPoint) => {
|
||||
if (!record.master_item_id || record.avg_price_in_cents === null || !record.summary_date)
|
||||
return acc;
|
||||
|
||||
const itemName = watchedItemsMap.get(record.master_item_id);
|
||||
if (!itemName) return acc;
|
||||
|
||||
const priceInCents = record.avg_price_in_cents;
|
||||
const date = new Date(`${record.summary_date}T00:00:00`).toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
});
|
||||
|
||||
if (priceInCents === 0) return acc;
|
||||
|
||||
if (!acc[itemName]) {
|
||||
acc[itemName] = [];
|
||||
}
|
||||
|
||||
const processedData = rawData.reduce<HistoricalData>(
|
||||
(acc, record: HistoricalPriceDataPoint) => {
|
||||
if (
|
||||
!record.master_item_id ||
|
||||
record.avg_price_in_cents === null ||
|
||||
!record.summary_date
|
||||
)
|
||||
return acc;
|
||||
// Ensure we only store the LOWEST price for a given day
|
||||
const existingEntryIndex = acc[itemName].findIndex((entry) => entry.date === date);
|
||||
if (existingEntryIndex > -1) {
|
||||
if (priceInCents < acc[itemName][existingEntryIndex].price) {
|
||||
acc[itemName][existingEntryIndex].price = priceInCents;
|
||||
}
|
||||
} else {
|
||||
acc[itemName].push({ date, price: priceInCents });
|
||||
}
|
||||
|
||||
const itemName = watchedItemsMap.get(record.master_item_id);
|
||||
if (!itemName) return acc;
|
||||
return acc;
|
||||
},
|
||||
{},
|
||||
);
|
||||
|
||||
const priceInCents = record.avg_price_in_cents;
|
||||
const date = new Date(`${record.summary_date}T00:00:00`).toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
});
|
||||
|
||||
if (priceInCents === 0) return acc;
|
||||
|
||||
if (!acc[itemName]) {
|
||||
acc[itemName] = [];
|
||||
}
|
||||
|
||||
// Ensure we only store the LOWEST price for a given day
|
||||
const existingEntryIndex = acc[itemName].findIndex((entry) => entry.date === date);
|
||||
if (existingEntryIndex > -1) {
|
||||
if (priceInCents < acc[itemName][existingEntryIndex].price) {
|
||||
acc[itemName][existingEntryIndex].price = priceInCents;
|
||||
}
|
||||
} else {
|
||||
acc[itemName].push({ date, price: priceInCents });
|
||||
}
|
||||
|
||||
return acc;
|
||||
},
|
||||
{},
|
||||
);
|
||||
|
||||
// Filter out items that only have one data point for a meaningful trend line
|
||||
const filteredData = Object.entries(processedData).reduce<HistoricalData>(
|
||||
(acc, [key, value]) => {
|
||||
if (value.length > 1) {
|
||||
acc[key] = value.sort(
|
||||
(a, b) => new Date(a.date).getTime() - new Date(b.date).getTime(),
|
||||
);
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{},
|
||||
);
|
||||
|
||||
setHistoricalData(filteredData);
|
||||
} catch (e) {
|
||||
// This is a type-safe way to handle errors. We check if the caught
|
||||
// object is an instance of Error before accessing its message property.
|
||||
setError(e instanceof Error ? e.message : 'Failed to load price history.');
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
// Filter out items that only have one data point for a meaningful trend line
|
||||
return Object.entries(processedData).reduce<HistoricalData>((acc, [key, value]) => {
|
||||
if (value.length > 1) {
|
||||
acc[key] = value.sort((a, b) => new Date(a.date).getTime() - new Date(b.date).getTime());
|
||||
}
|
||||
};
|
||||
fetchData();
|
||||
}, [watchedItems, watchedItemsMap]);
|
||||
return acc;
|
||||
}, {});
|
||||
}, [rawData, watchedItemsMap]);
|
||||
|
||||
const chartData = useMemo<ChartData[]>(() => {
|
||||
const availableItems = Object.keys(historicalData);
|
||||
@@ -155,7 +134,7 @@ export const PriceHistoryChart: React.FC = () => {
|
||||
role="alert"
|
||||
>
|
||||
<p>
|
||||
<strong>Error:</strong> {error}
|
||||
<strong>Error:</strong> {error.message}
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -301,4 +301,61 @@ describe('AnalysisPanel', () => {
|
||||
expect(screen.getByText('Some insights.')).toBeInTheDocument();
|
||||
expect(screen.queryByText('Sources:')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should display sources for Plan Trip analysis type', () => {
|
||||
const { rerender } = render(<AnalysisPanel selectedFlyer={mockFlyer} />);
|
||||
fireEvent.click(screen.getByRole('tab', { name: /plan trip/i }));
|
||||
|
||||
mockedUseAiAnalysis.mockReturnValue({
|
||||
results: { PLAN_TRIP: 'Here is your trip plan.' },
|
||||
sources: {
|
||||
PLAN_TRIP: [{ title: 'Store Location', uri: 'https://maps.example.com/store1' }],
|
||||
},
|
||||
loadingAnalysis: null,
|
||||
error: null,
|
||||
runAnalysis: mockRunAnalysis,
|
||||
generatedImageUrl: null,
|
||||
generateImage: mockGenerateImage,
|
||||
});
|
||||
|
||||
rerender(<AnalysisPanel selectedFlyer={mockFlyer} />);
|
||||
|
||||
expect(screen.getByText('Here is your trip plan.')).toBeInTheDocument();
|
||||
expect(screen.getByText('Sources:')).toBeInTheDocument();
|
||||
expect(screen.getByText('Store Location')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should display sources for Compare Prices analysis type', () => {
|
||||
const { rerender } = render(<AnalysisPanel selectedFlyer={mockFlyer} />);
|
||||
fireEvent.click(screen.getByRole('tab', { name: /compare prices/i }));
|
||||
|
||||
mockedUseAiAnalysis.mockReturnValue({
|
||||
results: { COMPARE_PRICES: 'Price comparison results.' },
|
||||
sources: {
|
||||
COMPARE_PRICES: [{ title: 'Price Source', uri: 'https://prices.example.com/compare' }],
|
||||
},
|
||||
loadingAnalysis: null,
|
||||
error: null,
|
||||
runAnalysis: mockRunAnalysis,
|
||||
generatedImageUrl: null,
|
||||
generateImage: mockGenerateImage,
|
||||
});
|
||||
|
||||
rerender(<AnalysisPanel selectedFlyer={mockFlyer} />);
|
||||
|
||||
expect(screen.getByText('Price comparison results.')).toBeInTheDocument();
|
||||
expect(screen.getByText('Sources:')).toBeInTheDocument();
|
||||
expect(screen.getByText('Price Source')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should show a loading spinner when loading watched items', () => {
|
||||
mockedUseUserData.mockReturnValue({
|
||||
watchedItems: [],
|
||||
isLoading: true,
|
||||
error: null,
|
||||
});
|
||||
render(<AnalysisPanel selectedFlyer={mockFlyer} />);
|
||||
expect(screen.getByRole('status')).toBeInTheDocument();
|
||||
expect(screen.getByText('Loading data...')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -112,6 +112,30 @@ describe('BulkImporter', () => {
|
||||
expect(dropzone).not.toHaveClass('border-brand-primary');
|
||||
});
|
||||
|
||||
it('should not call onFilesChange when files are dropped while isProcessing is true', () => {
|
||||
render(<BulkImporter onFilesChange={mockOnFilesChange} isProcessing={true} />);
|
||||
const dropzone = screen.getByText(/processing, please wait.../i).closest('label')!;
|
||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||
|
||||
fireEvent.drop(dropzone, {
|
||||
dataTransfer: {
|
||||
files: [file],
|
||||
},
|
||||
});
|
||||
|
||||
expect(mockOnFilesChange).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle file input change with null files', async () => {
|
||||
render(<BulkImporter onFilesChange={mockOnFilesChange} isProcessing={false} />);
|
||||
const input = screen.getByLabelText(/click to upload/i);
|
||||
|
||||
// Simulate a change event with null files (e.g., when user cancels file picker)
|
||||
fireEvent.change(input, { target: { files: null } });
|
||||
|
||||
expect(mockOnFilesChange).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
describe('when files are selected', () => {
|
||||
const imageFile = new File(['image-content'], 'flyer.jpg', { type: 'image/jpeg' });
|
||||
const pdfFile = new File(['pdf-content'], 'document.pdf', { type: 'application/pdf' });
|
||||
|
||||
@@ -561,5 +561,67 @@ describe('ExtractedDataTable', () => {
|
||||
render(<ExtractedDataTable {...defaultProps} items={[itemWithQtyNum]} />);
|
||||
expect(screen.getByText('(5)')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should use fallback category when adding to watchlist for items without category_name', () => {
|
||||
const itemWithoutCategory = createMockFlyerItem({
|
||||
flyer_item_id: 999,
|
||||
item: 'Mystery Item',
|
||||
master_item_id: 10,
|
||||
category_name: undefined,
|
||||
flyer_id: 1,
|
||||
});
|
||||
|
||||
// Mock masterItems to include a matching item for canonical name resolution
|
||||
vi.mocked(useMasterItems).mockReturnValue({
|
||||
masterItems: [
|
||||
createMockMasterGroceryItem({
|
||||
master_grocery_item_id: 10,
|
||||
name: 'Canonical Mystery',
|
||||
}),
|
||||
],
|
||||
isLoading: false,
|
||||
error: null,
|
||||
});
|
||||
|
||||
render(<ExtractedDataTable {...defaultProps} items={[itemWithoutCategory]} />);
|
||||
|
||||
const itemRow = screen.getByText('Mystery Item').closest('tr')!;
|
||||
const watchButton = within(itemRow).getByTitle("Add 'Canonical Mystery' to your watchlist");
|
||||
fireEvent.click(watchButton);
|
||||
|
||||
expect(mockAddWatchedItem).toHaveBeenCalledWith('Canonical Mystery', 'Other/Miscellaneous');
|
||||
});
|
||||
|
||||
it('should not call addItemToList when activeListId is null and button is clicked', () => {
|
||||
vi.mocked(useShoppingLists).mockReturnValue({
|
||||
activeListId: null,
|
||||
shoppingLists: [],
|
||||
addItemToList: mockAddItemToList,
|
||||
setActiveListId: vi.fn(),
|
||||
createList: vi.fn(),
|
||||
deleteList: vi.fn(),
|
||||
updateItemInList: vi.fn(),
|
||||
removeItemFromList: vi.fn(),
|
||||
isCreatingList: false,
|
||||
isDeletingList: false,
|
||||
isAddingItem: false,
|
||||
isUpdatingItem: false,
|
||||
isRemovingItem: false,
|
||||
error: null,
|
||||
});
|
||||
|
||||
render(<ExtractedDataTable {...defaultProps} />);
|
||||
|
||||
// Even with disabled button, test the handler logic by verifying no call is made
|
||||
// The buttons are disabled but we verify that even if clicked, no action occurs
|
||||
const addToListButtons = screen.getAllByTitle('Select a shopping list first');
|
||||
expect(addToListButtons.length).toBeGreaterThan(0);
|
||||
|
||||
// Click the button (even though disabled)
|
||||
fireEvent.click(addToListButtons[0]);
|
||||
|
||||
// addItemToList should not be called because activeListId is null
|
||||
expect(mockAddItemToList).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -65,6 +65,12 @@ describe('FlyerDisplay', () => {
|
||||
expect(screen.queryByAltText('SuperMart Logo')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should use fallback alt text when store has logo_url but no name', () => {
|
||||
const storeWithoutName = { ...mockStore, name: undefined };
|
||||
render(<FlyerDisplay {...defaultProps} store={storeWithoutName as any} />);
|
||||
expect(screen.getByAltText('Store Logo')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should format a single day validity correctly', () => {
|
||||
render(<FlyerDisplay {...defaultProps} validFrom="2023-10-26" validTo="2023-10-26" />);
|
||||
expect(screen.getByText('Valid on October 26, 2023')).toBeInTheDocument();
|
||||
|
||||
@@ -322,6 +322,20 @@ describe('FlyerList', () => {
|
||||
expect(screen.getByText('• Expires in 6 days')).toBeInTheDocument();
|
||||
expect(screen.getByText('• Expires in 6 days')).toHaveClass('text-green-600');
|
||||
});
|
||||
|
||||
it('should show "Expires in 1 day" (singular) when exactly 1 day left', () => {
|
||||
vi.setSystemTime(new Date('2023-10-10T12:00:00Z')); // 1 day left until Oct 11
|
||||
render(
|
||||
<FlyerList
|
||||
flyers={[mockFlyers[0]]}
|
||||
onFlyerSelect={mockOnFlyerSelect}
|
||||
selectedFlyerId={null}
|
||||
profile={mockProfile}
|
||||
/>,
|
||||
);
|
||||
expect(screen.getByText('• Expires in 1 day')).toBeInTheDocument();
|
||||
expect(screen.getByText('• Expires in 1 day')).toHaveClass('text-orange-500');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Admin Functionality', () => {
|
||||
@@ -420,6 +434,29 @@ describe('FlyerList', () => {
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Cleanup failed');
|
||||
});
|
||||
});
|
||||
|
||||
it('should show generic error toast if cleanup API call fails with non-Error object', async () => {
|
||||
vi.spyOn(window, 'confirm').mockReturnValue(true);
|
||||
// Reject with a non-Error value (e.g., a string or object)
|
||||
mockedApiClient.cleanupFlyerFiles.mockRejectedValue('Some non-error value');
|
||||
|
||||
render(
|
||||
<FlyerList
|
||||
flyers={mockFlyers}
|
||||
onFlyerSelect={mockOnFlyerSelect}
|
||||
selectedFlyerId={null}
|
||||
profile={adminProfile}
|
||||
/>,
|
||||
);
|
||||
|
||||
const cleanupButton = screen.getByTitle('Clean up files for flyer ID 1');
|
||||
fireEvent.click(cleanupButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockedApiClient.cleanupFlyerFiles).toHaveBeenCalledWith(1);
|
||||
expect(mockedToast.error).toHaveBeenCalledWith('Failed to enqueue cleanup job.');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// src/features/flyer/FlyerUploader.test.tsx
|
||||
import React from 'react';
|
||||
import { render, screen, fireEvent, waitFor, act } from '@testing-library/react';
|
||||
import { render, screen, fireEvent, waitFor, act, cleanup } from '@testing-library/react';
|
||||
import { describe, it, expect, vi, beforeEach, afterEach, type Mock } from 'vitest';
|
||||
import { FlyerUploader } from './FlyerUploader';
|
||||
import * as aiApiClientModule from '../../services/aiApiClient';
|
||||
@@ -47,15 +47,11 @@ const mockedChecksumModule = checksumModule as unknown as {
|
||||
generateFileChecksum: Mock;
|
||||
};
|
||||
|
||||
// Shared QueryClient - will be reset in beforeEach
|
||||
let queryClient: QueryClient;
|
||||
|
||||
const renderComponent = (onProcessingComplete = vi.fn()) => {
|
||||
console.log('--- [TEST LOG] ---: Rendering component inside MemoryRouter.');
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
return render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<MemoryRouter>
|
||||
@@ -69,6 +65,14 @@ describe('FlyerUploader', () => {
|
||||
const navigateSpy = vi.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a fresh QueryClient for each test to ensure isolation
|
||||
queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
// Disable react-query's online manager to prevent it from interfering with fake timers
|
||||
onlineManager.setEventListener((_setOnline) => {
|
||||
return () => {};
|
||||
@@ -80,8 +84,16 @@ describe('FlyerUploader', () => {
|
||||
(useNavigate as Mock).mockReturnValue(navigateSpy);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
afterEach(async () => {
|
||||
console.log(`--- [TEST LOG] ---: Finished test: "${expect.getState().currentTestName}"\n`);
|
||||
// Cancel all pending queries to stop any in-flight polling
|
||||
queryClient.cancelQueries();
|
||||
// Clear all pending queries to prevent async leakage
|
||||
queryClient.clear();
|
||||
// Ensure cleanup after each test to prevent DOM leakage
|
||||
cleanup();
|
||||
// Small delay to allow any pending microtasks to settle
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
});
|
||||
|
||||
it('should render the initial state correctly', () => {
|
||||
@@ -173,67 +185,71 @@ describe('FlyerUploader', () => {
|
||||
expect(mockedAiApiClient.uploadAndProcessFlyer).toHaveBeenCalledWith(file, 'mock-checksum');
|
||||
});
|
||||
|
||||
it('should poll for status, complete successfully, and redirect', async () => {
|
||||
const onProcessingComplete = vi.fn();
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
|
||||
mockedAiApiClient.getJobStatus
|
||||
.mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
|
||||
.mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });
|
||||
it(
|
||||
'should poll for status, complete successfully, and redirect',
|
||||
{ timeout: 10000 },
|
||||
async () => {
|
||||
const onProcessingComplete = vi.fn();
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
|
||||
mockedAiApiClient.getJobStatus
|
||||
.mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
|
||||
.mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
|
||||
renderComponent(onProcessingComplete);
|
||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||
const input = screen.getByLabelText(/click to select a file/i);
|
||||
fireEvent.change(input, { target: { files: [file] } });
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
|
||||
renderComponent(onProcessingComplete);
|
||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||
const input = screen.getByLabelText(/click to select a file/i);
|
||||
fireEvent.change(input, { target: { files: [file] } });
|
||||
|
||||
console.log('--- [TEST LOG] ---: 3. Fired event. Now AWAITING UI update to "Analyzing...".');
|
||||
try {
|
||||
await screen.findByText('Analyzing...');
|
||||
console.log('--- [TEST LOG] ---: 4. SUCCESS: UI is showing "Analyzing...".');
|
||||
} catch (error) {
|
||||
console.error('--- [TEST LOG] ---: 4. ERROR: findByText("Analyzing...") timed out.');
|
||||
console.log('--- [DEBUG] ---: DOM at time of failure:');
|
||||
screen.debug();
|
||||
throw error;
|
||||
}
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
||||
console.log('--- [TEST LOG] ---: 5. First poll confirmed. Now AWAITING timer advancement.');
|
||||
console.log('--- [TEST LOG] ---: 3. Fired event. Now AWAITING UI update to "Analyzing...".');
|
||||
try {
|
||||
await screen.findByText('Analyzing...');
|
||||
console.log('--- [TEST LOG] ---: 4. SUCCESS: UI is showing "Analyzing...".');
|
||||
} catch (error) {
|
||||
console.error('--- [TEST LOG] ---: 4. ERROR: findByText("Analyzing...") timed out.');
|
||||
console.log('--- [DEBUG] ---: DOM at time of failure:');
|
||||
screen.debug();
|
||||
throw error;
|
||||
}
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
||||
console.log('--- [TEST LOG] ---: 5. First poll confirmed. Now AWAITING timer advancement.');
|
||||
|
||||
try {
|
||||
console.log(
|
||||
'--- [TEST LOG] ---: 8a. waitFor check: Waiting for completion text and job status count.',
|
||||
);
|
||||
// Wait for the second poll to occur and the UI to update.
|
||||
await waitFor(
|
||||
() => {
|
||||
console.log(
|
||||
`--- [TEST LOG] ---: 8b. waitFor interval: calls=${
|
||||
mockedAiApiClient.getJobStatus.mock.calls.length
|
||||
}`,
|
||||
);
|
||||
expect(
|
||||
screen.getByText('Processing complete! Redirecting to flyer 42...'),
|
||||
).toBeInTheDocument();
|
||||
},
|
||||
{ timeout: 4000 },
|
||||
);
|
||||
console.log('--- [TEST LOG] ---: 9. SUCCESS: Completion message found.');
|
||||
} catch (error) {
|
||||
console.error('--- [TEST LOG] ---: 9. ERROR: waitFor for completion message timed out.');
|
||||
console.log('--- [DEBUG] ---: DOM at time of failure:');
|
||||
screen.debug();
|
||||
throw error;
|
||||
}
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(2);
|
||||
try {
|
||||
console.log(
|
||||
'--- [TEST LOG] ---: 8a. waitFor check: Waiting for completion text and job status count.',
|
||||
);
|
||||
// Wait for the second poll to occur and the UI to update.
|
||||
await waitFor(
|
||||
() => {
|
||||
console.log(
|
||||
`--- [TEST LOG] ---: 8b. waitFor interval: calls=${
|
||||
mockedAiApiClient.getJobStatus.mock.calls.length
|
||||
}`,
|
||||
);
|
||||
expect(
|
||||
screen.getByText('Processing complete! Redirecting to flyer 42...'),
|
||||
).toBeInTheDocument();
|
||||
},
|
||||
{ timeout: 4000 },
|
||||
);
|
||||
console.log('--- [TEST LOG] ---: 9. SUCCESS: Completion message found.');
|
||||
} catch (error) {
|
||||
console.error('--- [TEST LOG] ---: 9. ERROR: waitFor for completion message timed out.');
|
||||
console.log('--- [DEBUG] ---: DOM at time of failure:');
|
||||
screen.debug();
|
||||
throw error;
|
||||
}
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(2);
|
||||
|
||||
// Wait for the redirect timer (1.5s in component) to fire.
|
||||
await act(() => new Promise((r) => setTimeout(r, 2000)));
|
||||
console.log(`--- [TEST LOG] ---: 11. Timers advanced. Now asserting navigation.`);
|
||||
expect(onProcessingComplete).toHaveBeenCalled();
|
||||
expect(navigateSpy).toHaveBeenCalledWith('/flyers/42');
|
||||
console.log('--- [TEST LOG] ---: 12. Callback and navigation confirmed.');
|
||||
});
|
||||
// Wait for the redirect timer (1.5s in component) to fire.
|
||||
await act(() => new Promise((r) => setTimeout(r, 2000)));
|
||||
console.log(`--- [TEST LOG] ---: 11. Timers advanced. Now asserting navigation.`);
|
||||
expect(onProcessingComplete).toHaveBeenCalled();
|
||||
expect(navigateSpy).toHaveBeenCalledWith('/flyers/42');
|
||||
console.log('--- [TEST LOG] ---: 12. Callback and navigation confirmed.');
|
||||
},
|
||||
);
|
||||
|
||||
it('should handle a failed job', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
|
||||
|
||||
@@ -210,4 +210,60 @@ describe('ProcessingStatus', () => {
|
||||
expect(nonCriticalErrorStage).toHaveTextContent('(optional)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should render null for unknown stage status icon', () => {
|
||||
const stagesWithUnknownStatus: ProcessingStage[] = [
|
||||
createMockProcessingStage({
|
||||
name: 'Unknown Stage',
|
||||
status: 'unknown-status' as any,
|
||||
detail: '',
|
||||
}),
|
||||
];
|
||||
render(<ProcessingStatus stages={stagesWithUnknownStatus} estimatedTime={60} />);
|
||||
|
||||
const stageIcon = screen.getByTestId('stage-icon-0');
|
||||
// The icon container should be empty (no SVG or spinner rendered)
|
||||
expect(stageIcon.querySelector('svg')).not.toBeInTheDocument();
|
||||
expect(stageIcon.querySelector('.animate-spin')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should return empty string for unknown stage status text color', () => {
|
||||
const stagesWithUnknownStatus: ProcessingStage[] = [
|
||||
createMockProcessingStage({
|
||||
name: 'Unknown Stage',
|
||||
status: 'unknown-status' as any,
|
||||
detail: '',
|
||||
}),
|
||||
];
|
||||
render(<ProcessingStatus stages={stagesWithUnknownStatus} estimatedTime={60} />);
|
||||
|
||||
const stageText = screen.getByTestId('stage-text-0');
|
||||
// Should not have any of the known status color classes
|
||||
expect(stageText.className).not.toContain('text-brand-primary');
|
||||
expect(stageText.className).not.toContain('text-gray-700');
|
||||
expect(stageText.className).not.toContain('text-gray-400');
|
||||
expect(stageText.className).not.toContain('text-red-500');
|
||||
expect(stageText.className).not.toContain('text-yellow-600');
|
||||
});
|
||||
|
||||
it('should not render page progress bar when total is 1', () => {
|
||||
render(
|
||||
<ProcessingStatus stages={[]} estimatedTime={60} pageProgress={{ current: 1, total: 1 }} />,
|
||||
);
|
||||
expect(screen.queryByText(/converting pdf/i)).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should not render stage progress bar when total is 1', () => {
|
||||
const stagesWithSinglePageProgress: ProcessingStage[] = [
|
||||
createMockProcessingStage({
|
||||
name: 'Extracting Items',
|
||||
status: 'in-progress',
|
||||
progress: { current: 1, total: 1 },
|
||||
}),
|
||||
];
|
||||
render(<ProcessingStatus stages={stagesWithSinglePageProgress} estimatedTime={60} />);
|
||||
expect(screen.queryByText(/analyzing page/i)).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -21,3 +21,6 @@ export { useDeleteShoppingListMutation } from './useDeleteShoppingListMutation';
|
||||
export { useAddShoppingListItemMutation } from './useAddShoppingListItemMutation';
|
||||
export { useUpdateShoppingListItemMutation } from './useUpdateShoppingListItemMutation';
|
||||
export { useRemoveShoppingListItemMutation } from './useRemoveShoppingListItemMutation';
|
||||
|
||||
// Address mutations
|
||||
export { useGeocodeMutation } from './useGeocodeMutation';
|
||||
|
||||
@@ -60,7 +60,9 @@ describe('useAddShoppingListItemMutation', () => {
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.addShoppingListItem).toHaveBeenCalledWith(1, { customItemName: 'Special Milk' });
|
||||
expect(mockedApiClient.addShoppingListItem).toHaveBeenCalledWith(1, {
|
||||
customItemName: 'Special Milk',
|
||||
});
|
||||
});
|
||||
|
||||
it('should invalidate shopping-lists query on success', async () => {
|
||||
@@ -97,7 +99,7 @@ describe('useAddShoppingListItemMutation', () => {
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Item already exists');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
it('should handle API error when json parse fails', async () => {
|
||||
mockedApiClient.addShoppingListItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
@@ -114,6 +116,22 @@ describe('useAddShoppingListItemMutation', () => {
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should handle API error with empty message in response', async () => {
|
||||
mockedApiClient.addShoppingListItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 400,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ listId: 1, item: { masterItemId: 42 } });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to add item to shopping list');
|
||||
});
|
||||
|
||||
it('should handle network error', async () => {
|
||||
mockedApiClient.addShoppingListItem.mockRejectedValue(new Error('Network error'));
|
||||
|
||||
@@ -125,4 +143,18 @@ describe('useAddShoppingListItemMutation', () => {
|
||||
|
||||
expect(result.current.error?.message).toBe('Network error');
|
||||
});
|
||||
|
||||
it('should use fallback error message when error has no message', async () => {
|
||||
mockedApiClient.addShoppingListItem.mockRejectedValue(new Error(''));
|
||||
|
||||
const { result } = renderHook(() => useAddShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ listId: 1, item: { masterItemId: 42 } });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith(
|
||||
'Failed to add item to shopping list',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { notifySuccess, notifyError } from '../../services/notificationService';
|
||||
import { queryKeyBases } from '../../config/queryKeys';
|
||||
|
||||
interface AddShoppingListItemParams {
|
||||
listId: number;
|
||||
@@ -61,7 +62,7 @@ export const useAddShoppingListItemMutation = () => {
|
||||
},
|
||||
onSuccess: () => {
|
||||
// Invalidate and refetch shopping lists to get the updated list
|
||||
queryClient.invalidateQueries({ queryKey: ['shopping-lists'] });
|
||||
queryClient.invalidateQueries({ queryKey: queryKeyBases.shoppingLists });
|
||||
notifySuccess('Item added to shopping list');
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
|
||||
@@ -97,7 +97,7 @@ describe('useAddWatchedItemMutation', () => {
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Item already watched');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
it('should handle API error when json parse fails', async () => {
|
||||
mockedApiClient.addWatchedItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
@@ -112,4 +112,34 @@ describe('useAddWatchedItemMutation', () => {
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should handle API error with empty message in response', async () => {
|
||||
mockedApiClient.addWatchedItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 400,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useAddWatchedItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemName: 'Butter' });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to add watched item');
|
||||
});
|
||||
|
||||
it('should use fallback error message when error has no message', async () => {
|
||||
mockedApiClient.addWatchedItem.mockRejectedValue(new Error(''));
|
||||
|
||||
const { result } = renderHook(() => useAddWatchedItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemName: 'Yogurt' });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith(
|
||||
'Failed to add item to watched list',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { notifySuccess, notifyError } from '../../services/notificationService';
|
||||
import { queryKeyBases } from '../../config/queryKeys';
|
||||
|
||||
interface AddWatchedItemParams {
|
||||
itemName: string;
|
||||
@@ -50,7 +51,7 @@ export const useAddWatchedItemMutation = () => {
|
||||
},
|
||||
onSuccess: () => {
|
||||
// Invalidate and refetch watched items to get the updated list
|
||||
queryClient.invalidateQueries({ queryKey: ['watched-items'] });
|
||||
queryClient.invalidateQueries({ queryKey: queryKeyBases.watchedItems });
|
||||
notifySuccess('Item added to watched list');
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
|
||||
113
src/hooks/mutations/useAuthMutations.ts
Normal file
113
src/hooks/mutations/useAuthMutations.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
// src/hooks/mutations/useAuthMutations.ts
|
||||
import { useMutation } from '@tanstack/react-query';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { notifyError } from '../../services/notificationService';
|
||||
import type { UserProfile } from '../../types';
|
||||
|
||||
interface AuthResponse {
|
||||
userprofile: UserProfile;
|
||||
token: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutation hook for user login.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const loginMutation = useLoginMutation();
|
||||
* loginMutation.mutate({ email, password, rememberMe });
|
||||
* ```
|
||||
*/
|
||||
export const useLoginMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
email,
|
||||
password,
|
||||
rememberMe,
|
||||
}: {
|
||||
email: string;
|
||||
password: string;
|
||||
rememberMe: boolean;
|
||||
}): Promise<AuthResponse> => {
|
||||
const response = await apiClient.loginUser(email, password, rememberMe);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to login');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to login');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Mutation hook for user registration.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const registerMutation = useRegisterMutation();
|
||||
* registerMutation.mutate({ email, password, fullName });
|
||||
* ```
|
||||
*/
|
||||
export const useRegisterMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
email,
|
||||
password,
|
||||
fullName,
|
||||
}: {
|
||||
email: string;
|
||||
password: string;
|
||||
fullName: string;
|
||||
}): Promise<AuthResponse> => {
|
||||
const response = await apiClient.registerUser(email, password, fullName, '');
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to register');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to register');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Mutation hook for requesting a password reset.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const passwordResetMutation = usePasswordResetRequestMutation();
|
||||
* passwordResetMutation.mutate({ email });
|
||||
* ```
|
||||
*/
|
||||
export const usePasswordResetRequestMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async ({ email }: { email: string }): Promise<{ message: string }> => {
|
||||
const response = await apiClient.requestPasswordReset(email);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to request password reset');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to request password reset');
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -81,7 +81,7 @@ describe('useCreateShoppingListMutation', () => {
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('List name already exists');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
it('should handle API error when json parse fails', async () => {
|
||||
mockedApiClient.createShoppingList.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
@@ -96,4 +96,32 @@ describe('useCreateShoppingListMutation', () => {
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should handle API error with empty message in response', async () => {
|
||||
mockedApiClient.createShoppingList.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 400,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useCreateShoppingListMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ name: 'Empty Error' });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to create shopping list');
|
||||
});
|
||||
|
||||
it('should use fallback error message when error has no message', async () => {
|
||||
mockedApiClient.createShoppingList.mockRejectedValue(new Error(''));
|
||||
|
||||
const { result } = renderHook(() => useCreateShoppingListMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ name: 'New List' });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Failed to create shopping list');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { notifySuccess, notifyError } from '../../services/notificationService';
|
||||
import { queryKeyBases } from '../../config/queryKeys';
|
||||
|
||||
interface CreateShoppingListParams {
|
||||
name: string;
|
||||
@@ -48,7 +49,7 @@ export const useCreateShoppingListMutation = () => {
|
||||
},
|
||||
onSuccess: () => {
|
||||
// Invalidate and refetch shopping lists to get the updated list
|
||||
queryClient.invalidateQueries({ queryKey: ['shopping-lists'] });
|
||||
queryClient.invalidateQueries({ queryKey: queryKeyBases.shoppingLists });
|
||||
notifySuccess('Shopping list created');
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
|
||||
@@ -81,7 +81,7 @@ describe('useDeleteShoppingListMutation', () => {
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Shopping list not found');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
it('should handle API error when json parse fails', async () => {
|
||||
mockedApiClient.deleteShoppingList.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
@@ -96,4 +96,32 @@ describe('useDeleteShoppingListMutation', () => {
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should handle API error with empty message in response', async () => {
|
||||
mockedApiClient.deleteShoppingList.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 400,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useDeleteShoppingListMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ listId: 456 });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to delete shopping list');
|
||||
});
|
||||
|
||||
it('should use fallback error message when error has no message', async () => {
|
||||
mockedApiClient.deleteShoppingList.mockRejectedValue(new Error(''));
|
||||
|
||||
const { result } = renderHook(() => useDeleteShoppingListMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ listId: 789 });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Failed to delete shopping list');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { notifySuccess, notifyError } from '../../services/notificationService';
|
||||
import { queryKeyBases } from '../../config/queryKeys';
|
||||
|
||||
interface DeleteShoppingListParams {
|
||||
listId: number;
|
||||
@@ -48,7 +49,7 @@ export const useDeleteShoppingListMutation = () => {
|
||||
},
|
||||
onSuccess: () => {
|
||||
// Invalidate and refetch shopping lists to get the updated list
|
||||
queryClient.invalidateQueries({ queryKey: ['shopping-lists'] });
|
||||
queryClient.invalidateQueries({ queryKey: queryKeyBases.shoppingLists });
|
||||
notifySuccess('Shopping list deleted');
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
|
||||
46
src/hooks/mutations/useGeocodeMutation.ts
Normal file
46
src/hooks/mutations/useGeocodeMutation.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
// src/hooks/mutations/useGeocodeMutation.ts
|
||||
import { useMutation } from '@tanstack/react-query';
|
||||
import { geocodeAddress } from '../../services/apiClient';
|
||||
import { notifyError } from '../../services/notificationService';
|
||||
|
||||
interface GeocodeResult {
|
||||
lat: number;
|
||||
lng: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutation hook for geocoding an address string to coordinates.
|
||||
*
|
||||
* @returns TanStack Query mutation for geocoding
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const geocodeMutation = useGeocodeMutation();
|
||||
*
|
||||
* const handleGeocode = async () => {
|
||||
* const result = await geocodeMutation.mutateAsync('123 Main St, City, State');
|
||||
* if (result) {
|
||||
* console.log(result.lat, result.lng);
|
||||
* }
|
||||
* };
|
||||
* ```
|
||||
*/
|
||||
export const useGeocodeMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async (address: string): Promise<GeocodeResult> => {
|
||||
const response = await geocodeAddress(address);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Geocoding failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to geocode address');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to geocode address');
|
||||
},
|
||||
});
|
||||
};
|
||||
179
src/hooks/mutations/useProfileMutations.ts
Normal file
179
src/hooks/mutations/useProfileMutations.ts
Normal file
@@ -0,0 +1,179 @@
|
||||
// src/hooks/mutations/useProfileMutations.ts
|
||||
import { useMutation } from '@tanstack/react-query';
|
||||
import * as apiClient from '../../services/apiClient';
|
||||
import { notifyError } from '../../services/notificationService';
|
||||
import type { Profile, Address } from '../../types';
|
||||
|
||||
/**
|
||||
* Mutation hook for updating user profile.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const updateProfile = useUpdateProfileMutation();
|
||||
* updateProfile.mutate({ full_name: 'New Name', avatar_url: 'https://...' });
|
||||
* ```
|
||||
*/
|
||||
export const useUpdateProfileMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async (data: Partial<Profile>): Promise<Profile> => {
|
||||
const response = await apiClient.updateUserProfile(data);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to update profile');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to update profile');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Mutation hook for updating user address.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const updateAddress = useUpdateAddressMutation();
|
||||
* updateAddress.mutate({ street_address: '123 Main St', city: 'Toronto' });
|
||||
* ```
|
||||
*/
|
||||
export const useUpdateAddressMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async (data: Partial<Address>): Promise<Address> => {
|
||||
const response = await apiClient.updateUserAddress(data);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to update address');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to update address');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Mutation hook for updating user password.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const updatePassword = useUpdatePasswordMutation();
|
||||
* updatePassword.mutate({ password: 'newPassword123' });
|
||||
* ```
|
||||
*/
|
||||
export const useUpdatePasswordMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async ({ password }: { password: string }): Promise<void> => {
|
||||
const response = await apiClient.updateUserPassword(password);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to update password');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to update password');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Mutation hook for updating user preferences.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const updatePreferences = useUpdatePreferencesMutation();
|
||||
* updatePreferences.mutate({ darkMode: true });
|
||||
* ```
|
||||
*/
|
||||
export const useUpdatePreferencesMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async (prefs: Partial<Profile['preferences']>): Promise<Profile> => {
|
||||
const response = await apiClient.updateUserPreferences(prefs);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to update preferences');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to update preferences');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Mutation hook for exporting user data.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const exportData = useExportDataMutation();
|
||||
* exportData.mutate();
|
||||
* ```
|
||||
*/
|
||||
export const useExportDataMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async (): Promise<unknown> => {
|
||||
const response = await apiClient.exportUserData();
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to export data');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to export data');
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Mutation hook for deleting user account.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const deleteAccount = useDeleteAccountMutation();
|
||||
* deleteAccount.mutate({ password: 'currentPassword' });
|
||||
* ```
|
||||
*/
|
||||
export const useDeleteAccountMutation = () => {
|
||||
return useMutation({
|
||||
mutationFn: async ({ password }: { password: string }): Promise<void> => {
|
||||
const response = await apiClient.deleteUserAccount(password);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json().catch(() => ({
|
||||
message: `Request failed with status ${response.status}`,
|
||||
}));
|
||||
throw new Error(error.message || 'Failed to delete account');
|
||||
}
|
||||
|
||||
return response.json();
|
||||
},
|
||||
onError: (error: Error) => {
|
||||
notifyError(error.message || 'Failed to delete account');
|
||||
},
|
||||
});
|
||||
};
|
||||
@@ -44,7 +44,9 @@ describe('useRemoveShoppingListItemMutation', () => {
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true));
|
||||
|
||||
expect(mockedApiClient.removeShoppingListItem).toHaveBeenCalledWith(42);
|
||||
expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith('Item removed from shopping list');
|
||||
expect(mockedNotifications.notifySuccess).toHaveBeenCalledWith(
|
||||
'Item removed from shopping list',
|
||||
);
|
||||
});
|
||||
|
||||
it('should invalidate shopping-lists query on success', async () => {
|
||||
@@ -81,7 +83,7 @@ describe('useRemoveShoppingListItemMutation', () => {
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith('Item not found');
|
||||
});
|
||||
|
||||
it('should handle API error without message', async () => {
|
||||
it('should handle API error when json parse fails', async () => {
|
||||
mockedApiClient.removeShoppingListItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 500,
|
||||
@@ -96,4 +98,34 @@ describe('useRemoveShoppingListItemMutation', () => {
|
||||
|
||||
expect(result.current.error?.message).toBe('Request failed with status 500');
|
||||
});
|
||||
|
||||
it('should handle API error with empty message in response', async () => {
|
||||
mockedApiClient.removeShoppingListItem.mockResolvedValue({
|
||||
ok: false,
|
||||
status: 400,
|
||||
json: () => Promise.resolve({ message: '' }),
|
||||
} as Response);
|
||||
|
||||
const { result } = renderHook(() => useRemoveShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 88 });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(result.current.error?.message).toBe('Failed to remove shopping list item');
|
||||
});
|
||||
|
||||
it('should use fallback error message when error has no message', async () => {
|
||||
mockedApiClient.removeShoppingListItem.mockRejectedValue(new Error(''));
|
||||
|
||||
const { result } = renderHook(() => useRemoveShoppingListItemMutation(), { wrapper });
|
||||
|
||||
result.current.mutate({ itemId: 555 });
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true));
|
||||
|
||||
expect(mockedNotifications.notifyError).toHaveBeenCalledWith(
|
||||
'Failed to remove shopping list item',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user