Compare commits

...

15 Commits

Author SHA1 Message Date
Gitea Actions
acbcb39cbe ci: Bump version to 0.9.97 [skip ci] 2026-01-13 03:34:42 +05:00
a87a0b6af1 unit test repairs
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 17m12s
2026-01-12 14:31:41 -08:00
Gitea Actions
abdc3cb6db ci: Bump version to 0.9.96 [skip ci] 2026-01-13 00:52:54 +05:00
7a1bd50119 unit test repairs
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 17m42s
2026-01-12 11:51:48 -08:00
Gitea Actions
87d75d0571 ci: Bump version to 0.9.95 [skip ci] 2026-01-13 00:04:10 +05:00
faf2900c28 unit test repairs
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 16m43s
2026-01-12 10:58:00 -08:00
Gitea Actions
5258efc179 ci: Bump version to 0.9.94 [skip ci] 2026-01-12 21:11:57 +05:00
2a5cc5bb51 unit test repairs
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 1m17s
2026-01-12 08:10:37 -08:00
Gitea Actions
8eaee2844f ci: Bump version to 0.9.93 [skip ci] 2026-01-12 08:57:24 +05:00
440a19c3a7 whoa - so much - new features (UPC,etc) - Sentry for app logging! so much more !
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 14m53s
2026-01-11 19:55:10 -08:00
4ae6d84240 sql fix
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Has been cancelled
2026-01-11 19:49:13 -08:00
Gitea Actions
5870e5c614 ci: Bump version to 0.9.92 [skip ci] 2026-01-12 08:20:09 +05:00
2e7ebbd9ed whoa - so much - new features (UPC,etc) - Sentry for app logging! so much more !
All checks were successful
Deploy to Test Environment / deploy-to-test (push) Successful in 14m47s
2026-01-11 19:18:52 -08:00
Gitea Actions
dc3fa21359 ci: Bump version to 0.9.91 [skip ci] 2026-01-12 08:08:50 +05:00
11aeac5edd whoa - so much - new features (UPC,etc) - Sentry for app logging! so much more !
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 1m10s
2026-01-11 19:07:02 -08:00
116 changed files with 25058 additions and 293 deletions

View File

@@ -88,7 +88,10 @@
"Bash(find:*)",
"Bash(\"/c/Users/games3/.local/bin/uvx.exe\" markitdown-mcp --help)",
"Bash(git stash:*)",
"Bash(ping:*)"
"Bash(ping:*)",
"Bash(tee:*)",
"Bash(timeout 1800 podman exec flyer-crawler-dev npm run test:unit:*)",
"mcp__filesystem__edit_file"
]
}
}

View File

@@ -83,3 +83,22 @@ CLEANUP_WORKER_CONCURRENCY=10
# Worker lock duration in milliseconds (default: 2 minutes)
WORKER_LOCK_DURATION=120000
# ===================
# Error Tracking (ADR-015)
# ===================
# Sentry-compatible error tracking via Bugsink (self-hosted)
# DSNs are created in the Bugsink UI at http://localhost:8000 (dev) or your production URL
# Backend DSN - for Express/Node.js errors
SENTRY_DSN=
# Frontend DSN - for React/browser errors (uses VITE_ prefix)
VITE_SENTRY_DSN=
# Environment name for error grouping (defaults to NODE_ENV)
SENTRY_ENVIRONMENT=development
VITE_SENTRY_ENVIRONMENT=development
# Enable/disable error tracking (default: true)
SENTRY_ENABLED=true
VITE_SENTRY_ENABLED=true
# Enable debug mode for SDK troubleshooting (default: false)
SENTRY_DEBUG=false
VITE_SENTRY_DEBUG=false

View File

@@ -98,6 +98,9 @@ jobs:
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN }}" \
VITE_SENTRY_ENVIRONMENT="production" \
VITE_SENTRY_ENABLED="true" \
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
- name: Deploy Application to Production Server
@@ -135,6 +138,10 @@ jobs:
GOOGLE_CLIENT_SECRET: ${{ secrets.GOOGLE_CLIENT_SECRET }}
GITHUB_CLIENT_ID: ${{ secrets.GH_CLIENT_ID }}
GITHUB_CLIENT_SECRET: ${{ secrets.GH_CLIENT_SECRET }}
# Sentry/Bugsink Error Tracking (ADR-015)
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
SENTRY_ENVIRONMENT: 'production'
SENTRY_ENABLED: 'true'
run: |
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."

View File

@@ -386,6 +386,9 @@ jobs:
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN_TEST }}" \
VITE_SENTRY_ENVIRONMENT="test" \
VITE_SENTRY_ENABLED="true" \
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
- name: Deploy Application to Test Server
@@ -446,6 +449,10 @@ jobs:
SMTP_USER: '' # Using MailHog, no auth needed
SMTP_PASS: '' # Using MailHog, no auth needed
SMTP_FROM_EMAIL: 'noreply@flyer-crawler-test.projectium.com'
# Sentry/Bugsink Error Tracking (ADR-015)
SENTRY_DSN: ${{ secrets.SENTRY_DSN_TEST }}
SENTRY_ENVIRONMENT: 'test'
SENTRY_ENABLED: 'true'
run: |
# Fail-fast check to ensure secrets are configured in Gitea.

5
.nycrc.json Normal file
View File

@@ -0,0 +1,5 @@
{
"text": {
"maxCols": 200
}
}

177
CLAUDE.md
View File

@@ -99,6 +99,26 @@ This prevents linting/type errors from being introduced into the codebase.
| `npm run build` | Build for production |
| `npm run type-check` | Run TypeScript type checking |
## Database Schema Files
**CRITICAL**: The database schema files must be kept in sync with each other. When making schema changes:
| File | Purpose |
| ------------------------------ | ----------------------------------------------------------- |
| `sql/master_schema_rollup.sql` | Complete schema used by test database setup and reference |
| `sql/initial_schema.sql` | Base schema without seed data, used as standalone reference |
| `sql/migrations/*.sql` | Incremental migrations for production database updates |
**Maintenance Rules:**
1. **Keep `master_schema_rollup.sql` and `initial_schema.sql` in sync** - These files should contain the same table definitions
2. **When adding columns via migration**, also add them to both `master_schema_rollup.sql` and `initial_schema.sql`
3. **Migrations are for production deployments** - They use `ALTER TABLE` to add columns incrementally
4. **Schema files are for fresh installs** - They define the complete table structure
5. **Test database uses `master_schema_rollup.sql`** - If schema files are out of sync with migrations, tests will fail
**Example:** When `002_expiry_tracking.sql` adds `purchase_date` to `pantry_items`, that column must also exist in the `CREATE TABLE` statements in both `master_schema_rollup.sql` and `initial_schema.sql`.
## Known Integration Test Issues and Solutions
This section documents common test issues encountered in integration tests, their root causes, and solutions. These patterns recur frequently.
@@ -190,22 +210,153 @@ cb(null, `${file.fieldname}-${uniqueSuffix}-${sanitizedOriginalName}`);
**Solution:** Use try/catch with graceful degradation or mock the external service checks.
## Secrets and Environment Variables
**CRITICAL**: This project uses **Gitea CI/CD secrets** for all sensitive configuration. There is NO `/etc/flyer-crawler/environment` file or similar local config file on the server.
### Server Directory Structure
| Path | Environment | Notes |
| --------------------------------------------- | ----------- | ------------------------------------------------ |
| `/var/www/flyer-crawler.projectium.com/` | Production | NO `.env` file - secrets injected via CI/CD only |
| `/var/www/flyer-crawler-test.projectium.com/` | Test | Has `.env.test` file for test-specific config |
### How Secrets Work
1. **Gitea Secrets**: All secrets are stored in Gitea repository settings (Settings → Secrets)
2. **CI/CD Injection**: Secrets are injected during deployment via `.gitea/workflows/deploy-to-prod.yml` and `deploy-to-test.yml`
3. **PM2 Environment**: The CI/CD workflow passes secrets to PM2 via environment variables, which are then available to the application
### Key Files for Configuration
| File | Purpose |
| ------------------------------------- | ---------------------------------------------------- |
| `src/config/env.ts` | Centralized config with Zod schema validation |
| `ecosystem.config.cjs` | PM2 process config - reads from `process.env` |
| `.gitea/workflows/deploy-to-prod.yml` | Production deployment with secret injection |
| `.gitea/workflows/deploy-to-test.yml` | Test deployment with secret injection |
| `.env.example` | Template showing all available environment variables |
| `.env.test` | Test environment overrides (only on test server) |
### Adding New Secrets
To add a new secret (e.g., `SENTRY_DSN`):
1. Add the secret to Gitea repository settings
2. Update the relevant workflow file (e.g., `deploy-to-prod.yml`) to inject it:
```yaml
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
```
3. Update `ecosystem.config.cjs` to read it from `process.env`
4. Update `src/config/env.ts` schema if validation is needed
5. Update `.env.example` to document the new variable
### Current Gitea Secrets
**Shared (used by both environments):**
- `DB_HOST`, `DB_USER`, `DB_PASSWORD` - Database credentials
- `JWT_SECRET` - Authentication
- `GOOGLE_MAPS_API_KEY` - Google Maps
- `GOOGLE_CLIENT_ID`, `GOOGLE_CLIENT_SECRET` - Google OAuth
- `GH_CLIENT_ID`, `GH_CLIENT_SECRET` - GitHub OAuth
**Production-specific:**
- `DB_DATABASE_PROD` - Production database name
- `REDIS_PASSWORD_PROD` - Redis password (uses database 0)
- `VITE_GOOGLE_GENAI_API_KEY` - Gemini API key for production
- `SENTRY_DSN`, `VITE_SENTRY_DSN` - Bugsink error tracking DSNs (production projects)
**Test-specific:**
- `DB_DATABASE_TEST` - Test database name
- `REDIS_PASSWORD_TEST` - Redis password (uses database 1 for isolation)
- `VITE_GOOGLE_GENAI_API_KEY_TEST` - Gemini API key for test
- `SENTRY_DSN_TEST`, `VITE_SENTRY_DSN_TEST` - Bugsink error tracking DSNs (test projects)
### Test Environment
The test environment (`flyer-crawler-test.projectium.com`) uses **both** Gitea CI/CD secrets and a local `.env.test` file:
- **Gitea secrets**: Injected during deployment via `.gitea/workflows/deploy-to-test.yml`
- **`.env.test` file**: Located at `/var/www/flyer-crawler-test.projectium.com/.env.test` for local overrides
- **Redis database 1**: Isolates test job queues from production (which uses database 0)
- **PM2 process names**: Suffixed with `-test` (e.g., `flyer-crawler-api-test`)
### Dev Container Environment
The dev container runs its own **local Bugsink instance** - it does NOT connect to the production Bugsink server:
- **Local Bugsink**: Runs at `http://localhost:8000` inside the container
- **Pre-configured DSNs**: Set in `compose.dev.yml`, pointing to local instance
- **Admin credentials**: `admin@localhost` / `admin`
- **Isolated**: Dev errors stay local, don't pollute production/test dashboards
- **No Gitea secrets needed**: Everything is self-contained in the container
---
## MCP Servers
The following MCP servers are configured for this project:
| Server | Purpose |
| ------------------- | ---------------------------------------- |
| gitea-projectium | Gitea API for gitea.projectium.com |
| gitea-torbonium | Gitea API for gitea.torbonium.com |
| podman | Container management |
| filesystem | File system access |
| fetch | Web fetching |
| markitdown | Convert documents to markdown |
| sequential-thinking | Step-by-step reasoning |
| memory | Knowledge graph persistence |
| postgres | Direct database queries (localhost:5432) |
| playwright | Browser automation and testing |
| redis | Redis cache inspection (localhost:6379) |
| Server | Purpose |
| --------------------- | ------------------------------------------- |
| gitea-projectium | Gitea API for gitea.projectium.com |
| gitea-torbonium | Gitea API for gitea.torbonium.com |
| podman | Container management |
| filesystem | File system access |
| fetch | Web fetching |
| markitdown | Convert documents to markdown |
| sequential-thinking | Step-by-step reasoning |
| memory | Knowledge graph persistence |
| postgres | Direct database queries (localhost:5432) |
| playwright | Browser automation and testing |
| redis | Redis cache inspection (localhost:6379) |
| sentry-selfhosted-mcp | Error tracking via Bugsink (localhost:8000) |
**Note:** MCP servers are currently only available in the **Claude CLI**. Due to a bug in the Claude VS Code extension, MCP servers do not yet work there.
### Sentry/Bugsink MCP Server Setup (ADR-015)
To enable Claude Code to query and analyze application errors from Bugsink:
1. **Install the MCP server**:
```bash
# Clone the sentry-selfhosted-mcp repository
git clone https://github.com/ddfourtwo/sentry-selfhosted-mcp.git
cd sentry-selfhosted-mcp
npm install
```
2. **Configure Claude Code** (add to `.claude/mcp.json`):
```json
{
"sentry-selfhosted-mcp": {
"command": "node",
"args": ["/path/to/sentry-selfhosted-mcp/dist/index.js"],
"env": {
"SENTRY_URL": "http://localhost:8000",
"SENTRY_AUTH_TOKEN": "<get-from-bugsink-ui>",
"SENTRY_ORG_SLUG": "flyer-crawler"
}
}
}
```
3. **Get the auth token**:
- Navigate to Bugsink UI at `http://localhost:8000`
- Log in with admin credentials
- Go to Settings > API Keys
- Create a new API key with read access
4. **Available capabilities**:
- List projects and issues
- View detailed error events
- Search by error message or stack trace
- Update issue status (resolve, ignore)
- Add comments to issues

View File

@@ -204,8 +204,68 @@ pm2 restart flyer-crawler-api
---
## Error Tracking with Bugsink (ADR-015)
Bugsink is a self-hosted Sentry-compatible error tracking system. See [docs/adr/0015-application-performance-monitoring-and-error-tracking.md](docs/adr/0015-application-performance-monitoring-and-error-tracking.md) for the full architecture decision.
### Creating Bugsink Projects and DSNs
After Bugsink is installed and running, you need to create projects and obtain DSNs:
1. **Access Bugsink UI**: Navigate to `http://localhost:8000`
2. **Log in** with your admin credentials
3. **Create Backend Project**:
- Click "Create Project"
- Name: `flyer-crawler-backend`
- Platform: Node.js
- Copy the generated DSN (format: `http://<key>@localhost:8000/<project_id>`)
4. **Create Frontend Project**:
- Click "Create Project"
- Name: `flyer-crawler-frontend`
- Platform: React
- Copy the generated DSN
5. **Configure Environment Variables**:
```bash
# Backend (server-side)
export SENTRY_DSN=http://<backend-key>@localhost:8000/<backend-project-id>
# Frontend (client-side, exposed to browser)
export VITE_SENTRY_DSN=http://<frontend-key>@localhost:8000/<frontend-project-id>
# Shared settings
export SENTRY_ENVIRONMENT=production
export VITE_SENTRY_ENVIRONMENT=production
export SENTRY_ENABLED=true
export VITE_SENTRY_ENABLED=true
```
### Testing Error Tracking
Verify Bugsink is receiving events:
```bash
npx tsx scripts/test-bugsink.ts
```
This sends test error and info events. Check the Bugsink UI for:
- `BugsinkTestError` in the backend project
- Info message "Test info message from test-bugsink.ts"
### Sentry SDK v10+ HTTP DSN Limitation
The Sentry SDK v10+ enforces HTTPS-only DSNs by default. Since Bugsink runs locally over HTTP, our implementation uses the Sentry Store API directly instead of the SDK's built-in transport. This is handled transparently by the `sentry.server.ts` and `sentry.client.ts` modules.
---
## Related Documentation
- [Database Setup](DATABASE.md) - PostgreSQL and PostGIS configuration
- [Authentication Setup](AUTHENTICATION.md) - OAuth provider configuration
- [Installation Guide](INSTALL.md) - Local development setup
- [Bare-Metal Server Setup](docs/BARE-METAL-SETUP.md) - Manual server installation guide

View File

@@ -65,8 +65,67 @@ RUN python3 -m venv /opt/bugsink \
&& /opt/bugsink/bin/pip install --upgrade pip \
&& /opt/bugsink/bin/pip install bugsink gunicorn psycopg2-binary
# Create Bugsink directories
RUN mkdir -p /var/log/bugsink /var/lib/bugsink
# Create Bugsink directories and configuration
RUN mkdir -p /var/log/bugsink /var/lib/bugsink /opt/bugsink/conf
# Create Bugsink configuration file (Django settings module)
# This file is imported by bugsink-manage via DJANGO_SETTINGS_MODULE
# Based on bugsink/conf_templates/docker.py.template but customized for our setup
RUN echo 'import os\n\
from urllib.parse import urlparse\n\
\n\
from bugsink.settings.default import *\n\
from bugsink.settings.default import DATABASES, SILENCED_SYSTEM_CHECKS\n\
from bugsink.conf_utils import deduce_allowed_hosts, deduce_script_name\n\
\n\
IS_DOCKER = True\n\
\n\
# Security settings\n\
SECRET_KEY = os.getenv("SECRET_KEY")\n\
DEBUG = os.getenv("DEBUG", "False").lower() in ("true", "1", "yes")\n\
\n\
# Silence cookie security warnings for dev (no HTTPS)\n\
SILENCED_SYSTEM_CHECKS += ["security.W012", "security.W016"]\n\
\n\
# Database configuration from DATABASE_URL environment variable\n\
if os.getenv("DATABASE_URL"):\n\
DATABASE_URL = os.getenv("DATABASE_URL")\n\
parsed = urlparse(DATABASE_URL)\n\
\n\
if parsed.scheme in ["postgres", "postgresql"]:\n\
DATABASES["default"] = {\n\
"ENGINE": "django.db.backends.postgresql",\n\
"NAME": parsed.path.lstrip("/"),\n\
"USER": parsed.username,\n\
"PASSWORD": parsed.password,\n\
"HOST": parsed.hostname,\n\
"PORT": parsed.port or "5432",\n\
}\n\
\n\
# Snappea (background task runner) settings\n\
SNAPPEA = {\n\
"TASK_ALWAYS_EAGER": False,\n\
"WORKAHOLIC": True,\n\
"NUM_WORKERS": 2,\n\
"PID_FILE": None,\n\
}\n\
DATABASES["snappea"]["NAME"] = "/tmp/snappea.sqlite3"\n\
\n\
# Site settings\n\
_PORT = os.getenv("PORT", "8000")\n\
BUGSINK = {\n\
"BASE_URL": os.getenv("BASE_URL", f"http://localhost:{_PORT}"),\n\
"SITE_TITLE": os.getenv("SITE_TITLE", "Flyer Crawler Error Tracking"),\n\
"SINGLE_USER": os.getenv("SINGLE_USER", "True").lower() in ("true", "1", "yes"),\n\
"SINGLE_TEAM": os.getenv("SINGLE_TEAM", "True").lower() in ("true", "1", "yes"),\n\
"PHONEHOME": False,\n\
}\n\
\n\
ALLOWED_HOSTS = deduce_allowed_hosts(BUGSINK["BASE_URL"])\n\
\n\
# Console email backend for dev\n\
EMAIL_BACKEND = "bugsink.email_backends.QuietConsoleEmailBackend"\n\
' > /opt/bugsink/conf/bugsink_conf.py
# Create Bugsink startup script
# Uses DATABASE_URL environment variable (standard Docker approach per docs)
@@ -78,6 +137,11 @@ export DATABASE_URL="postgresql://${BUGSINK_DB_USER:-bugsink}:${BUGSINK_DB_PASSW
# SECRET_KEY is required by Bugsink/Django\n\
export SECRET_KEY="${BUGSINK_SECRET_KEY:-dev-bugsink-secret-key-minimum-50-characters-for-security}"\n\
\n\
# Create superuser if not exists (for dev convenience)\n\
if [ -n "$BUGSINK_ADMIN_EMAIL" ] && [ -n "$BUGSINK_ADMIN_PASSWORD" ]; then\n\
export CREATE_SUPERUSER="${BUGSINK_ADMIN_EMAIL}:${BUGSINK_ADMIN_PASSWORD}"\n\
fi\n\
\n\
# Wait for PostgreSQL to be ready\n\
until pg_isready -h ${BUGSINK_DB_HOST:-postgres} -p ${BUGSINK_DB_PORT:-5432} -U ${BUGSINK_DB_USER:-bugsink}; do\n\
echo "Waiting for PostgreSQL..."\n\
@@ -87,13 +151,25 @@ done\n\
echo "PostgreSQL is ready. Starting Bugsink..."\n\
echo "DATABASE_URL: postgresql://${BUGSINK_DB_USER}:***@${BUGSINK_DB_HOST}:${BUGSINK_DB_PORT}/${BUGSINK_DB_NAME}"\n\
\n\
# Change to config directory so bugsink_conf.py can be found\n\
cd /opt/bugsink/conf\n\
\n\
# Run migrations\n\
echo "Running database migrations..."\n\
/opt/bugsink/bin/bugsink-manage migrate --noinput\n\
\n\
# Create superuser if not exists (for dev convenience)\n\
if [ -n "$BUGSINK_ADMIN_EMAIL" ] && [ -n "$BUGSINK_ADMIN_PASSWORD" ]; then\n\
export CREATE_SUPERUSER="${BUGSINK_ADMIN_EMAIL}:${BUGSINK_ADMIN_PASSWORD}"\n\
echo "Superuser configured: ${BUGSINK_ADMIN_EMAIL}"\n\
# Create superuser if CREATE_SUPERUSER is set (format: email:password)\n\
if [ -n "$CREATE_SUPERUSER" ]; then\n\
IFS=":" read -r ADMIN_EMAIL ADMIN_PASS <<< "$CREATE_SUPERUSER"\n\
/opt/bugsink/bin/bugsink-manage shell -c "\n\
from django.contrib.auth import get_user_model\n\
User = get_user_model()\n\
if not User.objects.filter(email='"'"'$ADMIN_EMAIL'"'"').exists():\n\
User.objects.create_superuser('"'"'$ADMIN_EMAIL'"'"', '"'"'$ADMIN_PASS'"'"')\n\
print('"'"'Superuser created'"'"')\n\
else:\n\
print('"'"'Superuser already exists'"'"')\n\
" || true\n\
fi\n\
\n\
# Start Bugsink with Gunicorn\n\

View File

@@ -103,6 +103,7 @@ You are now inside the Ubuntu container's shell.
```
4. **Install Project Dependencies**:
```bash
npm install
```

View File

@@ -78,6 +78,15 @@ services:
- BUGSINK_ADMIN_EMAIL=admin@localhost
- BUGSINK_ADMIN_PASSWORD=admin
- BUGSINK_SECRET_KEY=dev-bugsink-secret-key-minimum-50-characters-for-security
# Sentry SDK configuration (points to local Bugsink)
- SENTRY_DSN=http://59a58583-e869-7697-f94a-cfa0337676a8@localhost:8000/1
- VITE_SENTRY_DSN=http://d5fc5221-4266-ff2f-9af8-5689696072f3@localhost:8000/2
- SENTRY_ENVIRONMENT=development
- VITE_SENTRY_ENVIRONMENT=development
- SENTRY_ENABLED=true
- VITE_SENTRY_ENABLED=true
- SENTRY_DEBUG=true
- VITE_SENTRY_DEBUG=true
depends_on:
postgres:
condition: service_healthy

1132
docs/BARE-METAL-SETUP.md Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -54,6 +54,7 @@ The React frontend will integrate `@sentry/react` SDK to:
- Capture unhandled JavaScript errors
- Report errors with component stack traces
- Track user session context
- **Frontend Error Correlation**: The global API client (Axios/Fetch wrapper) MUST intercept 4xx/5xx responses. It MUST extract the `x-request-id` header (if present) and attach it to the Sentry scope as a tag `api_request_id` before re-throwing the error. This allows developers to copy the ID from Sentry and search for it in backend logs.
### 4. Log Aggregation: Logstash

View File

@@ -0,0 +1,54 @@
# ADR-051: Asynchronous Context Propagation
**Date**: 2026-01-11
**Status**: Accepted (Implemented)
## Context
Debugging asynchronous workflows is difficult because the `request_id` generated at the API layer is lost when a task is handed off to a background queue (BullMQ). Logs from the worker appear disconnected from the user action that triggered them.
## Decision
We will implement a context propagation pattern for all background jobs:
1. **Job Data Payload**: All job data interfaces MUST include a `meta` object containing `requestId`, `userId`, and `origin`.
2. **Worker Logger Initialization**: All BullMQ workers MUST initialize a child logger immediately upon processing a job, using the metadata passed in the payload.
3. **Correlation**: The worker's logger must use the _same_ `request_id` as the initiating API request.
## Implementation
```typescript
// 1. Enqueueing (API Layer)
await queue.add('process-flyer', {
...data,
meta: {
requestId: req.log.bindings().request_id, // Propagate ID
userId: req.user.id,
},
});
// 2. Processing (Worker Layer)
const worker = new Worker('queue', async (job) => {
const { requestId, userId } = job.data.meta || {};
// Create context-aware logger for this specific job execution
const jobLogger = logger.child({
request_id: requestId || uuidv4(), // Use propagated ID or generate new
user_id: userId,
job_id: job.id,
service: 'worker',
});
try {
await processJob(job.data, jobLogger); // Pass logger down
} catch (err) {
jobLogger.error({ err }, 'Job failed');
throw err;
}
});
```
## Consequences
**Positive**: Complete traceability from API request -> Queue -> Worker execution. Drastically reduces time to find "what happened" to a specific user request.

View File

@@ -0,0 +1,42 @@
# ADR-052: Granular Debug Logging Strategy
**Date**: 2026-01-11
**Status**: Proposed
## Context
Global log levels (INFO vs DEBUG) are too coarse. Developers need to inspect detailed debug information for specific subsystems (e.g., `ai-service`, `db-pool`) without being flooded by logs from the entire application.
## Decision
We will adopt a namespace-based debug filter pattern, similar to the `debug` npm package, but integrated into our Pino logger.
1. **Logger Namespaces**: Every service/module logger must be initialized with a `module` property (e.g., `logger.child({ module: 'ai-service' })`).
2. **Environment Filter**: We will support a `DEBUG_MODULES` environment variable that overrides the log level for matching modules.
## Implementation
In `src/services/logger.server.ts`:
```typescript
const debugModules = (process.env.DEBUG_MODULES || '').split(',').map((s) => s.trim());
export const createScopedLogger = (moduleName: string) => {
// If DEBUG_MODULES contains "ai-service" or "*", force level to 'debug'
const isDebugEnabled = debugModules.includes('*') || debugModules.includes(moduleName);
return logger.child({
module: moduleName,
level: isDebugEnabled ? 'debug' : logger.level,
});
};
```
## Usage
To debug only AI and Database interactions:
```bash
DEBUG_MODULES=ai-service,db-repo npm run dev
```

View File

@@ -0,0 +1,62 @@
# ADR-053: Worker Health Checks and Stalled Job Monitoring
**Date**: 2026-01-11
**Status**: Proposed
## Context
Our application relies heavily on background workers (BullMQ) for flyer processing, analytics, and emails. If a worker process crashes (e.g., Out of Memory) or hangs, jobs may remain in the 'active' state indefinitely ("stalled") until BullMQ's fail-safe triggers.
Currently, we lack:
1. Visibility into queue depths and worker status via HTTP endpoints (for uptime monitors).
2. A mechanism to detect whether the worker process itself is alive, beyond just queue statistics.
3. Explicit configuration to ensure stalled jobs are recovered quickly.
## Decision
We will implement a multi-layered health check strategy for background workers:
1. **Queue Metrics Endpoint**: Expose a protected endpoint `GET /health/queues` that returns the counts (waiting, active, failed) for all critical queues.
2. **Stalled Job Configuration**: Explicitly configure BullMQ workers with aggressive stall detection settings to recover quickly from crashes.
3. **Worker Heartbeats**: Workers will periodically update a "heartbeat" key in Redis. The health endpoint will check if this timestamp is recent.
## Implementation
### 1. BullMQ Worker Settings
Workers must be initialized with specific options to handle stalls:
```typescript
const workerOptions = {
// Check for stalled jobs every 30 seconds
stalledInterval: 30000,
// Fail job after 3 stalls (prevents infinite loops causing infinite retries)
maxStalledCount: 3,
// Duration of the lock for the job in milliseconds.
// If the worker doesn't renew this (e.g. crash), the job stalls.
lockDuration: 30000,
};
```
### 2. Health Endpoint Logic
The `/health/queues` endpoint will:
1. Iterate through all defined queues (`flyerQueue`, `emailQueue`, etc.).
2. Fetch job counts (`waiting`, `active`, `failed`, `delayed`).
3. Return a 200 OK if queues are accessible, or 503 if Redis is unreachable.
4. (Future) Return 500 if the `waiting` count exceeds a critical threshold for too long.
## Consequences
**Positive**:
- Early detection of stuck processing pipelines.
- Automatic recovery of stalled jobs via BullMQ configuration.
- Metrics available for external monitoring tools (e.g., UptimeRobot, Datadog).
**Negative**:
- Requires configuring external monitoring to poll the new endpoint.

854
package-lock.json generated
View File

@@ -1,16 +1,18 @@
{
"name": "flyer-crawler",
"version": "0.9.90",
"version": "0.9.97",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.9.90",
"version": "0.9.97",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",
"@google/genai": "^1.30.0",
"@sentry/node": "^10.32.1",
"@sentry/react": "^10.32.1",
"@tanstack/react-query": "^5.90.12",
"@types/connect-timeout": "^1.9.0",
"bcrypt": "^5.1.1",
@@ -49,7 +51,8 @@
"swagger-ui-express": "^5.0.1",
"tsx": "^4.20.6",
"zod": "^4.2.1",
"zxcvbn": "^4.4.2"
"zxcvbn": "^4.4.2",
"zxing-wasm": "^2.2.4"
},
"devDependencies": {
"@tailwindcss/postcss": "4.1.17",
@@ -187,6 +190,23 @@
"openapi-types": ">=7"
}
},
"node_modules/@apm-js-collab/code-transformer": {
"version": "0.8.2",
"resolved": "https://registry.npmjs.org/@apm-js-collab/code-transformer/-/code-transformer-0.8.2.tgz",
"integrity": "sha512-YRjJjNq5KFSjDUoqu5pFUWrrsvGOxl6c3bu+uMFc9HNNptZ2rNU/TI2nLw4jnhQNtka972Ee2m3uqbvDQtPeCA==",
"license": "Apache-2.0"
},
"node_modules/@apm-js-collab/tracing-hooks": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/@apm-js-collab/tracing-hooks/-/tracing-hooks-0.3.1.tgz",
"integrity": "sha512-Vu1CbmPURlN5fTboVuKMoJjbO5qcq9fA5YXpskx3dXe/zTBvjODFoerw+69rVBlRLrJpwPqSDqEuJDEKIrTldw==",
"license": "Apache-2.0",
"dependencies": {
"@apm-js-collab/code-transformer": "^0.8.0",
"debug": "^4.4.1",
"module-details-from-path": "^1.0.4"
}
},
"node_modules/@asamuzakjp/css-color": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.1.tgz",
@@ -3569,6 +3589,524 @@
"dev": true,
"license": "MIT"
},
"node_modules/@opentelemetry/api": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
"integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==",
"license": "Apache-2.0",
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@opentelemetry/api-logs": {
"version": "0.208.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.208.0.tgz",
"integrity": "sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/api": "^1.3.0"
},
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@opentelemetry/context-async-hooks": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-2.3.0.tgz",
"integrity": "sha512-hGcsT0qDP7Il1L+qT3JFpiGl1dCjF794Bb4yCRCYdr7XC0NwHtOF3ngF86Gk6TUnsakbyQsDQ0E/S4CU0F4d4g==",
"license": "Apache-2.0",
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": ">=1.0.0 <1.10.0"
}
},
"node_modules/@opentelemetry/core": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.3.0.tgz",
"integrity": "sha512-PcmxJQzs31cfD0R2dE91YGFcLxOSN4Bxz7gez5UwSUjCai8BwH/GI5HchfVshHkWdTkUs0qcaPJgVHKXUp7I3A==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/semantic-conventions": "^1.29.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": ">=1.0.0 <1.10.0"
}
},
"node_modules/@opentelemetry/instrumentation": {
"version": "0.208.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.208.0.tgz",
"integrity": "sha512-Eju0L4qWcQS+oXxi6pgh7zvE2byogAkcsVv0OjHF/97iOz1N/aKE6etSGowYkie+YA1uo6DNwdSxaaNnLvcRlA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/api-logs": "0.208.0",
"import-in-the-middle": "^2.0.0",
"require-in-the-middle": "^8.0.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-amqplib": {
"version": "0.55.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-amqplib/-/instrumentation-amqplib-0.55.0.tgz",
"integrity": "sha512-5ULoU8p+tWcQw5PDYZn8rySptGSLZHNX/7srqo2TioPnAAcvTy6sQFQXsNPrAnyRRtYGMetXVyZUy5OaX1+IfA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-connect": {
"version": "0.52.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.52.0.tgz",
"integrity": "sha512-GXPxfNB5szMbV3I9b7kNWSmQBoBzw7MT0ui6iU/p+NIzVx3a06Ri2cdQO7tG9EKb4aKSLmfX9Cw5cKxXqX6Ohg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@types/connect": "3.4.38"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-dataloader": {
"version": "0.26.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dataloader/-/instrumentation-dataloader-0.26.0.tgz",
"integrity": "sha512-P2BgnFfTOarZ5OKPmYfbXfDFjQ4P9WkQ1Jji7yH5/WwB6Wm/knynAoA1rxbjWcDlYupFkyT0M1j6XLzDzy0aCA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-express": {
"version": "0.57.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.57.0.tgz",
"integrity": "sha512-HAdx/o58+8tSR5iW+ru4PHnEejyKrAy9fYFhlEI81o10nYxrGahnMAHWiSjhDC7UQSY3I4gjcPgSKQz4rm/asg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.27.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-fs": {
"version": "0.28.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fs/-/instrumentation-fs-0.28.0.tgz",
"integrity": "sha512-FFvg8fq53RRXVBRHZViP+EMxMR03tqzEGpuq55lHNbVPyFklSVfQBN50syPhK5UYYwaStx0eyCtHtbRreusc5g==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-generic-pool": {
"version": "0.52.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-generic-pool/-/instrumentation-generic-pool-0.52.0.tgz",
"integrity": "sha512-ISkNcv5CM2IwvsMVL31Tl61/p2Zm2I2NAsYq5SSBgOsOndT0TjnptjufYVScCnD5ZLD1tpl4T3GEYULLYOdIdQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-graphql": {
"version": "0.56.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.56.0.tgz",
"integrity": "sha512-IPvNk8AFoVzTAM0Z399t34VDmGDgwT6rIqCUug8P9oAGerl2/PEIYMPOl/rerPGu+q8gSWdmbFSjgg7PDVRd3Q==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-hapi": {
"version": "0.55.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.55.0.tgz",
"integrity": "sha512-prqAkRf9e4eEpy4G3UcR32prKE8NLNlA90TdEU1UsghOTg0jUvs40Jz8LQWFEs5NbLbXHYGzB4CYVkCI8eWEVQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.27.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-http": {
"version": "0.208.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.208.0.tgz",
"integrity": "sha512-rhmK46DRWEbQQB77RxmVXGyjs6783crXCnFjYQj+4tDH/Kpv9Rbg3h2kaNyp5Vz2emF1f9HOQQvZoHzwMWOFZQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "2.2.0",
"@opentelemetry/instrumentation": "0.208.0",
"@opentelemetry/semantic-conventions": "^1.29.0",
"forwarded-parse": "2.1.2"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-http/node_modules/@opentelemetry/core": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.2.0.tgz",
"integrity": "sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/semantic-conventions": "^1.29.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": ">=1.0.0 <1.10.0"
}
},
"node_modules/@opentelemetry/instrumentation-ioredis": {
"version": "0.56.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.56.0.tgz",
"integrity": "sha512-XSWeqsd3rKSsT3WBz/JKJDcZD4QYElZEa0xVdX8f9dh4h4QgXhKRLorVsVkK3uXFbC2sZKAS2Ds+YolGwD83Dg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/redis-common": "^0.38.2"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-kafkajs": {
"version": "0.18.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-kafkajs/-/instrumentation-kafkajs-0.18.0.tgz",
"integrity": "sha512-KCL/1HnZN5zkUMgPyOxfGjLjbXjpd4odDToy+7c+UsthIzVLFf99LnfIBE8YSSrYE4+uS7OwJMhvhg3tWjqMBg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.30.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-knex": {
"version": "0.53.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-knex/-/instrumentation-knex-0.53.0.tgz",
"integrity": "sha512-xngn5cH2mVXFmiT1XfQ1aHqq1m4xb5wvU6j9lSgLlihJ1bXzsO543cpDwjrZm2nMrlpddBf55w8+bfS4qDh60g==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.33.1"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-koa": {
"version": "0.57.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.57.0.tgz",
"integrity": "sha512-3JS8PU/D5E3q295mwloU2v7c7/m+DyCqdu62BIzWt+3u9utjxC9QS7v6WmUNuoDN3RM+Q+D1Gpj13ERo+m7CGg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.36.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.9.0"
}
},
"node_modules/@opentelemetry/instrumentation-lru-memoizer": {
"version": "0.53.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-lru-memoizer/-/instrumentation-lru-memoizer-0.53.0.tgz",
"integrity": "sha512-LDwWz5cPkWWr0HBIuZUjslyvijljTwmwiItpMTHujaULZCxcYE9eU44Qf/pbVC8TulT0IhZi+RoGvHKXvNhysw==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-mongodb": {
"version": "0.61.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.61.0.tgz",
"integrity": "sha512-OV3i2DSoY5M/pmLk+68xr5RvkHU8DRB3DKMzYJdwDdcxeLs62tLbkmRyqJZsYf3Ht7j11rq35pHOWLuLzXL7pQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-mongoose": {
"version": "0.55.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.55.0.tgz",
"integrity": "sha512-5afj0HfF6aM6Nlqgu6/PPHFk8QBfIe3+zF9FGpX76jWPS0/dujoEYn82/XcLSaW5LPUDW8sni+YeK0vTBNri+w==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-mysql": {
"version": "0.54.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.54.0.tgz",
"integrity": "sha512-bqC1YhnwAeWmRzy1/Xf9cDqxNG2d/JDkaxnqF5N6iJKN1eVWI+vg7NfDkf52/Nggp3tl1jcC++ptC61BD6738A==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0",
"@types/mysql": "2.15.27"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-mysql2": {
"version": "0.55.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.55.0.tgz",
"integrity": "sha512-0cs8whQG55aIi20gnK8B7cco6OK6N+enNhW0p5284MvqJ5EPi+I1YlWsWXgzv/V2HFirEejkvKiI4Iw21OqDWg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.33.0",
"@opentelemetry/sql-common": "^0.41.2"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-pg": {
"version": "0.61.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.61.0.tgz",
"integrity": "sha512-UeV7KeTnRSM7ECHa3YscoklhUtTQPs6V6qYpG283AB7xpnPGCUCUfECFT9jFg6/iZOQTt3FHkB1wGTJCNZEvPw==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.34.0",
"@opentelemetry/sql-common": "^0.41.2",
"@types/pg": "8.15.6",
"@types/pg-pool": "2.0.6"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-pg/node_modules/@types/pg": {
"version": "8.15.6",
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.15.6.tgz",
"integrity": "sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==",
"license": "MIT",
"dependencies": {
"@types/node": "*",
"pg-protocol": "*",
"pg-types": "^2.2.0"
}
},
"node_modules/@opentelemetry/instrumentation-redis": {
"version": "0.57.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis/-/instrumentation-redis-0.57.0.tgz",
"integrity": "sha512-bCxTHQFXzrU3eU1LZnOZQ3s5LURxQPDlU3/upBzlWY77qOI1GZuGofazj3jtzjctMJeBEJhNwIFEgRPBX1kp/Q==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/redis-common": "^0.38.2",
"@opentelemetry/semantic-conventions": "^1.27.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-tedious": {
"version": "0.27.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-tedious/-/instrumentation-tedious-0.27.0.tgz",
"integrity": "sha512-jRtyUJNZppPBjPae4ZjIQ2eqJbcRaRfJkr0lQLHFmOU/no5A6e9s1OHLd5XZyZoBJ/ymngZitanyRRA5cniseA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.208.0",
"@types/tedious": "^4.0.14"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.3.0"
}
},
"node_modules/@opentelemetry/instrumentation-undici": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-undici/-/instrumentation-undici-0.19.0.tgz",
"integrity": "sha512-Pst/RhR61A2OoZQZkn6OLpdVpXp6qn3Y92wXa6umfJe9rV640r4bc6SWvw4pPN6DiQqPu2c8gnSSZPDtC6JlpQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/semantic-conventions": "^1.24.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.7.0"
}
},
"node_modules/@opentelemetry/redis-common": {
"version": "0.38.2",
"resolved": "https://registry.npmjs.org/@opentelemetry/redis-common/-/redis-common-0.38.2.tgz",
"integrity": "sha512-1BCcU93iwSRZvDAgwUxC/DV4T/406SkMfxGqu5ojc3AvNI+I9GhV7v0J1HljsczuuhcnFLYqD5VmwVXfCGHzxA==",
"license": "Apache-2.0",
"engines": {
"node": "^18.19.0 || >=20.6.0"
}
},
"node_modules/@opentelemetry/resources": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.3.0.tgz",
"integrity": "sha512-shlr2l5g+87J8wqYlsLyaUsgKVRO7RtX70Ckd5CtDOWtImZgaUDmf4Z2ozuSKQLM2wPDR0TE/3bPVBNJtRm/cQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "2.3.0",
"@opentelemetry/semantic-conventions": "^1.29.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": ">=1.3.0 <1.10.0"
}
},
"node_modules/@opentelemetry/sdk-trace-base": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.3.0.tgz",
"integrity": "sha512-B0TQ2e9h0ETjpI+eGmCz8Ojb+lnYms0SE3jFwEKrN/PK4aSVHU28AAmnOoBmfub+I3jfgPwvDJgomBA5a7QehQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "2.3.0",
"@opentelemetry/resources": "2.3.0",
"@opentelemetry/semantic-conventions": "^1.29.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": ">=1.3.0 <1.10.0"
}
},
"node_modules/@opentelemetry/semantic-conventions": {
"version": "1.38.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.38.0.tgz",
"integrity": "sha512-kocjix+/sSggfJhwXqClZ3i9Y/MI0fp7b+g7kCRm6psy2dsf8uApTRclwG18h8Avm7C9+fnt+O36PspJ/OzoWg==",
"license": "Apache-2.0",
"engines": {
"node": ">=14"
}
},
"node_modules/@opentelemetry/sql-common": {
"version": "0.41.2",
"resolved": "https://registry.npmjs.org/@opentelemetry/sql-common/-/sql-common-0.41.2.tgz",
"integrity": "sha512-4mhWm3Z8z+i508zQJ7r6Xi7y4mmoJpdvH0fZPFRkWrdp5fq7hhZ2HhYokEOLkfqSMgPR4Z9EyB3DBkbKGOqZiQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
"peerDependencies": {
"@opentelemetry/api": "^1.1.0"
}
},
"node_modules/@paralleldrive/cuid2": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.3.1.tgz",
@@ -3602,6 +4140,18 @@
"dev": true,
"license": "MIT"
},
"node_modules/@prisma/instrumentation": {
"version": "6.19.0",
"resolved": "https://registry.npmjs.org/@prisma/instrumentation/-/instrumentation-6.19.0.tgz",
"integrity": "sha512-QcuYy25pkXM8BJ37wVFBO7Zh34nyRV1GOb2n3lPkkbRYfl4hWl3PTcImP41P0KrzVXfa/45p6eVCos27x3exIg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": ">=0.52.0 <1"
},
"peerDependencies": {
"@opentelemetry/api": "^1.8"
}
},
"node_modules/@protobufjs/aspromise": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
@@ -4034,6 +4584,187 @@
"hasInstallScript": true,
"license": "Apache-2.0"
},
"node_modules/@sentry-internal/browser-utils": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry-internal/browser-utils/-/browser-utils-10.32.1.tgz",
"integrity": "sha512-sjLLep1es3rTkbtAdTtdpc/a6g7v7bK5YJiZJsUigoJ4NTiFeMI5uIDCxbH/tjJ1q23YE1LzVn7T96I+qBRjHA==",
"license": "MIT",
"dependencies": {
"@sentry/core": "10.32.1"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry-internal/feedback": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry-internal/feedback/-/feedback-10.32.1.tgz",
"integrity": "sha512-O24G8jxbfBY1RE/v2qFikPJISVMOrd/zk8FKyl+oUVYdOxU2Ucjk2cR3EQruBFlc7irnL6rT3GPfRZ/kBgLkmQ==",
"license": "MIT",
"dependencies": {
"@sentry/core": "10.32.1"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry-internal/replay": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry-internal/replay/-/replay-10.32.1.tgz",
"integrity": "sha512-KKmLUgIaLRM0VjrMA1ByQTawZyRDYSkG2evvEOVpEtR9F0sumidAQdi7UY71QEKE1RYe/Jcp/3WoaqsMh8tbnQ==",
"license": "MIT",
"dependencies": {
"@sentry-internal/browser-utils": "10.32.1",
"@sentry/core": "10.32.1"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry-internal/replay-canvas": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry-internal/replay-canvas/-/replay-canvas-10.32.1.tgz",
"integrity": "sha512-/XGTzWNWVc+B691fIVekV2KeoHFEDA5KftrLFAhEAW7uWOwk/xy3aQX4TYM0LcPm2PBKvoumlAD+Sd/aXk63oA==",
"license": "MIT",
"dependencies": {
"@sentry-internal/replay": "10.32.1",
"@sentry/core": "10.32.1"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry/browser": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry/browser/-/browser-10.32.1.tgz",
"integrity": "sha512-NPNCXTZ05ZGTFyJdKNqjykpFm+urem0ebosILQiw3C4BxNVNGH4vfYZexyl6prRhmg91oB6GjVNiVDuJiap1gg==",
"license": "MIT",
"dependencies": {
"@sentry-internal/browser-utils": "10.32.1",
"@sentry-internal/feedback": "10.32.1",
"@sentry-internal/replay": "10.32.1",
"@sentry-internal/replay-canvas": "10.32.1",
"@sentry/core": "10.32.1"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry/core": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry/core/-/core-10.32.1.tgz",
"integrity": "sha512-PH2ldpSJlhqsMj2vCTyU0BI2Fx1oIDhm7Izo5xFALvjVCS0gmlqHt1udu6YlKn8BtpGH6bGzssvv5APrk+OdPQ==",
"license": "MIT",
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry/node": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry/node/-/node-10.32.1.tgz",
"integrity": "sha512-oxlybzt8QW0lx/QaEj1DcvZDRXkgouewFelu/10dyUwv5So3YvipfvWInda+yMLmn25OggbloDQ0gyScA2jU3g==",
"license": "MIT",
"dependencies": {
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^2.2.0",
"@opentelemetry/core": "^2.2.0",
"@opentelemetry/instrumentation": "^0.208.0",
"@opentelemetry/instrumentation-amqplib": "0.55.0",
"@opentelemetry/instrumentation-connect": "0.52.0",
"@opentelemetry/instrumentation-dataloader": "0.26.0",
"@opentelemetry/instrumentation-express": "0.57.0",
"@opentelemetry/instrumentation-fs": "0.28.0",
"@opentelemetry/instrumentation-generic-pool": "0.52.0",
"@opentelemetry/instrumentation-graphql": "0.56.0",
"@opentelemetry/instrumentation-hapi": "0.55.0",
"@opentelemetry/instrumentation-http": "0.208.0",
"@opentelemetry/instrumentation-ioredis": "0.56.0",
"@opentelemetry/instrumentation-kafkajs": "0.18.0",
"@opentelemetry/instrumentation-knex": "0.53.0",
"@opentelemetry/instrumentation-koa": "0.57.0",
"@opentelemetry/instrumentation-lru-memoizer": "0.53.0",
"@opentelemetry/instrumentation-mongodb": "0.61.0",
"@opentelemetry/instrumentation-mongoose": "0.55.0",
"@opentelemetry/instrumentation-mysql": "0.54.0",
"@opentelemetry/instrumentation-mysql2": "0.55.0",
"@opentelemetry/instrumentation-pg": "0.61.0",
"@opentelemetry/instrumentation-redis": "0.57.0",
"@opentelemetry/instrumentation-tedious": "0.27.0",
"@opentelemetry/instrumentation-undici": "0.19.0",
"@opentelemetry/resources": "^2.2.0",
"@opentelemetry/sdk-trace-base": "^2.2.0",
"@opentelemetry/semantic-conventions": "^1.37.0",
"@prisma/instrumentation": "6.19.0",
"@sentry/core": "10.32.1",
"@sentry/node-core": "10.32.1",
"@sentry/opentelemetry": "10.32.1",
"import-in-the-middle": "^2",
"minimatch": "^9.0.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry/node-core": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry/node-core/-/node-core-10.32.1.tgz",
"integrity": "sha512-w56rxdBanBKc832zuwnE+zNzUQ19fPxfHEtOhK8JGPu3aSwQYcIxwz9z52lOx3HN7k/8Fj5694qlT3x/PokhRw==",
"license": "MIT",
"dependencies": {
"@apm-js-collab/tracing-hooks": "^0.3.1",
"@sentry/core": "10.32.1",
"@sentry/opentelemetry": "10.32.1",
"import-in-the-middle": "^2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^1.30.1 || ^2.1.0 || ^2.2.0",
"@opentelemetry/core": "^1.30.1 || ^2.1.0 || ^2.2.0",
"@opentelemetry/instrumentation": ">=0.57.1 <1",
"@opentelemetry/resources": "^1.30.1 || ^2.1.0 || ^2.2.0",
"@opentelemetry/sdk-trace-base": "^1.30.1 || ^2.1.0 || ^2.2.0",
"@opentelemetry/semantic-conventions": "^1.37.0"
}
},
"node_modules/@sentry/opentelemetry": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry/opentelemetry/-/opentelemetry-10.32.1.tgz",
"integrity": "sha512-YLssSz5Y+qPvufrh2cDaTXDoXU8aceOhB+YTjT8/DLF6SOj7Tzen52aAcjNaifawaxEsLCC8O+B+A2iA+BllvA==",
"license": "MIT",
"dependencies": {
"@sentry/core": "10.32.1"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^1.30.1 || ^2.1.0 || ^2.2.0",
"@opentelemetry/core": "^1.30.1 || ^2.1.0 || ^2.2.0",
"@opentelemetry/sdk-trace-base": "^1.30.1 || ^2.1.0 || ^2.2.0",
"@opentelemetry/semantic-conventions": "^1.37.0"
}
},
"node_modules/@sentry/react": {
"version": "10.32.1",
"resolved": "https://registry.npmjs.org/@sentry/react/-/react-10.32.1.tgz",
"integrity": "sha512-/tX0HeACbAmVP57x8txTrGk/U3fa9pDBaoAtlOrnPv5VS/aC5SGkehXWeTGSAa+ahlOWwp3IF8ILVXRiOoG/Vg==",
"license": "MIT",
"dependencies": {
"@sentry/browser": "10.32.1",
"@sentry/core": "10.32.1",
"hoist-non-react-statics": "^3.3.2"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"react": "^16.14.0 || 17.x || 18.x || 19.x"
}
},
"node_modules/@smithy/abort-controller": {
"version": "4.2.7",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.7.tgz",
@@ -5318,6 +6049,12 @@
"@types/ssh2": "*"
}
},
"node_modules/@types/emscripten": {
"version": "1.41.5",
"resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.41.5.tgz",
"integrity": "sha512-cMQm7pxu6BxtHyqJ7mQZ2kXWV5SLmugybFdHCBbJ5eHzOo6VhBckEgAT3//rP5FwPHNPeEiq4SmQ5ucBwsOo4Q==",
"license": "MIT"
},
"node_modules/@types/estree": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
@@ -5402,6 +6139,15 @@
"@types/express": "*"
}
},
"node_modules/@types/mysql": {
"version": "2.15.27",
"resolved": "https://registry.npmjs.org/@types/mysql/-/mysql-2.15.27.tgz",
"integrity": "sha512-YfWiV16IY0OeBfBCk8+hXKmdTKrKlwKN1MNKAPBu5JYxLwBEZl7QzeEpGnlZb3VMGJrrGmB84gXiH+ofs/TezA==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/node": {
"version": "24.10.4",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.4.tgz",
@@ -5523,7 +6269,6 @@
"version": "8.16.0",
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.16.0.tgz",
"integrity": "sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/node": "*",
@@ -5531,6 +6276,15 @@
"pg-types": "^2.2.0"
}
},
"node_modules/@types/pg-pool": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/@types/pg-pool/-/pg-pool-2.0.6.tgz",
"integrity": "sha512-TaAUE5rq2VQYxab5Ts7WZhKNmuN78Q6PiFonTDdpbx8a1H0M1vhy3rhiMjl+e2iHmogyMw7jZF4FrE6eJUy5HQ==",
"license": "MIT",
"dependencies": {
"@types/pg": "*"
}
},
"node_modules/@types/piexifjs": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@types/piexifjs/-/piexifjs-1.0.0.tgz",
@@ -5695,6 +6449,15 @@
"@types/serve-static": "*"
}
},
"node_modules/@types/tedious": {
"version": "4.0.14",
"resolved": "https://registry.npmjs.org/@types/tedious/-/tedious-4.0.14.tgz",
"integrity": "sha512-KHPsfX/FoVbUGbyYvk1q9MMQHLPeRZhRJZdO45Q4YjvFkv4hMNghCWTvy7rdKessBsmtz4euWCWAB6/tVpI1Iw==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/@types/use-sync-external-store": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz",
@@ -6163,7 +6926,6 @@
"version": "8.15.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"dev": true,
"license": "MIT",
"bin": {
"acorn": "bin/acorn"
@@ -6172,6 +6934,15 @@
"node": ">=0.4.0"
}
},
"node_modules/acorn-import-attributes": {
"version": "1.9.5",
"resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz",
"integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==",
"license": "MIT",
"peerDependencies": {
"acorn": "^8"
}
},
"node_modules/acorn-jsx": {
"version": "5.3.2",
"resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
@@ -7391,6 +8162,12 @@
"node": ">=10"
}
},
"node_modules/cjs-module-lexer": {
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz",
"integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==",
"license": "MIT"
},
"node_modules/clean-stack": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
@@ -9721,6 +10498,12 @@
"node": ">= 0.6"
}
},
"node_modules/forwarded-parse": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/forwarded-parse/-/forwarded-parse-2.1.2.tgz",
"integrity": "sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw==",
"license": "MIT"
},
"node_modules/fraction.js": {
"version": "5.3.4",
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz",
@@ -10468,6 +11251,21 @@
"hermes-estree": "0.25.1"
}
},
"node_modules/hoist-non-react-statics": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz",
"integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==",
"license": "BSD-3-Clause",
"dependencies": {
"react-is": "^16.7.0"
}
},
"node_modules/hoist-non-react-statics/node_modules/react-is": {
"version": "16.13.1",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
"integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==",
"license": "MIT"
},
"node_modules/html-encoding-sniffer": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
@@ -10648,6 +11446,18 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/import-in-the-middle": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-2.0.1.tgz",
"integrity": "sha512-bruMpJ7xz+9jwGzrwEhWgvRrlKRYCRDBrfU+ur3FcasYXLJDxTruJ//8g2Noj+QFyRBeqbpj8Bhn4Fbw6HjvhA==",
"license": "Apache-2.0",
"dependencies": {
"acorn": "^8.14.0",
"acorn-import-attributes": "^1.9.5",
"cjs-module-lexer": "^1.2.2",
"module-details-from-path": "^1.0.3"
}
},
"node_modules/imurmurhash": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
@@ -12704,6 +13514,12 @@
"dev": true,
"license": "MIT"
},
"node_modules/module-details-from-path": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.4.tgz",
"integrity": "sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==",
"license": "MIT"
},
"node_modules/mrmime": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz",
@@ -14926,6 +15742,19 @@
"node": ">=0.10.0"
}
},
"node_modules/require-in-the-middle": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/require-in-the-middle/-/require-in-the-middle-8.0.1.tgz",
"integrity": "sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ==",
"license": "MIT",
"dependencies": {
"debug": "^4.3.5",
"module-details-from-path": "^1.0.3"
},
"engines": {
"node": ">=9.3.0 || >=8.10.0 <9.0.0"
}
},
"node_modules/require-main-filename": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
@@ -16333,7 +17162,6 @@
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/tagged-tag/-/tagged-tag-1.0.0.tgz",
"integrity": "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=20"
@@ -16747,7 +17575,6 @@
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-5.3.1.tgz",
"integrity": "sha512-VCn+LMHbd4t6sF3wfU/+HKT63C9OoyrSIf4b+vtWHpt2U7/4InZG467YDNMFMR70DdHjAdpPWmw2lzRdg0Xqqg==",
"dev": true,
"license": "(MIT OR CC0-1.0)",
"dependencies": {
"tagged-tag": "^1.0.0"
@@ -17763,6 +18590,19 @@
"resolved": "https://registry.npmjs.org/zxcvbn/-/zxcvbn-4.4.2.tgz",
"integrity": "sha512-Bq0B+ixT/DMyG8kgX2xWcI5jUvCwqrMxSFam7m0lAf78nf04hv6lNCsyLYdyYTrCVMqNDY/206K7eExYCeSyUQ==",
"license": "MIT"
},
"node_modules/zxing-wasm": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/zxing-wasm/-/zxing-wasm-2.2.4.tgz",
"integrity": "sha512-1gq5zs4wuNTs5umWLypzNNeuJoluFvwmvjiiT3L9z/TMlVveeJRWy7h90xyUqCe+Qq0zL0w7o5zkdDMWDr9aZA==",
"license": "MIT",
"dependencies": {
"@types/emscripten": "^1.41.5",
"type-fest": "^5.2.0"
},
"peerDependencies": {
"@types/emscripten": ">=1.39.6"
}
}
}
}

View File

@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.9.90",
"version": "0.9.97",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -31,6 +31,8 @@
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",
"@google/genai": "^1.30.0",
"@sentry/node": "^10.32.1",
"@sentry/react": "^10.32.1",
"@tanstack/react-query": "^5.90.12",
"@types/connect-timeout": "^1.9.0",
"bcrypt": "^5.1.1",
@@ -69,7 +71,8 @@
"swagger-ui-express": "^5.0.1",
"tsx": "^4.20.6",
"zod": "^4.2.1",
"zxcvbn": "^4.4.2"
"zxcvbn": "^4.4.2",
"zxing-wasm": "^2.2.4"
},
"devDependencies": {
"@tailwindcss/postcss": "4.1.17",

164
scripts/test-bugsink.ts Normal file
View File

@@ -0,0 +1,164 @@
#!/usr/bin/env npx tsx
/**
* Test script to verify Bugsink error tracking is working.
*
* This script sends test events directly to Bugsink using the Sentry store API.
* We use curl/fetch instead of the Sentry SDK because SDK v8+ has strict DSN
* validation that rejects HTTP URLs (Bugsink uses HTTP locally).
*
* Usage:
* npx tsx scripts/test-bugsink.ts
*
* Or with environment override:
* SENTRY_DSN=http://...@localhost:8000/1 npx tsx scripts/test-bugsink.ts
*/
// Configuration - parse DSN to extract components
// Default DSN targets a local Bugsink instance (project 1 on localhost:8000).
// Note: '||' (not '??') means an env var that is set but empty also falls back.
const DSN =
  process.env.SENTRY_DSN || 'http://59a58583-e869-7697-f94a-cfa0337676a8@localhost:8000/1';
// Environment tag attached to every event; defaults to 'test' for this script.
const ENVIRONMENT = process.env.SENTRY_ENVIRONMENT || 'test';
/**
 * Parse a Sentry DSN of the form:
 *   <protocol>://<publicKey>[:<secretKey>]@<host>/<projectId>
 *
 * Legacy DSNs embed a secret key after a colon; only the public key may be
 * sent in the X-Sentry-Auth header, so the secret (if present) is stripped.
 *
 * @param dsn - The DSN string to parse.
 * @returns The protocol, public key, host (may include a port), and project id.
 * @throws Error when the string does not match the DSN shape.
 */
function parseDsn(dsn: string) {
  const match = dsn.match(/^(https?):\/\/([^@]+)@([^/]+)\/(.+)$/);
  if (!match) {
    throw new Error(`Invalid DSN format: ${dsn}`);
  }
  // match[2] is the full credential chunk; discard an optional ":secret" part.
  const [publicKey] = match[2].split(':');
  return {
    protocol: match[1],
    publicKey,
    host: match[3],
    projectId: match[4],
  };
}
const dsnParts = parseDsn(DSN);
// Bugsink implements Sentry's legacy "store" ingestion endpoint:
//   POST <host>/api/<project_id>/store/
const STORE_URL = `${dsnParts.protocol}://${dsnParts.host}/api/${dsnParts.projectId}/store/`;
// Print the resolved configuration up front so a failed run is easy to diagnose.
console.log('='.repeat(60));
console.log('Bugsink/Sentry Test Script');
console.log('='.repeat(60));
console.log(`DSN: ${DSN}`);
console.log(`Store URL: ${STORE_URL}`);
console.log(`Public Key: ${dsnParts.publicKey}`);
console.log(`Environment: ${ENVIRONMENT}`);
console.log('');
/**
 * Produce a random 32-character lowercase hex string for use as a Sentry
 * event_id (a UUID with the dashes omitted). Math.random is fine here:
 * this is a throwaway test identifier, not a security token.
 */
function generateEventId(): string {
  const hexDigits = '0123456789abcdef';
  let id = '';
  for (let i = 0; i < 32; i++) {
    id += hexDigits[Math.floor(Math.random() * 16)];
  }
  return id;
}
/**
 * POST a single event payload to Bugsink's Sentry-compatible store endpoint.
 *
 * @param event - The raw Sentry event payload to submit.
 * @returns Whether the server accepted the event, plus the HTTP status code.
 */
async function sendEvent(
  event: Record<string, unknown>,
): Promise<{ success: boolean; status: number }> {
  const authHeader = `Sentry sentry_version=7, sentry_client=test-bugsink/1.0, sentry_key=${dsnParts.publicKey}`;
  const response = await fetch(STORE_URL, {
    method: 'POST',
    body: JSON.stringify(event),
    headers: {
      'Content-Type': 'application/json',
      'X-Sentry-Auth': authHeader,
    },
  });
  const { ok, status } = response;
  return { success: ok, status };
}
/**
 * Entry point: sends one synthetic error event and one info message to
 * Bugsink, reports the per-event result, and exits non-zero if either
 * event is rejected or the requests fail outright.
 */
async function main() {
  console.log('[Test] Sending test events to Bugsink...\n');
  try {
    // Test 1: Send an error event
    const errorEventId = generateEventId();
    console.log(`[Test 1] Sending error event (ID: ${errorEventId})...`);
    const errorEvent = {
      event_id: errorEventId,
      timestamp: new Date().toISOString(),
      platform: 'node',
      level: 'error',
      logger: 'test-bugsink.ts',
      environment: ENVIRONMENT,
      server_name: 'flyer-crawler-dev',
      message: 'BugsinkTestError: This is a test error from test-bugsink.ts script',
      // Minimal exception payload with one synthetic stack frame so the
      // event renders like a real error in the Bugsink UI.
      exception: {
        values: [
          {
            type: 'BugsinkTestError',
            value: 'This is a test error from test-bugsink.ts script',
            stacktrace: {
              frames: [
                {
                  filename: 'scripts/test-bugsink.ts',
                  function: 'main',
                  lineno: 42,
                  colno: 10,
                  in_app: true,
                },
              ],
            },
          },
        ],
      },
      // Tags make these synthetic events easy to find and filter in the UI.
      tags: {
        test: 'true',
        source: 'test-bugsink.ts',
      },
    };
    const errorResult = await sendEvent(errorEvent);
    console.log(
      ` Result: ${errorResult.success ? 'SUCCESS' : 'FAILED'} (HTTP ${errorResult.status})`,
    );
    // Test 2: Send an info message
    const messageEventId = generateEventId();
    console.log(`[Test 2] Sending info message (ID: ${messageEventId})...`);
    const messageEvent = {
      event_id: messageEventId,
      timestamp: new Date().toISOString(),
      platform: 'node',
      level: 'info',
      logger: 'test-bugsink.ts',
      environment: ENVIRONMENT,
      server_name: 'flyer-crawler-dev',
      message: 'Test info message from test-bugsink.ts - Bugsink is working!',
      tags: {
        test: 'true',
        source: 'test-bugsink.ts',
      },
    };
    const messageResult = await sendEvent(messageEvent);
    console.log(
      ` Result: ${messageResult.success ? 'SUCCESS' : 'FAILED'} (HTTP ${messageResult.status})`,
    );
    // Summary
    console.log('');
    console.log('='.repeat(60));
    if (errorResult.success && messageResult.success) {
      console.log('SUCCESS! Both test events were accepted by Bugsink.');
      console.log('');
      console.log('Check Bugsink UI at http://localhost:8000');
      console.log('Look for:');
      console.log(' - BugsinkTestError: "This is a test error..."');
      console.log(' - Info message: "Test info message from test-bugsink.ts"');
    } else {
      console.log('WARNING: Some events may not have been accepted.');
      console.log('Check that Bugsink is running and the DSN is correct.');
      // Non-zero exit so CI or a calling script can detect the failure.
      process.exit(1);
    }
    console.log('='.repeat(60));
  } catch (error) {
    // Network-level failures (Bugsink down, bad host, etc.) land here.
    console.error('[Test] Failed to send events:', error);
    process.exit(1);
  }
}
// Fire-and-forget is safe: main() catches all errors and sets the exit code.
main();

View File

@@ -1,4 +1,12 @@
// server.ts
/**
* IMPORTANT: Sentry initialization MUST happen before any other imports
* to ensure all errors are captured, including those in imported modules.
* See ADR-015: Application Performance Monitoring and Error Tracking.
*/
import { initSentry, getSentryMiddleware } from './src/services/sentry.server';
initSentry();
import express, { Request, Response, NextFunction } from 'express';
import { randomUUID } from 'crypto';
import helmet from 'helmet';
@@ -7,7 +15,7 @@ import cookieParser from 'cookie-parser';
import listEndpoints from 'express-list-endpoints';
import { getPool } from './src/services/db/connection.db';
import passport from './src/routes/passport.routes';
import passport from './src/config/passport';
import { logger } from './src/services/logger.server';
// Import routers
@@ -24,6 +32,9 @@ import statsRouter from './src/routes/stats.routes';
import gamificationRouter from './src/routes/gamification.routes';
import systemRouter from './src/routes/system.routes';
import healthRouter from './src/routes/health.routes';
import upcRouter from './src/routes/upc.routes';
import inventoryRouter from './src/routes/inventory.routes';
import receiptRouter from './src/routes/receipt.routes';
import { errorHandler } from './src/middleware/errorHandler';
import { backgroundJobService, startBackgroundJobs } from './src/services/backgroundJobService';
import type { UserProfile } from './src/types';
@@ -37,6 +48,7 @@ import {
gracefulShutdown,
tokenCleanupQueue,
} from './src/services/queueService.server';
import { monitoringService } from './src/services/monitoringService.server';
// --- START DEBUG LOGGING ---
// Log the database connection details as seen by the SERVER PROCESS.
@@ -108,9 +120,14 @@ app.use(express.urlencoded({ limit: '100mb', extended: true }));
app.use(cookieParser()); // Middleware to parse cookies
app.use(passport.initialize()); // Initialize Passport
// --- Sentry Request Handler (ADR-015) ---
// Must be the first middleware after body parsers to capture request data for errors.
const sentryMiddleware = getSentryMiddleware();
app.use(sentryMiddleware.requestHandler);
// --- MOCK AUTH FOR TESTING ---
// This MUST come after passport.initialize() and BEFORE any of the API routes.
import { mockAuth } from './src/routes/passport.routes';
import { mockAuth } from './src/config/passport';
app.use(mockAuth);
// Add a request timeout middleware. This will help prevent requests from hanging indefinitely.
@@ -215,6 +232,18 @@ if (process.env.NODE_ENV !== 'production') {
// --- API Routes ---
// ADR-053: Worker Health Checks
// Expose queue metrics for monitoring.
app.get('/api/health/queues', async (req, res) => {
try {
const statuses = await monitoringService.getQueueStatuses();
res.json(statuses);
} catch (error) {
logger.error({ err: error }, 'Failed to fetch queue statuses');
res.status(503).json({ error: 'Failed to fetch queue statuses' });
}
});
// The order of route registration is critical.
// More specific routes should be registered before more general ones.
// 1. Authentication routes for login, registration, etc.
@@ -243,9 +272,19 @@ app.use('/api/personalization', personalizationRouter);
app.use('/api/price-history', priceRouter);
// 10. Public statistics routes.
app.use('/api/stats', statsRouter);
// 11. UPC barcode scanning routes.
app.use('/api/upc', upcRouter);
// 12. Inventory and expiry tracking routes.
app.use('/api/inventory', inventoryRouter);
// 13. Receipt scanning routes.
app.use('/api/receipts', receiptRouter);
// --- Error Handling and Server Startup ---
// Sentry Error Handler (ADR-015) - captures errors and sends to Bugsink.
// Must come BEFORE the custom error handler but AFTER all routes.
app.use(sentryMiddleware.errorHandler);
// Global error handling middleware. This must be the last `app.use()` call.
app.use(errorHandler);

View File

@@ -1360,7 +1360,8 @@ CREATE TRIGGER on_auth_user_created
FOR EACH ROW EXECUTE FUNCTION public.handle_new_user();
-- 2. Create a reusable function to automatically update 'updated_at' columns.
DROP FUNCTION IF EXISTS public.handle_updated_at();
-- CASCADE drops dependent triggers; they are recreated by the DO block below
DROP FUNCTION IF EXISTS public.handle_updated_at() CASCADE;
CREATE OR REPLACE FUNCTION public.handle_updated_at()
RETURNS TRIGGER AS $$

View File

@@ -679,6 +679,7 @@ CREATE INDEX IF NOT EXISTS idx_planned_meals_menu_plan_id ON public.planned_meal
CREATE INDEX IF NOT EXISTS idx_planned_meals_recipe_id ON public.planned_meals(recipe_id);
-- 37. Track the grocery items a user currently has in their pantry.
-- NOTE: receipt_item_id FK is added later via ALTER TABLE because receipt_items is defined after this table.
CREATE TABLE IF NOT EXISTS public.pantry_items (
pantry_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
@@ -688,15 +689,38 @@ CREATE TABLE IF NOT EXISTS public.pantry_items (
best_before_date DATE,
pantry_location_id BIGINT REFERENCES public.pantry_locations(pantry_location_id) ON DELETE SET NULL,
notification_sent_at TIMESTAMPTZ,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Columns from migration 002_expiry_tracking.sql
purchase_date DATE,
source TEXT DEFAULT 'manual',
receipt_item_id BIGINT, -- FK added later via ALTER TABLE
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
expiry_source TEXT,
is_consumed BOOLEAN DEFAULT FALSE,
consumed_at TIMESTAMPTZ,
UNIQUE(user_id, master_item_id, unit)
);
COMMENT ON TABLE public.pantry_items IS 'Tracks a user''s personal inventory of grocery items to enable smart shopping lists.';
COMMENT ON COLUMN public.pantry_items.quantity IS 'The current amount of the item. Convention: use grams for weight, mL for volume where applicable.';
COMMENT ON COLUMN public.pantry_items.pantry_location_id IS 'Links the item to a user-defined location like "Fridge" or "Freezer".';
COMMENT ON COLUMN public.pantry_items.unit IS 'e.g., ''g'', ''ml'', ''items''. Should align with recipe_ingredients.unit and quantity convention.';
COMMENT ON COLUMN public.pantry_items.purchase_date IS 'Date the item was purchased (from receipt or manual entry).';
COMMENT ON COLUMN public.pantry_items.receipt_item_id IS 'Link to receipt_items if this pantry item was created from a receipt scan.';
COMMENT ON COLUMN public.pantry_items.product_id IS 'Link to products if this pantry item was created from a UPC scan.';
COMMENT ON COLUMN public.pantry_items.expiry_source IS 'How expiry was determined: manual, calculated, package, receipt.';
COMMENT ON COLUMN public.pantry_items.is_consumed IS 'Whether the item has been fully consumed.';
COMMENT ON COLUMN public.pantry_items.consumed_at IS 'When the item was marked as consumed.';
CREATE INDEX IF NOT EXISTS idx_pantry_items_user_id ON public.pantry_items(user_id);
CREATE INDEX IF NOT EXISTS idx_pantry_items_master_item_id ON public.pantry_items(master_item_id);
CREATE INDEX IF NOT EXISTS idx_pantry_items_pantry_location_id ON public.pantry_items(pantry_location_id);
CREATE INDEX IF NOT EXISTS idx_pantry_items_best_before_date ON public.pantry_items(best_before_date)
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
CREATE INDEX IF NOT EXISTS idx_pantry_items_expiring_soon ON public.pantry_items(user_id, best_before_date)
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
CREATE INDEX IF NOT EXISTS idx_pantry_items_receipt_item_id ON public.pantry_items(receipt_item_id)
WHERE receipt_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_pantry_items_product_id ON public.pantry_items(product_id)
WHERE product_id IS NOT NULL;
-- 38. Store password reset tokens.
CREATE TABLE IF NOT EXISTS public.password_reset_tokens (
@@ -939,11 +963,21 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Column from migration 002_expiry_tracking.sql
upc_code TEXT,
CONSTRAINT receipt_items_raw_item_description_check CHECK (TRIM(raw_item_description) <> '')
);
COMMENT ON TABLE public.receipt_items IS 'Stores individual line items extracted from a user receipt.';
COMMENT ON COLUMN public.receipt_items.upc_code IS 'UPC code if extracted from receipt or matched during processing.';
CREATE INDEX IF NOT EXISTS idx_receipt_items_receipt_id ON public.receipt_items(receipt_id);
CREATE INDEX IF NOT EXISTS idx_receipt_items_master_item_id ON public.receipt_items(master_item_id);
CREATE INDEX IF NOT EXISTS idx_receipt_items_upc_code ON public.receipt_items(upc_code)
WHERE upc_code IS NOT NULL;
-- Add FK constraint for pantry_items.receipt_item_id (deferred because receipt_items is defined after pantry_items)
ALTER TABLE public.pantry_items
ADD CONSTRAINT fk_pantry_items_receipt_item_id
FOREIGN KEY (receipt_item_id) REFERENCES public.receipt_items(receipt_item_id) ON DELETE SET NULL;
-- 54. Store schema metadata to detect changes during deployment.
CREATE TABLE IF NOT EXISTS public.schema_info (
@@ -1012,3 +1046,232 @@ CREATE INDEX IF NOT EXISTS idx_user_achievements_user_id ON public.user_achievem
CREATE INDEX IF NOT EXISTS idx_user_achievements_achievement_id ON public.user_achievements(achievement_id);
-- ============================================================================
-- UPC SCANNING FEATURE TABLES (59-60)
-- ============================================================================
-- 59. UPC Scan History - tracks all UPC scans performed by users
-- This table provides an audit trail and allows users to see their scan history
CREATE TABLE IF NOT EXISTS public.upc_scan_history (
scan_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
upc_code TEXT NOT NULL,
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
scan_source TEXT NOT NULL,
scan_confidence NUMERIC(5,4),
raw_image_path TEXT,
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT upc_scan_history_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
CONSTRAINT upc_scan_history_scan_source_check CHECK (scan_source IN ('image_upload', 'manual_entry', 'phone_app', 'camera_scan')),
CONSTRAINT upc_scan_history_scan_confidence_check CHECK (scan_confidence IS NULL OR (scan_confidence >= 0 AND scan_confidence <= 1))
);
COMMENT ON TABLE public.upc_scan_history IS 'Audit trail of all UPC barcode scans performed by users, tracking scan source and results.';
COMMENT ON COLUMN public.upc_scan_history.upc_code IS 'The scanned UPC/EAN barcode (8-14 digits).';
COMMENT ON COLUMN public.upc_scan_history.product_id IS 'Reference to the matched product, if found in our database.';
COMMENT ON COLUMN public.upc_scan_history.scan_source IS 'How the scan was performed: image_upload, manual_entry, phone_app, or camera_scan.';
COMMENT ON COLUMN public.upc_scan_history.scan_confidence IS 'Confidence score from barcode detection (0.0-1.0), null for manual entry.';
COMMENT ON COLUMN public.upc_scan_history.raw_image_path IS 'Path to the uploaded barcode image, if applicable.';
COMMENT ON COLUMN public.upc_scan_history.lookup_successful IS 'Whether the UPC was successfully matched to a product (internal or external).';
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_user_id ON public.upc_scan_history(user_id);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_upc_code ON public.upc_scan_history(upc_code);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_created_at ON public.upc_scan_history(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_product_id ON public.upc_scan_history(product_id) WHERE product_id IS NOT NULL;
-- 60. UPC External Lookups - cache for external UPC database API responses
CREATE TABLE IF NOT EXISTS public.upc_external_lookups (
lookup_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
upc_code TEXT NOT NULL UNIQUE,
product_name TEXT,
brand_name TEXT,
category TEXT,
description TEXT,
image_url TEXT,
external_source TEXT NOT NULL,
lookup_data JSONB,
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT upc_external_lookups_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
CONSTRAINT upc_external_lookups_external_source_check CHECK (external_source IN ('openfoodfacts', 'upcitemdb', 'manual', 'unknown')),
CONSTRAINT upc_external_lookups_name_check CHECK (NOT lookup_successful OR product_name IS NOT NULL)
);
COMMENT ON TABLE public.upc_external_lookups IS 'Cache for external UPC database API responses to reduce API calls and improve lookup speed.';
COMMENT ON COLUMN public.upc_external_lookups.upc_code IS 'The UPC/EAN barcode that was looked up.';
COMMENT ON COLUMN public.upc_external_lookups.product_name IS 'Product name returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.brand_name IS 'Brand name returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.category IS 'Product category returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.description IS 'Product description returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.image_url IS 'Product image URL returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.external_source IS 'Which external API provided this data: openfoodfacts, upcitemdb, manual, unknown.';
COMMENT ON COLUMN public.upc_external_lookups.lookup_data IS 'Full raw JSON response from the external API for reference.';
COMMENT ON COLUMN public.upc_external_lookups.lookup_successful IS 'Whether the external lookup found product information.';
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_upc_code ON public.upc_external_lookups(upc_code);
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_external_source ON public.upc_external_lookups(external_source);
-- Add index to existing products.upc_code for faster lookups
CREATE INDEX IF NOT EXISTS idx_products_upc_code ON public.products(upc_code) WHERE upc_code IS NOT NULL;
-- ============================================================================
-- EXPIRY DATE TRACKING FEATURE TABLES (61-63)
-- ============================================================================
-- 61. Expiry Date Ranges - reference table for typical shelf life
CREATE TABLE IF NOT EXISTS public.expiry_date_ranges (
expiry_range_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE CASCADE,
item_pattern TEXT,
storage_location TEXT NOT NULL,
min_days INTEGER NOT NULL,
max_days INTEGER NOT NULL,
typical_days INTEGER NOT NULL,
notes TEXT,
source TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT expiry_date_ranges_storage_location_check CHECK (storage_location IN ('fridge', 'freezer', 'pantry', 'room_temp')),
CONSTRAINT expiry_date_ranges_min_days_check CHECK (min_days >= 0),
CONSTRAINT expiry_date_ranges_max_days_check CHECK (max_days >= min_days),
CONSTRAINT expiry_date_ranges_typical_days_check CHECK (typical_days >= min_days AND typical_days <= max_days),
CONSTRAINT expiry_date_ranges_identifier_check CHECK (
master_item_id IS NOT NULL OR category_id IS NOT NULL OR item_pattern IS NOT NULL
),
CONSTRAINT expiry_date_ranges_source_check CHECK (source IS NULL OR source IN ('usda', 'fda', 'manual', 'community'))
);
COMMENT ON TABLE public.expiry_date_ranges IS 'Reference table storing typical shelf life for grocery items based on storage location.';
COMMENT ON COLUMN public.expiry_date_ranges.master_item_id IS 'Specific item this range applies to (most specific).';
COMMENT ON COLUMN public.expiry_date_ranges.category_id IS 'Category this range applies to (fallback if no item match).';
COMMENT ON COLUMN public.expiry_date_ranges.item_pattern IS 'Regex pattern to match item names (fallback if no item/category match).';
COMMENT ON COLUMN public.expiry_date_ranges.storage_location IS 'Where the item is stored: fridge, freezer, pantry, or room_temp.';
COMMENT ON COLUMN public.expiry_date_ranges.min_days IS 'Minimum shelf life in days under proper storage.';
COMMENT ON COLUMN public.expiry_date_ranges.max_days IS 'Maximum shelf life in days under proper storage.';
COMMENT ON COLUMN public.expiry_date_ranges.typical_days IS 'Most common/recommended shelf life in days.';
COMMENT ON COLUMN public.expiry_date_ranges.notes IS 'Additional storage tips or warnings.';
COMMENT ON COLUMN public.expiry_date_ranges.source IS 'Data source: usda, fda, manual, or community.';
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_master_item_id ON public.expiry_date_ranges(master_item_id) WHERE master_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_category_id ON public.expiry_date_ranges(category_id) WHERE category_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_storage_location ON public.expiry_date_ranges(storage_location);
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_item_location
ON public.expiry_date_ranges(master_item_id, storage_location)
WHERE master_item_id IS NOT NULL;
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_category_location
ON public.expiry_date_ranges(category_id, storage_location)
WHERE category_id IS NOT NULL AND master_item_id IS NULL;
-- 62. Expiry Alerts - user notification preferences for expiry warnings
CREATE TABLE IF NOT EXISTS public.expiry_alerts (
expiry_alert_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
days_before_expiry INTEGER NOT NULL DEFAULT 3,
alert_method TEXT NOT NULL,
is_enabled BOOLEAN DEFAULT TRUE NOT NULL,
last_alert_sent_at TIMESTAMPTZ,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT expiry_alerts_days_before_check CHECK (days_before_expiry >= 0 AND days_before_expiry <= 30),
CONSTRAINT expiry_alerts_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
UNIQUE(user_id, alert_method)
);
COMMENT ON TABLE public.expiry_alerts IS 'User preferences for expiry date notifications and alerts.';
COMMENT ON COLUMN public.expiry_alerts.days_before_expiry IS 'How many days before expiry to send alert (0-30).';
COMMENT ON COLUMN public.expiry_alerts.alert_method IS 'How to notify: email, push, or in_app.';
COMMENT ON COLUMN public.expiry_alerts.is_enabled IS 'Whether this alert type is currently enabled.';
COMMENT ON COLUMN public.expiry_alerts.last_alert_sent_at IS 'Timestamp of the last alert sent to prevent duplicate notifications.';
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_user_id ON public.expiry_alerts(user_id);
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_enabled ON public.expiry_alerts(user_id, is_enabled) WHERE is_enabled = TRUE;
-- 63. Expiry Alert Log - tracks sent notifications
CREATE TABLE IF NOT EXISTS public.expiry_alert_log (
alert_log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL,
alert_type TEXT NOT NULL,
alert_method TEXT NOT NULL,
item_name TEXT NOT NULL,
expiry_date DATE,
days_until_expiry INTEGER,
sent_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT expiry_alert_log_type_check CHECK (alert_type IN ('expiring_soon', 'expired', 'expiry_reminder')),
CONSTRAINT expiry_alert_log_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
CONSTRAINT expiry_alert_log_item_name_check CHECK (TRIM(item_name) <> '')
);
COMMENT ON TABLE public.expiry_alert_log IS 'Log of all expiry notifications sent to users for auditing and duplicate prevention.';
COMMENT ON COLUMN public.expiry_alert_log.pantry_item_id IS 'The pantry item that triggered the alert (may be null if item deleted).';
COMMENT ON COLUMN public.expiry_alert_log.alert_type IS 'Type of alert: expiring_soon, expired, or expiry_reminder.';
COMMENT ON COLUMN public.expiry_alert_log.alert_method IS 'How the alert was sent: email, push, or in_app.';
COMMENT ON COLUMN public.expiry_alert_log.item_name IS 'Snapshot of item name at time of alert (in case item is deleted).';
COMMENT ON COLUMN public.expiry_alert_log.expiry_date IS 'The expiry date that triggered the alert.';
COMMENT ON COLUMN public.expiry_alert_log.days_until_expiry IS 'Days until expiry at time alert was sent (negative = expired).';
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_user_id ON public.expiry_alert_log(user_id);
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_pantry_item_id ON public.expiry_alert_log(pantry_item_id) WHERE pantry_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_sent_at ON public.expiry_alert_log(sent_at DESC);
-- ============================================================================
-- RECEIPT SCANNING ENHANCEMENT TABLES (64-65)
-- ============================================================================
-- 64. Receipt Processing Log - track OCR/AI processing attempts
CREATE TABLE IF NOT EXISTS public.receipt_processing_log (
log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
receipt_id BIGINT NOT NULL REFERENCES public.receipts(receipt_id) ON DELETE CASCADE,
processing_step TEXT NOT NULL,
status TEXT NOT NULL,
provider TEXT,
duration_ms INTEGER,
tokens_used INTEGER,
cost_cents INTEGER,
input_data JSONB,
output_data JSONB,
error_message TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT receipt_processing_log_step_check CHECK (processing_step IN (
'upload', 'ocr_extraction', 'text_parsing', 'store_detection',
'item_extraction', 'item_matching', 'price_parsing', 'finalization'
)),
CONSTRAINT receipt_processing_log_status_check CHECK (status IN ('started', 'completed', 'failed', 'skipped')),
CONSTRAINT receipt_processing_log_provider_check CHECK (provider IS NULL OR provider IN (
'tesseract', 'openai', 'anthropic', 'google_vision', 'aws_textract', 'internal'
))
);
COMMENT ON TABLE public.receipt_processing_log IS 'Detailed log of each processing step for receipts, useful for debugging and cost tracking.';
COMMENT ON COLUMN public.receipt_processing_log.processing_step IS 'Which processing step this log entry is for.';
COMMENT ON COLUMN public.receipt_processing_log.status IS 'Status of this step: started, completed, failed, skipped.';
COMMENT ON COLUMN public.receipt_processing_log.provider IS 'External service used: tesseract, openai, anthropic, etc.';
COMMENT ON COLUMN public.receipt_processing_log.duration_ms IS 'How long this step took in milliseconds.';
COMMENT ON COLUMN public.receipt_processing_log.tokens_used IS 'Number of API tokens used (for LLM providers).';
COMMENT ON COLUMN public.receipt_processing_log.cost_cents IS 'Estimated cost in cents for this processing step.';
COMMENT ON COLUMN public.receipt_processing_log.input_data IS 'Input data sent to the processing step (for debugging).';
COMMENT ON COLUMN public.receipt_processing_log.output_data IS 'Output data received from the processing step.';
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_receipt_id ON public.receipt_processing_log(receipt_id);
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_step_status ON public.receipt_processing_log(processing_step, status);
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_created_at ON public.receipt_processing_log(created_at DESC);
-- 65. Store-specific receipt patterns - help identify stores from receipt text
CREATE TABLE IF NOT EXISTS public.store_receipt_patterns (
pattern_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
pattern_type TEXT NOT NULL,
pattern_value TEXT NOT NULL,
priority INTEGER DEFAULT 0,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT store_receipt_patterns_type_check CHECK (pattern_type IN (
'header_regex', 'footer_regex', 'phone_number', 'address_fragment', 'store_number_format'
)),
CONSTRAINT store_receipt_patterns_value_check CHECK (TRIM(pattern_value) <> ''),
UNIQUE(store_id, pattern_type, pattern_value)
);
COMMENT ON TABLE public.store_receipt_patterns IS 'Patterns to help identify stores from receipt text and format.';
COMMENT ON COLUMN public.store_receipt_patterns.pattern_type IS 'Type of pattern: header_regex, footer_regex, phone_number, etc.';
COMMENT ON COLUMN public.store_receipt_patterns.pattern_value IS 'The actual pattern (regex or literal text).';
COMMENT ON COLUMN public.store_receipt_patterns.priority IS 'Higher priority patterns are checked first.';
COMMENT ON COLUMN public.store_receipt_patterns.is_active IS 'Whether this pattern is currently in use.';
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_store_id ON public.store_receipt_patterns(store_id);
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_active ON public.store_receipt_patterns(pattern_type, is_active, priority DESC)
WHERE is_active = TRUE;

View File

@@ -698,6 +698,7 @@ CREATE INDEX IF NOT EXISTS idx_planned_meals_menu_plan_id ON public.planned_meal
CREATE INDEX IF NOT EXISTS idx_planned_meals_recipe_id ON public.planned_meals(recipe_id);
-- 37. Track the grocery items a user currently has in their pantry.
-- NOTE: receipt_item_id FK is added later via ALTER TABLE because receipt_items is defined after this table.
CREATE TABLE IF NOT EXISTS public.pantry_items (
pantry_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
@@ -707,16 +708,38 @@ CREATE TABLE IF NOT EXISTS public.pantry_items (
best_before_date DATE,
pantry_location_id BIGINT REFERENCES public.pantry_locations(pantry_location_id) ON DELETE SET NULL,
notification_sent_at TIMESTAMPTZ,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Columns from migration 002_expiry_tracking.sql
purchase_date DATE,
source TEXT DEFAULT 'manual',
receipt_item_id BIGINT, -- FK added later via ALTER TABLE
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
expiry_source TEXT,
is_consumed BOOLEAN DEFAULT FALSE,
consumed_at TIMESTAMPTZ,
UNIQUE(user_id, master_item_id, unit)
);
COMMENT ON TABLE public.pantry_items IS 'Tracks a user''s personal inventory of grocery items to enable smart shopping lists.';
COMMENT ON COLUMN public.pantry_items.quantity IS 'The current amount of the item. Convention: use grams for weight, mL for volume where applicable.';
COMMENT ON COLUMN public.pantry_items.pantry_location_id IS 'Links the item to a user-defined location like "Fridge" or "Freezer".';
COMMENT ON COLUMN public.pantry_items.unit IS 'e.g., ''g'', ''ml'', ''items''. Should align with recipe_ingredients.unit and quantity convention.';
COMMENT ON COLUMN public.pantry_items.purchase_date IS 'Date the item was purchased (from receipt or manual entry).';
COMMENT ON COLUMN public.pantry_items.receipt_item_id IS 'Link to receipt_items if this pantry item was created from a receipt scan.';
COMMENT ON COLUMN public.pantry_items.product_id IS 'Link to products if this pantry item was created from a UPC scan.';
COMMENT ON COLUMN public.pantry_items.expiry_source IS 'How expiry was determined: manual, calculated, package, receipt.';
COMMENT ON COLUMN public.pantry_items.is_consumed IS 'Whether the item has been fully consumed.';
COMMENT ON COLUMN public.pantry_items.consumed_at IS 'When the item was marked as consumed.';
CREATE INDEX IF NOT EXISTS idx_pantry_items_user_id ON public.pantry_items(user_id);
CREATE INDEX IF NOT EXISTS idx_pantry_items_master_item_id ON public.pantry_items(master_item_id);
CREATE INDEX IF NOT EXISTS idx_pantry_items_pantry_location_id ON public.pantry_items(pantry_location_id);
CREATE INDEX IF NOT EXISTS idx_pantry_items_best_before_date ON public.pantry_items(best_before_date)
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
CREATE INDEX IF NOT EXISTS idx_pantry_items_expiring_soon ON public.pantry_items(user_id, best_before_date)
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
CREATE INDEX IF NOT EXISTS idx_pantry_items_receipt_item_id ON public.pantry_items(receipt_item_id)
WHERE receipt_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_pantry_items_product_id ON public.pantry_items(product_id)
WHERE product_id IS NOT NULL;
-- 38. Store password reset tokens.
CREATE TABLE IF NOT EXISTS public.password_reset_tokens (
@@ -959,11 +982,21 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Column from migration 002_expiry_tracking.sql
upc_code TEXT,
CONSTRAINT receipt_items_raw_item_description_check CHECK (TRIM(raw_item_description) <> '')
);
COMMENT ON TABLE public.receipt_items IS 'Stores individual line items extracted from a user receipt.';
COMMENT ON COLUMN public.receipt_items.upc_code IS 'UPC code if extracted from receipt or matched during processing.';
CREATE INDEX IF NOT EXISTS idx_receipt_items_receipt_id ON public.receipt_items(receipt_id);
CREATE INDEX IF NOT EXISTS idx_receipt_items_master_item_id ON public.receipt_items(master_item_id);
CREATE INDEX IF NOT EXISTS idx_receipt_items_upc_code ON public.receipt_items(upc_code)
WHERE upc_code IS NOT NULL;
-- Add FK constraint for pantry_items.receipt_item_id (deferred because receipt_items is defined after pantry_items)
ALTER TABLE public.pantry_items
ADD CONSTRAINT fk_pantry_items_receipt_item_id
FOREIGN KEY (receipt_item_id) REFERENCES public.receipt_items(receipt_item_id) ON DELETE SET NULL;
-- 54. Store schema metadata to detect changes during deployment.
CREATE TABLE IF NOT EXISTS public.schema_info (
@@ -1033,6 +1066,235 @@ CREATE INDEX IF NOT EXISTS idx_user_achievements_user_id ON public.user_achievem
CREATE INDEX IF NOT EXISTS idx_user_achievements_achievement_id ON public.user_achievements(achievement_id);
-- ============================================================================
-- UPC SCANNING FEATURE TABLES (59-60)
-- ============================================================================
-- 59. UPC Scan History - tracks all UPC scans performed by users
-- This table provides an audit trail and allows users to see their scan history
CREATE TABLE IF NOT EXISTS public.upc_scan_history (
scan_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-- Scan history is deleted together with its owning user.
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
upc_code TEXT NOT NULL,
-- SET NULL (not CASCADE) so the audit row survives product deletion.
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
scan_source TEXT NOT NULL,
-- NUMERIC(5,4) holds 0.0000-1.0000; range enforced by the check constraint below.
scan_confidence NUMERIC(5,4),
raw_image_path TEXT,
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- NOTE(review): presumably kept current by the shared handle_updated_at trigger
-- installed later in this schema -- confirm the trigger DO block covers this table.
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- 8-14 digits (per the COMMENT below) spans UPC-E/EAN-8 up to EAN-13/GTIN-14 lengths.
CONSTRAINT upc_scan_history_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
CONSTRAINT upc_scan_history_scan_source_check CHECK (scan_source IN ('image_upload', 'manual_entry', 'phone_app', 'camera_scan')),
CONSTRAINT upc_scan_history_scan_confidence_check CHECK (scan_confidence IS NULL OR (scan_confidence >= 0 AND scan_confidence <= 1))
);
COMMENT ON TABLE public.upc_scan_history IS 'Audit trail of all UPC barcode scans performed by users, tracking scan source and results.';
COMMENT ON COLUMN public.upc_scan_history.upc_code IS 'The scanned UPC/EAN barcode (8-14 digits).';
COMMENT ON COLUMN public.upc_scan_history.product_id IS 'Reference to the matched product, if found in our database.';
COMMENT ON COLUMN public.upc_scan_history.scan_source IS 'How the scan was performed: image_upload, manual_entry, phone_app, or camera_scan.';
COMMENT ON COLUMN public.upc_scan_history.scan_confidence IS 'Confidence score from barcode detection (0.0-1.0), null for manual entry.';
COMMENT ON COLUMN public.upc_scan_history.raw_image_path IS 'Path to the uploaded barcode image, if applicable.';
COMMENT ON COLUMN public.upc_scan_history.lookup_successful IS 'Whether the UPC was successfully matched to a product (internal or external).';
-- created_at DESC supports newest-first history listings; the product_id index is
-- partial because NULL product_id rows (failed matches) are never joined on it.
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_user_id ON public.upc_scan_history(user_id);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_upc_code ON public.upc_scan_history(upc_code);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_created_at ON public.upc_scan_history(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_product_id ON public.upc_scan_history(product_id) WHERE product_id IS NOT NULL;
-- 60. UPC External Lookups - cache for external UPC database API responses
CREATE TABLE IF NOT EXISTS public.upc_external_lookups (
lookup_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-- One cached row per barcode; UNIQUE also creates an implicit unique index
-- that serves equality lookups on upc_code.
upc_code TEXT NOT NULL UNIQUE,
product_name TEXT,
brand_name TEXT,
category TEXT,
description TEXT,
image_url TEXT,
external_source TEXT NOT NULL,
-- Full raw API response retained for re-parsing without another API call.
lookup_data JSONB,
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT upc_external_lookups_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
CONSTRAINT upc_external_lookups_external_source_check CHECK (external_source IN ('openfoodfacts', 'upcitemdb', 'manual', 'unknown')),
-- A successful lookup must carry at least a product name.
CONSTRAINT upc_external_lookups_name_check CHECK (NOT lookup_successful OR product_name IS NOT NULL)
);
COMMENT ON TABLE public.upc_external_lookups IS 'Cache for external UPC database API responses to reduce API calls and improve lookup speed.';
COMMENT ON COLUMN public.upc_external_lookups.upc_code IS 'The UPC/EAN barcode that was looked up.';
COMMENT ON COLUMN public.upc_external_lookups.product_name IS 'Product name returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.brand_name IS 'Brand name returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.category IS 'Product category returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.description IS 'Product description returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.image_url IS 'Product image URL returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.external_source IS 'Which external API provided this data: openfoodfacts, upcitemdb, manual, unknown.';
COMMENT ON COLUMN public.upc_external_lookups.lookup_data IS 'Full raw JSON response from the external API for reference.';
COMMENT ON COLUMN public.upc_external_lookups.lookup_successful IS 'Whether the external lookup found product information.';
-- Removed redundant idx_upc_external_lookups_upc_code: the UNIQUE constraint on
-- upc_code already creates an implicit unique b-tree index on that column, so a
-- second index only added write overhead and storage.
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_external_source ON public.upc_external_lookups(external_source);
-- Add index to existing products.upc_code for faster lookups
CREATE INDEX IF NOT EXISTS idx_products_upc_code ON public.products(upc_code) WHERE upc_code IS NOT NULL;
-- ============================================================================
-- EXPIRY DATE TRACKING FEATURE TABLES (61-63)
-- ============================================================================
-- 61. Expiry Date Ranges - reference table for typical shelf life
CREATE TABLE IF NOT EXISTS public.expiry_date_ranges (
expiry_range_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-- Match precedence (per the COMMENTs below): specific item, then category, then
-- regex pattern. The identifier_check constraint only requires at least one.
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE CASCADE,
item_pattern TEXT,
storage_location TEXT NOT NULL,
min_days INTEGER NOT NULL,
max_days INTEGER NOT NULL,
typical_days INTEGER NOT NULL,
notes TEXT,
source TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT expiry_date_ranges_storage_location_check CHECK (storage_location IN ('fridge', 'freezer', 'pantry', 'room_temp')),
-- The three checks below jointly enforce 0 <= min <= typical <= max.
CONSTRAINT expiry_date_ranges_min_days_check CHECK (min_days >= 0),
CONSTRAINT expiry_date_ranges_max_days_check CHECK (max_days >= min_days),
CONSTRAINT expiry_date_ranges_typical_days_check CHECK (typical_days >= min_days AND typical_days <= max_days),
CONSTRAINT expiry_date_ranges_identifier_check CHECK (
master_item_id IS NOT NULL OR category_id IS NOT NULL OR item_pattern IS NOT NULL
),
CONSTRAINT expiry_date_ranges_source_check CHECK (source IS NULL OR source IN ('usda', 'fda', 'manual', 'community'))
);
COMMENT ON TABLE public.expiry_date_ranges IS 'Reference table storing typical shelf life for grocery items based on storage location.';
COMMENT ON COLUMN public.expiry_date_ranges.master_item_id IS 'Specific item this range applies to (most specific).';
COMMENT ON COLUMN public.expiry_date_ranges.category_id IS 'Category this range applies to (fallback if no item match).';
COMMENT ON COLUMN public.expiry_date_ranges.item_pattern IS 'Regex pattern to match item names (fallback if no item/category match).';
COMMENT ON COLUMN public.expiry_date_ranges.storage_location IS 'Where the item is stored: fridge, freezer, pantry, or room_temp.';
COMMENT ON COLUMN public.expiry_date_ranges.min_days IS 'Minimum shelf life in days under proper storage.';
COMMENT ON COLUMN public.expiry_date_ranges.max_days IS 'Maximum shelf life in days under proper storage.';
COMMENT ON COLUMN public.expiry_date_ranges.typical_days IS 'Most common/recommended shelf life in days.';
COMMENT ON COLUMN public.expiry_date_ranges.notes IS 'Additional storage tips or warnings.';
COMMENT ON COLUMN public.expiry_date_ranges.source IS 'Data source: usda, fda, manual, or community.';
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_master_item_id ON public.expiry_date_ranges(master_item_id) WHERE master_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_category_id ON public.expiry_date_ranges(category_id) WHERE category_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_storage_location ON public.expiry_date_ranges(storage_location);
-- Partial unique indexes: one range per item+location, and one per category+location
-- for rows that are category-only.
-- NOTE(review): rows identified only by item_pattern (both FK columns NULL) have no
-- uniqueness guarantee per storage_location -- confirm duplicates are acceptable there.
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_item_location
ON public.expiry_date_ranges(master_item_id, storage_location)
WHERE master_item_id IS NOT NULL;
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_category_location
ON public.expiry_date_ranges(category_id, storage_location)
WHERE category_id IS NOT NULL AND master_item_id IS NULL;
-- 62. Expiry Alerts - user notification preferences for expiry warnings
CREATE TABLE IF NOT EXISTS public.expiry_alerts (
expiry_alert_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
days_before_expiry INTEGER NOT NULL DEFAULT 3,
alert_method TEXT NOT NULL,
is_enabled BOOLEAN DEFAULT TRUE NOT NULL,
-- Used to suppress duplicate notifications (see COMMENT below).
last_alert_sent_at TIMESTAMPTZ,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT expiry_alerts_days_before_check CHECK (days_before_expiry >= 0 AND days_before_expiry <= 30),
CONSTRAINT expiry_alerts_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
-- At most one preference row per user per delivery channel.
UNIQUE(user_id, alert_method)
);
COMMENT ON TABLE public.expiry_alerts IS 'User preferences for expiry date notifications and alerts.';
COMMENT ON COLUMN public.expiry_alerts.days_before_expiry IS 'How many days before expiry to send alert (0-30).';
COMMENT ON COLUMN public.expiry_alerts.alert_method IS 'How to notify: email, push, or in_app.';
COMMENT ON COLUMN public.expiry_alerts.is_enabled IS 'Whether this alert type is currently enabled.';
COMMENT ON COLUMN public.expiry_alerts.last_alert_sent_at IS 'Timestamp of the last alert sent to prevent duplicate notifications.';
-- The second index is partial: it only covers currently-enabled alert rows.
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_user_id ON public.expiry_alerts(user_id);
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_enabled ON public.expiry_alerts(user_id, is_enabled) WHERE is_enabled = TRUE;
-- 63. Expiry Alert Log - tracks sent notifications
CREATE TABLE IF NOT EXISTS public.expiry_alert_log (
alert_log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
-- SET NULL (not CASCADE) so the log entry outlives the pantry item;
-- item_name below snapshots the name for exactly that case.
pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL,
alert_type TEXT NOT NULL,
alert_method TEXT NOT NULL,
item_name TEXT NOT NULL,
expiry_date DATE,
-- Negative values mean the item was already expired when the alert was sent
-- (see COMMENT below).
days_until_expiry INTEGER,
sent_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT expiry_alert_log_type_check CHECK (alert_type IN ('expiring_soon', 'expired', 'expiry_reminder')),
CONSTRAINT expiry_alert_log_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
CONSTRAINT expiry_alert_log_item_name_check CHECK (TRIM(item_name) <> '')
);
COMMENT ON TABLE public.expiry_alert_log IS 'Log of all expiry notifications sent to users for auditing and duplicate prevention.';
COMMENT ON COLUMN public.expiry_alert_log.pantry_item_id IS 'The pantry item that triggered the alert (may be null if item deleted).';
COMMENT ON COLUMN public.expiry_alert_log.alert_type IS 'Type of alert: expiring_soon, expired, or expiry_reminder.';
COMMENT ON COLUMN public.expiry_alert_log.alert_method IS 'How the alert was sent: email, push, or in_app.';
COMMENT ON COLUMN public.expiry_alert_log.item_name IS 'Snapshot of item name at time of alert (in case item is deleted).';
COMMENT ON COLUMN public.expiry_alert_log.expiry_date IS 'The expiry date that triggered the alert.';
COMMENT ON COLUMN public.expiry_alert_log.days_until_expiry IS 'Days until expiry at time alert was sent (negative = expired).';
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_user_id ON public.expiry_alert_log(user_id);
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_pantry_item_id ON public.expiry_alert_log(pantry_item_id) WHERE pantry_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_sent_at ON public.expiry_alert_log(sent_at DESC);
-- ============================================================================
-- RECEIPT SCANNING ENHANCEMENT TABLES (64-65)
-- ============================================================================
-- 64. Receipt Processing Log - track OCR/AI processing attempts
CREATE TABLE IF NOT EXISTS public.receipt_processing_log (
log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
receipt_id BIGINT NOT NULL REFERENCES public.receipts(receipt_id) ON DELETE CASCADE,
processing_step TEXT NOT NULL,
status TEXT NOT NULL,
provider TEXT,
duration_ms INTEGER,
tokens_used INTEGER,
-- Whole cents as INTEGER; avoids floating-point money arithmetic.
cost_cents INTEGER,
input_data JSONB,
output_data JSONB,
-- NOTE(review): presumably populated when status = 'failed' -- confirm with callers.
-- Also the only column here without a COMMENT ON entry below.
error_message TEXT,
-- No updated_at: this table appears to be an append-only log.
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT receipt_processing_log_step_check CHECK (processing_step IN (
'upload', 'ocr_extraction', 'text_parsing', 'store_detection',
'item_extraction', 'item_matching', 'price_parsing', 'finalization'
)),
CONSTRAINT receipt_processing_log_status_check CHECK (status IN ('started', 'completed', 'failed', 'skipped')),
CONSTRAINT receipt_processing_log_provider_check CHECK (provider IS NULL OR provider IN (
'tesseract', 'openai', 'anthropic', 'google_vision', 'aws_textract', 'internal'
))
);
COMMENT ON TABLE public.receipt_processing_log IS 'Detailed log of each processing step for receipts, useful for debugging and cost tracking.';
COMMENT ON COLUMN public.receipt_processing_log.processing_step IS 'Which processing step this log entry is for.';
COMMENT ON COLUMN public.receipt_processing_log.status IS 'Status of this step: started, completed, failed, skipped.';
COMMENT ON COLUMN public.receipt_processing_log.provider IS 'External service used: tesseract, openai, anthropic, etc.';
COMMENT ON COLUMN public.receipt_processing_log.duration_ms IS 'How long this step took in milliseconds.';
COMMENT ON COLUMN public.receipt_processing_log.tokens_used IS 'Number of API tokens used (for LLM providers).';
COMMENT ON COLUMN public.receipt_processing_log.cost_cents IS 'Estimated cost in cents for this processing step.';
COMMENT ON COLUMN public.receipt_processing_log.input_data IS 'Input data sent to the processing step (for debugging).';
COMMENT ON COLUMN public.receipt_processing_log.output_data IS 'Output data received from the processing step.';
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_receipt_id ON public.receipt_processing_log(receipt_id);
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_step_status ON public.receipt_processing_log(processing_step, status);
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_created_at ON public.receipt_processing_log(created_at DESC);
-- 65. Store-specific receipt patterns - help identify stores from receipt text
CREATE TABLE IF NOT EXISTS public.store_receipt_patterns (
pattern_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
pattern_type TEXT NOT NULL,
pattern_value TEXT NOT NULL,
-- NOT NULL added: under "priority DESC" (index below) a NULL priority sorts
-- NULLS FIRST, i.e. would silently outrank every real priority.
priority INTEGER DEFAULT 0 NOT NULL,
-- NOT NULL added: a NULL is_active row is neither active nor explicitly
-- deactivated, and drops out of the partial index below; matches the
-- is_enabled BOOLEAN DEFAULT TRUE NOT NULL convention used by expiry_alerts.
is_active BOOLEAN DEFAULT TRUE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
CONSTRAINT store_receipt_patterns_type_check CHECK (pattern_type IN (
'header_regex', 'footer_regex', 'phone_number', 'address_fragment', 'store_number_format'
)),
CONSTRAINT store_receipt_patterns_value_check CHECK (TRIM(pattern_value) <> ''),
-- The same pattern may be reused across stores, but not duplicated within one.
UNIQUE(store_id, pattern_type, pattern_value)
);
COMMENT ON TABLE public.store_receipt_patterns IS 'Patterns to help identify stores from receipt text and format.';
COMMENT ON COLUMN public.store_receipt_patterns.pattern_type IS 'Type of pattern: header_regex, footer_regex, phone_number, etc.';
COMMENT ON COLUMN public.store_receipt_patterns.pattern_value IS 'The actual pattern (regex or literal text).';
COMMENT ON COLUMN public.store_receipt_patterns.priority IS 'Higher priority patterns are checked first.';
COMMENT ON COLUMN public.store_receipt_patterns.is_active IS 'Whether this pattern is currently in use.';
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_store_id ON public.store_receipt_patterns(store_id);
-- Partial index serving active-pattern scans in priority order.
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_active ON public.store_receipt_patterns(pattern_type, is_active, priority DESC)
WHERE is_active = TRUE;
-- ============================================================================
-- PART 2: DATA SEEDING
@@ -2546,7 +2808,8 @@ CREATE TRIGGER on_auth_user_created
FOR EACH ROW EXECUTE FUNCTION public.handle_new_user();
-- 2. Create a reusable function to automatically update 'updated_at' columns.
DROP FUNCTION IF EXISTS public.handle_updated_at();
-- CASCADE drops dependent triggers; they are recreated by the DO block below
DROP FUNCTION IF EXISTS public.handle_updated_at() CASCADE;
CREATE OR REPLACE FUNCTION public.handle_updated_at()
RETURNS TRIGGER AS $$

View File

@@ -0,0 +1,90 @@
-- sql/migrations/001_upc_scanning.sql
-- ============================================================================
-- UPC SCANNING FEATURE MIGRATION
-- ============================================================================
-- Purpose:
-- This migration adds tables to support UPC barcode scanning functionality:
-- 1. upc_scan_history - Audit trail of all UPC scans performed by users
-- 2. upc_external_lookups - Cache for external UPC database API responses
--
-- The products.upc_code column already exists in the schema.
-- These tables extend the functionality to track scans and cache lookups.
-- ============================================================================
-- 1. UPC Scan History - tracks all UPC scans performed by users
-- This table provides an audit trail and allows users to see their scan history
-- NOTE(review): this migration duplicates the upc_scan_history definition in the
-- consolidated schema file -- keep the two definitions in sync.
CREATE TABLE IF NOT EXISTS public.upc_scan_history (
scan_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-- Scan history is deleted together with its owning user.
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
upc_code TEXT NOT NULL,
-- SET NULL (not CASCADE) so the audit row survives product deletion.
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
scan_source TEXT NOT NULL,
scan_confidence NUMERIC(5,4),
raw_image_path TEXT,
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- NOTE(review): presumably kept current by the schema's shared handle_updated_at
-- trigger -- confirm this migration's deployment also wires up that trigger.
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Validate UPC code format (8-14 digits for UPC-A, UPC-E, EAN-8, EAN-13, etc.)
CONSTRAINT upc_scan_history_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
-- Validate scan source is one of the allowed values
CONSTRAINT upc_scan_history_scan_source_check CHECK (scan_source IN ('image_upload', 'manual_entry', 'phone_app', 'camera_scan')),
-- Confidence score must be between 0 and 1 if provided
CONSTRAINT upc_scan_history_scan_confidence_check CHECK (scan_confidence IS NULL OR (scan_confidence >= 0 AND scan_confidence <= 1))
);
COMMENT ON TABLE public.upc_scan_history IS 'Audit trail of all UPC barcode scans performed by users, tracking scan source and results.';
COMMENT ON COLUMN public.upc_scan_history.upc_code IS 'The scanned UPC/EAN barcode (8-14 digits).';
COMMENT ON COLUMN public.upc_scan_history.product_id IS 'Reference to the matched product, if found in our database.';
COMMENT ON COLUMN public.upc_scan_history.scan_source IS 'How the scan was performed: image_upload, manual_entry, phone_app, or camera_scan.';
COMMENT ON COLUMN public.upc_scan_history.scan_confidence IS 'Confidence score from barcode detection (0.0-1.0), null for manual entry.';
COMMENT ON COLUMN public.upc_scan_history.raw_image_path IS 'Path to the uploaded barcode image, if applicable.';
COMMENT ON COLUMN public.upc_scan_history.lookup_successful IS 'Whether the UPC was successfully matched to a product (internal or external).';
-- Indexes for upc_scan_history
-- created_at DESC supports newest-first history listings; product_id is partial
-- because unmatched scans (NULL product_id) are never joined on it.
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_user_id ON public.upc_scan_history(user_id);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_upc_code ON public.upc_scan_history(upc_code);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_created_at ON public.upc_scan_history(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_upc_scan_history_product_id ON public.upc_scan_history(product_id) WHERE product_id IS NOT NULL;
-- 2. UPC External Lookups - cache for external UPC database API responses
-- This table caches results from external UPC databases (OpenFoodFacts, UPC Item DB, etc.)
-- to reduce API calls and improve response times for repeated lookups
CREATE TABLE IF NOT EXISTS public.upc_external_lookups (
lookup_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-- One cached row per barcode; UNIQUE also creates an implicit unique index
-- that serves equality lookups on upc_code.
upc_code TEXT NOT NULL UNIQUE,
product_name TEXT,
brand_name TEXT,
category TEXT,
description TEXT,
image_url TEXT,
external_source TEXT NOT NULL,
-- Full raw API response retained for re-parsing without another API call.
lookup_data JSONB,
lookup_successful BOOLEAN DEFAULT FALSE NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Validate UPC code format
CONSTRAINT upc_external_lookups_upc_code_check CHECK (upc_code ~ '^[0-9]{8,14}$'),
-- Validate external source is one of the supported APIs
CONSTRAINT upc_external_lookups_external_source_check CHECK (external_source IN ('openfoodfacts', 'upcitemdb', 'manual', 'unknown')),
-- If lookup was successful, product_name should be present
CONSTRAINT upc_external_lookups_name_check CHECK (NOT lookup_successful OR product_name IS NOT NULL)
);
COMMENT ON TABLE public.upc_external_lookups IS 'Cache for external UPC database API responses to reduce API calls and improve lookup speed.';
COMMENT ON COLUMN public.upc_external_lookups.upc_code IS 'The UPC/EAN barcode that was looked up.';
COMMENT ON COLUMN public.upc_external_lookups.product_name IS 'Product name returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.brand_name IS 'Brand name returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.category IS 'Product category returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.description IS 'Product description returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.image_url IS 'Product image URL returned from external API.';
COMMENT ON COLUMN public.upc_external_lookups.external_source IS 'Which external API provided this data: openfoodfacts, upcitemdb, manual, unknown.';
COMMENT ON COLUMN public.upc_external_lookups.lookup_data IS 'Full raw JSON response from the external API for reference.';
COMMENT ON COLUMN public.upc_external_lookups.lookup_successful IS 'Whether the external lookup found product information.';
-- Index for upc_external_lookups
-- Removed redundant idx_upc_external_lookups_upc_code: the UNIQUE constraint on
-- upc_code already creates an implicit unique b-tree index on that column, so a
-- second index only added write overhead and storage.
CREATE INDEX IF NOT EXISTS idx_upc_external_lookups_external_source ON public.upc_external_lookups(external_source);
-- 3. Add index to existing products.upc_code if not exists
-- This speeds up lookups when matching scanned UPCs to existing products
CREATE INDEX IF NOT EXISTS idx_products_upc_code ON public.products(upc_code) WHERE upc_code IS NOT NULL;

View File

@@ -0,0 +1,189 @@
-- sql/migrations/002_expiry_tracking.sql
-- ============================================================================
-- EXPIRY DATE TRACKING FEATURE MIGRATION
-- ============================================================================
-- Purpose:
-- This migration adds tables and enhancements for expiry date tracking:
-- 1. expiry_date_ranges - Reference table for typical shelf life by item/category
-- 2. expiry_alerts - User notification preferences for expiry warnings
-- 3. Enhancements to pantry_items for better expiry tracking
--
-- Existing tables used:
-- - pantry_items (already has best_before_date)
-- - pantry_locations (already exists for fridge/freezer/pantry)
-- - receipts and receipt_items (already exist for receipt scanning)
-- ============================================================================
-- 1. Expiry Date Ranges - reference table for typical shelf life
-- This table stores expected shelf life for items based on storage location
-- Used to auto-calculate expiry dates when users add items to inventory
-- Matching precedence (most specific first): master_item_id, then category_id,
-- then item_pattern (regex matched against the item name).
CREATE TABLE IF NOT EXISTS public.expiry_date_ranges (
expiry_range_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE CASCADE,
item_pattern TEXT,
storage_location TEXT NOT NULL,
min_days INTEGER NOT NULL,
max_days INTEGER NOT NULL,
typical_days INTEGER NOT NULL,
notes TEXT,
source TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Validate storage location is one of the allowed values
CONSTRAINT expiry_date_ranges_storage_location_check CHECK (storage_location IN ('fridge', 'freezer', 'pantry', 'room_temp')),
-- Validate day ranges are logical: 0 <= min <= typical <= max
CONSTRAINT expiry_date_ranges_min_days_check CHECK (min_days >= 0),
CONSTRAINT expiry_date_ranges_max_days_check CHECK (max_days >= min_days),
CONSTRAINT expiry_date_ranges_typical_days_check CHECK (typical_days >= min_days AND typical_days <= max_days),
-- At least one identifier must be present
CONSTRAINT expiry_date_ranges_identifier_check CHECK (
master_item_id IS NOT NULL OR category_id IS NOT NULL OR item_pattern IS NOT NULL
),
-- Validate source is one of the known sources
CONSTRAINT expiry_date_ranges_source_check CHECK (source IS NULL OR source IN ('usda', 'fda', 'manual', 'community'))
);
COMMENT ON TABLE public.expiry_date_ranges IS 'Reference table storing typical shelf life for grocery items based on storage location.';
COMMENT ON COLUMN public.expiry_date_ranges.master_item_id IS 'Specific item this range applies to (most specific).';
COMMENT ON COLUMN public.expiry_date_ranges.category_id IS 'Category this range applies to (fallback if no item match).';
COMMENT ON COLUMN public.expiry_date_ranges.item_pattern IS 'Regex pattern to match item names (fallback if no item/category match).';
COMMENT ON COLUMN public.expiry_date_ranges.storage_location IS 'Where the item is stored: fridge, freezer, pantry, or room_temp.';
COMMENT ON COLUMN public.expiry_date_ranges.min_days IS 'Minimum shelf life in days under proper storage.';
COMMENT ON COLUMN public.expiry_date_ranges.max_days IS 'Maximum shelf life in days under proper storage.';
COMMENT ON COLUMN public.expiry_date_ranges.typical_days IS 'Most common/recommended shelf life in days.';
COMMENT ON COLUMN public.expiry_date_ranges.notes IS 'Additional storage tips or warnings.';
COMMENT ON COLUMN public.expiry_date_ranges.source IS 'Data source: usda, fda, manual, or community.';
-- Indexes for expiry_date_ranges (partial where the column is nullable)
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_master_item_id ON public.expiry_date_ranges(master_item_id) WHERE master_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_category_id ON public.expiry_date_ranges(category_id) WHERE category_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_date_ranges_storage_location ON public.expiry_date_ranges(storage_location);
-- Unique constraint to prevent duplicate entries for same item/location combo
-- NOTE(review): rows identified only by item_pattern are covered by neither
-- partial unique index below, so duplicate (pattern, location) rows remain
-- possible — confirm whether that is acceptable.
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_item_location
ON public.expiry_date_ranges(master_item_id, storage_location)
WHERE master_item_id IS NOT NULL;
CREATE UNIQUE INDEX IF NOT EXISTS idx_expiry_date_ranges_unique_category_location
ON public.expiry_date_ranges(category_id, storage_location)
WHERE category_id IS NOT NULL AND master_item_id IS NULL;
-- 2. Expiry Alerts - user notification preferences for expiry warnings
-- This table stores user preferences for when and how to receive expiry notifications
-- One row per (user, delivery method); enforced by the UNIQUE constraint below.
CREATE TABLE IF NOT EXISTS public.expiry_alerts (
expiry_alert_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
days_before_expiry INTEGER NOT NULL DEFAULT 3,
alert_method TEXT NOT NULL,
is_enabled BOOLEAN DEFAULT TRUE NOT NULL,
last_alert_sent_at TIMESTAMPTZ,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Validate days before expiry is reasonable (0-30 inclusive)
CONSTRAINT expiry_alerts_days_before_check CHECK (days_before_expiry >= 0 AND days_before_expiry <= 30),
-- Validate alert method is one of the allowed values
CONSTRAINT expiry_alerts_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
-- Each user can only have one setting per alert method
UNIQUE(user_id, alert_method)
);
COMMENT ON TABLE public.expiry_alerts IS 'User preferences for expiry date notifications and alerts.';
COMMENT ON COLUMN public.expiry_alerts.days_before_expiry IS 'How many days before expiry to send alert (0-30).';
COMMENT ON COLUMN public.expiry_alerts.alert_method IS 'How to notify: email, push, or in_app.';
COMMENT ON COLUMN public.expiry_alerts.is_enabled IS 'Whether this alert type is currently enabled.';
COMMENT ON COLUMN public.expiry_alerts.last_alert_sent_at IS 'Timestamp of the last alert sent to prevent duplicate notifications.';
-- Indexes for expiry_alerts
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_user_id ON public.expiry_alerts(user_id);
-- Partial index serving the common query "enabled alerts for this user".
CREATE INDEX IF NOT EXISTS idx_expiry_alerts_enabled ON public.expiry_alerts(user_id, is_enabled) WHERE is_enabled = TRUE;
-- 3. Expiry Alert Log - tracks sent notifications (for auditing and preventing duplicates)
-- Append-only: rows record each alert as sent; pantry_item_id is nullable with
-- ON DELETE SET NULL so the audit row survives item deletion (item_name keeps
-- a snapshot for display).
CREATE TABLE IF NOT EXISTS public.expiry_alert_log (
alert_log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL,
alert_type TEXT NOT NULL,
alert_method TEXT NOT NULL,
item_name TEXT NOT NULL,
expiry_date DATE,
days_until_expiry INTEGER,
sent_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Validate alert type
CONSTRAINT expiry_alert_log_type_check CHECK (alert_type IN ('expiring_soon', 'expired', 'expiry_reminder')),
-- Validate alert method
CONSTRAINT expiry_alert_log_method_check CHECK (alert_method IN ('email', 'push', 'in_app')),
-- Validate item_name is not empty (whitespace-only names rejected)
CONSTRAINT expiry_alert_log_item_name_check CHECK (TRIM(item_name) <> '')
);
COMMENT ON TABLE public.expiry_alert_log IS 'Log of all expiry notifications sent to users for auditing and duplicate prevention.';
COMMENT ON COLUMN public.expiry_alert_log.pantry_item_id IS 'The pantry item that triggered the alert (may be null if item deleted).';
COMMENT ON COLUMN public.expiry_alert_log.alert_type IS 'Type of alert: expiring_soon, expired, or expiry_reminder.';
COMMENT ON COLUMN public.expiry_alert_log.alert_method IS 'How the alert was sent: email, push, or in_app.';
COMMENT ON COLUMN public.expiry_alert_log.item_name IS 'Snapshot of item name at time of alert (in case item is deleted).';
COMMENT ON COLUMN public.expiry_alert_log.expiry_date IS 'The expiry date that triggered the alert.';
COMMENT ON COLUMN public.expiry_alert_log.days_until_expiry IS 'Days until expiry at time alert was sent (negative = expired).';
-- Indexes for expiry_alert_log; sent_at DESC supports "most recent alerts" queries.
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_user_id ON public.expiry_alert_log(user_id);
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_pantry_item_id ON public.expiry_alert_log(pantry_item_id) WHERE pantry_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_expiry_alert_log_sent_at ON public.expiry_alert_log(sent_at DESC);
-- 4. Enhancements to pantry_items table
-- Add columns to better support expiry tracking from receipts and UPC scans
-- All alterations use ADD COLUMN IF NOT EXISTS so the migration is idempotent.
-- Add purchase_date column to track when item was bought
ALTER TABLE public.pantry_items
ADD COLUMN IF NOT EXISTS purchase_date DATE;
COMMENT ON COLUMN public.pantry_items.purchase_date IS 'Date the item was purchased (from receipt or manual entry).';
-- Add source column to track how item was added
ALTER TABLE public.pantry_items
ADD COLUMN IF NOT EXISTS source TEXT DEFAULT 'manual';
-- NOTE(review): the original note here claimed PostgreSQL cannot add a CHECK
-- constraint via ALTER TABLE — it can (ALTER TABLE ... ADD CONSTRAINT ... CHECK).
-- Values are currently validated in the application; consider adding a CHECK
-- once the allowed 'source' values are finalized.
-- Add receipt_item_id to link back to receipt if added from receipt scan
ALTER TABLE public.pantry_items
ADD COLUMN IF NOT EXISTS receipt_item_id BIGINT REFERENCES public.receipt_items(receipt_item_id) ON DELETE SET NULL;
COMMENT ON COLUMN public.pantry_items.receipt_item_id IS 'Link to receipt_items if this pantry item was created from a receipt scan.';
-- Add product_id to link to specific product if known from UPC scan
ALTER TABLE public.pantry_items
ADD COLUMN IF NOT EXISTS product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL;
COMMENT ON COLUMN public.pantry_items.product_id IS 'Link to products if this pantry item was created from a UPC scan.';
-- Add expiry_source to track how expiry date was determined
ALTER TABLE public.pantry_items
ADD COLUMN IF NOT EXISTS expiry_source TEXT;
COMMENT ON COLUMN public.pantry_items.expiry_source IS 'How expiry was determined: manual, calculated, package, receipt.';
-- Add is_consumed column if not exists (check for existing)
ALTER TABLE public.pantry_items
ADD COLUMN IF NOT EXISTS is_consumed BOOLEAN DEFAULT FALSE;
COMMENT ON COLUMN public.pantry_items.is_consumed IS 'Whether the item has been fully consumed.';
-- Add consumed_at timestamp
ALTER TABLE public.pantry_items
ADD COLUMN IF NOT EXISTS consumed_at TIMESTAMPTZ;
COMMENT ON COLUMN public.pantry_items.consumed_at IS 'When the item was marked as consumed.';
-- New indexes for pantry_items expiry queries.
-- The WHERE clauses deliberately treat a NULL is_consumed (pre-existing rows
-- that never received the default) the same as FALSE, i.e. "not consumed".
CREATE INDEX IF NOT EXISTS idx_pantry_items_best_before_date ON public.pantry_items(best_before_date)
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
CREATE INDEX IF NOT EXISTS idx_pantry_items_expiring_soon ON public.pantry_items(user_id, best_before_date)
WHERE best_before_date IS NOT NULL AND (is_consumed IS NULL OR is_consumed = FALSE);
CREATE INDEX IF NOT EXISTS idx_pantry_items_receipt_item_id ON public.pantry_items(receipt_item_id)
WHERE receipt_item_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_pantry_items_product_id ON public.pantry_items(product_id)
WHERE product_id IS NOT NULL;
-- 5. Add UPC scan support to receipt_items table
-- When receipt items are matched via UPC, store the reference
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS upc_code TEXT;
COMMENT ON COLUMN public.receipt_items.upc_code IS 'UPC code if extracted from receipt or matched during processing.';
-- NOTE(review): a format CHECK could in fact be added via ALTER TABLE ... ADD
-- CONSTRAINT (the original note saying it cannot is incorrect); format is
-- validated in the application for now.
-- Partial index for matching receipt lines to products by barcode.
CREATE INDEX IF NOT EXISTS idx_receipt_items_upc_code ON public.receipt_items(upc_code)
WHERE upc_code IS NOT NULL;

View File

@@ -0,0 +1,169 @@
-- sql/migrations/003_receipt_scanning_enhancements.sql
-- ============================================================================
-- RECEIPT SCANNING ENHANCEMENTS MIGRATION
-- ============================================================================
-- Purpose:
-- This migration adds enhancements to the existing receipt scanning tables:
-- 1. Enhancements to receipts table for better OCR processing
-- 2. Enhancements to receipt_items for better item matching
-- 3. receipt_processing_log for tracking OCR/AI processing attempts
--
-- Existing tables:
-- - receipts (lines 932-948 in master_schema_rollup.sql)
-- - receipt_items (lines 951-966 in master_schema_rollup.sql)
-- ============================================================================
-- 1. Enhancements to receipts table
-- All alterations use ADD COLUMN IF NOT EXISTS so re-running is safe.
-- Add store detection confidence
ALTER TABLE public.receipts
ADD COLUMN IF NOT EXISTS store_confidence NUMERIC(5,4);
COMMENT ON COLUMN public.receipts.store_confidence IS 'Confidence score for store detection (0.0-1.0).';
-- Add OCR provider used
ALTER TABLE public.receipts
ADD COLUMN IF NOT EXISTS ocr_provider TEXT;
COMMENT ON COLUMN public.receipts.ocr_provider IS 'Which OCR service processed this receipt: tesseract, openai, anthropic.';
-- Add error details for failed processing
ALTER TABLE public.receipts
ADD COLUMN IF NOT EXISTS error_details JSONB;
COMMENT ON COLUMN public.receipts.error_details IS 'Detailed error information if processing failed.';
-- Add retry count for failed processing
ALTER TABLE public.receipts
ADD COLUMN IF NOT EXISTS retry_count INTEGER DEFAULT 0;
COMMENT ON COLUMN public.receipts.retry_count IS 'Number of processing retry attempts.';
-- Add extracted text confidence
ALTER TABLE public.receipts
ADD COLUMN IF NOT EXISTS ocr_confidence NUMERIC(5,4);
COMMENT ON COLUMN public.receipts.ocr_confidence IS 'Overall OCR text extraction confidence score.';
-- Add currency detection
ALTER TABLE public.receipts
ADD COLUMN IF NOT EXISTS currency TEXT DEFAULT 'CAD';
COMMENT ON COLUMN public.receipts.currency IS 'Detected currency: CAD, USD, etc.';
-- New indexes for receipt processing.
-- Partial index serving the retry queue: only receipts still eligible for
-- reprocessing. NOTE(review): the limit of 3 is hardcoded here and must stay
-- in sync with the application's retry policy — confirm.
CREATE INDEX IF NOT EXISTS idx_receipts_status_retry ON public.receipts(status, retry_count)
WHERE status IN ('pending', 'failed') AND retry_count < 3;
-- 2. Enhancements to receipt_items table
-- All alterations use ADD COLUMN IF NOT EXISTS so re-running is safe.
-- Add line number from receipt for ordering
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS line_number INTEGER;
COMMENT ON COLUMN public.receipt_items.line_number IS 'Original line number on the receipt for display ordering.';
-- Add match confidence score
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS match_confidence NUMERIC(5,4);
COMMENT ON COLUMN public.receipt_items.match_confidence IS 'Confidence score for item matching (0.0-1.0).';
-- Add is_discount flag for discount/coupon lines
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS is_discount BOOLEAN DEFAULT FALSE;
COMMENT ON COLUMN public.receipt_items.is_discount IS 'Whether this line is a discount/coupon (negative price).';
-- Add unit_price if per-unit pricing detected (stored in cents to avoid float rounding)
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS unit_price_cents INTEGER;
COMMENT ON COLUMN public.receipt_items.unit_price_cents IS 'Per-unit price if detected (e.g., price per kg).';
-- Add unit type if detected
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS unit_type TEXT;
COMMENT ON COLUMN public.receipt_items.unit_type IS 'Unit type if detected: kg, lb, each, etc.';
-- Add added_to_pantry flag
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS added_to_pantry BOOLEAN DEFAULT FALSE;
COMMENT ON COLUMN public.receipt_items.added_to_pantry IS 'Whether this item has been added to user pantry.';
-- Add pantry_item_id link (SET NULL keeps the receipt line if the pantry item is deleted)
ALTER TABLE public.receipt_items
ADD COLUMN IF NOT EXISTS pantry_item_id BIGINT REFERENCES public.pantry_items(pantry_item_id) ON DELETE SET NULL;
COMMENT ON COLUMN public.receipt_items.pantry_item_id IS 'Link to pantry_items if this receipt item was added to pantry.';
-- New indexes for receipt_items.
-- The added_to_pantry partial index serves listing lines not yet pushed to the pantry.
CREATE INDEX IF NOT EXISTS idx_receipt_items_status ON public.receipt_items(status);
CREATE INDEX IF NOT EXISTS idx_receipt_items_added_to_pantry ON public.receipt_items(receipt_id, added_to_pantry)
WHERE added_to_pantry = FALSE;
CREATE INDEX IF NOT EXISTS idx_receipt_items_pantry_item_id ON public.receipt_items(pantry_item_id)
WHERE pantry_item_id IS NOT NULL;
-- 3. Receipt Processing Log - track OCR/AI processing attempts
-- Useful for debugging, monitoring costs, and improving processing.
-- One row per processing step per receipt; rows cascade away with the receipt.
CREATE TABLE IF NOT EXISTS public.receipt_processing_log (
log_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
receipt_id BIGINT NOT NULL REFERENCES public.receipts(receipt_id) ON DELETE CASCADE,
processing_step TEXT NOT NULL,
status TEXT NOT NULL,
provider TEXT,
duration_ms INTEGER,
tokens_used INTEGER,
cost_cents INTEGER,
input_data JSONB,
output_data JSONB,
error_message TEXT,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Validate processing step
CONSTRAINT receipt_processing_log_step_check CHECK (processing_step IN (
'upload', 'ocr_extraction', 'text_parsing', 'store_detection',
'item_extraction', 'item_matching', 'price_parsing', 'finalization'
)),
-- Validate status
CONSTRAINT receipt_processing_log_status_check CHECK (status IN ('started', 'completed', 'failed', 'skipped')),
-- Validate provider if specified (NULL allowed for internal-only steps)
CONSTRAINT receipt_processing_log_provider_check CHECK (provider IS NULL OR provider IN (
'tesseract', 'openai', 'anthropic', 'google_vision', 'aws_textract', 'internal'
))
);
COMMENT ON TABLE public.receipt_processing_log IS 'Detailed log of each processing step for receipts, useful for debugging and cost tracking.';
COMMENT ON COLUMN public.receipt_processing_log.processing_step IS 'Which processing step this log entry is for.';
COMMENT ON COLUMN public.receipt_processing_log.status IS 'Status of this step: started, completed, failed, skipped.';
COMMENT ON COLUMN public.receipt_processing_log.provider IS 'External service used: tesseract, openai, anthropic, etc.';
COMMENT ON COLUMN public.receipt_processing_log.duration_ms IS 'How long this step took in milliseconds.';
COMMENT ON COLUMN public.receipt_processing_log.tokens_used IS 'Number of API tokens used (for LLM providers).';
COMMENT ON COLUMN public.receipt_processing_log.cost_cents IS 'Estimated cost in cents for this processing step.';
COMMENT ON COLUMN public.receipt_processing_log.input_data IS 'Input data sent to the processing step (for debugging).';
COMMENT ON COLUMN public.receipt_processing_log.output_data IS 'Output data received from the processing step.';
-- Indexes for receipt_processing_log; created_at DESC supports recent-activity views.
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_receipt_id ON public.receipt_processing_log(receipt_id);
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_step_status ON public.receipt_processing_log(processing_step, status);
CREATE INDEX IF NOT EXISTS idx_receipt_processing_log_created_at ON public.receipt_processing_log(created_at DESC);
-- 4. Store-specific receipt patterns - help identify stores from receipt text
-- Patterns are matched against OCR'd receipt text to detect which store issued
-- the receipt; higher priority rows are consulted first (see index below).
CREATE TABLE IF NOT EXISTS public.store_receipt_patterns (
pattern_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
pattern_type TEXT NOT NULL,
pattern_value TEXT NOT NULL,
priority INTEGER DEFAULT 0,
is_active BOOLEAN DEFAULT TRUE,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-- Validate pattern type
CONSTRAINT store_receipt_patterns_type_check CHECK (pattern_type IN (
'header_regex', 'footer_regex', 'phone_number', 'address_fragment', 'store_number_format'
)),
-- Validate pattern is not empty (whitespace-only values rejected)
CONSTRAINT store_receipt_patterns_value_check CHECK (TRIM(pattern_value) <> ''),
-- Unique constraint per store/type/value
UNIQUE(store_id, pattern_type, pattern_value)
);
COMMENT ON TABLE public.store_receipt_patterns IS 'Patterns to help identify stores from receipt text and format.';
COMMENT ON COLUMN public.store_receipt_patterns.pattern_type IS 'Type of pattern: header_regex, footer_regex, phone_number, etc.';
COMMENT ON COLUMN public.store_receipt_patterns.pattern_value IS 'The actual pattern (regex or literal text).';
COMMENT ON COLUMN public.store_receipt_patterns.priority IS 'Higher priority patterns are checked first.';
COMMENT ON COLUMN public.store_receipt_patterns.is_active IS 'Whether this pattern is currently in use.';
-- Indexes for store_receipt_patterns.
-- The partial index orders active patterns by priority DESC, matching the
-- "higher priority checked first" lookup.
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_store_id ON public.store_receipt_patterns(store_id);
CREATE INDEX IF NOT EXISTS idx_store_receipt_patterns_active ON public.store_receipt_patterns(pattern_type, is_active, priority DESC)
WHERE is_active = TRUE;

View File

@@ -0,0 +1,152 @@
// src/components/ErrorBoundary.tsx
/**
* React Error Boundary with Sentry integration.
* Implements ADR-015: Application Performance Monitoring and Error Tracking.
*
* This component catches JavaScript errors anywhere in the child component tree,
* logs them to Sentry/Bugsink, and displays a fallback UI instead of crashing.
*/
import { Component, ReactNode } from 'react';
import { Sentry, captureException, isSentryConfigured } from '../services/sentry.client';
/** Props accepted by {@link ErrorBoundary}. */
interface ErrorBoundaryProps {
/** Child components to render */
children: ReactNode;
/** Optional custom fallback UI. If not provided, uses default error message. */
fallback?: ReactNode;
/** Optional callback when an error is caught */
onError?: (error: Error, errorInfo: React.ErrorInfo) => void;
}
/** Internal state for {@link ErrorBoundary}. */
interface ErrorBoundaryState {
/** True once a render-phase error has been caught. */
hasError: boolean;
/** The caught error, or null when no error has occurred. */
error: Error | null;
/** Sentry event id of the captured error (enables the feedback dialog), or null. */
eventId: string | null;
}
/**
 * Error Boundary component that catches React component errors
 * and reports them to Sentry/Bugsink.
 *
 * @example
 * ```tsx
 * <ErrorBoundary fallback={<p>Something went wrong.</p>}>
 * <MyComponent />
 * </ErrorBoundary>
 * ```
 */
export class ErrorBoundary extends Component<ErrorBoundaryProps, ErrorBoundaryState> {
constructor(props: ErrorBoundaryProps) {
super(props);
// Start in the "no error" state; render() passes children through.
this.state = {
hasError: false,
error: null,
eventId: null,
};
}
// Render-phase lifecycle: flag the error so the next render shows the fallback UI.
static getDerivedStateFromError(error: Error): Partial<ErrorBoundaryState> {
return { hasError: true, error };
}
// Commit-phase lifecycle: report the error to Sentry and notify the optional callback.
componentDidCatch(error: Error, errorInfo: React.ErrorInfo): void {
// Always log locally as well (useful when Sentry/Bugsink is not configured).
console.error('ErrorBoundary caught an error:', error, errorInfo);
// Report to Sentry with component stack
const eventId = captureException(error, {
componentStack: errorInfo.componentStack,
});
// Keep the event id so handleReportFeedback can open the feedback dialog.
this.setState({ eventId: eventId ?? null });
// Call optional onError callback
this.props.onError?.(error, errorInfo);
}
// Full page reload — discards the broken React tree and all client state.
handleReload = (): void => {
window.location.reload();
};
// Opens the Sentry user-feedback dialog; no-op unless Sentry is configured
// and an event was actually captured for this error.
handleReportFeedback = (): void => {
if (isSentryConfigured && this.state.eventId) {
// Open Sentry feedback dialog if available
Sentry.showReportDialog({ eventId: this.state.eventId });
}
};
render(): ReactNode {
if (this.state.hasError) {
// Custom fallback UI if provided
if (this.props.fallback) {
return this.props.fallback;
}
// Default fallback UI
return (
<div className="flex min-h-screen items-center justify-center bg-gray-50 dark:bg-gray-900 p-4">
<div className="max-w-md w-full bg-white dark:bg-gray-800 rounded-lg shadow-lg p-6 text-center">
{/* Warning-triangle icon */}
<div className="text-red-500 dark:text-red-400 mb-4">
<svg
className="w-16 h-16 mx-auto"
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
aria-hidden="true"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z"
/>
</svg>
</div>
<h1 className="text-xl font-semibold text-gray-900 dark:text-white mb-2">
Something went wrong
</h1>
<p className="text-gray-600 dark:text-gray-400 mb-6">
We&apos;re sorry, but an unexpected error occurred. Our team has been notified.
</p>
<div className="flex flex-col sm:flex-row gap-3 justify-center">
<button
onClick={this.handleReload}
className="px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
>
Reload Page
</button>
{/* Feedback button only renders when a Sentry event id exists to attach feedback to. */}
{isSentryConfigured && this.state.eventId && (
<button
onClick={this.handleReportFeedback}
className="px-4 py-2 bg-gray-200 dark:bg-gray-700 text-gray-800 dark:text-gray-200 rounded-md hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors"
>
Report Feedback
</button>
)}
</div>
{/* NOTE(review): process.env.NODE_ENV in client code relies on the bundler
defining it; the project elsewhere uses import.meta.env (Vite), so
import.meta.env.DEV may be the more idiomatic check — confirm build config. */}
{this.state.error && process.env.NODE_ENV === 'development' && (
<details className="mt-6 text-left">
<summary className="cursor-pointer text-sm text-gray-500 dark:text-gray-400">
Error Details (Development Only)
</summary>
<pre className="mt-2 p-3 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-auto max-h-48 text-red-600 dark:text-red-400">
{this.state.error.message}
{'\n\n'}
{this.state.error.stack}
</pre>
</details>
)}
</div>
</div>
);
}
// No error: render children normally.
return this.props.children;
}
}
/**
 * Pre-configured Sentry ErrorBoundary from @sentry/react.
 * Use this for simpler integration when you don't need custom UI.
 * Re-exported here so both boundary variants are importable from one module.
 */
export const SentryErrorBoundary = Sentry.ErrorBoundary;

View File

@@ -14,6 +14,16 @@ const config = {
google: {
mapsEmbedApiKey: import.meta.env.VITE_GOOGLE_MAPS_EMBED_API_KEY,
},
/**
* Sentry/Bugsink error tracking configuration (ADR-015).
* Uses VITE_ prefix for client-side environment variables.
*/
sentry: {
dsn: import.meta.env.VITE_SENTRY_DSN,
environment: import.meta.env.VITE_SENTRY_ENVIRONMENT || import.meta.env.MODE,
debug: import.meta.env.VITE_SENTRY_DEBUG === 'true',
enabled: import.meta.env.VITE_SENTRY_ENABLED !== 'false',
},
};
export default config;

View File

@@ -94,6 +94,15 @@ const aiSchema = z.object({
priceQualityThreshold: floatWithDefault(0.5),
});
/**
* UPC API configuration schema.
* External APIs for product lookup by barcode.
*/
const upcSchema = z.object({
upcItemDbApiKey: z.string().optional(), // UPC Item DB API key (upcitemdb.com)
barcodeLookupApiKey: z.string().optional(), // Barcode Lookup API key (barcodelookup.com)
});
/**
* Google services configuration schema.
*/
@@ -126,6 +135,17 @@ const serverSchema = z.object({
storagePath: z.string().default('/var/www/flyer-crawler.projectium.com/flyer-images'),
});
/**
* Error tracking configuration schema (ADR-015).
* Uses Bugsink (Sentry-compatible self-hosted error tracking).
*/
const sentrySchema = z.object({
dsn: z.string().optional(), // Sentry DSN for backend
enabled: booleanString(true),
environment: z.string().optional(),
debug: booleanString(false),
});
/**
* Complete environment configuration schema.
*/
@@ -135,9 +155,11 @@ const envSchema = z.object({
auth: authSchema,
smtp: smtpSchema,
ai: aiSchema,
upc: upcSchema,
google: googleSchema,
worker: workerSchema,
server: serverSchema,
sentry: sentrySchema,
});
export type EnvConfig = z.infer<typeof envSchema>;
@@ -178,6 +200,10 @@ function loadEnvVars(): unknown {
geminiRpm: process.env.GEMINI_RPM,
priceQualityThreshold: process.env.AI_PRICE_QUALITY_THRESHOLD,
},
upc: {
upcItemDbApiKey: process.env.UPC_ITEM_DB_API_KEY,
barcodeLookupApiKey: process.env.BARCODE_LOOKUP_API_KEY,
},
google: {
mapsApiKey: process.env.GOOGLE_MAPS_API_KEY,
clientId: process.env.GOOGLE_CLIENT_ID,
@@ -198,6 +224,12 @@ function loadEnvVars(): unknown {
baseUrl: process.env.BASE_URL,
storagePath: process.env.STORAGE_PATH,
},
sentry: {
dsn: process.env.SENTRY_DSN,
enabled: process.env.SENTRY_ENABLED,
environment: process.env.SENTRY_ENVIRONMENT || process.env.NODE_ENV,
debug: process.env.SENTRY_DEBUG,
},
};
}
@@ -301,3 +333,18 @@ export const isAiConfigured = !!config.ai.geminiApiKey;
* Returns true if Google Maps is configured.
*/
export const isGoogleMapsConfigured = !!config.google.mapsApiKey;
/**
* Returns true if Sentry/Bugsink error tracking is configured and enabled.
*/
export const isSentryConfigured = !!config.sentry.dsn && config.sentry.enabled;
/**
* Returns true if UPC Item DB API is configured.
*/
export const isUpcItemDbConfigured = !!config.upc.upcItemDbApiKey;
/**
* Returns true if Barcode Lookup API is configured.
*/
export const isBarcodeLookupConfigured = !!config.upc.barcodeLookupApiKey;

View File

@@ -1,4 +1,4 @@
// src/routes/passport.routes.test.ts
// src/config/passport.test.ts
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
import * as bcrypt from 'bcrypt';
import { Request, Response, NextFunction } from 'express';
@@ -101,7 +101,7 @@ vi.mock('passport', () => {
});
// Now, import the passport configuration which will use our mocks
import passport, { isAdmin, optionalAuth, mockAuth } from './passport.routes';
import passport, { isAdmin, optionalAuth, mockAuth } from './passport';
import { logger } from '../services/logger.server';
import { ForbiddenError } from '../services/db/errors.db';

View File

@@ -1,4 +1,4 @@
// src/routes/passport.routes.ts
// src/config/passport.ts
import passport from 'passport';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { Strategy as LocalStrategy } from 'passport-local';

View File

@@ -0,0 +1,18 @@
import { WorkerOptions } from 'bullmq';
/**
 * Shared BullMQ worker options implementing the stall-detection and
 * recovery policy defined in ADR-053.
 *
 * This is intentionally a partial configuration: spread it into a full
 * WorkerOptions object (supplying a `connection`) when constructing a worker.
 */
export const defaultWorkerOptions: Omit<WorkerOptions, 'connection'> = {
  // How often (ms) BullMQ scans the queue for stalled jobs.
  stalledInterval: 30_000,
  // A job is failed outright once it has stalled this many times, so a
  // crash-looping handler cannot retry forever.
  maxStalledCount: 3,
  // Lifetime (ms) of the per-job lock. If a worker dies and stops renewing
  // the lock, the job is considered stalled after this interval.
  lockDuration: 30_000,
};

View File

@@ -1,4 +1,12 @@
// src/index.tsx
/**
* IMPORTANT: Sentry initialization MUST happen before any other imports
* to ensure all errors are captured, including those in imported modules.
* See ADR-015: Application Performance Monitoring and Error Tracking.
*/
import { initSentry } from './services/sentry.client';
initSentry();
import React from 'react';
import ReactDOM from 'react-dom/client';
import App from './App';

View File

@@ -83,8 +83,8 @@ describe('Multer Middleware Directory Creation', () => {
await import('./multer.middleware');
// Assert
// It should try to create both the flyer storage and avatar storage paths
expect(mocks.mkdir).toHaveBeenCalledTimes(2);
// It should try to create the flyer, avatar, and receipt storage paths
expect(mocks.mkdir).toHaveBeenCalledTimes(3);
expect(mocks.mkdir).toHaveBeenCalledWith(expect.any(String), { recursive: true });
expect(mocks.logger.info).toHaveBeenCalledWith('Ensured multer storage directories exist.');
expect(mocks.logger.error).not.toHaveBeenCalled();

View File

@@ -11,12 +11,17 @@ import { logger } from '../services/logger.server';
export const flyerStoragePath =
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
export const avatarStoragePath = path.join(process.cwd(), 'public', 'uploads', 'avatars');
export const receiptStoragePath = path.join(
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com',
'receipts',
);
// Ensure directories exist at startup
(async () => {
try {
await fs.mkdir(flyerStoragePath, { recursive: true });
await fs.mkdir(avatarStoragePath, { recursive: true });
await fs.mkdir(receiptStoragePath, { recursive: true });
logger.info('Ensured multer storage directories exist.');
} catch (error) {
const err = error instanceof Error ? error : new Error(String(error));
@@ -24,7 +29,7 @@ export const avatarStoragePath = path.join(process.cwd(), 'public', 'uploads', '
}
})();
type StorageType = 'flyer' | 'avatar';
type StorageType = 'flyer' | 'avatar' | 'receipt';
const getStorageConfig = (type: StorageType) => {
switch (type) {
@@ -47,6 +52,17 @@ const getStorageConfig = (type: StorageType) => {
cb(null, uniqueSuffix);
},
});
case 'receipt':
return multer.diskStorage({
destination: (req, file, cb) => cb(null, receiptStoragePath),
filename: (req, file, cb) => {
const user = req.user as UserProfile | undefined;
const userId = user?.user.user_id || 'anonymous';
const uniqueSuffix = `${Date.now()}-${Math.round(Math.random() * 1e9)}`;
const sanitizedOriginalName = sanitizeFilename(file.originalname);
cb(null, `receipt-${userId}-${uniqueSuffix}-${sanitizedOriginalName}`);
},
});
case 'flyer':
default:
return multer.diskStorage({

View File

@@ -23,14 +23,21 @@ export const validateRequest =
});
// On success, merge the parsed (and coerced) data back into the request objects.
// We don't reassign `req.params`, `req.query`, or `req.body` directly, as they
// might be read-only getters in some environments (like during supertest tests).
// Instead, we clear the existing object and merge the new properties.
// For req.params, we can delete existing keys and assign new ones.
Object.keys(req.params).forEach((key) => delete (req.params as ParamsDictionary)[key]);
Object.assign(req.params, params);
Object.keys(req.query).forEach((key) => delete (req.query as Query)[key]);
Object.assign(req.query, query);
// For req.query in Express 5, the query object is lazily evaluated from the URL
// and cannot be mutated directly. We use Object.defineProperty to replace
// the getter with our validated/transformed query object.
Object.defineProperty(req, 'query', {
value: query as Query,
writable: true,
configurable: true,
enumerable: true,
});
// For body, direct reassignment works.
req.body = body;
return next();

View File

@@ -8,6 +8,7 @@ import { FlyersProvider } from './FlyersProvider';
import { MasterItemsProvider } from './MasterItemsProvider';
import { ModalProvider } from './ModalProvider';
import { UserDataProvider } from './UserDataProvider';
import { ErrorBoundary } from '../components/ErrorBoundary';
interface AppProvidersProps {
children: ReactNode;
@@ -18,6 +19,7 @@ interface AppProvidersProps {
* This cleans up index.tsx and makes the provider hierarchy clear.
*
* Provider hierarchy (from outermost to innermost):
* 0. ErrorBoundary - Catches React errors and reports to Sentry (ADR-015)
* 1. QueryClientProvider - TanStack Query for server state management (ADR-0005)
* 2. ModalProvider - Modal state management
* 3. AuthProvider - Authentication state
@@ -27,18 +29,20 @@ interface AppProvidersProps {
*/
export const AppProviders: React.FC<AppProvidersProps> = ({ children }) => {
return (
<QueryClientProvider client={queryClient}>
<ModalProvider>
<AuthProvider>
<FlyersProvider>
<MasterItemsProvider>
<UserDataProvider>{children}</UserDataProvider>
</MasterItemsProvider>
</FlyersProvider>
</AuthProvider>
</ModalProvider>
{/* React Query Devtools - only visible in development */}
{import.meta.env.DEV && <ReactQueryDevtools initialIsOpen={false} />}
</QueryClientProvider>
<ErrorBoundary>
<QueryClientProvider client={queryClient}>
<ModalProvider>
<AuthProvider>
<FlyersProvider>
<MasterItemsProvider>
<UserDataProvider>{children}</UserDataProvider>
</MasterItemsProvider>
</FlyersProvider>
</AuthProvider>
</ModalProvider>
{/* React Query Devtools - only visible in development */}
{import.meta.env.DEV && <ReactQueryDevtools initialIsOpen={false} />}
</QueryClientProvider>
</ErrorBoundary>
);
};

View File

@@ -32,7 +32,7 @@ vi.mock('../lib/queue', () => ({
cleanupQueue: {},
}));
const { mockedDb } = vi.hoisted(() => {
const { mockedDb, mockedBrandService } = vi.hoisted(() => {
return {
mockedDb: {
adminRepo: {
@@ -59,6 +59,9 @@ const { mockedDb } = vi.hoisted(() => {
deleteUserById: vi.fn(),
},
},
mockedBrandService: {
updateBrandLogo: vi.fn(),
},
};
});
@@ -89,6 +92,26 @@ vi.mock('node:fs/promises', () => ({
vi.mock('../services/backgroundJobService');
vi.mock('../services/geocodingService.server');
vi.mock('../services/queueService.server');
vi.mock('../services/queues.server');
vi.mock('../services/workers.server');
vi.mock('../services/monitoringService.server');
vi.mock('../services/cacheService.server');
vi.mock('../services/userService');
vi.mock('../services/brandService', () => ({
brandService: mockedBrandService,
}));
vi.mock('../services/receiptService.server');
vi.mock('../services/aiService.server');
vi.mock('../config/env', () => ({
config: {
database: { host: 'localhost', port: 5432, user: 'test', password: 'test', name: 'test' },
redis: { url: 'redis://localhost:6379' },
auth: { jwtSecret: 'test-secret' },
server: { port: 3000, host: 'localhost' },
},
isAiConfigured: vi.fn().mockReturnValue(false),
parseConfig: vi.fn(),
}));
vi.mock('@bull-board/api'); // Keep this mock for the API part
vi.mock('@bull-board/api/bullMQAdapter'); // Keep this mock for the adapter
@@ -103,13 +126,17 @@ vi.mock('@bull-board/express', () => ({
}));
// Mock the logger
vi.mock('../services/logger.server', async () => ({
// Use async import to avoid hoisting issues with mockLogger
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
vi.mock('../services/logger.server', async () => {
const { mockLogger, createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: mockLogger,
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
// Note: admin.routes.ts imports from '../config/passport', so we mock that path
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
if (!req.user) return res.status(401).json({ message: 'Unauthorized' });
@@ -314,22 +341,23 @@ describe('Admin Content Management Routes (/api/admin)', () => {
it('POST /brands/:id/logo should upload a logo and update the brand', async () => {
const brandId = 55;
vi.mocked(mockedDb.adminRepo.updateBrandLogo).mockResolvedValue(undefined);
const mockLogoUrl = '/flyer-images/brand-logos/test-logo.png';
vi.mocked(mockedBrandService.updateBrandLogo).mockResolvedValue(mockLogoUrl);
const response = await supertest(app)
.post(`/api/admin/brands/${brandId}/logo`)
.attach('logoImage', Buffer.from('dummy-logo-content'), 'test-logo.png');
expect(response.status).toBe(200);
expect(response.body.data.message).toBe('Brand logo updated successfully.');
expect(vi.mocked(mockedDb.adminRepo.updateBrandLogo)).toHaveBeenCalledWith(
expect(vi.mocked(mockedBrandService.updateBrandLogo)).toHaveBeenCalledWith(
brandId,
expect.stringContaining('/flyer-images/'),
expect.objectContaining({ fieldname: 'logoImage' }),
expect.anything(),
);
});
it('POST /brands/:id/logo should return 500 on DB error', async () => {
const brandId = 55;
vi.mocked(mockedDb.adminRepo.updateBrandLogo).mockRejectedValue(new Error('DB Error'));
vi.mocked(mockedBrandService.updateBrandLogo).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app)
.post(`/api/admin/brands/${brandId}/logo`)
.attach('logoImage', Buffer.from('dummy-logo-content'), 'test-logo.png');
@@ -347,7 +375,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
it('should clean up the uploaded file if updating the brand logo fails', async () => {
const brandId = 55;
const dbError = new Error('DB Connection Failed');
vi.mocked(mockedDb.adminRepo.updateBrandLogo).mockRejectedValue(dbError);
vi.mocked(mockedBrandService.updateBrandLogo).mockRejectedValue(dbError);
const response = await supertest(app)
.post(`/api/admin/brands/${brandId}/logo`)

View File

@@ -29,6 +29,17 @@ vi.mock('../services/queueService.server', () => ({
cleanupWorker: {},
weeklyAnalyticsWorker: {},
}));
// Mock the monitoring service - the routes use this service for job operations
vi.mock('../services/monitoringService.server', () => ({
monitoringService: {
getWorkerStatuses: vi.fn(),
getQueueStatuses: vi.fn(),
retryFailedJob: vi.fn(),
getJobStatus: vi.fn(),
},
}));
vi.mock('../services/db/index.db', () => ({
adminRepo: {},
flyerRepo: {},
@@ -59,21 +70,22 @@ import adminRouter from './admin.routes';
// Import the mocked modules to control them
import { backgroundJobService } from '../services/backgroundJobService'; // This is now a mock
import {
flyerQueue,
analyticsQueue,
cleanupQueue,
weeklyAnalyticsQueue,
} from '../services/queueService.server';
import { analyticsQueue, cleanupQueue } from '../services/queueService.server';
import { monitoringService } from '../services/monitoringService.server'; // This is now a mock
import { NotFoundError, ValidationError } from '../services/db/errors.db';
// Mock the logger
vi.mock('../services/logger.server', async () => ({
// Use async import to avoid hoisting issues with mockLogger
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
vi.mock('../services/logger.server', async () => {
const { mockLogger, createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: mockLogger,
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
// Note: admin.routes.ts imports from '../config/passport', so we mock that path
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
if (!req.user) return res.status(401).json({ message: 'Unauthorized' });
@@ -221,13 +233,8 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
const jobId = 'failed-job-1';
it('should successfully retry a failed job', async () => {
// Arrange
const mockJob = {
id: jobId,
getState: vi.fn().mockResolvedValue('failed'),
retry: vi.fn().mockResolvedValue(undefined),
};
vi.mocked(flyerQueue.getJob).mockResolvedValue(mockJob as unknown as Job);
// Arrange - mock the monitoring service to resolve successfully
vi.mocked(monitoringService.retryFailedJob).mockResolvedValue(undefined);
// Act
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
@@ -237,7 +244,11 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
expect(response.body.data.message).toBe(
`Job ${jobId} has been successfully marked for retry.`,
);
expect(mockJob.retry).toHaveBeenCalledTimes(1);
expect(monitoringService.retryFailedJob).toHaveBeenCalledWith(
queueName,
jobId,
'admin-user-id',
);
});
it('should return 400 if the queue name is invalid', async () => {
@@ -250,8 +261,10 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
const queueName = 'weekly-analytics-reporting';
const jobId = 'some-job-id';
// Ensure getJob returns undefined (not found)
vi.mocked(weeklyAnalyticsQueue.getJob).mockResolvedValue(undefined);
// Mock monitoringService.retryFailedJob to throw NotFoundError
vi.mocked(monitoringService.retryFailedJob).mockRejectedValue(
new NotFoundError(`Job with ID '${jobId}' not found in queue '${queueName}'.`),
);
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
@@ -262,7 +275,10 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
});
it('should return 404 if the job ID is not found in the queue', async () => {
vi.mocked(flyerQueue.getJob).mockResolvedValue(undefined);
// Mock monitoringService.retryFailedJob to throw NotFoundError
vi.mocked(monitoringService.retryFailedJob).mockRejectedValue(
new NotFoundError("Job with ID 'not-found-job' not found in queue 'flyer-processing'."),
);
const response = await supertest(app).post(
`/api/admin/jobs/${queueName}/not-found-job/retry`,
);
@@ -271,12 +287,10 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
});
it('should return 400 if the job is not in a failed state', async () => {
const mockJob = {
id: jobId,
getState: vi.fn().mockResolvedValue('completed'),
retry: vi.fn(),
};
vi.mocked(flyerQueue.getJob).mockResolvedValue(mockJob as unknown as Job);
// Mock monitoringService.retryFailedJob to throw ValidationError
vi.mocked(monitoringService.retryFailedJob).mockRejectedValue(
new ValidationError([], "Job is not in a 'failed' state. Current state: completed."),
);
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
@@ -284,16 +298,11 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
expect(response.body.error.message).toBe(
"Job is not in a 'failed' state. Current state: completed.",
); // This is now handled by the errorHandler
expect(mockJob.retry).not.toHaveBeenCalled();
});
it('should return 500 if job.retry() throws an error', async () => {
const mockJob = {
id: jobId,
getState: vi.fn().mockResolvedValue('failed'),
retry: vi.fn().mockRejectedValue(new Error('Cannot retry job')),
};
vi.mocked(flyerQueue.getJob).mockResolvedValue(mockJob as unknown as Job);
// Mock monitoringService.retryFailedJob to throw a generic error
vi.mocked(monitoringService.retryFailedJob).mockRejectedValue(new Error('Cannot retry job'));
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);

View File

@@ -92,10 +92,12 @@ import { adminRepo } from '../services/db/index.db';
// Mock the logger
vi.mock('../services/logger.server', () => ({
logger: mockLogger,
createScopedLogger: vi.fn(() => mockLogger),
}));
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
// Note: admin.routes.ts imports from '../config/passport', so we mock that path
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
if (!req.user) return res.status(401).json({ message: 'Unauthorized' });

View File

@@ -41,9 +41,13 @@ vi.mock('../services/cacheService.server', () => ({
},
}));
vi.mock('../services/logger.server', async () => ({
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
vi.mock('../services/logger.server', async () => {
const { mockLogger, createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: mockLogger,
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
vi.mock('@bull-board/api');
vi.mock('@bull-board/api/bullMQAdapter');
@@ -57,9 +61,27 @@ vi.mock('@bull-board/express', () => ({
}));
vi.mock('node:fs/promises');
vi.mock('../services/queues.server');
vi.mock('../services/workers.server');
vi.mock('../services/monitoringService.server');
vi.mock('../services/userService');
vi.mock('../services/brandService');
vi.mock('../services/receiptService.server');
vi.mock('../services/aiService.server');
vi.mock('../config/env', () => ({
config: {
database: { host: 'localhost', port: 5432, user: 'test', password: 'test', name: 'test' },
redis: { url: 'redis://localhost:6379' },
auth: { jwtSecret: 'test-secret' },
server: { port: 3000, host: 'localhost' },
},
isAiConfigured: vi.fn().mockReturnValue(false),
parseConfig: vi.fn(),
}));
// Mock Passport to allow admin access
vi.mock('./passport.routes', () => ({
// Note: admin.routes.ts imports from '../config/passport', so we mock that path
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: any, res: any, next: any) => {
req.user = createMockUserProfile({ role: 'admin' });

View File

@@ -1,7 +1,6 @@
// src/routes/admin.routes.ts
import { Router, NextFunction, Request, Response } from 'express';
import passport from './passport.routes';
import { isAdmin } from './passport.routes'; // Correctly imported
import passport, { isAdmin } from '../config/passport';
import { z } from 'zod';
import * as db from '../services/db/index.db';

View File

@@ -26,6 +26,24 @@ vi.mock('node:fs/promises');
vi.mock('../services/backgroundJobService');
vi.mock('../services/geocodingService.server');
vi.mock('../services/queueService.server');
vi.mock('../services/queues.server');
vi.mock('../services/workers.server');
vi.mock('../services/monitoringService.server');
vi.mock('../services/cacheService.server');
vi.mock('../services/userService');
vi.mock('../services/brandService');
vi.mock('../services/receiptService.server');
vi.mock('../services/aiService.server');
vi.mock('../config/env', () => ({
config: {
database: { host: 'localhost', port: 5432, user: 'test', password: 'test', name: 'test' },
redis: { url: 'redis://localhost:6379' },
auth: { jwtSecret: 'test-secret' },
server: { port: 3000, host: 'localhost' },
},
isAiConfigured: vi.fn().mockReturnValue(false),
parseConfig: vi.fn(),
}));
vi.mock('@bull-board/api');
vi.mock('@bull-board/api/bullMQAdapter');
vi.mock('@bull-board/express', () => ({
@@ -44,13 +62,17 @@ import adminRouter from './admin.routes';
import { adminRepo } from '../services/db/index.db';
// Mock the logger
vi.mock('../services/logger.server', async () => ({
// Use async import to avoid hoisting issues with mockLogger
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
vi.mock('../services/logger.server', async () => {
const { mockLogger, createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: mockLogger,
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
// Note: admin.routes.ts imports from '../config/passport', so we mock that path
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
if (!req.user) return res.status(401).json({ message: 'Unauthorized' });

View File

@@ -31,6 +31,24 @@ vi.mock('../services/backgroundJobService', () => ({
},
}));
vi.mock('../services/queueService.server');
vi.mock('../services/queues.server');
vi.mock('../services/workers.server');
vi.mock('../services/monitoringService.server');
vi.mock('../services/cacheService.server');
vi.mock('../services/userService');
vi.mock('../services/brandService');
vi.mock('../services/receiptService.server');
vi.mock('../services/aiService.server');
vi.mock('../config/env', () => ({
config: {
database: { host: 'localhost', port: 5432, user: 'test', password: 'test', name: 'test' },
redis: { url: 'redis://localhost:6379' },
auth: { jwtSecret: 'test-secret' },
server: { port: 3000, host: 'localhost' },
},
isAiConfigured: vi.fn().mockReturnValue(false),
parseConfig: vi.fn(),
}));
vi.mock('@bull-board/api');
vi.mock('@bull-board/api/bullMQAdapter');
vi.mock('@bull-board/express', () => ({
@@ -49,13 +67,17 @@ import adminRouter from './admin.routes';
import { geocodingService } from '../services/geocodingService.server';
// Mock the logger
vi.mock('../services/logger.server', async () => ({
// Use async import to avoid hoisting issues with mockLogger
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
vi.mock('../services/logger.server', async () => {
const { mockLogger, createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: mockLogger,
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
// Note: admin.routes.ts imports from '../config/passport', so we mock that path
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
req.user = createMockUserProfile({

View File

@@ -34,6 +34,23 @@ vi.mock('../services/db/recipe.db');
vi.mock('../services/backgroundJobService');
vi.mock('../services/geocodingService.server');
vi.mock('../services/queueService.server');
vi.mock('../services/queues.server');
vi.mock('../services/workers.server');
vi.mock('../services/monitoringService.server');
vi.mock('../services/cacheService.server');
vi.mock('../services/brandService');
vi.mock('../services/receiptService.server');
vi.mock('../services/aiService.server');
vi.mock('../config/env', () => ({
config: {
database: { host: 'localhost', port: 5432, user: 'test', password: 'test', name: 'test' },
redis: { url: 'redis://localhost:6379' },
auth: { jwtSecret: 'test-secret' },
server: { port: 3000, host: 'localhost' },
},
isAiConfigured: vi.fn().mockReturnValue(false),
parseConfig: vi.fn(),
}));
vi.mock('@bull-board/api');
vi.mock('@bull-board/api/bullMQAdapter');
vi.mock('node:fs/promises');
@@ -49,10 +66,13 @@ vi.mock('@bull-board/express', () => ({
}));
// Mock the logger
vi.mock('../services/logger.server', async () => ({
// Use async import to avoid hoisting issues with mockLogger
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
vi.mock('../services/logger.server', async () => {
const { mockLogger, createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: mockLogger,
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Import the router AFTER all mocks are defined.
import adminRouter from './admin.routes';
@@ -62,7 +82,8 @@ import { adminRepo, userRepo } from '../services/db/index.db';
import { userService } from '../services/userService';
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
// Note: admin.routes.ts imports from '../config/passport', so we mock that path
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
if (!req.user) return res.status(401).json({ message: 'Unauthorized' });

View File

@@ -61,18 +61,43 @@ vi.mock('../services/queueService.server', () => ({
},
}));
// Import the router AFTER all mocks are defined.
import aiRouter from './ai.routes';
import { flyerQueue } from '../services/queueService.server';
// Mock the logger to keep test output clean
vi.mock('../services/logger.server', async () => ({
// Use async import to avoid hoisting issues with mockLogger
logger: (await import('../tests/utils/mockLogger')).mockLogger,
// Mock the monitoring service
const { mockedMonitoringService } = vi.hoisted(() => ({
mockedMonitoringService: {
getFlyerJobStatus: vi.fn(),
},
}));
vi.mock('../services/monitoringService.server', () => ({
monitoringService: mockedMonitoringService,
}));
// Mock env config to prevent parsing errors
vi.mock('../config/env', () => ({
config: {
database: { host: 'localhost', port: 5432, user: 'test', password: 'test', name: 'test' },
redis: { url: 'redis://localhost:6379' },
auth: { jwtSecret: 'test-secret' },
server: { port: 3000, host: 'localhost' },
ai: { enabled: true },
},
isAiConfigured: vi.fn().mockReturnValue(true),
parseConfig: vi.fn(),
}));
// Import the router AFTER all mocks are defined.
import aiRouter from './ai.routes';
// Mock the logger to keep test output clean
vi.mock('../services/logger.server', async () => {
const { mockLogger, createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: mockLogger,
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Mock the passport module to control authentication for different tests.
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
// Mock passport.authenticate to simply call next(), allowing the request to proceed.
// The actual user object will be injected by the mockAuth middleware or test setup.
@@ -84,13 +109,19 @@ vi.mock('./passport.routes', () => ({
}));
describe('AI Routes (/api/ai)', () => {
beforeEach(() => {
beforeEach(async () => {
vi.clearAllMocks();
// Reset logger implementation to no-op to prevent "Logging failed" leaks from previous tests
vi.mocked(mockLogger.info).mockImplementation(() => {});
vi.mocked(mockLogger.error).mockImplementation(() => {});
vi.mocked(mockLogger.warn).mockImplementation(() => {});
vi.mocked(mockLogger.debug).mockImplementation(() => {}); // Ensure debug is also mocked
// Default mock for monitoring service - returns NotFoundError for unknown jobs
const { NotFoundError } = await import('../services/db/errors.db');
vi.mocked(mockedMonitoringService.getFlyerJobStatus).mockRejectedValue(
new NotFoundError('Job not found.'),
);
});
const app = createTestApp({ router: aiRouter, basePath: '/api/ai' });
@@ -301,8 +332,11 @@ describe('AI Routes (/api/ai)', () => {
describe('GET /jobs/:jobId/status', () => {
it('should return 404 if job is not found', async () => {
// Mock the queue to return null for the job
vi.mocked(flyerQueue.getJob).mockResolvedValue(undefined);
// Mock the monitoring service to throw NotFoundError
const { NotFoundError } = await import('../services/db/errors.db');
vi.mocked(mockedMonitoringService.getFlyerJobStatus).mockRejectedValue(
new NotFoundError('Job not found.'),
);
const response = await supertest(app).get('/api/ai/jobs/non-existent-job/status');
@@ -311,13 +345,13 @@ describe('AI Routes (/api/ai)', () => {
});
it('should return job status if job is found', async () => {
const mockJob = {
const mockJobStatus = {
id: 'job-123',
getState: async () => 'completed',
state: 'completed',
progress: 100,
returnvalue: { flyerId: 1 },
result: { flyerId: 1 },
};
vi.mocked(flyerQueue.getJob).mockResolvedValue(mockJob as unknown as Job);
vi.mocked(mockedMonitoringService.getFlyerJobStatus).mockResolvedValue(mockJobStatus);
const response = await supertest(app).get('/api/ai/jobs/job-123/status');

View File

@@ -3,9 +3,7 @@
import { Router, Request, Response, NextFunction } from 'express';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { z } from 'zod';
import passport from './passport.routes';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { optionalAuth } from './passport.routes';
import passport, { optionalAuth } from '../config/passport';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { aiService, DuplicateFlyerError } from '../services/aiService.server';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
@@ -179,8 +177,41 @@ router.use((req: Request, res: Response, next: NextFunction) => {
});
/**
* NEW ENDPOINT: Accepts a single flyer file (PDF or image), enqueues it for
* background processing, and immediately returns a job ID.
* @openapi
* /ai/upload-and-process:
* post:
* tags: [AI]
* summary: Upload and process flyer
* description: Accepts a single flyer file (PDF or image), enqueues it for background processing, and immediately returns a job ID.
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - flyerFile
* - checksum
* properties:
* flyerFile:
* type: string
* format: binary
* description: Flyer file (PDF or image)
* checksum:
* type: string
* pattern: ^[a-f0-9]{64}$
* description: SHA-256 checksum of the file
* baseUrl:
* type: string
* format: uri
* description: Optional base URL
* responses:
* 202:
* description: Flyer accepted for processing
* 400:
* description: Missing file or invalid checksum
* 409:
* description: Duplicate flyer detected
*/
router.post(
'/upload-and-process',
@@ -245,12 +276,37 @@ router.post(
);
/**
* POST /api/ai/upload-legacy - Process a flyer upload from a legacy client.
* This is an authenticated route that processes the flyer synchronously.
* This is used for integration testing the legacy upload flow.
*
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
* This synchronous endpoint is retained only for integration testing purposes.
* @openapi
* /ai/upload-legacy:
* post:
* tags: [AI]
* summary: Legacy flyer upload (deprecated)
* description: Process a flyer upload synchronously. Deprecated - use /upload-and-process instead.
* deprecated: true
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - flyerFile
* properties:
* flyerFile:
* type: string
* format: binary
* description: Flyer file (PDF or image)
* responses:
* 200:
* description: Flyer processed successfully
* 400:
* description: No flyer file uploaded
* 401:
* description: Unauthorized
* 409:
* description: Duplicate flyer detected
*/
router.post(
'/upload-legacy',
@@ -282,7 +338,24 @@ router.post(
);
/**
* NEW ENDPOINT: Checks the status of a background job.
* @openapi
* /ai/jobs/{jobId}/status:
* get:
* tags: [AI]
* summary: Check job status
* description: Checks the status of a background flyer processing job.
* parameters:
* - in: path
* name: jobId
* required: true
* schema:
* type: string
* description: Job ID returned from upload-and-process
* responses:
* 200:
* description: Job status information
* 404:
* description: Job not found
*/
router.get(
'/jobs/:jobId/status',
@@ -304,12 +377,33 @@ router.get(
);
/**
* POST /api/ai/flyers/process - Saves the processed flyer data to the database.
* This is the final step in the flyer upload workflow after the AI has extracted the data.
* It uses `optionalAuth` to handle submissions from both anonymous and authenticated users.
*
* @deprecated Use POST /api/ai/upload-and-process instead for async queue-based processing (ADR-0006).
* This synchronous endpoint processes flyers inline and should be migrated to the queue-based approach.
* @openapi
* /ai/flyers/process:
* post:
* tags: [AI]
* summary: Process flyer data (deprecated)
* description: Saves processed flyer data to the database. Deprecated - use /upload-and-process instead.
* deprecated: true
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - flyerImage
* properties:
* flyerImage:
* type: string
* format: binary
* description: Flyer image file
* responses:
* 201:
* description: Flyer processed and saved successfully
* 400:
* description: Flyer image file is required
* 409:
* description: Duplicate flyer detected
*/
router.post(
'/flyers/process',
@@ -348,8 +442,30 @@ router.post(
);
/**
* This endpoint checks if an image is a flyer. It uses `optionalAuth` to allow
* both authenticated and anonymous users to perform this check.
* @openapi
* /ai/check-flyer:
* post:
* tags: [AI]
* summary: Check if image is a flyer
* description: Analyzes an image to determine if it's a grocery store flyer.
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - image
* properties:
* image:
* type: string
* format: binary
* description: Image file to check
* responses:
* 200:
* description: Flyer check result
* 400:
* description: Image file is required
*/
router.post(
'/check-flyer',
@@ -371,6 +487,32 @@ router.post(
},
);
/**
* @openapi
* /ai/extract-address:
* post:
* tags: [AI]
* summary: Extract address from image
* description: Extracts store address information from a flyer image.
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - image
* properties:
* image:
* type: string
* format: binary
* description: Image file to extract address from
* responses:
* 200:
* description: Extracted address information
* 400:
* description: Image file is required
*/
router.post(
'/extract-address',
aiUploadLimiter,
@@ -391,6 +533,34 @@ router.post(
},
);
/**
* @openapi
* /ai/extract-logo:
* post:
* tags: [AI]
* summary: Extract store logo
* description: Extracts store logo from flyer images.
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - images
* properties:
* images:
* type: array
* items:
* type: string
* format: binary
* description: Image files to extract logo from
* responses:
* 200:
* description: Extracted logo as base64
* 400:
* description: Image files are required
*/
router.post(
'/extract-logo',
aiUploadLimiter,
@@ -411,6 +581,36 @@ router.post(
},
);
/**
* @openapi
* /ai/quick-insights:
* post:
* tags: [AI]
* summary: Get quick insights
* description: Get AI-generated quick insights about flyer items.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - items
* properties:
* items:
* type: array
* items:
* type: object
* minItems: 1
* description: List of flyer items to analyze
* responses:
* 200:
* description: AI-generated quick insights
* 401:
* description: Unauthorized
*/
router.post(
'/quick-insights',
aiGenerationLimiter,
@@ -426,6 +626,36 @@ router.post(
},
);
/**
* @openapi
* /ai/deep-dive:
* post:
* tags: [AI]
* summary: Get deep dive analysis
* description: Get detailed AI-generated analysis about flyer items.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - items
* properties:
* items:
* type: array
* items:
* type: object
* minItems: 1
* description: List of flyer items to analyze
* responses:
* 200:
* description: Detailed AI analysis
* 401:
* description: Unauthorized
*/
router.post(
'/deep-dive',
aiGenerationLimiter,
@@ -443,6 +673,33 @@ router.post(
},
);
/**
* @openapi
* /ai/search-web:
* post:
* tags: [AI]
* summary: Search web for information
* description: Search the web for product or deal information.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - query
* properties:
* query:
* type: string
* description: Search query
* responses:
* 200:
* description: Search results with sources
* 401:
* description: Unauthorized
*/
router.post(
'/search-web',
aiGenerationLimiter,
@@ -458,6 +715,36 @@ router.post(
},
);
/**
* @openapi
* /ai/compare-prices:
* post:
* tags: [AI]
* summary: Compare prices across stores
* description: Compare prices for items across different stores.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - items
* properties:
* items:
* type: array
* items:
* type: object
* minItems: 1
* description: List of items to compare
* responses:
* 200:
* description: Price comparison results
* 401:
* description: Unauthorized
*/
router.post(
'/compare-prices',
aiGenerationLimiter,
@@ -477,6 +764,59 @@ router.post(
},
);
/**
* @openapi
* /ai/plan-trip:
* post:
* tags: [AI]
* summary: Plan shopping trip
* description: Plan an optimized shopping trip to a store based on items and location.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - items
* - store
* - userLocation
* properties:
* items:
* type: array
* items:
* type: object
* description: List of items to buy
* store:
* type: object
* required:
* - name
* properties:
* name:
* type: string
* description: Store name
* userLocation:
* type: object
* required:
* - latitude
* - longitude
* properties:
* latitude:
* type: number
* minimum: -90
* maximum: 90
* longitude:
* type: number
* minimum: -180
* maximum: 180
* responses:
* 200:
* description: Trip plan with directions
* 401:
* description: Unauthorized
*/
router.post(
'/plan-trip',
aiGenerationLimiter,
@@ -497,6 +837,33 @@ router.post(
// --- STUBBED AI Routes for Future Features ---
/**
* @openapi
* /ai/generate-image:
* post:
* tags: [AI]
* summary: Generate image (not implemented)
* description: Generate an image from a prompt. Currently not implemented.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - prompt
* properties:
* prompt:
* type: string
* description: Image generation prompt
* responses:
* 501:
* description: Not implemented
* 401:
* description: Unauthorized
*/
router.post(
'/generate-image',
aiGenerationLimiter,
@@ -510,6 +877,33 @@ router.post(
},
);
/**
* @openapi
* /ai/generate-speech:
* post:
* tags: [AI]
* summary: Generate speech (not implemented)
* description: Generate speech from text. Currently not implemented.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - text
* properties:
* text:
* type: string
* description: Text to convert to speech
* responses:
* 501:
* description: Not implemented
* 401:
* description: Unauthorized
*/
router.post(
'/generate-speech',
aiGenerationLimiter,
@@ -524,8 +918,43 @@ router.post(
);
/**
* POST /api/ai/rescan-area - Performs a targeted AI scan on a specific area of an image.
* Requires authentication.
* @openapi
* /ai/rescan-area:
* post:
* tags: [AI]
* summary: Rescan area of image
* description: Performs a targeted AI scan on a specific area of an image.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - image
* - cropArea
* - extractionType
* properties:
* image:
* type: string
* format: binary
* description: Image file to scan
* cropArea:
* type: string
* description: JSON string with x, y, width, height
* extractionType:
* type: string
* enum: [store_name, dates, item_details]
* description: Type of data to extract
* responses:
* 200:
* description: Extracted data from image area
* 400:
* description: Image file is required
* 401:
* description: Unauthorized
*/
router.post(
'/rescan-area',

View File

@@ -52,7 +52,7 @@ const passportMocks = vi.hoisted(() => {
// --- 2. Module Mocks ---
// Mock the local passport.routes module to control its behavior.
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn().mockImplementation(passportMocks.authenticateMock),
use: vi.fn(),

View File

@@ -3,7 +3,7 @@
import { Router, Request, Response, NextFunction } from 'express';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { z } from 'zod';
import passport from './passport.routes';
import passport from '../config/passport';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { UniqueConstraintError } from '../services/db/errors.db'; // Import actual class for instanceof checks
// Removed: import { logger } from '../services/logger.server';

View File

@@ -39,7 +39,7 @@ const mockUser = createMockUserProfile({
});
// Standardized mock for passport (now imported from '../config/passport')
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
req.user = mockUser;

View File

@@ -1,7 +1,7 @@
// src/routes/budget.ts
import express, { Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import passport from './passport.routes';
import passport from '../config/passport';
import { budgetRepo } from '../services/db/index.db';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';

View File

@@ -25,7 +25,7 @@ vi.mock('../services/logger.server', async () => ({
}));
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
// If req.user is not set by the test setup, simulate unauthenticated access.

View File

@@ -1,7 +1,7 @@
// src/routes/deals.routes.ts
import express, { type Request, type Response, type NextFunction } from 'express';
import { z } from 'zod';
import passport from './passport.routes';
import passport from '../config/passport';
import { dealsRepo } from '../services/db/deals.db';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';

View File

@@ -38,7 +38,7 @@ const mockedAuthMiddleware = vi.hoisted(() =>
);
const mockedIsAdmin = vi.hoisted(() => vi.fn());
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
// The authenticate method will now call our hoisted mock middleware.
authenticate: vi.fn(() => mockedAuthMiddleware),

View File

@@ -2,7 +2,7 @@
import express, { NextFunction } from 'express';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { z } from 'zod';
import passport, { isAdmin } from './passport.routes'; // Correctly imported
import passport, { isAdmin } from '../config/passport';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { gamificationService } from '../services/gamificationService';
// Removed: import { logger } from '../services/logger.server';

View File

@@ -0,0 +1,665 @@
// src/routes/inventory.routes.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import type { Request, Response, NextFunction } from 'express';
import { createMockUserProfile } from '../tests/utils/mockFactories';
import { createTestApp } from '../tests/utils/createTestApp';
import { NotFoundError } from '../services/db/errors.db';
import type { UserInventoryItem, ExpiringItemsResponse } from '../types/expiry';
// Mock the expiryService module
vi.mock('../services/expiryService.server', () => ({
getInventory: vi.fn(),
addInventoryItem: vi.fn(),
getInventoryItemById: vi.fn(),
updateInventoryItem: vi.fn(),
deleteInventoryItem: vi.fn(),
markItemConsumed: vi.fn(),
getExpiringItemsGrouped: vi.fn(),
getExpiringItems: vi.fn(),
getExpiredItems: vi.fn(),
getAlertSettings: vi.fn(),
updateAlertSettings: vi.fn(),
getRecipeSuggestionsForExpiringItems: vi.fn(),
}));
// Mock the logger to keep test output clean
vi.mock('../services/logger.server', async () => ({
logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
// Import the router and mocked service AFTER all mocks are defined.
import inventoryRouter from './inventory.routes';
import * as expiryService from '../services/expiryService.server';
const mockUser = createMockUserProfile({
user: { user_id: 'user-123', email: 'test@test.com' },
});
// Standardized mock for passport
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
req.user = mockUser;
next();
}),
initialize: () => (req: Request, res: Response, next: NextFunction) => next(),
},
}));
// Define a reusable matcher for the logger object.
const expectLogger = expect.objectContaining({
info: expect.any(Function),
error: expect.any(Function),
});
// Helper to create mock inventory item
function createMockInventoryItem(overrides: Partial<UserInventoryItem> = {}): UserInventoryItem {
return {
inventory_id: 1,
user_id: 'user-123',
product_id: null,
master_item_id: 100,
item_name: 'Milk',
quantity: 1,
unit: 'liters',
purchase_date: '2024-01-10',
expiry_date: '2024-02-10',
source: 'manual',
location: 'fridge',
notes: null,
is_consumed: false,
consumed_at: null,
expiry_source: 'manual',
receipt_item_id: null,
pantry_location_id: 1,
notification_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
days_until_expiry: 10,
expiry_status: 'fresh',
...overrides,
};
}
describe('Inventory Routes (/api/inventory)', () => {
const mockUserProfile = createMockUserProfile({
user: { user_id: 'user-123', email: 'test@test.com' },
});
beforeEach(() => {
vi.clearAllMocks();
// Provide default mock implementations
vi.mocked(expiryService.getInventory).mockResolvedValue({ items: [], total: 0 });
vi.mocked(expiryService.getExpiringItems).mockResolvedValue([]);
vi.mocked(expiryService.getExpiredItems).mockResolvedValue([]);
vi.mocked(expiryService.getAlertSettings).mockResolvedValue([]);
});
const app = createTestApp({
router: inventoryRouter,
basePath: '/api/inventory',
authenticatedUser: mockUserProfile,
});
// ============================================================================
// INVENTORY ITEM ENDPOINTS
// ============================================================================
describe('GET /', () => {
it('should return paginated inventory items', async () => {
const mockItems = [createMockInventoryItem()];
vi.mocked(expiryService.getInventory).mockResolvedValue({
items: mockItems,
total: 1,
});
const response = await supertest(app).get('/api/inventory');
expect(response.status).toBe(200);
expect(response.body.data.items).toHaveLength(1);
expect(response.body.data.total).toBe(1);
});
it('should support filtering by location', async () => {
vi.mocked(expiryService.getInventory).mockResolvedValue({ items: [], total: 0 });
const response = await supertest(app).get('/api/inventory?location=fridge');
expect(response.status).toBe(200);
expect(expiryService.getInventory).toHaveBeenCalledWith(
expect.objectContaining({ location: 'fridge' }),
expectLogger,
);
});
it('should support filtering by expiring_within_days', async () => {
vi.mocked(expiryService.getInventory).mockResolvedValue({ items: [], total: 0 });
const response = await supertest(app).get('/api/inventory?expiring_within_days=7');
expect(response.status).toBe(200);
expect(expiryService.getInventory).toHaveBeenCalledWith(
expect.objectContaining({ expiring_within_days: 7 }),
expectLogger,
);
});
it('should support search filter', async () => {
vi.mocked(expiryService.getInventory).mockResolvedValue({ items: [], total: 0 });
const response = await supertest(app).get('/api/inventory?search=milk');
expect(response.status).toBe(200);
expect(expiryService.getInventory).toHaveBeenCalledWith(
expect.objectContaining({ search: 'milk' }),
expectLogger,
);
});
it('should support sorting', async () => {
vi.mocked(expiryService.getInventory).mockResolvedValue({ items: [], total: 0 });
const response = await supertest(app).get(
'/api/inventory?sort_by=expiry_date&sort_order=asc',
);
expect(response.status).toBe(200);
expect(expiryService.getInventory).toHaveBeenCalledWith(
expect.objectContaining({
sort_by: 'expiry_date',
sort_order: 'asc',
}),
expectLogger,
);
});
it('should return 400 for invalid location', async () => {
const response = await supertest(app).get('/api/inventory?location=invalid');
expect(response.status).toBe(400);
});
it('should return 500 if service fails', async () => {
vi.mocked(expiryService.getInventory).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/inventory');
expect(response.status).toBe(500);
});
});
describe('POST /', () => {
it('should add a new inventory item', async () => {
const mockItem = createMockInventoryItem();
vi.mocked(expiryService.addInventoryItem).mockResolvedValue(mockItem);
const response = await supertest(app).post('/api/inventory').send({
item_name: 'Milk',
source: 'manual',
quantity: 1,
location: 'fridge',
expiry_date: '2024-02-10',
});
expect(response.status).toBe(201);
expect(response.body.data.item_name).toBe('Milk');
expect(expiryService.addInventoryItem).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
expect.objectContaining({
item_name: 'Milk',
source: 'manual',
}),
expectLogger,
);
});
it('should return 400 if item_name is missing', async () => {
const response = await supertest(app).post('/api/inventory').send({
source: 'manual',
});
expect(response.status).toBe(400);
// Zod returns a type error message when a required field is undefined
expect(response.body.error.details[0].message).toMatch(/expected string|required/i);
});
it('should return 400 for invalid source', async () => {
const response = await supertest(app).post('/api/inventory').send({
item_name: 'Milk',
source: 'invalid_source',
});
expect(response.status).toBe(400);
});
it('should return 400 for invalid expiry_date format', async () => {
const response = await supertest(app).post('/api/inventory').send({
item_name: 'Milk',
source: 'manual',
expiry_date: '01-10-2024',
});
expect(response.status).toBe(400);
expect(response.body.error.details[0].message).toMatch(/YYYY-MM-DD/);
});
it('should return 500 if service fails', async () => {
vi.mocked(expiryService.addInventoryItem).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).post('/api/inventory').send({
item_name: 'Milk',
source: 'manual',
});
expect(response.status).toBe(500);
});
});
describe('GET /:inventoryId', () => {
it('should return a specific inventory item', async () => {
const mockItem = createMockInventoryItem();
vi.mocked(expiryService.getInventoryItemById).mockResolvedValue(mockItem);
const response = await supertest(app).get('/api/inventory/1');
expect(response.status).toBe(200);
expect(response.body.data.inventory_id).toBe(1);
expect(expiryService.getInventoryItemById).toHaveBeenCalledWith(
1,
mockUserProfile.user.user_id,
expectLogger,
);
});
it('should return 404 when item not found', async () => {
vi.mocked(expiryService.getInventoryItemById).mockRejectedValue(
new NotFoundError('Item not found'),
);
const response = await supertest(app).get('/api/inventory/999');
expect(response.status).toBe(404);
});
it('should return 400 for invalid inventory ID', async () => {
const response = await supertest(app).get('/api/inventory/abc');
expect(response.status).toBe(400);
});
});
describe('PUT /:inventoryId', () => {
it('should update an inventory item', async () => {
const mockItem = createMockInventoryItem({ quantity: 2 });
vi.mocked(expiryService.updateInventoryItem).mockResolvedValue(mockItem);
const response = await supertest(app).put('/api/inventory/1').send({
quantity: 2,
});
expect(response.status).toBe(200);
expect(response.body.data.quantity).toBe(2);
});
it('should update expiry_date', async () => {
const mockItem = createMockInventoryItem({ expiry_date: '2024-03-01' });
vi.mocked(expiryService.updateInventoryItem).mockResolvedValue(mockItem);
const response = await supertest(app).put('/api/inventory/1').send({
expiry_date: '2024-03-01',
});
expect(response.status).toBe(200);
expect(expiryService.updateInventoryItem).toHaveBeenCalledWith(
1,
mockUserProfile.user.user_id,
expect.objectContaining({ expiry_date: '2024-03-01' }),
expectLogger,
);
});
it('should return 400 if no update fields provided', async () => {
const response = await supertest(app).put('/api/inventory/1').send({});
expect(response.status).toBe(400);
expect(response.body.error.details[0].message).toMatch(/At least one field/);
});
it('should return 404 when item not found', async () => {
vi.mocked(expiryService.updateInventoryItem).mockRejectedValue(
new NotFoundError('Item not found'),
);
const response = await supertest(app).put('/api/inventory/999').send({
quantity: 2,
});
expect(response.status).toBe(404);
});
});
describe('DELETE /:inventoryId', () => {
it('should delete an inventory item', async () => {
vi.mocked(expiryService.deleteInventoryItem).mockResolvedValue(undefined);
const response = await supertest(app).delete('/api/inventory/1');
expect(response.status).toBe(204);
expect(expiryService.deleteInventoryItem).toHaveBeenCalledWith(
1,
mockUserProfile.user.user_id,
expectLogger,
);
});
it('should return 404 when item not found', async () => {
vi.mocked(expiryService.deleteInventoryItem).mockRejectedValue(
new NotFoundError('Item not found'),
);
const response = await supertest(app).delete('/api/inventory/999');
expect(response.status).toBe(404);
});
});
describe('POST /:inventoryId/consume', () => {
it('should mark item as consumed', async () => {
vi.mocked(expiryService.markItemConsumed).mockResolvedValue(undefined);
const response = await supertest(app).post('/api/inventory/1/consume');
expect(response.status).toBe(204);
expect(expiryService.markItemConsumed).toHaveBeenCalledWith(
1,
mockUserProfile.user.user_id,
expectLogger,
);
});
it('should return 404 when item not found', async () => {
vi.mocked(expiryService.markItemConsumed).mockRejectedValue(
new NotFoundError('Item not found'),
);
const response = await supertest(app).post('/api/inventory/999/consume');
expect(response.status).toBe(404);
});
});
// ============================================================================
// EXPIRING ITEMS ENDPOINTS
// ============================================================================
describe('GET /expiring/summary', () => {
it('should return expiring items grouped by urgency', async () => {
const mockSummary: ExpiringItemsResponse = {
expiring_today: [createMockInventoryItem({ days_until_expiry: 0 })],
expiring_this_week: [createMockInventoryItem({ days_until_expiry: 3 })],
expiring_this_month: [createMockInventoryItem({ days_until_expiry: 20 })],
already_expired: [createMockInventoryItem({ days_until_expiry: -5 })],
counts: {
today: 1,
this_week: 1,
this_month: 1,
expired: 1,
total: 4,
},
};
vi.mocked(expiryService.getExpiringItemsGrouped).mockResolvedValue(mockSummary);
const response = await supertest(app).get('/api/inventory/expiring/summary');
expect(response.status).toBe(200);
expect(response.body.data.counts.total).toBe(4);
});
it('should return 500 if service fails', async () => {
vi.mocked(expiryService.getExpiringItemsGrouped).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/inventory/expiring/summary');
expect(response.status).toBe(500);
});
});
describe('GET /expiring', () => {
it('should return items expiring within default 7 days', async () => {
const mockItems = [createMockInventoryItem({ days_until_expiry: 5 })];
vi.mocked(expiryService.getExpiringItems).mockResolvedValue(mockItems);
const response = await supertest(app).get('/api/inventory/expiring');
expect(response.status).toBe(200);
expect(response.body.data.items).toHaveLength(1);
expect(expiryService.getExpiringItems).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
7,
expectLogger,
);
});
it('should accept custom days parameter', async () => {
vi.mocked(expiryService.getExpiringItems).mockResolvedValue([]);
const response = await supertest(app).get('/api/inventory/expiring?days=14');
expect(response.status).toBe(200);
expect(expiryService.getExpiringItems).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
14,
expectLogger,
);
});
it('should return 400 for invalid days parameter', async () => {
const response = await supertest(app).get('/api/inventory/expiring?days=100');
expect(response.status).toBe(400);
});
});
describe('GET /expired', () => {
it('should return already expired items', async () => {
const mockItems = [
createMockInventoryItem({ days_until_expiry: -3, expiry_status: 'expired' }),
];
vi.mocked(expiryService.getExpiredItems).mockResolvedValue(mockItems);
const response = await supertest(app).get('/api/inventory/expired');
expect(response.status).toBe(200);
expect(response.body.data.items).toHaveLength(1);
expect(expiryService.getExpiredItems).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
expectLogger,
);
});
it('should return 500 if service fails', async () => {
vi.mocked(expiryService.getExpiredItems).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/inventory/expired');
expect(response.status).toBe(500);
});
});
// ============================================================================
// ALERT SETTINGS ENDPOINTS
// ============================================================================
describe('GET /alerts', () => {
it('should return user alert settings', async () => {
const mockSettings = [
{
expiry_alert_id: 1,
user_id: 'user-123',
alert_method: 'email' as const,
days_before_expiry: 3,
is_enabled: true,
last_alert_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
},
];
vi.mocked(expiryService.getAlertSettings).mockResolvedValue(mockSettings);
const response = await supertest(app).get('/api/inventory/alerts');
expect(response.status).toBe(200);
expect(response.body.data).toHaveLength(1);
expect(response.body.data[0].alert_method).toBe('email');
});
it('should return 500 if service fails', async () => {
vi.mocked(expiryService.getAlertSettings).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).get('/api/inventory/alerts');
expect(response.status).toBe(500);
});
});
describe('PUT /alerts/:alertMethod', () => {
it('should update alert settings for email', async () => {
const mockSettings = {
expiry_alert_id: 1,
user_id: 'user-123',
alert_method: 'email' as const,
days_before_expiry: 5,
is_enabled: true,
last_alert_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
vi.mocked(expiryService.updateAlertSettings).mockResolvedValue(mockSettings);
const response = await supertest(app).put('/api/inventory/alerts/email').send({
days_before_expiry: 5,
is_enabled: true,
});
expect(response.status).toBe(200);
expect(response.body.data.days_before_expiry).toBe(5);
expect(expiryService.updateAlertSettings).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
'email',
{ days_before_expiry: 5, is_enabled: true },
expectLogger,
);
});
it('should return 400 for invalid alert method', async () => {
const response = await supertest(app).put('/api/inventory/alerts/sms').send({
is_enabled: true,
});
expect(response.status).toBe(400);
});
it('should return 400 for invalid days_before_expiry', async () => {
const response = await supertest(app).put('/api/inventory/alerts/email').send({
days_before_expiry: 0,
});
expect(response.status).toBe(400);
});
it('should return 400 if days_before_expiry exceeds maximum', async () => {
const response = await supertest(app).put('/api/inventory/alerts/email').send({
days_before_expiry: 31,
});
expect(response.status).toBe(400);
});
it('should return 500 if service fails', async () => {
vi.mocked(expiryService.updateAlertSettings).mockRejectedValue(new Error('DB Error'));
const response = await supertest(app).put('/api/inventory/alerts/email').send({
is_enabled: false,
});
expect(response.status).toBe(500);
});
});
// ============================================================================
// RECIPE SUGGESTIONS ENDPOINT
// ============================================================================
describe('GET /recipes/suggestions', () => {
it('should return recipe suggestions for expiring items', async () => {
const mockInventoryItem = createMockInventoryItem({ inventory_id: 1, item_name: 'Milk' });
const mockResult = {
recipes: [
{
recipe_id: 1,
recipe_name: 'Milk Smoothie',
description: 'A healthy smoothie',
prep_time_minutes: 5,
cook_time_minutes: 0,
servings: 2,
photo_url: null,
matching_items: [mockInventoryItem],
match_count: 1,
},
],
total: 1,
considered_items: [mockInventoryItem],
};
vi.mocked(expiryService.getRecipeSuggestionsForExpiringItems).mockResolvedValue(
mockResult as any,
);
const response = await supertest(app).get('/api/inventory/recipes/suggestions');
expect(response.status).toBe(200);
expect(response.body.data.recipes).toHaveLength(1);
expect(response.body.data.total).toBe(1);
});
it('should accept days, limit, and offset parameters', async () => {
vi.mocked(expiryService.getRecipeSuggestionsForExpiringItems).mockResolvedValue({
recipes: [],
total: 0,
considered_items: [],
});
const response = await supertest(app).get(
'/api/inventory/recipes/suggestions?days=14&limit=5&offset=10',
);
expect(response.status).toBe(200);
expect(expiryService.getRecipeSuggestionsForExpiringItems).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
14,
expectLogger,
{ limit: 5, offset: 10 },
);
});
it('should return 400 for invalid days parameter', async () => {
const response = await supertest(app).get('/api/inventory/recipes/suggestions?days=100');
expect(response.status).toBe(400);
});
it('should return 500 if service fails', async () => {
vi.mocked(expiryService.getRecipeSuggestionsForExpiringItems).mockRejectedValue(
new Error('DB Error'),
);
const response = await supertest(app).get('/api/inventory/recipes/suggestions');
expect(response.status).toBe(500);
});
});
});

View File

@@ -0,0 +1,847 @@
// src/routes/inventory.routes.ts
/**
* @file Inventory and Expiry Tracking API Routes
* Provides endpoints for managing pantry inventory, expiry tracking, and alerts.
*/
import express, { Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import passport from '../config/passport';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { numericIdParam, optionalNumeric } from '../utils/zodUtils';
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
import * as expiryService from '../services/expiryService.server';
const router = express.Router();
// --- Zod Schemas for Inventory Routes ---
/**
* Storage location validation
*/
const storageLocationSchema = z.enum(['fridge', 'freezer', 'pantry', 'room_temp']);
/**
* Inventory source validation
*/
const inventorySourceSchema = z.enum(['manual', 'receipt_scan', 'upc_scan']);
/**
* Alert method validation
*/
const alertMethodSchema = z.enum(['email', 'push', 'in_app']);
/**
* Schema for inventory item ID parameter
*/
const inventoryIdParamSchema = numericIdParam(
'inventoryId',
"Invalid ID for parameter 'inventoryId'. Must be a number.",
);
/**
* Schema for adding an inventory item
*/
const addInventoryItemSchema = z.object({
body: z.object({
product_id: z.number().int().positive().optional(),
master_item_id: z.number().int().positive().optional(),
item_name: z.string().min(1, 'Item name is required.').max(255),
quantity: z.number().positive().default(1),
unit: z.string().max(50).optional(),
purchase_date: z.string().date('Purchase date must be in YYYY-MM-DD format.').optional(),
expiry_date: z.string().date('Expiry date must be in YYYY-MM-DD format.').optional(),
source: inventorySourceSchema,
location: storageLocationSchema.optional(),
notes: z.string().max(500).optional(),
}),
});
/**
* Schema for updating an inventory item
*/
const updateInventoryItemSchema = inventoryIdParamSchema.extend({
body: z
.object({
quantity: z.number().positive().optional(),
unit: z.string().max(50).optional(),
expiry_date: z.string().date('Expiry date must be in YYYY-MM-DD format.').optional(),
location: storageLocationSchema.optional(),
notes: z.string().max(500).optional(),
is_consumed: z.boolean().optional(),
})
.refine((data) => Object.keys(data).length > 0, {
message: 'At least one field to update must be provided.',
}),
});
/**
* Schema for inventory query
*/
const inventoryQuerySchema = z.object({
query: z.object({
limit: optionalNumeric({ default: 50, min: 1, max: 100, integer: true }),
offset: optionalNumeric({ default: 0, min: 0, integer: true }),
location: storageLocationSchema.optional(),
is_consumed: z
.string()
.optional()
.transform((val) => (val === 'true' ? true : val === 'false' ? false : undefined)),
expiring_within_days: z
.string()
.optional()
.transform((val) => (val ? parseInt(val, 10) : undefined))
.pipe(z.number().int().positive().optional()),
category_id: z
.string()
.optional()
.transform((val) => (val ? parseInt(val, 10) : undefined))
.pipe(z.number().int().positive().optional()),
search: z.string().max(100).optional(),
sort_by: z.enum(['expiry_date', 'purchase_date', 'item_name', 'created_at']).optional(),
sort_order: z.enum(['asc', 'desc']).optional(),
}),
});
/**
* Schema for alert settings update
*/
const updateAlertSettingsSchema = z.object({
params: z.object({
alertMethod: alertMethodSchema,
}),
body: z.object({
days_before_expiry: z.number().int().min(1).max(30).optional(),
is_enabled: z.boolean().optional(),
}),
});
/**
* Schema for days ahead parameter
*/
const daysAheadQuerySchema = z.object({
query: z.object({
days: z
.string()
.optional()
.default('7')
.transform((val) => parseInt(val, 10))
.pipe(z.number().int().min(1).max(90)),
}),
});
// Middleware to ensure user is authenticated for all inventory routes
router.use(passport.authenticate('jwt', { session: false }));
// ============================================================================
// INVENTORY ITEM ENDPOINTS
// ============================================================================
/**
 * @openapi
 * /inventory:
 * get:
 * tags: [Inventory]
 * summary: Get inventory items
 * description: Retrieve the user's pantry inventory with optional filtering and pagination.
 * security:
 * - bearerAuth: []
 * parameters:
 * - in: query
 * name: limit
 * schema:
 * type: integer
 * minimum: 1
 * maximum: 100
 * default: 50
 * - in: query
 * name: offset
 * schema:
 * type: integer
 * minimum: 0
 * default: 0
 * - in: query
 * name: location
 * schema:
 * type: string
 * enum: [fridge, freezer, pantry, room_temp]
 * - in: query
 * name: is_consumed
 * schema:
 * type: boolean
 * - in: query
 * name: expiring_within_days
 * schema:
 * type: integer
 * minimum: 1
 * - in: query
 * name: category_id
 * schema:
 * type: integer
 * - in: query
 * name: search
 * schema:
 * type: string
 * maxLength: 100
 * - in: query
 * name: sort_by
 * schema:
 * type: string
 * enum: [expiry_date, purchase_date, item_name, created_at]
 * - in: query
 * name: sort_order
 * schema:
 * type: string
 * enum: [asc, desc]
 * responses:
 * 200:
 * description: Inventory items retrieved
 * 401:
 * description: Unauthorized
 */
router.get(
  '/',
  validateRequest(inventoryQuerySchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type InventoryQueryRequest = z.infer<typeof inventoryQuerySchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    // Query params were validated/coerced by validateRequest above.
    const { query } = req as unknown as InventoryQueryRequest;
    try {
      // Pass all validated filter, pagination, and sort options straight
      // through to the service layer.
      const result = await expiryService.getInventory(
        {
          user_id: userId,
          location: query.location,
          is_consumed: query.is_consumed,
          expiring_within_days: query.expiring_within_days,
          category_id: query.category_id,
          search: query.search,
          limit: query.limit,
          offset: query.offset,
          sort_by: query.sort_by,
          sort_order: query.sort_order,
        },
        req.log,
      );
      sendSuccess(res, result);
    } catch (error) {
      req.log.error({ error, userId }, 'Error fetching inventory');
      next(error);
    }
  },
);
/**
 * @openapi
 * /inventory:
 * post:
 * tags: [Inventory]
 * summary: Add inventory item
 * description: Add a new item to the user's pantry inventory.
 * security:
 * - bearerAuth: []
 * requestBody:
 * required: true
 * content:
 * application/json:
 * schema:
 * type: object
 * required:
 * - item_name
 * - source
 * properties:
 * product_id:
 * type: integer
 * master_item_id:
 * type: integer
 * item_name:
 * type: string
 * maxLength: 255
 * quantity:
 * type: number
 * minimum: 0
 * default: 1
 * unit:
 * type: string
 * maxLength: 50
 * purchase_date:
 * type: string
 * format: date
 * expiry_date:
 * type: string
 * format: date
 * source:
 * type: string
 * enum: [manual, receipt_scan, upc_scan]
 * location:
 * type: string
 * enum: [fridge, freezer, pantry, room_temp]
 * notes:
 * type: string
 * maxLength: 500
 * responses:
 * 201:
 * description: Item added to inventory
 * 400:
 * description: Validation error
 * 401:
 * description: Unauthorized
 */
router.post(
  '/',
  validateRequest(addInventoryItemSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type AddInventoryRequest = z.infer<typeof addInventoryItemSchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    // Body was validated by validateRequest above.
    const { body } = req as unknown as AddInventoryRequest;
    try {
      req.log.info({ userId, itemName: body.item_name }, 'Adding item to inventory');
      const item = await expiryService.addInventoryItem(userId, body, req.log);
      // 201: a new inventory row was created.
      sendSuccess(res, item, 201);
    } catch (error) {
      req.log.error({ error, userId, body }, 'Error adding inventory item');
      next(error);
    }
  },
);
// ============================================================================
// EXPIRING ITEMS ENDPOINTS
// NOTE: These routes MUST be defined BEFORE /:inventoryId to avoid path conflicts
// ============================================================================
/**
 * @openapi
 * /inventory/expiring/summary:
 * get:
 * tags: [Inventory]
 * summary: Get expiring items summary
 * description: Get items grouped by expiry urgency (today, this week, this month, expired).
 * security:
 * - bearerAuth: []
 * responses:
 * 200:
 * description: Expiring items grouped by urgency
 * content:
 * application/json:
 * schema:
 * type: object
 * properties:
 * expiring_today:
 * type: array
 * expiring_this_week:
 * type: array
 * expiring_this_month:
 * type: array
 * already_expired:
 * type: array
 * counts:
 * type: object
 * properties:
 * today:
 * type: integer
 * this_week:
 * type: integer
 * this_month:
 * type: integer
 * expired:
 * type: integer
 * total:
 * type: integer
 * 401:
 * description: Unauthorized
 */
router.get('/expiring/summary', async (req: Request, res: Response, next: NextFunction) => {
  const profile = req.user as UserProfile;
  const userId = profile.user.user_id;
  try {
    // Service returns items bucketed by urgency plus per-bucket counts.
    const grouped = await expiryService.getExpiringItemsGrouped(userId, req.log);
    sendSuccess(res, grouped);
  } catch (error) {
    req.log.error({ error, userId }, 'Error fetching expiring items summary');
    next(error);
  }
});
/**
 * @openapi
 * /inventory/expiring:
 * get:
 * tags: [Inventory]
 * summary: Get expiring items
 * description: Get items expiring within a specified number of days.
 * security:
 * - bearerAuth: []
 * parameters:
 * - in: query
 * name: days
 * schema:
 * type: integer
 * minimum: 1
 * maximum: 90
 * default: 7
 * description: Number of days to look ahead
 * responses:
 * 200:
 * description: Expiring items retrieved
 * 401:
 * description: Unauthorized
 */
router.get(
  '/expiring',
  validateRequest(daysAheadQuerySchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type ExpiringItemsRequest = z.infer<typeof daysAheadQuerySchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    // `days` was coerced to an int (1-90) by the schema.
    const { query } = req as unknown as ExpiringItemsRequest;
    try {
      const items = await expiryService.getExpiringItems(userId, query.days, req.log);
      sendSuccess(res, { items, total: items.length });
    } catch (error) {
      req.log.error({ error, userId }, 'Error fetching expiring items');
      next(error);
    }
  },
);
/**
 * @openapi
 * /inventory/expired:
 * get:
 * tags: [Inventory]
 * summary: Get expired items
 * description: Get all items that have already expired.
 * security:
 * - bearerAuth: []
 * responses:
 * 200:
 * description: Expired items retrieved
 * 401:
 * description: Unauthorized
 */
router.get('/expired', async (req: Request, res: Response, next: NextFunction) => {
  const profile = req.user as UserProfile;
  const userId = profile.user.user_id;
  try {
    const expiredItems = await expiryService.getExpiredItems(userId, req.log);
    sendSuccess(res, { items: expiredItems, total: expiredItems.length });
  } catch (error) {
    req.log.error({ error, userId }, 'Error fetching expired items');
    next(error);
  }
});
// ============================================================================
// ALERT SETTINGS ENDPOINTS
// NOTE: These routes MUST be defined BEFORE /:inventoryId to avoid path conflicts
// ============================================================================
/**
 * @openapi
 * /inventory/alerts:
 * get:
 * tags: [Inventory]
 * summary: Get alert settings
 * description: Get the user's expiry alert settings.
 * security:
 * - bearerAuth: []
 * responses:
 * 200:
 * description: Alert settings retrieved
 * 401:
 * description: Unauthorized
 */
router.get('/alerts', async (req: Request, res: Response, next: NextFunction) => {
  const profile = req.user as UserProfile;
  const userId = profile.user.user_id;
  try {
    const settings = await expiryService.getAlertSettings(userId, req.log);
    sendSuccess(res, settings);
  } catch (error) {
    req.log.error({ error, userId }, 'Error fetching alert settings');
    next(error);
  }
});
/**
 * @openapi
 * /inventory/alerts/{alertMethod}:
 * put:
 * tags: [Inventory]
 * summary: Update alert settings
 * description: Update alert settings for a specific notification method.
 * security:
 * - bearerAuth: []
 * parameters:
 * - in: path
 * name: alertMethod
 * required: true
 * schema:
 * type: string
 * enum: [email, push, in_app]
 * requestBody:
 * required: true
 * content:
 * application/json:
 * schema:
 * type: object
 * properties:
 * days_before_expiry:
 * type: integer
 * minimum: 1
 * maximum: 30
 * is_enabled:
 * type: boolean
 * responses:
 * 200:
 * description: Alert settings updated
 * 400:
 * description: Validation error
 * 401:
 * description: Unauthorized
 */
router.put(
  '/alerts/:alertMethod',
  validateRequest(updateAlertSettingsSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type UpdateAlertRequest = z.infer<typeof updateAlertSettingsSchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    // Both the path param and the body were validated by the schema above.
    const { params, body } = req as unknown as UpdateAlertRequest;
    try {
      const settings = await expiryService.updateAlertSettings(
        userId,
        params.alertMethod,
        body,
        req.log,
      );
      sendSuccess(res, settings);
    } catch (error) {
      req.log.error(
        { error, userId, alertMethod: params.alertMethod },
        'Error updating alert settings',
      );
      next(error);
    }
  },
);
// ============================================================================
// RECIPE SUGGESTIONS ENDPOINT
// NOTE: This route MUST be defined BEFORE /:inventoryId to avoid path conflicts
// ============================================================================
/**
* @openapi
* /inventory/recipes/suggestions:
* get:
* tags: [Inventory]
* summary: Get recipe suggestions for expiring items
* description: Get recipes that use items expiring soon to reduce food waste.
* security:
* - bearerAuth: []
* parameters:
* - in: query
* name: days
* schema:
* type: integer
* minimum: 1
* maximum: 90
* default: 7
* description: Consider items expiring within this many days
* - in: query
* name: limit
* schema:
* type: integer
* minimum: 1
* maximum: 50
* default: 10
* - in: query
* name: offset
* schema:
* type: integer
* minimum: 0
* default: 0
* responses:
* 200:
* description: Recipe suggestions retrieved
* 401:
* description: Unauthorized
*/
router.get(
'/recipes/suggestions',
validateRequest(
z.object({
query: z.object({
days: z
.string()
.optional()
.default('7')
.transform((val) => parseInt(val, 10))
.pipe(z.number().int().min(1).max(90)),
limit: optionalNumeric({ default: 10, min: 1, max: 50, integer: true }),
offset: optionalNumeric({ default: 0, min: 0, integer: true }),
}),
}),
),
async (req: Request, res: Response, next: NextFunction) => {
const userProfile = req.user as UserProfile;
const { query } = req as unknown as {
query: { days: number; limit?: number; offset?: number };
};
try {
const result = await expiryService.getRecipeSuggestionsForExpiringItems(
userProfile.user.user_id,
query.days,
req.log,
{ limit: query.limit, offset: query.offset },
);
sendSuccess(res, result);
} catch (error) {
req.log.error(
{ error, userId: userProfile.user.user_id },
'Error fetching recipe suggestions',
);
next(error);
}
},
);
// ============================================================================
// INVENTORY ITEM BY ID ENDPOINTS
// NOTE: These routes with /:inventoryId MUST come AFTER specific path routes
// ============================================================================
/**
 * @openapi
 * /inventory/{inventoryId}:
 * get:
 * tags: [Inventory]
 * summary: Get inventory item by ID
 * description: Retrieve a specific inventory item.
 * security:
 * - bearerAuth: []
 * parameters:
 * - in: path
 * name: inventoryId
 * required: true
 * schema:
 * type: integer
 * responses:
 * 200:
 * description: Inventory item retrieved
 * 401:
 * description: Unauthorized
 * 404:
 * description: Item not found
 */
router.get(
  '/:inventoryId',
  validateRequest(inventoryIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type GetItemRequest = z.infer<typeof inventoryIdParamSchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    // inventoryId was validated/coerced by the param schema above.
    const { params } = req as unknown as GetItemRequest;
    try {
      const item = await expiryService.getInventoryItemById(params.inventoryId, userId, req.log);
      sendSuccess(res, item);
    } catch (error) {
      req.log.error(
        { error, userId, inventoryId: params.inventoryId },
        'Error fetching inventory item',
      );
      next(error);
    }
  },
);
/**
 * @openapi
 * /inventory/{inventoryId}:
 * put:
 * tags: [Inventory]
 * summary: Update inventory item
 * description: Update an existing inventory item.
 * security:
 * - bearerAuth: []
 * parameters:
 * - in: path
 * name: inventoryId
 * required: true
 * schema:
 * type: integer
 * requestBody:
 * required: true
 * content:
 * application/json:
 * schema:
 * type: object
 * properties:
 * quantity:
 * type: number
 * minimum: 0
 * unit:
 * type: string
 * maxLength: 50
 * expiry_date:
 * type: string
 * format: date
 * location:
 * type: string
 * enum: [fridge, freezer, pantry, room_temp]
 * notes:
 * type: string
 * maxLength: 500
 * is_consumed:
 * type: boolean
 * responses:
 * 200:
 * description: Item updated
 * 400:
 * description: Validation error
 * 401:
 * description: Unauthorized
 * 404:
 * description: Item not found
 */
router.put(
  '/:inventoryId',
  validateRequest(updateInventoryItemSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type UpdateItemRequest = z.infer<typeof updateInventoryItemSchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    // Path param and body were validated by the schema above.
    const { params, body } = req as unknown as UpdateItemRequest;
    try {
      const item = await expiryService.updateInventoryItem(
        params.inventoryId,
        userId,
        body,
        req.log,
      );
      sendSuccess(res, item);
    } catch (error) {
      req.log.error(
        { error, userId, inventoryId: params.inventoryId },
        'Error updating inventory item',
      );
      next(error);
    }
  },
);
/**
 * @openapi
 * /inventory/{inventoryId}:
 * delete:
 * tags: [Inventory]
 * summary: Delete inventory item
 * description: Remove an item from the user's inventory.
 * security:
 * - bearerAuth: []
 * parameters:
 * - in: path
 * name: inventoryId
 * required: true
 * schema:
 * type: integer
 * responses:
 * 204:
 * description: Item deleted
 * 401:
 * description: Unauthorized
 * 404:
 * description: Item not found
 */
router.delete(
  '/:inventoryId',
  validateRequest(inventoryIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type DeleteItemRequest = z.infer<typeof inventoryIdParamSchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    const { params } = req as unknown as DeleteItemRequest;
    try {
      await expiryService.deleteInventoryItem(params.inventoryId, userId, req.log);
      // 204: deletion succeeded, nothing to return.
      sendNoContent(res);
    } catch (error) {
      req.log.error(
        { error, userId, inventoryId: params.inventoryId },
        'Error deleting inventory item',
      );
      next(error);
    }
  },
);
/**
 * @openapi
 * /inventory/{inventoryId}/consume:
 * post:
 * tags: [Inventory]
 * summary: Mark item as consumed
 * description: Mark an inventory item as consumed.
 * security:
 * - bearerAuth: []
 * parameters:
 * - in: path
 * name: inventoryId
 * required: true
 * schema:
 * type: integer
 * responses:
 * 204:
 * description: Item marked as consumed
 * 401:
 * description: Unauthorized
 * 404:
 * description: Item not found
 */
router.post(
  '/:inventoryId/consume',
  validateRequest(inventoryIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type ConsumeItemRequest = z.infer<typeof inventoryIdParamSchema>;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    const { params } = req as unknown as ConsumeItemRequest;
    try {
      await expiryService.markItemConsumed(params.inventoryId, userId, req.log);
      // 204: state change succeeded, no body.
      sendNoContent(res);
    } catch (error) {
      req.log.error(
        { error, userId, inventoryId: params.inventoryId },
        'Error marking item as consumed',
      );
      next(error);
    }
  },
);
export default router;

View File

@@ -20,7 +20,7 @@ vi.mock('../services/logger.server', async () => ({
}));
// Mock the passport middleware
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
// If req.user is not set by the test setup, simulate unauthenticated access.

View File

@@ -1,7 +1,7 @@
// src/routes/price.routes.ts
import { Router, Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import passport from './passport.routes';
import passport from '../config/passport';
import { validateRequest } from '../middleware/validation.middleware';
import { priceRepo } from '../services/db/price.db';
import { optionalNumeric } from '../utils/zodUtils';
@@ -24,8 +24,48 @@ const priceHistorySchema = z.object({
type PriceHistoryRequest = z.infer<typeof priceHistorySchema>;
/**
* POST /api/price-history - Fetches historical price data for a given list of master item IDs.
* This endpoint retrieves price points over time for specified master grocery items.
* @openapi
* /price-history:
* post:
* tags: [Price]
* summary: Get price history
* description: Fetches historical price data for a given list of master item IDs.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - masterItemIds
* properties:
* masterItemIds:
* type: array
* items:
* type: integer
* minItems: 1
* description: Array of master item IDs to get price history for
* limit:
* type: integer
* default: 1000
* description: Maximum number of price points to return
* offset:
* type: integer
* default: 0
* description: Number of price points to skip
* responses:
* 200:
* description: Historical price data for specified items
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/SuccessResponse'
* 400:
* description: Validation error - masterItemIds must be a non-empty array
* 401:
* description: Unauthorized - invalid or missing token
*/
router.post(
'/',

View File

@@ -20,7 +20,7 @@ vi.mock('../services/logger.server', async () => ({
}));
// Mock Passport middleware
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
// If we are testing the unauthenticated state (no user injected), simulate 401.

View File

@@ -2,7 +2,7 @@ import { Router, Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import { reactionRepo } from '../services/db/index.db';
import { validateRequest } from '../middleware/validation.middleware';
import passport from './passport.routes';
import passport from '../config/passport';
import { requiredString } from '../utils/zodUtils';
import { UserProfile } from '../types';
import { publicReadLimiter, reactionToggleLimiter } from '../config/rateLimiters';
@@ -38,9 +38,36 @@ const getReactionSummarySchema = z.object({
// --- Routes ---
/**
* GET /api/reactions - Fetches user reactions based on query filters.
* Supports filtering by userId, entityType, and entityId.
* This is a public endpoint.
* @openapi
* /reactions:
* get:
* tags: [Reactions]
* summary: Get reactions
* description: Fetches user reactions based on query filters. Supports filtering by userId, entityType, and entityId.
* parameters:
* - in: query
* name: userId
* schema:
* type: string
* format: uuid
* description: Filter by user ID
* - in: query
* name: entityType
* schema:
* type: string
* description: Filter by entity type (e.g., recipe, comment)
* - in: query
* name: entityId
* schema:
* type: string
* description: Filter by entity ID
* responses:
* 200:
* description: List of reactions matching filters
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/SuccessResponse'
*/
router.get(
'/',
@@ -59,9 +86,34 @@ router.get(
);
/**
* GET /api/reactions/summary - Fetches a summary of reactions for a specific entity.
* Example: /api/reactions/summary?entityType=recipe&entityId=123
* This is a public endpoint.
* @openapi
* /reactions/summary:
* get:
* tags: [Reactions]
* summary: Get reaction summary
* description: Fetches a summary of reactions for a specific entity.
* parameters:
* - in: query
* name: entityType
* required: true
* schema:
* type: string
* description: Entity type (e.g., recipe, comment)
* - in: query
* name: entityId
* required: true
* schema:
* type: string
* description: Entity ID
* responses:
* 200:
* description: Reaction summary with counts by type
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/SuccessResponse'
* 400:
* description: Missing required query parameters
*/
router.get(
'/summary',
@@ -84,8 +136,41 @@ router.get(
);
/**
* POST /api/reactions/toggle - Toggles a user's reaction to an entity.
* This is a protected endpoint.
* @openapi
* /reactions/toggle:
* post:
* tags: [Reactions]
* summary: Toggle reaction
* description: Toggles a user's reaction to an entity. If the reaction exists, it's removed; otherwise, it's added.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - entity_type
* - entity_id
* - reaction_type
* properties:
* entity_type:
* type: string
* description: Entity type (e.g., recipe, comment)
* entity_id:
* type: string
* description: Entity ID
* reaction_type:
* type: string
* description: Type of reaction (e.g., like, love)
* responses:
* 200:
* description: Reaction removed
* 201:
* description: Reaction added
* 401:
* description: Unauthorized - invalid or missing token
*/
router.post(
'/toggle',

View File

@@ -0,0 +1,785 @@
// src/routes/receipt.routes.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import request from 'supertest';
import { createTestApp } from '../tests/utils/createTestApp';
import { createMockUserProfile } from '../tests/utils/mockFactories';
import receiptRouter from './receipt.routes';
import type { ReceiptStatus, ReceiptItemStatus } from '../types/expiry';
import { NotFoundError } from '../services/db/errors.db';
// Test state - must be declared before vi.mock calls that reference them
// (vi.mock factories are hoisted, so they close over these `let` bindings;
// each test mutates them to control auth and upload behavior).
let mockUser: ReturnType<typeof createMockUserProfile> | null = null;
let mockFile: Express.Multer.File | null = null;
// Mock passport: authenticate() attaches mockUser when set, otherwise
// short-circuits with a 401 to simulate an unauthenticated request.
vi.mock('../config/passport', () => ({
  default: {
    authenticate: vi.fn(() => (req: any, res: any, next: any) => {
      if (mockUser) {
        req.user = mockUser;
        next();
      } else {
        res.status(401).json({ success: false, error: { message: 'Unauthorized' } });
      }
    }),
    initialize: () => (req: any, res: any, next: any) => next(),
  },
}));
// Mock receipt service: every exported function the router calls is stubbed;
// individual tests set per-call behavior with vi.mocked(...).
vi.mock('../services/receiptService.server', () => ({
  getReceipts: vi.fn(),
  createReceipt: vi.fn(),
  getReceiptById: vi.fn(),
  deleteReceipt: vi.fn(),
  getReceiptItems: vi.fn(),
  updateReceiptItem: vi.fn(),
  getUnaddedItems: vi.fn(),
  getProcessingLogs: vi.fn(),
}));
// Mock expiry service (only the receipt-to-pantry entry point is used here)
vi.mock('../services/expiryService.server', () => ({
  addItemsFromReceipt: vi.fn(),
}));
// Mock receipt queue so no BullMQ/Redis connection is needed
vi.mock('../services/queues.server', () => ({
  receiptQueue: {
    add: vi.fn(),
  },
}));
// Mock multer middleware: injects mockFile as req.file instead of parsing
// multipart data, and normalizes req.body to an object.
vi.mock('../middleware/multer.middleware', () => {
  return {
    createUploadMiddleware: vi.fn(() => ({
      single: vi.fn(() => (req: any, _res: any, next: any) => {
        // Simulate file upload by setting req.file
        if (mockFile) {
          req.file = mockFile;
        }
        // Multer also parses the body fields from multipart form data.
        // Since we're mocking multer, we need to ensure req.body is an object.
        // Supertest with .field() sends data as multipart which express.json() doesn't parse.
        // The actual field data won't be in req.body from supertest when multer is mocked,
        // so we leave req.body as-is (express.json() will have parsed JSON requests,
        // and for multipart we need to ensure body is at least an empty object).
        if (req.body === undefined) {
          req.body = {};
        }
        next();
      }),
    })),
    handleMulterError: vi.fn((err: any, _req: any, res: any, next: any) => {
      // Only handle multer-specific errors, pass others to the error handler
      if (err && err.name === 'MulterError') {
        return res.status(400).json({ success: false, error: { message: err.message } });
      }
      // Pass non-multer errors to the next error handler
      next(err);
    }),
  };
});
// Mock file upload middleware: rejects with 400 when no file was injected,
// mirroring the real requireFileUpload contract.
vi.mock('../middleware/fileUpload.middleware', () => ({
  requireFileUpload: vi.fn(() => (req: any, res: any, next: any) => {
    if (!req.file) {
      return res.status(400).json({
        success: false,
        error: { message: 'File is required' },
      });
    }
    next();
  }),
}));
import * as receiptService from '../services/receiptService.server';
import * as expiryService from '../services/expiryService.server';
import { receiptQueue } from '../services/queues.server';
// Factory for a mock receipt (ReceiptScan type): returns sensible defaults
// for every column, shallow-merged with any caller-supplied overrides.
function createMockReceipt(overrides: { status?: ReceiptStatus; [key: string]: unknown } = {}) {
  const defaults = {
    receipt_id: 1,
    user_id: 'user-123',
    receipt_image_url: '/uploads/receipts/receipt-123.jpg',
    store_id: null,
    transaction_date: null,
    total_amount_cents: null,
    status: 'pending' as ReceiptStatus,
    raw_text: null,
    store_confidence: null,
    ocr_provider: null,
    error_details: null,
    retry_count: 0,
    ocr_confidence: null,
    currency: 'USD',
    created_at: '2024-01-15T10:00:00Z',
    processed_at: null,
    updated_at: '2024-01-15T10:00:00Z',
  };
  return { ...defaults, ...overrides };
}
// Factory for a mock receipt line item (ReceiptItem type): defaults describe
// a single unmatched milk purchase; overrides are shallow-merged on top.
function createMockReceiptItem(
  overrides: { status?: ReceiptItemStatus; [key: string]: unknown } = {},
) {
  const defaults = {
    receipt_item_id: 1,
    receipt_id: 1,
    raw_item_description: 'MILK 2% 4L',
    quantity: 1,
    price_paid_cents: 599,
    master_item_id: null,
    product_id: null,
    status: 'unmatched' as ReceiptItemStatus,
    line_number: 1,
    match_confidence: null,
    is_discount: false,
    unit_price_cents: null,
    unit_type: null,
    added_to_pantry: false,
    pantry_item_id: null,
    upc_code: null,
    created_at: '2024-01-15T10:00:00Z',
    updated_at: '2024-01-15T10:00:00Z',
  };
  return { ...defaults, ...overrides };
}
// Factory for a mock processing log row (ReceiptProcessingLogRecord type):
// defaults model a completed 'upload' step; overrides are shallow-merged.
function createMockProcessingLog(overrides: Record<string, unknown> = {}) {
  const defaults = {
    log_id: 1,
    receipt_id: 1,
    processing_step: 'upload' as const,
    status: 'completed' as const,
    provider: null,
    duration_ms: null,
    tokens_used: null,
    cost_cents: null,
    input_data: null,
    output_data: null,
    error_message: null,
    created_at: '2024-01-15T10:00:00Z',
  };
  return { ...defaults, ...overrides };
}
describe('Receipt Routes', () => {
let app: ReturnType<typeof createTestApp>;
beforeEach(() => {
vi.clearAllMocks();
mockUser = createMockUserProfile();
mockFile = null;
app = createTestApp({
router: receiptRouter,
basePath: '/receipts',
authenticatedUser: mockUser,
});
});
afterEach(() => {
vi.resetAllMocks();
mockUser = null;
mockFile = null;
});
describe('GET /receipts', () => {
it('should return user receipts with default pagination', async () => {
const mockReceipts = [createMockReceipt(), createMockReceipt({ receipt_id: 2 })];
vi.mocked(receiptService.getReceipts).mockResolvedValueOnce({
receipts: mockReceipts,
total: 2,
});
const response = await request(app).get('/receipts');
expect(response.status).toBe(200);
expect(response.body.success).toBe(true);
expect(response.body.data.receipts).toHaveLength(2);
expect(receiptService.getReceipts).toHaveBeenCalledWith(
expect.objectContaining({
user_id: mockUser!.user.user_id,
limit: 50,
offset: 0,
}),
expect.anything(),
);
});
it('should support status filter', async () => {
vi.mocked(receiptService.getReceipts).mockResolvedValueOnce({
receipts: [createMockReceipt({ status: 'completed' })],
total: 1,
});
const response = await request(app).get('/receipts?status=completed');
expect(response.status).toBe(200);
expect(receiptService.getReceipts).toHaveBeenCalledWith(
expect.objectContaining({ status: 'completed' }),
expect.anything(),
);
});
it('should support store_id filter', async () => {
vi.mocked(receiptService.getReceipts).mockResolvedValueOnce({
receipts: [createMockReceipt({ store_id: 5 })],
total: 1,
});
const response = await request(app).get('/receipts?store_id=5');
expect(response.status).toBe(200);
expect(receiptService.getReceipts).toHaveBeenCalledWith(
expect.objectContaining({ store_id: 5 }),
expect.anything(),
);
});
it('should support date range filter', async () => {
vi.mocked(receiptService.getReceipts).mockResolvedValueOnce({
receipts: [],
total: 0,
});
const response = await request(app).get('/receipts?from_date=2024-01-01&to_date=2024-01-31');
expect(response.status).toBe(200);
expect(receiptService.getReceipts).toHaveBeenCalledWith(
expect.objectContaining({
from_date: '2024-01-01',
to_date: '2024-01-31',
}),
expect.anything(),
);
});
it('should support pagination', async () => {
vi.mocked(receiptService.getReceipts).mockResolvedValueOnce({
receipts: [],
total: 100,
});
const response = await request(app).get('/receipts?limit=10&offset=20');
expect(response.status).toBe(200);
expect(receiptService.getReceipts).toHaveBeenCalledWith(
expect.objectContaining({ limit: 10, offset: 20 }),
expect.anything(),
);
});
it('should reject invalid status', async () => {
const response = await request(app).get('/receipts?status=invalid');
expect(response.status).toBe(400);
});
it('should handle service error', async () => {
vi.mocked(receiptService.getReceipts).mockRejectedValueOnce(new Error('DB error'));
const response = await request(app).get('/receipts');
expect(response.status).toBe(500);
});
});
describe('POST /receipts', () => {
beforeEach(() => {
mockFile = {
fieldname: 'receipt',
originalname: 'receipt.jpg',
encoding: '7bit',
mimetype: 'image/jpeg',
destination: '/uploads/receipts',
filename: 'receipt-123.jpg',
path: '/uploads/receipts/receipt-123.jpg',
size: 1024000,
} as Express.Multer.File;
});
it('should upload receipt and queue for processing', async () => {
const mockReceipt = createMockReceipt();
vi.mocked(receiptService.createReceipt).mockResolvedValueOnce(mockReceipt);
vi.mocked(receiptQueue.add).mockResolvedValueOnce({ id: 'job-123' } as any);
// Send JSON body instead of form fields since multer is mocked and doesn't parse form data
const response = await request(app)
.post('/receipts')
.send({ store_id: '1', transaction_date: '2024-01-15' });
expect(response.status).toBe(201);
expect(response.body.success).toBe(true);
expect(response.body.data.receipt_id).toBe(1);
expect(response.body.data.job_id).toBe('job-123');
expect(receiptService.createReceipt).toHaveBeenCalledWith(
mockUser!.user.user_id,
'/uploads/receipts/receipt-123.jpg',
expect.anything(),
expect.objectContaining({
storeId: 1,
transactionDate: '2024-01-15',
}),
);
expect(receiptQueue.add).toHaveBeenCalledWith(
'process-receipt',
expect.objectContaining({
receiptId: 1,
userId: mockUser!.user.user_id,
imagePath: '/uploads/receipts/receipt-123.jpg',
}),
expect.objectContaining({
jobId: 'receipt-1',
}),
);
});
it('should upload receipt without optional fields', async () => {
const mockReceipt = createMockReceipt();
vi.mocked(receiptService.createReceipt).mockResolvedValueOnce(mockReceipt);
vi.mocked(receiptQueue.add).mockResolvedValueOnce({ id: 'job-456' } as any);
const response = await request(app).post('/receipts');
expect(response.status).toBe(201);
expect(receiptService.createReceipt).toHaveBeenCalledWith(
mockUser!.user.user_id,
'/uploads/receipts/receipt-123.jpg',
expect.anything(),
expect.objectContaining({
storeId: undefined,
transactionDate: undefined,
}),
);
});
it('should reject request without file', async () => {
mockFile = null;
const response = await request(app).post('/receipts');
expect(response.status).toBe(400);
expect(response.body.error.message).toContain('File is required');
});
it('should handle service error', async () => {
vi.mocked(receiptService.createReceipt).mockRejectedValueOnce(new Error('Storage error'));
const response = await request(app).post('/receipts');
expect(response.status).toBe(500);
});
});
describe('GET /receipts/:receiptId', () => {
it('should return receipt with items', async () => {
const mockReceipt = createMockReceipt();
const mockItems = [createMockReceiptItem(), createMockReceiptItem({ receipt_item_id: 2 })];
vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
vi.mocked(receiptService.getReceiptItems).mockResolvedValueOnce(mockItems);
const response = await request(app).get('/receipts/1');
expect(response.status).toBe(200);
expect(response.body.success).toBe(true);
expect(response.body.data.receipt.receipt_id).toBe(1);
expect(response.body.data.items).toHaveLength(2);
expect(receiptService.getReceiptById).toHaveBeenCalledWith(
1,
mockUser!.user.user_id,
expect.anything(),
);
});
it('should return 404 for non-existent receipt', async () => {
vi.mocked(receiptService.getReceiptById).mockRejectedValueOnce(
new NotFoundError('Receipt not found'),
);
const response = await request(app).get('/receipts/999');
expect(response.status).toBe(404);
});
it('should reject invalid receipt ID', async () => {
const response = await request(app).get('/receipts/invalid');
expect(response.status).toBe(400);
});
});
describe('DELETE /receipts/:receiptId', () => {
it('should delete receipt successfully', async () => {
vi.mocked(receiptService.deleteReceipt).mockResolvedValueOnce(undefined);
const response = await request(app).delete('/receipts/1');
expect(response.status).toBe(204);
expect(receiptService.deleteReceipt).toHaveBeenCalledWith(
1,
mockUser!.user.user_id,
expect.anything(),
);
});
it('should return 404 for non-existent receipt', async () => {
vi.mocked(receiptService.deleteReceipt).mockRejectedValueOnce(
new NotFoundError('Receipt not found'),
);
const response = await request(app).delete('/receipts/999');
expect(response.status).toBe(404);
});
});
describe('POST /receipts/:receiptId/reprocess', () => {
  it('should queue receipt for reprocessing', async () => {
    // Route first loads the receipt (ownership check), then enqueues a BullMQ job.
    const mockReceipt = createMockReceipt({ status: 'failed' });
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptQueue.add).mockResolvedValueOnce({ id: 'reprocess-job-123' } as any);
    const response = await request(app).post('/receipts/1/reprocess');
    expect(response.status).toBe(200);
    expect(response.body.success).toBe(true);
    expect(response.body.data.message).toContain('reprocessing');
    expect(response.body.data.job_id).toBe('reprocess-job-123');
    // The job reuses the stored image path; the jobId embeds a timestamp so
    // repeated reprocess requests get unique job ids.
    expect(receiptQueue.add).toHaveBeenCalledWith(
      'process-receipt',
      expect.objectContaining({
        receiptId: 1,
        imagePath: mockReceipt.receipt_image_url,
      }),
      expect.objectContaining({
        jobId: expect.stringMatching(/^receipt-1-reprocess-\d+$/),
      }),
    );
  });
  it('should return 404 for non-existent receipt', async () => {
    // Ownership/existence check failing short-circuits before any queueing.
    vi.mocked(receiptService.getReceiptById).mockRejectedValueOnce(
      new NotFoundError('Receipt not found'),
    );
    const response = await request(app).post('/receipts/999/reprocess');
    expect(response.status).toBe(404);
  });
});
describe('GET /receipts/:receiptId/items', () => {
  it('should return receipt items', async () => {
    const mockReceipt = createMockReceipt();
    const mockItems = [
      createMockReceiptItem(),
      createMockReceiptItem({ receipt_item_id: 2, parsed_name: 'Bread' }),
    ];
    // First mock satisfies the ownership check, second supplies the items payload.
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptService.getReceiptItems).mockResolvedValueOnce(mockItems);
    const response = await request(app).get('/receipts/1/items');
    expect(response.status).toBe(200);
    expect(response.body.success).toBe(true);
    expect(response.body.data.items).toHaveLength(2);
    // `total` is derived from the items array length by the route.
    expect(response.body.data.total).toBe(2);
  });
  it('should return 404 if receipt not found', async () => {
    vi.mocked(receiptService.getReceiptById).mockRejectedValueOnce(
      new NotFoundError('Receipt not found'),
    );
    const response = await request(app).get('/receipts/999/items');
    expect(response.status).toBe(404);
  });
});
describe('PUT /receipts/:receiptId/items/:itemId', () => {
  it('should update receipt item status', async () => {
    const mockReceipt = createMockReceipt();
    const updatedItem = createMockReceiptItem({ status: 'matched', match_confidence: 0.95 });
    // getReceiptById gate passes, then updateReceiptItem returns the updated row.
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptService.updateReceiptItem).mockResolvedValueOnce(updatedItem);
    const response = await request(app)
      .put('/receipts/1/items/1')
      .send({ status: 'matched', match_confidence: 0.95 });
    expect(response.status).toBe(200);
    expect(response.body.success).toBe(true);
    expect(response.body.data.status).toBe('matched');
    // Note: the service receives the itemId (first arg), not the receiptId.
    expect(receiptService.updateReceiptItem).toHaveBeenCalledWith(
      1,
      expect.objectContaining({ status: 'matched', match_confidence: 0.95 }),
      expect.anything(),
    );
  });
  it('should update item with master_item_id', async () => {
    const mockReceipt = createMockReceipt();
    const updatedItem = createMockReceiptItem({ master_item_id: 42 });
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptService.updateReceiptItem).mockResolvedValueOnce(updatedItem);
    const response = await request(app).put('/receipts/1/items/1').send({ master_item_id: 42 });
    expect(response.status).toBe(200);
    expect(response.body.data.master_item_id).toBe(42);
  });
  it('should reject empty update body', async () => {
    // Schema's .refine requires at least one updatable field.
    const response = await request(app).put('/receipts/1/items/1').send({});
    expect(response.status).toBe(400);
  });
  it('should reject invalid status value', async () => {
    // status must be one of: unmatched | matched | needs_review | ignored.
    const response = await request(app)
      .put('/receipts/1/items/1')
      .send({ status: 'invalid_status' });
    expect(response.status).toBe(400);
  });
  it('should reject invalid match_confidence', async () => {
    // match_confidence is constrained to the [0, 1] range.
    const response = await request(app)
      .put('/receipts/1/items/1')
      .send({ match_confidence: 1.5 });
    expect(response.status).toBe(400);
  });
});
describe('GET /receipts/:receiptId/items/unadded', () => {
  it('should return unadded items', async () => {
    const mockReceipt = createMockReceipt();
    const mockItems = [
      createMockReceiptItem({ added_to_inventory: false }),
      createMockReceiptItem({ receipt_item_id: 2, added_to_inventory: false }),
    ];
    // Ownership check first, then the filtered not-yet-added items.
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptService.getUnaddedItems).mockResolvedValueOnce(mockItems);
    const response = await request(app).get('/receipts/1/items/unadded');
    expect(response.status).toBe(200);
    expect(response.body.success).toBe(true);
    expect(response.body.data.items).toHaveLength(2);
    expect(response.body.data.total).toBe(2);
  });
  it('should return empty array when all items added', async () => {
    // An empty result is a valid 200 response, not an error.
    const mockReceipt = createMockReceipt();
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptService.getUnaddedItems).mockResolvedValueOnce([]);
    const response = await request(app).get('/receipts/1/items/unadded');
    expect(response.status).toBe(200);
    expect(response.body.data.items).toHaveLength(0);
    expect(response.body.data.total).toBe(0);
  });
});
describe('POST /receipts/:receiptId/confirm', () => {
  it('should confirm items for inventory', async () => {
    const addedItems = [
      { inventory_id: 1, item_name: 'Milk 2%', quantity: 1 },
      { inventory_id: 2, item_name: 'Bread', quantity: 2 },
    ];
    vi.mocked(expiryService.addItemsFromReceipt).mockResolvedValueOnce(addedItems as any);
    const response = await request(app)
      .post('/receipts/1/confirm')
      .send({
        items: [
          { receipt_item_id: 1, include: true, location: 'fridge' },
          { receipt_item_id: 2, include: true, location: 'pantry', expiry_date: '2024-01-20' },
          { receipt_item_id: 3, include: false },
        ],
      });
    expect(response.status).toBe(200);
    expect(response.body.success).toBe(true);
    expect(response.body.data.added_items).toHaveLength(2);
    expect(response.body.data.count).toBe(2);
    // Every submitted item — including the include:false one — is forwarded to
    // the service; filtering is the service's responsibility, not the route's.
    expect(expiryService.addItemsFromReceipt).toHaveBeenCalledWith(
      mockUser!.user.user_id,
      1,
      expect.arrayContaining([
        expect.objectContaining({ receipt_item_id: 1, include: true }),
        expect.objectContaining({ receipt_item_id: 2, include: true }),
        expect.objectContaining({ receipt_item_id: 3, include: false }),
      ]),
      expect.anything(),
    );
  });
  it('should accept custom item_name and quantity', async () => {
    // Callers may override the parsed name/quantity before adding to inventory.
    vi.mocked(expiryService.addItemsFromReceipt).mockResolvedValueOnce([
      { inventory_id: 1, item_name: 'Custom Name', quantity: 5 },
    ] as any);
    const response = await request(app)
      .post('/receipts/1/confirm')
      .send({
        items: [
          {
            receipt_item_id: 1,
            include: true,
            item_name: 'Custom Name',
            quantity: 5,
            location: 'pantry',
          },
        ],
      });
    expect(response.status).toBe(200);
    expect(expiryService.addItemsFromReceipt).toHaveBeenCalledWith(
      mockUser!.user.user_id,
      1,
      expect.arrayContaining([
        expect.objectContaining({
          item_name: 'Custom Name',
          quantity: 5,
        }),
      ]),
      expect.anything(),
    );
  });
  it('should accept empty items array', async () => {
    // Empty array is technically valid, service decides what to do
    vi.mocked(expiryService.addItemsFromReceipt).mockResolvedValueOnce([]);
    const response = await request(app).post('/receipts/1/confirm').send({ items: [] });
    expect(response.status).toBe(200);
    expect(response.body.data.count).toBe(0);
  });
  it('should reject missing items field', async () => {
    // `items` is a required body field in confirmItemsSchema.
    const response = await request(app).post('/receipts/1/confirm').send({});
    expect(response.status).toBe(400);
  });
  it('should reject invalid location', async () => {
    // location must be one of: fridge | freezer | pantry | room_temp.
    const response = await request(app)
      .post('/receipts/1/confirm')
      .send({
        items: [{ receipt_item_id: 1, include: true, location: 'invalid_location' }],
      });
    expect(response.status).toBe(400);
  });
  it('should reject invalid expiry_date format', async () => {
    // expiry_date must be a YYYY-MM-DD date string.
    const response = await request(app)
      .post('/receipts/1/confirm')
      .send({
        items: [{ receipt_item_id: 1, include: true, expiry_date: 'not-a-date' }],
      });
    expect(response.status).toBe(400);
  });
  it('should handle service error', async () => {
    // A generic service failure surfaces as a 500 via the error middleware.
    vi.mocked(expiryService.addItemsFromReceipt).mockRejectedValueOnce(
      new Error('Failed to add items'),
    );
    const response = await request(app)
      .post('/receipts/1/confirm')
      .send({
        items: [{ receipt_item_id: 1, include: true }],
      });
    expect(response.status).toBe(500);
  });
});
describe('GET /receipts/:receiptId/logs', () => {
  it('should return processing logs', async () => {
    const mockReceipt = createMockReceipt();
    // One log entry per pipeline stage; the last is still in progress.
    const mockLogs = [
      createMockProcessingLog({
        processing_step: 'ocr_extraction' as const,
        status: 'completed' as const,
      }),
      createMockProcessingLog({
        log_id: 2,
        processing_step: 'item_extraction' as const,
        status: 'completed' as const,
      }),
      createMockProcessingLog({
        log_id: 3,
        processing_step: 'item_matching' as const,
        status: 'started' as const,
      }),
    ];
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptService.getProcessingLogs).mockResolvedValueOnce(mockLogs);
    const response = await request(app).get('/receipts/1/logs');
    expect(response.status).toBe(200);
    expect(response.body.success).toBe(true);
    expect(response.body.data.logs).toHaveLength(3);
    expect(response.body.data.total).toBe(3);
  });
  it('should return empty logs for new receipt', async () => {
    // A receipt with no processing history still yields a 200 with an empty list.
    const mockReceipt = createMockReceipt();
    vi.mocked(receiptService.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptService.getProcessingLogs).mockResolvedValueOnce([]);
    const response = await request(app).get('/receipts/1/logs');
    expect(response.status).toBe(200);
    expect(response.body.data.logs).toHaveLength(0);
    expect(response.body.data.total).toBe(0);
  });
  it('should return 404 for non-existent receipt', async () => {
    vi.mocked(receiptService.getReceiptById).mockRejectedValueOnce(
      new NotFoundError('Receipt not found'),
    );
    const response = await request(app).get('/receipts/999/logs');
    expect(response.status).toBe(404);
  });
});
describe('Authentication', () => {
  it('should reject unauthenticated requests', async () => {
    // Clear the shared mockUser so the mocked passport middleware sees no user,
    // then rebuild the app without an authenticated user. Order matters: the
    // app must be recreated AFTER mockUser is nulled.
    mockUser = null;
    app = createTestApp({
      router: receiptRouter,
      basePath: '/receipts',
      authenticatedUser: undefined,
    });
    const response = await request(app).get('/receipts');
    expect(response.status).toBe(401);
  });
});
});

View File

@@ -0,0 +1,814 @@
// src/routes/receipt.routes.ts
/**
* @file Receipt Scanning API Routes
* Provides endpoints for uploading, processing, and managing scanned receipts.
*/
import express, { Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import passport from '../config/passport';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { numericIdParam, optionalNumeric } from '../utils/zodUtils';
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
import * as receiptService from '../services/receiptService.server';
import * as expiryService from '../services/expiryService.server';
import { createUploadMiddleware, handleMulterError } from '../middleware/multer.middleware';
import { receiptQueue } from '../services/queues.server';
import { requireFileUpload } from '../middleware/fileUpload.middleware';
const router = express.Router();

// Configure multer for receipt image uploads (max 10MB, image files only).
const receiptUpload = createUploadMiddleware({
  storageType: 'receipt',
  fileSize: 10 * 1024 * 1024, // 10MB
  fileFilter: 'image',
});

// --- Zod Schemas for Receipt Routes ---

/**
 * Receipt lifecycle status validation (used as a list filter).
 */
const receiptStatusSchema = z.enum(['pending', 'processing', 'completed', 'failed']);

/**
 * Receipt item matching-status validation.
 */
const receiptItemStatusSchema = z.enum(['unmatched', 'matched', 'needs_review', 'ignored']);

/**
 * Storage location validation (for adding items to inventory).
 */
const storageLocationSchema = z.enum(['fridge', 'freezer', 'pantry', 'room_temp']);

/**
 * Schema for the `:receiptId` path parameter (coerced to a positive integer).
 */
const receiptIdParamSchema = numericIdParam(
  'receiptId',
  "Invalid ID for parameter 'receiptId'. Must be a number.",
);

/**
 * Schema for the `:itemId` path parameter.
 * NOTE(review): currently unused (underscore prefix); routes needing both ids
 * declare their params inline (see updateReceiptItemSchema).
 */
const _receiptItemIdParamSchema = numericIdParam(
  'itemId',
  "Invalid ID for parameter 'itemId'. Must be a number.",
);

/**
 * Schema for uploading a receipt (used with multipart file upload, not base64).
 * store_id arrives as a form-data string, so it is parsed before the numeric pipe.
 */
const uploadReceiptSchema = z.object({
  body: z.object({
    store_id: z
      .string()
      .optional()
      .transform((val) => (val ? parseInt(val, 10) : undefined))
      .pipe(z.number().int().positive().optional()),
    transaction_date: z.string().date('Transaction date must be in YYYY-MM-DD format.').optional(),
  }),
});

/**
 * Schema for the receipt list query: pagination plus optional
 * status / store / date-range filters.
 */
const receiptQuerySchema = z.object({
  query: z.object({
    limit: optionalNumeric({ default: 50, min: 1, max: 100, integer: true }),
    offset: optionalNumeric({ default: 0, min: 0, integer: true }),
    status: receiptStatusSchema.optional(),
    store_id: z
      .string()
      .optional()
      .transform((val) => (val ? parseInt(val, 10) : undefined))
      .pipe(z.number().int().positive().optional()),
    from_date: z.string().date().optional(),
    to_date: z.string().date().optional(),
  }),
});

/**
 * Schema for updating a receipt item.
 * The body must carry at least one updatable field (enforced by .refine).
 */
const updateReceiptItemSchema = z.object({
  params: z.object({
    receiptId: z.coerce.number().int().positive(),
    itemId: z.coerce.number().int().positive(),
  }),
  body: z
    .object({
      status: receiptItemStatusSchema.optional(),
      master_item_id: z.number().int().positive().nullable().optional(),
      product_id: z.number().int().positive().nullable().optional(),
      match_confidence: z.number().min(0).max(1).optional(),
    })
    .refine((data) => Object.keys(data).length > 0, {
      message: 'At least one field to update must be provided.',
    }),
});

/**
 * Schema for confirming receipt items to add to inventory.
 * Each entry carries an `include` flag; excluded entries are still forwarded
 * to the service, which decides how to handle them.
 */
const confirmItemsSchema = z.object({
  params: z.object({
    receiptId: z.coerce.number().int().positive(),
  }),
  body: z.object({
    items: z.array(
      z.object({
        receipt_item_id: z.number().int().positive(),
        item_name: z.string().max(255).optional(),
        quantity: z.number().positive().optional(),
        location: storageLocationSchema.optional(),
        expiry_date: z.string().date().optional(),
        include: z.boolean(),
      }),
    ),
  }),
});

// Middleware to ensure user is authenticated (JWT, no session) for all receipt routes.
router.use(passport.authenticate('jwt', { session: false }));
// ============================================================================
// RECEIPT MANAGEMENT ENDPOINTS
// ============================================================================
/**
* @openapi
* /receipts:
* get:
* tags: [Receipts]
* summary: Get user's receipts
* description: Retrieve the user's scanned receipts with optional filtering.
* security:
* - bearerAuth: []
* parameters:
* - in: query
* name: limit
* schema:
* type: integer
* minimum: 1
* maximum: 100
* default: 50
* - in: query
* name: offset
* schema:
* type: integer
* minimum: 0
* default: 0
* - in: query
* name: status
* schema:
* type: string
* enum: [pending, processing, completed, failed]
* - in: query
* name: store_id
* schema:
* type: integer
* - in: query
* name: from_date
* schema:
* type: string
* format: date
* - in: query
* name: to_date
* schema:
* type: string
* format: date
* responses:
* 200:
* description: Receipts retrieved
* 401:
* description: Unauthorized
*/
router.get(
  '/',
  validateRequest(receiptQuerySchema),
  async (req: Request, res: Response, next: NextFunction) => {
    // The JWT middleware registered above guarantees req.user is populated.
    const profile = req.user as UserProfile;
    // validateRequest has already coerced/validated req.query against the schema.
    const { query } = req as unknown as z.infer<typeof receiptQuerySchema>;
    try {
      // Scope the listing to the caller and pass the validated filters through.
      const filters = {
        user_id: profile.user.user_id,
        status: query.status,
        store_id: query.store_id,
        from_date: query.from_date,
        to_date: query.to_date,
        limit: query.limit,
        offset: query.offset,
      };
      const result = await receiptService.getReceipts(filters, req.log);
      sendSuccess(res, result);
    } catch (error) {
      req.log.error({ error, userId: profile.user.user_id }, 'Error fetching receipts');
      next(error);
    }
  },
);
/**
* @openapi
* /receipts:
* post:
* tags: [Receipts]
* summary: Upload a receipt
* description: Upload a receipt image for processing and item extraction.
* security:
* - bearerAuth: []
* requestBody:
* required: true
* content:
* multipart/form-data:
* schema:
* type: object
* required:
* - receipt
* properties:
* receipt:
* type: string
* format: binary
* description: Receipt image file
* store_id:
* type: integer
* description: Store ID if known
* transaction_date:
* type: string
* format: date
* description: Transaction date if known (YYYY-MM-DD)
* responses:
* 201:
* description: Receipt uploaded and queued for processing
* 400:
* description: Validation error
* 401:
* description: Unauthorized
*/
router.post(
  '/',
  receiptUpload.single('receipt'),        // multer stores the image and sets req.file
  requireFileUpload('receipt'),           // rejects requests with no uploaded file
  validateRequest(uploadReceiptSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const userProfile = req.user as UserProfile;
    type UploadReceiptRequest = z.infer<typeof uploadReceiptSchema>;
    const { body } = req as unknown as UploadReceiptRequest;
    // Safe cast: requireFileUpload has already guaranteed req.file exists.
    const file = req.file as Express.Multer.File;
    try {
      req.log.info(
        { userId: userProfile.user.user_id, filename: file.filename },
        'Uploading receipt',
      );
      // Create receipt record with the actual file path
      const receipt = await receiptService.createReceipt(
        userProfile.user.user_id,
        file.path, // Use the actual file path from multer
        req.log,
        {
          storeId: body.store_id,
          transactionDate: body.transaction_date,
        },
      );
      // Queue the receipt for processing via BullMQ.
      // Propagate the request id into the job so worker logs can be correlated.
      const bindings = req.log.bindings?.() || {};
      // NOTE(review): if queue.add fails after createReceipt succeeded, the
      // receipt row is left behind with no job — confirm whether cleanup or a
      // retry path exists downstream.
      const job = await receiptQueue.add(
        'process-receipt',
        {
          receiptId: receipt.receipt_id,
          userId: userProfile.user.user_id,
          imagePath: file.path,
          meta: {
            requestId: bindings.request_id as string | undefined,
            userId: userProfile.user.user_id,
            origin: 'api',
          },
        },
        {
          // Deterministic jobId: one in-flight processing job per receipt.
          jobId: `receipt-${receipt.receipt_id}`,
        },
      );
      req.log.info(
        { receiptId: receipt.receipt_id, jobId: job.id },
        'Receipt queued for processing',
      );
      // 201: the receipt resource was created; job_id lets clients poll progress.
      sendSuccess(res, { ...receipt, job_id: job.id }, 201);
    } catch (error) {
      req.log.error({ error, userId: userProfile.user.user_id }, 'Error uploading receipt');
      next(error);
    }
  },
);
/**
* @openapi
* /receipts/{receiptId}:
* get:
* tags: [Receipts]
* summary: Get receipt by ID
* description: Retrieve a specific receipt with its extracted items.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Receipt retrieved
* 401:
* description: Unauthorized
* 404:
* description: Receipt not found
*/
router.get(
  '/:receiptId',
  validateRequest(receiptIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const profile = req.user as UserProfile;
    const { params } = req as unknown as z.infer<typeof receiptIdParamSchema>;
    try {
      // Fetch the receipt scoped to the caller; presumably the service throws
      // NotFoundError for receipts the user does not own — verify in service.
      const receipt = await receiptService.getReceiptById(
        params.receiptId,
        profile.user.user_id,
        req.log,
      );
      // Items are only fetched once the receipt lookup above has succeeded.
      const items = await receiptService.getReceiptItems(params.receiptId, req.log);
      sendSuccess(res, { receipt, items });
    } catch (error) {
      req.log.error(
        { error, userId: profile.user.user_id, receiptId: params.receiptId },
        'Error fetching receipt',
      );
      next(error);
    }
  },
);
/**
* @openapi
* /receipts/{receiptId}:
* delete:
* tags: [Receipts]
* summary: Delete receipt
* description: Delete a receipt and all associated data.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* responses:
* 204:
* description: Receipt deleted
* 401:
* description: Unauthorized
* 404:
* description: Receipt not found
*/
router.delete(
  '/:receiptId',
  validateRequest(receiptIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const profile = req.user as UserProfile;
    const { params } = req as unknown as z.infer<typeof receiptIdParamSchema>;
    try {
      // The service receives the caller's id so deletion stays scoped to the
      // caller's own receipts; success yields 204 with an empty body.
      await receiptService.deleteReceipt(params.receiptId, profile.user.user_id, req.log);
      sendNoContent(res);
    } catch (error) {
      req.log.error(
        { error, userId: profile.user.user_id, receiptId: params.receiptId },
        'Error deleting receipt',
      );
      next(error);
    }
  },
);
/**
* @openapi
* /receipts/{receiptId}/reprocess:
* post:
* tags: [Receipts]
* summary: Reprocess receipt
* description: Queue a failed receipt for reprocessing.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Receipt queued for reprocessing
* 401:
* description: Unauthorized
* 404:
* description: Receipt not found
*/
router.post(
  '/:receiptId/reprocess',
  validateRequest(receiptIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const userProfile = req.user as UserProfile;
    type ReprocessReceiptRequest = z.infer<typeof receiptIdParamSchema>;
    const { params } = req as unknown as ReprocessReceiptRequest;
    try {
      // Verify the receipt exists and belongs to user.
      // NOTE(review): the OpenAPI summary says "failed" receipts, but no status
      // check is performed here — any receipt can be re-queued. Confirm intended.
      const receipt = await receiptService.getReceiptById(
        params.receiptId,
        userProfile.user.user_id,
        req.log,
      );
      // Queue for reprocessing via BullMQ, carrying the request id for log correlation.
      const bindings = req.log.bindings?.() || {};
      const job = await receiptQueue.add(
        'process-receipt',
        {
          receiptId: receipt.receipt_id,
          userId: userProfile.user.user_id,
          imagePath: receipt.receipt_image_url, // Use stored image path
          meta: {
            requestId: bindings.request_id as string | undefined,
            userId: userProfile.user.user_id,
            origin: 'api-reprocess',
          },
        },
        {
          // Timestamp suffix makes each reprocess job id unique, unlike the
          // deterministic `receipt-<id>` used by the initial upload.
          jobId: `receipt-${receipt.receipt_id}-reprocess-${Date.now()}`,
        },
      );
      req.log.info(
        { receiptId: params.receiptId, jobId: job.id },
        'Receipt queued for reprocessing',
      );
      sendSuccess(res, {
        message: 'Receipt queued for reprocessing',
        receipt_id: receipt.receipt_id,
        job_id: job.id,
      });
    } catch (error) {
      req.log.error(
        { error, userId: userProfile.user.user_id, receiptId: params.receiptId },
        'Error reprocessing receipt',
      );
      next(error);
    }
  },
);
// ============================================================================
// RECEIPT ITEMS ENDPOINTS
// ============================================================================
/**
* @openapi
* /receipts/{receiptId}/items:
* get:
* tags: [Receipts]
* summary: Get receipt items
* description: Get all extracted items from a receipt.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Receipt items retrieved
* 401:
* description: Unauthorized
* 404:
* description: Receipt not found
*/
router.get(
  '/:receiptId/items',
  validateRequest(receiptIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const profile = req.user as UserProfile;
    const { params } = req as unknown as z.infer<typeof receiptIdParamSchema>;
    try {
      // Ownership gate: the lookup throws when the receipt is not the caller's,
      // so the items query below never runs for foreign receipts.
      await receiptService.getReceiptById(params.receiptId, profile.user.user_id, req.log);
      const items = await receiptService.getReceiptItems(params.receiptId, req.log);
      sendSuccess(res, { items, total: items.length });
    } catch (error) {
      req.log.error(
        { error, userId: profile.user.user_id, receiptId: params.receiptId },
        'Error fetching receipt items',
      );
      next(error);
    }
  },
);
/**
* @openapi
* /receipts/{receiptId}/items/{itemId}:
* put:
* tags: [Receipts]
* summary: Update receipt item
* description: Update a receipt item's matching status or linked product.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* - in: path
* name: itemId
* required: true
* schema:
* type: integer
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* status:
* type: string
* enum: [unmatched, matched, needs_review, ignored]
* master_item_id:
* type: integer
* nullable: true
* product_id:
* type: integer
* nullable: true
* match_confidence:
* type: number
* minimum: 0
* maximum: 1
* responses:
* 200:
* description: Item updated
* 400:
* description: Validation error
* 401:
* description: Unauthorized
* 404:
* description: Receipt or item not found
*/
router.put(
  '/:receiptId/items/:itemId',
  validateRequest(updateReceiptItemSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const profile = req.user as UserProfile;
    const { params, body } = req as unknown as z.infer<typeof updateReceiptItemSchema>;
    try {
      // Receipt-level ownership gate. NOTE(review): itemId is not checked here
      // to belong to this receipt — presumably enforced in the service; confirm.
      await receiptService.getReceiptById(params.receiptId, profile.user.user_id, req.log);
      const item = await receiptService.updateReceiptItem(params.itemId, body, req.log);
      sendSuccess(res, item);
    } catch (error) {
      req.log.error(
        {
          error,
          userId: profile.user.user_id,
          receiptId: params.receiptId,
          itemId: params.itemId,
        },
        'Error updating receipt item',
      );
      next(error);
    }
  },
);
/**
* @openapi
* /receipts/{receiptId}/items/unadded:
* get:
* tags: [Receipts]
* summary: Get unadded items
* description: Get receipt items that haven't been added to inventory yet.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Unadded items retrieved
* 401:
* description: Unauthorized
* 404:
* description: Receipt not found
*/
router.get(
  '/:receiptId/items/unadded',
  validateRequest(receiptIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const profile = req.user as UserProfile;
    const { params } = req as unknown as z.infer<typeof receiptIdParamSchema>;
    try {
      // Ownership gate first; only then fetch items not yet added to inventory.
      await receiptService.getReceiptById(params.receiptId, profile.user.user_id, req.log);
      const items = await receiptService.getUnaddedItems(params.receiptId, req.log);
      sendSuccess(res, { items, total: items.length });
    } catch (error) {
      req.log.error(
        { error, userId: profile.user.user_id, receiptId: params.receiptId },
        'Error fetching unadded receipt items',
      );
      next(error);
    }
  },
);
/**
* @openapi
* /receipts/{receiptId}/confirm:
* post:
* tags: [Receipts]
* summary: Confirm items for inventory
* description: Confirm selected receipt items to add to user's inventory.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - items
* properties:
* items:
* type: array
* items:
* type: object
* required:
* - receipt_item_id
* - include
* properties:
* receipt_item_id:
* type: integer
* item_name:
* type: string
* maxLength: 255
* quantity:
* type: number
* minimum: 0
* location:
* type: string
* enum: [fridge, freezer, pantry, room_temp]
* expiry_date:
* type: string
* format: date
* include:
* type: boolean
* responses:
* 200:
* description: Items added to inventory
* 400:
* description: Validation error
* 401:
* description: Unauthorized
* 404:
* description: Receipt not found
*/
router.post(
  '/:receiptId/confirm',
  validateRequest(confirmItemsSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const profile = req.user as UserProfile;
    const { params, body } = req as unknown as z.infer<typeof confirmItemsSchema>;
    try {
      req.log.info(
        {
          userId: profile.user.user_id,
          receiptId: params.receiptId,
          itemCount: body.items.length,
        },
        'Confirming receipt items for inventory',
      );
      // NOTE(review): unlike sibling routes there is no explicit getReceiptById
      // ownership check here — presumably addItemsFromReceipt scopes by the
      // user_id it receives; confirm in the service.
      const addedItems = await expiryService.addItemsFromReceipt(
        profile.user.user_id,
        params.receiptId,
        body.items,
        req.log,
      );
      sendSuccess(res, { added_items: addedItems, count: addedItems.length });
    } catch (error) {
      req.log.error(
        { error, userId: profile.user.user_id, receiptId: params.receiptId },
        'Error confirming receipt items',
      );
      next(error);
    }
  },
);
// ============================================================================
// PROCESSING LOGS ENDPOINT
// ============================================================================
/**
* @openapi
* /receipts/{receiptId}/logs:
* get:
* tags: [Receipts]
* summary: Get processing logs
* description: Get the processing log history for a receipt.
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: receiptId
* required: true
* schema:
* type: integer
* responses:
* 200:
* description: Processing logs retrieved
* 401:
* description: Unauthorized
* 404:
* description: Receipt not found
*/
router.get(
  '/:receiptId/logs',
  validateRequest(receiptIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    const profile = req.user as UserProfile;
    const { params } = req as unknown as z.infer<typeof receiptIdParamSchema>;
    try {
      // Ownership gate before exposing the processing history.
      await receiptService.getReceiptById(params.receiptId, profile.user.user_id, req.log);
      const logs = await receiptService.getProcessingLogs(params.receiptId, req.log);
      sendSuccess(res, { logs, total: logs.length });
    } catch (error) {
      req.log.error(
        { error, userId: profile.user.user_id, receiptId: params.receiptId },
        'Error fetching processing logs',
      );
      next(error);
    }
  },
);
/* Catches errors from multer (e.g., file size, file filter) and translates
   them into standard API error responses. Must be registered after the routes
   that use receiptUpload. */
router.use(handleMulterError);
export default router;

View File

@@ -29,7 +29,7 @@ vi.mock('../services/aiService.server', () => ({
}));
// Mock Passport
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
if (!req.user) {

View File

@@ -3,7 +3,7 @@ import { Router } from 'express';
import { z } from 'zod';
import * as db from '../services/db/index.db';
import { aiService } from '../services/aiService.server';
import passport from './passport.routes';
import passport from '../config/passport';
import { validateRequest } from '../middleware/validation.middleware';
import { requiredString, numericIdParam, optionalNumeric } from '../utils/zodUtils';
import { publicReadLimiter, suggestionLimiter, userUpdateLimiter } from '../config/rateLimiters';

View File

@@ -0,0 +1,529 @@
// src/routes/upc.routes.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import type { Request, Response, NextFunction } from 'express';
import { createMockUserProfile } from '../tests/utils/mockFactories';
import { createTestApp } from '../tests/utils/createTestApp';
import { NotFoundError } from '../services/db/errors.db';
import type { UpcScanSource } from '../types/upc';
// Mock the upcService module. vi.mock calls are hoisted by Vitest, so these
// run before the imports below regardless of file order.
vi.mock('../services/upcService.server', () => ({
  scanUpc: vi.fn(),
  lookupUpc: vi.fn(),
  getScanHistory: vi.fn(),
  getScanById: vi.fn(),
  getScanStats: vi.fn(),
  linkUpcToProduct: vi.fn(),
}));
// Mock the logger to keep test output clean
vi.mock('../services/logger.server', async () => ({
  logger: (await import('../tests/utils/mockLogger')).mockLogger,
}));
// Import the router and mocked service AFTER all mocks are defined.
import upcRouter from './upc.routes';
import * as upcService from '../services/upcService.server';
const mockUser = createMockUserProfile({
  user: { user_id: 'user-123', email: 'test@test.com' },
});
// Underscore prefix: referenced only inside the passport mock below.
const _mockAdminUser = createMockUserProfile({
  user: { user_id: 'admin-123', email: 'admin@test.com' },
  role: 'admin',
});
// Standardized mock for passport
// Note: createTestApp sets req.user before the router runs, so we preserve it here
vi.mock('../config/passport', () => ({
  default: {
    authenticate: vi.fn(() => (req: Request, res: Response, next: NextFunction) => {
      // Preserve the user set by createTestApp if already present
      if (!req.user) {
        req.user = mockUser;
      }
      next();
    }),
    initialize: () => (req: Request, res: Response, next: NextFunction) => next(),
  },
  // Minimal isAdmin stand-in: allows only profiles carrying role === 'admin'.
  isAdmin: (req: Request, res: Response, next: NextFunction) => {
    const user = req.user as typeof _mockAdminUser;
    if (user?.role === 'admin') {
      next();
    } else {
      res.status(403).json({ success: false, error: { message: 'Forbidden' } });
    }
  },
}));
// Define a reusable matcher for the logger object.
const expectLogger = expect.objectContaining({
  info: expect.any(Function),
  error: expect.any(Function),
});
describe('UPC Routes (/api/upc)', () => {
// Profiles used to build the regular-user and admin test apps below.
const mockUserProfile = createMockUserProfile({
  user: { user_id: 'user-123', email: 'test@test.com' },
});
const mockAdminProfile = createMockUserProfile({
  user: { user_id: 'admin-123', email: 'admin@test.com' },
  role: 'admin',
});
beforeEach(() => {
  vi.clearAllMocks();
  // Provide default mock implementations so routes that read history/stats
  // succeed unless a test overrides them.
  vi.mocked(upcService.getScanHistory).mockResolvedValue({ scans: [], total: 0 });
  vi.mocked(upcService.getScanStats).mockResolvedValue({
    total_scans: 0,
    successful_lookups: 0,
    unique_products: 0,
    scans_today: 0,
    scans_this_week: 0,
  });
});
// Two apps sharing the router: one authenticated as a regular user, one as admin.
const app = createTestApp({
  router: upcRouter,
  basePath: '/api/upc',
  authenticatedUser: mockUserProfile,
});
const adminApp = createTestApp({
  router: upcRouter,
  basePath: '/api/upc',
  authenticatedUser: mockAdminProfile,
});
describe('POST /scan', () => {
it('should scan a manually entered UPC code successfully', async () => {
const mockScanResult = {
scan_id: 1,
upc_code: '012345678905',
product: {
product_id: 1,
name: 'Test Product',
brand: 'Test Brand',
category: 'Snacks',
description: null,
size: '500g',
upc_code: '012345678905',
image_url: null,
master_item_id: null,
},
external_lookup: null,
confidence: null,
lookup_successful: true,
is_new_product: false,
scanned_at: new Date().toISOString(),
};
vi.mocked(upcService.scanUpc).mockResolvedValue(mockScanResult);
const response = await supertest(app).post('/api/upc/scan').send({
upc_code: '012345678905',
scan_source: 'manual_entry',
});
expect(response.status).toBe(200);
expect(response.body.data.scan_id).toBe(1);
expect(response.body.data.upc_code).toBe('012345678905');
expect(response.body.data.lookup_successful).toBe(true);
expect(upcService.scanUpc).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
{ upc_code: '012345678905', scan_source: 'manual_entry' },
expectLogger,
);
});
it('should scan from base64 image', async () => {
const mockScanResult = {
scan_id: 2,
upc_code: '987654321098',
product: null,
external_lookup: {
name: 'External Product',
brand: 'External Brand',
category: null,
description: null,
image_url: null,
source: 'openfoodfacts' as const,
},
confidence: 0.95,
lookup_successful: true,
is_new_product: true,
scanned_at: new Date().toISOString(),
};
vi.mocked(upcService.scanUpc).mockResolvedValue(mockScanResult);
const response = await supertest(app).post('/api/upc/scan').send({
image_base64: 'SGVsbG8gV29ybGQ=',
scan_source: 'image_upload',
});
expect(response.status).toBe(200);
expect(response.body.data.confidence).toBe(0.95);
expect(response.body.data.is_new_product).toBe(true);
});
it('should return 400 when neither upc_code nor image_base64 is provided', async () => {
const response = await supertest(app).post('/api/upc/scan').send({
scan_source: 'manual_entry',
});
expect(response.status).toBe(400);
expect(response.body.error.details).toBeDefined();
});
it('should return 400 for invalid scan_source', async () => {
const response = await supertest(app).post('/api/upc/scan').send({
upc_code: '012345678905',
scan_source: 'invalid_source',
});
expect(response.status).toBe(400);
});
it('should return 500 if the scan service fails', async () => {
vi.mocked(upcService.scanUpc).mockRejectedValue(new Error('Scan service error'));
const response = await supertest(app).post('/api/upc/scan').send({
upc_code: '012345678905',
scan_source: 'manual_entry',
});
expect(response.status).toBe(500);
expect(response.body.error.message).toBe('Scan service error');
});
});
describe('GET /lookup', () => {
it('should look up a UPC code successfully', async () => {
const mockLookupResult = {
upc_code: '012345678905',
product: {
product_id: 1,
name: 'Test Product',
brand: 'Test Brand',
category: 'Snacks',
description: null,
size: '500g',
upc_code: '012345678905',
image_url: null,
master_item_id: null,
},
external_lookup: null,
found: true,
from_cache: false,
};
vi.mocked(upcService.lookupUpc).mockResolvedValue(mockLookupResult);
const response = await supertest(app).get('/api/upc/lookup?upc_code=012345678905');
expect(response.status).toBe(200);
expect(response.body.data.upc_code).toBe('012345678905');
expect(response.body.data.found).toBe(true);
});
it('should support include_external and force_refresh parameters', async () => {
const mockLookupResult = {
upc_code: '012345678905',
product: null,
external_lookup: {
name: 'External Product',
brand: 'External Brand',
category: null,
description: null,
image_url: null,
source: 'openfoodfacts' as const,
},
found: true,
from_cache: false,
};
vi.mocked(upcService.lookupUpc).mockResolvedValue(mockLookupResult);
const response = await supertest(app).get(
'/api/upc/lookup?upc_code=012345678905&include_external=true&force_refresh=true',
);
expect(response.status).toBe(200);
expect(upcService.lookupUpc).toHaveBeenCalledWith(
expect.objectContaining({
upc_code: '012345678905',
force_refresh: true,
}),
expectLogger,
);
});
it('should return 400 for invalid UPC code format', async () => {
const response = await supertest(app).get('/api/upc/lookup?upc_code=123');
expect(response.status).toBe(400);
expect(response.body.error.details[0].message).toMatch(/8-14 digits/);
});
it('should return 400 when upc_code is missing', async () => {
const response = await supertest(app).get('/api/upc/lookup');
expect(response.status).toBe(400);
});
it('should return 500 if the lookup service fails', async () => {
vi.mocked(upcService.lookupUpc).mockRejectedValue(new Error('Lookup error'));
const response = await supertest(app).get('/api/upc/lookup?upc_code=012345678905');
expect(response.status).toBe(500);
});
});
describe('GET /history', () => {
it('should return scan history with pagination', async () => {
const mockHistory = {
scans: [
{
scan_id: 1,
user_id: 'user-123',
upc_code: '012345678905',
product_id: 1,
scan_source: 'manual_entry' as UpcScanSource,
scan_confidence: null,
raw_image_path: null,
lookup_successful: true,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
},
],
total: 1,
};
vi.mocked(upcService.getScanHistory).mockResolvedValue(mockHistory);
const response = await supertest(app).get('/api/upc/history?limit=10&offset=0');
expect(response.status).toBe(200);
expect(response.body.data.scans).toHaveLength(1);
expect(response.body.data.total).toBe(1);
expect(upcService.getScanHistory).toHaveBeenCalledWith(
expect.objectContaining({
user_id: mockUserProfile.user.user_id,
limit: 10,
offset: 0,
}),
expectLogger,
);
});
it('should support filtering by lookup_successful', async () => {
vi.mocked(upcService.getScanHistory).mockResolvedValue({ scans: [], total: 0 });
const response = await supertest(app).get('/api/upc/history?lookup_successful=true');
expect(response.status).toBe(200);
expect(upcService.getScanHistory).toHaveBeenCalledWith(
expect.objectContaining({
lookup_successful: true,
}),
expectLogger,
);
});
it('should support filtering by scan_source', async () => {
vi.mocked(upcService.getScanHistory).mockResolvedValue({ scans: [], total: 0 });
const response = await supertest(app).get('/api/upc/history?scan_source=image_upload');
expect(response.status).toBe(200);
expect(upcService.getScanHistory).toHaveBeenCalledWith(
expect.objectContaining({
scan_source: 'image_upload',
}),
expectLogger,
);
});
it('should support filtering by date range', async () => {
vi.mocked(upcService.getScanHistory).mockResolvedValue({ scans: [], total: 0 });
const response = await supertest(app).get(
'/api/upc/history?from_date=2024-01-01&to_date=2024-01-31',
);
expect(response.status).toBe(200);
expect(upcService.getScanHistory).toHaveBeenCalledWith(
expect.objectContaining({
from_date: '2024-01-01',
to_date: '2024-01-31',
}),
expectLogger,
);
});
it('should return 400 for invalid date format', async () => {
const response = await supertest(app).get('/api/upc/history?from_date=01-01-2024');
expect(response.status).toBe(400);
});
it('should return 500 if the history service fails', async () => {
vi.mocked(upcService.getScanHistory).mockRejectedValue(new Error('History error'));
const response = await supertest(app).get('/api/upc/history');
expect(response.status).toBe(500);
});
});
describe('GET /history/:scanId', () => {
it('should return a specific scan by ID', async () => {
const mockScan = {
scan_id: 1,
user_id: 'user-123',
upc_code: '012345678905',
product_id: 1,
scan_source: 'manual_entry' as UpcScanSource,
scan_confidence: null,
raw_image_path: null,
lookup_successful: true,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
vi.mocked(upcService.getScanById).mockResolvedValue(mockScan);
const response = await supertest(app).get('/api/upc/history/1');
expect(response.status).toBe(200);
expect(response.body.data.scan_id).toBe(1);
expect(upcService.getScanById).toHaveBeenCalledWith(
1,
mockUserProfile.user.user_id,
expectLogger,
);
});
it('should return 404 when scan not found', async () => {
vi.mocked(upcService.getScanById).mockRejectedValue(new NotFoundError('Scan not found'));
const response = await supertest(app).get('/api/upc/history/999');
expect(response.status).toBe(404);
expect(response.body.error.message).toBe('Scan not found');
});
it('should return 400 for invalid scan ID', async () => {
const response = await supertest(app).get('/api/upc/history/abc');
expect(response.status).toBe(400);
expect(response.body.error.details[0].message).toMatch(/Invalid ID|number/i);
});
});
describe('GET /stats', () => {
it('should return scan statistics', async () => {
const mockStats = {
total_scans: 100,
successful_lookups: 80,
unique_products: 50,
scans_today: 5,
scans_this_week: 25,
};
vi.mocked(upcService.getScanStats).mockResolvedValue(mockStats);
const response = await supertest(app).get('/api/upc/stats');
expect(response.status).toBe(200);
expect(response.body.data.total_scans).toBe(100);
expect(response.body.data.successful_lookups).toBe(80);
expect(upcService.getScanStats).toHaveBeenCalledWith(
mockUserProfile.user.user_id,
expectLogger,
);
});
it('should return 500 if the stats service fails', async () => {
vi.mocked(upcService.getScanStats).mockRejectedValue(new Error('Stats error'));
const response = await supertest(app).get('/api/upc/stats');
expect(response.status).toBe(500);
});
});
describe('POST /link', () => {
it('should link UPC to product (admin only)', async () => {
vi.mocked(upcService.linkUpcToProduct).mockResolvedValue(undefined);
const response = await supertest(adminApp).post('/api/upc/link').send({
upc_code: '012345678905',
product_id: 1,
});
expect(response.status).toBe(204);
expect(upcService.linkUpcToProduct).toHaveBeenCalledWith(1, '012345678905', expectLogger);
});
it('should return 403 for non-admin users', async () => {
const response = await supertest(app).post('/api/upc/link').send({
upc_code: '012345678905',
product_id: 1,
});
expect(response.status).toBe(403);
expect(upcService.linkUpcToProduct).not.toHaveBeenCalled();
});
it('should return 400 for invalid UPC code format', async () => {
const response = await supertest(adminApp).post('/api/upc/link').send({
upc_code: '123',
product_id: 1,
});
expect(response.status).toBe(400);
expect(response.body.error.details[0].message).toMatch(/8-14 digits/);
});
it('should return 400 for invalid product_id', async () => {
const response = await supertest(adminApp).post('/api/upc/link').send({
upc_code: '012345678905',
product_id: -1,
});
expect(response.status).toBe(400);
});
it('should return 404 when product not found', async () => {
vi.mocked(upcService.linkUpcToProduct).mockRejectedValue(
new NotFoundError('Product not found'),
);
const response = await supertest(adminApp).post('/api/upc/link').send({
upc_code: '012345678905',
product_id: 999,
});
expect(response.status).toBe(404);
expect(response.body.error.message).toBe('Product not found');
});
it('should return 500 if the link service fails', async () => {
vi.mocked(upcService.linkUpcToProduct).mockRejectedValue(new Error('Link error'));
const response = await supertest(adminApp).post('/api/upc/link').send({
upc_code: '012345678905',
product_id: 1,
});
expect(response.status).toBe(500);
});
});
});

493
src/routes/upc.routes.ts Normal file
View File

@@ -0,0 +1,493 @@
// src/routes/upc.routes.ts
/**
* @file UPC Scanning API Routes
* Provides endpoints for UPC barcode scanning, lookup, and scan history.
*/
import express, { Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import passport, { isAdmin } from '../config/passport';
import type { UserProfile } from '../types';
import { validateRequest } from '../middleware/validation.middleware';
import { numericIdParam, optionalNumeric } from '../utils/zodUtils';
import { sendSuccess, sendNoContent } from '../utils/apiResponse';
import * as upcService from '../services/upcService.server';
const router = express.Router();

// --- Zod Schemas for UPC Routes ---

/**
 * UPC code validation (8-14 digits). Covers UPC-E/EAN-8 up through EAN/GTIN-14.
 */
const upcCodeSchema = z.string().regex(/^[0-9]{8,14}$/, 'UPC code must be 8-14 digits.');

/**
 * Scan source validation — how the scan was initiated by the client.
 */
const scanSourceSchema = z.enum(['image_upload', 'manual_entry', 'phone_app', 'camera_scan']);

/**
 * Schema for UPC scan request.
 * At least one of upc_code / image_base64 must be present (enforced by refine).
 */
const scanUpcSchema = z.object({
  body: z
    .object({
      // FIX: enforce the documented 8-14 digit format (see the OpenAPI pattern
      // on POST /scan) instead of accepting any string.
      upc_code: upcCodeSchema.optional(),
      image_base64: z.string().optional(),
      scan_source: scanSourceSchema,
    })
    .refine((data) => data.upc_code || data.image_base64, {
      message: 'Either upc_code or image_base64 must be provided.',
    }),
});

/**
 * Schema for UPC lookup request (without recording scan)
 */
const lookupUpcSchema = z.object({
  query: z.object({
    upc_code: upcCodeSchema,
    // FIX: the documented default for include_external is true (see OpenAPI on
    // GET /lookup), so an absent flag must allow external lookups; only an
    // explicit 'false' disables them.
    include_external: z
      .string()
      .optional()
      .transform((val) => val !== 'false'),
    // Defaults to false: only an explicit 'true' bypasses the cache.
    force_refresh: z
      .string()
      .optional()
      .transform((val) => val === 'true'),
  }),
});

/**
 * Schema for linking UPC to product (admin)
 */
const linkUpcSchema = z.object({
  body: z.object({
    upc_code: upcCodeSchema,
    product_id: z.number().int().positive('Product ID must be a positive integer.'),
  }),
});

/**
 * Schema for scan ID parameter (numeric path parameter).
 */
const scanIdParamSchema = numericIdParam(
  'scanId',
  "Invalid ID for parameter 'scanId'. Must be a number.",
);

/**
 * Schema for scan history query.
 * limit/offset bounds are enforced by the optionalNumeric helper; date filters
 * expect YYYY-MM-DD strings.
 */
const scanHistoryQuerySchema = z.object({
  query: z.object({
    limit: optionalNumeric({ default: 50, min: 1, max: 100, integer: true }),
    offset: optionalNumeric({ default: 0, min: 0, integer: true }),
    // Tri-state filter: 'true'/'false' filter the results; anything else
    // (including absence) means "no filter".
    lookup_successful: z
      .string()
      .optional()
      .transform((val) => (val === 'true' ? true : val === 'false' ? false : undefined)),
    scan_source: scanSourceSchema.optional(),
    from_date: z.string().date().optional(),
    to_date: z.string().date().optional(),
  }),
});

// Middleware to ensure user is authenticated for all UPC routes
router.use(passport.authenticate('jwt', { session: false }));
/**
 * @openapi
 * /upc/scan:
 *   post:
 *     tags: [UPC Scanning]
 *     summary: Scan a UPC barcode
 *     description: >
 *       Scans a UPC barcode either from a manually entered code or from an image.
 *       Records the scan in history and returns product information if found.
 *     security:
 *       - bearerAuth: []
 *     requestBody:
 *       required: true
 *       content:
 *         application/json:
 *           schema:
 *             type: object
 *             required:
 *               - scan_source
 *             properties:
 *               upc_code:
 *                 type: string
 *                 pattern: '^[0-9]{8,14}$'
 *                 description: UPC code (8-14 digits). Required if image_base64 is not provided.
 *               image_base64:
 *                 type: string
 *                 description: Base64-encoded image containing a barcode. Required if upc_code is not provided.
 *               scan_source:
 *                 type: string
 *                 enum: [image_upload, manual_entry, phone_app, camera_scan]
 *                 description: How the scan was initiated.
 *     responses:
 *       200:
 *         description: Scan completed successfully
 *         content:
 *           application/json:
 *             schema:
 *               $ref: '#/components/schemas/SuccessResponse'
 *       400:
 *         description: Validation error - invalid UPC code or missing data
 *       401:
 *         description: Unauthorized - invalid or missing token
 */
router.post(
  '/scan',
  validateRequest(scanUpcSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type ScanUpcRequest = z.infer<typeof scanUpcSchema>;
    const { body } = req as unknown as ScanUpcRequest;
    const profile = req.user as UserProfile;
    const userId = profile.user.user_id;
    try {
      req.log.info({ userId, scanSource: body.scan_source }, 'UPC scan request received');
      // Delegate to the service layer; the result bundles the scan record and
      // any resolved product / external lookup data.
      const scanResult = await upcService.scanUpc(userId, body, req.log);
      sendSuccess(res, scanResult);
    } catch (error) {
      req.log.error(
        { error, userId, scanSource: body.scan_source },
        'Error processing UPC scan',
      );
      next(error);
    }
  },
);
/**
 * @openapi
 * /upc/lookup:
 *   get:
 *     tags: [UPC Scanning]
 *     summary: Look up a UPC code
 *     description: >
 *       Looks up product information for a UPC code without recording in scan history.
 *       Useful for verification or quick lookups.
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: query
 *         name: upc_code
 *         required: true
 *         schema:
 *           type: string
 *           pattern: '^[0-9]{8,14}$'
 *         description: UPC code to look up (8-14 digits)
 *       - in: query
 *         name: include_external
 *         schema:
 *           type: boolean
 *           default: true
 *         description: Whether to check external APIs if not found locally
 *       - in: query
 *         name: force_refresh
 *         schema:
 *           type: boolean
 *           default: false
 *         description: Skip cache and perform fresh external lookup
 *     responses:
 *       200:
 *         description: Lookup completed
 *         content:
 *           application/json:
 *             schema:
 *               $ref: '#/components/schemas/SuccessResponse'
 *       400:
 *         description: Invalid UPC code format
 *       401:
 *         description: Unauthorized - invalid or missing token
 */
router.get(
  '/lookup',
  validateRequest(lookupUpcSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type LookupUpcRequest = z.infer<typeof lookupUpcSchema>;
    const { query } = req as unknown as LookupUpcRequest;
    try {
      req.log.debug({ upcCode: query.upc_code }, 'UPC lookup request received');
      const result = await upcService.lookupUpc(
        {
          upc_code: query.upc_code,
          // FIX: include_external was validated and documented above but never
          // forwarded, so clients could not disable external API lookups.
          // NOTE(review): assumes upcService.lookupUpc honors this option —
          // confirm against the service's options type.
          include_external: query.include_external,
          force_refresh: query.force_refresh,
        },
        req.log,
      );
      sendSuccess(res, result);
    } catch (error) {
      req.log.error({ error, upcCode: query.upc_code }, 'Error looking up UPC');
      next(error);
    }
  },
);
/**
 * @openapi
 * /upc/history:
 *   get:
 *     tags: [UPC Scanning]
 *     summary: Get scan history
 *     description: Retrieve the authenticated user's UPC scan history with optional filtering.
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: query
 *         name: limit
 *         schema:
 *           type: integer
 *           minimum: 1
 *           maximum: 100
 *           default: 50
 *         description: Maximum number of results
 *       - in: query
 *         name: offset
 *         schema:
 *           type: integer
 *           minimum: 0
 *           default: 0
 *         description: Number of results to skip
 *       - in: query
 *         name: lookup_successful
 *         schema:
 *           type: boolean
 *         description: Filter by lookup success status
 *       - in: query
 *         name: scan_source
 *         schema:
 *           type: string
 *           enum: [image_upload, manual_entry, phone_app, camera_scan]
 *         description: Filter by scan source
 *       - in: query
 *         name: from_date
 *         schema:
 *           type: string
 *           format: date
 *         description: Filter scans from this date (YYYY-MM-DD)
 *       - in: query
 *         name: to_date
 *         schema:
 *           type: string
 *           format: date
 *         description: Filter scans until this date (YYYY-MM-DD)
 *     responses:
 *       200:
 *         description: Scan history retrieved
 *         content:
 *           application/json:
 *             schema:
 *               $ref: '#/components/schemas/SuccessResponse'
 *       401:
 *         description: Unauthorized - invalid or missing token
 */
router.get(
  '/history',
  validateRequest(scanHistoryQuerySchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type ScanHistoryRequest = z.infer<typeof scanHistoryQuerySchema>;
    const { query } = req as unknown as ScanHistoryRequest;
    const { user } = req.user as UserProfile;
    // Assemble the service filter from the validated query string; history is
    // always scoped to the authenticated user.
    const historyFilter = {
      user_id: user.user_id,
      limit: query.limit,
      offset: query.offset,
      lookup_successful: query.lookup_successful,
      scan_source: query.scan_source,
      from_date: query.from_date,
      to_date: query.to_date,
    };
    try {
      sendSuccess(res, await upcService.getScanHistory(historyFilter, req.log));
    } catch (error) {
      req.log.error({ error, userId: user.user_id }, 'Error fetching scan history');
      next(error);
    }
  },
);
/**
 * @openapi
 * /upc/history/{scanId}:
 *   get:
 *     tags: [UPC Scanning]
 *     summary: Get scan by ID
 *     description: Retrieve a specific scan record by its ID.
 *     security:
 *       - bearerAuth: []
 *     parameters:
 *       - in: path
 *         name: scanId
 *         required: true
 *         schema:
 *           type: integer
 *         description: Scan ID
 *     responses:
 *       200:
 *         description: Scan record retrieved
 *         content:
 *           application/json:
 *             schema:
 *               $ref: '#/components/schemas/SuccessResponse'
 *       401:
 *         description: Unauthorized - invalid or missing token
 *       404:
 *         description: Scan record not found
 */
router.get(
  '/history/:scanId',
  validateRequest(scanIdParamSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type GetScanRequest = z.infer<typeof scanIdParamSchema>;
    const { params } = req as unknown as GetScanRequest;
    const { user } = req.user as UserProfile;
    try {
      // The user id is passed along so the service only returns scans owned by
      // the caller; unknown/foreign scans surface as NotFoundError.
      sendSuccess(res, await upcService.getScanById(params.scanId, user.user_id, req.log));
    } catch (error) {
      req.log.error(
        { error, userId: user.user_id, scanId: params.scanId },
        'Error fetching scan by ID',
      );
      next(error);
    }
  },
);
/**
 * @openapi
 * /upc/stats:
 *   get:
 *     tags: [UPC Scanning]
 *     summary: Get scan statistics
 *     description: Get scanning statistics for the authenticated user.
 *     security:
 *       - bearerAuth: []
 *     responses:
 *       200:
 *         description: Scan statistics retrieved
 *         content:
 *           application/json:
 *             schema:
 *               type: object
 *               properties:
 *                 success:
 *                   type: boolean
 *                 data:
 *                   type: object
 *                   properties:
 *                     total_scans:
 *                       type: integer
 *                     successful_lookups:
 *                       type: integer
 *                     unique_products:
 *                       type: integer
 *                     scans_today:
 *                       type: integer
 *                     scans_this_week:
 *                       type: integer
 *       401:
 *         description: Unauthorized - invalid or missing token
 */
router.get('/stats', async (req: Request, res: Response, next: NextFunction) => {
  const { user } = req.user as UserProfile;
  try {
    // Per-user aggregate counters are computed entirely in the service layer.
    sendSuccess(res, await upcService.getScanStats(user.user_id, req.log));
  } catch (error) {
    req.log.error({ error, userId: user.user_id }, 'Error fetching scan statistics');
    next(error);
  }
});
/**
 * @openapi
 * /upc/link:
 *   post:
 *     tags: [UPC Scanning]
 *     summary: Link UPC to product (Admin)
 *     description: >
 *       Links a UPC code to an existing product in the database.
 *       This is an admin-only operation.
 *     security:
 *       - bearerAuth: []
 *     requestBody:
 *       required: true
 *       content:
 *         application/json:
 *           schema:
 *             type: object
 *             required:
 *               - upc_code
 *               - product_id
 *             properties:
 *               upc_code:
 *                 type: string
 *                 pattern: '^[0-9]{8,14}$'
 *                 description: UPC code to link (8-14 digits)
 *               product_id:
 *                 type: integer
 *                 description: Product ID to link the UPC to
 *     responses:
 *       204:
 *         description: UPC linked successfully
 *       400:
 *         description: Invalid UPC code or product ID
 *       401:
 *         description: Unauthorized - invalid or missing token
 *       403:
 *         description: Forbidden - user is not an admin
 *       404:
 *         description: Product not found
 *       409:
 *         description: UPC code already linked to another product
 */
router.post(
  '/link',
  isAdmin, // Admin role check - only admins can link UPC codes to products
  validateRequest(linkUpcSchema),
  async (req: Request, res: Response, next: NextFunction) => {
    type LinkUpcRequest = z.infer<typeof linkUpcSchema>;
    const { body } = req as unknown as LinkUpcRequest;
    const adminProfile = req.user as UserProfile;
    // Shared log context for both the audit entry and any failure report.
    const logContext = {
      userId: adminProfile.user.user_id,
      productId: body.product_id,
      upcCode: body.upc_code,
    };
    try {
      req.log.info(logContext, 'UPC link request received');
      await upcService.linkUpcToProduct(body.product_id, body.upc_code, req.log);
      // 204: the link was recorded and there is no response body to return.
      sendNoContent(res);
    } catch (error) {
      req.log.error({ error, ...logContext }, 'Error linking UPC to product');
      next(error);
    }
  },
);
export default router;

View File

@@ -42,7 +42,7 @@ import userRouter from './user.routes';
import * as db from '../services/db/index.db';
// Mock Passport middleware
vi.mock('./passport.routes', () => ({
vi.mock('../config/passport', () => ({
default: {
authenticate: vi.fn(
() => (req: express.Request, res: express.Response, next: express.NextFunction) => {

View File

@@ -1,6 +1,6 @@
// src/routes/user.routes.ts
import express, { Request, Response, NextFunction } from 'express';
import passport from './passport.routes';
import passport from '../config/passport';
// All route handlers now use req.log (request-scoped logger) as per ADR-004
import { z } from 'zod';
// Removed: import { logger } from '../services/logger.server';

View File

@@ -19,9 +19,13 @@ import { ValidationError } from './db/errors.db';
import { AiFlyerDataSchema } from '../types/ai';
// Mock the logger to prevent the real pino instance from being created, which causes issues with 'pino-pretty' in tests.
vi.mock('./logger.server', () => ({
logger: createMockLogger(),
}));
vi.mock('./logger.server', async () => {
const { createMockLogger } = await import('../tests/utils/mockLogger');
return {
logger: createMockLogger(),
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Import the mocked logger instance to pass to the service constructor.
import { logger as mockLoggerInstance } from './logger.server';
@@ -1096,6 +1100,11 @@ describe('AI Service (Server)', () => {
submitterIp: '127.0.0.1',
userProfileAddress: '123 St, City, Country', // Partial address match based on filter(Boolean)
baseUrl: 'https://example.com',
meta: {
requestId: undefined,
userId: 'user123',
origin: 'api',
},
});
expect(result.id).toBe('job123');
});
@@ -1118,6 +1127,11 @@ describe('AI Service (Server)', () => {
userId: undefined,
userProfileAddress: undefined,
baseUrl: 'https://example.com',
meta: {
requestId: undefined,
userId: undefined,
origin: 'api',
},
}),
);
});

View File

@@ -819,7 +819,8 @@ export class AIService {
logger.info({ baseUrl }, '[aiService] Enqueuing job with valid baseUrl.');
// --- END DEBUGGING ---
// 3. Add job to the queue
// 3. Add job to the queue with context propagation (ADR-051)
const bindings = logger.bindings?.() || {};
const job = await flyerQueue.add('process-flyer', {
filePath: file.path,
originalFileName: file.originalname,
@@ -828,6 +829,11 @@ export class AIService {
submitterIp: submitterIp,
userProfileAddress: userProfileAddress,
baseUrl: baseUrl,
meta: {
requestId: bindings.request_id as string | undefined,
userId: userProfile?.user.user_id,
origin: 'api',
},
});
logger.info(`Enqueued flyer for processing. File: ${file.originalname}, Job ID: ${job.id}`);
@@ -1005,5 +1011,5 @@ export class AIService {
}
// Export a singleton instance of the service for use throughout the application.
import { logger } from './logger.server';
export const aiService = new AIService(logger);
import { createScopedLogger } from './logger.server';
export const aiService = new AIService(createScopedLogger('ai-service'));

View File

@@ -181,6 +181,7 @@ describe('API Client', () => {
vi.mocked(global.fetch).mockResolvedValueOnce({
ok: false,
status: 500,
headers: new Headers(),
clone: () => ({ text: () => Promise.resolve('Internal Server Error') }),
} as Response);
@@ -197,6 +198,23 @@ describe('API Client', () => {
);
});
it('should handle x-request-id header on failure (Sentry optional)', async () => {
const requestId = 'req-123';
vi.mocked(global.fetch).mockResolvedValueOnce({
ok: false,
status: 500,
headers: new Headers({ 'x-request-id': requestId }),
clone: () => ({ text: () => Promise.resolve('Error') }),
} as Response);
// This should not throw even if Sentry is not installed
await apiClient.apiFetch('/error');
// The request should complete without error
expect(true).toBe(true);
});
it('should handle 401 on initial call, refresh token, and then poll until completed', async () => {
localStorage.setItem('authToken', 'expired-token');
// Mock the global fetch to return a sequence of responses:
@@ -301,7 +319,10 @@ describe('API Client', () => {
});
it('addWatchedItem should send a POST request with the correct body', async () => {
const watchedItemData = createMockWatchedItemPayload({ itemName: 'Apples', category: 'Produce' });
const watchedItemData = createMockWatchedItemPayload({
itemName: 'Apples',
category: 'Produce',
});
await apiClient.addWatchedItem(watchedItemData.itemName, watchedItemData.category);
expect(capturedUrl?.pathname).toBe('/api/users/watched-items');
@@ -532,7 +553,10 @@ describe('API Client', () => {
it('addRecipeComment should send a POST request with content and optional parentId', async () => {
const recipeId = 456;
const commentData = createMockRecipeCommentPayload({ content: 'This is a reply', parentCommentId: 789 });
const commentData = createMockRecipeCommentPayload({
content: 'This is a reply',
parentCommentId: 789,
});
await apiClient.addRecipeComment(recipeId, commentData.content, commentData.parentCommentId);
expect(capturedUrl?.pathname).toBe(`/api/recipes/${recipeId}/comments`);
expect(capturedBody).toEqual(commentData);
@@ -646,7 +670,10 @@ describe('API Client', () => {
});
it('updateUserAddress should send a PUT request with address data', async () => {
const addressData = createMockAddressPayload({ address_line_1: '123 Main St', city: 'Anytown' });
const addressData = createMockAddressPayload({
address_line_1: '123 Main St',
city: 'Anytown',
});
await apiClient.updateUserAddress(addressData);
expect(capturedUrl?.pathname).toBe('/api/users/profile/address');
expect(capturedBody).toEqual(addressData);
@@ -744,6 +771,16 @@ describe('API Client', () => {
expect(capturedUrl?.pathname).toBe('/api/health/redis');
});
it('getQueueHealth should call the correct health check endpoint', async () => {
server.use(
http.get('http://localhost/api/health/queues', () => {
return HttpResponse.json({});
}),
);
await apiClient.getQueueHealth();
expect(capturedUrl?.pathname).toBe('/api/health/queues');
});
it('checkPm2Status should call the correct system endpoint', async () => {
server.use(
http.get('http://localhost/api/system/pm2-status', () => {
@@ -939,7 +976,11 @@ describe('API Client', () => {
});
it('logSearchQuery should send a POST request with query data', async () => {
const queryData = createMockSearchQueryPayload({ query_text: 'apples', result_count: 10, was_successful: true });
const queryData = createMockSearchQueryPayload({
query_text: 'apples',
result_count: 10,
was_successful: true,
});
await apiClient.logSearchQuery(queryData as any);
expect(capturedUrl?.pathname).toBe('/api/search/log');
expect(capturedBody).toEqual(queryData);

View File

@@ -3,6 +3,16 @@ import { Profile, ShoppingListItem, SearchQuery, Budget, Address } from '../type
import { logger } from './logger.client';
import { eventBus } from './eventBus';
// Sentry integration is optional - only used if @sentry/browser is installed
let Sentry: { setTag?: (key: string, value: string) => void } | null = null;
try {
// Dynamic import would be cleaner but this keeps the code synchronous
// eslint-disable-next-line @typescript-eslint/no-require-imports
Sentry = require('@sentry/browser');
} catch {
// Sentry not installed, skip error tracking integration
}
// This constant should point to your backend API.
// It's often a good practice to store this in an environment variable.
// Using a relative path '/api' is the most robust method for production.
@@ -148,9 +158,14 @@ export const apiFetch = async (
// --- DEBUG LOGGING for failed requests ---
if (!response.ok) {
const requestId = response.headers.get('x-request-id');
if (requestId && Sentry?.setTag) {
Sentry.setTag('api_request_id', requestId);
}
const responseText = await response.clone().text();
logger.error(
{ url: fullUrl, status: response.status, body: responseText },
{ url: fullUrl, status: response.status, body: responseText, requestId },
'apiFetch: Request failed',
);
}
@@ -272,6 +287,12 @@ export const checkDbPoolHealth = (): Promise<Response> => publicGet('/health/db-
*/
export const checkRedisHealth = (): Promise<Response> => publicGet('/health/redis');
/**
* Fetches the health status of the background job queues.
* @returns A promise that resolves to the queue status object.
*/
export const getQueueHealth = (): Promise<Response> => publicGet('/health/queues');
/**
* Checks the status of the application process managed by PM2.
* This is intended for development and diagnostic purposes.

View File

@@ -0,0 +1,408 @@
// src/services/barcodeService.server.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Logger } from 'pino';
import type { Job } from 'bullmq';
import type { BarcodeDetectionJobData } from '../types/job-data';
import { createMockLogger } from '../tests/utils/mockLogger';
// Unmock the barcodeService module so we can test the real implementation
// The global test setup mocks this to prevent zxing-wasm issues, but we need the real module here
vi.unmock('./barcodeService.server');
// Mock dependencies
// zxing-wasm reader: each test supplies its own detection result via mockResolvedValueOnce.
vi.mock('zxing-wasm/reader', () => ({
  readBarcodesFromImageData: vi.fn(),
}));
// sharp: chainable stub reporting a 100x100 image and yielding a blank RGBA buffer.
// NOTE(review): afterEach uses vi.resetAllMocks(); this relies on Vitest restoring
// the implementation passed to vi.fn(impl) between tests — confirm against the
// pinned Vitest version.
vi.mock('sharp', () => {
  const mockSharp = vi.fn(() => ({
    metadata: vi.fn().mockResolvedValue({ width: 100, height: 100 }),
    ensureAlpha: vi.fn().mockReturnThis(),
    raw: vi.fn().mockReturnThis(),
    toBuffer: vi.fn().mockResolvedValue({
      data: new Uint8Array(100 * 100 * 4),
      info: { width: 100, height: 100 },
    }),
    grayscale: vi.fn().mockReturnThis(),
    normalize: vi.fn().mockReturnThis(),
    sharpen: vi.fn().mockReturnThis(),
    toFile: vi.fn().mockResolvedValue(undefined),
  }));
  return { default: mockSharp };
});
// fs: every readFile resolves to a small placeholder buffer (content is irrelevant
// because the sharp stub ignores its input).
vi.mock('node:fs/promises', () => ({
  default: {
    readFile: vi.fn().mockResolvedValue(Buffer.from('mock image data')),
  },
}));
// DB: capture scan-record updates without touching a real database.
vi.mock('./db/index.db', () => ({
  upcRepo: {
    updateScanWithDetectedCode: vi.fn().mockResolvedValue(undefined),
  },
}));
// Import after mocks are set up
import {
detectBarcode,
isValidUpcFormat,
calculateUpcCheckDigit,
validateUpcCheckDigit,
processBarcodeDetectionJob,
detectMultipleBarcodes,
enhanceImageForDetection,
} from './barcodeService.server';
// Unit tests for the real barcodeService.server implementation with all
// external dependencies (zxing-wasm, sharp, fs, DB) stubbed out above.
describe('barcodeService.server', () => {
  let mockLogger: Logger;
  beforeEach(() => {
    vi.clearAllMocks();
    mockLogger = createMockLogger();
  });
  afterEach(() => {
    vi.resetAllMocks();
  });
  // Single-barcode detection: format mapping, confidence estimate, error paths.
  describe('detectBarcode', () => {
    it('should detect a valid UPC-A barcode from image', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '012345678905', format: 'UPC-A' },
      ] as any);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(true);
      expect(result.upc_code).toBe('012345678905');
      expect(result.format).toBe('UPC-A');
      // 0.95 is the service's fixed estimate for a non-empty decode.
      expect(result.confidence).toBe(0.95);
      expect(result.error).toBeNull();
    });
    it('should detect a valid UPC-E barcode from image', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '01234567', format: 'UPC-E' },
      ] as any);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(true);
      expect(result.upc_code).toBe('01234567');
      expect(result.format).toBe('UPC-E');
    });
    it('should detect a valid EAN-13 barcode from image', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '5901234123457', format: 'EAN-13' },
      ] as any);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(true);
      expect(result.upc_code).toBe('5901234123457');
      expect(result.format).toBe('EAN-13');
    });
    it('should detect a valid EAN-8 barcode from image', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '96385074', format: 'EAN-8' },
      ] as any);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(true);
      expect(result.upc_code).toBe('96385074');
      expect(result.format).toBe('EAN-8');
    });
    it('should return detected: false when no barcode found', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([]);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(false);
      expect(result.upc_code).toBeNull();
      expect(result.confidence).toBeNull();
      expect(result.format).toBeNull();
      expect(result.error).toBeNull();
    });
    it('should return error when image dimensions cannot be determined', async () => {
      // Override the sharp stub once so metadata() resolves without width/height.
      const sharp = (await import('sharp')).default;
      vi.mocked(sharp).mockReturnValueOnce({
        metadata: vi.fn().mockResolvedValue({}),
        ensureAlpha: vi.fn().mockReturnThis(),
        raw: vi.fn().mockReturnThis(),
        toBuffer: vi.fn(),
      } as any);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(false);
      expect(result.error).toBe('Could not determine image dimensions');
    });
    it('should handle errors during detection gracefully', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockRejectedValueOnce(new Error('Detection failed'));
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(false);
      expect(result.error).toBe('Detection failed');
    });
    it('should map unknown barcode formats to "unknown"', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '12345678', format: 'SomeFutureFormat' },
      ] as any);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(true);
      expect(result.format).toBe('unknown');
    });
    it('should calculate lower confidence when text is empty', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '', format: 'UPC-A' },
      ] as any);
      const result = await detectBarcode('/path/to/image.jpg', mockLogger);
      expect(result.detected).toBe(true);
      // Empty decoded text drops the confidence estimate to 0.5.
      expect(result.confidence).toBe(0.5);
    });
  });
  // Pure-format validation: 8-14 numeric digits.
  describe('isValidUpcFormat', () => {
    it('should return true for valid 12-digit UPC-A', () => {
      expect(isValidUpcFormat('012345678905')).toBe(true);
    });
    it('should return true for valid 8-digit UPC-E', () => {
      expect(isValidUpcFormat('01234567')).toBe(true);
    });
    it('should return true for valid 13-digit EAN-13', () => {
      expect(isValidUpcFormat('5901234123457')).toBe(true);
    });
    it('should return true for valid 8-digit EAN-8', () => {
      expect(isValidUpcFormat('96385074')).toBe(true);
    });
    it('should return true for valid 14-digit GTIN-14', () => {
      expect(isValidUpcFormat('00012345678905')).toBe(true);
    });
    it('should return false for code with less than 8 digits', () => {
      expect(isValidUpcFormat('1234567')).toBe(false);
    });
    it('should return false for code with more than 14 digits', () => {
      expect(isValidUpcFormat('123456789012345')).toBe(false);
    });
    it('should return false for code with non-numeric characters', () => {
      expect(isValidUpcFormat('01234567890A')).toBe(false);
    });
    it('should return false for empty string', () => {
      expect(isValidUpcFormat('')).toBe(false);
    });
  });
  // GS1 check-digit computation for an 11-digit UPC-A payload.
  describe('calculateUpcCheckDigit', () => {
    it('should calculate correct check digit for valid 11-digit code', () => {
      // UPC-A: 01234567890 has check digit 5
      expect(calculateUpcCheckDigit('01234567890')).toBe(5);
    });
    it('should return null for code with wrong length', () => {
      expect(calculateUpcCheckDigit('1234567890')).toBeNull(); // 10 digits
      expect(calculateUpcCheckDigit('123456789012')).toBeNull(); // 12 digits
    });
    it('should return null for code with non-numeric characters', () => {
      expect(calculateUpcCheckDigit('0123456789A')).toBeNull();
    });
    it('should handle all zeros', () => {
      // 00000000000 should produce a valid check digit
      const checkDigit = calculateUpcCheckDigit('00000000000');
      expect(typeof checkDigit).toBe('number');
      expect(checkDigit).toBeGreaterThanOrEqual(0);
      expect(checkDigit).toBeLessThanOrEqual(9);
    });
  });
  // Full 12-digit validation against the trailing check digit.
  describe('validateUpcCheckDigit', () => {
    it('should return true for valid UPC-A with correct check digit', () => {
      expect(validateUpcCheckDigit('012345678905')).toBe(true);
    });
    it('should return false for UPC-A with incorrect check digit', () => {
      expect(validateUpcCheckDigit('012345678901')).toBe(false);
    });
    it('should return false for code with wrong length', () => {
      expect(validateUpcCheckDigit('01234567890')).toBe(false); // 11 digits
      expect(validateUpcCheckDigit('0123456789012')).toBe(false); // 13 digits
    });
    it('should return false for code with non-numeric characters', () => {
      expect(validateUpcCheckDigit('01234567890A')).toBe(false);
    });
  });
  // Queue worker: persists detected codes via upcRepo, never throws.
  describe('processBarcodeDetectionJob', () => {
    it('should process job and update scan record when barcode detected', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      const { upcRepo } = await import('./db/index.db');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '012345678905', format: 'UPC-A' },
      ] as any);
      const mockJob = {
        id: 'job-1',
        data: {
          scanId: 123,
          imagePath: '/path/to/barcode.jpg',
          userId: 'user-1',
          meta: { requestId: 'req-1' },
        },
      } as Job<BarcodeDetectionJobData>;
      const result = await processBarcodeDetectionJob(mockJob, mockLogger);
      expect(result.detected).toBe(true);
      expect(result.upc_code).toBe('012345678905');
      expect(upcRepo.updateScanWithDetectedCode).toHaveBeenCalledWith(
        123,
        '012345678905',
        0.95,
        expect.any(Object),
      );
    });
    it('should not update scan record when no barcode detected', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      const { upcRepo } = await import('./db/index.db');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([]);
      const mockJob = {
        id: 'job-2',
        data: {
          scanId: 456,
          imagePath: '/path/to/no-barcode.jpg',
          userId: 'user-2',
        },
      } as Job<BarcodeDetectionJobData>;
      const result = await processBarcodeDetectionJob(mockJob, mockLogger);
      expect(result.detected).toBe(false);
      expect(upcRepo.updateScanWithDetectedCode).not.toHaveBeenCalled();
    });
    it('should return error result when job processing fails', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockRejectedValueOnce(new Error('Processing error'));
      const mockJob = {
        id: 'job-3',
        data: {
          scanId: 789,
          imagePath: '/path/to/error.jpg',
          userId: 'user-3',
        },
      } as Job<BarcodeDetectionJobData>;
      const result = await processBarcodeDetectionJob(mockJob, mockLogger);
      expect(result.detected).toBe(false);
      expect(result.error).toBe('Processing error');
    });
  });
  // Multi-barcode detection: maps every zxing result, empty array on error.
  describe('detectMultipleBarcodes', () => {
    it('should detect multiple barcodes in an image', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([
        { text: '012345678905', format: 'UPC-A' },
        { text: '5901234123457', format: 'EAN-13' },
        { text: '96385074', format: 'EAN-8' },
      ] as any);
      const results = await detectMultipleBarcodes('/path/to/multi.jpg', mockLogger);
      expect(results).toHaveLength(3);
      expect(results[0].upc_code).toBe('012345678905');
      expect(results[0].format).toBe('UPC-A');
      expect(results[1].upc_code).toBe('5901234123457');
      expect(results[1].format).toBe('EAN-13');
      expect(results[2].upc_code).toBe('96385074');
      expect(results[2].format).toBe('EAN-8');
    });
    it('should return empty array when no barcodes detected', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockResolvedValueOnce([]);
      const results = await detectMultipleBarcodes('/path/to/no-codes.jpg', mockLogger);
      expect(results).toEqual([]);
    });
    it('should return empty array on error', async () => {
      const { readBarcodesFromImageData } = await import('zxing-wasm/reader');
      vi.mocked(readBarcodesFromImageData).mockRejectedValueOnce(
        new Error('Multi-detection failed'),
      );
      const results = await detectMultipleBarcodes('/path/to/error.jpg', mockLogger);
      expect(results).toEqual([]);
    });
  });
  // Image preprocessing: '-enhanced' suffix path, falls back to original on failure.
  describe('enhanceImageForDetection', () => {
    it('should enhance image and return new path', async () => {
      const result = await enhanceImageForDetection('/path/to/image.jpg', mockLogger);
      expect(result).toBe('/path/to/image-enhanced.jpg');
    });
    it('should handle different file extensions', async () => {
      const result = await enhanceImageForDetection('/path/to/image.png', mockLogger);
      expect(result).toBe('/path/to/image-enhanced.png');
    });
    it('should return original path on enhancement failure', async () => {
      const sharp = (await import('sharp')).default;
      vi.mocked(sharp).mockReturnValueOnce({
        grayscale: vi.fn().mockReturnThis(),
        normalize: vi.fn().mockReturnThis(),
        sharpen: vi.fn().mockReturnThis(),
        toFile: vi.fn().mockRejectedValue(new Error('Enhancement failed')),
      } as any);
      const result = await enhanceImageForDetection('/path/to/image.jpg', mockLogger);
      expect(result).toBe('/path/to/image.jpg');
    });
  });
});

View File

@@ -0,0 +1,335 @@
// src/services/barcodeService.server.ts
/**
* @file Barcode Detection Service
* Provides barcode/UPC detection from images using zxing-wasm.
* Supports UPC-A, UPC-E, EAN-13, EAN-8, CODE-128, CODE-39, and QR codes.
*/
import type { Logger } from 'pino';
import type { Job } from 'bullmq';
import type { BarcodeDetectionJobData } from '../types/job-data';
import type { BarcodeDetectionResult } from '../types/upc';
import { upcRepo } from './db/index.db';
import sharp from 'sharp';
import fs from 'node:fs/promises';
/**
 * Supported barcode formats for detection.
 */
export type BarcodeFormat =
  | 'UPC-A'
  | 'UPC-E'
  | 'EAN-13'
  | 'EAN-8'
  | 'CODE-128'
  | 'CODE-39'
  | 'QR_CODE'
  | 'unknown';
/**
 * Maps zxing-wasm format names to our BarcodeFormat type.
 * Keys are the format identifiers reported on zxing-wasm results; any
 * format not listed here is mapped to 'unknown' by the callers.
 */
const formatMap: Record<string, BarcodeFormat> = {
  'UPC-A': 'UPC-A',
  'UPC-E': 'UPC-E',
  'EAN-13': 'EAN-13',
  'EAN-8': 'EAN-8',
  Code128: 'CODE-128',
  Code39: 'CODE-39',
  QRCode: 'QR_CODE',
};
/**
 * Detects a single barcode in an image file using zxing-wasm.
 *
 * The file is decoded to raw RGBA pixels with sharp, wrapped in an
 * ImageData-compatible object, and handed to the zxing reader. Failures
 * are reported through the result's `error` field rather than thrown.
 *
 * @param imagePath Path to the image file on disk
 * @param logger Pino logger instance
 * @returns Detection result with UPC code if found
 */
export const detectBarcode = async (
  imagePath: string,
  logger: Logger,
): Promise<BarcodeDetectionResult> => {
  const log = logger.child({ imagePath });
  log.info('Starting barcode detection');
  try {
    // zxing-wasm ships as an ES module, so load it lazily.
    const { readBarcodesFromImageData } = await import('zxing-wasm/reader');

    const fileBytes = await fs.readFile(imagePath);
    const pipeline = sharp(fileBytes);

    // Bail out early when sharp cannot report pixel dimensions.
    const meta = await pipeline.metadata();
    if (!meta.width || !meta.height) {
      log.warn('Could not determine image dimensions');
      return {
        detected: false,
        upc_code: null,
        confidence: null,
        format: null,
        error: 'Could not determine image dimensions',
      };
    }

    // Decode to raw RGBA pixels for the zxing reader.
    const { data, info } = await pipeline.ensureAlpha().raw().toBuffer({ resolveWithObject: true });
    const frame = {
      data: new Uint8ClampedArray(data),
      width: info.width,
      height: info.height,
      colorSpace: 'srgb' as const,
    };
    log.debug(
      { width: info.width, height: info.height },
      'Processing image for barcode detection',
    );

    const hits = await readBarcodesFromImageData(frame as ImageData, {
      tryHarder: true,
      tryRotate: true,
      tryInvert: true,
      formats: ['UPC-A', 'UPC-E', 'EAN-13', 'EAN-8', 'Code128', 'Code39'],
    });

    if (hits.length === 0) {
      log.info('No barcode detected in image');
      return {
        detected: false,
        upc_code: null,
        confidence: null,
        format: null,
        error: null,
      };
    }

    // zxing orders results by quality; keep the first.
    const best = hits[0];
    const format = formatMap[best.format] || 'unknown';
    // zxing-wasm exposes no confidence score, so estimate one: any
    // non-empty decoded text is treated as a high-confidence read.
    const confidence = best.text ? 0.95 : 0.5;
    log.info(
      { upcCode: best.text, format, confidence },
      'Barcode detected successfully',
    );
    return {
      detected: true,
      upc_code: best.text,
      confidence,
      format,
      error: null,
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    log.error({ err: error }, 'Barcode detection failed');
    return {
      detected: false,
      upc_code: null,
      confidence: null,
      format: null,
      error: message,
    };
  }
};
/**
 * Checks whether a string is a plausible UPC/EAN/GTIN code.
 *
 * Accepted lengths (digits only): UPC-E / EAN-8 (8), UPC-A (12),
 * EAN-13 (13), up to GTIN-14 (14).
 *
 * @param code The candidate code
 * @returns True when the code consists of 8 to 14 numeric digits
 */
export const isValidUpcFormat = (code: string): boolean => {
  const digitsOnly = /^\d{8,14}$/;
  return digitsOnly.test(code);
};
/**
 * Computes the UPC-A check digit for an 11-digit payload.
 *
 * Per the GS1 algorithm, digits at even string indices (1st, 3rd, ... in
 * 1-based positions) are weighted by 3 and the rest by 1; the check digit
 * brings the weighted sum up to the next multiple of 10.
 *
 * @param code The 11-digit UPC-A code (without check digit)
 * @returns The check digit 0-9, or null when the input is not exactly 11 digits
 */
export const calculateUpcCheckDigit = (code: string): number | null => {
  if (!/^\d{11}$/.test(code)) {
    return null;
  }
  const weightedSum = [...code].reduce(
    (sum, ch, idx) => sum + Number(ch) * (idx % 2 === 0 ? 3 : 1),
    0,
  );
  return (10 - (weightedSum % 10)) % 10;
};
/**
 * Validates a complete 12-digit UPC-A code against its trailing check digit.
 *
 * @param code The complete UPC code (11-digit payload + check digit)
 * @returns True when the last digit matches the GS1 check digit of the first 11
 */
export const validateUpcCheckDigit = (code: string): boolean => {
  if (!/^\d{12}$/.test(code)) {
    return false;
  }
  // Recompute the check digit from the first 11 digits (3-1-3-... weighting).
  let sum = 0;
  for (let i = 0; i < 11; i++) {
    sum += Number(code[i]) * (i % 2 === 0 ? 3 : 1);
  }
  const expected = (10 - (sum % 10)) % 10;
  return expected === Number(code[11]);
};
/**
 * Queue worker entry point for barcode detection jobs.
 *
 * Runs detection on the job's image and, when a code is found, persists it
 * back onto the originating scan record. Errors are captured in the
 * returned result's `error` field rather than rethrown.
 *
 * @param job The BullMQ job carrying scanId, imagePath and userId
 * @param logger Pino logger instance
 * @returns Detection result
 */
export const processBarcodeDetectionJob = async (
  job: Job<BarcodeDetectionJobData>,
  logger: Logger,
): Promise<BarcodeDetectionResult> => {
  const { scanId, imagePath, userId } = job.data;
  const jobLogger = logger.child({
    jobId: job.id,
    scanId,
    userId,
    requestId: job.data.meta?.requestId,
  });
  jobLogger.info('Processing barcode detection job');
  try {
    const detection = await detectBarcode(imagePath, jobLogger);
    if (detection.detected && detection.upc_code) {
      // Persist the decoded code onto the scan row before reporting success.
      await upcRepo.updateScanWithDetectedCode(
        scanId,
        detection.upc_code,
        detection.confidence,
        jobLogger,
      );
      jobLogger.info(
        { upcCode: detection.upc_code, confidence: detection.confidence },
        'Barcode detected and scan record updated',
      );
    } else {
      jobLogger.info('No barcode detected in image');
    }
    return detection;
  } catch (error) {
    jobLogger.error({ err: error }, 'Barcode detection job failed');
    return {
      detected: false,
      upc_code: null,
      confidence: null,
      format: null,
      error: error instanceof Error ? error.message : String(error),
    };
  }
};
/**
 * Detects every barcode present in an image.
 * Useful for receipts or product lists with multiple items.
 *
 * Unlike detectBarcode, all zxing results are returned, each mapped into a
 * BarcodeDetectionResult; any failure yields an empty array.
 *
 * @param imagePath Path to the image file
 * @param logger Pino logger instance
 * @returns Array of detection results (empty when none found or on error)
 */
export const detectMultipleBarcodes = async (
  imagePath: string,
  logger: Logger,
): Promise<BarcodeDetectionResult[]> => {
  const log = logger.child({ imagePath });
  log.info('Starting multiple barcode detection');
  try {
    const { readBarcodesFromImageData } = await import('zxing-wasm/reader');

    // Decode the file into raw RGBA pixels for the zxing reader.
    const fileBytes = await fs.readFile(imagePath);
    const { data, info } = await sharp(fileBytes)
      .ensureAlpha()
      .raw()
      .toBuffer({ resolveWithObject: true });
    const frame = {
      data: new Uint8ClampedArray(data),
      width: info.width,
      height: info.height,
      colorSpace: 'srgb' as const,
    };

    const hits = await readBarcodesFromImageData(frame as ImageData, {
      tryHarder: true,
      tryRotate: true,
      tryInvert: true,
      formats: ['UPC-A', 'UPC-E', 'EAN-13', 'EAN-8', 'Code128', 'Code39'],
    });
    log.info({ count: hits.length }, 'Multiple barcode detection complete');

    return hits.map((hit) => ({
      detected: true,
      upc_code: hit.text,
      // zxing provides no per-result confidence; reuse the fixed estimate
      // used by single-barcode detection.
      confidence: 0.95,
      format: formatMap[hit.format] || 'unknown',
      error: null,
    }));
  } catch (error) {
    log.error({ err: error }, 'Multiple barcode detection failed');
    return [];
  }
};
/**
 * Enhances an image for better barcode detection.
 * Applies grayscale conversion, contrast normalization and sharpening,
 * writing the result to a sibling file with an '-enhanced' suffix.
 *
 * Fix: the previous extension regex (`/(\.[^.]+)$/`) allowed `/` inside the
 * matched "extension", so a dotted directory name with an extension-less
 * file (e.g. '/photos.v2/scan') produced a path in a nonexistent directory;
 * and a truly extension-less path yielded enhancedPath === imagePath, asking
 * sharp to overwrite its own input. The extension is now confined to the
 * final path segment, with an append fallback when there is none.
 *
 * @param imagePath Path to the source image
 * @param logger Pino logger instance
 * @returns Path to enhanced image (or original if enhancement fails)
 */
export const enhanceImageForDetection = async (
  imagePath: string,
  logger: Logger,
): Promise<string> => {
  const detectionLogger = logger.child({ imagePath });
  try {
    // Extension must not contain a dot or a path separator; otherwise
    // append the suffix so we never write over the input file.
    const extensionPattern = /(\.[^./\\]+)$/;
    const enhancedPath = extensionPattern.test(imagePath)
      ? imagePath.replace(extensionPattern, '-enhanced$1')
      : `${imagePath}-enhanced`;
    await sharp(imagePath)
      .grayscale()
      .normalize() // Improve contrast
      .sharpen() // Enhance edges
      .toFile(enhancedPath);
    detectionLogger.debug({ enhancedPath }, 'Image enhanced for barcode detection');
    return enhancedPath;
  } catch (error) {
    detectionLogger.warn({ err: error }, 'Image enhancement failed, using original');
    return imagePath;
  }
};

File diff suppressed because it is too large Load Diff

1116
src/services/db/expiry.db.ts Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -19,13 +19,19 @@ vi.mock('./gamification.db', () => ({
GamificationRepository: class GamificationRepository {},
}));
vi.mock('./admin.db', () => ({ AdminRepository: class AdminRepository {} }));
vi.mock('./upc.db', () => ({ UpcRepository: class UpcRepository {} }));
vi.mock('./expiry.db', () => ({ ExpiryRepository: class ExpiryRepository {} }));
vi.mock('./receipt.db', () => ({ ReceiptRepository: class ReceiptRepository {} }));
// These modules export an already-instantiated object, so we mock the object.
vi.mock('./reaction.db', () => ({ reactionRepo: {} }));
vi.mock('./conversion.db', () => ({ conversionRepo: {} }));
// Mock the re-exported function.
vi.mock('./connection.db', () => ({ withTransaction: vi.fn() }));
// Mock the re-exported function and getPool.
vi.mock('./connection.db', () => ({
withTransaction: vi.fn(),
getPool: vi.fn(() => ({ query: vi.fn() })),
}));
// We must un-mock the file we are testing so we get the actual implementation.
vi.unmock('./index.db');
@@ -44,6 +50,9 @@ import { NotificationRepository } from './notification.db';
import { BudgetRepository } from './budget.db';
import { GamificationRepository } from './gamification.db';
import { AdminRepository } from './admin.db';
import { UpcRepository } from './upc.db';
import { ExpiryRepository } from './expiry.db';
import { ReceiptRepository } from './receipt.db';
describe('DB Index', () => {
it('should instantiate and export all repositories and functions', () => {
@@ -57,8 +66,11 @@ describe('DB Index', () => {
expect(db.budgetRepo).toBeInstanceOf(BudgetRepository);
expect(db.gamificationRepo).toBeInstanceOf(GamificationRepository);
expect(db.adminRepo).toBeInstanceOf(AdminRepository);
expect(db.upcRepo).toBeInstanceOf(UpcRepository);
expect(db.expiryRepo).toBeInstanceOf(ExpiryRepository);
expect(db.receiptRepo).toBeInstanceOf(ReceiptRepository);
expect(db.reactionRepo).toBeDefined();
expect(db.conversionRepo).toBeDefined();
expect(db.withTransaction).toBeDefined();
});
});
});

View File

@@ -12,6 +12,9 @@ import { GamificationRepository } from './gamification.db';
import { AdminRepository } from './admin.db';
import { reactionRepo } from './reaction.db';
import { conversionRepo } from './conversion.db';
import { UpcRepository } from './upc.db';
import { ExpiryRepository } from './expiry.db';
import { ReceiptRepository } from './receipt.db';
const userRepo = new UserRepository();
const flyerRepo = new FlyerRepository();
@@ -23,6 +26,9 @@ const notificationRepo = new NotificationRepository();
const budgetRepo = new BudgetRepository();
const gamificationRepo = new GamificationRepository();
const adminRepo = new AdminRepository();
const upcRepo = new UpcRepository();
const expiryRepo = new ExpiryRepository();
const receiptRepo = new ReceiptRepository();
export {
userRepo,
@@ -37,5 +43,8 @@ export {
adminRepo,
reactionRepo,
conversionRepo,
upcRepo,
expiryRepo,
receiptRepo,
withTransaction,
};

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,518 @@
// src/services/db/upc.db.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Logger } from 'pino';
import { createMockLogger } from '../../tests/utils/mockLogger';
import { UpcRepository } from './upc.db';
import { NotFoundError } from './errors.db';
// Create mock pool
const mockQuery = vi.fn();
const mockPool = {
query: mockQuery,
};
describe('UpcRepository', () => {
let repo: UpcRepository;
let mockLogger: Logger;
beforeEach(() => {
vi.clearAllMocks();
mockLogger = createMockLogger();
repo = new UpcRepository(mockPool);
});
afterEach(() => {
vi.resetAllMocks();
});
describe('findProductByUpc', () => {
it('should return product when found', async () => {
mockQuery.mockResolvedValueOnce({
rowCount: 1,
rows: [
{
product_id: 1,
name: 'Test Product',
description: 'A test product',
size: '500g',
upc_code: '012345678905',
master_item_id: 5,
brand_name: 'Test Brand',
category_name: 'Snacks',
image_url: null,
},
],
});
const result = await repo.findProductByUpc('012345678905', mockLogger);
expect(result).not.toBeNull();
expect(result?.product_id).toBe(1);
expect(result?.name).toBe('Test Product');
expect(result?.brand).toBe('Test Brand');
expect(result?.category).toBe('Snacks');
expect(mockQuery).toHaveBeenCalledWith(expect.stringContaining('WHERE p.upc_code = $1'), [
'012345678905',
]);
});
it('should return null when product not found', async () => {
mockQuery.mockResolvedValueOnce({
rowCount: 0,
rows: [],
});
const result = await repo.findProductByUpc('999999999999', mockLogger);
expect(result).toBeNull();
});
it('should throw on database error', async () => {
mockQuery.mockRejectedValueOnce(new Error('DB connection failed'));
await expect(repo.findProductByUpc('012345678905', mockLogger)).rejects.toThrow();
});
});
describe('linkUpcToProduct', () => {
it('should link UPC to product successfully', async () => {
mockQuery.mockResolvedValueOnce({
rowCount: 1,
rows: [
{
product_id: 1,
name: 'Test Product',
brand_id: 1,
category_id: 1,
description: null,
size: null,
upc_code: '012345678905',
master_item_id: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
},
],
});
const result = await repo.linkUpcToProduct(1, '012345678905', mockLogger);
expect(result.upc_code).toBe('012345678905');
expect(mockQuery).toHaveBeenCalledWith(
expect.stringContaining('UPDATE public.products SET upc_code = $1'),
['012345678905', 1],
);
});
it('should throw NotFoundError when product not found', async () => {
mockQuery.mockResolvedValueOnce({
rowCount: 0,
rows: [],
});
await expect(repo.linkUpcToProduct(999, '012345678905', mockLogger)).rejects.toThrow(
NotFoundError,
);
});
});
describe('recordScan', () => {
it('should record a scan successfully', async () => {
const scanRecord = {
scan_id: 1,
user_id: 'user-1',
upc_code: '012345678905',
product_id: 1,
scan_source: 'manual_entry',
scan_confidence: 1.0,
raw_image_path: null,
lookup_successful: true,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
mockQuery.mockResolvedValueOnce({
rows: [scanRecord],
});
const result = await repo.recordScan('user-1', '012345678905', 'manual_entry', mockLogger, {
productId: 1,
scanConfidence: 1.0,
lookupSuccessful: true,
});
expect(result.scan_id).toBe(1);
expect(result.upc_code).toBe('012345678905');
expect(result.lookup_successful).toBe(true);
});
it('should record scan with default options', async () => {
const scanRecord = {
scan_id: 2,
user_id: 'user-1',
upc_code: '012345678905',
product_id: null,
scan_source: 'image_upload',
scan_confidence: null,
raw_image_path: null,
lookup_successful: false,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
mockQuery.mockResolvedValueOnce({
rows: [scanRecord],
});
const result = await repo.recordScan('user-1', '012345678905', 'image_upload', mockLogger);
expect(result.product_id).toBeNull();
expect(result.lookup_successful).toBe(false);
});
});
describe('getScanHistory', () => {
it('should return paginated scan history', async () => {
// Count query
mockQuery.mockResolvedValueOnce({
rows: [{ count: '10' }],
});
// Data query
mockQuery.mockResolvedValueOnce({
rows: [
{
scan_id: 1,
user_id: 'user-1',
upc_code: '012345678905',
product_id: 1,
scan_source: 'manual_entry',
scan_confidence: 1.0,
raw_image_path: null,
lookup_successful: true,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
},
],
});
const result = await repo.getScanHistory(
{ user_id: 'user-1', limit: 10, offset: 0 },
mockLogger,
);
expect(result.total).toBe(10);
expect(result.scans).toHaveLength(1);
});
it('should filter by lookup_successful', async () => {
mockQuery.mockResolvedValueOnce({ rows: [{ count: '5' }] });
mockQuery.mockResolvedValueOnce({ rows: [] });
await repo.getScanHistory({ user_id: 'user-1', lookup_successful: true }, mockLogger);
expect(mockQuery).toHaveBeenCalledWith(
expect.stringContaining('lookup_successful = $2'),
expect.any(Array),
);
});
it('should filter by scan_source', async () => {
mockQuery.mockResolvedValueOnce({ rows: [{ count: '3' }] });
mockQuery.mockResolvedValueOnce({ rows: [] });
await repo.getScanHistory({ user_id: 'user-1', scan_source: 'image_upload' }, mockLogger);
expect(mockQuery).toHaveBeenCalledWith(
expect.stringContaining('scan_source = $2'),
expect.any(Array),
);
});
it('should filter by date range', async () => {
mockQuery.mockResolvedValueOnce({ rows: [{ count: '2' }] });
mockQuery.mockResolvedValueOnce({ rows: [] });
await repo.getScanHistory(
{
user_id: 'user-1',
from_date: '2024-01-01',
to_date: '2024-01-31',
},
mockLogger,
);
expect(mockQuery).toHaveBeenCalledWith(
expect.stringContaining('created_at >= $2'),
expect.any(Array),
);
});
});
describe('getScanById', () => {
it('should return scan record when found', async () => {
const scanRecord = {
scan_id: 1,
user_id: 'user-1',
upc_code: '012345678905',
product_id: 1,
scan_source: 'manual_entry',
scan_confidence: 1.0,
raw_image_path: null,
lookup_successful: true,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
mockQuery.mockResolvedValueOnce({
rowCount: 1,
rows: [scanRecord],
});
const result = await repo.getScanById(1, 'user-1', mockLogger);
expect(result.scan_id).toBe(1);
expect(result.user_id).toBe('user-1');
});
it('should throw NotFoundError when scan not found', async () => {
mockQuery.mockResolvedValueOnce({
rowCount: 0,
rows: [],
});
await expect(repo.getScanById(999, 'user-1', mockLogger)).rejects.toThrow(NotFoundError);
});
});
describe('findExternalLookup', () => {
it('should return cached lookup when found and not expired', async () => {
const lookupRecord = {
lookup_id: 1,
upc_code: '012345678905',
product_name: 'External Product',
brand_name: 'External Brand',
category: 'Snacks',
description: null,
image_url: null,
external_source: 'openfoodfacts',
lookup_data: null,
lookup_successful: true,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
mockQuery.mockResolvedValueOnce({
rowCount: 1,
rows: [lookupRecord],
});
const result = await repo.findExternalLookup('012345678905', 168, mockLogger);
expect(result).not.toBeNull();
expect(result?.product_name).toBe('External Product');
});
it('should return null when lookup not cached', async () => {
mockQuery.mockResolvedValueOnce({
rowCount: 0,
rows: [],
});
const result = await repo.findExternalLookup('999999999999', 168, mockLogger);
expect(result).toBeNull();
});
});
describe('upsertExternalLookup', () => {
  it('should insert new external lookup', async () => {
    // Row shape the repository's INSERT ... RETURNING * resolves with.
    const lookupRecord = {
      lookup_id: 1,
      upc_code: '012345678905',
      product_name: 'New Product',
      brand_name: 'New Brand',
      category: 'Food',
      description: 'A description',
      image_url: 'https://example.com/image.jpg',
      external_source: 'openfoodfacts',
      lookup_data: null,
      lookup_successful: true,
      created_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };
    mockQuery.mockResolvedValueOnce({
      rows: [lookupRecord],
    });
    const result = await repo.upsertExternalLookup(
      '012345678905',
      'openfoodfacts',
      true,
      mockLogger,
      {
        productName: 'New Product',
        brandName: 'New Brand',
        category: 'Food',
        description: 'A description',
        imageUrl: 'https://example.com/image.jpg',
      },
    );
    expect(result.product_name).toBe('New Product');
    // The statement must be a true UPSERT keyed on upc_code.
    expect(mockQuery).toHaveBeenCalledWith(
      expect.stringContaining('ON CONFLICT (upc_code) DO UPDATE'),
      expect.any(Array),
    );
  });
  it('should update existing external lookup on conflict', async () => {
    // Simulates the ON CONFLICT path: same UPC, new source and product data.
    const updatedRecord = {
      lookup_id: 1,
      upc_code: '012345678905',
      product_name: 'Updated Product',
      brand_name: 'Updated Brand',
      category: null,
      description: null,
      image_url: null,
      external_source: 'upcitemdb',
      lookup_data: null,
      lookup_successful: true,
      created_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };
    mockQuery.mockResolvedValueOnce({
      rows: [updatedRecord],
    });
    const result = await repo.upsertExternalLookup(
      '012345678905',
      'upcitemdb',
      true,
      mockLogger,
      {
        productName: 'Updated Product',
        brandName: 'Updated Brand',
      },
    );
    expect(result.product_name).toBe('Updated Product');
    expect(result.external_source).toBe('upcitemdb');
  });
});
describe('getExternalLookupByUpc', () => {
  it('should return lookup without cache expiry check', async () => {
    const row = {
      lookup_id: 1,
      upc_code: '012345678905',
      product_name: 'Product',
      brand_name: null,
      category: null,
      description: null,
      image_url: null,
      external_source: 'openfoodfacts',
      lookup_data: null,
      lookup_successful: true,
      created_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };
    mockQuery.mockResolvedValueOnce({ rowCount: 1, rows: [row] });

    const found = await repo.getExternalLookupByUpc('012345678905', mockLogger);

    expect(found?.product_name).toBe('Product');
    // The raw-by-UPC query must not apply an age filter, unlike findExternalLookup.
    expect(mockQuery).toHaveBeenCalledWith(expect.not.stringContaining('interval'), [
      '012345678905',
    ]);
  });

  it('should return null when not found', async () => {
    mockQuery.mockResolvedValueOnce({ rowCount: 0, rows: [] });

    const found = await repo.getExternalLookupByUpc('999999999999', mockLogger);

    expect(found).toBeNull();
  });
});
describe('deleteOldExternalLookups', () => {
  it('should delete old lookups and return count', async () => {
    mockQuery.mockResolvedValueOnce({ rowCount: 5 });

    const removed = await repo.deleteOldExternalLookups(30, mockLogger);

    expect(removed).toBe(5);
    // The cleanup statement is day-based and parameterized with the age cutoff.
    expect(mockQuery).toHaveBeenCalledWith(expect.stringContaining("interval '1 day'"), [30]);
  });

  it('should return 0 when no records deleted', async () => {
    mockQuery.mockResolvedValueOnce({ rowCount: 0 });

    const removed = await repo.deleteOldExternalLookups(30, mockLogger);

    expect(removed).toBe(0);
  });
});
describe('getUserScanStats', () => {
  it('should return user scan statistics', async () => {
    // Postgres returns COUNT(*) aggregates as strings; the repo must coerce them.
    const statsRow = {
      total_scans: '100',
      successful_lookups: '80',
      unique_products: '50',
      scans_today: '5',
      scans_this_week: '25',
    };
    mockQuery.mockResolvedValueOnce({ rows: [statsRow] });

    const stats = await repo.getUserScanStats('user-1', mockLogger);

    expect(stats.total_scans).toBe(100);
    expect(stats.successful_lookups).toBe(80);
    expect(stats.unique_products).toBe(50);
    expect(stats.scans_today).toBe(5);
    expect(stats.scans_this_week).toBe(25);
  });
});
describe('updateScanWithDetectedCode', () => {
  it('should update scan with detected code', async () => {
    mockQuery.mockResolvedValueOnce({ rowCount: 1 });

    await repo.updateScanWithDetectedCode(1, '012345678905', 0.95, mockLogger);

    // Verify the UPDATE targets the scan-history table with positional params.
    expect(mockQuery).toHaveBeenCalledWith(
      expect.stringContaining('UPDATE public.upc_scan_history'),
      [1, '012345678905', 0.95],
    );
  });

  it('should throw NotFoundError when scan not found', async () => {
    mockQuery.mockResolvedValueOnce({ rowCount: 0 });

    await expect(
      repo.updateScanWithDetectedCode(999, '012345678905', 0.95, mockLogger),
    ).rejects.toThrow(NotFoundError);
  });
});
});

556
src/services/db/upc.db.ts Normal file
View File

@@ -0,0 +1,556 @@
// src/services/db/upc.db.ts
import type { Pool, PoolClient } from 'pg';
import { getPool } from './connection.db';
import { NotFoundError, handleDbError } from './errors.db';
import type { Logger } from 'pino';
import type {
UpcScanSource,
UpcExternalSource,
UpcScanHistoryRecord,
UpcExternalLookupRecord,
UpcProductMatch,
UpcScanHistoryQueryOptions,
} from '../../types/upc';
/**
 * Database row type for products table with UPC-relevant fields.
 * Mirrors the columns read from public.products by the queries in this file.
 */
interface ProductRow {
  product_id: number;
  name: string;
  brand_id: number | null; // FK to public.brands (see join in findProductByUpc); null when unknown
  category_id: number | null; // presumably FK to public.categories — queries here resolve category via master_grocery_items instead; verify
  description: string | null;
  size: string | null; // free-form package size text — TODO confirm expected format
  upc_code: string | null; // null until a UPC is linked via linkUpcToProduct
  master_item_id: number | null; // FK to public.master_grocery_items (see join in findProductByUpc)
  created_at: string;
  updated_at: string; // bumped by linkUpcToProduct's UPDATE ... updated_at = NOW()
}
/**
 * Extended product row with joined brand and category names.
 * Produced by findProductByUpc's SELECT, which LEFT JOINs public.brands and
 * (via master_grocery_items) public.categories; that query currently selects
 * image_url as a NULL literal.
 */
interface ProductWithDetailsRow extends ProductRow {
  brand_name: string | null; // b.name from the LEFT JOIN; null when no brand
  category_name: string | null; // c.name resolved through the master item; null when unmatched
  image_url: string | null; // always null today (SELECT NULL AS image_url)
}
/**
 * Repository for UPC scanning related database operations.
 * Handles scan history tracking, external lookup caching, and product UPC matching.
 */
export class UpcRepository {
  private db: Pick<Pool | PoolClient, 'query'>;

  /**
   * @param db Anything exposing pg's `query` — the shared Pool by default, or a
   *   PoolClient when the caller needs these operations inside a transaction.
   */
  constructor(db: Pick<Pool | PoolClient, 'query'> = getPool()) {
    this.db = db;
  }

  // ============================================================================
  // PRODUCT UPC LOOKUP
  // ============================================================================

  /**
   * Finds a product by its UPC code.
   * Returns null if no product is found with the given UPC.
   *
   * Category is resolved through master_grocery_items (products carry a
   * master_item_id), not directly from products.category_id.
   */
  async findProductByUpc(upcCode: string, logger: Logger): Promise<UpcProductMatch | null> {
    try {
      const query = `
        SELECT
          p.product_id,
          p.name,
          p.description,
          p.size,
          p.upc_code,
          p.master_item_id,
          b.name AS brand_name,
          c.name AS category_name,
          NULL AS image_url
        FROM public.products p
        LEFT JOIN public.brands b ON p.brand_id = b.brand_id
        LEFT JOIN public.master_grocery_items mgi ON p.master_item_id = mgi.master_grocery_item_id
        LEFT JOIN public.categories c ON mgi.category_id = c.category_id
        WHERE p.upc_code = $1
      `;
      const res = await this.db.query<ProductWithDetailsRow>(query, [upcCode]);
      if (res.rowCount === 0) {
        return null;
      }
      const row = res.rows[0];
      return {
        product_id: row.product_id,
        name: row.name,
        brand: row.brand_name,
        category: row.category_name,
        description: row.description,
        size: row.size,
        // Fall back to the query input so callers always get the scanned code back.
        upc_code: row.upc_code ?? upcCode,
        image_url: row.image_url,
        master_item_id: row.master_item_id,
      };
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in findProductByUpc',
        { upcCode },
        {
          defaultMessage: 'Failed to look up product by UPC code.',
        },
      );
    }
  }

  /**
   * Links a UPC code to an existing product.
   * Updates the product's upc_code field.
   *
   * @throws NotFoundError when the product does not exist.
   */
  async linkUpcToProduct(productId: number, upcCode: string, logger: Logger): Promise<ProductRow> {
    try {
      const res = await this.db.query<ProductRow>(
        `UPDATE public.products SET upc_code = $1, updated_at = NOW() WHERE product_id = $2 RETURNING *`,
        [upcCode, productId],
      );
      if (res.rowCount === 0) {
        throw new NotFoundError('Product not found.');
      }
      return res.rows[0];
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in linkUpcToProduct',
        { productId, upcCode },
        {
          uniqueMessage: 'This UPC code is already linked to another product.',
          fkMessage: 'The specified product does not exist.',
          defaultMessage: 'Failed to link UPC code to product.',
        },
      );
    }
  }

  // ============================================================================
  // SCAN HISTORY
  // ============================================================================

  /**
   * Records a UPC scan in the history table.
   * Creates an audit trail of all scans performed by users.
   */
  async recordScan(
    userId: string,
    upcCode: string,
    scanSource: UpcScanSource,
    logger: Logger,
    options: {
      productId?: number | null;
      scanConfidence?: number | null;
      rawImagePath?: string | null;
      lookupSuccessful?: boolean;
    } = {},
  ): Promise<UpcScanHistoryRecord> {
    const {
      productId = null,
      scanConfidence = null,
      rawImagePath = null,
      lookupSuccessful = false,
    } = options;
    try {
      const res = await this.db.query<UpcScanHistoryRecord>(
        `INSERT INTO public.upc_scan_history
           (user_id, upc_code, product_id, scan_source, scan_confidence, raw_image_path, lookup_successful)
         VALUES ($1, $2, $3, $4, $5, $6, $7)
         RETURNING *`,
        [userId, upcCode, productId, scanSource, scanConfidence, rawImagePath, lookupSuccessful],
      );
      return res.rows[0];
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in recordScan',
        { userId, upcCode, scanSource, productId },
        {
          fkMessage: 'The specified user or product does not exist.',
          checkMessage: 'Invalid UPC code format or scan source.',
          defaultMessage: 'Failed to record UPC scan.',
        },
      );
    }
  }

  /**
   * Retrieves the scan history for a user with optional filtering.
   * Returns one page of scans plus the unpaginated total for the same filters.
   */
  async getScanHistory(
    options: UpcScanHistoryQueryOptions,
    logger: Logger,
  ): Promise<{ scans: UpcScanHistoryRecord[]; total: number }> {
    const {
      user_id,
      limit = 50,
      offset = 0,
      lookup_successful,
      scan_source,
      from_date,
      to_date,
    } = options;
    try {
      // Build dynamic WHERE clause; paramIndex tracks the next positional
      // placeholder so the count and data queries share the same filter params.
      const conditions: string[] = ['user_id = $1'];
      const params: (string | number | boolean)[] = [user_id];
      let paramIndex = 2;
      if (lookup_successful !== undefined) {
        conditions.push(`lookup_successful = $${paramIndex++}`);
        params.push(lookup_successful);
      }
      if (scan_source) {
        conditions.push(`scan_source = $${paramIndex++}`);
        params.push(scan_source);
      }
      if (from_date) {
        conditions.push(`created_at >= $${paramIndex++}`);
        params.push(from_date);
      }
      if (to_date) {
        conditions.push(`created_at <= $${paramIndex++}`);
        params.push(to_date);
      }
      const whereClause = conditions.join(' AND ');
      // Get total count
      const countRes = await this.db.query<{ count: string }>(
        `SELECT COUNT(*) FROM public.upc_scan_history WHERE ${whereClause}`,
        params,
      );
      const total = parseInt(countRes.rows[0].count, 10);
      // Get paginated results; LIMIT/OFFSET take the next two placeholders.
      const dataParams = [...params, limit, offset];
      const dataRes = await this.db.query<UpcScanHistoryRecord>(
        `SELECT * FROM public.upc_scan_history
         WHERE ${whereClause}
         ORDER BY created_at DESC
         LIMIT $${paramIndex++} OFFSET $${paramIndex}`,
        dataParams,
      );
      return { scans: dataRes.rows, total };
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in getScanHistory',
        { options },
        {
          defaultMessage: 'Failed to retrieve scan history.',
        },
      );
    }
  }

  /**
   * Gets a single scan record by ID.
   * Scoped to the owning user so one user cannot read another's scans.
   *
   * @throws NotFoundError when no matching scan exists for this user.
   */
  async getScanById(scanId: number, userId: string, logger: Logger): Promise<UpcScanHistoryRecord> {
    try {
      const res = await this.db.query<UpcScanHistoryRecord>(
        `SELECT * FROM public.upc_scan_history WHERE scan_id = $1 AND user_id = $2`,
        [scanId, userId],
      );
      if (res.rowCount === 0) {
        throw new NotFoundError('Scan record not found.');
      }
      return res.rows[0];
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in getScanById',
        { scanId, userId },
        {
          defaultMessage: 'Failed to retrieve scan record.',
        },
      );
    }
  }

  // ============================================================================
  // EXTERNAL LOOKUP CACHE
  // ============================================================================

  /**
   * Finds a cached external lookup result for a UPC code.
   * Returns null if not cached or cache is expired.
   */
  async findExternalLookup(
    upcCode: string,
    maxAgeHours: number,
    logger: Logger,
  ): Promise<UpcExternalLookupRecord | null> {
    try {
      // FIX: freshness is measured from updated_at, not created_at.
      // upsertExternalLookup's ON CONFLICT branch bumps updated_at (but never
      // created_at) when an entry is refreshed, and deleteOldExternalLookups
      // prunes by updated_at. Checking created_at here treated a just-refreshed
      // entry as expired, forcing needless external API calls.
      const res = await this.db.query<UpcExternalLookupRecord>(
        `SELECT * FROM public.upc_external_lookups
         WHERE upc_code = $1
           AND updated_at > NOW() - ($2 * interval '1 hour')`,
        [upcCode, maxAgeHours],
      );
      if (res.rowCount === 0) {
        return null;
      }
      return res.rows[0];
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in findExternalLookup',
        { upcCode, maxAgeHours },
        {
          defaultMessage: 'Failed to find cached external lookup.',
        },
      );
    }
  }

  /**
   * Creates or updates a cached external lookup result.
   * Uses UPSERT to handle both new and existing records.
   */
  async upsertExternalLookup(
    upcCode: string,
    externalSource: UpcExternalSource,
    lookupSuccessful: boolean,
    logger: Logger,
    data: {
      productName?: string | null;
      brandName?: string | null;
      category?: string | null;
      description?: string | null;
      imageUrl?: string | null;
      lookupData?: Record<string, unknown> | null;
    } = {},
  ): Promise<UpcExternalLookupRecord> {
    const {
      productName = null,
      brandName = null,
      category = null,
      description = null,
      imageUrl = null,
      lookupData = null,
    } = data;
    try {
      const res = await this.db.query<UpcExternalLookupRecord>(
        `INSERT INTO public.upc_external_lookups
           (upc_code, product_name, brand_name, category, description, image_url, external_source, lookup_data, lookup_successful)
         VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
         ON CONFLICT (upc_code) DO UPDATE SET
           product_name = EXCLUDED.product_name,
           brand_name = EXCLUDED.brand_name,
           category = EXCLUDED.category,
           description = EXCLUDED.description,
           image_url = EXCLUDED.image_url,
           external_source = EXCLUDED.external_source,
           lookup_data = EXCLUDED.lookup_data,
           lookup_successful = EXCLUDED.lookup_successful,
           updated_at = NOW()
         RETURNING *`,
        [
          upcCode,
          productName,
          brandName,
          category,
          description,
          imageUrl,
          externalSource,
          // lookup_data is a JSON(B) column; serialize explicitly for the driver.
          lookupData ? JSON.stringify(lookupData) : null,
          lookupSuccessful,
        ],
      );
      return res.rows[0];
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in upsertExternalLookup',
        { upcCode, externalSource, lookupSuccessful },
        {
          checkMessage: 'Invalid UPC code format or external source.',
          defaultMessage: 'Failed to cache external lookup result.',
        },
      );
    }
  }

  /**
   * Gets an external lookup record by UPC code (without cache expiry check).
   */
  async getExternalLookupByUpc(
    upcCode: string,
    logger: Logger,
  ): Promise<UpcExternalLookupRecord | null> {
    try {
      const res = await this.db.query<UpcExternalLookupRecord>(
        `SELECT * FROM public.upc_external_lookups WHERE upc_code = $1`,
        [upcCode],
      );
      if (res.rowCount === 0) {
        return null;
      }
      return res.rows[0];
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in getExternalLookupByUpc',
        { upcCode },
        {
          defaultMessage: 'Failed to get external lookup record.',
        },
      );
    }
  }

  /**
   * Deletes old external lookup cache entries.
   * Used for periodic cleanup. Ages off updated_at, matching findExternalLookup.
   *
   * @returns the number of rows removed.
   */
  async deleteOldExternalLookups(daysOld: number, logger: Logger): Promise<number> {
    try {
      const res = await this.db.query(
        `DELETE FROM public.upc_external_lookups WHERE updated_at < NOW() - ($1 * interval '1 day')`,
        [daysOld],
      );
      // pg types rowCount as number | null; normalize for callers.
      return res.rowCount ?? 0;
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in deleteOldExternalLookups',
        { daysOld },
        {
          defaultMessage: 'Failed to delete old external lookups.',
        },
      );
    }
  }

  // ============================================================================
  // STATISTICS
  // ============================================================================

  /**
   * Gets scan statistics for a user.
   * Aggregates come back from Postgres as strings and are coerced to numbers.
   */
  async getUserScanStats(
    userId: string,
    logger: Logger,
  ): Promise<{
    total_scans: number;
    successful_lookups: number;
    unique_products: number;
    scans_today: number;
    scans_this_week: number;
  }> {
    try {
      const res = await this.db.query<{
        total_scans: string;
        successful_lookups: string;
        unique_products: string;
        scans_today: string;
        scans_this_week: string;
      }>(
        `SELECT
           COUNT(*) AS total_scans,
           COUNT(*) FILTER (WHERE lookup_successful = true) AS successful_lookups,
           COUNT(DISTINCT product_id) FILTER (WHERE product_id IS NOT NULL) AS unique_products,
           COUNT(*) FILTER (WHERE created_at >= CURRENT_DATE) AS scans_today,
           COUNT(*) FILTER (WHERE created_at >= CURRENT_DATE - interval '7 days') AS scans_this_week
         FROM public.upc_scan_history
         WHERE user_id = $1`,
        [userId],
      );
      const row = res.rows[0];
      return {
        total_scans: parseInt(row.total_scans, 10),
        successful_lookups: parseInt(row.successful_lookups, 10),
        unique_products: parseInt(row.unique_products, 10),
        scans_today: parseInt(row.scans_today, 10),
        scans_this_week: parseInt(row.scans_this_week, 10),
      };
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in getUserScanStats',
        { userId },
        {
          defaultMessage: 'Failed to get scan statistics.',
        },
      );
    }
  }

  /**
   * Updates a scan record with the detected UPC code from image processing.
   * Used by the barcode detection worker after processing an uploaded image.
   *
   * @throws NotFoundError when the scan record does not exist.
   */
  async updateScanWithDetectedCode(
    scanId: number,
    upcCode: string,
    confidence: number | null,
    logger: Logger,
  ): Promise<void> {
    try {
      const query = `
        UPDATE public.upc_scan_history
        SET
          upc_code = $2,
          scan_confidence = $3,
          updated_at = NOW()
        WHERE scan_id = $1
      `;
      const res = await this.db.query(query, [scanId, upcCode, confidence]);
      if (res.rowCount === 0) {
        throw new NotFoundError('Scan record not found.');
      }
      logger.info({ scanId, upcCode, confidence }, 'Updated scan with detected code');
    } catch (error) {
      handleDbError(
        error,
        logger,
        'Database error in updateScanWithDetectedCode',
        { scanId, upcCode },
        {
          defaultMessage: 'Failed to update scan with detected code.',
        },
      );
    }
  }
}

View File

@@ -0,0 +1,933 @@
// src/services/expiryService.server.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Logger } from 'pino';
import type { Job } from 'bullmq';
import type { ExpiryAlertJobData } from '../types/job-data';
import { createMockLogger } from '../tests/utils/mockLogger';
import type {
InventorySource,
StorageLocation,
ExpiryStatus,
ExpiryRangeSource,
AlertMethod,
UserInventoryItem,
ReceiptStatus,
ReceiptItemStatus,
ExpiryAlertLogRecord,
ExpiryAlertType,
} from '../types/expiry';
// Mock dependencies
// Repositories are replaced wholesale so each test can script database results
// without a real connection pool; every repo method becomes an independent vi.fn().
vi.mock('./db/index.db', () => ({
  expiryRepo: {
    addInventoryItem: vi.fn(),
    updateInventoryItem: vi.fn(),
    markAsConsumed: vi.fn(),
    deleteInventoryItem: vi.fn(),
    getInventoryItemById: vi.fn(),
    getInventory: vi.fn(),
    getExpiringItems: vi.fn(),
    getExpiredItems: vi.fn(),
    getExpiryRangeForItem: vi.fn(),
    getExpiryRanges: vi.fn(),
    addExpiryRange: vi.fn(),
    getUserAlertSettings: vi.fn(),
    upsertAlertSettings: vi.fn(),
    getUsersWithExpiringItems: vi.fn(),
    logAlert: vi.fn(),
    markAlertSent: vi.fn(),
    getRecipesForExpiringItems: vi.fn(),
  },
  receiptRepo: {
    getReceiptById: vi.fn(),
    getReceiptItems: vi.fn(),
    updateReceiptItem: vi.fn(),
  },
}));
// Outbound email is stubbed so alert tests never hit a mail transport.
vi.mock('./emailService.server', () => ({
  sendEmail: vi.fn(),
}));
// child() returns the same mock logger so service-created child loggers stay observable.
vi.mock('./logger.server', () => ({
  logger: {
    child: vi.fn().mockReturnThis(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
  },
}));
// Import after mocks are set up
import {
addInventoryItem,
updateInventoryItem,
markItemConsumed,
deleteInventoryItem,
getInventoryItemById,
getInventory,
getExpiringItemsGrouped,
getExpiringItems,
getExpiredItems,
calculateExpiryDate,
getExpiryRanges,
addExpiryRange,
getAlertSettings,
updateAlertSettings,
processExpiryAlerts,
addItemsFromReceipt,
getRecipeSuggestionsForExpiringItems,
processExpiryAlertJob,
} from './expiryService.server';
import { expiryRepo, receiptRepo } from './db/index.db';
import * as emailService from './emailService.server';
/**
 * Builds a fully-populated ExpiryAlertLogRecord fixture.
 * Fields supplied in `overrides` replace the corresponding defaults.
 */
function createMockAlertLogRecord(
  overrides: Partial<ExpiryAlertLogRecord> = {},
): ExpiryAlertLogRecord {
  const defaults: ExpiryAlertLogRecord = {
    alert_log_id: 1,
    user_id: 'user-1',
    pantry_item_id: null,
    alert_type: 'expiring_soon' as ExpiryAlertType,
    alert_method: 'email' as AlertMethod,
    item_name: 'Test Item',
    expiry_date: null,
    days_until_expiry: null,
    sent_at: new Date().toISOString(),
  };
  return { ...defaults, ...overrides };
}
describe('expiryService.server', () => {
let mockLogger: Logger;
beforeEach(() => {
vi.clearAllMocks();
mockLogger = createMockLogger();
});
afterEach(() => {
vi.resetAllMocks();
});
describe('addInventoryItem', () => {
it('should add item to inventory without expiry date', async () => {
const mockItem: UserInventoryItem = {
inventory_id: 1,
user_id: 'user-1',
product_id: null,
master_item_id: null,
item_name: 'Milk',
quantity: 1,
unit: 'gallon',
purchase_date: null,
expiry_date: null,
source: 'manual',
location: 'fridge',
notes: null,
is_consumed: false,
consumed_at: null,
expiry_source: null,
receipt_item_id: null,
pantry_location_id: null,
notification_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
days_until_expiry: null,
expiry_status: 'unknown',
};
vi.mocked(expiryRepo.addInventoryItem).mockResolvedValueOnce(mockItem);
const result = await addInventoryItem(
'user-1',
{ item_name: 'Milk', quantity: 1, source: 'manual', location: 'fridge' },
mockLogger,
);
expect(result.inventory_id).toBe(1);
expect(result.item_name).toBe('Milk');
});
it('should calculate expiry date when purchase date and location provided', async () => {
const mockItem: UserInventoryItem = {
inventory_id: 2,
user_id: 'user-1',
product_id: null,
master_item_id: 5,
item_name: 'Milk',
quantity: 1,
unit: 'gallon',
purchase_date: '2024-01-15',
expiry_date: '2024-01-22', // calculated
source: 'manual',
location: 'fridge',
notes: null,
is_consumed: false,
consumed_at: null,
expiry_source: 'calculated',
receipt_item_id: null,
pantry_location_id: null,
notification_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
days_until_expiry: 7,
expiry_status: 'fresh',
};
vi.mocked(expiryRepo.getExpiryRangeForItem).mockResolvedValueOnce({
expiry_range_id: 1,
master_item_id: 5,
category_id: null,
item_pattern: null,
storage_location: 'fridge',
min_days: 5,
max_days: 10,
typical_days: 7,
notes: null,
source: 'usda',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
});
vi.mocked(expiryRepo.addInventoryItem).mockResolvedValueOnce(mockItem);
const result = await addInventoryItem(
'user-1',
{
item_name: 'Milk',
master_item_id: 5,
quantity: 1,
source: 'manual',
location: 'fridge',
purchase_date: '2024-01-15',
},
mockLogger,
);
expect(result.expiry_date).toBe('2024-01-22');
});
});
describe('updateInventoryItem', () => {
it('should update inventory item', async () => {
const mockUpdatedItem: UserInventoryItem = {
inventory_id: 1,
user_id: 'user-1',
product_id: null,
master_item_id: null,
item_name: 'Milk',
quantity: 2, // updated
unit: 'gallon',
purchase_date: null,
expiry_date: '2024-01-25',
source: 'manual',
location: 'fridge',
notes: 'Almost gone',
is_consumed: false,
consumed_at: null,
expiry_source: null,
receipt_item_id: null,
pantry_location_id: null,
notification_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
days_until_expiry: 5,
expiry_status: 'expiring_soon',
};
vi.mocked(expiryRepo.updateInventoryItem).mockResolvedValueOnce(mockUpdatedItem);
const result = await updateInventoryItem(
1,
'user-1',
{ quantity: 2, notes: 'Almost gone' },
mockLogger,
);
expect(result.quantity).toBe(2);
expect(result.notes).toBe('Almost gone');
});
});
describe('markItemConsumed', () => {
it('should mark item as consumed', async () => {
vi.mocked(expiryRepo.markAsConsumed).mockResolvedValueOnce(undefined);
await markItemConsumed(1, 'user-1', mockLogger);
expect(expiryRepo.markAsConsumed).toHaveBeenCalledWith(1, 'user-1', mockLogger);
});
});
describe('deleteInventoryItem', () => {
it('should delete inventory item', async () => {
vi.mocked(expiryRepo.deleteInventoryItem).mockResolvedValueOnce(undefined);
await deleteInventoryItem(1, 'user-1', mockLogger);
expect(expiryRepo.deleteInventoryItem).toHaveBeenCalledWith(1, 'user-1', mockLogger);
});
});
describe('getInventoryItemById', () => {
it('should return inventory item by ID', async () => {
const mockItem: UserInventoryItem = {
inventory_id: 1,
user_id: 'user-1',
product_id: null,
master_item_id: null,
item_name: 'Eggs',
quantity: 12,
unit: null,
purchase_date: null,
expiry_date: null,
source: 'manual',
location: 'fridge',
notes: null,
is_consumed: false,
consumed_at: null,
expiry_source: null,
receipt_item_id: null,
pantry_location_id: null,
notification_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
days_until_expiry: null,
expiry_status: 'unknown',
};
vi.mocked(expiryRepo.getInventoryItemById).mockResolvedValueOnce(mockItem);
const result = await getInventoryItemById(1, 'user-1', mockLogger);
expect(result.item_name).toBe('Eggs');
});
});
describe('getInventory', () => {
it('should return paginated inventory', async () => {
const mockInventory = {
items: [
{
inventory_id: 1,
user_id: 'user-1',
product_id: null,
master_item_id: null,
item_name: 'Butter',
quantity: 1,
unit: null,
purchase_date: null,
expiry_date: null,
source: 'manual' as InventorySource,
location: 'fridge' as StorageLocation,
notes: null,
is_consumed: false,
consumed_at: null,
expiry_source: null,
receipt_item_id: null,
pantry_location_id: null,
notification_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
days_until_expiry: null,
expiry_status: 'unknown' as ExpiryStatus,
},
],
total: 1,
};
vi.mocked(expiryRepo.getInventory).mockResolvedValueOnce(mockInventory);
const result = await getInventory({ user_id: 'user-1', limit: 10, offset: 0 }, mockLogger);
expect(result.items).toHaveLength(1);
expect(result.total).toBe(1);
});
it('should filter by location', async () => {
vi.mocked(expiryRepo.getInventory).mockResolvedValueOnce({ items: [], total: 0 });
await getInventory({ user_id: 'user-1', location: 'freezer' }, mockLogger);
expect(expiryRepo.getInventory).toHaveBeenCalledWith(
{ user_id: 'user-1', location: 'freezer' },
mockLogger,
);
});
});
describe('getExpiringItemsGrouped', () => {
it('should return items grouped by expiry urgency', async () => {
const expiringItems = [
createMockInventoryItem({ days_until_expiry: 0 }), // today
createMockInventoryItem({ days_until_expiry: 3 }), // this week
createMockInventoryItem({ days_until_expiry: 15 }), // this month
];
const expiredItems = [createMockInventoryItem({ days_until_expiry: -2 })];
vi.mocked(expiryRepo.getExpiringItems).mockResolvedValueOnce(expiringItems);
vi.mocked(expiryRepo.getExpiredItems).mockResolvedValueOnce(expiredItems);
const result = await getExpiringItemsGrouped('user-1', mockLogger);
expect(result.expiring_today).toHaveLength(1);
expect(result.expiring_this_week).toHaveLength(1);
expect(result.expiring_this_month).toHaveLength(1);
expect(result.already_expired).toHaveLength(1);
expect(result.counts.total).toBe(4);
});
});
describe('getExpiringItems', () => {
it('should return items expiring within specified days', async () => {
const mockItems = [createMockInventoryItem({ days_until_expiry: 5 })];
vi.mocked(expiryRepo.getExpiringItems).mockResolvedValueOnce(mockItems);
const result = await getExpiringItems('user-1', 7, mockLogger);
expect(result).toHaveLength(1);
expect(expiryRepo.getExpiringItems).toHaveBeenCalledWith('user-1', 7, mockLogger);
});
});
describe('getExpiredItems', () => {
it('should return expired items', async () => {
const mockItems = [createMockInventoryItem({ days_until_expiry: -3 })];
vi.mocked(expiryRepo.getExpiredItems).mockResolvedValueOnce(mockItems);
const result = await getExpiredItems('user-1', mockLogger);
expect(result).toHaveLength(1);
});
});
describe('calculateExpiryDate', () => {
it('should calculate expiry date based on storage location', async () => {
vi.mocked(expiryRepo.getExpiryRangeForItem).mockResolvedValueOnce({
expiry_range_id: 1,
master_item_id: null,
category_id: 1,
item_pattern: null,
storage_location: 'fridge',
min_days: 7,
max_days: 14,
typical_days: 10,
notes: null,
source: 'usda',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
});
const result = await calculateExpiryDate(
{
item_name: 'Cheese',
storage_location: 'fridge',
purchase_date: '2024-01-15',
},
mockLogger,
);
expect(result).toBe('2024-01-25'); // 10 days after purchase
});
it('should return null when no expiry range found', async () => {
vi.mocked(expiryRepo.getExpiryRangeForItem).mockResolvedValueOnce(null);
const result = await calculateExpiryDate(
{
item_name: 'Unknown Item',
storage_location: 'pantry',
purchase_date: '2024-01-15',
},
mockLogger,
);
expect(result).toBeNull();
});
});
describe('getExpiryRanges', () => {
it('should return paginated expiry ranges', async () => {
const mockRanges = {
ranges: [
{
expiry_range_id: 1,
master_item_id: null,
category_id: 1,
item_pattern: null,
storage_location: 'fridge' as StorageLocation,
min_days: 7,
max_days: 14,
typical_days: 10,
notes: null,
source: 'usda' as ExpiryRangeSource,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
},
],
total: 1,
};
vi.mocked(expiryRepo.getExpiryRanges).mockResolvedValueOnce(mockRanges);
const result = await getExpiryRanges({}, mockLogger);
expect(result.ranges).toHaveLength(1);
expect(result.total).toBe(1);
});
});
describe('addExpiryRange', () => {
it('should add new expiry range', async () => {
const mockRange = {
expiry_range_id: 2,
master_item_id: null,
category_id: 2,
item_pattern: null,
storage_location: 'freezer' as StorageLocation,
min_days: 30,
max_days: 90,
typical_days: 60,
notes: 'Best stored in back of freezer',
source: 'manual' as ExpiryRangeSource,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
vi.mocked(expiryRepo.addExpiryRange).mockResolvedValueOnce(mockRange);
const result = await addExpiryRange(
{
category_id: 2,
storage_location: 'freezer',
min_days: 30,
max_days: 90,
typical_days: 60,
notes: 'Best stored in back of freezer',
},
mockLogger,
);
expect(result.typical_days).toBe(60);
});
});
describe('getAlertSettings', () => {
it('should return user alert settings', async () => {
const mockSettings = [
{
expiry_alert_id: 1,
user_id: 'user-1',
days_before_expiry: 3,
alert_method: 'email' as AlertMethod,
is_enabled: true,
last_alert_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
},
];
vi.mocked(expiryRepo.getUserAlertSettings).mockResolvedValueOnce(mockSettings);
const result = await getAlertSettings('user-1', mockLogger);
expect(result).toHaveLength(1);
expect(result[0].alert_method).toBe('email');
});
});
describe('updateAlertSettings', () => {
it('should update alert settings', async () => {
const mockUpdatedSettings = {
expiry_alert_id: 1,
user_id: 'user-1',
days_before_expiry: 5,
alert_method: 'email' as AlertMethod,
is_enabled: true,
last_alert_sent_at: null,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
vi.mocked(expiryRepo.upsertAlertSettings).mockResolvedValueOnce(mockUpdatedSettings);
const result = await updateAlertSettings(
'user-1',
'email',
{ days_before_expiry: 5 },
mockLogger,
);
expect(result.days_before_expiry).toBe(5);
});
});
describe('processExpiryAlerts', () => {
it('should process alerts for users with expiring items', async () => {
vi.mocked(expiryRepo.getUsersWithExpiringItems).mockResolvedValueOnce([
{
user_id: 'user-1',
email: 'user1@example.com',
alert_method: 'email' as AlertMethod,
days_before_expiry: 3,
},
]);
vi.mocked(expiryRepo.getExpiringItems).mockResolvedValueOnce([
createMockInventoryItem({ days_until_expiry: 2 }),
]);
vi.mocked(emailService.sendEmail).mockResolvedValueOnce(undefined);
vi.mocked(expiryRepo.logAlert).mockResolvedValue(createMockAlertLogRecord());
vi.mocked(expiryRepo.markAlertSent).mockResolvedValue(undefined);
const alertsSent = await processExpiryAlerts(mockLogger);
expect(alertsSent).toBe(1);
});
it('should skip users with no expiring items', async () => {
vi.mocked(expiryRepo.getUsersWithExpiringItems).mockResolvedValueOnce([
{
user_id: 'user-1',
email: 'user1@example.com',
alert_method: 'email' as AlertMethod,
days_before_expiry: 3,
},
]);
vi.mocked(expiryRepo.getExpiringItems).mockResolvedValueOnce([]);
const alertsSent = await processExpiryAlerts(mockLogger);
expect(alertsSent).toBe(0);
});
});
// Tests for importing confirmed receipt line items into the pantry inventory.
describe('addItemsFromReceipt', () => {
  it('should add items from receipt to inventory', async () => {
    // A completed receipt owned by the requesting user.
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: '2024-01-15',
      total_amount_cents: 2500,
      status: 'completed' as ReceiptStatus,
      raw_text: 'test text',
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };
    // One matched line item that has not yet been added to the pantry.
    const mockReceiptItems = [
      {
        receipt_item_id: 1,
        receipt_id: 1,
        raw_item_description: 'MILK 2%',
        quantity: 1,
        price_paid_cents: 399,
        master_item_id: 5,
        product_id: null,
        status: 'matched' as ReceiptItemStatus,
        line_number: 1,
        match_confidence: 0.95,
        is_discount: false,
        unit_price_cents: null,
        unit_type: null,
        added_to_pantry: false,
        pantry_item_id: null,
        upc_code: null,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      },
    ];
    vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptRepo.getReceiptItems).mockResolvedValueOnce(mockReceiptItems);
    vi.mocked(expiryRepo.addInventoryItem).mockResolvedValueOnce(
      createMockInventoryItem({ inventory_id: 10 }),
    );
    vi.mocked(receiptRepo.updateReceiptItem).mockResolvedValueOnce(mockReceiptItems[0] as any);
    const result = await addItemsFromReceipt(
      'user-1',
      1,
      [{ receipt_item_id: 1, location: 'fridge', include: true }],
      mockLogger,
    );
    expect(result).toHaveLength(1);
    // The receipt item must be flagged as added so it cannot be imported twice.
    expect(receiptRepo.updateReceiptItem).toHaveBeenCalledWith(
      1,
      expect.objectContaining({ added_to_pantry: true }),
      expect.any(Object),
    );
  });
  it('should skip items with include: false', async () => {
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: '2024-01-15',
      total_amount_cents: 2500,
      status: 'completed' as ReceiptStatus,
      raw_text: 'test text',
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
    const result = await addItemsFromReceipt(
      'user-1',
      1,
      [{ receipt_item_id: 1, include: false }],
      mockLogger,
    );
    // Excluded confirmations are skipped entirely; nothing is created.
    expect(result).toHaveLength(0);
    expect(expiryRepo.addInventoryItem).not.toHaveBeenCalled();
  });
});
// Tests for pairing expiring inventory items with recipes that can use them.
describe('getRecipeSuggestionsForExpiringItems', () => {
  it('should return recipes using expiring items', async () => {
    // Two expiring items; only master_item_id 5 is referenced by the recipe.
    const expiringItems = [
      createMockInventoryItem({ master_item_id: 5, days_until_expiry: 2 }),
      createMockInventoryItem({ master_item_id: 10, days_until_expiry: 4 }),
    ];
    const mockRecipes = {
      recipes: [
        {
          recipe_id: 1,
          recipe_name: 'Quick Breakfast',
          description: 'Easy breakfast recipe',
          prep_time_minutes: 10,
          cook_time_minutes: 15,
          servings: 2,
          photo_url: null,
          matching_master_item_ids: [5],
          match_count: 1,
        },
      ],
      total: 1,
    };
    vi.mocked(expiryRepo.getExpiringItems).mockResolvedValueOnce(expiringItems);
    vi.mocked(expiryRepo.getRecipesForExpiringItems).mockResolvedValueOnce(mockRecipes);
    const result = await getRecipeSuggestionsForExpiringItems('user-1', 7, mockLogger);
    expect(result.recipes).toHaveLength(1);
    // Only the item whose master_item_id appears in the recipe is attached...
    expect(result.recipes[0].matching_items).toHaveLength(1);
    // ...but both expiring items are still reported as considered.
    expect(result.considered_items).toHaveLength(2);
  });
  it('should return empty results when no expiring items', async () => {
    // With nothing expiring, the service short-circuits before querying recipes.
    vi.mocked(expiryRepo.getExpiringItems).mockResolvedValueOnce([]);
    const result = await getRecipeSuggestionsForExpiringItems('user-1', 7, mockLogger);
    expect(result.recipes).toHaveLength(0);
    expect(result.total).toBe(0);
  });
});
// Tests for the BullMQ job entry point driving background alert delivery.
describe('processExpiryAlertJob', () => {
  it('should process user-specific alert job', async () => {
    // The target user has one enabled email alert configured for 7 days ahead.
    vi.mocked(expiryRepo.getUserAlertSettings).mockResolvedValueOnce([
      {
        expiry_alert_id: 1,
        user_id: 'user-1',
        days_before_expiry: 7,
        alert_method: 'email' as AlertMethod,
        is_enabled: true,
        last_alert_sent_at: null,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      },
    ]);
    vi.mocked(expiryRepo.getExpiringItems).mockResolvedValueOnce([
      createMockInventoryItem({ days_until_expiry: 3 }),
    ]);
    vi.mocked(expiryRepo.logAlert).mockResolvedValue(createMockAlertLogRecord());
    // Settings row returned when the worker refreshes last_alert_sent_at.
    vi.mocked(expiryRepo.upsertAlertSettings).mockResolvedValue({
      expiry_alert_id: 1,
      user_id: 'user-1',
      days_before_expiry: 7,
      alert_method: 'email' as AlertMethod,
      is_enabled: true,
      last_alert_sent_at: new Date().toISOString(),
      created_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    });
    const mockJob = {
      id: 'job-1',
      data: {
        alertType: 'user_specific' as const,
        userId: 'user-1',
        daysAhead: 7,
        meta: { requestId: 'req-1' },
      },
    } as Job<ExpiryAlertJobData>;
    const result = await processExpiryAlertJob(mockJob, mockLogger);
    expect(result.success).toBe(true);
    expect(result.alertsSent).toBe(1);
    expect(result.usersNotified).toBe(1);
  });
  it('should process daily check job for all users', async () => {
    // Two users are eligible for the daily sweep.
    vi.mocked(expiryRepo.getUsersWithExpiringItems).mockResolvedValueOnce([
      {
        user_id: 'user-1',
        email: 'user1@example.com',
        alert_method: 'email' as AlertMethod,
        days_before_expiry: 7,
      },
      {
        user_id: 'user-2',
        email: 'user2@example.com',
        alert_method: 'email' as AlertMethod,
        days_before_expiry: 7,
      },
    ]);
    // Per-user settings resolve in call order: user-1 first, then user-2.
    vi.mocked(expiryRepo.getUserAlertSettings)
      .mockResolvedValueOnce([
        {
          expiry_alert_id: 1,
          user_id: 'user-1',
          days_before_expiry: 7,
          alert_method: 'email' as AlertMethod,
          is_enabled: true,
          last_alert_sent_at: null,
          created_at: new Date().toISOString(),
          updated_at: new Date().toISOString(),
        },
      ])
      .mockResolvedValueOnce([
        {
          expiry_alert_id: 2,
          user_id: 'user-2',
          days_before_expiry: 7,
          alert_method: 'email' as AlertMethod,
          is_enabled: true,
          last_alert_sent_at: null,
          created_at: new Date().toISOString(),
          updated_at: new Date().toISOString(),
        },
      ]);
    // Each user has one expiring item.
    vi.mocked(expiryRepo.getExpiringItems)
      .mockResolvedValueOnce([createMockInventoryItem({ days_until_expiry: 3 })])
      .mockResolvedValueOnce([createMockInventoryItem({ days_until_expiry: 5 })]);
    vi.mocked(expiryRepo.logAlert).mockResolvedValue(createMockAlertLogRecord());
    vi.mocked(expiryRepo.upsertAlertSettings).mockResolvedValue({
      expiry_alert_id: 1,
      user_id: 'user-1',
      days_before_expiry: 7,
      alert_method: 'email' as AlertMethod,
      is_enabled: true,
      last_alert_sent_at: new Date().toISOString(),
      created_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    });
    const mockJob = {
      id: 'job-2',
      data: {
        alertType: 'daily_check' as const,
        daysAhead: 7,
      },
    } as Job<ExpiryAlertJobData>;
    const result = await processExpiryAlertJob(mockJob, mockLogger);
    expect(result.success).toBe(true);
    expect(result.usersNotified).toBe(2);
  });
  it('should handle job processing errors', async () => {
    // A repository failure must propagate so BullMQ can retry the job.
    vi.mocked(expiryRepo.getUserAlertSettings).mockRejectedValueOnce(new Error('DB error'));
    const mockJob = {
      id: 'job-3',
      data: {
        alertType: 'user_specific' as const,
        userId: 'user-1',
      },
    } as Job<ExpiryAlertJobData>;
    await expect(processExpiryAlertJob(mockJob, mockLogger)).rejects.toThrow('DB error');
  });
});
});
// Helper function to create mock inventory items
/**
 * Builds a fully-populated UserInventoryItem for tests.
 * `expiry_status` is derived from `days_until_expiry` the same way the service
 * reports it: negative -> 'expired', 0..7 -> 'expiring_soon', otherwise
 * (including null) -> 'fresh'.
 */
function createMockInventoryItem(
  overrides: Partial<{
    inventory_id: number;
    master_item_id: number | null;
    days_until_expiry: number | null;
  }>,
): UserInventoryItem {
  // Use an explicit undefined check instead of `??` so that an explicit
  // `days_until_expiry: null` override is honoured rather than coerced to 5
  // (with `??`, the null branches of the ternary below were unreachable).
  const daysUntilExpiry =
    overrides.days_until_expiry === undefined ? 5 : overrides.days_until_expiry;
  const expiryStatus: ExpiryStatus =
    daysUntilExpiry !== null && daysUntilExpiry < 0
      ? 'expired'
      : daysUntilExpiry !== null && daysUntilExpiry <= 7
        ? 'expiring_soon'
        : 'fresh';
  return {
    inventory_id: overrides.inventory_id ?? 1,
    user_id: 'user-1',
    product_id: null,
    master_item_id: overrides.master_item_id ?? null,
    item_name: 'Test Item',
    quantity: 1,
    unit: null,
    purchase_date: null,
    expiry_date: '2024-01-25',
    source: 'manual' as InventorySource,
    location: 'fridge' as StorageLocation,
    notes: null,
    is_consumed: false,
    consumed_at: null,
    expiry_source: null,
    receipt_item_id: null,
    pantry_location_id: null,
    notification_sent_at: null,
    created_at: new Date().toISOString(),
    updated_at: new Date().toISOString(),
    days_until_expiry: daysUntilExpiry,
    expiry_status: expiryStatus,
  };
}

View File

@@ -0,0 +1,956 @@
// src/services/expiryService.server.ts
/**
* @file Expiry Date Tracking Service
* Handles inventory management, expiry date calculations, and expiry alerts.
* Provides functionality for tracking food items and notifying users about expiring items.
*/
import type { Logger } from 'pino';
import { expiryRepo, receiptRepo } from './db/index.db';
import type {
StorageLocation,
AlertMethod,
UserInventoryItem,
AddInventoryItemRequest,
UpdateInventoryItemRequest,
ExpiryDateRange,
AddExpiryRangeRequest,
ExpiryAlertSettings,
UpdateExpiryAlertSettingsRequest,
ExpiringItemsResponse,
InventoryQueryOptions,
ExpiryRangeQueryOptions,
CalculateExpiryOptions,
ExpiryAlertType,
} from '../types/expiry';
/**
 * Default expiry warning threshold in days.
 * Used by processExpiryAlertJob when the job payload omits `daysAhead`.
 */
const DEFAULT_EXPIRY_WARNING_DAYS = 7;
/**
 * Number of days to consider an item "expiring soon".
 * Drives the "this week" bucket in getExpiringItemsGrouped.
 */
const EXPIRING_SOON_THRESHOLD = 7;
/**
 * Number of days to consider for "this month" expiry grouping.
 * Upper bound passed to the repository when fetching items for the dashboard.
 */
const THIS_MONTH_THRESHOLD = 30;
// ============================================================================
// INVENTORY MANAGEMENT
// ============================================================================
/**
 * Adds an item to the user's inventory.
 * If no expiry date is provided, attempts to calculate one from the item's
 * purchase date and storage location using the expiry reference data.
 * @param userId The user's ID
 * @param item The item to add (never mutated)
 * @param logger Pino logger instance
 * @returns The created inventory item with computed expiry status
 */
export const addInventoryItem = async (
  userId: string,
  item: AddInventoryItemRequest,
  logger: Logger,
): Promise<UserInventoryItem> => {
  const itemLogger = logger.child({ userId, itemName: item.item_name });
  itemLogger.info('Adding item to inventory');
  // Work on a shallow copy so the caller's request object is never mutated.
  const itemToAdd: AddInventoryItemRequest = { ...item };
  // If no expiry date provided and we have purchase date + location, try to calculate
  if (!itemToAdd.expiry_date && itemToAdd.purchase_date && itemToAdd.location) {
    const calculatedExpiry = await calculateExpiryDate(
      {
        master_item_id: itemToAdd.master_item_id,
        item_name: itemToAdd.item_name,
        storage_location: itemToAdd.location,
        purchase_date: itemToAdd.purchase_date,
      },
      itemLogger,
    );
    if (calculatedExpiry) {
      itemLogger.debug({ calculatedExpiry }, 'Calculated expiry date from storage location');
      itemToAdd.expiry_date = calculatedExpiry;
    }
  }
  const inventoryItem = await expiryRepo.addInventoryItem(userId, itemToAdd, itemLogger);
  itemLogger.info({ inventoryId: inventoryItem.inventory_id }, 'Item added to inventory');
  return inventoryItem;
};
/**
 * Applies a partial update to one of the user's inventory items.
 * @param inventoryId Identifier of the inventory row to modify
 * @param userId Owner's user ID (authorization is enforced by the repository)
 * @param updates Fields to change on the item
 * @param logger Pino logger instance
 * @returns The item as stored after the update
 */
export const updateInventoryItem = async (
  inventoryId: number,
  userId: string,
  updates: UpdateInventoryItemRequest,
  logger: Logger,
): Promise<UserInventoryItem> => {
  logger.debug({ inventoryId, userId, updates }, 'Updating inventory item');
  const updatedItem = await expiryRepo.updateInventoryItem(inventoryId, userId, updates, logger);
  return updatedItem;
};
/**
 * Flags an inventory item as consumed.
 * @param inventoryId Identifier of the inventory row
 * @param userId Owner's user ID (authorization is enforced by the repository)
 * @param logger Pino logger instance
 */
export const markItemConsumed = async (
  inventoryId: number,
  userId: string,
  logger: Logger,
): Promise<void> => {
  logger.debug({ inventoryId, userId }, 'Marking item as consumed');
  // Repository performs the status flip; log at info once it succeeds.
  await expiryRepo.markAsConsumed(inventoryId, userId, logger);
  logger.info({ inventoryId }, 'Item marked as consumed');
};
/**
 * Removes an inventory item belonging to the given user.
 * @param inventoryId Identifier of the inventory row to delete
 * @param userId Owner's user ID (authorization is enforced by the repository)
 * @param logger Pino logger instance
 */
export const deleteInventoryItem = async (
  inventoryId: number,
  userId: string,
  logger: Logger,
): Promise<void> => {
  logger.debug({ inventoryId, userId }, 'Deleting inventory item');
  // Deletion is delegated entirely to the repository layer.
  await expiryRepo.deleteInventoryItem(inventoryId, userId, logger);
  logger.info({ inventoryId }, 'Item deleted from inventory');
};
/**
 * Looks up a single inventory item, scoped to its owner.
 * @param inventoryId Identifier of the inventory row
 * @param userId Owner's user ID (authorization is enforced by the repository)
 * @param logger Pino logger instance
 * @returns The matching inventory item
 */
export const getInventoryItemById = async (
  inventoryId: number,
  userId: string,
  logger: Logger,
): Promise<UserInventoryItem> => {
  const item = await expiryRepo.getInventoryItemById(inventoryId, userId, logger);
  return item;
};
/**
 * Fetches a page of the user's inventory with optional filtering.
 * @param options Query options (user ID, filters, pagination)
 * @param logger Pino logger instance
 * @returns The matching items together with the total row count
 */
export const getInventory = async (
  options: InventoryQueryOptions,
  logger: Logger,
): Promise<{ items: UserInventoryItem[]; total: number }> => {
  logger.debug({ userId: options.user_id }, 'Fetching user inventory');
  const page = await expiryRepo.getInventory(options, logger);
  return page;
};
// ============================================================================
// EXPIRING ITEMS
// ============================================================================
/**
 * Gets items grouped by expiry urgency for dashboard display.
 * Buckets: today / this week (<= EXPIRING_SOON_THRESHOLD days) /
 * this month (the remainder of the 30-day window) / already expired.
 * @param userId The user's ID
 * @param logger Pino logger instance
 * @returns Items grouped by expiry status with counts
 */
export const getExpiringItemsGrouped = async (
  userId: string,
  logger: Logger,
): Promise<ExpiringItemsResponse> => {
  logger.debug({ userId }, 'Fetching expiring items grouped by urgency');
  // The two queries are independent, so issue them concurrently.
  const [expiringThisMonth, expiredItems] = await Promise.all([
    expiryRepo.getExpiringItems(userId, THIS_MONTH_THRESHOLD, logger),
    expiryRepo.getExpiredItems(userId, logger),
  ]);
  const expiringToday: UserInventoryItem[] = [];
  const expiringThisWeek: UserInventoryItem[] = [];
  const expiringLater: UserInventoryItem[] = [];
  for (const item of expiringThisMonth) {
    // Items without a computed expiry cannot be bucketed.
    if (item.days_until_expiry === null) {
      continue;
    }
    // Already-expired items are reported via the dedicated expired query;
    // skip them here so they can never be double-counted in the weekly bucket.
    if (item.days_until_expiry < 0) {
      continue;
    }
    if (item.days_until_expiry === 0) {
      expiringToday.push(item);
    } else if (item.days_until_expiry <= EXPIRING_SOON_THRESHOLD) {
      expiringThisWeek.push(item);
    } else {
      expiringLater.push(item);
    }
  }
  const response: ExpiringItemsResponse = {
    expiring_today: expiringToday,
    expiring_this_week: expiringThisWeek,
    expiring_this_month: expiringLater,
    already_expired: expiredItems,
    counts: {
      today: expiringToday.length,
      this_week: expiringThisWeek.length,
      this_month: expiringLater.length,
      expired: expiredItems.length,
      total:
        expiringToday.length + expiringThisWeek.length + expiringLater.length + expiredItems.length,
    },
  };
  logger.info(
    {
      userId,
      counts: response.counts,
    },
    'Expiring items fetched',
  );
  return response;
};
/**
 * Gets items expiring within a specified number of days.
 * @param userId The user's ID
 * @param daysAhead Number of days to look ahead
 * @param logger Pino logger instance
 * @returns Items expiring within the specified timeframe
 */
export const getExpiringItems = async (
  userId: string,
  daysAhead: number,
  logger: Logger,
): Promise<UserInventoryItem[]> => {
  logger.debug({ userId, daysAhead }, 'Fetching expiring items');
  const items = await expiryRepo.getExpiringItems(userId, daysAhead, logger);
  return items;
};
/**
 * Gets items that have already passed their expiry date.
 * @param userId The user's ID
 * @param logger Pino logger instance
 * @returns The user's expired items
 */
export const getExpiredItems = async (
  userId: string,
  logger: Logger,
): Promise<UserInventoryItem[]> => {
  logger.debug({ userId }, 'Fetching expired items');
  const expired = await expiryRepo.getExpiredItems(userId, logger);
  return expired;
};
// ============================================================================
// EXPIRY DATE CALCULATION
// ============================================================================
/**
 * Calculates an estimated expiry date based on item and storage location.
 * Consults the expiry_date_ranges reference data via the repository.
 * @param options Calculation options (item identity, location, purchase date)
 * @param logger Pino logger instance
 * @returns Calculated expiry date string (ISO YYYY-MM-DD) or null if no
 *          reference range exists for the item
 */
export const calculateExpiryDate = async (
  options: CalculateExpiryOptions,
  logger: Logger,
): Promise<string | null> => {
  const { master_item_id, category_id, item_name, storage_location, purchase_date } = options;
  logger.debug(
    {
      masterItemId: master_item_id,
      categoryId: category_id,
      itemName: item_name,
      storageLocation: storage_location,
    },
    'Calculating expiry date',
  );
  // Look up the reference shelf-life row for this item/category/name.
  const range = await expiryRepo.getExpiryRangeForItem(storage_location, logger, {
    masterItemId: master_item_id,
    categoryId: category_id,
    itemName: item_name,
  });
  if (!range) {
    logger.debug('No expiry range found for item');
    return null;
  }
  // Shift the purchase date forward by typical_days, then keep the date part.
  const estimated = new Date(purchase_date);
  estimated.setDate(estimated.getDate() + range.typical_days);
  const [expiryDateStr] = estimated.toISOString().split('T');
  logger.debug(
    {
      purchaseDate: purchase_date,
      typicalDays: range.typical_days,
      expiryDate: expiryDateStr,
    },
    'Expiry date calculated',
  );
  return expiryDateStr;
};
/**
 * Gets expiry date reference ranges with optional filtering.
 * @param options Query options (filters, pagination)
 * @param logger Pino logger instance
 * @returns The matching ranges together with the total row count
 */
export const getExpiryRanges = async (
  options: ExpiryRangeQueryOptions,
  logger: Logger,
): Promise<{ ranges: ExpiryDateRange[]; total: number }> => {
  const result = await expiryRepo.getExpiryRanges(options, logger);
  return result;
};
/**
 * Adds a new expiry date reference range (admin operation).
 * @param range The range to add
 * @param logger Pino logger instance
 * @returns The created expiry range
 */
export const addExpiryRange = async (
  range: AddExpiryRangeRequest,
  logger: Logger,
): Promise<ExpiryDateRange> => {
  logger.info(
    { storageLocation: range.storage_location, typicalDays: range.typical_days },
    'Adding expiry range',
  );
  const created = await expiryRepo.addExpiryRange(range, logger);
  return created;
};
// ============================================================================
// EXPIRY ALERTS
// ============================================================================
/**
 * Gets the user's expiry alert settings (one entry per alert method).
 * @param userId The user's ID
 * @param logger Pino logger instance
 * @returns Array of alert settings
 */
export const getAlertSettings = async (
  userId: string,
  logger: Logger,
): Promise<ExpiryAlertSettings[]> => {
  const settings = await expiryRepo.getUserAlertSettings(userId, logger);
  return settings;
};
/**
 * Creates or updates the user's expiry alert settings for one delivery method.
 * @param userId The user's ID
 * @param alertMethod The alert delivery method being configured
 * @param settings The settings to apply
 * @param logger Pino logger instance
 * @returns The settings row as stored after the upsert
 */
export const updateAlertSettings = async (
  userId: string,
  alertMethod: AlertMethod,
  settings: UpdateExpiryAlertSettingsRequest,
  logger: Logger,
): Promise<ExpiryAlertSettings> => {
  logger.debug({ userId, alertMethod, settings }, 'Updating alert settings');
  const saved = await expiryRepo.upsertAlertSettings(userId, alertMethod, settings, logger);
  return saved;
};
/**
 * Runs one pass of expiry alert delivery for every eligible user.
 * Intended to be invoked by a scheduled worker job. A failure for one user is
 * logged and does not stop processing of the remaining users.
 * @param logger Pino logger instance
 * @returns The number of users for whom an alert pass completed
 */
export const processExpiryAlerts = async (logger: Logger): Promise<number> => {
  logger.info('Starting expiry alert processing');
  // All users who have alerts enabled and at least one item in their window.
  const eligibleUsers = await expiryRepo.getUsersWithExpiringItems(logger);
  logger.debug({ userCount: eligibleUsers.length }, 'Found users to notify');
  let notifiedCount = 0;
  for (const recipient of eligibleUsers) {
    try {
      const items = await expiryRepo.getExpiringItems(
        recipient.user_id,
        recipient.days_before_expiry,
        logger,
      );
      if (items.length === 0) {
        continue;
      }
      // Deliver via the user's configured channel; only email is implemented.
      if (recipient.alert_method === 'email') {
        await sendExpiryEmailAlert(recipient.user_id, recipient.email, items, logger);
      } else if (recipient.alert_method === 'push') {
        // TODO: Implement push notifications
        logger.debug({ userId: recipient.user_id }, 'Push notifications not yet implemented');
      } else if (recipient.alert_method === 'in_app') {
        // TODO: Implement in-app notifications
        logger.debug({ userId: recipient.user_id }, 'In-app notifications not yet implemented');
      }
      // Record one alert-log row per item, then stamp the user's settings.
      for (const item of items) {
        await expiryRepo.logAlert(
          recipient.user_id,
          'expiring_soon',
          recipient.alert_method,
          item.item_name,
          logger,
          {
            pantryItemId: item.inventory_id,
            expiryDate: item.expiry_date,
            daysUntilExpiry: item.days_until_expiry,
          },
        );
      }
      await expiryRepo.markAlertSent(recipient.user_id, recipient.alert_method, logger);
      notifiedCount++;
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));
      logger.error({ err, userId: recipient.user_id }, 'Error processing expiry alert for user');
    }
  }
  logger.info({ alertsSent: notifiedCount }, 'Expiry alert processing completed');
  return notifiedCount;
};
/**
 * Sends an email alert about expiring items.
 * Item names and storage locations are HTML-escaped before interpolation into
 * the HTML body, since they originate from user-entered data.
 * @param userId The user's ID
 * @param email The user's email
 * @param items The expiring items
 * @param logger Pino logger instance
 * @throws Re-throws any error from the email service after logging it
 */
const sendExpiryEmailAlert = async (
  userId: string,
  email: string,
  items: UserInventoryItem[],
  logger: Logger,
): Promise<void> => {
  const alertLogger = logger.child({ userId, email, itemCount: items.length });
  alertLogger.info('Sending expiry alert email');
  // Base URL for all links in the message; hoisted so the fallback is defined once.
  const baseUrl = process.env.FRONTEND_URL || 'https://flyer-crawler.projectium.com';
  // Minimal HTML entity escaping for user-provided text interpolated into markup.
  const escapeHtml = (value: string): string =>
    value
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;');
  // Group items by urgency
  const expiredItems = items.filter((i) => i.days_until_expiry !== null && i.days_until_expiry < 0);
  const todayItems = items.filter((i) => i.days_until_expiry === 0);
  const soonItems = items.filter(
    (i) => i.days_until_expiry !== null && i.days_until_expiry > 0 && i.days_until_expiry <= 3,
  );
  const laterItems = items.filter((i) => i.days_until_expiry !== null && i.days_until_expiry > 3);
  // Build the email content
  const subject =
    todayItems.length > 0 || expiredItems.length > 0
      ? '⚠️ Food Items Expiring Today or Already Expired!'
      : `🕐 ${items.length} Food Item${items.length > 1 ? 's' : ''} Expiring Soon`;
  const buildItemList = (itemList: UserInventoryItem[], emoji: string): string => {
    if (itemList.length === 0) return '';
    return itemList
      .map((item) => {
        const daysText =
          item.days_until_expiry === 0
            ? 'today'
            : item.days_until_expiry === 1
              ? 'tomorrow'
              : item.days_until_expiry !== null && item.days_until_expiry < 0
                ? `${Math.abs(item.days_until_expiry)} day${Math.abs(item.days_until_expiry) > 1 ? 's' : ''} ago`
                : `in ${item.days_until_expiry} days`;
        // Escape user-controlled fields before embedding them in HTML.
        const location = item.location ? ` (${escapeHtml(item.location)})` : '';
        return `${emoji} <strong>${escapeHtml(item.item_name)}</strong>${location} - expires ${daysText}`;
      })
      .join('<br>');
  };
  let htmlBody = '';
  if (expiredItems.length > 0) {
    htmlBody += `<h3 style="color: #dc3545;">Already Expired (${expiredItems.length})</h3>
<p>${buildItemList(expiredItems, '❌')}</p>`;
  }
  if (todayItems.length > 0) {
    htmlBody += `<h3 style="color: #fd7e14;">Expiring Today (${todayItems.length})</h3>
<p>${buildItemList(todayItems, '⚠️')}</p>`;
  }
  if (soonItems.length > 0) {
    htmlBody += `<h3 style="color: #ffc107;">Expiring Within 3 Days (${soonItems.length})</h3>
<p>${buildItemList(soonItems, '🕐')}</p>`;
  }
  if (laterItems.length > 0) {
    htmlBody += `<h3 style="color: #28a745;">Expiring This Week (${laterItems.length})</h3>
<p>${buildItemList(laterItems, '📅')}</p>`;
  }
  const html = `
<div style="font-family: sans-serif; padding: 20px; max-width: 600px;">
  <h2 style="color: #333;">Food Expiry Alert</h2>
  <p>The following items in your pantry need attention:</p>
  ${htmlBody}
  <hr style="margin: 20px 0; border: none; border-top: 1px solid #eee;">
  <p style="color: #666; font-size: 14px;">
    Visit your <a href="${baseUrl}/inventory">inventory page</a>
    to manage these items. You can also find
    <a href="${baseUrl}/recipes/suggestions">recipe suggestions</a>
    to use them before they expire!
  </p>
  <p style="color: #999; font-size: 12px;">
    To manage your alert preferences, visit your <a href="${baseUrl}/settings">settings page</a>.
  </p>
</div>
`;
  // Build plain text version (no escaping needed outside of HTML context).
  const buildTextList = (itemList: UserInventoryItem[]): string => {
    return itemList
      .map((item) => {
        const daysText =
          item.days_until_expiry === 0
            ? 'today'
            : item.days_until_expiry === 1
              ? 'tomorrow'
              : item.days_until_expiry !== null && item.days_until_expiry < 0
                ? `${Math.abs(item.days_until_expiry)} day(s) ago`
                : `in ${item.days_until_expiry} days`;
        return ` - ${item.item_name} - expires ${daysText}`;
      })
      .join('\n');
  };
  let textBody = 'Food Expiry Alert\n\nThe following items need attention:\n\n';
  if (expiredItems.length > 0) {
    textBody += `Already Expired:\n${buildTextList(expiredItems)}\n\n`;
  }
  if (todayItems.length > 0) {
    textBody += `Expiring Today:\n${buildTextList(todayItems)}\n\n`;
  }
  if (soonItems.length > 0) {
    textBody += `Expiring Within 3 Days:\n${buildTextList(soonItems)}\n\n`;
  }
  if (laterItems.length > 0) {
    textBody += `Expiring This Week:\n${buildTextList(laterItems)}\n\n`;
  }
  textBody += 'Visit your inventory page to manage these items.\n\nFlyer Crawler';
  try {
    await emailService.sendEmail(
      {
        to: email,
        subject,
        text: textBody,
        html,
      },
      alertLogger,
    );
    alertLogger.info('Expiry alert email sent successfully');
  } catch (error) {
    alertLogger.error({ err: error }, 'Failed to send expiry alert email');
    throw error;
  }
};
// ============================================================================
// RECEIPT INTEGRATION
// ============================================================================
/**
 * Adds items from a confirmed receipt to the user's inventory.
 * Failures on individual items are logged and skipped; the remaining items
 * are still processed.
 * @param userId The user's ID
 * @param receiptId The receipt ID
 * @param itemConfirmations Array of item confirmations with storage locations
 * @param logger Pino logger instance
 * @returns Array of created inventory items (skipped/failed items are omitted)
 */
export const addItemsFromReceipt = async (
  userId: string,
  receiptId: number,
  itemConfirmations: Array<{
    receipt_item_id: number;
    item_name?: string;
    quantity?: number;
    location?: StorageLocation;
    expiry_date?: string;
    include: boolean;
  }>,
  logger: Logger,
): Promise<UserInventoryItem[]> => {
  const receiptLogger = logger.child({ userId, receiptId });
  receiptLogger.info(
    { itemCount: itemConfirmations.length },
    'Adding items from receipt to inventory',
  );
  const createdItems: UserInventoryItem[] = [];
  // Get receipt details for purchase date
  const receipt = await receiptRepo.getReceiptById(receiptId, userId, receiptLogger);
  // Fetch the receipt's line items once up front: the previous version issued
  // the same getReceiptItems query on every loop iteration.
  const receiptItems = await receiptRepo.getReceiptItems(receiptId, receiptLogger);
  for (const confirmation of itemConfirmations) {
    if (!confirmation.include) {
      receiptLogger.debug(
        { receiptItemId: confirmation.receipt_item_id },
        'Skipping excluded item',
      );
      continue;
    }
    try {
      const receiptItem = receiptItems.find(
        (ri) => ri.receipt_item_id === confirmation.receipt_item_id,
      );
      if (!receiptItem) {
        receiptLogger.warn(
          { receiptItemId: confirmation.receipt_item_id },
          'Receipt item not found',
        );
        continue;
      }
      // Create inventory item; confirmation values take precedence over the
      // raw receipt data, which serves as the fallback.
      const inventoryItem = await addInventoryItem(
        userId,
        {
          product_id: receiptItem.product_id ?? undefined,
          master_item_id: receiptItem.master_item_id ?? undefined,
          item_name: confirmation.item_name || receiptItem.raw_item_description,
          quantity: confirmation.quantity || receiptItem.quantity,
          purchase_date: receipt.transaction_date || receipt.created_at.split('T')[0],
          expiry_date: confirmation.expiry_date,
          source: 'receipt_scan',
          location: confirmation.location,
        },
        receiptLogger,
      );
      // Update receipt item to mark as added to pantry
      await receiptRepo.updateReceiptItem(
        confirmation.receipt_item_id,
        {
          added_to_pantry: true,
          pantry_item_id: inventoryItem.inventory_id,
        },
        receiptLogger,
      );
      createdItems.push(inventoryItem);
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));
      receiptLogger.error(
        { err, receiptItemId: confirmation.receipt_item_id },
        'Error adding receipt item to inventory',
      );
    }
  }
  receiptLogger.info({ createdCount: createdItems.length }, 'Items added from receipt');
  return createdItems;
};
/**
 * Gets recipe suggestions based on expiring items.
 * Prioritizes recipes that use items closest to expiry.
 * @param userId The user's ID
 * @param daysAhead Number of days to look ahead for expiring items
 * @param logger Pino logger instance
 * @param options Pagination options
 * @returns Recipes with matching expiring ingredients, plus the full set of
 *          expiring items that were considered
 */
export const getRecipeSuggestionsForExpiringItems = async (
  userId: string,
  daysAhead: number,
  logger: Logger,
  options: { limit?: number; offset?: number } = {},
): Promise<{
  recipes: Array<{
    recipe_id: number;
    recipe_name: string;
    description: string | null;
    prep_time_minutes: number | null;
    cook_time_minutes: number | null;
    servings: number | null;
    photo_url: string | null;
    matching_items: UserInventoryItem[];
    match_count: number;
  }>;
  total: number;
  considered_items: UserInventoryItem[];
}> => {
  const { limit = 10, offset = 0 } = options;
  const suggestionLogger = logger.child({ userId, daysAhead });
  suggestionLogger.debug('Fetching recipe suggestions for expiring items');
  // Get expiring items to include in the response. Use the child logger here
  // for consistency with the rest of this function (was the parent logger).
  const expiringItems = await getExpiringItems(userId, daysAhead, suggestionLogger);
  if (expiringItems.length === 0) {
    suggestionLogger.debug('No expiring items found, returning empty suggestions');
    return {
      recipes: [],
      total: 0,
      considered_items: [],
    };
  }
  // Get recipes that use the expiring items
  const recipeData = await expiryRepo.getRecipesForExpiringItems(
    userId,
    daysAhead,
    limit,
    offset,
    suggestionLogger,
  );
  // Map the expiring items by master_item_id for quick lookup; the first item
  // wins when several inventory rows share the same master item.
  const itemsByMasterId = new Map<number, UserInventoryItem>();
  for (const item of expiringItems) {
    if (item.master_item_id && !itemsByMasterId.has(item.master_item_id)) {
      itemsByMasterId.set(item.master_item_id, item);
    }
  }
  // Build the response, attaching the inventory items each recipe matches.
  const recipes = recipeData.recipes.map((recipe) => ({
    recipe_id: recipe.recipe_id,
    recipe_name: recipe.recipe_name,
    description: recipe.description,
    prep_time_minutes: recipe.prep_time_minutes,
    cook_time_minutes: recipe.cook_time_minutes,
    servings: recipe.servings,
    photo_url: recipe.photo_url,
    matching_items: recipe.matching_master_item_ids
      .map((id) => itemsByMasterId.get(id))
      .filter((item): item is UserInventoryItem => item !== undefined),
    match_count: recipe.match_count,
  }));
  suggestionLogger.info(
    {
      recipeCount: recipes.length,
      total: recipeData.total,
      expiringItemCount: expiringItems.length,
    },
    'Recipe suggestions fetched for expiring items',
  );
  return {
    recipes,
    total: recipeData.total,
    considered_items: expiringItems,
  };
};
// ============================================================================
// JOB PROCESSING
// ============================================================================
import type { Job } from 'bullmq';
import type { ExpiryAlertJobData } from '../types/job-data';
import * as emailService from './emailService.server';
/**
 * Processes an expiry alert job from the queue.
 * Main entry point for background expiry alert processing: dispatches to the
 * single-user path or the daily all-users sweep based on the job payload.
 * @param job The BullMQ job
 * @param logger Pino logger instance
 * @returns Processing result with counts of alerts sent and users notified
 * @throws Re-throws any processing error so the queue can retry the job
 */
export const processExpiryAlertJob = async (
  job: Job<ExpiryAlertJobData>,
  logger: Logger,
): Promise<{ success: boolean; alertsSent: number; usersNotified: number }> => {
  const {
    alertType,
    userId,
    daysAhead = DEFAULT_EXPIRY_WARNING_DAYS,
    scheduledAt: _scheduledAt,
  } = job.data;
  const jobLogger = logger.child({
    jobId: job.id,
    alertType,
    userId,
    daysAhead,
    requestId: job.data.meta?.requestId,
  });
  jobLogger.info('Starting expiry alert job');
  try {
    let alertsSent = 0;
    let usersNotified = 0;
    if (alertType === 'user_specific' && userId) {
      // Single-user path: the user counts as notified iff any alert went out.
      const outcome = await processUserExpiryAlerts(userId, daysAhead, jobLogger);
      alertsSent = outcome.alertsSent;
      usersNotified = outcome.alertsSent > 0 ? 1 : 0;
    } else if (alertType === 'daily_check') {
      // Daily sweep over every user with expiring items.
      const outcome = await processDailyExpiryAlerts(daysAhead, jobLogger);
      alertsSent = outcome.totalAlerts;
      usersNotified = outcome.usersNotified;
    }
    jobLogger.info({ alertsSent, usersNotified }, 'Expiry alert job completed');
    return { success: true, alertsSent, usersNotified };
  } catch (error) {
    jobLogger.error({ err: error }, 'Expiry alert job failed');
    throw error;
  }
};
/**
 * Processes expiry alerts for a single user.
 * Looks up the user's enabled alert settings, fetches their expiring items,
 * logs an alert for every item that falls within a setting's threshold, and
 * refreshes that setting via upsert after sending.
 * @param userId The user's ID
 * @param daysAhead Days ahead to check for expiring items
 * @param logger Pino logger instance
 * @returns Number of alerts sent
 */
const processUserExpiryAlerts = async (
  userId: string,
  daysAhead: number,
  logger: Logger,
): Promise<{ alertsSent: number }> => {
  const userLogger = logger.child({ userId });

  // Only settings the user has explicitly enabled are actionable.
  const allSettings = await expiryRepo.getUserAlertSettings(userId, userLogger);
  const activeSettings = allSettings.filter((setting) => setting.is_enabled);
  if (activeSettings.length === 0) {
    userLogger.debug('No enabled alert settings for user');
    return { alertsSent: 0 };
  }

  const expiringItems = await getExpiringItems(userId, daysAhead, userLogger);
  if (expiringItems.length === 0) {
    userLogger.debug('No expiring items for user');
    return { alertsSent: 0 };
  }

  // Grouped by urgency (kept for future use in alert formatting).
  const _expiredItems = expiringItems.filter((i) => i.expiry_status === 'expired');
  const _soonItems = expiringItems.filter((i) => i.expiry_status === 'expiring_soon');

  let alertsSent = 0;
  for (const setting of activeSettings) {
    // An item is relevant to this setting when its remaining shelf life is
    // inside the setting's configured look-ahead window.
    const itemsWithinThreshold = expiringItems.filter(
      (item) =>
        item.days_until_expiry !== null && item.days_until_expiry <= setting.days_before_expiry,
    );
    if (itemsWithinThreshold.length === 0) {
      continue;
    }
    for (const item of itemsWithinThreshold) {
      const alertType: ExpiryAlertType =
        item.expiry_status === 'expired' ? 'expired' : 'expiring_soon';
      await expiryRepo.logAlert(
        userId,
        alertType,
        setting.alert_method,
        item.item_name,
        userLogger,
        {
          pantryItemId: item.inventory_id,
          expiryDate: item.expiry_date || null,
          daysUntilExpiry: item.days_until_expiry,
        },
      );
      alertsSent++;
    }
    // Touch the setting via upsert so its last-alert timestamp is refreshed.
    await expiryRepo.upsertAlertSettings(userId, setting.alert_method, {}, userLogger);
  }
  userLogger.info({ alertsSent, itemCount: expiringItems.length }, 'Processed user expiry alerts');
  return { alertsSent };
};
/**
 * Processes daily expiry alerts for all users.
 * Fans out to processUserExpiryAlerts for every distinct user that currently
 * has items expiring; a failure for one user never aborts the run.
 * @param daysAhead Days ahead to check for expiring items
 * @param logger Pino logger instance
 * @returns Total alerts and users notified
 */
const processDailyExpiryAlerts = async (
  daysAhead: number,
  logger: Logger,
): Promise<{ totalAlerts: number; usersNotified: number }> => {
  // Repository returns one row per expiring item, so deduplicate user IDs.
  const rows = await expiryRepo.getUsersWithExpiringItems(logger);
  const distinctUserIds = [...new Set(rows.map((row) => row.user_id))];

  let totalAlerts = 0;
  let usersNotified = 0;
  for (const userId of distinctUserIds) {
    try {
      const { alertsSent } = await processUserExpiryAlerts(userId, daysAhead, logger);
      totalAlerts += alertsSent;
      if (alertsSent > 0) {
        usersNotified += 1;
      }
    } catch (error) {
      // Log and keep going: one user's failure must not stop the daily sweep.
      logger.error({ err: error, userId }, 'Failed to process alerts for user');
    }
  }
  logger.info(
    { totalAlerts, usersNotified, totalUsers: distinctUserIds.length },
    'Daily expiry alert processing complete',
  );
  return { totalAlerts, usersNotified };
};

View File

@@ -12,6 +12,14 @@ const mocks = vi.hoisted(() => ({
readdir: vi.fn(),
execAsync: vi.fn(),
mockAdminLogActivity: vi.fn(),
// Shared mock logger for verifying calls
sharedMockLogger: {
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
child: vi.fn().mockReturnThis(),
},
}));
// 2. Mock modules using the hoisted variables
@@ -68,14 +76,10 @@ vi.mock('./db/admin.db', () => ({
return { logActivity: mocks.mockAdminLogActivity };
}),
}));
// Use the hoisted shared mock logger instance so tests can verify calls
vi.mock('./logger.server', () => ({
logger: {
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
child: vi.fn().mockReturnThis(),
},
logger: mocks.sharedMockLogger,
createScopedLogger: vi.fn(() => mocks.sharedMockLogger),
}));
vi.mock('./flyerFileHandler.server');
vi.mock('./flyerAiProcessor.server');

View File

@@ -13,10 +13,12 @@ import {
AiDataValidationError,
} from './processingErrors';
import { NotFoundError } from './db/errors.db';
import { logger as globalLogger } from './logger.server'; // This was a duplicate, fixed.
import { createScopedLogger } from './logger.server';
import { generateFlyerIcon } from '../utils/imageProcessor';
import type { FlyerPersistenceService } from './flyerPersistenceService.server';
const globalLogger = createScopedLogger('flyer-processing-service');
// Define ProcessingStage locally as it's not exported from the types file.
export type ProcessingStage = {
name: string;
@@ -75,8 +77,20 @@ export class FlyerProcessingService {
* @returns An object containing the ID of the newly created flyer.
*/
async processJob(job: Job<FlyerJobData>): Promise<{ flyerId: number }> {
// Extract context metadata (ADR-051) for request tracing
const { meta, ...jobDataWithoutMeta } = job.data;
// Create a logger instance with job-specific context for better traceability.
const logger = globalLogger.child({ jobId: job.id, jobName: job.name, ...job.data });
// Uses request_id from the original API request if available (ADR-051).
const logger = globalLogger.child({
jobId: job.id,
jobName: job.name,
request_id: meta?.requestId, // Propagate original request ID
user_id: meta?.userId,
origin: meta?.origin || 'unknown',
service: 'flyer-worker',
...jobDataWithoutMeta,
});
logger.info('Picked up flyer processing job.');
const stages: ProcessingStage[] = [

View File

@@ -41,3 +41,15 @@ export const logger = pino({
censor: '[REDACTED]',
},
});
const debugModules = (process.env.DEBUG_MODULES || '').split(',').map((s) => s.trim());
export const createScopedLogger = (moduleName: string) => {
// If DEBUG_MODULES contains "ai-service" or "*", force level to 'debug'
const isDebugEnabled = debugModules.includes('*') || debugModules.includes(moduleName);
return logger.child({
module: moduleName,
level: isDebugEnabled ? 'debug' : logger.level,
});
};

View File

@@ -13,7 +13,14 @@ const mocks = vi.hoisted(() => {
const createMockQueue = (name: string) => ({
name,
getJobCounts: vi.fn().mockResolvedValue({}),
getJobCounts: vi.fn().mockResolvedValue({
waiting: 0,
active: 0,
completed: 0,
failed: 0,
delayed: 0,
paused: 0,
}),
getJob: vi.fn(),
});
@@ -23,22 +30,25 @@ const mocks = vi.hoisted(() => {
analyticsWorker: createMockWorker('analytics-reporting'),
cleanupWorker: createMockWorker('file-cleanup'),
weeklyAnalyticsWorker: createMockWorker('weekly-analytics-reporting'),
tokenCleanupWorker: createMockWorker('token-cleanup'),
flyerQueue: createMockQueue('flyer-processing'),
emailQueue: createMockQueue('email-sending'),
analyticsQueue: createMockQueue('analytics-reporting'),
cleanupQueue: createMockQueue('file-cleanup'),
weeklyAnalyticsQueue: createMockQueue('weekly-analytics-reporting'),
tokenCleanupQueue: createMockQueue('token-cleanup'),
};
});
// --- Mock Modules ---
vi.mock('./queueService.server', () => ({
vi.mock('./queues.server', () => ({
flyerQueue: mocks.flyerQueue,
emailQueue: mocks.emailQueue,
analyticsQueue: mocks.analyticsQueue,
cleanupQueue: mocks.cleanupQueue,
weeklyAnalyticsQueue: mocks.weeklyAnalyticsQueue,
tokenCleanupQueue: mocks.tokenCleanupQueue,
}));
vi.mock('./workers.server', () => ({
@@ -47,6 +57,8 @@ vi.mock('./workers.server', () => ({
analyticsWorker: mocks.analyticsWorker,
cleanupWorker: mocks.cleanupWorker,
weeklyAnalyticsWorker: mocks.weeklyAnalyticsWorker,
tokenCleanupWorker: mocks.tokenCleanupWorker,
flyerProcessingService: {},
}));
vi.mock('./db/errors.db', () => ({
@@ -96,6 +108,7 @@ describe('MonitoringService', () => {
{ name: 'analytics-reporting', isRunning: true },
{ name: 'file-cleanup', isRunning: true },
{ name: 'weekly-analytics-reporting', isRunning: true },
{ name: 'token-cleanup', isRunning: true },
]);
expect(mocks.flyerWorker.isRunning).toHaveBeenCalledTimes(1);
expect(mocks.emailWorker.isRunning).toHaveBeenCalledTimes(1);
@@ -104,9 +117,22 @@ describe('MonitoringService', () => {
describe('getQueueStatuses', () => {
it('should return job counts for all queues', async () => {
// Arrange
mocks.flyerQueue.getJobCounts.mockResolvedValue({ active: 1, failed: 2 });
mocks.emailQueue.getJobCounts.mockResolvedValue({ completed: 10, waiting: 5 });
const defaultCounts = {
waiting: 0,
active: 0,
completed: 0,
failed: 0,
delayed: 0,
paused: 0,
};
// Arrange - override specific queue counts
mocks.flyerQueue.getJobCounts.mockResolvedValue({ ...defaultCounts, active: 1, failed: 2 });
mocks.emailQueue.getJobCounts.mockResolvedValue({
...defaultCounts,
completed: 10,
waiting: 5,
});
// Act
const statuses = await monitoringService.getQueueStatuses();
@@ -114,11 +140,12 @@ describe('MonitoringService', () => {
// Assert
expect(statuses).toEqual(
expect.arrayContaining([
{ name: 'flyer-processing', counts: { active: 1, failed: 2 } },
{ name: 'email-sending', counts: { completed: 10, waiting: 5 } },
{ name: 'analytics-reporting', counts: {} },
{ name: 'file-cleanup', counts: {} },
{ name: 'weekly-analytics-reporting', counts: {} },
{ name: 'flyer-processing', counts: { ...defaultCounts, active: 1, failed: 2 } },
{ name: 'email-sending', counts: { ...defaultCounts, completed: 10, waiting: 5 } },
{ name: 'analytics-reporting', counts: defaultCounts },
{ name: 'file-cleanup', counts: defaultCounts },
{ name: 'weekly-analytics-reporting', counts: defaultCounts },
{ name: 'token-cleanup', counts: defaultCounts },
]),
);
expect(mocks.flyerQueue.getJobCounts).toHaveBeenCalledTimes(1);

View File

@@ -5,13 +5,15 @@ import {
analyticsQueue,
cleanupQueue,
weeklyAnalyticsQueue,
} from './queueService.server';
tokenCleanupQueue,
} from './queues.server';
import {
analyticsWorker,
cleanupWorker,
emailWorker,
flyerWorker,
weeklyAnalyticsWorker,
tokenCleanupWorker,
flyerProcessingService,
} from './workers.server';
import type { Queue } from 'bullmq';
@@ -35,6 +37,7 @@ class MonitoringService {
analyticsWorker,
cleanupWorker,
weeklyAnalyticsWorker,
tokenCleanupWorker,
];
return Promise.all(
workers.map(async (worker) => ({
@@ -49,7 +52,14 @@ class MonitoringService {
* @returns A promise that resolves to an array of queue statuses.
*/
async getQueueStatuses() {
const queues = [flyerQueue, emailQueue, analyticsQueue, cleanupQueue, weeklyAnalyticsQueue];
const queues = [
flyerQueue,
emailQueue,
analyticsQueue,
cleanupQueue,
weeklyAnalyticsQueue,
tokenCleanupQueue,
];
return Promise.all(
queues.map(async (queue) => ({
name: queue.name,
@@ -77,7 +87,8 @@ class MonitoringService {
'email-sending': emailQueue,
'analytics-reporting': analyticsQueue,
'file-cleanup': cleanupQueue,
'weekly-analytics-reporting': weeklyAnalyticsQueue, // This was a duplicate, fixed.
'weekly-analytics-reporting': weeklyAnalyticsQueue,
'token-cleanup': tokenCleanupQueue,
};
const queue = queueMap[queueName];

View File

@@ -56,22 +56,58 @@ vi.mock('bullmq', () => ({
UnrecoverableError: class UnrecoverableError extends Error {},
}));
vi.mock('./logger.server', () => ({
logger: {
vi.mock('./logger.server', () => {
// Mock logger factory that returns a new mock logger instance
const createMockLogger = () => ({
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(), // This was a duplicate, fixed.
warn: vi.fn(),
debug: vi.fn(),
child: vi.fn().mockReturnThis(),
trace: vi.fn(),
fatal: vi.fn(),
});
return {
logger: {
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
child: vi.fn().mockReturnThis(),
},
// createScopedLogger is used by aiService.server and other services
createScopedLogger: vi.fn(() => createMockLogger()),
};
});
// Mock the config/env module to prevent env parsing during tests
vi.mock('../config/env', () => ({
config: {
database: { host: 'localhost', port: 5432, user: 'test', password: 'test', name: 'test' },
redis: { url: 'redis://localhost:6379' },
auth: { jwtSecret: 'test-secret' },
server: { port: 3000, host: 'localhost' },
},
isAiConfigured: vi.fn().mockReturnValue(false),
parseConfig: vi.fn(),
}));
// Mock other dependencies that are not the focus of this test file.
vi.mock('./aiService.server');
vi.mock('./emailService.server');
vi.mock('./db/index.db'); // This was a duplicate, fixed.
vi.mock('./db/index.db');
vi.mock('./db/connection.db');
vi.mock('./flyerProcessingService.server');
vi.mock('./flyerDataTransformer');
vi.mock('./flyerAiProcessor.server');
vi.mock('./flyerPersistenceService.server');
vi.mock('./flyerFileHandler.server');
vi.mock('./analyticsService.server');
vi.mock('./userService');
vi.mock('./receiptService.server');
vi.mock('./expiryService.server');
vi.mock('./barcodeService.server');
describe('Worker Service Lifecycle', () => {
let gracefulShutdown: (signal: string) => Promise<void>; // This was a duplicate, fixed.
@@ -229,9 +265,7 @@ describe('Worker Service Lifecycle', () => {
expect(mockRedisConnection.quit).toHaveBeenCalledTimes(1);
// Check for the correct success log message from workers.server.ts
expect(mockLogger.info).toHaveBeenCalledWith(
'[Shutdown] All resources closed successfully.',
);
expect(mockLogger.info).toHaveBeenCalledWith('[Shutdown] All resources closed successfully.');
expect(processExitSpy).toHaveBeenCalledWith(0);
});

View File

@@ -8,6 +8,9 @@ import {
weeklyAnalyticsQueue,
cleanupQueue,
tokenCleanupQueue,
receiptQueue,
expiryAlertQueue,
barcodeQueue,
} from './queues.server';
// Re-export everything for backward compatibility where possible
@@ -33,6 +36,9 @@ export const gracefulShutdown = async (signal: string) => {
{ name: 'cleanupQueue', close: () => cleanupQueue.close() },
{ name: 'weeklyAnalyticsQueue', close: () => weeklyAnalyticsQueue.close() },
{ name: 'tokenCleanupQueue', close: () => tokenCleanupQueue.close() },
{ name: 'receiptQueue', close: () => receiptQueue.close() },
{ name: 'expiryAlertQueue', close: () => expiryAlertQueue.close() },
{ name: 'barcodeQueue', close: () => barcodeQueue.close() },
{ name: 'redisConnection', close: () => connection.quit() },
];

View File

@@ -16,6 +16,9 @@ const mocks = vi.hoisted(() => {
weeklyAnalyticsQueue: createMockQueue('weekly-analytics-reporting'),
cleanupQueue: createMockQueue('file-cleanup'),
tokenCleanupQueue: createMockQueue('token-cleanup'),
receiptQueue: createMockQueue('receipt-processing'),
expiryAlertQueue: createMockQueue('expiry-alerts'),
barcodeQueue: createMockQueue('barcode-detection'),
redisConnection: {
quit: vi.fn().mockResolvedValue('OK'),
},
@@ -36,6 +39,9 @@ vi.mock('./queues.server', () => ({
weeklyAnalyticsQueue: mocks.weeklyAnalyticsQueue,
cleanupQueue: mocks.cleanupQueue,
tokenCleanupQueue: mocks.tokenCleanupQueue,
receiptQueue: mocks.receiptQueue,
expiryAlertQueue: mocks.expiryAlertQueue,
barcodeQueue: mocks.barcodeQueue,
}));
vi.mock('./redis.server', () => ({
@@ -76,6 +82,9 @@ describe('Queue Service (API Shutdown)', () => {
expect(mocks.cleanupQueue.close).toHaveBeenCalledTimes(1);
expect(mocks.weeklyAnalyticsQueue.close).toHaveBeenCalledTimes(1);
expect(mocks.tokenCleanupQueue.close).toHaveBeenCalledTimes(1);
expect(mocks.receiptQueue.close).toHaveBeenCalledTimes(1);
expect(mocks.expiryAlertQueue.close).toHaveBeenCalledTimes(1);
expect(mocks.barcodeQueue.close).toHaveBeenCalledTimes(1);
expect(mocks.redisConnection.quit).toHaveBeenCalledTimes(1);
});
@@ -98,7 +107,9 @@ describe('Queue Service (API Shutdown)', () => {
{ err: closeError, resource: 'emailQueue' },
'[Shutdown] Error closing resource.',
);
expect(mocks.logger.warn).toHaveBeenCalledWith('[Shutdown] Graceful shutdown completed with errors.');
expect(mocks.logger.warn).toHaveBeenCalledWith(
'[Shutdown] Graceful shutdown completed with errors.',
);
expect(processExitSpy).toHaveBeenCalledWith(1);
});
@@ -112,7 +123,9 @@ describe('Queue Service (API Shutdown)', () => {
{ err: redisError, resource: 'redisConnection' },
'[Shutdown] Error closing resource.',
);
expect(mocks.logger.warn).toHaveBeenCalledWith('[Shutdown] Graceful shutdown completed with errors.');
expect(mocks.logger.warn).toHaveBeenCalledWith(
'[Shutdown] Graceful shutdown completed with errors.',
);
expect(processExitSpy).toHaveBeenCalledWith(1);
});
});
});

View File

@@ -112,8 +112,50 @@ describe('Queue Definitions', () => {
});
});
it('should create exactly 6 queues', () => {
it('should create receiptQueue with the correct name and options', () => {
expect(mocks.MockQueue).toHaveBeenCalledWith('receipt-processing', {
connection: mocks.mockConnection,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'exponential',
delay: 10000,
},
removeOnComplete: 100,
removeOnFail: 50,
},
});
});
it('should create expiryAlertQueue with the correct name and options', () => {
expect(mocks.MockQueue).toHaveBeenCalledWith('expiry-alerts', {
connection: mocks.mockConnection,
defaultJobOptions: {
attempts: 2,
backoff: { type: 'exponential', delay: 300000 },
removeOnComplete: true,
removeOnFail: 20,
},
});
});
it('should create barcodeQueue with the correct name and options', () => {
expect(mocks.MockQueue).toHaveBeenCalledWith('barcode-detection', {
connection: mocks.mockConnection,
defaultJobOptions: {
attempts: 2,
backoff: {
type: 'exponential',
delay: 5000,
},
removeOnComplete: 50,
removeOnFail: 20,
},
});
});
it('should create exactly 9 queues', () => {
// This is a good sanity check to ensure no new queues were added without tests.
expect(mocks.MockQueue).toHaveBeenCalledTimes(6);
expect(mocks.MockQueue).toHaveBeenCalledTimes(9);
});
});

View File

@@ -7,6 +7,9 @@ import type {
WeeklyAnalyticsJobData,
CleanupJobData,
TokenCleanupJobData,
ReceiptJobData,
ExpiryAlertJobData,
BarcodeDetectionJobData,
} from '../types/job-data';
// --- Queues ---
@@ -46,15 +49,18 @@ export const analyticsQueue = new Queue<AnalyticsJobData>('analytics-reporting',
},
});
export const weeklyAnalyticsQueue = new Queue<WeeklyAnalyticsJobData>('weekly-analytics-reporting', {
connection,
defaultJobOptions: {
attempts: 2,
backoff: { type: 'exponential', delay: 3600000 },
removeOnComplete: true,
removeOnFail: 50,
export const weeklyAnalyticsQueue = new Queue<WeeklyAnalyticsJobData>(
'weekly-analytics-reporting',
{
connection,
defaultJobOptions: {
attempts: 2,
backoff: { type: 'exponential', delay: 3600000 },
removeOnComplete: true,
removeOnFail: 50,
},
},
});
);
export const cleanupQueue = new Queue<CleanupJobData>('file-cleanup', {
connection,
@@ -73,4 +79,43 @@ export const tokenCleanupQueue = new Queue<TokenCleanupJobData>('token-cleanup',
removeOnComplete: true,
removeOnFail: 10,
},
});
});
// --- Receipt Processing Queue ---
export const receiptQueue = new Queue<ReceiptJobData>('receipt-processing', {
connection,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'exponential',
delay: 10000, // 10 seconds initial delay
},
removeOnComplete: 100, // Keep last 100 completed jobs
removeOnFail: 50,
},
});
// --- Expiry Alert Queue ---
export const expiryAlertQueue = new Queue<ExpiryAlertJobData>('expiry-alerts', {
connection,
defaultJobOptions: {
attempts: 2,
backoff: { type: 'exponential', delay: 300000 }, // 5 minutes
removeOnComplete: true,
removeOnFail: 20,
},
});
// --- Barcode Detection Queue ---
export const barcodeQueue = new Queue<BarcodeDetectionJobData>('barcode-detection', {
connection,
defaultJobOptions: {
attempts: 2,
backoff: {
type: 'exponential',
delay: 5000,
},
removeOnComplete: 50,
removeOnFail: 20,
},
});

View File

@@ -0,0 +1,791 @@
// src/services/receiptService.server.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Logger } from 'pino';
import type { Job } from 'bullmq';
import type { ReceiptJobData } from '../types/job-data';
import { createMockLogger } from '../tests/utils/mockLogger';
import type {
ReceiptStatus,
ReceiptItemStatus,
ReceiptProcessingStep,
ReceiptProcessingStatus,
OcrProvider,
ReceiptProcessingLogRecord,
} from '../types/expiry';
// Mock dependencies
// The receipt repository is fully stubbed so service logic can be exercised
// without a database; individual tests install per-call return values.
vi.mock('./db/index.db', () => ({
  receiptRepo: {
    createReceipt: vi.fn(),
    getReceiptById: vi.fn(),
    getReceipts: vi.fn(),
    updateReceipt: vi.fn(),
    deleteReceipt: vi.fn(),
    logProcessingStep: vi.fn(),
    detectStoreFromText: vi.fn(),
    addReceiptItems: vi.fn(),
    incrementRetryCount: vi.fn(),
    getReceiptItems: vi.fn(),
    updateReceiptItem: vi.fn(),
    getUnaddedReceiptItems: vi.fn(),
    getProcessingLogs: vi.fn(),
    getProcessingStats: vi.fn(),
    getReceiptsNeedingProcessing: vi.fn(),
    addStorePattern: vi.fn(),
    getActiveStorePatterns: vi.fn(),
  },
}));
// AI is disabled so processReceipt should follow its no-AI fallback path.
// NOTE(review): isAiConfigured is mocked as a plain boolean here, while other
// test files in this changeset mock it as vi.fn().mockReturnValue(false) —
// confirm which shape ../config/env actually exports.
vi.mock('../config/env', () => ({
  isAiConfigured: false,
  config: {
    gemini: {
      apiKey: undefined,
    },
  },
}));
vi.mock('./aiService.server', () => ({
  aiService: {
    extractItemsFromReceiptImage: vi.fn(),
  },
}));
// Silent logger; child() returns the same object so chained child loggers work.
vi.mock('./logger.server', () => ({
  logger: {
    child: vi.fn().mockReturnThis(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
  },
}));
// Only fs.access is stubbed — presumably the service checks that the receipt
// image exists on disk; verify against receiptService.server.
vi.mock('node:fs/promises', () => ({
  default: {
    access: vi.fn(),
  },
}));
// Import after mocks are set up
import {
createReceipt,
getReceiptById,
getReceipts,
deleteReceipt,
processReceipt,
getReceiptItems,
updateReceiptItem,
getUnaddedItems,
getProcessingLogs,
getProcessingStats,
getReceiptsNeedingProcessing,
addStorePattern,
getActiveStorePatterns,
processReceiptJob,
} from './receiptService.server';
import { receiptRepo } from './db/index.db';
// Builds a fully-populated ReceiptProcessingLogRecord for tests.
// Defaults model a completed 'upload' step with no provider/cost data; pass
// overrides to customize individual fields.
function createMockProcessingLogRecord(
  overrides: Partial<ReceiptProcessingLogRecord> = {},
): ReceiptProcessingLogRecord {
  const defaults: ReceiptProcessingLogRecord = {
    log_id: 1,
    receipt_id: 1,
    processing_step: 'upload' as ReceiptProcessingStep,
    status: 'completed' as ReceiptProcessingStatus,
    provider: null,
    duration_ms: null,
    tokens_used: null,
    cost_cents: null,
    input_data: null,
    output_data: null,
    error_message: null,
    created_at: new Date().toISOString(),
  };
  return { ...defaults, ...overrides };
}
// Helper for building store_receipt_patterns rows in tests.
interface StoreReceiptPatternRow {
  pattern_id: number;
  store_id: number;
  pattern_type: string;
  pattern_value: string;
  priority: number;
  is_active: boolean;
  created_at: string;
  updated_at: string;
}

// Returns a default active 'WALMART' name-pattern row; any field can be
// customized via overrides.
function createMockStorePatternRow(
  overrides: Partial<StoreReceiptPatternRow> = {},
): StoreReceiptPatternRow {
  const defaults: StoreReceiptPatternRow = {
    pattern_id: 1,
    store_id: 1,
    pattern_type: 'name',
    pattern_value: 'WALMART',
    priority: 0,
    is_active: true,
    created_at: new Date().toISOString(),
    updated_at: new Date().toISOString(),
  };
  return { ...defaults, ...overrides };
}
describe('receiptService.server', () => {
let mockLogger: Logger;
beforeEach(() => {
vi.clearAllMocks();
mockLogger = createMockLogger();
});
afterEach(() => {
vi.resetAllMocks();
});
// Creation path: a new receipt row is persisted and an 'upload' processing
// step is logged against it.
describe('createReceipt', () => {
  it('should create a new receipt and log upload step', async () => {
    // Fresh, unprocessed receipt row as the repository would return it.
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: null,
      total_amount_cents: null,
      status: 'pending' as ReceiptStatus,
      raw_text: null,
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: null,
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.createReceipt).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptRepo.logProcessingStep).mockResolvedValueOnce(
      createMockProcessingLogRecord(),
    );
    const result = await createReceipt('user-1', '/uploads/receipt.jpg', mockLogger);
    expect(result.receipt_id).toBe(1);
    // Optional fields are forwarded as undefined when the caller omits them.
    expect(receiptRepo.createReceipt).toHaveBeenCalledWith(
      {
        user_id: 'user-1',
        receipt_image_url: '/uploads/receipt.jpg',
        store_id: undefined,
        transaction_date: undefined,
      },
      mockLogger,
    );
    // The 'upload' step is recorded as completed for the new receipt.
    expect(receiptRepo.logProcessingStep).toHaveBeenCalledWith(
      1,
      'upload',
      'completed',
      mockLogger,
      expect.any(Object),
    );
  });
  it('should create receipt with optional store ID and transaction date', async () => {
    const mockReceipt = {
      receipt_id: 2,
      user_id: 'user-1',
      store_id: 5,
      receipt_image_url: '/uploads/receipt2.jpg',
      transaction_date: '2024-01-15',
      total_amount_cents: null,
      status: 'pending' as ReceiptStatus,
      raw_text: null,
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: null,
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.createReceipt).mockResolvedValueOnce(mockReceipt);
    vi.mocked(receiptRepo.logProcessingStep).mockResolvedValueOnce(
      createMockProcessingLogRecord(),
    );
    const result = await createReceipt('user-1', '/uploads/receipt2.jpg', mockLogger, {
      storeId: 5,
      transactionDate: '2024-01-15',
    });
    expect(result.store_id).toBe(5);
    expect(result.transaction_date).toBe('2024-01-15');
  });
});
// Read path: a single receipt is fetched, scoped to the owning user.
describe('getReceiptById', () => {
  it('should return receipt by ID', async () => {
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: null,
      total_amount_cents: null,
      status: 'pending' as ReceiptStatus,
      raw_text: null,
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: null,
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
    const result = await getReceiptById(1, 'user-1', mockLogger);
    expect(result.receipt_id).toBe(1);
    // The user ID must be passed through for the ownership check.
    expect(receiptRepo.getReceiptById).toHaveBeenCalledWith(1, 'user-1', mockLogger);
  });
});
// List path: paginated receipts for a user, with optional status filtering.
describe('getReceipts', () => {
  it('should return paginated receipts for user', async () => {
    const mockReceipts = {
      receipts: [
        {
          receipt_id: 1,
          user_id: 'user-1',
          store_id: null,
          receipt_image_url: '/uploads/receipt1.jpg',
          transaction_date: null,
          total_amount_cents: null,
          status: 'completed' as ReceiptStatus,
          raw_text: null,
          store_confidence: null,
          ocr_provider: null,
          error_details: null,
          retry_count: 0,
          ocr_confidence: null,
          currency: 'USD',
          created_at: new Date().toISOString(),
          processed_at: new Date().toISOString(),
          updated_at: new Date().toISOString(),
        },
      ],
      total: 1,
    };
    vi.mocked(receiptRepo.getReceipts).mockResolvedValueOnce(mockReceipts);
    const result = await getReceipts({ user_id: 'user-1', limit: 10, offset: 0 }, mockLogger);
    expect(result.receipts).toHaveLength(1);
    expect(result.total).toBe(1);
  });
  it('should filter by status', async () => {
    vi.mocked(receiptRepo.getReceipts).mockResolvedValueOnce({ receipts: [], total: 0 });
    await getReceipts({ user_id: 'user-1', status: 'completed' }, mockLogger);
    // The status filter is forwarded unchanged to the repository.
    expect(receiptRepo.getReceipts).toHaveBeenCalledWith(
      { user_id: 'user-1', status: 'completed' },
      mockLogger,
    );
  });
});
// Delete path: the service delegates straight to the repository with the
// receipt ID and owning user.
describe('deleteReceipt', () => {
  it('should delete receipt', async () => {
    vi.mocked(receiptRepo.deleteReceipt).mockResolvedValueOnce(undefined);
    await deleteReceipt(1, 'user-1', mockLogger);
    expect(receiptRepo.deleteReceipt).toHaveBeenCalledWith(1, 'user-1', mockLogger);
  });
});
// Processing pipeline: status transitions, store detection, and error/retry
// handling. AI is mocked off (see the ../config/env mock above), so these
// exercise the no-AI fallback path.
describe('processReceipt', () => {
  it('should process receipt and return items when AI not configured', async () => {
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: null,
      total_amount_cents: null,
      status: 'pending' as ReceiptStatus,
      raw_text: null,
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: null,
      updated_at: new Date().toISOString(),
    };
    const mockUpdatedReceipt = { ...mockReceipt, status: 'processing' };
    const mockCompletedReceipt = { ...mockReceipt, status: 'completed' };
    // Ordering matters: updateReceipt is expected to be called three times,
    // once per pipeline stage (processing -> raw_text -> completed).
    vi.mocked(receiptRepo.updateReceipt)
      .mockResolvedValueOnce(mockUpdatedReceipt as any) // status: processing
      .mockResolvedValueOnce({ ...mockUpdatedReceipt, raw_text: '[AI not configured]' } as any) // raw_text update
      .mockResolvedValueOnce(mockCompletedReceipt as any); // status: completed
    vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
    vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
    vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
    const result = await processReceipt(1, mockLogger);
    expect(result.receipt.status).toBe('completed');
    expect(receiptRepo.updateReceipt).toHaveBeenCalledWith(
      1,
      { status: 'processing' },
      expect.any(Object),
    );
  });
  it('should detect store from receipt text', async () => {
    const mockReceipt = {
      receipt_id: 2,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: null,
      total_amount_cents: null,
      status: 'pending' as ReceiptStatus,
      raw_text: null,
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: null,
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
      ...mockReceipt,
      status: 'completed' as ReceiptStatus,
    } as any);
    vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
    // A confident store match should be written back onto the receipt.
    vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce({
      store_id: 10,
      confidence: 0.9,
    });
    vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
    await processReceipt(2, mockLogger);
    expect(receiptRepo.updateReceipt).toHaveBeenCalledWith(
      2,
      expect.objectContaining({ store_id: 10, store_confidence: 0.9 }),
      expect.any(Object),
    );
  });
  it('should handle processing errors', async () => {
    // First updateReceipt call fails; the service must bump the retry count
    // and propagate the original error.
    vi.mocked(receiptRepo.updateReceipt).mockRejectedValueOnce(new Error('DB error'));
    vi.mocked(receiptRepo.incrementRetryCount).mockResolvedValueOnce(1);
    vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
    await expect(processReceipt(1, mockLogger)).rejects.toThrow('DB error');
    expect(receiptRepo.incrementRetryCount).toHaveBeenCalledWith(1, expect.any(Object));
  });
});
// Line-item read path: returns the parsed items for a receipt.
describe('getReceiptItems', () => {
  it('should return receipt items', async () => {
    const mockItems = [
      {
        receipt_item_id: 1,
        receipt_id: 1,
        raw_item_description: 'MILK 2%',
        quantity: 1,
        price_paid_cents: 399,
        master_item_id: null,
        product_id: null,
        status: 'unmatched' as ReceiptItemStatus,
        line_number: 1,
        match_confidence: null,
        is_discount: false,
        unit_price_cents: null,
        unit_type: null,
        added_to_pantry: false,
        pantry_item_id: null,
        upc_code: null,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      },
    ];
    vi.mocked(receiptRepo.getReceiptItems).mockResolvedValueOnce(mockItems);
    const result = await getReceiptItems(1, mockLogger);
    expect(result).toHaveLength(1);
    expect(result[0].raw_item_description).toBe('MILK 2%');
  });
});
// Line-item update path: matching an item against a master item.
describe('updateReceiptItem', () => {
  it('should update receipt item', async () => {
    // Repository response after the item has been matched.
    const mockUpdatedItem = {
      receipt_item_id: 1,
      receipt_id: 1,
      raw_item_description: 'MILK 2%',
      quantity: 2,
      price_paid_cents: 399,
      master_item_id: 5,
      product_id: null,
      status: 'matched' as ReceiptItemStatus,
      line_number: 1,
      match_confidence: 0.95,
      is_discount: false,
      unit_price_cents: null,
      unit_type: null,
      added_to_pantry: false,
      pantry_item_id: null,
      upc_code: null,
      created_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.updateReceiptItem).mockResolvedValueOnce(mockUpdatedItem);
    const result = await updateReceiptItem(
      1,
      { master_item_id: 5, status: 'matched' as ReceiptItemStatus, match_confidence: 0.95 },
      mockLogger,
    );
    expect(result.quantity).toBe(2);
    expect(result.master_item_id).toBe(5);
    expect(result.status).toBe('matched');
  });
});
// Pantry-sync path: items parsed from a receipt but not yet added to pantry.
describe('getUnaddedItems', () => {
  it('should return items not yet added to pantry', async () => {
    const mockItems = [
      {
        receipt_item_id: 1,
        receipt_id: 1,
        raw_item_description: 'BREAD',
        quantity: 1,
        price_paid_cents: 299,
        master_item_id: null,
        product_id: null,
        status: 'unmatched' as ReceiptItemStatus,
        line_number: 1,
        match_confidence: null,
        is_discount: false,
        unit_price_cents: null,
        unit_type: null,
        added_to_pantry: false,
        pantry_item_id: null,
        upc_code: null,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      },
    ];
    vi.mocked(receiptRepo.getUnaddedReceiptItems).mockResolvedValueOnce(mockItems);
    const result = await getUnaddedItems(1, mockLogger);
    expect(result).toHaveLength(1);
    expect(result[0].added_to_pantry).toBe(false);
  });
});
describe('getProcessingLogs', () => {
  it('should return processing logs for receipt', async () => {
    // Single completed 'upload' step, as logged when a receipt is created.
    const mockLogs = [
      {
        log_id: 1,
        receipt_id: 1,
        processing_step: 'upload' as ReceiptProcessingStep,
        status: 'completed' as ReceiptProcessingStatus,
        provider: 'internal' as OcrProvider,
        duration_ms: 50,
        tokens_used: null,
        cost_cents: null,
        input_data: null,
        output_data: null,
        error_message: null,
        created_at: new Date().toISOString(),
      },
    ];
    vi.mocked(receiptRepo.getProcessingLogs).mockResolvedValueOnce(mockLogs);
    const result = await getProcessingLogs(1, mockLogger);
    expect(result).toHaveLength(1);
    expect(result[0].processing_step).toBe('upload');
  });
});
describe('getProcessingStats', () => {
  it('should return processing statistics', async () => {
    // Aggregate stats row in the shape produced by the repository layer.
    const mockStats = {
      total_receipts: 100,
      completed: 85,
      failed: 10,
      pending: 5,
      avg_processing_time_ms: 2500,
      total_cost_cents: 0,
    };
    vi.mocked(receiptRepo.getProcessingStats).mockResolvedValueOnce(mockStats);
    const result = await getProcessingStats(mockLogger);
    expect(result.total_receipts).toBe(100);
    expect(result.completed).toBe(85);
  });
  it('should filter by date range', async () => {
    const mockStats = {
      total_receipts: 20,
      completed: 18,
      failed: 2,
      pending: 0,
      avg_processing_time_ms: 2000,
      total_cost_cents: 0,
    };
    vi.mocked(receiptRepo.getProcessingStats).mockResolvedValueOnce(mockStats);
    await getProcessingStats(mockLogger, {
      fromDate: '2024-01-01',
      toDate: '2024-01-31',
    });
    // The date-range options must be forwarded verbatim to the repository.
    expect(receiptRepo.getProcessingStats).toHaveBeenCalledWith(mockLogger, {
      fromDate: '2024-01-01',
      toDate: '2024-01-31',
    });
  });
});
describe('getReceiptsNeedingProcessing', () => {
  it('should return pending receipts for processing', async () => {
    // One 'pending' receipt as the worker query would return it.
    const mockReceipts = [
      {
        receipt_id: 1,
        user_id: 'user-1',
        store_id: null,
        receipt_image_url: '/uploads/receipt.jpg',
        transaction_date: null,
        total_amount_cents: null,
        status: 'pending' as ReceiptStatus,
        raw_text: null,
        store_confidence: null,
        ocr_provider: null,
        error_details: null,
        retry_count: 0,
        ocr_confidence: null,
        currency: 'USD',
        created_at: new Date().toISOString(),
        processed_at: null,
        updated_at: new Date().toISOString(),
      },
    ];
    vi.mocked(receiptRepo.getReceiptsNeedingProcessing).mockResolvedValueOnce(mockReceipts);
    const result = await getReceiptsNeedingProcessing(10, mockLogger);
    expect(result).toHaveLength(1);
    expect(result[0].status).toBe('pending');
  });
});
describe('addStorePattern', () => {
  it('should add store pattern', async () => {
    // Delegation test: the service must forward every argument to the
    // repository unchanged.
    const options = { priority: 1 };
    vi.mocked(receiptRepo.addStorePattern).mockResolvedValueOnce(createMockStorePatternRow());
    await addStorePattern(1, 'name', 'WALMART', mockLogger, options);
    expect(receiptRepo.addStorePattern).toHaveBeenCalledWith(
      1,
      'name',
      'WALMART',
      mockLogger,
      options,
    );
  });
});
describe('getActiveStorePatterns', () => {
  it('should return active store patterns', async () => {
    // Single active pattern row; the service should surface it as-is.
    const walmartPattern = createMockStorePatternRow({
      pattern_id: 1,
      store_id: 1,
      pattern_type: 'name',
      pattern_value: 'WALMART',
    });
    vi.mocked(receiptRepo.getActiveStorePatterns).mockResolvedValueOnce([walmartPattern]);
    const result = await getActiveStorePatterns(mockLogger);
    expect(result).toHaveLength(1);
  });
});
describe('processReceiptJob', () => {
  it('should process receipt job successfully', async () => {
    // Pending receipt: the happy path runs the full pipeline.
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: null,
      total_amount_cents: null,
      status: 'pending' as ReceiptStatus,
      raw_text: null,
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: null,
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
    // updateReceipt is called repeatedly by the pipeline; always resolving
    // to a 'completed' receipt makes the final status check succeed.
    vi.mocked(receiptRepo.updateReceipt).mockResolvedValue({
      ...mockReceipt,
      status: 'completed' as ReceiptStatus,
    } as any);
    vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
    vi.mocked(receiptRepo.detectStoreFromText).mockResolvedValueOnce(null);
    vi.mocked(receiptRepo.addReceiptItems).mockResolvedValueOnce([]);
    const mockJob = {
      id: 'job-1',
      data: {
        receiptId: 1,
        userId: 'user-1',
        meta: { requestId: 'req-1' },
      },
      attemptsMade: 0,
    } as Job<ReceiptJobData>;
    const result = await processReceiptJob(mockJob, mockLogger);
    expect(result.success).toBe(true);
    expect(result.receiptId).toBe(1);
  });
  it('should skip already completed receipts', async () => {
    // Receipt already 'completed': the job should short-circuit without
    // touching the receipt again.
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: null,
      total_amount_cents: null,
      status: 'completed' as ReceiptStatus,
      raw_text: 'Previous text',
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: new Date().toISOString(),
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
    const mockJob = {
      id: 'job-2',
      data: {
        receiptId: 1,
        userId: 'user-1',
      },
      attemptsMade: 0,
    } as Job<ReceiptJobData>;
    const result = await processReceiptJob(mockJob, mockLogger);
    expect(result.success).toBe(true);
    expect(result.itemsFound).toBe(0);
    // No processing happened, so no status update may be issued.
    expect(receiptRepo.updateReceipt).not.toHaveBeenCalled();
  });
  it('should handle job processing errors', async () => {
    const mockReceipt = {
      receipt_id: 1,
      user_id: 'user-1',
      store_id: null,
      receipt_image_url: '/uploads/receipt.jpg',
      transaction_date: null,
      total_amount_cents: null,
      status: 'pending' as ReceiptStatus,
      raw_text: null,
      store_confidence: null,
      ocr_provider: null,
      error_details: null,
      retry_count: 0,
      ocr_confidence: null,
      currency: 'USD',
      created_at: new Date().toISOString(),
      processed_at: null,
      updated_at: new Date().toISOString(),
    };
    vi.mocked(receiptRepo.getReceiptById).mockResolvedValueOnce(mockReceipt);
    // First updateReceipt call (status -> 'processing') rejects to simulate
    // a failure; the follow-up call from the error path resolves.
    vi.mocked(receiptRepo.updateReceipt)
      .mockRejectedValueOnce(new Error('Processing failed'))
      .mockResolvedValueOnce({ ...mockReceipt, status: 'failed' } as any);
    vi.mocked(receiptRepo.incrementRetryCount).mockResolvedValueOnce(1);
    vi.mocked(receiptRepo.logProcessingStep).mockResolvedValue(createMockProcessingLogRecord());
    const mockJob = {
      id: 'job-3',
      data: {
        receiptId: 1,
        userId: 'user-1',
      },
      attemptsMade: 1,
    } as Job<ReceiptJobData>;
    // The original error must propagate (so the queue can retry) AND the
    // receipt must be marked 'failed'.
    await expect(processReceiptJob(mockJob, mockLogger)).rejects.toThrow('Processing failed');
    expect(receiptRepo.updateReceipt).toHaveBeenCalledWith(
      1,
      expect.objectContaining({ status: 'failed' }),
      expect.any(Object),
    );
  });
});
});

View File

@@ -0,0 +1,843 @@
// src/services/receiptService.server.ts
/**
* @file Receipt Scanning Service
* Handles receipt image processing, OCR extraction, item parsing, and store detection.
* Integrates with expiry tracking for adding scanned items to inventory.
*/
import type { Logger } from 'pino';
import { receiptRepo } from './db/index.db';
import type {
OcrProvider,
ReceiptScan,
ReceiptItem,
ReceiptProcessingLogRecord,
} from '../types/expiry';
import type { UpdateReceiptItemRequest, ReceiptQueryOptions } from './db/receipt.db';
/**
 * Maximum number of retry attempts for failed receipt processing.
 * Passed to receiptRepo.getReceiptsNeedingProcessing (see
 * getReceiptsNeedingProcessing below) to exclude receipts that have already
 * been retried this many times.
 */
const MAX_RETRY_ATTEMPTS = 3;
/**
 * Default OCR provider recorded on the initial 'upload' processing-log step
 * in createReceipt. Actual extraction may later use a different provider.
 */
const DEFAULT_OCR_PROVIDER: OcrProvider = 'internal';
// ============================================================================
// RECEIPT MANAGEMENT
// ============================================================================
/**
 * Creates a new receipt record and logs its initial 'upload' processing step.
 * @param userId The user's ID
 * @param imageUrl URL or path to the receipt image
 * @param logger Pino logger instance
 * @param options Optional store ID and transaction date if known
 * @returns The created receipt record
 */
export const createReceipt = async (
  userId: string,
  imageUrl: string,
  logger: Logger,
  options: { storeId?: number; transactionDate?: string } = {},
): Promise<ReceiptScan> => {
  logger.info({ userId, imageUrl }, 'Creating new receipt for processing');
  const newReceiptPayload = {
    user_id: userId,
    receipt_image_url: imageUrl,
    store_id: options.storeId,
    transaction_date: options.transactionDate,
  };
  const receipt = await receiptRepo.createReceipt(newReceiptPayload, logger);
  // Record the upload step so the processing audit trail starts at creation.
  await receiptRepo.logProcessingStep(receipt.receipt_id, 'upload', 'completed', logger, {
    provider: DEFAULT_OCR_PROVIDER,
  });
  return receipt;
};
/**
 * Gets a receipt by ID, scoped to the owning user.
 * @param receiptId The receipt ID
 * @param userId The user's ID (for authorization)
 * @param logger Pino logger instance
 * @returns The receipt record
 */
export const getReceiptById = async (
  receiptId: number,
  userId: string,
  logger: Logger,
): Promise<ReceiptScan> => {
  // Thin pass-through; the repository enforces the user-ownership check.
  const receipt = await receiptRepo.getReceiptById(receiptId, userId, logger);
  return receipt;
};
/**
 * Gets receipts for a user with optional filtering.
 * @param options Query options (user ID, filters, pagination)
 * @param logger Pino logger instance
 * @returns Paginated receipts plus the total match count
 */
export const getReceipts = async (
  options: ReceiptQueryOptions,
  logger: Logger,
): Promise<{ receipts: ReceiptScan[]; total: number }> => {
  logger.debug({ userId: options.user_id }, 'Fetching receipts');
  const page = await receiptRepo.getReceipts(options, logger);
  return page;
};
/**
 * Deletes a receipt and all associated data.
 * @param receiptId The receipt ID
 * @param userId The user's ID (for authorization)
 * @param logger Pino logger instance
 */
export const deleteReceipt = async (
  receiptId: number,
  userId: string,
  logger: Logger,
): Promise<void> => {
  logger.info({ receiptId, userId }, 'Deleting receipt');
  // Deletion of related rows is handled by the repository layer
  // (presumably via cascade) — confirm in receipt.db.
  await receiptRepo.deleteReceipt(receiptId, userId, logger);
};
// ============================================================================
// RECEIPT PROCESSING
// ============================================================================
/**
 * Processes a receipt through OCR and item extraction.
 * This is the main entry point for receipt processing, typically called by a worker.
 *
 * Pipeline (each step is recorded via receiptRepo.logProcessingStep):
 *   1. OCR extraction (performOcrExtraction)
 *   2. Store detection from the OCR text (skipped when store_id is set)
 *   3. Item extraction — AI-extracted items when available, else text parsing
 *   4. Persist extracted items
 *   5. Extract receipt metadata (total, transaction date)
 *   6. Mark receipt 'completed'
 * On failure the retry count is incremented, the receipt is marked 'failed'
 * with error details, and the error is re-thrown so the caller (queue worker)
 * can decide whether to retry.
 *
 * @param receiptId The receipt ID to process
 * @param logger Pino logger instance
 * @returns The processed receipt with extracted items
 */
export const processReceipt = async (
  receiptId: number,
  logger: Logger,
): Promise<{ receipt: ReceiptScan; items: ReceiptItem[] }> => {
  // Child logger tags every log line of this run with the receipt ID.
  const processLogger = logger.child({ receiptId });
  processLogger.info('Starting receipt processing');
  const startTime = Date.now();
  try {
    // Update status to processing
    let receipt = await receiptRepo.updateReceipt(
      receiptId,
      { status: 'processing' },
      processLogger,
    );
    // Step 1: OCR Extraction
    processLogger.debug('Starting OCR extraction');
    const ocrResult = await performOcrExtraction(receipt.receipt_image_url, processLogger);
    await receiptRepo.logProcessingStep(receiptId, 'ocr_extraction', 'completed', processLogger, {
      provider: ocrResult.provider,
      durationMs: ocrResult.durationMs,
      outputData: { textLength: ocrResult.text.length, confidence: ocrResult.confidence },
    });
    // Update receipt with OCR results
    receipt = await receiptRepo.updateReceipt(
      receiptId,
      {
        raw_text: ocrResult.text,
        ocr_provider: ocrResult.provider,
        ocr_confidence: ocrResult.confidence,
      },
      processLogger,
    );
    // Step 2: Store Detection (if not already set)
    if (!receipt.store_id) {
      processLogger.debug('Attempting store detection');
      const storeDetection = await receiptRepo.detectStoreFromText(ocrResult.text, processLogger);
      if (storeDetection) {
        receipt = await receiptRepo.updateReceipt(
          receiptId,
          {
            store_id: storeDetection.store_id,
            store_confidence: storeDetection.confidence,
          },
          processLogger,
        );
        await receiptRepo.logProcessingStep(
          receiptId,
          'store_detection',
          'completed',
          processLogger,
          {
            outputData: { storeId: storeDetection.store_id, confidence: storeDetection.confidence },
          },
        );
      } else {
        // No pattern matched: log the step as completed (not failed) so the
        // pipeline stays auditable without treating this as an error.
        await receiptRepo.logProcessingStep(
          receiptId,
          'store_detection',
          'completed',
          processLogger,
          {
            outputData: { storeId: null, message: 'No store match found' },
          },
        );
      }
    }
    // Step 3: Parse receipt text and extract items
    // If AI extracted items directly, use those; otherwise fall back to text parsing
    processLogger.debug('Starting text parsing and item extraction');
    const parseStartTime = Date.now();
    let itemsToAdd: Array<{
      receipt_id: number;
      raw_item_description: string;
      quantity: number;
      price_paid_cents: number;
      line_number: number;
      is_discount: boolean;
      unit_price_cents?: number;
      unit_type?: string;
    }>;
    if (ocrResult.extractedItems && ocrResult.extractedItems.length > 0) {
      // Use AI-extracted items directly (more accurate)
      processLogger.info(
        { itemCount: ocrResult.extractedItems.length },
        'Using AI-extracted items directly',
      );
      itemsToAdd = ocrResult.extractedItems.map((item, index) => ({
        receipt_id: receiptId,
        raw_item_description: item.raw_item_description,
        quantity: 1, // AI doesn't extract quantity separately yet
        price_paid_cents: item.price_paid_cents,
        line_number: index + 1,
        // A negative price marks a discount/coupon line.
        is_discount: item.price_paid_cents < 0,
      }));
    } else {
      // Fall back to text parsing
      const extractedItems = await parseReceiptText(ocrResult.text, processLogger);
      itemsToAdd = extractedItems.map((item) => ({
        receipt_id: receiptId,
        raw_item_description: item.description,
        quantity: item.quantity,
        price_paid_cents: item.priceCents,
        line_number: item.lineNumber,
        is_discount: item.isDiscount,
        unit_price_cents: item.unitPriceCents,
        unit_type: item.unitType,
      }));
    }
    await receiptRepo.logProcessingStep(receiptId, 'text_parsing', 'completed', processLogger, {
      durationMs: Date.now() - parseStartTime,
      outputData: { itemCount: itemsToAdd.length, usedAiExtraction: !!ocrResult.extractedItems },
    });
    // Step 4: Add extracted items to database
    const items = await receiptRepo.addReceiptItems(itemsToAdd, processLogger);
    await receiptRepo.logProcessingStep(receiptId, 'item_extraction', 'completed', processLogger, {
      outputData: { itemsAdded: items.length },
    });
    // Step 5: Extract total and transaction date
    const receiptMetadata = extractReceiptMetadata(ocrResult.text, processLogger);
    if (receiptMetadata.totalCents || receiptMetadata.transactionDate) {
      receipt = await receiptRepo.updateReceipt(
        receiptId,
        {
          total_amount_cents: receiptMetadata.totalCents,
          transaction_date: receiptMetadata.transactionDate,
        },
        processLogger,
      );
    }
    // Step 6: Mark as completed
    receipt = await receiptRepo.updateReceipt(
      receiptId,
      {
        status: 'completed',
        processed_at: new Date().toISOString(),
      },
      processLogger,
    );
    await receiptRepo.logProcessingStep(receiptId, 'finalization', 'completed', processLogger, {
      durationMs: Date.now() - startTime,
      outputData: { totalItems: items.length, status: 'completed' },
    });
    processLogger.info(
      { receiptId, itemCount: items.length, durationMs: Date.now() - startTime },
      'Receipt processing completed successfully',
    );
    return { receipt, items };
  } catch (error) {
    const err = error instanceof Error ? error : new Error(String(error));
    processLogger.error({ err, receiptId }, 'Receipt processing failed');
    // Increment retry count and update status
    // NOTE(review): the retry count is recorded but not compared against
    // MAX_RETRY_ATTEMPTS here — presumably the worker query enforces the cap
    // (getReceiptsNeedingProcessing receives it as an argument); confirm.
    const _retryCount = await receiptRepo.incrementRetryCount(receiptId, processLogger);
    await receiptRepo.updateReceipt(
      receiptId,
      {
        status: 'failed',
        error_details: {
          message: err.message,
          stack: err.stack,
          timestamp: new Date().toISOString(),
        },
      },
      processLogger,
    );
    await receiptRepo.logProcessingStep(receiptId, 'finalization', 'failed', processLogger, {
      durationMs: Date.now() - startTime,
      errorMessage: err.message,
    });
    throw err;
  }
};
/**
 * Performs OCR extraction on a receipt image using Gemini Vision API.
 * Falls back to basic text extraction if AI is not configured.
 *
 * Failure modes (missing AI key, remote URL, missing file, extraction error)
 * do NOT throw: each returns a placeholder text with confidence 0 so the
 * pipeline can complete and record what went wrong.
 *
 * @param imageUrl URL or path to the receipt image
 * @param logger Pino logger instance
 * @returns OCR extraction result; extractedItems is present only when the AI
 *          provider returned structured items
 */
const performOcrExtraction = async (
  imageUrl: string,
  logger: Logger,
): Promise<{
  text: string;
  provider: OcrProvider;
  confidence: number;
  durationMs: number;
  extractedItems?: Array<{ raw_item_description: string; price_paid_cents: number }>;
}> => {
  const startTime = Date.now();
  // Check if AI services are configured
  if (!isAiConfigured) {
    logger.warn({ imageUrl }, 'AI not configured - OCR extraction unavailable');
    return {
      text: '[AI not configured - please set GEMINI_API_KEY]',
      provider: 'internal',
      confidence: 0,
      durationMs: Date.now() - startTime,
    };
  }
  try {
    // Determine if imageUrl is a local file path or URL
    const isLocalPath = !imageUrl.startsWith('http');
    if (!isLocalPath) {
      logger.warn({ imageUrl }, 'Remote URLs not yet supported for OCR - use local file path');
      return {
        text: '[Remote URL OCR not yet implemented - upload file directly]',
        provider: 'internal',
        confidence: 0,
        durationMs: Date.now() - startTime,
      };
    }
    // Determine MIME type from extension; default to JPEG for unknown ones.
    const ext = path.extname(imageUrl).toLowerCase();
    const mimeTypeMap: Record<string, string> = {
      '.jpg': 'image/jpeg',
      '.jpeg': 'image/jpeg',
      '.png': 'image/png',
      '.gif': 'image/gif',
      '.webp': 'image/webp',
    };
    const mimeType = mimeTypeMap[ext] || 'image/jpeg';
    // Verify file exists before handing it to the AI service.
    try {
      await fs.access(imageUrl);
    } catch {
      logger.error({ imageUrl }, 'Receipt image file not found');
      return {
        text: '[Receipt image file not found]',
        provider: 'internal',
        confidence: 0,
        durationMs: Date.now() - startTime,
      };
    }
    logger.info({ imageUrl, mimeType }, 'Starting OCR extraction with Gemini Vision');
    // Use the AI service to extract items from the receipt
    const extractedItems = await aiService.extractItemsFromReceiptImage(imageUrl, mimeType, logger);
    if (!extractedItems || extractedItems.length === 0) {
      logger.warn({ imageUrl }, 'No items extracted from receipt image');
      return {
        text: '[No text could be extracted from receipt]',
        provider: 'gemini',
        confidence: 0.3,
        durationMs: Date.now() - startTime,
      };
    }
    // Convert extracted items to a human-readable text representation for
    // storage in receipt.raw_text.
    const textLines = extractedItems.map(
      (item) => `${item.raw_item_description} - $${(item.price_paid_cents / 100).toFixed(2)}`,
    );
    const extractedText = textLines.join('\n');
    logger.info(
      { imageUrl, itemCount: extractedItems.length, durationMs: Date.now() - startTime },
      'OCR extraction completed successfully',
    );
    return {
      text: extractedText,
      provider: 'gemini',
      // Fixed heuristic confidence — the AI service does not report one.
      confidence: 0.85,
      durationMs: Date.now() - startTime,
      extractedItems, // Pass along for direct use
    };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    logger.error({ err: error, imageUrl }, 'OCR extraction failed');
    return {
      text: `[OCR extraction failed: ${errorMessage}]`,
      provider: 'internal',
      confidence: 0,
      durationMs: Date.now() - startTime,
    };
  }
};
/**
 * Parses raw OCR receipt text to extract individual line items.
 *
 * Heuristic line-by-line fallback used when the AI provider does not return
 * structured items: each line with a trailing price becomes an item, known
 * header/footer lines are skipped, and an optional leading quantity
 * ("2 @" / "2x") is extracted.
 *
 * TODO: replace with a real parser (multi-line items, unit prices, ML).
 *
 * @param text Raw OCR text from receipt
 * @param logger Pino logger instance
 * @returns Array of extracted items (empty when nothing parseable is found)
 */
const parseReceiptText = async (
  text: string,
  logger: Logger,
): Promise<
  Array<{
    description: string;
    quantity: number;
    priceCents: number;
    lineNumber: number;
    isDiscount: boolean;
    unitPriceCents?: number;
    unitType?: string;
  }>
> => {
  logger.debug({ textLength: text.length }, 'Parsing receipt text');
  const items: Array<{
    description: string;
    quantity: number;
    priceCents: number;
    lineNumber: number;
    isDiscount: boolean;
    unitPriceCents?: number;
    unitType?: string;
  }> = [];
  const lines = text.split('\n').filter((line) => line.trim());
  // Price at end of line: $X.XX or X.XX
  const pricePattern = /\$?(\d+)\.(\d{2})\s*$/;
  // BUGFIX: discounts are detected by a minus sign attached to the trailing
  // price (e.g. "-$1.50" / "- 1.50"), not by any '-' anywhere in the line.
  // The previous check negated hyphenated items such as "COCA-COLA 1.99".
  const negativePricePattern = /-\s*\$?\d+\.\d{2}\s*$/;
  // Quantity prefix: "2 @" or "2x"
  const quantityPattern = /^(\d+)\s*[@xX]/;
  let lineNumber = 0;
  for (const line of lines) {
    lineNumber++;
    const trimmedLine = line.trim();
    // Skip empty lines and common receipt headers/footers
    if (!trimmedLine || isHeaderOrFooter(trimmedLine)) {
      continue;
    }
    const priceMatch = trimmedLine.match(pricePattern);
    if (!priceMatch) {
      continue;
    }
    const dollars = parseInt(priceMatch[1], 10);
    const cents = parseInt(priceMatch[2], 10);
    let priceCents = dollars * 100 + cents;
    const lowercaseLine = trimmedLine.toLowerCase();
    const isDiscount =
      negativePricePattern.test(trimmedLine) ||
      lowercaseLine.includes('discount') ||
      lowercaseLine.includes('coupon');
    if (isDiscount) {
      priceCents = -Math.abs(priceCents);
    }
    // Extract description (everything before the price)
    let description = trimmedLine.replace(pricePattern, '').trim();
    let quantity = 1;
    const quantityMatch = description.match(quantityPattern);
    if (quantityMatch) {
      quantity = parseInt(quantityMatch[1], 10);
      description = description.replace(quantityPattern, '').trim();
    }
    // Clean up trailing separator dashes left over after stripping the price.
    description = description.replace(/[-]+\s*$/, '').trim();
    if (description) {
      items.push({
        description,
        quantity,
        priceCents,
        lineNumber,
        isDiscount,
      });
    }
  }
  logger.debug({ extractedCount: items.length }, 'Receipt text parsing complete');
  return items;
};
/**
 * Returns true when a receipt line is known boilerplate (totals, tax,
 * payment details, store greetings) that should not be parsed as an item.
 */
const isHeaderOrFooter = (line: string): boolean => {
  const normalized = line.toLowerCase();
  const skipPatterns = [
    'thank you',
    'thanks for',
    'visit us',
    'total',
    'subtotal',
    'tax',
    'change',
    'cash',
    'credit',
    'debit',
    'visa',
    'mastercard',
    'approved',
    'transaction',
    'terminal',
    'receipt',
    'store #',
    'date:',
    'time:',
    'cashier',
  ];
  for (const pattern of skipPatterns) {
    if (normalized.includes(pattern)) {
      return true;
    }
  }
  return false;
};
/**
 * Extracts receipt-level metadata (grand total and transaction date) from
 * raw OCR text.
 *
 * @param text Raw OCR text
 * @param logger Pino logger instance
 * @returns totalCents and/or transactionDate (YYYY-MM-DD) when found
 */
const extractReceiptMetadata = (
  text: string,
  logger: Logger,
): {
  totalCents?: number;
  transactionDate?: string;
} => {
  const result: { totalCents?: number; transactionDate?: string } = {};
  // Look for total amount. BUGFIX: a word boundary (\b) keeps "total" from
  // matching inside "SUBTOTAL" — receipts usually print SUBTOTAL before
  // TOTAL, so the old unanchored pattern captured the subtotal instead.
  const totalPatterns = [
    /\bgrand\s+total[:\s]+\$?(\d+)\.(\d{2})/i,
    /\btotal\b[:\s]+\$?(\d+)\.(\d{2})/i,
    /\bamount\s+due[:\s]+\$?(\d+)\.(\d{2})/i,
  ];
  for (const pattern of totalPatterns) {
    const match = text.match(pattern);
    if (match) {
      result.totalCents = parseInt(match[1], 10) * 100 + parseInt(match[2], 10);
      break;
    }
  }
  // Look for transaction date
  const datePatterns = [
    /(\d{1,2})\/(\d{1,2})\/(\d{2,4})/, // MM/DD/YYYY or M/D/YY (US-style assumed)
    /(\d{4})-(\d{2})-(\d{2})/, // YYYY-MM-DD
  ];
  for (const pattern of datePatterns) {
    const match = text.match(pattern);
    if (!match) {
      continue;
    }
    let year: number;
    let month: number;
    let day: number;
    if (match[0].includes('-')) {
      // YYYY-MM-DD format
      year = parseInt(match[1], 10);
      month = parseInt(match[2], 10);
      day = parseInt(match[3], 10);
    } else {
      // MM/DD/YYYY format
      month = parseInt(match[1], 10);
      day = parseInt(match[2], 10);
      year = parseInt(match[3], 10);
      if (year < 100) {
        year += 2000;
      }
    }
    // BUGFIX: build the date in UTC. The previous local-time Date followed
    // by toISOString() shifted the day backwards for processes running east
    // of UTC. The round-trip check also rejects rollover dates (e.g. 13/45).
    const date = new Date(Date.UTC(year, month - 1, day));
    if (
      !isNaN(date.getTime()) &&
      date.getUTCFullYear() === year &&
      date.getUTCMonth() === month - 1 &&
      date.getUTCDate() === day
    ) {
      result.transactionDate = date.toISOString().split('T')[0];
      break;
    }
  }
  logger.debug({ result }, 'Extracted receipt metadata');
  return result;
};
// ============================================================================
// RECEIPT ITEMS
// ============================================================================
/**
 * Gets all line items for a receipt.
 * @param receiptId The receipt ID
 * @param logger Pino logger instance
 * @returns Array of receipt items
 */
export const getReceiptItems = async (
  receiptId: number,
  logger: Logger,
): Promise<ReceiptItem[]> => {
  const items = await receiptRepo.getReceiptItems(receiptId, logger);
  return items;
};
/**
 * Updates a receipt item (e.g. after the user manually matches it to a
 * master item).
 * @param receiptItemId The receipt item ID
 * @param updates Updates to apply
 * @param logger Pino logger instance
 * @returns Updated receipt item
 */
export const updateReceiptItem = async (
  receiptItemId: number,
  updates: UpdateReceiptItemRequest,
  logger: Logger,
): Promise<ReceiptItem> => {
  const updated = await receiptRepo.updateReceiptItem(receiptItemId, updates, logger);
  return updated;
};
/**
 * Gets receipt items that have not yet been added to the pantry/inventory.
 * @param receiptId The receipt ID
 * @param logger Pino logger instance
 * @returns Array of unadded items
 */
export const getUnaddedItems = async (
  receiptId: number,
  logger: Logger,
): Promise<ReceiptItem[]> => {
  const unadded = await receiptRepo.getUnaddedReceiptItems(receiptId, logger);
  return unadded;
};
// ============================================================================
// PROCESSING LOGS AND STATS
// ============================================================================
/**
 * Gets the step-by-step processing logs for a receipt.
 * @param receiptId The receipt ID
 * @param logger Pino logger instance
 * @returns Array of processing log records
 */
export const getProcessingLogs = async (
  receiptId: number,
  logger: Logger,
): Promise<ReceiptProcessingLogRecord[]> => {
  const logs = await receiptRepo.getProcessingLogs(receiptId, logger);
  return logs;
};
/**
 * Gets aggregate receipt-processing statistics.
 * @param logger Pino logger instance
 * @param options Optional fromDate/toDate range filter, forwarded verbatim
 * @returns Processing statistics
 */
export const getProcessingStats = async (
  logger: Logger,
  options: { fromDate?: string; toDate?: string } = {},
): Promise<{
  total_receipts: number;
  completed: number;
  failed: number;
  pending: number;
  avg_processing_time_ms: number;
  total_cost_cents: number;
}> => {
  const stats = await receiptRepo.getProcessingStats(logger, options);
  return stats;
};
/**
 * Gets receipts that still need processing (for the background worker).
 * Receipts that have exhausted MAX_RETRY_ATTEMPTS are excluded by the
 * repository query.
 * @param limit Maximum number of receipts to return
 * @param logger Pino logger instance
 * @returns Array of receipts needing processing
 */
export const getReceiptsNeedingProcessing = async (
  limit: number,
  logger: Logger,
): Promise<ReceiptScan[]> => {
  const pending = await receiptRepo.getReceiptsNeedingProcessing(MAX_RETRY_ATTEMPTS, limit, logger);
  return pending;
};
// ============================================================================
// STORE PATTERNS (Admin)
// ============================================================================
/**
 * Adds a new store receipt pattern used for store detection (admin only).
 * @param storeId The store ID
 * @param patternType The pattern type
 * @param patternValue The pattern value
 * @param logger Pino logger instance
 * @param options Additional options (e.g. priority)
 */
export const addStorePattern = async (
  storeId: number,
  patternType: string,
  patternValue: string,
  logger: Logger,
  options: { priority?: number } = {},
) => {
  const pattern = await receiptRepo.addStorePattern(
    storeId,
    patternType,
    patternValue,
    logger,
    options,
  );
  return pattern;
};
/**
 * Gets all active store-detection patterns.
 * @param logger Pino logger instance
 */
export const getActiveStorePatterns = async (logger: Logger) => {
  const patterns = await receiptRepo.getActiveStorePatterns(logger);
  return patterns;
};
// ============================================================================
// JOB PROCESSING
// ============================================================================
import type { Job } from 'bullmq';
import type { ReceiptJobData } from '../types/job-data';
import { aiService } from './aiService.server';
import { isAiConfigured } from '../config/env';
import path from 'node:path';
import fs from 'node:fs/promises';
/**
 * Processes a receipt processing job from the queue.
 * This is the main entry point for background receipt processing.
 *
 * Skips receipts already marked 'completed' (idempotent on redelivery).
 * On failure the receipt is marked 'failed' (best effort) and the original
 * error is re-thrown so BullMQ can apply its retry policy.
 *
 * @param job The BullMQ job carrying receiptId/userId (+ optional meta)
 * @param logger Pino logger instance
 * @returns Processing result
 */
export const processReceiptJob = async (
  job: Job<ReceiptJobData>,
  logger: Logger,
): Promise<{ success: boolean; itemsFound: number; receiptId: number }> => {
  const { receiptId, userId } = job.data;
  // Tag all log lines with the job/receipt/request identifiers.
  const jobLogger = logger.child({
    jobId: job.id,
    receiptId,
    userId,
    requestId: job.data.meta?.requestId,
  });
  jobLogger.info('Starting receipt processing job');
  try {
    // Get the receipt record to verify ownership and status
    const existingReceipt = await receiptRepo.getReceiptById(receiptId, userId, jobLogger);
    if (existingReceipt.status === 'completed') {
      jobLogger.info('Receipt already processed, skipping');
      return { success: true, itemsFound: 0, receiptId };
    }
    // Process the receipt (this handles status updates internally)
    const result = await processReceipt(receiptId, jobLogger);
    const itemsFound = result.items.length;
    const isSuccess = result.receipt.status === 'completed';
    jobLogger.info(
      { itemsFound, status: result.receipt.status },
      'Receipt processing job completed',
    );
    return {
      success: isSuccess,
      itemsFound,
      receiptId,
    };
  } catch (error) {
    jobLogger.error({ err: error }, 'Receipt processing job failed');
    // Update receipt status to failed. Best effort: processReceipt already
    // marks failures internally; this also covers errors thrown before it
    // ran (e.g. the ownership lookup above).
    try {
      await receiptRepo.updateReceipt(
        receiptId,
        {
          status: 'failed',
          error_details: {
            error: error instanceof Error ? error.message : String(error),
            jobId: job.id,
            attemptsMade: job.attemptsMade,
          },
        },
        jobLogger,
      );
    } catch (updateError) {
      // Never mask the original failure with a secondary update error.
      jobLogger.error({ err: updateError }, 'Failed to update receipt status after error');
    }
    throw error;
  }
};

View File

@@ -0,0 +1,124 @@
// src/services/sentry.client.ts
/**
* Sentry SDK initialization for client-side error tracking.
* Implements ADR-015: Application Performance Monitoring and Error Tracking.
*
* This module configures @sentry/react to send errors to our self-hosted
* Bugsink instance, which is Sentry-compatible.
*
* IMPORTANT: This module should be imported and initialized at the very top
* of index.tsx, before any other imports, to ensure all errors are captured.
*/
import * as Sentry from '@sentry/react';
import config from '../config';
import { logger } from './logger.client';
/**
 * Whether Sentry is properly configured (DSN present and enabled).
 * Evaluated once at module load; every helper in this module no-ops when
 * this is false.
 */
export const isSentryConfigured = !!config.sentry.dsn && config.sentry.enabled;
/**
 * Initializes the Sentry SDK for the browser.
 * Should be called once at application startup (before other imports run,
 * per the module header). No-op when the DSN is missing or Sentry disabled.
 */
export function initSentry(): void {
  if (!isSentryConfigured) {
    logger.info('[Sentry] Error tracking disabled (VITE_SENTRY_DSN not configured)');
    return;
  }
  Sentry.init({
    dsn: config.sentry.dsn,
    environment: config.sentry.environment,
    debug: config.sentry.debug,
    // Performance monitoring - disabled for now to keep it simple
    tracesSampleRate: 0,
    // Capture console/DOM/network/navigation activity as breadcrumbs so
    // errors arrive with the actions that led up to them.
    integrations: [
      Sentry.breadcrumbsIntegration({
        console: true,
        dom: true,
        fetch: true,
        history: true,
        xhr: true,
      }),
    ],
    // Filter out development-only errors and noise
    beforeSend(event) {
      // Skip errors whose first exception's stack includes a browser
      // extension frame (extension noise, not our code).
      if (
        event.exception?.values?.[0]?.stacktrace?.frames?.some((frame) =>
          frame.filename?.includes('extension://'),
        )
      ) {
        return null;
      }
      return event;
    },
  });
  logger.info(`[Sentry] Error tracking initialized (${config.sentry.environment})`);
}
/**
 * Captures an exception and sends it to Sentry.
 * Use this for errors that are caught and handled gracefully.
 *
 * @param error The error to report
 * @param context Optional extra data attached to this event only
 * @returns The Sentry event ID, or undefined when Sentry is disabled
 */
export function captureException(
  error: Error,
  context?: Record<string, unknown>,
): string | undefined {
  if (!isSentryConfigured) {
    return undefined;
  }
  // BUGFIX: attach the context inside an isolated scope. The previous
  // Sentry.setContext call mutated the global scope, so one call's context
  // leaked into every subsequent event for the rest of the session.
  return Sentry.withScope((scope) => {
    if (context) {
      scope.setContext('additional', context);
    }
    return Sentry.captureException(error);
  });
}
/**
 * Captures a message and sends it to Sentry.
 * Use this for non-exception events that should be tracked.
 *
 * @param message The message to record
 * @param level Severity level (defaults to 'info')
 * @returns The Sentry event ID, or undefined when Sentry is disabled
 */
export function captureMessage(
  message: string,
  level: Sentry.SeverityLevel = 'info',
): string | undefined {
  return isSentryConfigured ? Sentry.captureMessage(message, level) : undefined;
}
/**
 * Sets the user context for all subsequent events.
 * Call after authentication; pass null to clear the user (e.g. on logout).
 */
export function setUser(user: { id: string; email?: string; username?: string } | null): void {
  if (isSentryConfigured) {
    Sentry.setUser(user);
  }
}
/**
 * Adds a breadcrumb to the current scope.
 * Breadcrumbs are logged actions that led up to an error.
 */
export function addBreadcrumb(breadcrumb: Sentry.Breadcrumb): void {
  if (isSentryConfigured) {
    Sentry.addBreadcrumb(breadcrumb);
  }
}
// Re-export Sentry for advanced usage (Error Boundary, etc.)
export { Sentry };

View File

@@ -0,0 +1,161 @@
// src/services/sentry.server.ts
/**
* Sentry SDK initialization for error tracking.
* Implements ADR-015: Application Performance Monitoring and Error Tracking.
*
* This module configures @sentry/node to send errors to our self-hosted
* Bugsink instance, which is Sentry-compatible.
*
* IMPORTANT: This module should be imported and initialized at the very top
* of server.ts, before any other imports, to ensure all errors are captured.
*
* Note: Uses Sentry SDK v8+ API which differs significantly from v7.
*/
import * as Sentry from '@sentry/node';
import type { Request, Response, NextFunction, ErrorRequestHandler } from 'express';
import { config, isSentryConfigured, isProduction, isTest } from '../config/env';
import { logger } from './logger.server';
/**
 * Initializes the Sentry SDK with the configured DSN.
 * Should be called once at application startup. No-ops when the DSN is
 * missing or when running under the test environment.
 */
export function initSentry(): void {
  if (!isSentryConfigured) {
    logger.info('[Sentry] Error tracking disabled (SENTRY_DSN not configured)');
    return;
  }
  // Never report from test runs.
  if (isTest) {
    logger.debug('[Sentry] Skipping initialization in test environment');
    return;
  }
  const environment = config.sentry.environment || config.server.nodeEnv;
  Sentry.init({
    dsn: config.sentry.dsn,
    environment,
    debug: config.sentry.debug,
    // Performance tracing is intentionally off for now.
    tracesSampleRate: 0,
    beforeSend(event, hint) {
      // Outside production, mirror the captured error to the local logger so
      // developers see it without opening the Sentry UI.
      if (!isProduction && hint.originalException) {
        logger.error(
          { err: hint.originalException, sentryEventId: event.event_id },
          '[Sentry] Capturing error',
        );
      }
      return event;
    },
  });
  logger.info({ environment }, '[Sentry] Error tracking initialized');
}
/**
 * Creates Sentry middleware for Express.
 * Returns the request handler and error handler middleware.
 *
 * In Sentry SDK v8+, the old Handlers.requestHandler and Handlers.errorHandler
 * have been replaced. Request context is now captured automatically via the
 * Express integration. We provide a custom error handler that filters errors.
 */
export function getSentryMiddleware(): {
  requestHandler: (req: Request, res: Response, next: NextFunction) => void;
  errorHandler: ErrorRequestHandler;
} {
  // Request context is captured automatically by the v8 Express integration,
  // so the request handler is always a simple passthrough.
  const passthrough = (_req: Request, _res: Response, next: NextFunction): void => next();
  if (!isSentryConfigured || isTest) {
    // Sentry disabled: both middlewares are no-ops that forward to Express.
    return {
      requestHandler: passthrough,
      errorHandler: (_err: Error, _req: Request, _res: Response, next: NextFunction) => next(_err),
    };
  }
  const errorHandler: ErrorRequestHandler = (
    err: Error,
    _req: Request,
    _res: Response,
    next: NextFunction,
  ) => {
    // Only report server-side (5xx) failures; client errors stay out of Sentry.
    const withStatus = err as Error & { statusCode?: number; status?: number };
    const statusCode = withStatus.statusCode || withStatus.status || 500;
    if (statusCode >= 500) {
      Sentry.captureException(err);
    }
    // Always hand the error to the next Express error handler.
    next(err);
  };
  return {
    requestHandler: passthrough,
    errorHandler,
  };
}
/**
 * Captures an exception and sends it to Sentry.
 * Use this for errors that are caught and handled gracefully.
 * @param error The error to report.
 * @param context Optional extra data attached under the "additional" context key.
 * @returns The Sentry event id, or null when Sentry is disabled or under test.
 */
export function captureException(error: Error, context?: Record<string, unknown>): string | null {
  const enabled = isSentryConfigured && !isTest;
  if (!enabled) {
    return null;
  }
  if (context) {
    Sentry.setContext('additional', context);
  }
  return Sentry.captureException(error);
}
/**
 * Captures a message and sends it to Sentry.
 * Use this for non-exception events that should be tracked.
 * @param message Text to record.
 * @param level Severity level; defaults to 'info'.
 * @returns The Sentry event id, or null when Sentry is disabled or under test.
 */
export function captureMessage(
  message: string,
  level: Sentry.SeverityLevel = 'info',
): string | null {
  const enabled = isSentryConfigured && !isTest;
  if (!enabled) {
    return null;
  }
  return Sentry.captureMessage(message, level);
}
/**
 * Sets the user context for all subsequent events.
 * Call this after user authentication; pass null to clear the user.
 */
export function setUser(user: { id: string; email?: string; username?: string } | null): void {
  if (isSentryConfigured && !isTest) {
    Sentry.setUser(user);
  }
}
/**
 * Adds a breadcrumb to the current scope.
 * Breadcrumbs are logged actions that led up to an error.
 */
export function addBreadcrumb(breadcrumb: Sentry.Breadcrumb): void {
  if (isSentryConfigured && !isTest) {
    Sentry.addBreadcrumb(breadcrumb);
  }
}
// Re-export the raw Sentry namespace for advanced usage (scopes, integrations, etc.)
export { Sentry };

View File

@@ -0,0 +1,674 @@
// src/services/upcService.server.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Logger } from 'pino';
import { createMockLogger } from '../tests/utils/mockLogger';
import type { UpcScanSource, UpcExternalLookupRecord, UpcExternalSource } from '../types/upc';
// Mock dependencies
// NOTE: vi.mock() calls are hoisted by Vitest above the imports, so these
// factories take effect before upcService.server is loaded below.
vi.mock('./db/index.db', () => ({
  upcRepo: {
    recordScan: vi.fn(),
    findProductByUpc: vi.fn(),
    findExternalLookup: vi.fn(),
    upsertExternalLookup: vi.fn(),
    linkUpcToProduct: vi.fn(),
    getScanHistory: vi.fn(),
    getUserScanStats: vi.fn(),
    getScanById: vi.fn(),
  },
}));
// Both paid UPC providers are left unconfigured so the service only ever
// queries Open Food Facts in these tests.
vi.mock('../config/env', () => ({
  config: {
    upc: {
      upcItemDbApiKey: undefined,
      barcodeLookupApiKey: undefined,
    },
  },
  isUpcItemDbConfigured: false,
  isBarcodeLookupConfigured: false,
}));
// Module-level logger stub; child() returns the same mock so call sites chain.
vi.mock('./logger.server', () => ({
  logger: {
    child: vi.fn().mockReturnThis(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
  },
}));
// Mock global fetch - all HTTP traffic from the service goes through this mock.
const mockFetch = vi.fn();
global.fetch = mockFetch;
// Import after mocks are set up
import {
isValidUpcCode,
normalizeUpcCode,
detectBarcodeFromImage,
lookupExternalUpc,
scanUpc,
lookupUpc,
linkUpcToProduct,
getScanHistory,
getScanStats,
getScanById,
} from './upcService.server';
import { upcRepo } from './db/index.db';
// Builds a fully-populated UpcExternalLookupRecord for tests; any field can
// be overridden per test case via `overrides`.
function createMockExternalLookupRecord(
  overrides: Partial<UpcExternalLookupRecord> = {},
): UpcExternalLookupRecord {
  const base: UpcExternalLookupRecord = {
    lookup_id: 1,
    upc_code: '012345678905',
    product_name: null,
    brand_name: null,
    category: null,
    description: null,
    image_url: null,
    external_source: 'openfoodfacts' as UpcExternalSource,
    lookup_data: null,
    lookup_successful: false,
    created_at: new Date().toISOString(),
    updated_at: new Date().toISOString(),
  };
  return { ...base, ...overrides };
}
// Helper to create mock ProductRow (from db layer - matches upc.db.ts)
interface ProductRow {
  product_id: number;
  name: string;
  brand_id: number | null;
  category_id: number | null;
  description: string | null;
  size: string | null;
  upc_code: string | null;
  master_item_id: number | null;
  created_at: string;
  updated_at: string;
}
// Builds a ProductRow with sensible defaults; override individual columns as needed.
function createMockProductRow(overrides: Partial<ProductRow> = {}): ProductRow {
  const defaults: ProductRow = {
    product_id: 1,
    name: 'Test Product',
    brand_id: null,
    category_id: null,
    description: null,
    size: null,
    upc_code: '012345678905',
    master_item_id: null,
    created_at: new Date().toISOString(),
    updated_at: new Date().toISOString(),
  };
  return { ...defaults, ...overrides };
}
// Test suite for upcService.server. Repo calls are queued with
// mockResolvedValueOnce in the exact order the service consumes them, so the
// ordering of the setup calls inside each test is significant.
describe('upcService.server', () => {
  let mockLogger: Logger;
  beforeEach(() => {
    // Fresh mocks per test so queued results and call counts do not leak.
    vi.clearAllMocks();
    mockLogger = createMockLogger();
    mockFetch.mockReset();
  });
  afterEach(() => {
    vi.resetAllMocks();
  });
  // Pure validation helper - no mocks needed.
  describe('isValidUpcCode', () => {
    it('should return true for valid 12-digit UPC-A', () => {
      expect(isValidUpcCode('012345678905')).toBe(true);
    });
    it('should return true for valid 8-digit UPC-E', () => {
      expect(isValidUpcCode('01234567')).toBe(true);
    });
    it('should return true for valid 13-digit EAN-13', () => {
      expect(isValidUpcCode('5901234123457')).toBe(true);
    });
    it('should return true for valid 8-digit EAN-8', () => {
      expect(isValidUpcCode('96385074')).toBe(true);
    });
    it('should return true for valid 14-digit GTIN-14', () => {
      expect(isValidUpcCode('00012345678905')).toBe(true);
    });
    it('should return false for code with less than 8 digits', () => {
      expect(isValidUpcCode('1234567')).toBe(false);
    });
    it('should return false for code with more than 14 digits', () => {
      expect(isValidUpcCode('123456789012345')).toBe(false);
    });
    it('should return false for code with non-numeric characters', () => {
      expect(isValidUpcCode('01234567890A')).toBe(false);
    });
    it('should return false for empty string', () => {
      expect(isValidUpcCode('')).toBe(false);
    });
  });
  // Pure normalization helper - strips spaces and dashes.
  describe('normalizeUpcCode', () => {
    it('should remove spaces from UPC code', () => {
      expect(normalizeUpcCode('012 345 678 905')).toBe('012345678905');
    });
    it('should remove dashes from UPC code', () => {
      expect(normalizeUpcCode('012-345-678-905')).toBe('012345678905');
    });
    it('should remove mixed spaces and dashes', () => {
      expect(normalizeUpcCode('012-345 678-905')).toBe('012345678905');
    });
    it('should return unchanged if no spaces or dashes', () => {
      expect(normalizeUpcCode('012345678905')).toBe('012345678905');
    });
  });
  // Image decoding is a stub in the service, so only the failure shape is tested.
  describe('detectBarcodeFromImage', () => {
    it('should return not implemented error', async () => {
      const result = await detectBarcodeFromImage('base64imagedata', mockLogger);
      expect(result.detected).toBe(false);
      expect(result.upc_code).toBeNull();
      expect(result.error).toBe(
        'Barcode detection from images is not yet implemented. Please use manual entry.',
      );
    });
  });
  // Only Open Food Facts is exercised; paid providers are unconfigured (see mocks above).
  describe('lookupExternalUpc', () => {
    it('should return product info from Open Food Facts on success', async () => {
      mockFetch.mockResolvedValueOnce({
        ok: true,
        json: async () => ({
          status: 1,
          product: {
            product_name: 'Test Product',
            brands: 'Test Brand',
            categories_tags: ['en:snacks'],
            ingredients_text: 'Test ingredients',
            image_url: 'https://example.com/image.jpg',
          },
        }),
      });
      const result = await lookupExternalUpc('012345678905', mockLogger);
      expect(result).not.toBeNull();
      expect(result?.name).toBe('Test Product');
      expect(result?.brand).toBe('Test Brand');
      expect(result?.source).toBe('openfoodfacts');
    });
    it('should return null when Open Food Facts returns status 0', async () => {
      mockFetch.mockResolvedValueOnce({
        ok: true,
        json: async () => ({
          status: 0,
          product: null,
        }),
      });
      const result = await lookupExternalUpc('012345678905', mockLogger);
      expect(result).toBeNull();
    });
    it('should return null when Open Food Facts request fails', async () => {
      mockFetch.mockResolvedValueOnce({
        ok: false,
        status: 500,
      });
      const result = await lookupExternalUpc('012345678905', mockLogger);
      expect(result).toBeNull();
    });
    it('should return null on network error', async () => {
      mockFetch.mockRejectedValueOnce(new Error('Network error'));
      const result = await lookupExternalUpc('012345678905', mockLogger);
      expect(result).toBeNull();
    });
    it('should use generic_name when product_name is missing', async () => {
      mockFetch.mockResolvedValueOnce({
        ok: true,
        json: async () => ({
          status: 1,
          product: {
            generic_name: 'Generic Product Name',
            brands: null,
          },
        }),
      });
      const result = await lookupExternalUpc('012345678905', mockLogger);
      expect(result?.name).toBe('Generic Product Name');
    });
  });
  // End-to-end scan flow: detection/validation -> local DB -> external -> history.
  describe('scanUpc', () => {
    it('should scan with manual entry and return product from database', async () => {
      const mockProduct = {
        product_id: 1,
        name: 'Test Product',
        brand: 'Test Brand',
        category: 'Snacks',
        description: null,
        size: '100g',
        upc_code: '012345678905',
        image_url: null,
        master_item_id: null,
      };
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(mockProduct);
      vi.mocked(upcRepo.recordScan).mockResolvedValueOnce({
        scan_id: 1,
        user_id: 'user-1',
        upc_code: '012345678905',
        product_id: 1,
        scan_source: 'manual_entry',
        scan_confidence: 1.0,
        raw_image_path: null,
        lookup_successful: true,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      });
      const result = await scanUpc(
        'user-1',
        { upc_code: '012345678905', scan_source: 'manual_entry' },
        mockLogger,
      );
      expect(result.upc_code).toBe('012345678905');
      expect(result.product).toEqual(mockProduct);
      expect(result.lookup_successful).toBe(true);
      expect(result.is_new_product).toBe(false);
      expect(result.confidence).toBe(1.0);
    });
    it('should scan with manual entry and perform external lookup when not in database', async () => {
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.findExternalLookup).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.upsertExternalLookup).mockResolvedValueOnce(
        createMockExternalLookupRecord(),
      );
      vi.mocked(upcRepo.recordScan).mockResolvedValueOnce({
        scan_id: 2,
        user_id: 'user-1',
        upc_code: '012345678905',
        product_id: null,
        scan_source: 'manual_entry',
        scan_confidence: 1.0,
        raw_image_path: null,
        lookup_successful: true,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      });
      mockFetch.mockResolvedValueOnce({
        ok: true,
        json: async () => ({
          status: 1,
          product: {
            product_name: 'External Product',
            brands: 'External Brand',
          },
        }),
      });
      const result = await scanUpc(
        'user-1',
        { upc_code: '012345678905', scan_source: 'manual_entry' },
        mockLogger,
      );
      expect(result.product).toBeNull();
      expect(result.external_lookup).not.toBeNull();
      expect(result.external_lookup?.name).toBe('External Product');
      expect(result.is_new_product).toBe(true);
    });
    it('should use cached external lookup when available', async () => {
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.findExternalLookup).mockResolvedValueOnce({
        lookup_id: 1,
        upc_code: '012345678905',
        product_name: 'Cached Product',
        brand_name: 'Cached Brand',
        category: 'Cached Category',
        description: null,
        image_url: null,
        external_source: 'openfoodfacts',
        lookup_data: null,
        lookup_successful: true,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      });
      vi.mocked(upcRepo.recordScan).mockResolvedValueOnce({
        scan_id: 3,
        user_id: 'user-1',
        upc_code: '012345678905',
        product_id: null,
        scan_source: 'manual_entry',
        scan_confidence: 1.0,
        raw_image_path: null,
        lookup_successful: true,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      });
      const result = await scanUpc(
        'user-1',
        { upc_code: '012345678905', scan_source: 'manual_entry' },
        mockLogger,
      );
      expect(result.external_lookup?.name).toBe('Cached Product');
      // Cache hit means no outbound HTTP request at all.
      expect(mockFetch).not.toHaveBeenCalled();
    });
    it('should throw error for invalid UPC code format', async () => {
      await expect(
        scanUpc('user-1', { upc_code: 'invalid', scan_source: 'manual_entry' }, mockLogger),
      ).rejects.toThrow('Invalid UPC code format. UPC codes must be 8-14 digits.');
    });
    it('should throw error when neither upc_code nor image_base64 provided', async () => {
      await expect(
        scanUpc('user-1', { scan_source: 'manual_entry' } as any, mockLogger),
      ).rejects.toThrow('Either upc_code or image_base64 must be provided.');
    });
    it('should record failed scan when image detection fails', async () => {
      // Image detection is stubbed to always fail, so the service records a
      // zero-confidence failure row.
      vi.mocked(upcRepo.recordScan).mockResolvedValueOnce({
        scan_id: 4,
        user_id: 'user-1',
        upc_code: 'DETECTION_FAILED',
        product_id: null,
        scan_source: 'image_upload',
        scan_confidence: 0,
        raw_image_path: null,
        lookup_successful: false,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      });
      const result = await scanUpc(
        'user-1',
        { image_base64: 'base64data', scan_source: 'image_upload' },
        mockLogger,
      );
      expect(result.lookup_successful).toBe(false);
      expect(result.confidence).toBe(0);
    });
  });
  // Stateless lookup path (no scan-history writes).
  describe('lookupUpc', () => {
    it('should return product from database when found', async () => {
      const mockProduct = {
        product_id: 1,
        name: 'Test Product',
        brand: 'Test Brand',
        category: 'Snacks',
        description: null,
        size: '100g',
        upc_code: '012345678905',
        image_url: null,
        master_item_id: null,
      };
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(mockProduct);
      const result = await lookupUpc({ upc_code: '012345678905' }, mockLogger);
      expect(result.found).toBe(true);
      expect(result.product).toEqual(mockProduct);
      expect(result.from_cache).toBe(false);
    });
    it('should return cached external lookup when available', async () => {
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.findExternalLookup).mockResolvedValueOnce({
        lookup_id: 1,
        upc_code: '012345678905',
        product_name: 'Cached Product',
        brand_name: null,
        category: null,
        description: null,
        image_url: null,
        external_source: 'openfoodfacts',
        lookup_data: null,
        lookup_successful: true,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      });
      const result = await lookupUpc({ upc_code: '012345678905' }, mockLogger);
      expect(result.found).toBe(true);
      expect(result.from_cache).toBe(true);
      expect(result.external_lookup?.name).toBe('Cached Product');
    });
    it('should return cached unsuccessful lookup', async () => {
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.findExternalLookup).mockResolvedValueOnce({
        lookup_id: 1,
        upc_code: '012345678905',
        product_name: null,
        brand_name: null,
        category: null,
        description: null,
        image_url: null,
        external_source: 'unknown',
        lookup_data: null,
        lookup_successful: false,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      });
      const result = await lookupUpc({ upc_code: '012345678905' }, mockLogger);
      expect(result.found).toBe(false);
      expect(result.from_cache).toBe(true);
    });
    it('should perform fresh external lookup when force_refresh is true', async () => {
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.upsertExternalLookup).mockResolvedValueOnce(
        createMockExternalLookupRecord(),
      );
      mockFetch.mockResolvedValueOnce({
        ok: true,
        json: async () => ({
          status: 1,
          product: {
            product_name: 'Fresh External Product',
            brands: 'Fresh Brand',
          },
        }),
      });
      const result = await lookupUpc({ upc_code: '012345678905', force_refresh: true }, mockLogger);
      expect(result.from_cache).toBe(false);
      expect(result.external_lookup?.name).toBe('Fresh External Product');
      // force_refresh must bypass the cache read entirely.
      expect(upcRepo.findExternalLookup).not.toHaveBeenCalled();
    });
    it('should throw error for invalid UPC code', async () => {
      await expect(lookupUpc({ upc_code: 'invalid' }, mockLogger)).rejects.toThrow(
        'Invalid UPC code format. UPC codes must be 8-14 digits.',
      );
    });
    it('should normalize UPC code before lookup', async () => {
      vi.mocked(upcRepo.findProductByUpc).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.findExternalLookup).mockResolvedValueOnce(null);
      vi.mocked(upcRepo.upsertExternalLookup).mockResolvedValueOnce(
        createMockExternalLookupRecord(),
      );
      mockFetch.mockResolvedValueOnce({
        ok: true,
        json: async () => ({ status: 0 }),
      });
      const result = await lookupUpc({ upc_code: '012-345-678-905' }, mockLogger);
      expect(result.upc_code).toBe('012345678905');
    });
  });
  describe('linkUpcToProduct', () => {
    it('should link UPC code to product successfully', async () => {
      vi.mocked(upcRepo.linkUpcToProduct).mockResolvedValueOnce(createMockProductRow());
      await linkUpcToProduct(1, '012345678905', mockLogger);
      expect(upcRepo.linkUpcToProduct).toHaveBeenCalledWith(1, '012345678905', mockLogger);
    });
    it('should throw error for invalid UPC code', async () => {
      await expect(linkUpcToProduct(1, 'invalid', mockLogger)).rejects.toThrow(
        'Invalid UPC code format. UPC codes must be 8-14 digits.',
      );
    });
    it('should normalize UPC code before linking', async () => {
      vi.mocked(upcRepo.linkUpcToProduct).mockResolvedValueOnce(createMockProductRow());
      await linkUpcToProduct(1, '012-345-678-905', mockLogger);
      expect(upcRepo.linkUpcToProduct).toHaveBeenCalledWith(1, '012345678905', mockLogger);
    });
  });
  // History/stats accessors are thin passthroughs to the repo layer.
  describe('getScanHistory', () => {
    it('should return paginated scan history', async () => {
      const mockHistory = {
        scans: [
          {
            scan_id: 1,
            user_id: 'user-1',
            upc_code: '012345678905',
            product_id: 1,
            scan_source: 'manual_entry' as UpcScanSource,
            scan_confidence: 1.0,
            raw_image_path: null,
            lookup_successful: true,
            created_at: new Date().toISOString(),
            updated_at: new Date().toISOString(),
          },
        ],
        total: 1,
      };
      vi.mocked(upcRepo.getScanHistory).mockResolvedValueOnce(mockHistory);
      const result = await getScanHistory({ user_id: 'user-1', limit: 10, offset: 0 }, mockLogger);
      expect(result.scans).toHaveLength(1);
      expect(result.total).toBe(1);
    });
    it('should filter by scan source', async () => {
      vi.mocked(upcRepo.getScanHistory).mockResolvedValueOnce({ scans: [], total: 0 });
      await getScanHistory({ user_id: 'user-1', scan_source: 'image_upload' }, mockLogger);
      expect(upcRepo.getScanHistory).toHaveBeenCalledWith(
        { user_id: 'user-1', scan_source: 'image_upload' },
        mockLogger,
      );
    });
    it('should filter by date range', async () => {
      vi.mocked(upcRepo.getScanHistory).mockResolvedValueOnce({ scans: [], total: 0 });
      await getScanHistory(
        {
          user_id: 'user-1',
          from_date: '2024-01-01',
          to_date: '2024-01-31',
        },
        mockLogger,
      );
      expect(upcRepo.getScanHistory).toHaveBeenCalledWith(
        {
          user_id: 'user-1',
          from_date: '2024-01-01',
          to_date: '2024-01-31',
        },
        mockLogger,
      );
    });
  });
  describe('getScanStats', () => {
    it('should return user scan statistics', async () => {
      const mockStats = {
        total_scans: 100,
        successful_lookups: 80,
        unique_products: 50,
        scans_today: 5,
        scans_this_week: 20,
      };
      vi.mocked(upcRepo.getUserScanStats).mockResolvedValueOnce(mockStats);
      const result = await getScanStats('user-1', mockLogger);
      expect(result).toEqual(mockStats);
      expect(upcRepo.getUserScanStats).toHaveBeenCalledWith('user-1', mockLogger);
    });
  });
  describe('getScanById', () => {
    it('should return scan record by ID', async () => {
      const mockScan = {
        scan_id: 1,
        user_id: 'user-1',
        upc_code: '012345678905',
        product_id: 1,
        scan_source: 'manual_entry' as UpcScanSource,
        scan_confidence: 1.0,
        raw_image_path: null,
        lookup_successful: true,
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      };
      vi.mocked(upcRepo.getScanById).mockResolvedValueOnce(mockScan);
      const result = await getScanById(1, 'user-1', mockLogger);
      expect(result).toEqual(mockScan);
      expect(upcRepo.getScanById).toHaveBeenCalledWith(1, 'user-1', mockLogger);
    });
  });
});

View File

@@ -0,0 +1,614 @@
// src/services/upcService.server.ts
/**
* @file UPC Scanning Service
* Handles UPC barcode scanning, lookup, and external API integration.
* Provides functionality for scanning barcodes from images and manual entry.
*/
import type { Logger } from 'pino';
import { upcRepo } from './db/index.db';
import type {
UpcScanRequest,
UpcScanResult,
UpcLookupResult,
UpcProductMatch,
UpcExternalProductInfo,
UpcExternalLookupOptions,
UpcScanHistoryQueryOptions,
UpcScanHistoryRecord,
BarcodeDetectionResult,
} from '../types/upc';
import { config, isUpcItemDbConfigured, isBarcodeLookupConfigured } from '../config/env';
/**
 * Default cache age for external lookups (7 days in hours)
 */
const DEFAULT_CACHE_AGE_HOURS = 168;
/**
 * UPC code validation regex (8-14 digits)
 */
const UPC_CODE_REGEX = /^[0-9]{8,14}$/;
/**
 * Validates a UPC code format (UPC-A/E, EAN-8/13, GTIN-14: 8-14 digits).
 * @param upcCode The UPC code to validate
 * @returns True if the UPC code is valid, false otherwise
 */
export const isValidUpcCode = (upcCode: string): boolean => UPC_CODE_REGEX.test(upcCode);
/**
 * Normalizes a UPC code by stripping whitespace and dash separators.
 * @param upcCode The raw UPC code input
 * @returns Normalized UPC code
 */
export const normalizeUpcCode = (upcCode: string): string => {
  // Splitting on separators and rejoining is equivalent to a global replace.
  return upcCode.split(/[\s-]/).join('');
};
/**
 * Detects and decodes a barcode from an image.
 * Placeholder: real detection (e.g. zxing-js, quagga, or a cloud vision
 * service) has not been wired up yet, so this always reports failure.
 * @param imageBase64 Base64-encoded image data
 * @param logger Pino logger instance
 * @returns Barcode detection result (always a failed detection for now)
 */
export const detectBarcodeFromImage = async (
  imageBase64: string,
  logger: Logger,
): Promise<BarcodeDetectionResult> => {
  logger.debug({ imageLength: imageBase64.length }, 'Attempting to detect barcode from image');
  // TODO: Implement actual barcode detection using a library like:
  // - @nickvdyck/barcode-reader (pure JS)
  // - dynamsoft-javascript-barcode (commercial)
  // - External service like Google Cloud Vision API
  logger.warn('Barcode detection from images is not yet implemented');
  const failure: BarcodeDetectionResult = {
    detected: false,
    upc_code: null,
    confidence: null,
    format: null,
    error: 'Barcode detection from images is not yet implemented. Please use manual entry.',
  };
  return failure;
};
/**
 * Looks up product in Open Food Facts API (free, open source).
 * Network or parse failures are logged and reported as "not found".
 * @param upcCode The UPC code to look up
 * @param logger Pino logger instance
 * @returns External product information or null if not found
 */
const lookupOpenFoodFacts = async (
  upcCode: string,
  logger: Logger,
): Promise<UpcExternalProductInfo | null> => {
  const url = `https://world.openfoodfacts.org/api/v2/product/${upcCode}`;
  try {
    logger.debug({ url }, 'Querying Open Food Facts API');
    const response = await fetch(url, {
      headers: { 'User-Agent': 'FlyerCrawler/1.0 (contact@projectium.com)' },
    });
    if (response.ok) {
      const payload = await response.json();
      // status === 1 with a product object is the API's "found" signal.
      if (payload.status === 1 && payload.product) {
        const p = payload.product;
        logger.info({ upcCode, productName: p.product_name }, 'Found product in Open Food Facts');
        return {
          name: p.product_name || p.generic_name || 'Unknown Product',
          brand: p.brands || null,
          category: p.categories_tags?.[0]?.replace('en:', '') || null,
          description: p.ingredients_text || null,
          image_url: p.image_url || p.image_front_url || null,
          source: 'openfoodfacts',
          raw_data: p,
        };
      }
    }
    logger.debug({ upcCode }, 'Product not found in Open Food Facts');
  } catch (caught) {
    const err = caught instanceof Error ? caught : new Error(String(caught));
    logger.warn({ err, upcCode }, 'Error querying Open Food Facts API');
  }
  return null;
};
/**
 * Looks up product in UPC Item DB API.
 * Requires UPC_ITEM_DB_API_KEY environment variable; silently skipped when
 * the key is not configured.
 * @see https://www.upcitemdb.com/wp/docs/main/development/
 * @param upcCode The UPC code to look up
 * @param logger Pino logger instance
 * @returns External product information or null if not found
 */
const lookupUpcItemDb = async (
  upcCode: string,
  logger: Logger,
): Promise<UpcExternalProductInfo | null> => {
  if (!isUpcItemDbConfigured) {
    logger.debug('UPC Item DB API key not configured, skipping');
    return null;
  }
  try {
    const url = `https://api.upcitemdb.com/prod/trial/lookup?upc=${upcCode}`;
    logger.debug({ url }, 'Querying UPC Item DB API');
    const response = await fetch(url, {
      headers: {
        'Content-Type': 'application/json',
        user_key: config.upc.upcItemDbApiKey!,
        key_type: '3scale',
      },
    });
    if (response.ok) {
      const payload = await response.json();
      // 'OK' plus a non-empty items array is the API's "found" signal.
      if (payload.code === 'OK' && payload.items && payload.items.length > 0) {
        const item = payload.items[0];
        logger.info({ upcCode, productName: item.title }, 'Found product in UPC Item DB');
        return {
          name: item.title || 'Unknown Product',
          brand: item.brand || null,
          category: item.category || null,
          description: item.description || null,
          image_url: item.images?.[0] || null,
          source: 'upcitemdb',
          raw_data: item,
        };
      }
    } else if (response.status === 429) {
      // Trial tier is heavily rate limited; surface that distinctly.
      logger.warn({ upcCode }, 'UPC Item DB rate limit exceeded');
    }
    logger.debug({ upcCode }, 'Product not found in UPC Item DB');
  } catch (caught) {
    const err = caught instanceof Error ? caught : new Error(String(caught));
    logger.warn({ err, upcCode }, 'Error querying UPC Item DB API');
  }
  return null;
};
/**
 * Looks up product in Barcode Lookup API.
 * Requires BARCODE_LOOKUP_API_KEY environment variable; silently skipped
 * when the key is not configured.
 * @see https://www.barcodelookup.com/api
 * @param upcCode The UPC code to look up
 * @param logger Pino logger instance
 * @returns External product information or null if not found
 */
const lookupBarcodeLookup = async (
  upcCode: string,
  logger: Logger,
): Promise<UpcExternalProductInfo | null> => {
  if (!isBarcodeLookupConfigured) {
    logger.debug('Barcode Lookup API key not configured, skipping');
    return null;
  }
  try {
    const url = `https://api.barcodelookup.com/v3/products?barcode=${upcCode}&key=${config.upc.barcodeLookupApiKey}`;
    logger.debug('Querying Barcode Lookup API');
    const response = await fetch(url, {
      headers: { Accept: 'application/json' },
    });
    if (response.ok) {
      const payload = await response.json();
      if (payload.products && payload.products.length > 0) {
        const match = payload.products[0];
        logger.info({ upcCode, productName: match.title }, 'Found product in Barcode Lookup');
        return {
          name: match.title || match.product_name || 'Unknown Product',
          brand: match.brand || null,
          category: match.category || null,
          description: match.description || null,
          image_url: match.images?.[0] || null,
          source: 'barcodelookup',
          raw_data: match,
        };
      }
    } else if (response.status === 429) {
      logger.warn({ upcCode }, 'Barcode Lookup rate limit exceeded');
    } else if (response.status === 404) {
      logger.debug({ upcCode }, 'Product not found in Barcode Lookup');
    }
  } catch (caught) {
    const err = caught instanceof Error ? caught : new Error(String(caught));
    logger.warn({ err, upcCode }, 'Error querying Barcode Lookup API');
  }
  return null;
};
/**
 * Looks up product information from external UPC databases.
 * Tries multiple APIs in order of preference:
 * 1. Open Food Facts (free, open source)
 * 2. UPC Item DB (requires API key)
 * 3. Barcode Lookup (requires API key)
 * @param upcCode The UPC code to look up
 * @param logger Pino logger instance
 * @returns External product information or null if not found
 */
export const lookupExternalUpc = async (
  upcCode: string,
  logger: Logger,
): Promise<UpcExternalProductInfo | null> => {
  logger.debug({ upcCode }, 'Looking up UPC in external databases');
  // Providers are queried sequentially, free/no-key provider first; the
  // first hit wins.
  const providers = [lookupOpenFoodFacts, lookupUpcItemDb, lookupBarcodeLookup];
  for (const provider of providers) {
    const info = await provider(upcCode, logger);
    if (info) {
      return info;
    }
  }
  logger.debug({ upcCode }, 'No external product information found');
  return null;
};
/**
 * Performs a UPC scan operation including barcode detection, database lookup,
 * and optional external API lookup.
 * @param userId The user performing the scan
 * @param request The scan request containing UPC code or image
 * @param logger Pino logger instance
 * @returns Complete scan result with product information
 * @throws Error if the (normalized) UPC code is malformed, or if neither
 *   upc_code nor image_base64 is supplied in the request.
 */
export const scanUpc = async (
  userId: string,
  request: UpcScanRequest,
  logger: Logger,
): Promise<UpcScanResult> => {
  const scanLogger = logger.child({ userId, scanSource: request.scan_source });
  scanLogger.info('Starting UPC scan');
  let upcCode: string | null = null;
  let scanConfidence: number | null = null;
  // Step 1: Get UPC code from request (manual entry or image detection)
  if (request.upc_code) {
    // Manual entry - normalize and validate
    upcCode = normalizeUpcCode(request.upc_code);
    if (!isValidUpcCode(upcCode)) {
      scanLogger.warn({ upcCode }, 'Invalid UPC code format');
      throw new Error('Invalid UPC code format. UPC codes must be 8-14 digits.');
    }
    scanConfidence = 1.0; // Manual entry has 100% confidence
    scanLogger.debug({ upcCode }, 'Using manually entered UPC code');
  } else if (request.image_base64) {
    // Image detection
    const detection = await detectBarcodeFromImage(request.image_base64, scanLogger);
    if (!detection.detected || !detection.upc_code) {
      // Record the failed scan attempt. The 'DETECTION_FAILED' sentinel keeps
      // a history row even though no usable UPC was produced; the returned
      // result carries an empty upc_code and zero confidence.
      const scanRecord = await upcRepo.recordScan(
        userId,
        'DETECTION_FAILED',
        request.scan_source,
        scanLogger,
        {
          scanConfidence: 0,
          lookupSuccessful: false,
        },
      );
      return {
        scan_id: scanRecord.scan_id,
        upc_code: '',
        product: null,
        external_lookup: null,
        confidence: 0,
        lookup_successful: false,
        is_new_product: false,
        scanned_at: scanRecord.created_at,
      };
    }
    upcCode = detection.upc_code;
    scanConfidence = detection.confidence;
    scanLogger.info({ upcCode, confidence: scanConfidence }, 'Barcode detected from image');
  } else {
    throw new Error('Either upc_code or image_base64 must be provided.');
  }
  // Step 2: Look up product in our database
  let product: UpcProductMatch | null = null;
  product = await upcRepo.findProductByUpc(upcCode, scanLogger);
  const isNewProduct = !product;
  scanLogger.debug({ upcCode, found: !!product, isNewProduct }, 'Local database lookup complete');
  // Step 3: If not found locally, check external APIs
  let externalLookup: UpcExternalProductInfo | null = null;
  if (!product) {
    // Check cache first
    const cachedLookup = await upcRepo.findExternalLookup(
      upcCode,
      DEFAULT_CACHE_AGE_HOURS,
      scanLogger,
    );
    if (cachedLookup) {
      scanLogger.debug({ upcCode }, 'Using cached external lookup');
      // A cached *failed* lookup is honored too: externalLookup stays null so
      // external APIs are not re-queried within the cache window.
      if (cachedLookup.lookup_successful) {
        externalLookup = {
          name: cachedLookup.product_name || 'Unknown Product',
          brand: cachedLookup.brand_name,
          category: cachedLookup.category,
          description: cachedLookup.description,
          image_url: cachedLookup.image_url,
          source: cachedLookup.external_source,
          raw_data: cachedLookup.lookup_data ?? undefined,
        };
      }
    } else {
      // Perform fresh external lookup
      externalLookup = await lookupExternalUpc(upcCode, scanLogger);
      // Cache the result (success or failure)
      await upcRepo.upsertExternalLookup(
        upcCode,
        externalLookup?.source || 'unknown',
        !!externalLookup,
        scanLogger,
        externalLookup
          ? {
              productName: externalLookup.name,
              brandName: externalLookup.brand,
              category: externalLookup.category,
              description: externalLookup.description,
              imageUrl: externalLookup.image_url,
              lookupData: externalLookup.raw_data as Record<string, unknown> | undefined,
            }
          : {},
      );
    }
  }
  // Step 4: Record the scan in history
  const lookupSuccessful = !!(product || externalLookup);
  const scanRecord = await upcRepo.recordScan(userId, upcCode, request.scan_source, scanLogger, {
    productId: product?.product_id,
    scanConfidence,
    lookupSuccessful,
  });
  scanLogger.info(
    { scanId: scanRecord.scan_id, upcCode, lookupSuccessful, isNewProduct },
    'UPC scan completed',
  );
  return {
    scan_id: scanRecord.scan_id,
    upc_code: upcCode,
    product,
    external_lookup: externalLookup,
    confidence: scanConfidence,
    lookup_successful: lookupSuccessful,
    is_new_product: isNewProduct,
    scanned_at: scanRecord.created_at,
  };
};
/**
* Looks up a UPC code without recording scan history.
* Useful for quick lookups or verification.
* @param options Lookup options
* @param logger Pino logger instance
* @returns Lookup result with product information
*/
export const lookupUpc = async (
options: UpcExternalLookupOptions,
logger: Logger,
): Promise<UpcLookupResult> => {
const {
upc_code,
force_refresh = false,
max_cache_age_hours = DEFAULT_CACHE_AGE_HOURS,
} = options;
const lookupLogger = logger.child({ upcCode: upc_code });
lookupLogger.debug('Performing UPC lookup');
const normalizedUpc = normalizeUpcCode(upc_code);
if (!isValidUpcCode(normalizedUpc)) {
throw new Error('Invalid UPC code format. UPC codes must be 8-14 digits.');
}
// Check local database
const product = await upcRepo.findProductByUpc(normalizedUpc, lookupLogger);
if (product) {
lookupLogger.debug({ productId: product.product_id }, 'Found product in local database');
return {
upc_code: normalizedUpc,
product,
external_lookup: null,
found: true,
from_cache: false,
};
}
// Check external cache (unless force refresh)
if (!force_refresh) {
const cachedLookup = await upcRepo.findExternalLookup(
normalizedUpc,
max_cache_age_hours,
lookupLogger,
);
if (cachedLookup) {
lookupLogger.debug('Returning cached external lookup');
if (cachedLookup.lookup_successful) {
return {
upc_code: normalizedUpc,
product: null,
external_lookup: {
name: cachedLookup.product_name || 'Unknown Product',
brand: cachedLookup.brand_name,
category: cachedLookup.category,
description: cachedLookup.description,
image_url: cachedLookup.image_url,
source: cachedLookup.external_source,
raw_data: cachedLookup.lookup_data ?? undefined,
},
found: true,
from_cache: true,
};
}
// Cached lookup was unsuccessful
return {
upc_code: normalizedUpc,
product: null,
external_lookup: null,
found: false,
from_cache: true,
};
}
}
// Perform fresh external lookup
const externalLookup = await lookupExternalUpc(normalizedUpc, lookupLogger);
// Cache the result
await upcRepo.upsertExternalLookup(
normalizedUpc,
externalLookup?.source || 'unknown',
!!externalLookup,
lookupLogger,
externalLookup
? {
productName: externalLookup.name,
brandName: externalLookup.brand,
category: externalLookup.category,
description: externalLookup.description,
imageUrl: externalLookup.image_url,
lookupData: externalLookup.raw_data as Record<string, unknown> | undefined,
}
: {},
);
return {
upc_code: normalizedUpc,
product: null,
external_lookup: externalLookup,
found: !!externalLookup,
from_cache: false,
};
};
/**
 * Associates a UPC code with an existing product (admin operation).
 * @param productId The product ID to link
 * @param upcCode The UPC code to link
 * @param logger Pino logger instance
 * @throws Error when the UPC code fails format validation
 */
export const linkUpcToProduct = async (
  productId: number,
  upcCode: string,
  logger: Logger,
): Promise<void> => {
  const normalizedUpc = normalizeUpcCode(upcCode);
  if (!isValidUpcCode(normalizedUpc)) {
    throw new Error('Invalid UPC code format. UPC codes must be 8-14 digits.');
  }
  // Shared structured-log context for the before/after messages.
  const logContext = { productId, upcCode: normalizedUpc };
  logger.info(logContext, 'Linking UPC code to product');
  await upcRepo.linkUpcToProduct(productId, normalizedUpc, logger);
  logger.info(logContext, 'UPC code linked successfully');
};
/**
 * Retrieves a page of UPC scan history for a user.
 * @param options Query options (user ID, pagination, filters)
 * @param logger Pino logger instance
 * @returns The matching scan records plus the total row count
 */
export const getScanHistory = async (
  options: UpcScanHistoryQueryOptions,
  logger: Logger,
): Promise<{ scans: UpcScanHistoryRecord[]; total: number }> => {
  logger.debug({ userId: options.user_id }, 'Fetching scan history');
  const history = await upcRepo.getScanHistory(options, logger);
  return history;
};
/**
 * Computes aggregate scan statistics for a user.
 * @param userId The user ID
 * @param logger Pino logger instance
 * @returns Counts of total/successful/unique/recent scans
 */
export const getScanStats = async (
  userId: string,
  logger: Logger,
): Promise<{
  total_scans: number;
  successful_lookups: number;
  unique_products: number;
  scans_today: number;
  scans_this_week: number;
}> => {
  logger.debug({ userId }, 'Fetching scan statistics');
  const stats = await upcRepo.getUserScanStats(userId, logger);
  return stats;
};
/**
 * Fetches a single scan record by its ID.
 * @param scanId The scan ID
 * @param userId The user ID (scopes the query for authorization)
 * @param logger Pino logger instance
 * @returns The scan record
 */
export const getScanById = async (
  scanId: number,
  userId: string,
  logger: Logger,
): Promise<UpcScanHistoryRecord> => {
  logger.debug({ scanId, userId }, 'Fetching scan by ID');
  const scan = await upcRepo.getScanById(scanId, userId, logger);
  return scan;
};
