Compare commits
35 commits:
57215e2778, 2c1de24e9a, c8baff7aac, de3f21a7ec, c6adbf79e7, 7399a27600, 68aadcaa4e, 971d2c3fa7,
daaacfde5e, 7ac8fe1d29, a2462dfb6b, a911224fb4, bf4bcef890, ac6cd2e0a1, eea03880c1, 7fc263691f,
c0912d36d5, 612c2b5943, 8e787ddcf0, 11c52d284c, b528bd3651, 4c5ceb1bd6, bcc4ad64dc, d520980322,
d79955aaa0, e66027dc8e, 027df989a4, d4d69caaf7, 03b5af39e1, 8a86333f86, f173f805ea, d3b0996ad5,
b939262f0c, 9437f3d6c6, f1e028d498
@@ -47,6 +47,19 @@ jobs:
- name: Install Dependencies
run: npm ci

- name: Bump Minor Version and Push
run: |
# Configure git for the commit.
git config --global user.name 'Gitea Actions'
git config --global user.email 'actions@gitea.projectium.com'

# Bump the minor version number. This creates a new commit and a new tag.
# The commit message includes [skip ci] to prevent this push from triggering another workflow run.
npm version minor -m "ci: Bump version to %s for production release [skip ci]"

# Push the new commit and the new tag back to the main branch.
git push --follow-tags

- name: Check for Production Database Schema Changes
env:
DB_HOST: ${{ secrets.DB_HOST }}
@@ -61,9 +74,10 @@ jobs:
echo "--- Checking for production schema changes ---"
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
echo "Current Git Schema Hash: $CURRENT_HASH"
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A || echo "none")
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
if [ -z "$DEPLOYED_HASH" ]; then
echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
echo "ERROR: Database schema mismatch detected! A manual database migration is required."
@@ -79,8 +93,9 @@ jobs:
exit 1
fi
GITEA_SERVER_URL="https://gitea.projectium.com"
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
@@ -148,7 +163,12 @@ jobs:
echo "Updating schema hash in production database..."
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
"CREATE TABLE IF NOT EXISTS public.schema_info (
environment VARCHAR(50) PRIMARY KEY,
schema_hash VARCHAR(64) NOT NULL,
deployed_at TIMESTAMP DEFAULT NOW()
);
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"

UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
||||
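Note: the VITE_APP_* values exported in the build step above reach the client bundle through the project's `./config` module, which is not part of this diff; the App tests later in this excerpt mock it with an `app: { version, commitMessage, commitUrl }` shape. A minimal sketch of what such a module could look like, assuming the usual Vite `import.meta.env` pattern (field names taken from those mocks, defaults invented for illustration):

```ts
// Hypothetical src/config.ts -- a sketch only; the real module is not shown in this diff.
const config = {
  app: {
    // e.g. "20250101-1200:abc1234:0.1.4" (build timestamp : short commit SHA : package version)
    version: import.meta.env.VITE_APP_VERSION ?? 'dev',
    commitUrl: import.meta.env.VITE_APP_COMMIT_URL ?? '#',
    commitMessage: import.meta.env.VITE_APP_COMMIT_MESSAGE ?? '',
  },
  api: {
    baseUrl: import.meta.env.VITE_API_BASE_URL ?? '/api',
  },
};

export default config;
```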
@@ -119,6 +119,11 @@ jobs:
|
||||
# --- JWT Secret for Passport authentication in tests ---
|
||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||
|
||||
# --- V8 Coverage for Server Process ---
|
||||
# This variable tells the Node.js process (our server, started by globalSetup)
|
||||
# where to output its raw V8 coverage data.
|
||||
NODE_V8_COVERAGE: '.coverage/tmp/integration-server'
|
||||
|
||||
# --- Increase Node.js memory limit to prevent heap out of memory errors ---
|
||||
# This is crucial for memory-intensive tasks like running tests and coverage.
|
||||
NODE_OPTIONS: '--max-old-space-size=8192'
|
||||
@@ -137,10 +142,15 @@ jobs:
|
||||
# The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
|
||||
echo "--- Running Unit Tests ---"
|
||||
# npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||
npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
|
||||
npm run test:unit -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
|
||||
|
||||
echo "--- Running Integration Tests ---"
|
||||
npm run test:integration -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||
npm run test:integration -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||
|
||||
echo "--- Running E2E Tests ---"
|
||||
# Run E2E tests using the dedicated E2E config which inherits from integration config.
|
||||
# We still pass --coverage to enable it, but directory and timeout are now in the config.
|
||||
npx vitest run --config vitest.config.e2e.ts --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --no-file-parallelism || true
|
||||
|
||||
# Re-enable secret masking for subsequent steps.
|
||||
echo "::secret-masking::"
|
||||
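The three test invocations above repeat the same `--coverage.exclude` globs on the command line. The equivalent patterns could also live in the shared Vitest configuration instead; a sketch under that assumption (the project's actual vitest config files are not shown here):

```ts
// Hypothetical fragment of a shared vitest config -- illustrates that the CLI
// flags above map onto the coverage.exclude option.
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    coverage: {
      exclude: ['**/*.test.ts', '**/tests/**', '**/mocks/**'],
    },
  },
});
```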
@@ -156,6 +166,7 @@ jobs:
|
||||
echo "Checking for source coverage files..."
|
||||
ls -l .coverage/unit/coverage-final.json
|
||||
ls -l .coverage/integration/coverage-final.json
|
||||
ls -l .coverage/e2e/coverage-final.json || echo "E2E coverage file not found"
|
||||
|
||||
# --- V8 Coverage Processing for Backend Server ---
|
||||
# The integration tests start the server, which generates raw V8 coverage data.
|
||||
@@ -168,7 +179,7 @@ jobs:
|
||||
# Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
|
||||
# We only generate the 'json' report here because it's all nyc needs for merging.
|
||||
echo "Server coverage report about to be generated..."
|
||||
npx c8 report --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
|
||||
npx c8 report --exclude='**/*.test.ts' --exclude='**/tests/**' --exclude='**/mocks/**' --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
|
||||
echo "Server coverage report generated. Verifying existence:"
|
||||
ls -l .coverage/integration-server/coverage-final.json
|
||||
|
||||
@@ -187,6 +198,7 @@ jobs:
|
||||
# We give them unique names to be safe, though it's not strictly necessary.
|
||||
cp .coverage/unit/coverage-final.json "$NYC_SOURCE_DIR/unit-coverage.json"
|
||||
cp .coverage/integration/coverage-final.json "$NYC_SOURCE_DIR/integration-coverage.json"
|
||||
cp .coverage/e2e/coverage-final.json "$NYC_SOURCE_DIR/e2e-coverage.json" || echo "E2E coverage file not found, skipping."
|
||||
# This file might not exist if integration tests fail early, so we add `|| true`
|
||||
cp .coverage/integration-server/coverage-final.json "$NYC_SOURCE_DIR/integration-server-coverage.json" || echo "Server coverage file not found, skipping."
|
||||
echo "Copied coverage files to source directory. Contents:"
|
||||
@@ -206,7 +218,10 @@ jobs:
|
||||
--reporter=text \
|
||||
--reporter=html \
|
||||
--report-dir .coverage/ \
|
||||
--temp-dir "$NYC_SOURCE_DIR"
|
||||
--temp-dir "$NYC_SOURCE_DIR" \
|
||||
--exclude "**/*.test.ts" \
|
||||
--exclude "**/tests/**" \
|
||||
--exclude "**/mocks/**"
|
||||
|
||||
# Re-enable secret masking for subsequent steps.
|
||||
echo "::secret-masking::"
|
||||
@@ -257,18 +272,19 @@ jobs:
|
||||
# We normalize line endings to ensure the hash is consistent across different OS environments.
|
||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||
echo "Current Git Schema Hash: $CURRENT_HASH"
|
||||
|
||||
# Query the production database to get the hash of the deployed schema.
|
||||
# The `psql` command requires PGPASSWORD to be set.
|
||||
# `\t` sets tuples-only mode and `\A` unaligns output to get just the raw value.
|
||||
# The `|| echo "none"` ensures the command doesn't fail if the table or row doesn't exist yet.
|
||||
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A || echo "none")
|
||||
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
|
||||
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A)
|
||||
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
|
||||
|
||||
# Check if the hash is "none" (command failed) OR if it's an empty string (table exists but is empty).
|
||||
if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
|
||||
if [ -z "$DEPLOYED_HASH" ]; then
|
||||
echo "WARNING: No schema hash found in the test database."
|
||||
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
|
||||
echo "--- Debug: Dumping schema_info table ---"
|
||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT * FROM public.schema_info;" || true
|
||||
echo "----------------------------------------"
|
||||
# We allow the deployment to continue, but a manual schema update is required.
|
||||
# You could choose to fail here by adding `exit 1`.
|
||||
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
||||
@@ -292,8 +308,9 @@ jobs:
|
||||
fi
|
||||
|
||||
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
|
||||
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
|
||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
|
||||
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
|
||||
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
|
||||
@@ -355,7 +372,7 @@ jobs:
|
||||
|
||||
echo "Installing production dependencies and restarting test server..."
|
||||
cd /var/www/flyer-crawler-test.projectium.com
|
||||
npm install --omit=dev # Install only production dependencies
|
||||
npm install --omit=dev
|
||||
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
|
||||
# It will START the process if it's not running, or RELOAD it if it is.
|
||||
# We also add `&& pm2 save` to persist the process list across server reboots.
|
||||
@@ -367,7 +384,12 @@ jobs:
|
||||
echo "Updating schema hash in test database..."
|
||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
||||
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
|
||||
"CREATE TABLE IF NOT EXISTS public.schema_info (
|
||||
environment VARCHAR(50) PRIMARY KEY,
|
||||
schema_hash VARCHAR(64) NOT NULL,
|
||||
deployed_at TIMESTAMP DEFAULT NOW()
|
||||
);
|
||||
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
|
||||
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
||||
|
||||
# Verify the hash was updated
|
||||
|
||||
.gitea/workflows/manual-deploy-major.yml (new file, 181 lines)
@@ -0,0 +1,181 @@
|
||||
# .gitea/workflows/manual-deploy-major.yml
|
||||
#
|
||||
# This workflow provides a MANUAL trigger to perform a MAJOR version bump
|
||||
# and deploy the application to the PRODUCTION environment.
|
||||
name: Manual - Deploy Major Version to Production
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
confirmation:
|
||||
description: 'Type "deploy-major-to-prod" to confirm you want to deploy a new major version.'
|
||||
required: true
|
||||
default: 'do-not-run'
|
||||
force_reload:
|
||||
description: 'Force PM2 reload even if version matches (true/false).'
|
||||
required: false
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
jobs:
|
||||
deploy-production-major:
|
||||
runs-on: projectium.com
|
||||
|
||||
steps:
|
||||
- name: Verify Confirmation Phrase
|
||||
run: |
|
||||
if [ "${{ gitea.event.inputs.confirmation }}" != "deploy-major-to-prod" ]; then
|
||||
echo "ERROR: Confirmation phrase did not match. Aborting deployment."
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Confirmation accepted. Proceeding with major version production deployment."
|
||||
|
||||
- name: Checkout Code from 'main' branch
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: 'main' # Explicitly check out the main branch for production deployment
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Install Dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Bump Major Version and Push
|
||||
run: |
|
||||
# Configure git for the commit.
|
||||
git config --global user.name 'Gitea Actions'
|
||||
git config --global user.email 'actions@gitea.projectium.com'
|
||||
|
||||
# Bump the major version number. This creates a new commit and a new tag.
|
||||
# The commit message includes [skip ci] to prevent this push from triggering another workflow run.
|
||||
npm version major -m "ci: Bump version to %s for major release [skip ci]"
|
||||
|
||||
# Push the new commit and the new tag back to the main branch.
|
||||
git push --follow-tags
|
||||
|
||||
- name: Check for Production Database Schema Changes
|
||||
env:
|
||||
DB_HOST: ${{ secrets.DB_HOST }}
|
||||
DB_USER: ${{ secrets.DB_USER }}
|
||||
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
||||
run: |
|
||||
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
||||
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
|
||||
exit 1
|
||||
fi
|
||||
echo "--- Checking for production schema changes ---"
|
||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||
echo "Current Git Schema Hash: $CURRENT_HASH"
|
||||
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
|
||||
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
||||
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
|
||||
if [ -z "$DEPLOYED_HASH" ]; then
|
||||
echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
|
||||
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
||||
echo "ERROR: Database schema mismatch detected! A manual database migration is required."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ Schema is up to date. No changes detected."
|
||||
fi
|
||||
|
||||
- name: Build React Application for Production
|
||||
run: |
|
||||
if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
|
||||
echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
|
||||
exit 1
|
||||
fi
|
||||
GITEA_SERVER_URL="https://gitea.projectium.com"
|
||||
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
|
||||
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
||||
|
||||
- name: Deploy Application to Production Server
|
||||
run: |
|
||||
echo "Deploying application files to /var/www/flyer-crawler.projectium.com..."
|
||||
APP_PATH="/var/www/flyer-crawler.projectium.com"
|
||||
mkdir -p "$APP_PATH"
|
||||
mkdir -p "$APP_PATH/flyer-images/icons" "$APP_PATH/flyer-images/archive"
|
||||
rsync -avz --delete --exclude 'node_modules' --exclude '.git' --exclude 'dist' --exclude 'flyer-images' ./ "$APP_PATH/"
|
||||
rsync -avz dist/ "$APP_PATH"
|
||||
echo "Application deployment complete."
|
||||
|
||||
- name: Install Backend Dependencies and Restart Production Server
|
||||
env:
|
||||
# --- Production Secrets Injection ---
|
||||
DB_HOST: ${{ secrets.DB_HOST }}
|
||||
DB_USER: ${{ secrets.DB_USER }}
|
||||
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
||||
REDIS_URL: 'redis://localhost:6379'
|
||||
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
||||
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
|
||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
||||
SMTP_HOST: 'localhost'
|
||||
SMTP_PORT: '1025'
|
||||
SMTP_SECURE: 'false'
|
||||
SMTP_USER: ''
|
||||
SMTP_PASS: ''
|
||||
SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
|
||||
run: |
|
||||
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
||||
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
|
||||
exit 1
|
||||
fi
|
||||
echo "Installing production dependencies and restarting server..."
|
||||
cd /var/www/flyer-crawler.projectium.com
|
||||
npm install --omit=dev
|
||||
|
||||
# --- Version Check Logic ---
|
||||
# Get the version from the newly deployed package.json
|
||||
NEW_VERSION=$(node -p "require('./package.json').version")
|
||||
echo "Deployed Package Version: $NEW_VERSION"
|
||||
|
||||
# Get the running version from PM2 for the main API process
|
||||
# We use a small node script to parse the JSON output from pm2 jlist
|
||||
RUNNING_VERSION=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.version : ''); } catch(e) { console.log(''); }")
|
||||
echo "Running PM2 Version: $RUNNING_VERSION"
|
||||
|
||||
if [ "${{ gitea.event.inputs.force_reload }}" == "true" ] || [ "$NEW_VERSION" != "$RUNNING_VERSION" ] || [ -z "$RUNNING_VERSION" ]; then
|
||||
if [ "${{ gitea.event.inputs.force_reload }}" == "true" ]; then
|
||||
echo "Force reload triggered by manual input. Reloading PM2..."
|
||||
else
|
||||
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
||||
fi
|
||||
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
|
||||
echo "Production backend server reloaded successfully."
|
||||
else
|
||||
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
||||
fi
|
||||
|
||||
echo "Updating schema hash in production database..."
|
||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
||||
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
|
||||
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
||||
|
||||
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
||||
if [ "$CURRENT_HASH" = "$UPDATED_HASH" ]; then
|
||||
echo "✅ Schema hash successfully updated in the database to: $UPDATED_HASH"
|
||||
else
|
||||
echo "ERROR: Failed to update schema hash in the database."
|
||||
fi
|
||||
|
||||
- name: Show PM2 Environment for Production
|
||||
run: |
|
||||
echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
|
||||
sleep 5
|
||||
pm2 describe flyer-crawler-api || echo "Could not find production pm2 process."
|
||||
pm2 logs flyer-crawler-api --lines 20 --nostream || echo "Could not find production pm2 process."
|
||||
pm2 env flyer-crawler-api || echo "Could not find production pm2 process."
|
||||
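For reference, the version check in the restart step above parses `pm2 jlist` output with an inline Node one-liner and compares `pm2_env.version` for the process named `flyer-crawler-api` against the freshly deployed package.json version. A minimal sketch of the shape that one-liner assumes (the real `pm2 jlist` payload carries many more fields):

```ts
// Shape assumed by the inline parser in the deploy step above (illustrative subset).
interface Pm2Process {
  name: string;
  pm2_env: {
    // pm2 typically records the package.json version it saw when the process was (re)started.
    version?: string;
  };
}

// The same lookup as the one-liner, written as a function.
function runningVersion(list: Pm2Process[], appName = 'flyer-crawler-api'): string {
  const app = list.find((p) => p.name === appName);
  return app?.pm2_env.version ?? '';
}
```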
@@ -18,12 +18,70 @@ module.exports = {
|
||||
NODE_ENV: 'production', // Set the Node.js environment to production
|
||||
name: 'flyer-crawler-api',
|
||||
cwd: '/var/www/flyer-crawler.projectium.com',
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
// Test Environment Settings
|
||||
env_test: {
|
||||
NODE_ENV: 'development', // Use 'development' for test to enable more verbose logging if needed
|
||||
NODE_ENV: 'test', // Set to 'test' to match the environment purpose and disable pino-pretty
|
||||
name: 'flyer-crawler-api-test',
|
||||
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
// Development Environment Settings
|
||||
env_development: {
|
||||
NODE_ENV: 'development',
|
||||
name: 'flyer-crawler-api-dev',
|
||||
watch: true,
|
||||
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -36,12 +94,70 @@ module.exports = {
|
||||
NODE_ENV: 'production',
|
||||
name: 'flyer-crawler-worker',
|
||||
cwd: '/var/www/flyer-crawler.projectium.com',
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
// Test Environment Settings
|
||||
env_test: {
|
||||
NODE_ENV: 'development',
|
||||
NODE_ENV: 'test',
|
||||
name: 'flyer-crawler-worker-test',
|
||||
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
// Development Environment Settings
|
||||
env_development: {
|
||||
NODE_ENV: 'development',
|
||||
name: 'flyer-crawler-worker-dev',
|
||||
watch: true,
|
||||
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -54,12 +170,70 @@ module.exports = {
|
||||
NODE_ENV: 'production',
|
||||
name: 'flyer-crawler-analytics-worker',
|
||||
cwd: '/var/www/flyer-crawler.projectium.com',
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
// Test Environment Settings
|
||||
env_test: {
|
||||
NODE_ENV: 'development',
|
||||
NODE_ENV: 'test',
|
||||
name: 'flyer-crawler-analytics-worker-test',
|
||||
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
// Development Environment Settings
|
||||
env_development: {
|
||||
NODE_ENV: 'development',
|
||||
name: 'flyer-crawler-analytics-worker-dev',
|
||||
watch: true,
|
||||
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
|
||||
// Inherit secrets from the deployment environment
|
||||
DB_HOST: process.env.DB_HOST,
|
||||
DB_USER: process.env.DB_USER,
|
||||
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||
DB_NAME: process.env.DB_NAME,
|
||||
REDIS_URL: process.env.REDIS_URL,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||
JWT_SECRET: process.env.JWT_SECRET,
|
||||
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||
SMTP_HOST: process.env.SMTP_HOST,
|
||||
SMTP_PORT: process.env.SMTP_PORT,
|
||||
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||
SMTP_USER: process.env.SMTP_USER,
|
||||
SMTP_PASS: process.env.SMTP_PASS,
|
||||
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
package-lock.json (generated, 6 changed lines)
@@ -1,12 +1,12 @@
{
"name": "flyer-crawler",
"version": "0.0.22",
"version": "0.1.4",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.0.22",
"version": "0.1.4",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",
@@ -42,7 +42,7 @@
"recharts": "^3.4.1",
"sharp": "^0.34.5",
"tsx": "^4.20.6",
"zod": "^4.1.13",
"zod": "^4.2.1",
"zxcvbn": "^4.4.2"
},
"devDependencies": {
package.json
@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.0.22",
"version": "0.1.4",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -61,7 +61,7 @@
"recharts": "^3.4.1",
"sharp": "^0.34.5",
"tsx": "^4.20.6",
"zod": "^4.1.13",
"zod": "^4.2.1",
"zxcvbn": "^4.4.2"
},
"devDependencies": {
@@ -36,7 +36,7 @@ vi.mock('pdfjs-dist', () => ({
// Mock the new config module
vi.mock('./config', () => ({
default: {
app: { version: '1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
app: { version: '20250101-1200:abc1234:1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
google: { mapsEmbedApiKey: 'mock-key' },
},
}));
@@ -588,11 +588,11 @@ describe('App Component', () => {
// Mock the config module for this specific test
vi.mock('./config', () => ({
default: {
app: { version: '1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
app: { version: '20250101-1200:abc1234:1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
google: { mapsEmbedApiKey: 'mock-key' },
},
}));
localStorageMock.setItem('lastSeenVersion', '1.0.0');
localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:1.0.0');
renderApp();
await expect(screen.findByTestId('whats-new-modal-mock')).resolves.toBeInTheDocument();
});
@@ -741,7 +741,7 @@ describe('App Component', () => {
vi.mock('./config', () => ({
default: {
app: {
version: '2.0.0',
version: '20250101-1200:abc1234:2.0.0',
commitMessage: 'A new version!',
commitUrl: 'http://example.com/commit/2.0.0',
},
@@ -752,14 +752,14 @@ describe('App Component', () => {

it('should display the version number and commit link', () => {
renderApp();
const versionLink = screen.getByText(`Version: 2.0.0`);
const versionLink = screen.getByText(`Version: 20250101-1200:abc1234:2.0.0`);
expect(versionLink).toBeInTheDocument();
expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
});

it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {
// Pre-set the localStorage to prevent the modal from opening automatically
localStorageMock.setItem('lastSeenVersion', '2.0.0');
localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:2.0.0');

renderApp();
expect(screen.queryByTestId('whats-new-modal-mock')).not.toBeInTheDocument();
||||
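These assertions imply that the app opens the "What's New" modal automatically whenever the full config.app.version string differs from the lastSeenVersion value in localStorage, so the new timestamp:sha:semver format makes every fresh build count as "new". A sketch of that check under this assumption (the real App logic is outside this diff):

```tsx
import { useEffect, useState } from 'react';
import config from './config';

// Hypothetical hook -- illustrates the comparison these tests exercise.
function useWhatsNewModal() {
  const [showWhatsNew, setShowWhatsNew] = useState(false);

  useEffect(() => {
    const lastSeen = localStorage.getItem('lastSeenVersion');
    // Full-string comparison, e.g. stored "...:1.0.0" vs current "...:1.0.1".
    if (lastSeen !== config.app.version) {
      setShowWhatsNew(true);
    }
  }, []);

  return { showWhatsNew, dismiss: () => setShowWhatsNew(false) };
}

export default useWhatsNewModal;
```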
@@ -44,7 +44,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
})
.catch((err) => {
console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
logger.error('Failed to fetch image for correction tool', { error: err });
logger.error({ error: err }, 'Failed to fetch image for correction tool');
notifyError('Could not load the image for correction.');
});
}
@@ -164,7 +164,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
notifyError(msg);
logger.error('Error during rescan:', { error: err });
logger.error({ error: err }, 'Error during rescan:');
} finally {
console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
setIsProcessing(false);
||||
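The logger calls here (and in FlyerUploader below) swap their arguments from ('message', context) to (context, 'message'). That matches the pino convention, where the first argument is a merging object whose fields are attached to the log entry and the second is the message. A short illustration, assuming the shared logger follows that convention (import path is a placeholder):

```ts
import { logger } from '../services/logger'; // placeholder path for illustration

const err = new Error('fetch failed');

// Old shape: the context object is not merged into the log entry's fields.
logger.error('Failed to fetch image for correction tool', { error: err });

// New shape: the merging object comes first, then the message.
logger.error({ error: err }, 'Failed to fetch image for correction tool');
```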
@@ -73,12 +73,11 @@ describe('FlyerUploader', () => {
|
||||
|
||||
it('should handle file upload and start polling', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
|
||||
);
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Checking...' } })),
|
||||
);
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||
state: 'active',
|
||||
progress: { message: 'Checking...' },
|
||||
});
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.');
|
||||
renderComponent();
|
||||
@@ -131,12 +130,11 @@ describe('FlyerUploader', () => {
|
||||
|
||||
it('should handle file upload via drag and drop', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ jobId: 'job-dnd' }), { status: 200 }),
|
||||
);
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Dropped...' } })),
|
||||
);
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-dnd' });
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||
state: 'active',
|
||||
progress: { message: 'Dropped...' },
|
||||
});
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.');
|
||||
renderComponent();
|
||||
@@ -159,16 +157,10 @@ describe('FlyerUploader', () => {
|
||||
it('should poll for status, complete successfully, and redirect', async () => {
|
||||
const onProcessingComplete = vi.fn();
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
|
||||
);
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
|
||||
mockedAiApiClient.getJobStatus
|
||||
.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify({ state: 'completed', returnValue: { flyerId: 42 } })),
|
||||
);
|
||||
.mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
|
||||
.mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
|
||||
renderComponent(onProcessingComplete);
|
||||
@@ -229,12 +221,11 @@ describe('FlyerUploader', () => {
|
||||
|
||||
it('should handle a failed job', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ jobId: 'job-fail' }), { status: 200 }),
|
||||
);
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
||||
new Response(JSON.stringify({ state: 'failed', failedReason: 'AI model exploded' })),
|
||||
);
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail' });
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||
state: 'failed',
|
||||
failedReason: 'AI model exploded',
|
||||
});
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
||||
renderComponent();
|
||||
@@ -260,11 +251,82 @@ describe('FlyerUploader', () => {
|
||||
console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.');
|
||||
});
|
||||
|
||||
it('should clear the polling timeout when a job fails', async () => {
|
||||
const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for failed job timeout clearance.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail-timeout' });
|
||||
|
||||
// We need at least one 'active' response to establish a timeout loop so we have something to clear
|
||||
mockedAiApiClient.getJobStatus
|
||||
.mockResolvedValueOnce({ state: 'active', progress: { message: 'Working...' } })
|
||||
.mockResolvedValueOnce({ state: 'failed', failedReason: 'Fatal Error' });
|
||||
|
||||
renderComponent();
|
||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||
const input = screen.getByLabelText(/click to select a file/i);
|
||||
|
||||
fireEvent.change(input, { target: { files: [file] } });
|
||||
|
||||
// Wait for the first poll to complete and UI to update to "Working..."
|
||||
await screen.findByText('Working...');
|
||||
|
||||
// Advance time to trigger the second poll
|
||||
await act(async () => {
|
||||
vi.advanceTimersByTime(3000);
|
||||
});
|
||||
|
||||
// Wait for the failure UI
|
||||
await screen.findByText(/Processing failed: Fatal Error/i);
|
||||
|
||||
// Verify clearTimeout was called
|
||||
expect(clearTimeoutSpy).toHaveBeenCalled();
|
||||
|
||||
// Verify no further polling occurs
|
||||
const callsBefore = mockedAiApiClient.getJobStatus.mock.calls.length;
|
||||
await act(async () => {
|
||||
vi.advanceTimersByTime(10000);
|
||||
});
|
||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBefore);
|
||||
|
||||
clearTimeoutSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should clear the polling timeout when the component unmounts', async () => {
|
||||
const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount timeout clearance.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-unmount' });
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||
state: 'active',
|
||||
progress: { message: 'Polling...' },
|
||||
});
|
||||
|
||||
const { unmount } = renderComponent();
|
||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||
const input = screen.getByLabelText(/click to select a file/i);
|
||||
|
||||
fireEvent.change(input, { target: { files: [file] } });
|
||||
|
||||
// Wait for the first poll to complete and the UI to show the polling state
|
||||
await screen.findByText('Polling...');
|
||||
|
||||
// Now that we are in a polling state (and a timeout is set), unmount the component
|
||||
console.log('--- [TEST LOG] ---: 2. Unmounting component to trigger cleanup effect.');
|
||||
unmount();
|
||||
|
||||
// Verify that the cleanup function in the useEffect hook was called
|
||||
expect(clearTimeoutSpy).toHaveBeenCalled();
|
||||
console.log('--- [TEST LOG] ---: 3. clearTimeout confirmed.');
|
||||
|
||||
clearTimeoutSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should handle a duplicate flyer error (409)', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ flyerId: 99, message: 'Duplicate' }), { status: 409 }),
|
||||
);
|
||||
// The API client now throws a structured error for non-2xx responses.
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
|
||||
status: 409,
|
||||
body: { flyerId: 99, message: 'Duplicate' },
|
||||
});
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
||||
renderComponent();
|
||||
@@ -295,12 +357,11 @@ describe('FlyerUploader', () => {
|
||||
|
||||
it('should allow the user to stop watching progress', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ jobId: 'job-stop' }), { status: 200 }),
|
||||
);
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
|
||||
);
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-stop' });
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||
state: 'active',
|
||||
progress: { message: 'Analyzing...' },
|
||||
} as any);
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
||||
renderComponent();
|
||||
@@ -362,9 +423,11 @@ describe('FlyerUploader', () => {
|
||||
|
||||
it('should handle a generic network error during upload', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue(
|
||||
new Error('Network Error During Upload'),
|
||||
);
|
||||
// Simulate a structured error from the API client
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
|
||||
status: 500,
|
||||
body: { message: 'Network Error During Upload' },
|
||||
});
|
||||
renderComponent();
|
||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||
const input = screen.getByLabelText(/click to select a file/i);
|
||||
@@ -379,9 +442,7 @@ describe('FlyerUploader', () => {
|
||||
|
||||
it('should handle a generic network error during polling', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ jobId: 'job-poll-fail' }), { status: 200 }),
|
||||
);
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-poll-fail' });
|
||||
mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error'));
|
||||
|
||||
renderComponent();
|
||||
@@ -398,11 +459,9 @@ describe('FlyerUploader', () => {
|
||||
|
||||
it('should handle a completed job with a missing flyerId', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.');
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
||||
new Response(JSON.stringify({ jobId: 'job-no-flyerid' }), { status: 200 }),
|
||||
);
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-no-flyerid' });
|
||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
||||
new Response(JSON.stringify({ state: 'completed', returnValue: {} })), // No flyerId
|
||||
{ state: 'completed', returnValue: {} }, // No flyerId
|
||||
);
|
||||
|
||||
renderComponent();
|
||||
@@ -419,6 +478,27 @@ describe('FlyerUploader', () => {
|
||||
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
|
||||
});
|
||||
|
||||
it('should handle a non-JSON response during polling', async () => {
|
||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for non-JSON response.');
|
||||
// The actual function would throw, so we mock the rejection.
|
||||
// The new getJobStatus would throw an error like "Failed to parse JSON..."
|
||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-bad-json' });
|
||||
mockedAiApiClient.getJobStatus.mockRejectedValue(
|
||||
new Error('Failed to parse JSON response from server. Body: <html>502 Bad Gateway</html>'),
|
||||
);
|
||||
|
||||
renderComponent();
|
||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||
const input = screen.getByLabelText(/click to select a file/i);
|
||||
|
||||
console.log('--- [TEST LOG] ---: 2. Firing file change event.');
|
||||
fireEvent.change(input, { target: { files: [file] } });
|
||||
|
||||
console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
|
||||
expect(await screen.findByText(/Failed to parse JSON response from server/i)).toBeInTheDocument();
|
||||
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
|
||||
});
|
||||
|
||||
it('should do nothing if the file input is cancelled', () => {
|
||||
renderComponent();
|
||||
const input = screen.getByLabelText(/click to select a file/i);
|
||||
|
||||
@@ -60,14 +60,8 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
||||
const pollStatus = async () => {
|
||||
console.debug(`[DEBUG] pollStatus(): Polling for jobId: ${jobId}`);
|
||||
try {
|
||||
const statusResponse = await getJobStatus(jobId);
|
||||
console.debug(`[DEBUG] pollStatus(): API response status: ${statusResponse.status}`);
|
||||
if (!statusResponse.ok) {
|
||||
throw new Error(`Failed to get job status (HTTP ${statusResponse.status})`);
|
||||
}
|
||||
|
||||
const job = await statusResponse.json();
|
||||
console.debug('[DEBUG] pollStatus(): Job status received:', job);
|
||||
const job = await getJobStatus(jobId); // Now returns parsed JSON directly
|
||||
console.debug('[DEBUG] pollStatus(): Job status received:', job); // The rest of the logic remains the same
|
||||
|
||||
if (job.progress) {
|
||||
setProcessingStages(job.progress.stages || []);
|
||||
@@ -97,7 +91,13 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
||||
console.debug(
|
||||
`[DEBUG] pollStatus(): Job state is "failed". Reason: ${job.failedReason}`,
|
||||
);
|
||||
// Explicitly clear any pending timeout to stop the polling loop immediately.
|
||||
if (pollingTimeoutRef.current) {
|
||||
clearTimeout(pollingTimeoutRef.current);
|
||||
}
|
||||
setErrorMessage(`Processing failed: ${job.failedReason || 'Unknown error'}`);
|
||||
// Clear any stale "in-progress" messages to avoid user confusion.
|
||||
setStatusMessage(null);
|
||||
setProcessingState('error');
|
||||
break;
|
||||
|
||||
@@ -112,7 +112,7 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
||||
break;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error during polling:', { error });
|
||||
logger.error({ error }, 'Error during polling:');
|
||||
setErrorMessage(
|
||||
error instanceof Error ? error.message : 'An unexpected error occurred during polling.',
|
||||
);
|
||||
@@ -150,29 +150,24 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
||||
`[DEBUG] processFile(): Checksum generated: ${checksum}. Calling uploadAndProcessFlyer.`,
|
||||
);
|
||||
|
||||
const startResponse = await uploadAndProcessFlyer(file, checksum);
|
||||
console.debug(`[DEBUG] processFile(): Upload response status: ${startResponse.status}`);
|
||||
|
||||
if (!startResponse.ok) {
|
||||
const errorData = await startResponse.json();
|
||||
console.debug('[DEBUG] processFile(): Upload failed. Error data:', errorData);
|
||||
if (startResponse.status === 409 && errorData.flyerId) {
|
||||
setErrorMessage(`This flyer has already been processed. You can view it here:`);
|
||||
setDuplicateFlyerId(errorData.flyerId);
|
||||
} else {
|
||||
setErrorMessage(errorData.message || `Upload failed with status ${startResponse.status}`);
|
||||
}
|
||||
setProcessingState('error');
|
||||
return;
|
||||
}
|
||||
|
||||
const { jobId: newJobId } = await startResponse.json();
|
||||
// The API client now returns parsed JSON on success or throws a structured error on failure.
|
||||
const { jobId: newJobId } = await uploadAndProcessFlyer(file, checksum);
|
||||
console.debug(`[DEBUG] processFile(): Upload successful. Received jobId: ${newJobId}`);
|
||||
setJobId(newJobId);
|
||||
setProcessingState('polling');
|
||||
} catch (error) {
|
||||
logger.error('An unexpected error occurred during file upload:', { error });
|
||||
setErrorMessage(error instanceof Error ? error.message : 'An unexpected error occurred.');
|
||||
} catch (error: any) {
|
||||
// Handle the structured error thrown by the API client.
|
||||
logger.error({ error }, 'An error occurred during file upload:');
|
||||
// Handle 409 Conflict for duplicate flyers
|
||||
if (error?.status === 409 && error.body?.flyerId) {
|
||||
setErrorMessage(`This flyer has already been processed. You can view it here:`);
|
||||
setDuplicateFlyerId(error.body.flyerId);
|
||||
} else {
|
||||
// Handle other errors (e.g., validation, server errors)
|
||||
const message =
|
||||
error?.body?.message || error?.message || 'An unexpected error occurred during upload.';
|
||||
setErrorMessage(message);
|
||||
}
|
||||
setProcessingState('error');
|
||||
}
|
||||
}, []);
|
||||
|
||||
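The FlyerUploader changes above (and the updated test mocks) assume the reworked aiApiClient resolves with parsed JSON on success, rejects with a structured `{ status, body }` value for non-2xx responses (used for the 409 duplicate-flyer case), and rejects with an Error when the body is not valid JSON. The client itself is not part of this excerpt; a minimal sketch of that contract, with the endpoint path and error shape as assumptions:

```ts
// Hypothetical sketch of the aiApiClient contract implied by the changes above.
// The endpoint path and ApiError shape are assumptions, not the actual implementation.
export interface ApiError {
  status: number;
  body: { message?: string; flyerId?: number; [key: string]: unknown };
}

async function request<T>(input: RequestInfo, init?: RequestInit): Promise<T> {
  const response = await fetch(input, init);
  const text = await response.text();

  let body: unknown;
  try {
    body = text ? JSON.parse(text) : {};
  } catch {
    throw new Error(`Failed to parse JSON response from server. Body: ${text}`);
  }

  if (!response.ok) {
    // Callers inspect error.status / error.body (e.g. 409 duplicate-flyer handling).
    throw { status: response.status, body } as ApiError;
  }
  return body as T;
}

export function getJobStatus(jobId: string) {
  return request<{ state: string; progress?: unknown; returnValue?: unknown; failedReason?: string }>(
    `/api/ai/jobs/${jobId}`, // path assumed for illustration
  );
}
```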
@@ -1,94 +1,53 @@
|
||||
// src/middleware/errorHandler.ts
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import {
|
||||
DatabaseError,
|
||||
UniqueConstraintError,
|
||||
ForeignKeyConstraintError,
|
||||
NotFoundError,
|
||||
ValidationError,
|
||||
ValidationIssue,
|
||||
} from '../services/db/errors.db';
|
||||
import crypto from 'crypto';
|
||||
import { ZodError } from 'zod';
|
||||
import { NotFoundError, UniqueConstraintError, ValidationError } from '../services/db/errors.db';
import { logger } from '../services/logger.server';

interface HttpError extends Error {
  status?: number;
}

export const errorHandler = (err: HttpError, req: Request, res: Response, next: NextFunction) => {
  // If the response headers have already been sent, we must delegate to the default Express error handler.
/**
 * A centralized error handling middleware for the Express application.
 * This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
 *
 * It standardizes error responses and ensures consistent logging.
 */
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
  // If headers have already been sent, delegate to the default Express error handler.
  if (res.headersSent) {
    return next(err);
  }

  // The pino-http middleware guarantees that `req.log` will be available.
  const log = req.log;
  // Use the request-scoped logger if available, otherwise fall back to the global logger.
  const log = req.log || logger;

  // --- 1. Determine Final Status Code and Message ---
  let statusCode = err.status ?? 500;
  const message = err.message;
  let validationIssues: ValidationIssue[] | undefined;
  let errorId: string | undefined;

  // Refine the status code for known error types. Check for most specific types first.
  if (err instanceof UniqueConstraintError) {
    statusCode = 409; // Conflict
  } else if (err instanceof NotFoundError) {
    statusCode = 404;
  } else if (err instanceof ForeignKeyConstraintError) {
    statusCode = 400;
  } else if (err instanceof ValidationError) {
    statusCode = 400;
    validationIssues = err.validationErrors;
  } else if (err instanceof DatabaseError) {
    // This is a generic fallback for other database errors that are not the specific subclasses above.
    statusCode = err.status;
  } else if (err.name === 'UnauthorizedError') {
    statusCode = err.status || 401;
  // --- Handle Zod Validation Errors ---
  if (err instanceof ZodError) {
    log.warn({ err: err.flatten() }, 'Request validation failed');
    return res.status(400).json({
      message: 'The request data is invalid.',
      errors: err.errors.map((e) => ({ path: e.path, message: e.message })),
    });
  }

  // --- 2. Log Based on Final Status Code ---
  // Log the full error details for debugging, especially for server errors.
  if (statusCode >= 500) {
    errorId = crypto.randomBytes(4).toString('hex');
    // The request-scoped logger already contains user, IP, and request_id.
    // We add the full error and the request object itself.
    // Pino's `redact` config will automatically sanitize sensitive fields in `req`.
    log.error(
      {
        err,
        errorId,
        req: { method: req.method, url: req.originalUrl, headers: req.headers, body: req.body },
      },
      `Unhandled API Error (ID: ${errorId})`,
    );
  } else {
    // For 4xx errors, log at a lower level (e.g., 'warn') to avoid flooding error trackers.
    // We include the validation errors in the log context if they exist.
    log.warn(
      {
        err,
        validationErrors: validationIssues, // Add validation issues to the log object
        statusCode,
      },
      `Client Error on ${req.method} ${req.path}: ${message}`,
    );
  // --- Handle Custom Operational Errors ---
  if (err instanceof NotFoundError) {
    log.info({ err }, 'Resource not found');
    return res.status(404).json({ message: err.message });
  }

  // --- TEST ENVIRONMENT DEBUGGING ---
  if (process.env.NODE_ENV === 'test') {
    console.error('--- [TEST] UNHANDLED ERROR ---', err);
  if (err instanceof UniqueConstraintError || err instanceof ValidationError) {
    log.warn({ err }, 'Constraint or validation error occurred');
    return res.status(400).json({ message: err.message });
  }

  // --- 3. Send Response ---
  // In production, send a generic message for 5xx errors.
  // In dev/test, send the actual error message for easier debugging.
  const responseMessage =
    statusCode >= 500 && process.env.NODE_ENV === 'production'
      ? `An unexpected server error occurred. Please reference error ID: ${errorId}`
      : message;
  // --- Handle Generic Errors ---
  // Log the full error object for debugging. The pino logger will handle redaction.
  log.error({ err }, 'An unhandled error occurred in an Express route');

  res.status(statusCode).json({
    message: responseMessage,
    ...(validationIssues && { errors: validationIssues }), // Conditionally add the 'errors' array if it exists
  });
};
  // In production, send a generic message to avoid leaking implementation details.
  if (process.env.NODE_ENV === 'production') {
    return res.status(500).json({ message: 'An internal server error occurred.' });
  }

  // In development, send more details for easier debugging.
  return res.status(500).json({ message: err.message, stack: err.stack });
};
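Because this handler only sees errors that reach next(err), the order in which it is registered matters. A minimal sketch of the wiring, assuming an app entry point that uses pino-http to provide req.log (the file name, import paths, and exact setup are assumptions, not shown in this diff):

// app.ts (hypothetical wiring, for illustration only)
import express from 'express';
import pinoHttp from 'pino-http';
import { logger } from './services/logger.server';
import { errorHandler } from './middleware/errorHandler.middleware'; // assumed path

const app = express();
app.use(express.json());
app.use(pinoHttp({ logger })); // makes req.log available to routes and the error handler

// ...register all routers here, e.g. app.use('/api/admin', adminRouter);

// The centralized error handler must be registered LAST so next(error) from any route lands here.
app.use(errorHandler);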
@@ -15,6 +15,11 @@ import { NotFoundError } from '../services/db/errors.db'; // This can stay, it's
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
|
||||
// Mock the file upload middleware to allow testing the controller's internal check
|
||||
vi.mock('../middleware/fileUpload.middleware', () => ({
|
||||
requireFileUpload: () => (req: Request, res: Response, next: NextFunction) => next(),
|
||||
}));
|
||||
|
||||
vi.mock('../lib/queue', () => ({
|
||||
serverAdapter: {
|
||||
getRouter: () => (req: Request, res: Response, next: NextFunction) => next(), // Return a dummy express handler
|
||||
@@ -125,12 +130,6 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
authenticatedUser: adminUser,
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
@@ -262,7 +261,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
||||
const response = await supertest(app).post('/api/admin/brands/55/logo');
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.message).toMatch(
|
||||
/Logo image file is required|The request data is invalid/,
|
||||
/Logo image file is required|The request data is invalid|Logo image file is missing./,
|
||||
);
|
||||
});
|
||||
|
||||
|
||||
@@ -97,12 +97,6 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
||||
authenticatedUser: adminUser,
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
@@ -248,6 +242,17 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
      expect(response.status).toBe(400);
    });

    it('should return 404 if the queue name is valid but not in the retry map', async () => {
      const queueName = 'weekly-analytics-reporting'; // This is in the Zod enum but not the queueMap
      const jobId = 'some-job-id';

      const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);

      // The route throws a NotFoundError, which the error handler should convert to a 404.
      expect(response.status).toBe(404);
      expect(response.body.message).toBe(`Queue 'weekly-analytics-reporting' not found.`);
    });
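This test implies that the retry route looks the queue name up in a queueMap and throws NotFoundError when there is no entry. The route itself is not part of this diff; a minimal sketch of that lookup, with the map contents and key names as assumptions (flyerQueue and analyticsQueue are imported as they are elsewhere in this change):

// Hypothetical helper consistent with the test above: resolve a queue by name or throw.
import { NotFoundError } from '../services/db/errors.db';
import { flyerQueue, analyticsQueue } from '../services/queueService.server';

const queueMap = { 'flyer-processing': flyerQueue, 'analytics-reporting': analyticsQueue } as const; // names assumed

function getQueueOrThrow(queueName: string) {
  const queue = queueMap[queueName as keyof typeof queueMap];
  if (!queue) {
    // The centralized error handler converts NotFoundError into a 404 response.
    throw new NotFoundError(`Queue '${queueName}' not found.`);
  }
  return queue;
}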
|
||||
it('should return 404 if the job ID is not found in the queue', async () => {
|
||||
vi.mocked(flyerQueue.getJob).mockResolvedValue(undefined);
|
||||
const response = await supertest(app).post(
|
||||
|
||||
@@ -102,12 +102,6 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
||||
authenticatedUser: adminUser,
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
@@ -2,12 +2,11 @@
import { Router, NextFunction, Request, Response } from 'express';
import passport from './passport.routes';
import { isAdmin } from './passport.routes'; // Correctly imported
import multer from 'multer'; // --- Zod Schemas for Admin Routes (as per ADR-003) ---
import multer from 'multer';
import { z } from 'zod';

import * as db from '../services/db/index.db';
import { logger } from '../services/logger.server';
import { UserProfile } from '../types';
import type { UserProfile } from '../types';
import { geocodingService } from '../services/geocodingService.server';
import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
import { NotFoundError, ValidationError } from '../services/db/errors.db';
@@ -33,45 +32,27 @@ import {
  weeklyAnalyticsWorker,
} from '../services/queueService.server'; // Import your queues
import { getSimpleWeekAndYear } from '../utils/dateUtils';
import {
  requiredString,
  numericIdParam,
  uuidParamSchema,
  optionalNumeric,
} from '../utils/zodUtils';
import { logger } from '../services/logger.server';

// Helper for consistent required string validation (handles missing/null/empty)
const requiredString = (message: string) =>
  z.preprocess((val) => val ?? '', z.string().min(1, message));

/**
 * A factory for creating a Zod schema that validates a UUID in the request parameters.
 * @param key The name of the parameter key (e.g., 'userId').
 * @param message A custom error message for invalid UUIDs.
 */
const uuidParamSchema = (key: string, message = `Invalid UUID for parameter '${key}'.`) =>
  z.object({
    params: z.object({ [key]: z.string().uuid({ message }) }),
  });

/**
 * A factory for creating a Zod schema that validates a numeric ID in the request parameters.
 */
const numericIdParamSchema = (
  key: string,
  message = `Invalid ID for parameter '${key}'. Must be a positive integer.`,
) =>
  z.object({
    params: z.object({ [key]: z.coerce.number().int({ message }).positive({ message }) }),
  });
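These routes now pull requiredString, numericIdParam, uuidParamSchema, and optionalNumeric from a shared '../utils/zodUtils' module instead of redefining them per file. That module is not shown in this diff; a minimal sketch of what it might contain, inferred from the inline helpers removed above and from how optionalNumeric is called elsewhere in this change (its option names are assumptions):

// src/utils/zodUtils.ts -- illustrative sketch only, not the actual file from this commit
import { z } from 'zod';

// Required, non-empty string that also treats null/undefined as empty.
export const requiredString = (message: string) =>
  z.preprocess((val) => val ?? '', z.string().min(1, message));

// { params: { [key]: positive integer } } factory, matching the removed numericIdParamSchema.
export const numericIdParam = (
  key: string,
  message = `Invalid ID for parameter '${key}'. Must be a positive integer.`,
) =>
  z.object({
    params: z.object({ [key]: z.coerce.number().int({ message }).positive({ message }) }),
  });

// { params: { [key]: uuid } } factory, matching the removed uuidParamSchema.
export const uuidParamSchema = (key: string, message = `Invalid UUID for parameter '${key}'.`) =>
  z.object({
    params: z.object({ [key]: z.string().uuid({ message }) }),
  });

// Optional coerced number with a default; the option names below are assumed from call sites
// such as optionalNumeric({ default: 50, integer: true, positive: true, max: 50 }).
export const optionalNumeric = (opts: {
  default: number;
  integer?: boolean;
  positive?: boolean;
  nonnegative?: boolean;
  max?: number;
}) => {
  let schema = z.coerce.number();
  if (opts.integer) schema = schema.int();
  if (opts.positive) schema = schema.positive();
  if (opts.nonnegative) schema = schema.nonnegative();
  if (opts.max !== undefined) schema = schema.max(opts.max);
  return schema.optional().default(opts.default);
};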
|
||||
const updateCorrectionSchema = numericIdParamSchema('id').extend({
|
||||
const updateCorrectionSchema = numericIdParam('id').extend({
|
||||
body: z.object({
|
||||
suggested_value: requiredString('A new suggested_value is required.'),
|
||||
}),
|
||||
});
|
||||
|
||||
const updateRecipeStatusSchema = numericIdParamSchema('id').extend({
|
||||
const updateRecipeStatusSchema = numericIdParam('id').extend({
|
||||
body: z.object({
|
||||
status: z.enum(['private', 'pending_review', 'public', 'rejected']),
|
||||
}),
|
||||
});
|
||||
|
||||
const updateCommentStatusSchema = numericIdParamSchema('id').extend({
|
||||
const updateCommentStatusSchema = numericIdParam('id').extend({
|
||||
body: z.object({
|
||||
status: z.enum(['visible', 'hidden', 'reported']),
|
||||
}),
|
||||
@@ -85,8 +66,8 @@ const updateUserRoleSchema = uuidParamSchema('id', 'A valid user ID is required.
|
||||
|
||||
const activityLogSchema = z.object({
|
||||
query: z.object({
|
||||
limit: z.coerce.number().int().positive().optional().default(50),
|
||||
offset: z.coerce.number().int().nonnegative().optional().default(0),
|
||||
limit: optionalNumeric({ default: 50, integer: true, positive: true }),
|
||||
offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -154,6 +135,7 @@ router.get('/corrections', async (req, res, next: NextFunction) => {
|
||||
const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
|
||||
res.json(corrections);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching suggested corrections');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -163,6 +145,7 @@ router.get('/brands', async (req, res, next: NextFunction) => {
|
||||
const brands = await db.flyerRepo.getAllBrands(req.log);
|
||||
res.json(brands);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching brands');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -172,6 +155,7 @@ router.get('/stats', async (req, res, next: NextFunction) => {
|
||||
const stats = await db.adminRepo.getApplicationStats(req.log);
|
||||
res.json(stats);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching application stats');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -181,20 +165,22 @@ router.get('/stats/daily', async (req, res, next: NextFunction) => {
|
||||
const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
|
||||
res.json(dailyStats);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching daily stats');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
router.post(
|
||||
'/corrections/:id/approve',
|
||||
validateRequest(numericIdParamSchema('id')),
|
||||
validateRequest(numericIdParam('id')),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
// Apply ADR-003 pattern for type safety
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
|
||||
res.status(200).json({ message: 'Correction approved successfully.' });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error approving correction');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
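The route above pairs validateRequest(numericIdParam('id')) with a cast of req to the schema's inferred type, the ADR-003 pattern used throughout these files. The validateRequest middleware itself is not part of this diff; a minimal sketch of what such a middleware could look like, assuming it parses { body, query, params } against the given schema and forwards any ZodError to next() so the central error handler can answer with a 400:

// src/middleware/validation.middleware.ts -- illustrative sketch, not the actual implementation
import { Request, Response, NextFunction } from 'express';
import { AnyZodObject } from 'zod';

export const validateRequest =
  (schema: AnyZodObject) => async (req: Request, _res: Response, next: NextFunction) => {
    try {
      const parsed = await schema.parseAsync({ body: req.body, query: req.query, params: req.params });
      // Write the coerced values back so handlers can read e.g. numeric params and defaulted query values.
      Object.assign(req, parsed);
      next();
    } catch (err) {
      next(err); // A ZodError ends up in the centralized error handler as a 400.
    }
  };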
@@ -202,14 +188,15 @@ router.post(
|
||||
|
||||
router.post(
|
||||
'/corrections/:id/reject',
|
||||
validateRequest(numericIdParamSchema('id')),
|
||||
validateRequest(numericIdParam('id')),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
// Apply ADR-003 pattern for type safety
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
|
||||
res.status(200).json({ message: 'Correction rejected successfully.' });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error rejecting correction');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -229,6 +216,7 @@ router.put(
|
||||
);
|
||||
res.status(200).json(updatedCorrection);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error updating suggested correction');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -244,6 +232,7 @@ router.put(
|
||||
const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
|
||||
res.status(200).json(updatedRecipe);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error updating recipe status');
|
||||
next(error); // Pass all errors to the central error handler
|
||||
}
|
||||
},
|
||||
@@ -251,12 +240,12 @@ router.put(
|
||||
|
||||
router.post(
|
||||
'/brands/:id/logo',
|
||||
validateRequest(numericIdParamSchema('id')),
|
||||
validateRequest(numericIdParam('id')),
|
||||
upload.single('logoImage'),
|
||||
requireFileUpload('logoImage'),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
// Apply ADR-003 pattern for type safety
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
// Although requireFileUpload middleware should ensure the file exists,
|
||||
// this check satisfies TypeScript and adds robustness.
|
||||
@@ -269,6 +258,7 @@ router.post(
|
||||
logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
|
||||
res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error updating brand logo');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -279,6 +269,7 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
|
||||
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
|
||||
res.json(items);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching unmatched items');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -288,16 +279,17 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
|
||||
*/
|
||||
router.delete(
|
||||
'/recipes/:recipeId',
|
||||
validateRequest(numericIdParamSchema('recipeId')),
|
||||
validateRequest(numericIdParam('recipeId')),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
const userProfile = req.user as UserProfile;
|
||||
// Infer the type directly from the schema generator function. // This was a duplicate, fixed.
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
// The isAdmin flag bypasses the ownership check in the repository method.
|
||||
await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
|
||||
res.status(204).send();
|
||||
} catch (error: unknown) {
|
||||
logger.error({ error }, 'Error deleting recipe');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -308,14 +300,15 @@ router.delete(
|
||||
*/
|
||||
router.delete(
|
||||
'/flyers/:flyerId',
|
||||
validateRequest(numericIdParamSchema('flyerId')),
|
||||
validateRequest(numericIdParam('flyerId')),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
// Infer the type directly from the schema generator function.
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||
try {
|
||||
await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
|
||||
res.status(204).send();
|
||||
} catch (error: unknown) {
|
||||
logger.error({ error }, 'Error deleting flyer');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -335,6 +328,7 @@ router.put(
|
||||
); // This is still a standalone function in admin.db.ts
|
||||
res.status(200).json(updatedComment);
|
||||
} catch (error: unknown) {
|
||||
logger.error({ error }, 'Error updating comment status');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -345,6 +339,7 @@ router.get('/users', async (req, res, next: NextFunction) => {
|
||||
const users = await db.adminRepo.getAllUsers(req.log);
|
||||
res.json(users);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching users');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -364,6 +359,7 @@ router.get(
|
||||
const logs = await db.adminRepo.getActivityLog(limit, offset, req.log);
|
||||
res.json(logs);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching activity log');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -379,6 +375,7 @@ router.get(
|
||||
const user = await db.userRepo.findUserProfileById(params.id, req.log);
|
||||
res.json(user);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching user profile');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -414,6 +411,7 @@ router.delete(
|
||||
await db.userRepo.deleteUserById(params.id, req.log);
|
||||
res.status(204).send();
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error deleting user');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -435,12 +433,10 @@ router.post(
|
||||
// We call the function but don't wait for it to finish (no `await`).
|
||||
// This is a "fire-and-forget" operation from the client's perspective.
|
||||
backgroundJobService.runDailyDealCheck();
|
||||
res
|
||||
.status(202)
|
||||
.json({
|
||||
message:
|
||||
'Daily deal check job has been triggered successfully. It will run in the background.',
|
||||
});
|
||||
res.status(202).json({
|
||||
message:
|
||||
'Daily deal check job has been triggered successfully. It will run in the background.',
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error({ error }, '[Admin] Failed to trigger daily deal check job.');
|
||||
next(error);
|
||||
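The trigger endpoint deliberately does not await runDailyDealCheck(). One common companion to this fire-and-forget pattern is attaching a catch handler so a background failure is still logged instead of surfacing as an unhandled promise rejection; a hedged variant (this is not what the route currently does):

// Hypothetical variant of the fire-and-forget call with an explicit failure log.
backgroundJobService
  .runDailyDealCheck()
  .catch((err) => logger.error({ err }, '[Admin] Daily deal check failed in the background.'));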
@@ -467,11 +463,9 @@ router.post(
|
||||
|
||||
const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
|
||||
|
||||
res
|
||||
.status(202)
|
||||
.json({
|
||||
message: `Analytics report generation job has been enqueued successfully. Job ID: ${job.id}`,
|
||||
});
|
||||
res.status(202).json({
|
||||
message: `Analytics report generation job has been enqueued successfully. Job ID: ${job.id}`,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error({ error }, '[Admin] Failed to enqueue analytics report job.');
|
||||
next(error);
|
||||
@@ -485,11 +479,11 @@ router.post(
|
||||
*/
|
||||
router.post(
|
||||
'/flyers/:flyerId/cleanup',
|
||||
validateRequest(numericIdParamSchema('flyerId')),
|
||||
validateRequest(numericIdParam('flyerId')),
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
const userProfile = req.user as UserProfile;
|
||||
// Infer type from the schema generator for type safety, as per ADR-003.
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>; // This was a duplicate, fixed.
|
||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>; // This was a duplicate, fixed.
|
||||
logger.info(
|
||||
`[Admin] Manual trigger for flyer file cleanup received from user: ${userProfile.user.user_id} for flyer ID: ${params.flyerId}`,
|
||||
);
|
||||
@@ -501,6 +495,7 @@ router.post(
|
||||
.status(202)
|
||||
.json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error enqueuing cleanup job');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -523,6 +518,7 @@ router.post('/trigger/failing-job', async (req: Request, res: Response, next: Ne
|
||||
.status(202)
|
||||
.json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error enqueuing failing job');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -541,11 +537,9 @@ router.post(
|
||||
|
||||
try {
|
||||
const keysDeleted = await geocodingService.clearGeocodeCache(req.log);
|
||||
res
|
||||
.status(200)
|
||||
.json({
|
||||
message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
|
||||
});
|
||||
res.status(200).json({
|
||||
message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error({ error }, '[Admin] Failed to clear geocode cache.');
|
||||
next(error);
|
||||
@@ -597,6 +591,7 @@ router.get('/queues/status', async (req: Request, res: Response, next: NextFunct
|
||||
);
|
||||
res.json(queueStatuses);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching queue statuses');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -645,6 +640,7 @@ router.post(
|
||||
);
|
||||
res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error retrying job');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -676,6 +672,7 @@ router.post(
|
||||
.status(202)
|
||||
.json({ message: 'Successfully enqueued weekly analytics job.', jobId: job.id });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error enqueuing weekly analytics job');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -73,12 +73,6 @@ describe('Admin Stats Routes (/api/admin/stats)', () => {
|
||||
authenticatedUser: adminUser,
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
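This hunk, like the matching ones in the other admin test suites, deletes the per-suite app.use error handler, which only makes sense if createTestApp now wires up error handling itself. A minimal sketch of such a helper, assuming it mounts the router at basePath, optionally injects authenticatedUser as req.user, attaches the mock logger as req.log, and registers the shared errorHandler last (all of this is inferred; the helper's real code is not in this diff):

// src/tests/utils/createTestApp.ts -- assumed shape, for illustration only
import express, { Router, Request, Response, NextFunction } from 'express';
import { errorHandler } from '../../middleware/errorHandler.middleware'; // assumed path
import { mockLogger } from './mockLogger';
import type { UserProfile } from '../../types';

interface CreateTestAppOptions {
  router: Router;
  basePath: string;
  authenticatedUser?: UserProfile;
}

export const createTestApp = ({ router, basePath, authenticatedUser }: CreateTestAppOptions) => {
  const app = express();
  app.use(express.json());
  app.use((req: Request, _res: Response, next: NextFunction) => {
    // The real project presumably augments the Express Request type for these fields.
    (req as any).log = mockLogger;                     // request-scoped logger expected by the routes
    if (authenticatedUser) (req as any).user = authenticatedUser; // simulate passport authentication
    next();
  });
  app.use(basePath, router);
  app.use(errorHandler); // same centralized handler the real app uses, registered last
  return app;
};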
@@ -79,12 +79,6 @@ describe('Admin System Routes (/api/admin/system)', () => {
|
||||
authenticatedUser: adminUser,
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
@@ -83,12 +83,6 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
||||
authenticatedUser: adminUser,
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
@@ -78,6 +78,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
vi.mocked(mockLogger.info).mockImplementation(() => {});
|
||||
vi.mocked(mockLogger.error).mockImplementation(() => {});
|
||||
vi.mocked(mockLogger.warn).mockImplementation(() => {});
|
||||
vi.mocked(mockLogger.debug).mockImplementation(() => {}); // Ensure debug is also mocked
|
||||
});
|
||||
const app = createTestApp({ router: aiRouter, basePath: '/api/ai' });
|
||||
|
||||
@@ -111,10 +112,55 @@ describe('AI Routes (/api/ai)', () => {
|
||||
});
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
// New test to cover the router.use diagnostic middleware's catch block and errMsg branches
|
||||
describe('Diagnostic Middleware Error Handling', () => {
|
||||
it('should log an error if logger.debug throws an object with a message property', async () => {
|
||||
const mockErrorObject = { message: 'Mock debug error' };
|
||||
vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
|
||||
throw mockErrorObject;
|
||||
});
|
||||
|
||||
// Make any request to trigger the middleware
|
||||
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: mockErrorObject.message }, // errMsg should extract the message
|
||||
'Failed to log incoming AI request headers',
|
||||
);
|
||||
// The request should still proceed, but might fail later if the original flow was interrupted.
|
||||
// Here, it will likely hit the 404 for job not found.
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
|
||||
it('should log an error if logger.debug throws a primitive string', async () => {
|
||||
const mockErrorString = 'Mock debug error string';
|
||||
vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
|
||||
throw mockErrorString;
|
||||
});
|
||||
|
||||
// Make any request to trigger the middleware
|
||||
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: mockErrorString }, // errMsg should convert to string
|
||||
'Failed to log incoming AI request headers',
|
||||
);
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
|
||||
it('should log an error if logger.debug throws null/undefined', async () => {
|
||||
vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
|
||||
throw null; // Simulate throwing null
|
||||
});
|
||||
|
||||
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
|
||||
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ error: 'An unknown error occurred.' }, // errMsg should handle null/undefined
|
||||
'Failed to log incoming AI request headers',
|
||||
);
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /upload-and-process', () => {
|
||||
@@ -307,10 +353,11 @@ describe('AI Routes (/api/ai)', () => {
|
||||
expect(response.status).toBe(400);
|
||||
});
|
||||
|
||||
it('should return 409 Conflict if flyer checksum already exists', async () => {
|
||||
it('should return 409 Conflict and delete the uploaded file if flyer checksum already exists', async () => {
|
||||
// Arrange
|
||||
const mockExistingFlyer = createMockFlyer({ flyer_id: 99 });
|
||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(mockExistingFlyer); // Duplicate found
|
||||
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||
|
||||
// Act
|
||||
const response = await supertest(app)
|
||||
@@ -322,6 +369,10 @@ describe('AI Routes (/api/ai)', () => {
|
||||
expect(response.status).toBe(409);
|
||||
expect(response.body.message).toBe('This flyer has already been processed.');
|
||||
expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled();
|
||||
// Assert that the file was deleted
|
||||
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||
// The filename is predictable in the test environment because of the multer config in ai.routes.ts
|
||||
expect(unlinkSpy).toHaveBeenCalledWith(expect.stringContaining('flyerImage-test-flyer-image.jpg'));
|
||||
});
|
||||
|
||||
it('should accept payload when extractedData.items is missing and save with empty items', async () => {
|
||||
@@ -423,6 +474,52 @@ describe('AI Routes (/api/ai)', () => {
|
||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should handle payload where extractedData is null', async () => {
|
||||
const payloadWithNullExtractedData = {
|
||||
checksum: 'null-extracted-data-checksum',
|
||||
originalFileName: 'flyer-null.jpg',
|
||||
extractedData: null,
|
||||
};
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/flyers/process')
|
||||
.field('data', JSON.stringify(payloadWithNullExtractedData))
|
||||
.attach('flyerImage', imagePath);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||
// Verify that extractedData was correctly defaulted to an empty object
|
||||
const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
|
||||
expect(flyerDataArg.store_name).toContain('Unknown Store'); // Fallback should be used
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{ bodyData: expect.any(Object) },
|
||||
'Missing extractedData in /api/ai/flyers/process payload.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle payload where extractedData is a string', async () => {
|
||||
const payloadWithStringExtractedData = {
|
||||
checksum: 'string-extracted-data-checksum',
|
||||
originalFileName: 'flyer-string.jpg',
|
||||
extractedData: 'not-an-object',
|
||||
};
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/flyers/process')
|
||||
.field('data', JSON.stringify(payloadWithStringExtractedData))
|
||||
.attach('flyerImage', imagePath);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||
// Verify that extractedData was correctly defaulted to an empty object
|
||||
const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
|
||||
expect(flyerDataArg.store_name).toContain('Unknown Store'); // Fallback should be used
|
||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||
{ bodyData: expect.any(Object) },
|
||||
'Missing extractedData in /api/ai/flyers/process payload.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle payload where extractedData is at the root of the body', async () => {
|
||||
// This simulates a client sending multipart fields for each property of extractedData
|
||||
const response = await supertest(app)
|
||||
@@ -438,6 +535,27 @@ describe('AI Routes (/api/ai)', () => {
|
||||
const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
|
||||
expect(flyerDataArg.store_name).toBe('Root Store');
|
||||
});
|
||||
|
||||
it('should default item quantity to 1 if missing', async () => {
|
||||
const payloadMissingQuantity = {
|
||||
checksum: 'qty-checksum',
|
||||
originalFileName: 'flyer-qty.jpg',
|
||||
extractedData: {
|
||||
store_name: 'Qty Store',
|
||||
items: [{ name: 'Item without qty', price: 100 }],
|
||||
},
|
||||
};
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/flyers/process')
|
||||
.field('data', JSON.stringify(payloadMissingQuantity))
|
||||
.attach('flyerImage', imagePath);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
||||
const itemsArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][1];
|
||||
expect(itemsArg[0].quantity).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /check-flyer', () => {
|
||||
@@ -557,10 +675,11 @@ describe('AI Routes (/api/ai)', () => {
|
||||
const mockUser = createMockUserProfile({
|
||||
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
||||
});
|
||||
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUser });
|
||||
|
||||
beforeEach(() => {
|
||||
// Inject an authenticated user for this test block
|
||||
app.use((req, res, next) => {
|
||||
authenticatedApp.use((req, res, next) => {
|
||||
req.user = mockUser;
|
||||
next();
|
||||
});
|
||||
@@ -575,7 +694,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
.field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
|
||||
.field('extractionType', 'item_details')
|
||||
.attach('image', imagePath);
|
||||
|
||||
// Use the authenticatedApp instance for requests in this block
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockResult);
|
||||
expect(aiService.aiService.extractTextFromImageArea).toHaveBeenCalled();
|
||||
@@ -586,7 +705,7 @@ describe('AI Routes (/api/ai)', () => {
|
||||
new Error('AI API is down'),
|
||||
);
|
||||
|
||||
const response = await supertest(app)
|
||||
const response = await supertest(authenticatedApp)
|
||||
.post('/api/ai/rescan-area')
|
||||
.field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
|
||||
.field('extractionType', 'item_details')
|
||||
@@ -602,15 +721,12 @@ describe('AI Routes (/api/ai)', () => {
|
||||
const mockUserProfile = createMockUserProfile({
|
||||
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
||||
});
|
||||
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUserProfile });
|
||||
|
||||
beforeEach(() => {
|
||||
// For this block, simulate an authenticated request by attaching the user.
|
||||
app.use((req, res, next) => {
|
||||
req.user = mockUserProfile;
|
||||
next();
|
||||
});
|
||||
// The authenticatedApp instance is already set up with mockUserProfile
|
||||
});
|
||||
|
||||
|
||||
it('POST /quick-insights should return the stubbed response', async () => {
|
||||
const response = await supertest(app)
|
||||
.post('/api/ai/quick-insights')
|
||||
|
||||
@@ -15,6 +15,7 @@ import { logger } from '../services/logger.server';
|
||||
import { UserProfile, ExtractedCoreData, ExtractedFlyerItem } from '../types';
|
||||
import { flyerQueue } from '../services/queueService.server';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { requiredString } from '../utils/zodUtils';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -26,9 +27,6 @@ interface FlyerProcessPayload extends Partial<ExtractedCoreData> {
|
||||
}
|
||||
|
||||
// --- Zod Schemas for AI Routes (as per ADR-003) ---
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
const uploadAndProcessSchema = z.object({
|
||||
body: z.object({
|
||||
@@ -52,6 +50,15 @@ const errMsg = (e: unknown) => {
|
||||
return String(e || 'An unknown error occurred.');
|
||||
};
|
||||
|
||||
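Only the tail of errMsg is visible in this hunk, but the diagnostic-middleware tests added earlier in this change exercise three branches: an object with a message property, a primitive string, and null/undefined. A sketch of the full helper consistent with those expectations (the first two branches are inferred, not shown here):

// Consistent with the diagnostic-middleware tests: extract a readable message from an unknown error.
const errMsg = (e: unknown) => {
  if (e instanceof Error) return e.message;
  if (e && typeof e === 'object' && 'message' in e) return String((e as { message: unknown }).message);
  return String(e || 'An unknown error occurred.');
};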
const cleanupUploadedFile = async (file?: Express.Multer.File) => {
|
||||
if (!file) return;
|
||||
try {
|
||||
await fs.promises.unlink(file.path);
|
||||
} catch (err) {
|
||||
// Ignore cleanup errors (e.g. file already deleted)
|
||||
}
|
||||
};
|
||||
|
||||
const cropAreaObjectSchema = z.object({
|
||||
x: z.number(),
|
||||
y: z.number(),
|
||||
@@ -187,7 +194,7 @@ router.use((req: Request, res: Response, next: NextFunction) => {
|
||||
'[API /ai] Incoming request',
|
||||
);
|
||||
} catch (e: unknown) {
|
||||
logger.error({ error: e }, 'Failed to log incoming AI request headers');
|
||||
logger.error({ error: errMsg(e) }, 'Failed to log incoming AI request headers');
|
||||
}
|
||||
next();
|
||||
});
|
||||
@@ -318,7 +325,7 @@ router.post(
|
||||
|
||||
// Try several ways to obtain the payload so we are tolerant to client variations.
|
||||
let parsed: FlyerProcessPayload = {};
|
||||
let extractedData: Partial<ExtractedCoreData> = {};
|
||||
let extractedData: Partial<ExtractedCoreData> | null | undefined = {};
|
||||
try {
|
||||
// If the client sent a top-level `data` field (stringified JSON), parse it.
|
||||
if (req.body && (req.body.data || req.body.extractedData)) {
|
||||
@@ -339,7 +346,7 @@ router.post(
|
||||
) as FlyerProcessPayload;
|
||||
}
|
||||
// If parsed itself contains an `extractedData` field, use that, otherwise assume parsed is the extractedData
|
||||
extractedData = parsed.extractedData ?? (parsed as Partial<ExtractedCoreData>);
|
||||
extractedData = 'extractedData' in parsed ? parsed.extractedData : (parsed as Partial<ExtractedCoreData>);
|
||||
} else {
|
||||
// No explicit `data` field found. Attempt to interpret req.body as an object (Express may have parsed multipart fields differently).
|
||||
try {
|
||||
@@ -385,6 +392,12 @@ router.post(
|
||||
|
||||
// Pull common metadata fields (checksum, originalFileName) from whichever shape we parsed.
|
||||
const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
|
||||
|
||||
if (!checksum) {
|
||||
await cleanupUploadedFile(req.file);
|
||||
return res.status(400).json({ message: 'Checksum is required.' });
|
||||
}
|
||||
|
||||
const originalFileName =
|
||||
parsed.originalFileName ?? parsed?.data?.originalFileName ?? req.file.originalname;
|
||||
const userProfile = req.user as UserProfile | undefined;
|
||||
@@ -411,6 +424,7 @@ router.post(
|
||||
const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
|
||||
...item,
|
||||
master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
|
||||
quantity: item.quantity ?? 1, // Default to 1 to satisfy DB constraint
|
||||
view_count: 0,
|
||||
click_count: 0,
|
||||
updated_at: new Date().toISOString(),
|
||||
@@ -431,6 +445,7 @@ router.post(
|
||||
const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, req.log);
|
||||
if (existingFlyer) {
|
||||
logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${checksum}`);
|
||||
await cleanupUploadedFile(req.file);
|
||||
return res.status(409).json({ message: 'This flyer has already been processed.' });
|
||||
}
|
||||
|
||||
@@ -478,6 +493,7 @@ router.post(
|
||||
|
||||
res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
|
||||
} catch (error) {
|
||||
await cleanupUploadedFile(req.file);
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
// src/routes/auth.routes.ts
|
||||
import { Router, Request, Response, NextFunction } from 'express';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
import zxcvbn from 'zxcvbn';
|
||||
import { z } from 'zod';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import crypto from 'crypto';
|
||||
import rateLimit from 'express-rate-limit';
|
||||
|
||||
import passport from './passport.routes'; // Corrected import path
|
||||
import passport from './passport.routes';
|
||||
import { userRepo, adminRepo } from '../services/db/index.db';
|
||||
import { UniqueConstraintError } from '../services/db/errors.db';
|
||||
import { getPool } from '../services/db/connection.db';
|
||||
@@ -15,38 +14,13 @@ import { logger } from '../services/logger.server';
|
||||
import { sendPasswordResetEmail } from '../services/emailService.server';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import type { UserProfile } from '../types';
|
||||
import { validatePasswordStrength } from '../utils/authUtils';
|
||||
import { requiredString } from '../utils/zodUtils';
|
||||
|
||||
const router = Router();
|
||||
|
||||
const JWT_SECRET = process.env.JWT_SECRET!;
|
||||
|
||||
/**
 * Validates the strength of a password using zxcvbn.
 * @param password The password to check.
 * @returns An object with `isValid` and an optional `feedback` message.
 */
const validatePasswordStrength = (password: string): { isValid: boolean; feedback?: string } => {
  const MIN_PASSWORD_SCORE = 3; // Require a 'Good' or 'Strong' password (score 3 or 4)
  const strength = zxcvbn(password);

  if (strength.score < MIN_PASSWORD_SCORE) {
    const feedbackMessage =
      strength.feedback.warning ||
      (strength.feedback.suggestions && strength.feedback.suggestions[0]);
    return {
      isValid: false,
      feedback:
        `Password is too weak. ${feedbackMessage || 'Please choose a stronger password.'}`.trim(),
    };
  }

  return { isValid: true };
};
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// Conditionally disable rate limiting for the test environment
|
||||
const isTestEnv = process.env.NODE_ENV === 'test';
|
||||
|
||||
@@ -69,8 +43,6 @@ const resetPasswordLimiter = rateLimit({
|
||||
skip: () => isTestEnv, // Skip this middleware if in test environment
|
||||
});
|
||||
|
||||
// --- Zod Schemas for Auth Routes (as per ADR-003) ---
|
||||
|
||||
const registerSchema = z.object({
|
||||
body: z.object({
|
||||
email: z.string().email('A valid email is required.'),
|
||||
@@ -162,8 +134,8 @@ router.post(
|
||||
// If the email is a duplicate, return a 409 Conflict status.
|
||||
return res.status(409).json({ message: error.message });
|
||||
}
|
||||
// The createUser method now handles its own transaction logging, so we just log the route failure.
|
||||
logger.error({ error }, `User registration route failed for email: ${email}.`);
|
||||
// Pass the error to the centralized handler
|
||||
return next(error);
|
||||
}
|
||||
},
|
||||
@@ -213,7 +185,7 @@ router.post('/login', (req: Request, res: Response, next: NextFunction) => {
|
||||
const accessToken = jwt.sign(payload, JWT_SECRET, { expiresIn: '15m' });
|
||||
|
||||
try {
|
||||
const refreshToken = crypto.randomBytes(64).toString('hex'); // This was a duplicate, fixed.
|
||||
const refreshToken = crypto.randomBytes(64).toString('hex');
|
||||
await userRepo.saveRefreshToken(userProfile.user.user_id, refreshToken, req.log);
|
||||
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
|
||||
|
||||
|
||||
@@ -69,17 +69,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
||||
vi.mocked(db.budgetRepo.getSpendingByCategory).mockResolvedValue([]);
|
||||
});
|
||||
|
||||
const app = createTestApp({
|
||||
router: budgetRouter,
|
||||
basePath: '/api/budgets',
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
const app = createTestApp({ router: budgetRouter, basePath: '/api/budgets', authenticatedUser: mockUserProfile });
|
||||
|
||||
describe('GET /', () => {
|
||||
it('should return a list of budgets for the user', async () => {
|
||||
|
||||
@@ -5,20 +5,12 @@ import passport from './passport.routes';
|
||||
import { budgetRepo } from '../services/db/index.db';
|
||||
import type { UserProfile } from '../types';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { requiredString, numericIdParam } from '../utils/zodUtils';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for Budget Routes (as per ADR-003) ---
|
||||
|
||||
const budgetIdParamSchema = z.object({
|
||||
params: z.object({
|
||||
id: z.coerce.number().int().positive("Invalid ID for parameter 'id'. Must be a number."),
|
||||
}),
|
||||
});
|
||||
const budgetIdParamSchema = numericIdParam('id', "Invalid ID for parameter 'id'. Must be a number.");
|
||||
|
||||
const createBudgetSchema = z.object({
|
||||
body: z.object({
|
||||
|
||||
@@ -54,13 +54,6 @@ describe('Deals Routes (/api/users/deals)', () => {
|
||||
authenticatedUser: mockUser,
|
||||
});
|
||||
const unauthenticatedApp = createTestApp({ router: dealsRouter, basePath });
|
||||
const errorHandler = (err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
};
|
||||
|
||||
// Apply the handler to both app instances
|
||||
authenticatedApp.use(errorHandler);
|
||||
unauthenticatedApp.use(errorHandler);
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
@@ -40,12 +40,6 @@ describe('Flyer Routes (/api/flyers)', () => {
|
||||
|
||||
const app = createTestApp({ router: flyerRouter, basePath: '/api/flyers' });
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
describe('GET /', () => {
|
||||
it('should return a list of flyers on success', async () => {
|
||||
const mockFlyers = [createMockFlyer({ flyer_id: 1 }), createMockFlyer({ flyer_id: 2 })];
|
||||
|
||||
@@ -3,6 +3,7 @@ import { Router } from 'express';
|
||||
import * as db from '../services/db/index.db';
|
||||
import { z } from 'zod';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { optionalNumeric } from '../utils/zodUtils';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -10,8 +11,8 @@ const router = Router();
|
||||
|
||||
const getFlyersSchema = z.object({
|
||||
query: z.object({
|
||||
limit: z.coerce.number().int().positive().optional().default(20),
|
||||
offset: z.coerce.number().int().nonnegative().optional().default(0),
|
||||
limit: optionalNumeric({ default: 20, integer: true, positive: true }),
|
||||
offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -107,6 +108,7 @@ router.post(
|
||||
const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
|
||||
res.json(items);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching batch flyer items');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -126,6 +128,7 @@ router.post(
|
||||
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log);
|
||||
res.json({ count });
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error counting batch flyer items');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -86,12 +86,6 @@ describe('Gamification Routes (/api/achievements)', () => {
|
||||
basePath,
|
||||
authenticatedUser: mockAdminProfile,
|
||||
});
|
||||
const errorHandler = (err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
};
|
||||
unauthenticatedApp.use(errorHandler);
|
||||
authenticatedApp.use(errorHandler);
|
||||
adminApp.use(errorHandler);
|
||||
|
||||
describe('GET /', () => {
|
||||
it('should return a list of all achievements (public endpoint)', async () => {
|
||||
|
||||
@@ -7,19 +7,16 @@ import { logger } from '../services/logger.server';
|
||||
import { UserProfile } from '../types';
|
||||
import { ForeignKeyConstraintError } from '../services/db/errors.db';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { requiredString, optionalNumeric } from '../utils/zodUtils';
|
||||
|
||||
const router = express.Router();
|
||||
const adminGamificationRouter = express.Router(); // Create a new router for admin-only routes.
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for Gamification Routes (as per ADR-003) ---
|
||||
|
||||
const leaderboardSchema = z.object({
|
||||
query: z.object({
|
||||
limit: z.coerce.number().int().positive().max(50).optional().default(10),
|
||||
limit: optionalNumeric({ default: 10, integer: true, positive: true, max: 50 }),
|
||||
}),
|
||||
});
|
||||
|
||||
|
||||
@@ -46,12 +46,6 @@ const { logger } = await import('../services/logger.server');
|
||||
// 2. Create a minimal Express app to host the router for testing.
|
||||
const app = createTestApp({ router: healthRouter, basePath: '/api/health' });
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
describe('Health Routes (/api/health)', () => {
|
||||
beforeEach(() => {
|
||||
// Clear mock history before each test to ensure isolation.
|
||||
|
||||
@@ -39,10 +39,7 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
|
||||
}
|
||||
return res.status(200).json({ success: true, message: 'All required database tables exist.' });
|
||||
} catch (error: unknown) {
|
||||
logger.error(
|
||||
{ error: error instanceof Error ? error.message : error },
|
||||
'Error during DB schema check:',
|
||||
);
|
||||
logger.error({ error }, 'Error during DB schema check:');
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
@@ -133,6 +130,7 @@ router.get(
|
||||
}
|
||||
throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
|
||||
} catch (error: unknown) {
|
||||
logger.error({ error }, 'Error checking Redis health');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -30,12 +30,6 @@ vi.mock('../services/logger.server', () => ({
|
||||
describe('Personalization Routes (/api/personalization)', () => {
|
||||
const app = createTestApp({ router: personalizationRouter, basePath: '/api/personalization' });
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
@@ -4,8 +4,21 @@ import supertest from 'supertest';
|
||||
import { createTestApp } from '../tests/utils/createTestApp';
|
||||
import { mockLogger } from '../tests/utils/mockLogger';
|
||||
|
||||
// Mock the price repository
|
||||
vi.mock('../services/db/price.db', () => ({
|
||||
priceRepo: {
|
||||
getPriceHistory: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock the logger to keep test output clean
|
||||
vi.mock('../services/logger.server', () => ({
|
||||
logger: mockLogger,
|
||||
}));
|
||||
|
||||
// Import the router AFTER other setup.
|
||||
import priceRouter from './price.routes';
|
||||
import { priceRepo } from '../services/db/price.db';
|
||||
|
||||
describe('Price Routes (/api/price-history)', () => {
|
||||
const app = createTestApp({ router: priceRouter, basePath: '/api/price-history' });
|
||||
@@ -14,32 +27,106 @@ describe('Price Routes (/api/price-history)', () => {
|
||||
});
|
||||
|
||||
describe('POST /', () => {
|
||||
it('should return 200 OK with an empty array for a valid request', async () => {
|
||||
const masterItemIds = [1, 2, 3];
|
||||
const response = await supertest(app).post('/api/price-history').send({ masterItemIds });
|
||||
it('should return 200 OK with price history data for a valid request', async () => {
|
||||
const mockHistory = [
|
||||
{ master_item_id: 1, price_in_cents: 199, date: '2024-01-01T00:00:00.000Z' },
|
||||
{ master_item_id: 2, price_in_cents: 299, date: '2024-01-08T00:00:00.000Z' },
|
||||
];
|
||||
vi.mocked(priceRepo.getPriceHistory).mockResolvedValue(mockHistory);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: [1, 2] });
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual([]);
|
||||
expect(response.body).toEqual(mockHistory);
|
||||
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith([1, 2], expect.any(Object), 1000, 0);
|
||||
});
|
||||
|
||||
it('should pass limit and offset from the body to the repository', async () => {
|
||||
vi.mocked(priceRepo.getPriceHistory).mockResolvedValue([]);
|
||||
await supertest(app)
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: [1, 2, 3], limit: 50, offset: 10 });
|
||||
|
||||
expect(priceRepo.getPriceHistory).toHaveBeenCalledWith(
|
||||
[1, 2, 3],
|
||||
expect.any(Object),
|
||||
50,
|
||||
10,
|
||||
);
|
||||
});
|
||||
|
||||
it('should log the request info', async () => {
|
||||
vi.mocked(priceRepo.getPriceHistory).mockResolvedValue([]);
|
||||
await supertest(app)
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: [1, 2, 3], limit: 25, offset: 5 });
|
||||
|
||||
expect(mockLogger.info).toHaveBeenCalledWith(
|
||||
{ itemCount: masterItemIds.length },
|
||||
{ itemCount: 3, limit: 25, offset: 5 },
|
||||
'[API /price-history] Received request for historical price data.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 500 if the database call fails', async () => {
|
||||
const dbError = new Error('Database connection failed');
|
||||
vi.mocked(priceRepo.getPriceHistory).mockRejectedValue(dbError);
|
||||
|
||||
const response = await supertest(app)
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: [1, 2, 3] });
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
expect(response.body.message).toBe('Database connection failed');
|
||||
});
|
||||
|
||||
it('should return 400 if masterItemIds is an empty array', async () => {
|
||||
const response = await supertest(app).post('/api/price-history').send({ masterItemIds: [] });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
'masterItemIds must be a non-empty array of positive integers.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return 400 if masterItemIds is not an array', async () => {
|
||||
const response = await supertest(app)
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: 'not-an-array' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toMatch(/Expected array, received string/i);
|
||||
// The actual message is "Invalid input: expected array, received string"
|
||||
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received string');
|
||||
});
|
||||
|
||||
it('should return 400 if masterItemIds is an empty array', async () => {
|
||||
const response = await supertest(app).post('/api/price-history').send({ masterItemIds: [] });
|
||||
it('should return 400 if masterItemIds contains non-positive integers', async () => {
|
||||
const response = await supertest(app)
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: [1, -2, 3] });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors[0].message).toBe(
|
||||
'masterItemIds must be a non-empty array of positive integers.',
|
||||
);
|
||||
expect(response.body.errors[0].message).toBe('Number must be greater than 0');
|
||||
});
|
||||
|
||||
it('should return 400 if masterItemIds is missing', async () => {
|
||||
const response = await supertest(app).post('/api/price-history').send({});
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
// The actual message is "Invalid input: expected array, received undefined"
|
||||
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received undefined');
|
||||
});
|
||||
|
||||
it('should return 400 for invalid limit and offset', async () => {
|
||||
const response = await supertest(app)
|
||||
.post('/api/price-history')
|
||||
.send({ masterItemIds: [1], limit: -1, offset: 'abc' });
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.body.errors).toHaveLength(2);
|
||||
// The actual message is "Too small: expected number to be >0"
|
||||
expect(response.body.errors[0].message).toBe('Too small: expected number to be >0');
|
||||
expect(response.body.errors[1].message).toBe('Invalid input: expected number, received NaN');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,15 +1,21 @@
// src/routes/price.routes.ts
import { Router, Request, Response } from 'express';
import { Router, Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import { validateRequest } from '../middleware/validation.middleware';
import { priceRepo } from '../services/db/price.db';
import { optionalNumeric } from '../utils/zodUtils';

const router = Router();

const priceHistorySchema = z.object({
  body: z.object({
    masterItemIds: z.array(z.number().int().positive()).nonempty({
      message: 'masterItemIds must be a non-empty array of positive integers.',
    }),
    masterItemIds: z
      .array(z.number().int().positive('Number must be greater than 0'))
      .nonempty({
        message: 'masterItemIds must be a non-empty array of positive integers.',
      }),
    limit: optionalNumeric({ default: 1000, integer: true, positive: true }),
    offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
  }),
});

@@ -18,18 +24,23 @@ type PriceHistoryRequest = z.infer<typeof priceHistorySchema>;

/**
 * POST /api/price-history - Fetches historical price data for a given list of master item IDs.
 * This is a placeholder implementation.
 * This endpoint retrieves price points over time for specified master grocery items.
 */
router.post('/', validateRequest(priceHistorySchema), async (req: Request, res: Response) => {
router.post('/', validateRequest(priceHistorySchema), async (req: Request, res: Response, next: NextFunction) => {
  // Cast 'req' to the inferred type for full type safety.
  const {
    body: { masterItemIds },
    body: { masterItemIds, limit, offset },
  } = req as unknown as PriceHistoryRequest;
  req.log.info(
    { itemCount: masterItemIds.length },
    { itemCount: masterItemIds.length, limit, offset },
    '[API /price-history] Received request for historical price data.',
  );
  res.status(200).json([]);
  try {
    const priceHistory = await priceRepo.getPriceHistory(masterItemIds, req.log, limit, offset);
    res.status(200).json(priceHistory);
  } catch (error) {
    next(error);
  }
});

export default router;
|
||||
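The new routes import optionalNumeric, optionalBoolean, requiredString, and numericIdParam from src/utils/zodUtils, but that file is not included in this compare view. Below is a minimal sketch of what those helpers could look like: requiredString and numericIdParam mirror the inline versions removed from the route files later in this diff, while optionalNumeric and optionalBoolean are assumptions inferred from their call sites.

// Hypothetical sketch of src/utils/zodUtils.ts; option names and defaults for the
// optionalNumeric/optionalBoolean helpers are inferred from call sites, not from this PR.
import { z } from 'zod';

interface OptionalNumericOpts {
  default: number;
  min?: number;
  max?: number;
  integer?: boolean;
  positive?: boolean;
  nonnegative?: boolean;
}

export const optionalNumeric = (opts: OptionalNumericOpts) => {
  let schema = z.coerce.number();
  if (opts.integer) schema = schema.int();
  if (opts.positive) schema = schema.positive();
  if (opts.nonnegative) schema = schema.nonnegative();
  if (opts.min !== undefined) schema = schema.min(opts.min);
  if (opts.max !== undefined) schema = schema.max(opts.max);
  return schema.optional().default(opts.default);
};

// Assumed shape: query-string booleans arrive as 'true'/'false' strings.
export const optionalBoolean = (opts: { default: boolean }) =>
  z
    .preprocess((val) => (typeof val === 'string' ? val === 'true' : val), z.boolean())
    .optional()
    .default(opts.default);

// Treats missing/null values as empty strings so one message covers all failure modes.
export const requiredString = (message: string) =>
  z.preprocess((val) => val ?? '', z.string().min(1, message));

// Wraps a numeric URL parameter in the { params: { ... } } shape used by validateRequest.
export const numericIdParam = (key: string) =>
  z.object({
    params: z.object({
      [key]: z.coerce.number().int().positive(`Invalid ID for parameter '${key}'. Must be a number.`),
    }),
  });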
@@ -35,12 +35,6 @@ const expectLogger = expect.objectContaining({
|
||||
describe('Recipe Routes (/api/recipes)', () => {
|
||||
const app = createTestApp({ router: recipeRouter, basePath: '/api/recipes' });
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
@@ -3,24 +3,19 @@ import { Router } from 'express';
|
||||
import { z } from 'zod';
|
||||
import * as db from '../services/db/index.db';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { requiredString, numericIdParam, optionalNumeric } from '../utils/zodUtils';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for Recipe Routes (as per ADR-003) ---
|
||||
|
||||
const bySalePercentageSchema = z.object({
|
||||
query: z.object({
|
||||
minPercentage: z.coerce.number().min(0).max(100).optional().default(50),
|
||||
minPercentage: optionalNumeric({ default: 50, min: 0, max: 100 }),
|
||||
}),
|
||||
});
|
||||
|
||||
const bySaleIngredientsSchema = z.object({
|
||||
query: z.object({
|
||||
minIngredients: z.coerce.number().int().positive().optional().default(3),
|
||||
minIngredients: optionalNumeric({ default: 3, integer: true, positive: true }),
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -31,11 +26,7 @@ const byIngredientAndTagSchema = z.object({
|
||||
}),
|
||||
});
|
||||
|
||||
const recipeIdParamsSchema = z.object({
|
||||
params: z.object({
|
||||
recipeId: z.coerce.number().int().positive(),
|
||||
}),
|
||||
});
|
||||
const recipeIdParamsSchema = numericIdParam('recipeId');
|
||||
|
||||
/**
|
||||
* GET /api/recipes/by-sale-percentage - Get recipes based on the percentage of their ingredients on sale.
|
||||
@@ -47,7 +38,7 @@ router.get(
|
||||
try {
|
||||
// Explicitly parse req.query to apply coercion (string -> number) and default values
|
||||
const { query } = bySalePercentageSchema.parse({ query: req.query });
|
||||
const recipes = await db.recipeRepo.getRecipesBySalePercentage(query.minPercentage, req.log);
|
||||
const recipes = await db.recipeRepo.getRecipesBySalePercentage(query.minPercentage!, req.log);
|
||||
res.json(recipes);
|
||||
} catch (error) {
|
||||
req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-sale-percentage:');
|
||||
@@ -67,7 +58,7 @@ router.get(
|
||||
// Explicitly parse req.query to apply coercion (string -> number) and default values
|
||||
const { query } = bySaleIngredientsSchema.parse({ query: req.query });
|
||||
const recipes = await db.recipeRepo.getRecipesByMinSaleIngredients(
|
||||
query.minIngredients,
|
||||
query.minIngredients!,
|
||||
req.log,
|
||||
);
|
||||
res.json(recipes);
|
||||
|
||||
@@ -28,12 +28,6 @@ const expectLogger = expect.objectContaining({
|
||||
describe('Stats Routes (/api/stats)', () => {
|
||||
const app = createTestApp({ router: statsRouter, basePath: '/api/stats' });
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
@@ -3,6 +3,7 @@ import { Router, Request, Response, NextFunction } from 'express';
|
||||
import { z } from 'zod';
|
||||
import * as db from '../services/db/index.db';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { optionalNumeric } from '../utils/zodUtils';
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -10,8 +11,8 @@ const router = Router();
|
||||
|
||||
// Define the query schema separately so we can use it to parse req.query in the handler
|
||||
const statsQuerySchema = z.object({
|
||||
days: z.coerce.number().int().min(1).max(365).optional().default(30),
|
||||
limit: z.coerce.number().int().min(1).max(50).optional().default(10),
|
||||
days: optionalNumeric({ default: 30, min: 1, max: 365, integer: true }),
|
||||
limit: optionalNumeric({ default: 10, min: 1, max: 50, integer: true }),
|
||||
});
|
||||
|
||||
const mostFrequentSalesSchema = z.object({
|
||||
@@ -31,7 +32,7 @@ router.get(
|
||||
// Even though validateRequest checks validity, it may not mutate req.query with the parsed result.
|
||||
const { days, limit } = statsQuerySchema.parse(req.query);
|
||||
|
||||
const items = await db.adminRepo.getMostFrequentSaleItems(days, limit, req.log);
|
||||
const items = await db.adminRepo.getMostFrequentSaleItems(days!, limit!, req.log);
|
||||
res.json(items);
|
||||
} catch (error) {
|
||||
req.log.error(
|
||||
|
||||
@@ -42,11 +42,6 @@ vi.mock('../services/logger.server', () => ({
|
||||
describe('System Routes (/api/system)', () => {
|
||||
const app = createTestApp({ router: systemRouter, basePath: '/api/system' });
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// We cast here to get type-safe access to mock functions like .mockImplementation
|
||||
vi.clearAllMocks();
|
||||
|
||||
@@ -5,13 +5,10 @@ import { z } from 'zod';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { geocodingService } from '../services/geocodingService.server';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { requiredString } from '../utils/zodUtils';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
const geocodeSchema = z.object({
|
||||
body: z.object({
|
||||
address: requiredString('An address string is required.'),
|
||||
@@ -49,7 +46,6 @@ router.get(
|
||||
}
|
||||
|
||||
// Check if there was output to stderr, even if the exit code was 0 (success).
|
||||
// This handles warnings or non-fatal errors that should arguably be treated as failures in this context.
|
||||
if (stderr && stderr.trim().length > 0) {
|
||||
logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr:');
|
||||
return next(new Error(`PM2 command produced an error: ${stderr}`));
|
||||
@@ -89,6 +85,7 @@ router.post(
|
||||
|
||||
res.json(coordinates);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error geocoding address');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -173,12 +173,6 @@ describe('User Routes (/api/users)', () => {
|
||||
});
|
||||
const app = createTestApp({ router: userRouter, basePath, authenticatedUser: mockUserProfile });
|
||||
|
||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
||||
app.use((err: any, req: any, res: any, next: any) => {
|
||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// All tests in this block will use the authenticated app
|
||||
});
|
||||
@@ -883,20 +877,41 @@ describe('User Routes (/api/users)', () => {
|
||||
});
|
||||
|
||||
describe('Notification Routes', () => {
|
||||
it('GET /notifications should return notifications for the user', async () => {
|
||||
it('GET /notifications should return only unread notifications by default', async () => {
|
||||
const mockNotifications: Notification[] = [
|
||||
createMockNotification({ user_id: 'user-123', content: 'Test' }),
|
||||
];
|
||||
vi.mocked(db.notificationRepo.getNotificationsForUser).mockResolvedValue(mockNotifications);
|
||||
|
||||
const response = await supertest(app).get('/api/users/notifications?limit=10&offset=0');
|
||||
const response = await supertest(app).get('/api/users/notifications?limit=10');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockNotifications);
|
||||
expect(db.notificationRepo.getNotificationsForUser).toHaveBeenCalledWith(
|
||||
'user-123',
|
||||
10,
|
||||
0,
|
||||
0, // default offset
|
||||
false, // default includeRead
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
it('GET /notifications?includeRead=true should return all notifications', async () => {
|
||||
const mockNotifications: Notification[] = [
|
||||
createMockNotification({ user_id: 'user-123', content: 'Read', is_read: true }),
|
||||
createMockNotification({ user_id: 'user-123', content: 'Unread', is_read: false }),
|
||||
];
|
||||
vi.mocked(db.notificationRepo.getNotificationsForUser).mockResolvedValue(mockNotifications);
|
||||
|
||||
const response = await supertest(app).get('/api/users/notifications?includeRead=true');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body).toEqual(mockNotifications);
|
||||
expect(db.notificationRepo.getNotificationsForUser).toHaveBeenCalledWith(
|
||||
'user-123',
|
||||
20, // default limit
|
||||
0, // default offset
|
||||
true, // includeRead from query param
|
||||
expectLogger,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -4,57 +4,24 @@ import passport from './passport.routes';
|
||||
import multer from 'multer';
|
||||
import path from 'path';
|
||||
import fs from 'node:fs/promises';
|
||||
import * as bcrypt from 'bcrypt';
|
||||
import zxcvbn from 'zxcvbn';
|
||||
import * as bcrypt from 'bcrypt'; // This was a duplicate, fixed.
|
||||
import { z } from 'zod';
|
||||
import * as db from '../services/db/index.db';
|
||||
import { logger } from '../services/logger.server';
|
||||
import { UserProfile } from '../types';
|
||||
import { userService } from '../services/userService';
|
||||
import { ForeignKeyConstraintError } from '../services/db/errors.db';
|
||||
import { validateRequest } from '../middleware/validation.middleware';
|
||||
import { validatePasswordStrength } from '../utils/authUtils';
|
||||
import {
|
||||
requiredString,
|
||||
numericIdParam,
|
||||
optionalNumeric,
|
||||
optionalBoolean,
|
||||
} from '../utils/zodUtils';
|
||||
import * as db from '../services/db/index.db';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
/**
|
||||
* Validates the strength of a password using zxcvbn.
|
||||
* @param password The password to check.
|
||||
* @returns An object with `isValid` and an optional `feedback` message.
|
||||
*/
|
||||
const validatePasswordStrength = (password: string): { isValid: boolean; feedback?: string } => {
|
||||
const MIN_PASSWORD_SCORE = 3; // Require a 'Good' or 'Strong' password (score 3 or 4)
|
||||
const strength = zxcvbn(password);
|
||||
|
||||
if (strength.score < MIN_PASSWORD_SCORE) {
|
||||
const feedbackMessage =
|
||||
strength.feedback.warning ||
|
||||
(strength.feedback.suggestions && strength.feedback.suggestions[0]);
|
||||
return {
|
||||
isValid: false,
|
||||
feedback:
|
||||
`Password is too weak. ${feedbackMessage || 'Please choose a stronger password.'}`.trim(),
|
||||
};
|
||||
}
|
||||
|
||||
return { isValid: true };
|
||||
};
|
||||
|
||||
// Helper for consistent required string validation (handles missing/null/empty)
|
||||
const requiredString = (message: string) =>
|
||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
||||
|
||||
// --- Zod Schemas for User Routes (as per ADR-003) ---
|
||||
|
||||
const numericIdParam = (key: string) =>
|
||||
z.object({
|
||||
params: z.object({
|
||||
[key]: z.coerce
|
||||
.number()
|
||||
.int()
|
||||
.positive(`Invalid ID for parameter '${key}'. Must be a number.`),
|
||||
}),
|
||||
});
|
||||
|
||||
const updateProfileSchema = z.object({
|
||||
body: z
|
||||
.object({ full_name: z.string().optional(), avatar_url: z.string().url().optional() })
|
||||
@@ -93,8 +60,9 @@ const createShoppingListSchema = z.object({
|
||||
// Apply the JWT authentication middleware to all routes in this file.
|
||||
const notificationQuerySchema = z.object({
|
||||
query: z.object({
|
||||
limit: z.coerce.number().int().positive().optional().default(20),
|
||||
offset: z.coerce.number().int().nonnegative().optional().default(0),
|
||||
limit: optionalNumeric({ default: 20, integer: true, positive: true }),
|
||||
offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
|
||||
includeRead: optionalBoolean({ default: false }),
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -109,7 +77,7 @@ router.use(passport.authenticate('jwt', { session: false }));
|
||||
// Ensure the directory for avatar uploads exists.
|
||||
const avatarUploadDir = path.join(process.cwd(), 'public', 'uploads', 'avatars');
|
||||
fs.mkdir(avatarUploadDir, { recursive: true }).catch((err) => {
|
||||
logger.error('Failed to create avatar upload directory:', err);
|
||||
logger.error({ err }, 'Failed to create avatar upload directory');
|
||||
});
|
||||
|
||||
// Define multer storage configuration. The `req.user` object will be available
|
||||
@@ -154,6 +122,7 @@ router.post(
|
||||
);
|
||||
res.json(updatedProfile);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error uploading avatar');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -173,17 +142,17 @@ router.get(
|
||||
// Apply ADR-003 pattern for type safety
|
||||
try {
|
||||
const { query } = req as unknown as GetNotificationsRequest;
|
||||
// Explicitly convert to numbers to ensure the repo receives correct types
|
||||
const limit = query.limit ? Number(query.limit) : 20;
|
||||
const offset = query.offset ? Number(query.offset) : 0;
|
||||
const parsedQuery = notificationQuerySchema.parse({ query: req.query }).query;
|
||||
const notifications = await db.notificationRepo.getNotificationsForUser(
|
||||
userProfile.user.user_id,
|
||||
limit,
|
||||
offset,
|
||||
parsedQuery.limit!,
|
||||
parsedQuery.offset!,
|
||||
parsedQuery.includeRead!,
|
||||
req.log,
|
||||
);
|
||||
res.json(notifications);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching notifications');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -201,6 +170,7 @@ router.post(
|
||||
await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log);
|
||||
res.status(204).send(); // No Content
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error marking all notifications as read');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -226,6 +196,7 @@ router.post(
|
||||
);
|
||||
res.status(204).send(); // Success, no content to return
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error marking notification as read');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -378,11 +349,7 @@ router.post(
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
}
|
||||
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
|
||||
logger.error({
|
||||
errorMessage,
|
||||
body: req.body,
|
||||
});
|
||||
logger.error({ error, body: req.body }, 'Failed to add watched item');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -486,11 +453,7 @@ router.post(
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
}
|
||||
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
|
||||
logger.error({
|
||||
errorMessage,
|
||||
body: req.body,
|
||||
});
|
||||
logger.error({ error, body: req.body }, 'Failed to create shopping list');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -549,12 +512,7 @@ router.post(
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
}
|
||||
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
|
||||
logger.error({
|
||||
errorMessage,
|
||||
params: req.params,
|
||||
body: req.body,
|
||||
});
|
||||
logger.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -694,11 +652,7 @@ router.put(
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
}
|
||||
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
|
||||
logger.error({
|
||||
errorMessage,
|
||||
body: req.body,
|
||||
});
|
||||
logger.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -742,11 +696,7 @@ router.put(
|
||||
if (error instanceof ForeignKeyConstraintError) {
|
||||
return res.status(400).json({ message: error.message });
|
||||
}
|
||||
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
|
||||
logger.error({
|
||||
errorMessage,
|
||||
body: req.body,
|
||||
});
|
||||
logger.error({ error, body: req.body }, 'Failed to set user appliances');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -776,6 +726,7 @@ router.get(
|
||||
const address = await db.addressRepo.getAddressById(addressId, req.log); // This will throw NotFoundError if not found
|
||||
res.json(address);
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error fetching user address');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
@@ -814,6 +765,7 @@ router.put(
|
||||
const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed.
|
||||
res.status(200).json({ message: 'Address updated successfully', address_id: addressId });
|
||||
} catch (error) {
|
||||
logger.error({ error }, 'Error updating user address');
|
||||
next(error);
|
||||
}
|
||||
},
|
||||
|
||||
@@ -51,9 +51,7 @@ export class AiAnalysisService {
|
||||
// Normalize sources to a consistent format.
|
||||
const mappedSources = (response.sources || []).map(
|
||||
(s: RawSource) =>
|
||||
(s.web
|
||||
? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
|
||||
: { uri: '', title: 'Untitled' }) as Source,
|
||||
(s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
|
||||
);
|
||||
return { ...response, sources: mappedSources };
|
||||
}
|
||||
@@ -84,9 +82,7 @@ export class AiAnalysisService {
|
||||
// Normalize sources to a consistent format.
|
||||
const mappedSources = (response.sources || []).map(
|
||||
(s: RawSource) =>
|
||||
(s.web
|
||||
? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
|
||||
: { uri: '', title: 'Untitled' }) as Source,
|
||||
(s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
|
||||
);
|
||||
return { ...response, sources: mappedSources };
|
||||
}
|
||||
|
||||
@@ -4,7 +4,13 @@
|
||||
* It communicates with the application's own backend endpoints, which then securely
|
||||
* call the Google AI services. This ensures no API keys are exposed on the client.
|
||||
*/
|
||||
import type { FlyerItem, Store, MasterGroceryItem } from '../types';
|
||||
import type {
|
||||
FlyerItem,
|
||||
Store,
|
||||
MasterGroceryItem,
|
||||
ProcessingStage,
|
||||
GroundedResponse,
|
||||
} from '../types';
|
||||
import { logger } from './logger.client';
|
||||
import { apiFetch } from './apiClient';
|
||||
|
||||
@@ -20,14 +26,14 @@ export const uploadAndProcessFlyer = async (
|
||||
file: File,
|
||||
checksum: string,
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
): Promise<{ jobId: string }> => {
|
||||
const formData = new FormData();
|
||||
formData.append('flyerFile', file);
|
||||
formData.append('checksum', checksum);
|
||||
|
||||
logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
|
||||
|
||||
return apiFetch(
|
||||
const response = await apiFetch(
|
||||
'/ai/upload-and-process',
|
||||
{
|
||||
method: 'POST',
|
||||
@@ -35,20 +41,73 @@ export const uploadAndProcessFlyer = async (
|
||||
},
|
||||
{ tokenOverride },
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
let errorBody;
|
||||
try {
|
||||
errorBody = await response.json();
|
||||
} catch (e) {
|
||||
errorBody = { message: await response.text() };
|
||||
}
|
||||
// Throw a structured error so the component can inspect the status and body
|
||||
throw { status: response.status, body: errorBody };
|
||||
}
|
||||
|
||||
return response.json();
|
||||
};
|
||||
|
||||
// Define the expected shape of the job status response
|
||||
export interface JobStatus {
|
||||
id: string;
|
||||
state: 'completed' | 'failed' | 'active' | 'waiting' | 'delayed' | 'paused';
|
||||
progress: {
|
||||
stages?: ProcessingStage[];
|
||||
estimatedTimeRemaining?: number;
|
||||
message?: string;
|
||||
} | null;
|
||||
returnValue: {
|
||||
flyerId?: number;
|
||||
} | null;
|
||||
failedReason: string | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches the status of a background processing job.
|
||||
* This is the second step in the new background processing flow.
|
||||
* @param jobId The ID of the job to check.
|
||||
* @param tokenOverride Optional token for testing.
|
||||
* @returns A promise that resolves to the API response with the job's status.
|
||||
* @returns A promise that resolves to the parsed job status object.
|
||||
* @throws An error if the network request fails or if the response is not valid JSON.
|
||||
*/
|
||||
export const getJobStatus = async (jobId: string, tokenOverride?: string): Promise<Response> => {
|
||||
return apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
|
||||
export const getJobStatus = async (
|
||||
jobId: string,
|
||||
tokenOverride?: string,
|
||||
): Promise<JobStatus> => {
|
||||
const response = await apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
|
||||
|
||||
if (!response.ok) {
|
||||
let errorText = `API Error: ${response.status} ${response.statusText}`;
|
||||
try {
|
||||
const errorBody = await response.text();
|
||||
if (errorBody) errorText = `API Error ${response.status}: ${errorBody}`;
|
||||
} catch (e) {
|
||||
// ignore if reading body fails
|
||||
}
|
||||
throw new Error(errorText);
|
||||
}
|
||||
|
||||
try {
|
||||
return await response.json();
|
||||
} catch (error) {
|
||||
const rawText = await response.text();
|
||||
throw new Error(`Failed to parse JSON response from server. Body: ${rawText}`);
|
||||
}
|
||||
};
|
||||
|
||||
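Because getJobStatus now resolves to a parsed JobStatus rather than a raw Response, callers can poll it directly. A minimal polling sketch, assuming it is added alongside getJobStatus in aiApiClient.ts; the 2-second interval and 2-minute timeout are illustrative values, not taken from this PR.

// Hypothetical helper, not part of this PR. Polls until the job completes or fails.
export const waitForFlyerJob = async (jobId: string, timeoutMs = 120_000): Promise<JobStatus> => {
  const start = Date.now();
  for (;;) {
    const status = await getJobStatus(jobId);
    if (status.state === 'completed' || status.state === 'failed') {
      return status;
    }
    if (Date.now() - start > timeoutMs) {
      throw new Error(`Timed out waiting for job ${jobId}`);
    }
    await new Promise((resolve) => setTimeout(resolve, 2_000)); // poll every 2 seconds
  }
};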
export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Promise<Response> => {
|
||||
export const isImageAFlyer = (
|
||||
imageFile: File,
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
const formData = new FormData();
|
||||
formData.append('image', imageFile);
|
||||
|
||||
@@ -64,7 +123,7 @@ export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Pr
|
||||
);
|
||||
};
|
||||
|
||||
export const extractAddressFromImage = async (
|
||||
export const extractAddressFromImage = (
|
||||
imageFile: File,
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
@@ -81,7 +140,7 @@ export const extractAddressFromImage = async (
|
||||
);
|
||||
};
|
||||
|
||||
export const extractLogoFromImage = async (
|
||||
export const extractLogoFromImage = (
|
||||
imageFiles: File[],
|
||||
tokenOverride?: string,
|
||||
): Promise<Response> => {
|
||||
@@ -100,7 +159,7 @@ export const extractLogoFromImage = async (
|
||||
);
|
||||
};
|
||||
|
||||
export const getQuickInsights = async (
|
||||
export const getQuickInsights = (
|
||||
items: Partial<FlyerItem>[],
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
@@ -117,7 +176,7 @@ export const getQuickInsights = async (
|
||||
);
|
||||
};
|
||||
|
||||
export const getDeepDiveAnalysis = async (
|
||||
export const getDeepDiveAnalysis = (
|
||||
items: Partial<FlyerItem>[],
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
@@ -134,7 +193,7 @@ export const getDeepDiveAnalysis = async (
|
||||
);
|
||||
};
|
||||
|
||||
export const searchWeb = async (
|
||||
export const searchWeb = (
|
||||
query: string,
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
@@ -179,7 +238,7 @@ export const planTripWithMaps = async (
|
||||
* @param prompt A description of the image to generate (e.g., a meal plan).
|
||||
* @returns A base64-encoded string of the generated PNG image.
|
||||
*/
|
||||
export const generateImageFromText = async (
|
||||
export const generateImageFromText = (
|
||||
prompt: string,
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
@@ -202,7 +261,7 @@ export const generateImageFromText = async (
|
||||
* @param text The text to be spoken.
|
||||
* @returns A base64-encoded string of the raw audio data.
|
||||
*/
|
||||
export const generateSpeechFromText = async (
|
||||
export const generateSpeechFromText = (
|
||||
text: string,
|
||||
signal?: AbortSignal,
|
||||
tokenOverride?: string,
|
||||
@@ -259,7 +318,7 @@ export const startVoiceSession = (callbacks: {
|
||||
* @param tokenOverride Optional token for testing.
|
||||
* @returns A promise that resolves to the API response containing the extracted text.
|
||||
*/
|
||||
export const rescanImageArea = async (
|
||||
export const rescanImageArea = (
|
||||
imageFile: File,
|
||||
cropArea: { x: number; y: number; width: number; height: number },
|
||||
extractionType: 'store_name' | 'dates' | 'item_details',
|
||||
@@ -270,7 +329,11 @@ export const rescanImageArea = async (
|
||||
formData.append('cropArea', JSON.stringify(cropArea));
|
||||
formData.append('extractionType', extractionType);
|
||||
|
||||
return apiFetch('/ai/rescan-area', { method: 'POST', body: formData }, { tokenOverride });
|
||||
return apiFetch(
|
||||
'/ai/rescan-area',
|
||||
{ method: 'POST', body: formData },
|
||||
{ tokenOverride },
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -278,7 +341,7 @@ export const rescanImageArea = async (
|
||||
* @param watchedItems An array of the user's watched master grocery items.
|
||||
* @returns A promise that resolves to the raw `Response` object from the API.
|
||||
*/
|
||||
export const compareWatchedItemPrices = async (
|
||||
export const compareWatchedItemPrices = (
|
||||
watchedItems: MasterGroceryItem[],
|
||||
signal?: AbortSignal,
|
||||
): Promise<Response> => {
|
||||
@@ -292,5 +355,4 @@ export const compareWatchedItemPrices = async (
|
||||
body: JSON.stringify({ items: watchedItems }),
|
||||
},
|
||||
{ signal },
|
||||
);
|
||||
};
|
||||
)};
|
||||
|
||||
@@ -166,6 +166,127 @@ describe('AI Service (Server)', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Model Fallback Logic', () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
process.env = { ...originalEnv, GEMINI_API_KEY: 'test-key' };
|
||||
vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
it('should try the next model if the first one fails with a quota error', async () => {
|
||||
// Arrange
|
||||
const { AIService } = await import('./aiService.server');
|
||||
const { logger } = await import('./logger.server');
|
||||
const serviceWithFallback = new AIService(logger);
|
||||
|
||||
const quotaError = new Error('User rate limit exceeded due to quota');
|
||||
const successResponse = { text: 'Success from fallback model', candidates: [] };
|
||||
|
||||
// Mock the generateContent function to fail on the first call and succeed on the second
|
||||
mockGenerateContent.mockRejectedValueOnce(quotaError).mockResolvedValueOnce(successResponse);
|
||||
|
||||
const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
|
||||
|
||||
// Act
|
||||
const result = await (serviceWithFallback as any).aiClient.generateContent(request);
|
||||
|
||||
// Assert
|
||||
expect(result).toEqual(successResponse);
|
||||
expect(mockGenerateContent).toHaveBeenCalledTimes(2);
|
||||
|
||||
// Check first call
|
||||
expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
|
||||
model: 'gemini-2.5-flash',
|
||||
...request,
|
||||
});
|
||||
|
||||
// Check second call
|
||||
expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
|
||||
model: 'gemini-3-flash',
|
||||
...request,
|
||||
});
|
||||
|
||||
// Check that a warning was logged
|
||||
expect(logger.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining(
|
||||
"Model 'gemini-2.5-flash' failed due to quota/rate limit. Trying next model.",
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw immediately for non-retriable errors', async () => {
|
||||
// Arrange
|
||||
const { AIService } = await import('./aiService.server');
|
||||
const { logger } = await import('./logger.server');
|
||||
const serviceWithFallback = new AIService(logger);
|
||||
|
||||
const nonRetriableError = new Error('Invalid API Key');
|
||||
mockGenerateContent.mockRejectedValueOnce(nonRetriableError);
|
||||
|
||||
const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
|
||||
|
||||
// Act & Assert
|
||||
await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
|
||||
'Invalid API Key',
|
||||
);
|
||||
|
||||
expect(mockGenerateContent).toHaveBeenCalledTimes(1);
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
{ error: nonRetriableError },
|
||||
`[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw the last error if all models fail', async () => {
|
||||
// Arrange
|
||||
const { AIService } = await import('./aiService.server');
|
||||
const { logger } = await import('./logger.server');
|
||||
const serviceWithFallback = new AIService(logger);
|
||||
|
||||
const quotaError1 = new Error('Quota exhausted for model 1');
|
||||
const quotaError2 = new Error('429 Too Many Requests for model 2');
|
||||
const quotaError3 = new Error('RESOURCE_EXHAUSTED for model 3');
|
||||
|
||||
mockGenerateContent
|
||||
.mockRejectedValueOnce(quotaError1)
|
||||
.mockRejectedValueOnce(quotaError2)
|
||||
.mockRejectedValueOnce(quotaError3);
|
||||
|
||||
const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
|
||||
|
||||
// Act & Assert
|
||||
await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
|
||||
quotaError3,
|
||||
);
|
||||
|
||||
expect(mockGenerateContent).toHaveBeenCalledTimes(3);
|
||||
expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
|
||||
model: 'gemini-2.5-flash',
|
||||
...request,
|
||||
});
|
||||
expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
|
||||
model: 'gemini-3-flash',
|
||||
...request,
|
||||
});
|
||||
expect(mockGenerateContent).toHaveBeenNthCalledWith(3, {
|
||||
model: 'gemini-2.5-flash-lite',
|
||||
...request,
|
||||
});
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
{ lastError: quotaError3 },
|
||||
'[AIService Adapter] All AI models failed. Throwing last known error.',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractItemsFromReceiptImage', () => {
|
||||
it('should extract items from a valid AI response', async () => {
|
||||
const mockAiResponseText = `[
|
||||
|
||||
@@ -72,6 +72,7 @@ export class AIService {
|
||||
private fs: IFileSystem;
|
||||
private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
|
||||
private logger: Logger;
|
||||
private readonly models = ['gemini-2.5-flash', 'gemini-3-flash', 'gemini-2.5-flash-lite'];
|
||||
|
||||
constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
|
||||
this.logger = logger;
|
||||
@@ -121,17 +122,11 @@ export class AIService {
|
||||
);
|
||||
}
|
||||
|
||||
// do not change "gemini-2.5-flash" - this is correct
|
||||
const modelName = 'gemini-2.5-flash';
|
||||
|
||||
// We create a shim/adapter that matches the old structure but uses the new SDK call pattern.
|
||||
// This preserves the dependency injection pattern used throughout the class.
|
||||
this.aiClient = genAI
|
||||
? {
|
||||
generateContent: async (request) => {
|
||||
// The model name is now injected here, into every call, as the new SDK requires.
|
||||
// Architectural guard clause: All requests from this service must have content.
|
||||
// This prevents sending invalid requests to the API and satisfies TypeScript's strictness.
|
||||
if (!request.contents || request.contents.length === 0) {
|
||||
this.logger.error(
|
||||
{ request },
|
||||
@@ -140,14 +135,7 @@ export class AIService {
|
||||
throw new Error('AIService.generateContent requires at least one content element.');
|
||||
}
|
||||
|
||||
// Architectural Fix: After the guard clause, assign the guaranteed-to-exist element
|
||||
// to a new constant. This provides a definitive type-safe variable for the compiler.
|
||||
const firstContent = request.contents[0];
|
||||
this.logger.debug(
|
||||
{ modelName, requestParts: firstContent.parts?.length ?? 0 },
|
||||
'[AIService] Calling actual generateContent via adapter.',
|
||||
);
|
||||
return genAI.models.generateContent({ model: modelName, ...request });
|
||||
return this._generateWithFallback(genAI, request);
|
||||
},
|
||||
}
|
||||
: {
|
||||
@@ -182,6 +170,54 @@ export class AIService {
|
||||
this.logger.info('---------------- [AIService] Constructor End ----------------');
|
||||
}
|
||||
|
||||
private async _generateWithFallback(
|
||||
genAI: GoogleGenAI,
|
||||
request: { contents: Content[]; tools?: Tool[] },
|
||||
): Promise<GenerateContentResponse> {
|
||||
let lastError: Error | null = null;
|
||||
|
||||
for (const modelName of this.models) {
|
||||
try {
|
||||
this.logger.info(
|
||||
`[AIService Adapter] Attempting to generate content with model: ${modelName}`,
|
||||
);
|
||||
const result = await genAI.models.generateContent({ model: modelName, ...request });
|
||||
// If the call succeeds, return the result immediately.
|
||||
return result;
|
||||
} catch (error: unknown) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
const errorMessage = (lastError.message || '').toLowerCase(); // Make case-insensitive
|
||||
|
||||
// Check for specific error messages indicating quota issues or model unavailability.
|
||||
if (
|
||||
errorMessage.includes('quota') ||
|
||||
errorMessage.includes('429') || // HTTP 429 Too Many Requests
|
||||
errorMessage.includes('resource_exhausted') || // Make case-insensitive
|
||||
errorMessage.includes('model is overloaded')
|
||||
) {
|
||||
this.logger.warn(
|
||||
`[AIService Adapter] Model '${modelName}' failed due to quota/rate limit. Trying next model. Error: ${errorMessage}`,
|
||||
);
|
||||
continue; // Try the next model in the list.
|
||||
} else {
|
||||
// For other errors (e.g., invalid input, safety settings), fail immediately.
|
||||
this.logger.error(
|
||||
{ error: lastError },
|
||||
`[AIService Adapter] Model '${modelName}' failed with a non-retriable error.`,
|
||||
);
|
||||
throw lastError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If all models in the list have failed, throw the last error encountered.
|
||||
this.logger.error(
|
||||
{ lastError },
|
||||
'[AIService Adapter] All AI models failed. Throwing last known error.',
|
||||
);
|
||||
throw lastError || new Error('All AI models failed to generate content.');
|
||||
}
|
||||
|
||||
private async serverFileToGenerativePart(path: string, mimeType: string) {
|
||||
const fileData = await this.fs.readFile(path);
|
||||
return {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
// src/services/apiClient.ts
|
||||
import { Profile, ShoppingListItem, SearchQuery, Budget, Address } from '../types';
|
||||
import { logger } from './logger.client';
|
||||
import { eventBus } from './eventBus';
|
||||
|
||||
// This constant should point to your backend API.
|
||||
// It's often a good practice to store this in an environment variable.
|
||||
@@ -62,12 +63,12 @@ const refreshToken = async (): Promise<string> => {
|
||||
logger.info('Successfully refreshed access token.');
|
||||
return data.token;
|
||||
} catch (error) {
|
||||
logger.error('Failed to refresh token. User will be logged out.', { error });
|
||||
logger.error({ error }, 'Failed to refresh token. User session has expired.');
|
||||
// Only perform browser-specific actions if in the browser environment.
|
||||
if (typeof window !== 'undefined') {
|
||||
localStorage.removeItem('authToken');
|
||||
// A hard redirect is a simple way to reset the app state to logged-out.
|
||||
// window.location.href = '/'; // Removed to allow the caller to handle session expiry.
|
||||
// Dispatch a global event that the UI layer can listen for to handle session expiry.
|
||||
eventBus.dispatch('sessionExpired');
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
@@ -144,9 +145,8 @@ export const apiFetch = async (
|
||||
// --- DEBUG LOGGING for failed requests ---
|
||||
if (!response.ok) {
|
||||
const responseText = await response.clone().text();
|
||||
logger.error(
|
||||
`apiFetch: Request to ${fullUrl} failed with status ${response.status}. Response body:`,
|
||||
responseText,
|
||||
logger.error({ url: fullUrl, status: response.status, body: responseText },
|
||||
'apiFetch: Request failed',
|
||||
);
|
||||
}
|
||||
// --- END DEBUG LOGGING ---
|
||||
|
||||
@@ -32,7 +32,7 @@ describe('Notification DB Service', () => {
|
||||
});
|
||||
|
||||
describe('getNotificationsForUser', () => {
|
||||
it('should execute the correct query with limit and offset and return notifications', async () => {
|
||||
it('should only return unread notifications by default', async () => {
|
||||
const mockNotifications: Notification[] = [
|
||||
createMockNotification({
|
||||
notification_id: 1,
|
||||
@@ -43,30 +43,59 @@ describe('Notification DB Service', () => {
|
||||
];
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: mockNotifications });
|
||||
|
||||
const result = await notificationRepo.getNotificationsForUser('user-123', 10, 5, mockLogger);
|
||||
const result = await notificationRepo.getNotificationsForUser(
|
||||
'user-123',
|
||||
10,
|
||||
5,
|
||||
false,
|
||||
mockLogger,
|
||||
);
|
||||
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('SELECT * FROM public.notifications'),
|
||||
expect.stringContaining('is_read = false'),
|
||||
['user-123', 10, 5],
|
||||
);
|
||||
expect(result).toEqual(mockNotifications);
|
||||
});
|
||||
|
||||
it('should return all notifications when includeRead is true', async () => {
|
||||
const mockNotifications: Notification[] = [
|
||||
createMockNotification({ is_read: true }),
|
||||
createMockNotification({ is_read: false }),
|
||||
];
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: mockNotifications });
|
||||
|
||||
await notificationRepo.getNotificationsForUser('user-123', 10, 0, true, mockLogger);
|
||||
|
||||
// The query should NOT contain the is_read filter
|
||||
expect(mockPoolInstance.query.mock.calls[0][0]).not.toContain('is_read = false');
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(expect.any(String), ['user-123', 10, 0]);
|
||||
});
|
||||
|
||||
it('should return an empty array if the user has no notifications', async () => {
|
||||
mockPoolInstance.query.mockResolvedValue({ rows: [] });
|
||||
const result = await notificationRepo.getNotificationsForUser('user-456', 10, 0, mockLogger);
|
||||
const result = await notificationRepo.getNotificationsForUser(
|
||||
'user-456',
|
||||
10,
|
||||
0,
|
||||
false,
|
||||
mockLogger,
|
||||
);
|
||||
expect(result).toEqual([]);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(expect.any(String), ['user-456', 10, 0]);
|
||||
expect(mockPoolInstance.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('is_read = false'),
|
||||
['user-456', 10, 0],
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw an error if the database query fails', async () => {
|
||||
const dbError = new Error('DB Error');
|
||||
mockPoolInstance.query.mockRejectedValue(dbError);
|
||||
await expect(
|
||||
notificationRepo.getNotificationsForUser('user-123', 10, 5, mockLogger),
|
||||
notificationRepo.getNotificationsForUser('user-123', 10, 5, false, mockLogger),
|
||||
).rejects.toThrow('Failed to retrieve notifications.');
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
{ err: dbError, userId: 'user-123', limit: 10, offset: 5 },
|
||||
{ err: dbError, userId: 'user-123', limit: 10, offset: 5, includeRead: false },
|
||||
'Database error in getNotificationsForUser',
|
||||
);
|
||||
});
|
||||
|
||||
@@ -95,20 +95,24 @@ export class NotificationRepository {
|
||||
userId: string,
|
||||
limit: number,
|
||||
offset: number,
|
||||
includeRead: boolean,
|
||||
logger: Logger,
|
||||
): Promise<Notification[]> {
|
||||
try {
|
||||
const res = await this.db.query<Notification>(
|
||||
`SELECT * FROM public.notifications
|
||||
WHERE user_id = $1
|
||||
ORDER BY created_at DESC
|
||||
LIMIT $2 OFFSET $3`,
|
||||
[userId, limit, offset],
|
||||
);
|
||||
const params: (string | number)[] = [userId, limit, offset];
|
||||
let query = `SELECT * FROM public.notifications WHERE user_id = $1`;
|
||||
|
||||
if (!includeRead) {
|
||||
query += ` AND is_read = false`;
|
||||
}
|
||||
|
||||
query += ` ORDER BY created_at DESC LIMIT $2 OFFSET $3`;
|
||||
|
||||
const res = await this.db.query<Notification>(query, params);
|
||||
return res.rows;
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ err: error, userId, limit, offset },
|
||||
{ err: error, userId, limit, offset, includeRead },
|
||||
'Database error in getNotificationsForUser',
|
||||
);
|
||||
throw new Error('Failed to retrieve notifications.');
|
||||
|
||||
src/services/db/price.db.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
// src/services/db/price.db.ts
|
||||
import type { Logger } from 'pino';
|
||||
import type { PriceHistoryData } from '../../types';
|
||||
import { getPool } from './connection.db';
|
||||
|
||||
/**
|
||||
* Repository for fetching price-related data.
|
||||
*/
|
||||
export const priceRepo = {
|
||||
/**
|
||||
* Fetches the historical price data for a given list of master item IDs.
|
||||
* It retrieves the price in cents and the start date of the flyer for each item.
|
||||
*
|
||||
* @param masterItemIds An array of master grocery item IDs.
|
||||
* @param logger The pino logger instance.
|
||||
* @param limit The maximum number of records to return.
|
||||
* @param offset The number of records to skip.
|
||||
* @returns A promise that resolves to an array of price history data points.
|
||||
*/
|
||||
async getPriceHistory(
|
||||
masterItemIds: number[],
|
||||
logger: Logger,
|
||||
limit: number = 1000,
|
||||
offset: number = 0,
|
||||
): Promise<PriceHistoryData[]> {
|
||||
if (masterItemIds.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const query = `
|
||||
SELECT
|
||||
fi.master_item_id,
|
||||
fi.price_in_cents,
|
||||
f.valid_from AS date
|
||||
FROM public.flyer_items fi
|
||||
JOIN public.flyers f ON fi.flyer_id = f.flyer_id
|
||||
WHERE
|
||||
fi.master_item_id = ANY($1::int[])
|
||||
AND f.valid_from IS NOT NULL
|
||||
AND fi.price_in_cents IS NOT NULL
|
||||
ORDER BY
|
||||
fi.master_item_id, f.valid_from ASC
|
||||
LIMIT $2 OFFSET $3;
|
||||
`;
|
||||
|
||||
const result = await getPool().query(query, [masterItemIds, limit, offset]);
|
||||
logger.debug(
|
||||
{ count: result.rows.length, itemIds: masterItemIds.length, limit, offset },
|
||||
'Fetched price history from database.',
|
||||
);
|
||||
return result.rows;
|
||||
},
|
||||
};
|
||||
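A short usage sketch for the new repository (hypothetical caller, not part of this PR); the row fields mirror the SELECT above: master_item_id, price_in_cents, and the flyer's valid_from aliased as date.

// Hypothetical caller, assumed to live next to price.db.ts.
import type { Logger } from 'pino';
import { priceRepo } from './price.db';

export async function logPriceTrend(itemIds: number[], log: Logger): Promise<void> {
  // First page of up to 100 points, ordered by item and flyer start date.
  const history = await priceRepo.getPriceHistory(itemIds, log, 100, 0);
  for (const point of history) {
    log.info(
      { itemId: point.master_item_id, cents: point.price_in_cents, date: point.date },
      'price point',
    );
  }
}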
src/services/eventBus.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
// src/services/eventBus.ts

/**
 * A simple, generic event bus for cross-component communication without direct coupling.
 * This is particularly useful for broadcasting application-wide events, such as session expiry.
 */

type EventCallback = (data?: any) => void;

class EventBus {
  private listeners: { [key: string]: EventCallback[] } = {};

  on(event: string, callback: EventCallback): void {
    if (!this.listeners[event]) {
      this.listeners[event] = [];
    }
    this.listeners[event].push(callback);
  }

  off(event: string, callback: EventCallback): void {
    if (!this.listeners[event]) return;
    this.listeners[event] = this.listeners[event].filter((l) => l !== callback);
  }

  dispatch(event: string, data?: any): void {
    if (!this.listeners[event]) return;
    this.listeners[event].forEach((callback) => callback(data));
  }
}

export const eventBus = new EventBus();
|
||||
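A brief usage sketch for the bus (not part of this PR), mirroring the 'sessionExpired' event that apiClient.ts dispatches elsewhere in this diff; the subscriber below is illustrative.

// Hypothetical subscriber. Registers for the session-expiry broadcast and
// removes the handler when it is no longer needed.
import { eventBus } from './eventBus';

const onSessionExpired = () => {
  // e.g. clear cached state and show the login screen
  console.warn('Session expired; returning to login.');
};

eventBus.on('sessionExpired', onSessionExpired);

// Later, when the subscriber shuts down:
eventBus.off('sessionExpired', onSessionExpired);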
@@ -25,10 +25,7 @@ export class GeocodingService {
|
||||
return JSON.parse(cached);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ err: error instanceof Error ? error.message : error, cacheKey },
|
||||
'Redis GET or JSON.parse command failed. Proceeding without cache.',
|
||||
);
|
||||
logger.error({ err: error, cacheKey }, 'Redis GET or JSON.parse command failed. Proceeding without cache.');
|
||||
}
|
||||
|
||||
if (process.env.GOOGLE_MAPS_API_KEY) {
|
||||
@@ -44,8 +41,8 @@ export class GeocodingService {
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ err: error instanceof Error ? error.message : error },
|
||||
'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.',
|
||||
{ err: error },
|
||||
'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.'
|
||||
);
|
||||
}
|
||||
} else {
|
||||
@@ -72,10 +69,7 @@ export class GeocodingService {
|
||||
try {
|
||||
await redis.set(cacheKey, JSON.stringify(result), 'EX', 60 * 60 * 24 * 30); // Cache for 30 days
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ err: error instanceof Error ? error.message : error, cacheKey },
|
||||
'Redis SET command failed. Result will not be cached.',
|
||||
);
|
||||
logger.error({ err: error, cacheKey }, 'Redis SET command failed. Result will not be cached.');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,10 +92,7 @@ export class GeocodingService {
|
||||
logger.info(`Successfully deleted ${totalDeleted} geocode cache entries.`);
|
||||
return totalDeleted;
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ err: error instanceof Error ? error.message : error },
|
||||
'Failed to clear geocode cache from Redis.',
|
||||
);
|
||||
logger.error({ err: error }, 'Failed to clear geocode cache from Redis.');
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,9 @@ export const logger = pino({
|
||||
'*.body.password',
|
||||
'*.body.newPassword',
|
||||
'*.body.currentPassword',
|
||||
'*.body.confirmPassword',
|
||||
'*.body.refreshToken',
|
||||
'*.body.token',
|
||||
],
|
||||
censor: '[REDACTED]',
|
||||
},
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// src/services/queueService.server.ts
|
||||
import { Queue, Worker, Job } from 'bullmq';
|
||||
import { Queue, Worker, Job, UnrecoverableError } from 'bullmq';
|
||||
import IORedis from 'ioredis'; // Correctly imported
|
||||
import fsPromises from 'node:fs/promises';
|
||||
import { exec } from 'child_process';
|
||||
@@ -185,9 +185,26 @@ const attachWorkerEventListeners = (worker: Worker) => {
|
||||
|
||||
export const flyerWorker = new Worker<FlyerJobData>(
|
||||
'flyer-processing', // Must match the queue name
|
||||
(job) => {
|
||||
// The processJob method creates its own job-specific logger internally.
|
||||
return flyerProcessingService.processJob(job);
|
||||
async (job) => {
|
||||
try {
|
||||
// The processJob method creates its own job-specific logger internally.
|
||||
return await flyerProcessingService.processJob(job);
|
||||
} catch (error: any) {
|
||||
// Check for quota errors or other unrecoverable errors from the AI service
|
||||
const errorMessage = error?.message || '';
|
||||
if (
|
||||
errorMessage.includes('quota') ||
|
||||
errorMessage.includes('429') ||
|
||||
errorMessage.includes('RESOURCE_EXHAUSTED')
|
||||
) {
|
||||
logger.error(
|
||||
{ err: error, jobId: job.id },
|
||||
'[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
|
||||
);
|
||||
throw new UnrecoverableError(errorMessage);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
{
|
||||
connection,
|
||||
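Context for the change above: BullMQ treats a job that throws UnrecoverableError as permanently failed and skips any remaining retry attempts, which is why quota and rate-limit errors are wrapped here rather than retried. A minimal illustration, not part of this PR (queue name and connection settings are placeholders):

// Minimal sketch: UnrecoverableError moves the job straight to the failed set,
// while a plain Error is retried up to the job's configured attempts.
import { Worker, UnrecoverableError } from 'bullmq';

const demoWorker = new Worker(
  'demo-queue',
  async (job) => {
    if (job.data.permanentlyBroken) {
      throw new UnrecoverableError('Do not retry this job'); // fails immediately, no retries
    }
    throw new Error('Transient failure'); // retried with the queue's backoff settings
  },
  { connection: { host: 'localhost', port: 6379 } },
);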
@@ -207,13 +224,9 @@ export const emailWorker = new Worker<EmailJobData>(
|
||||
try {
|
||||
await emailService.sendEmail(job.data, jobLogger);
|
||||
} catch (error: unknown) {
|
||||
// Standardize error logging to capture the full error object, including the stack trace.
|
||||
// This provides more context for debugging than just logging the message.
|
||||
logger.error(
|
||||
{
|
||||
// Log the full error object for better diagnostics. // The patch requested this specific error handling.
|
||||
err: error instanceof Error ? error : new Error(String(error)),
|
||||
// Also include the job data for context.
|
||||
err: error,
|
||||
jobData: job.data,
|
||||
},
|
||||
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||
@@ -249,11 +262,7 @@ export const analyticsWorker = new Worker<AnalyticsJobData>(
|
||||
logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
|
||||
} catch (error: unknown) {
|
||||
// Standardize error logging.
|
||||
logger.error(
|
||||
{
|
||||
err: error instanceof Error ? error : new Error(String(error)),
|
||||
jobData: job.data,
|
||||
},
|
||||
logger.error({ err: error, jobData: job.data },
|
||||
`[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||
);
|
||||
throw error; // Re-throw to let BullMQ handle the failure and retry.
|
||||
@@ -315,9 +324,7 @@ export const cleanupWorker = new Worker<CleanupJobData>(
|
||||
} catch (error: unknown) {
|
||||
// Standardize error logging.
|
||||
logger.error(
|
||||
{
|
||||
err: error instanceof Error ? error : new Error(String(error)),
|
||||
},
|
||||
{ err: error },
|
||||
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||
);
|
||||
throw error; // Re-throw to let BullMQ handle the failure and retry.
|
||||
@@ -350,10 +357,7 @@ export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
|
||||
} catch (error: unknown) {
|
||||
// Standardize error logging.
|
||||
logger.error(
|
||||
{
|
||||
err: error instanceof Error ? error : new Error(String(error)),
|
||||
jobData: job.data,
|
||||
},
|
||||
{ err: error, jobData: job.data },
|
||||
`[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
|
||||
);
|
||||
throw error; // Re-throw to let BullMQ handle the failure and retry.
|
||||
|
||||
src/tests/e2e/admin-dashboard.e2e.test.ts (new file, 96 lines)
@@ -0,0 +1,96 @@
// src/tests/e2e/admin-dashboard.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';

/**
 * @vitest-environment node
 */

const request = supertest(app);

describe('E2E Admin Dashboard Flow', () => {
  // Use a unique email for every run to avoid collisions
  const uniqueId = Date.now();
  const adminEmail = `e2e-admin-${uniqueId}@example.com`;
  const adminPassword = 'StrongPassword123!';

  let authToken: string;
  let adminUserId: string | null = null;

  afterAll(async () => {
    // Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
    if (adminUserId) {
      try {
        await getPool().query('DELETE FROM public.users WHERE user_id = $1', [adminUserId]);
      } catch (err) {
        console.error('Error cleaning up E2E admin user:', err);
      }
    }
  });

  it('should allow an admin to log in and access dashboard features', async () => {
    // 1. Register a new user (initially a regular user)
    const registerResponse = await request.post('/api/auth/register').send({
      email: adminEmail,
      password: adminPassword,
      full_name: 'E2E Admin User',
    });

    expect(registerResponse.status).toBe(201);
    const registeredUser = registerResponse.body.userprofile.user;
    adminUserId = registeredUser.user_id;
    expect(adminUserId).toBeDefined();

    // 2. Promote the user to 'admin' via direct DB access
    // (This simulates an existing admin or a manual promotion, as there is no public "register as admin" endpoint)
    await getPool().query(`UPDATE public.profiles SET role = 'admin' WHERE user_id = $1`, [
      adminUserId,
    ]);

    // 3. Login to get the access token (now with admin privileges)
    const loginResponse = await request.post('/api/auth/login').send({
      email: adminEmail,
      password: adminPassword,
    });

    expect(loginResponse.status).toBe(200);
    authToken = loginResponse.body.token;
    expect(authToken).toBeDefined();
    // Verify the role returned in the login response is now 'admin'
    expect(loginResponse.body.userprofile.role).toBe('admin');

    // 4. Fetch System Stats (Protected Admin Route)
    const statsResponse = await request
      .get('/api/admin/stats')
      .set('Authorization', `Bearer ${authToken}`);

    expect(statsResponse.status).toBe(200);
    expect(statsResponse.body).toHaveProperty('userCount');
    expect(statsResponse.body).toHaveProperty('flyerCount');

    // 5. Fetch User List (Protected Admin Route)
    const usersResponse = await request
      .get('/api/admin/users')
      .set('Authorization', `Bearer ${authToken}`);

    expect(usersResponse.status).toBe(200);
    expect(Array.isArray(usersResponse.body)).toBe(true);
    // The list should contain the admin user we just created
    const self = usersResponse.body.find((u: any) => u.user_id === adminUserId);
    expect(self).toBeDefined();

    // 6. Check Queue Status (Protected Admin Route)
    const queueResponse = await request
      .get('/api/admin/queues/status')
      .set('Authorization', `Bearer ${authToken}`);

    expect(queueResponse.status).toBe(200);
    expect(Array.isArray(queueResponse.body)).toBe(true);
    // Verify that the 'flyer-processing' queue is present in the status report
    const flyerQueue = queueResponse.body.find((q: any) => q.name === 'flyer-processing');
    expect(flyerQueue).toBeDefined();
    expect(flyerQueue.counts).toBeDefined();
  });
});
110
src/tests/e2e/flyer-upload.e2e.test.ts
Normal file
@@ -0,0 +1,110 @@
// src/tests/e2e/flyer-upload.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import crypto from 'crypto';
import path from 'path';
import fs from 'fs';

/**
 * @vitest-environment node
 */

const request = supertest(app);

describe('E2E Flyer Upload and Processing Workflow', () => {
  const uniqueId = Date.now();
  const userEmail = `e2e-uploader-${uniqueId}@example.com`;
  const userPassword = 'StrongPassword123!';

  let authToken: string;
  let userId: string | null = null;
  let flyerId: number | null = null;

  afterAll(async () => {
    // Cleanup: Delete the flyer and user created during the test
    const pool = getPool();
    if (flyerId) {
      await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [flyerId]);
    }
    if (userId) {
      await pool.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
    }
  });

  it('should allow a user to upload a flyer and wait for processing to complete', async () => {
    // 1. Register a new user
    const registerResponse = await request.post('/api/auth/register').send({
      email: userEmail,
      password: userPassword,
      full_name: 'E2E Flyer Uploader',
    });
    expect(registerResponse.status).toBe(201);

    // 2. Login to get the access token
    const loginResponse = await request.post('/api/auth/login').send({
      email: userEmail,
      password: userPassword,
    });
    expect(loginResponse.status).toBe(200);
    authToken = loginResponse.body.token;
    userId = loginResponse.body.userprofile.user.user_id;
    expect(authToken).toBeDefined();

    // 3. Prepare the flyer file
    // We try to use the existing test asset if available, otherwise create a dummy buffer.
    // Note: In a real E2E scenario against a live AI service, a valid image is required.
    // If the AI service is mocked or stubbed in this environment, a dummy buffer might suffice.
    let fileBuffer: Buffer;
    let fileName = `e2e-test-flyer-${uniqueId}.jpg`;

    const assetPath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
    if (fs.existsSync(assetPath)) {
      const rawBuffer = fs.readFileSync(assetPath);
      // Append unique ID to ensure unique checksum for every test run
      fileBuffer = Buffer.concat([rawBuffer, Buffer.from(uniqueId.toString())]);
    } else {
      // Fallback to a minimal valid JPEG header + random data if asset is missing
      // (This might fail if the backend does strict image validation/processing)
      fileBuffer = Buffer.concat([
        Buffer.from([0xff, 0xd8, 0xff, 0xe0]), // JPEG Start of Image
        Buffer.from(uniqueId.toString()),
      ]);
    }

    // Calculate checksum (required by the API)
    const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');

    // 4. Upload the flyer
    const uploadResponse = await request
      .post('/api/ai/upload-and-process')
      .set('Authorization', `Bearer ${authToken}`)
      .field('checksum', checksum)
      .attach('flyerFile', fileBuffer, fileName);

    expect(uploadResponse.status).toBe(202);
    const jobId = uploadResponse.body.jobId;
    expect(jobId).toBeDefined();

    // 5. Poll for job completion
    let jobStatus;
    const maxRetries = 30; // Poll for up to 90 seconds
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3s

      const statusResponse = await request
        .get(`/api/ai/jobs/${jobId}/status`)
        .set('Authorization', `Bearer ${authToken}`);

      jobStatus = statusResponse.body;
      if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
        break;
      }
    }

    expect(jobStatus.state).toBe('completed');
    flyerId = jobStatus.returnValue?.flyerId;
    expect(flyerId).toBeTypeOf('number');
  }, 120000); // Extended timeout for AI processing
});
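The polling loop above is repeated in the flyer-processing integration test further down. A shared helper along these lines could factor it out; this is a sketch only, and the helper name and module are assumptions, not part of this change:

// Hypothetical test helper (not in the repository): poll a job status endpoint until it settles.
import supertest from 'supertest';

export async function waitForJob(
  request: ReturnType<typeof supertest>,
  jobId: string,
  authToken: string,
  { retries = 30, intervalMs = 3000 } = {},
) {
  let jobStatus: any;
  for (let i = 0; i < retries; i++) {
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    const res = await request
      .get(`/api/ai/jobs/${jobId}/status`)
      .set('Authorization', `Bearer ${authToken}`);
    jobStatus = res.body;
    if (jobStatus.state === 'completed' || jobStatus.state === 'failed') break;
  }
  return jobStatus;
}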
111
src/tests/e2e/user-journey.e2e.test.ts
Normal file
@@ -0,0 +1,111 @@
// src/tests/e2e/user-journey.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';

/**
 * @vitest-environment node
 */

const request = supertest(app);

describe('E2E User Journey', () => {
  // Use a unique email for every run to avoid collisions
  const uniqueId = Date.now();
  const userEmail = `e2e-test-${uniqueId}@example.com`;
  const userPassword = 'StrongPassword123!';

  let authToken: string;
  let userId: string | null = null;
  let shoppingListId: number;

  afterAll(async () => {
    // Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
    // If the test succeeds, the user deletes their own account, so this acts as a fallback.
    if (userId) {
      try {
        await getPool().query('DELETE FROM public.users WHERE user_id = $1', [userId]);
      } catch (err) {
        console.error('Error cleaning up E2E test user:', err);
      }
    }
  });

  it('should complete a full user lifecycle: Register -> Login -> Manage List -> Delete Account', async () => {
    // 1. Register a new user
    const registerResponse = await request.post('/api/auth/register').send({
      email: userEmail,
      password: userPassword,
      full_name: 'E2E Traveler',
    });

    expect(registerResponse.status).toBe(201);
    expect(registerResponse.body.message).toBe('User registered successfully!');

    // 2. Login to get the access token
    const loginResponse = await request.post('/api/auth/login').send({
      email: userEmail,
      password: userPassword,
    });

    expect(loginResponse.status).toBe(200);
    authToken = loginResponse.body.token;
    userId = loginResponse.body.userprofile.user.user_id;

    expect(authToken).toBeDefined();
    expect(userId).toBeDefined();

    // 3. Create a Shopping List
    const createListResponse = await request
      .post('/api/users/shopping-lists')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ name: 'E2E Party List' });

    expect(createListResponse.status).toBe(201);
    shoppingListId = createListResponse.body.shopping_list_id;
    expect(shoppingListId).toBeDefined();

    // 4. Add an item to the list
    const addItemResponse = await request
      .post(`/api/users/shopping-lists/${shoppingListId}/items`)
      .set('Authorization', `Bearer ${authToken}`)
      .send({ customItemName: 'Chips' });

    expect(addItemResponse.status).toBe(201);
    expect(addItemResponse.body.custom_item_name).toBe('Chips');

    // 5. Verify the list and item exist via GET
    const getListsResponse = await request
      .get('/api/users/shopping-lists')
      .set('Authorization', `Bearer ${authToken}`);

    expect(getListsResponse.status).toBe(200);
    const myLists = getListsResponse.body;
    const targetList = myLists.find((l: any) => l.shopping_list_id === shoppingListId);

    expect(targetList).toBeDefined();
    expect(targetList.items).toHaveLength(1);
    expect(targetList.items[0].custom_item_name).toBe('Chips');

    // 6. Delete the User Account (Self-Service)
    const deleteAccountResponse = await request
      .delete('/api/users/account')
      .set('Authorization', `Bearer ${authToken}`)
      .send({ password: userPassword });

    expect(deleteAccountResponse.status).toBe(200);
    expect(deleteAccountResponse.body.message).toBe('Account deleted successfully.');

    // 7. Verify Login is no longer possible
    const failLoginResponse = await request.post('/api/auth/login').send({
      email: userEmail,
      password: userPassword,
    });

    expect(failLoginResponse.status).toBe(401);

    // Mark userId as null so afterAll doesn't attempt to delete it again
    userId = null;
  });
});
@@ -86,7 +86,7 @@ describe('Flyer Processing Background Job Integration Test', () => {
    // Act 2: Poll for the job status until it completes.
    let jobStatus;
    const maxRetries = 20; // Poll for up to 60 seconds (20 * 3s)
    const maxRetries = 30; // Poll for up to 90 seconds (30 * 3s)
    for (let i = 0; i < maxRetries; i++) {
      await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3 seconds between polls
      const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
141
src/tests/integration/price.integration.test.ts
Normal file
@@ -0,0 +1,141 @@
// src/tests/integration/price.integration.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';

/**
 * @vitest-environment node
 */

const request = supertest(app);

describe('Price History API Integration Test (/api/price-history)', () => {
  let masterItemId: number;
  let storeId: number;
  let flyerId1: number;
  let flyerId2: number;
  let flyerId3: number;

  beforeAll(async () => {
    const pool = getPool();

    // 1. Create a master grocery item
    const masterItemRes = await pool.query(
      `INSERT INTO public.master_grocery_items (name, category_id) VALUES ('Integration Test Apples', (SELECT category_id FROM categories WHERE name = 'Fruits & Vegetables' LIMIT 1)) RETURNING master_grocery_item_id`,
    );
    masterItemId = masterItemRes.rows[0].master_grocery_item_id;

    // 2. Create a store
    const storeRes = await pool.query(
      `INSERT INTO public.stores (name) VALUES ('Integration Price Test Store') RETURNING store_id`,
    );
    storeId = storeRes.rows[0].store_id;

    // 3. Create three flyers with different dates
    const flyerRes1 = await pool.query(
      `INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
       VALUES ($1, 'price-test-1.jpg', 'http://test.com/price-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
      [storeId, `checksum-price-1-${Date.now()}`],
    );
    flyerId1 = flyerRes1.rows[0].flyer_id;

    const flyerRes2 = await pool.query(
      `INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
       VALUES ($1, 'price-test-2.jpg', 'http://test.com/price-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
      [storeId, `checksum-price-2-${Date.now()}`],
    );
    flyerId2 = flyerRes2.rows[0].flyer_id;

    const flyerRes3 = await pool.query(
      `INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
       VALUES ($1, 'price-test-3.jpg', 'http://test.com/price-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
      [storeId, `checksum-price-3-${Date.now()}`],
    );
    flyerId3 = flyerRes3.rows[0].flyer_id;

    // 4. Create flyer items linking the master item to the flyers with prices
    await pool.query(
      `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 199, '$1.99', '1')`,
      [flyerId1, masterItemId],
    );
    await pool.query(
      `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 249, '$2.49', '1')`,
      [flyerId2, masterItemId],
    );
    await pool.query(
      `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 299, '$2.99', '1')`,
      [flyerId3, masterItemId],
    );
  });

  afterAll(async () => {
    const pool = getPool();
    // The CASCADE on the tables should handle flyer_items.
    // We just need to delete the flyers, store, and master item.
    const flyerIds = [flyerId1, flyerId2, flyerId3].filter(Boolean);
    if (flyerIds.length > 0) {
      await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::int[])', [flyerIds]);
    }
    if (storeId) await pool.query('DELETE FROM public.stores WHERE store_id = $1', [storeId]);
    if (masterItemId)
      await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = $1', [
        masterItemId,
      ]);
  });

  it('should return the correct price history for a given master item ID', async () => {
    const response = await request.post('/api/price-history').send({ masterItemIds: [masterItemId] });

    expect(response.status).toBe(200);
    expect(response.body).toBeInstanceOf(Array);
    expect(response.body).toHaveLength(3);

    expect(response.body[0]).toMatchObject({ master_item_id: masterItemId, price_in_cents: 199 });
    expect(response.body[1]).toMatchObject({ master_item_id: masterItemId, price_in_cents: 249 });
    expect(response.body[2]).toMatchObject({ master_item_id: masterItemId, price_in_cents: 299 });
  });

  it('should respect the limit parameter', async () => {
    const response = await request
      .post('/api/price-history')
      .send({ masterItemIds: [masterItemId], limit: 2 });

    expect(response.status).toBe(200);
    expect(response.body).toHaveLength(2);
    expect(response.body[0].price_in_cents).toBe(199);
    expect(response.body[1].price_in_cents).toBe(249);
  });

  it('should respect the offset parameter', async () => {
    const response = await request
      .post('/api/price-history')
      .send({ masterItemIds: [masterItemId], limit: 2, offset: 1 });

    expect(response.status).toBe(200);
    expect(response.body).toHaveLength(2);
    expect(response.body[0].price_in_cents).toBe(249);
    expect(response.body[1].price_in_cents).toBe(299);
  });

  it('should return price history sorted by date in ascending order', async () => {
    const response = await request.post('/api/price-history').send({ masterItemIds: [masterItemId] });

    expect(response.status).toBe(200);
    const history = response.body;
    expect(history).toHaveLength(3);

    const date1 = new Date(history[0].date).getTime();
    const date2 = new Date(history[1].date).getTime();
    const date3 = new Date(history[2].date).getTime();

    expect(date1).toBeLessThan(date2);
    expect(date2).toBeLessThan(date3);
  });

  it('should return an empty array for a master item ID with no price history', async () => {
    const response = await request.post('/api/price-history').send({ masterItemIds: [999999] });
    expect(response.status).toBe(200);
    expect(response.body).toEqual([]);
  });
});
@@ -955,3 +955,9 @@ export interface AdminUserView {
  full_name: string | null;
  avatar_url: string | null;
}

export interface PriceHistoryData {
  master_item_id: number;
  price_in_cents: number;
  date: string; // ISO date string
}
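As an illustration of how the new PriceHistoryData interface might be consumed on the client, here is a minimal sketch; the fetch wrapper, function name, and import path are assumptions, not code from this change. The request body mirrors the integration test above.

// Hypothetical client-side helper using the PriceHistoryData shape.
import type { PriceHistoryData } from './types'; // import path assumed

async function fetchPriceHistory(masterItemIds: number[], limit?: number): Promise<PriceHistoryData[]> {
  const res = await fetch('/api/price-history', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ masterItemIds, limit }),
  });
  if (!res.ok) throw new Error(`Price history request failed: ${res.status}`);
  return (await res.json()) as PriceHistoryData[];
}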
20
src/utils/authUtils.ts
Normal file
@@ -0,0 +1,20 @@
// src/utils/authUtils.ts
import zxcvbn from 'zxcvbn';

/**
 * Validates the strength of a password using zxcvbn.
 * @param password The password to validate.
 * @returns An object with `isValid` and a feedback message.
 */
export function validatePasswordStrength(password: string): {
  isValid: boolean;
  feedback: string;
} {
  const result = zxcvbn(password);
  // Score: 0-4. We require at least 3.
  if (result.score < 3) {
    const suggestions = result.feedback.suggestions.join(' ');
    return { isValid: false, feedback: `Password is too weak. ${suggestions}` };
  }
  return { isValid: true, feedback: '' };
}
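A minimal sketch of how a registration handler might call this helper, assuming an Express-style router; the route path, import path, and response shape are illustrative assumptions, not taken from this change:

// Hypothetical usage of validatePasswordStrength in a registration route.
import { Router } from 'express';
import { validatePasswordStrength } from '../utils/authUtils'; // path assumed

const router = Router();

router.post('/register', (req, res, next) => {
  const { isValid, feedback } = validatePasswordStrength(req.body.password ?? '');
  if (!isValid) {
    // Reject weak passwords before any user record is created.
    return res.status(400).json({ message: feedback });
  }
  // ...continue with the normal registration flow.
  next();
});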
387
src/utils/zodUtils.test.ts
Normal file
@@ -0,0 +1,387 @@
// src/utils/zodUtils.test.ts
import { describe, it, expect } from 'vitest';
import {
  requiredString,
  numericIdParam,
  uuidParamSchema,
  optionalBoolean,
  optionalNumeric,
  optionalDate,
} from './zodUtils';

describe('Zod Utilities', () => {
  describe('requiredString', () => {
    const customMessage = 'This field is required and cannot be empty.';
    const schema = requiredString(customMessage);

    it('should pass for a valid non-empty string', () => {
      const result = schema.safeParse('hello world');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe('hello world');
      }
    });

    it('should fail for an empty string with the custom message', () => {
      const result = schema.safeParse('');
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe(customMessage);
      }
    });

    it('should fail for a null value with the custom message', () => {
      const result = schema.safeParse(null);
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe(customMessage);
      }
    });

    it('should fail for an undefined value with the custom message', () => {
      const result = schema.safeParse(undefined);
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe(customMessage);
      }
    });

    it('should pass for a string containing only whitespace', () => {
      const result = schema.safeParse(' ');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(' ');
      }
    });

    it('should fail for a non-string value like a number with a Zod type error', () => {
      const result = schema.safeParse(123);
      expect(result.success).toBe(false);
      if (!result.success) {
        // z.string() will throw its own error message before min(1) is checked.
        expect(result.error.issues[0].message).toBe('Invalid input: expected string, received number');
      }
    });

    it('should fail for a non-string value like an object with a Zod type error', () => {
      const result = schema.safeParse({ a: 1 });
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe('Invalid input: expected string, received object');
      }
    });
  });

  describe('numericIdParam', () => {
    const schema = numericIdParam('id');

    it('should pass for a valid numeric string in params', () => {
      const result = schema.safeParse({ params: { id: '123' } });
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data.params.id).toBe(123);
      }
    });

    it('should pass for a valid number in params', () => {
      const result = schema.safeParse({ params: { id: 456 } });
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data.params.id).toBe(456);
      }
    });

    it('should fail for a non-numeric string', () => {
      const result = schema.safeParse({ params: { id: 'abc' } });
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe('Invalid input: expected number, received NaN');
      }
    });

    it('should fail for a negative number', () => {
      const result = schema.safeParse({ params: { id: -1 } });
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
      }
    });

    it('should fail for a floating point number', () => {
      const result = schema.safeParse({ params: { id: 1.5 } });
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
      }
    });

    it('should fail for zero', () => {
      const result = schema.safeParse({ params: { id: 0 } });
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
      }
    });

    it('should use a custom error message if provided', () => {
      const customMessage = 'A valid numeric ID is required.';
      const customSchema = numericIdParam('id', customMessage);
      const result = customSchema.safeParse({ params: { id: -5 } });
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe(customMessage);
      }
    });
  });

  describe('uuidParamSchema', () => {
    const customMessage = 'A valid UUID is required for the user ID.';
    const schema = uuidParamSchema('userId', customMessage);

    it('should pass for a valid UUID string', () => {
      const validUuid = '123e4567-e89b-12d3-a456-426614174000';
      const result = schema.safeParse({ params: { userId: validUuid } });
      expect(result.success).toBe(true);
    });

    it('should fail for an invalid UUID string', () => {
      const invalidUuid = 'not-a-uuid';
      const result = schema.safeParse({ params: { userId: invalidUuid } });
      expect(result.success).toBe(false);
      if (!result.success) {
        expect(result.error.issues[0].message).toBe(customMessage);
      }
    });

    it('should fail for a non-string value', () => {
      const result = schema.safeParse({ params: { userId: 12345 } });
      expect(result.success).toBe(false);
    });
  });

  describe('optionalNumeric', () => {
    it('should return the default value if input is undefined', () => {
      const schema = optionalNumeric({ default: 10 });
      const result = schema.safeParse(undefined);
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(10);
      }
    });

    it('should parse a valid numeric string', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse('123.45');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(123.45);
      }
    });

    it('should parse an empty string as 0', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse('');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(0);
      }
    });

    it('should parse a whitespace string as 0', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse(' ');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(0);
      }
    });

    it('should treat null as undefined, returning default value or undefined', () => {
      const schemaWithDefault = optionalNumeric({ default: 99 });
      const resultWithDefault = schemaWithDefault.safeParse(null);
      expect(resultWithDefault.success).toBe(true);
      if (resultWithDefault.success) {
        expect(resultWithDefault.data).toBe(99);
      }

      const schemaWithoutDefault = optionalNumeric();
      const resultWithoutDefault = schemaWithoutDefault.safeParse(null);
      expect(resultWithoutDefault.success).toBe(true);
      if (resultWithoutDefault.success) {
        expect(resultWithoutDefault.data).toBeUndefined();
      }
    });

    it('should fail for a non-numeric string', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse('abc');
      expect(result.success).toBe(false);
    });

    it('should enforce integer constraint', () => {
      const schema = optionalNumeric({ integer: true });
      expect(schema.safeParse('123').success).toBe(true);
      const floatResult = schema.safeParse('123.45');
      expect(floatResult.success).toBe(false);
      if (!floatResult.success) {
        expect(floatResult.error.issues[0].message).toBe('Invalid input: expected int, received number');
      }
    });

    it('should enforce positive constraint', () => {
      const schema = optionalNumeric({ positive: true });
      expect(schema.safeParse('1').success).toBe(true);
      const zeroResult = schema.safeParse('0');
      expect(zeroResult.success).toBe(false);
      if (!zeroResult.success) {
        expect(zeroResult.error.issues[0].message).toBe('Too small: expected number to be >0');
      }
    });

    it('should enforce non-negative constraint', () => {
      const schema = optionalNumeric({ nonnegative: true });
      expect(schema.safeParse('0').success).toBe(true);
      const negativeResult = schema.safeParse('-1');
      expect(negativeResult.success).toBe(false);
      if (!negativeResult.success) {
        expect(negativeResult.error.issues[0].message).toBe('Too small: expected number to be >=0');
      }
    });

    it('should enforce min and max constraints', () => {
      const schema = optionalNumeric({ min: 10, max: 20 });
      expect(schema.safeParse('15').success).toBe(true);
      const tooSmallResult = schema.safeParse('9');
      expect(tooSmallResult.success).toBe(false);
      if (!tooSmallResult.success) {
        expect(tooSmallResult.error.issues[0].message).toBe('Too small: expected number to be >=10');
      }
      const tooLargeResult = schema.safeParse('21');
      expect(tooLargeResult.success).toBe(false);
      if (!tooLargeResult.success) {
        expect(tooLargeResult.error.issues[0].message).toBe('Too big: expected number to be <=20');
      }
    });
  });

  describe('optionalDate', () => {
    const schema = optionalDate('Invalid date format');

    it('should pass for a valid YYYY-MM-DD date string', () => {
      const result = schema.safeParse('2023-12-25');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe('2023-12-25');
      }
    });

    it('should pass for undefined (optional)', () => {
      expect(schema.safeParse(undefined).success).toBe(true);
    });

    it('should fail for an invalid date string', () => {
      expect(schema.safeParse('not-a-date').success).toBe(false);
    });
  });

  describe('optionalBoolean', () => {
    it('should return the default value if input is undefined', () => {
      const schema = optionalBoolean({ default: true });
      const result = schema.safeParse(undefined);
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(true);
      }
    });

    it('should return undefined if input is undefined and no default is set', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse(undefined);
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBeUndefined();
      }
    });

    it('should parse "true" string as true', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('true');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(true);
      }
    });

    it('should parse "false" string as false', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('false');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(false);
      }
    });

    it('should parse "1" as true', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('1');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(true);
      }
    });

    it('should parse "0" as false', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('0');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(false);
      }
    });

    it('should fail for other strings', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('not-a-boolean');
      expect(result.success).toBe(false);
    });

    it('should handle null input, returning default or undefined', () => {
      const schemaWithDefault = optionalBoolean({ default: false });
      const resultWithDefault = schemaWithDefault.safeParse(null);
      expect(resultWithDefault.success).toBe(true);
      if (resultWithDefault.success) {
        expect(resultWithDefault.data).toBe(false);
      }

      const schemaWithoutDefault = optionalBoolean();
      const resultWithoutDefault = schemaWithoutDefault.safeParse(null);
      expect(resultWithoutDefault.success).toBe(true);
      if (resultWithoutDefault.success) {
        expect(resultWithoutDefault.data).toBeUndefined();
      }
    });

    it('should handle empty string input, returning default or undefined', () => {
      const schemaWithDefault = optionalBoolean({ default: true });
      const resultWithDefault = schemaWithDefault.safeParse('');
      expect(resultWithDefault.success).toBe(true);
      if (resultWithDefault.success) {
        expect(resultWithDefault.data).toBe(true);
      }

      const schemaWithoutDefault = optionalBoolean();
      const resultWithoutDefault = schemaWithoutDefault.safeParse('');
      expect(resultWithoutDefault.success).toBe(true);
      if (resultWithoutDefault.success) {
        expect(resultWithoutDefault.data).toBeUndefined();
      }
    });

    it('should pass for an actual boolean value', () => {
      const schema = optionalBoolean();
      expect(schema.safeParse(true).success).toBe(true);
      expect(schema.safeParse(false).success).toBe(true);
    });
  });
});
116
src/utils/zodUtils.ts
Normal file
@@ -0,0 +1,116 @@
// src/utils/zodUtils.ts
import { z } from 'zod';

/**
 * A Zod schema for a required, non-empty string.
 * @param message The error message to display if the string is empty or missing.
 * @returns A Zod string schema.
 */
export const requiredString = (message: string) =>
  z.preprocess(
    // If the value is null or undefined, preprocess it to an empty string.
    // This ensures that the subsequent `.min(1)` check will catch missing required fields.
    (val) => val ?? '',
    // Now, validate that the (potentially preprocessed) value is a string with at least 1 character.
    z.string().min(1, message),
  );

/**
 * Creates a Zod schema for a numeric ID in request parameters.
 * @param paramName The name of the parameter (e.g., 'id').
 * @param message The error message for invalid input.
 * @returns A Zod object schema for the params.
 */
export const numericIdParam = (
  paramName: string,
  message = `Invalid ID for parameter '${paramName}'. Must be a number.`,
) =>
  z.object({
    params: z.object({
      [paramName]: z.coerce.number().int(message).positive(message),
    }),
  });

/**
 * Creates a Zod schema for a UUID in request parameters.
 * @param paramName The name of the parameter (e.g., 'id').
 * @param message The error message for invalid input.
 * @returns A Zod object schema for the params.
 */
export const uuidParamSchema = (paramName: string, message: string) =>
  z.object({
    params: z.object({
      [paramName]: z.string().uuid(message),
    }),
  });

/**
 * Creates a Zod schema for an optional, numeric query parameter that is coerced from a string.
 * @param options Configuration for the validation like default value, min/max, and integer constraints.
 * @returns A Zod schema for the number.
 */
export const optionalNumeric = (
  options: {
    default?: number;
    min?: number;
    max?: number;
    integer?: boolean;
    positive?: boolean;
    nonnegative?: boolean;
  } = {},
) => {
  let numberSchema = z.coerce.number();

  if (options.integer) numberSchema = numberSchema.int();
  if (options.positive) numberSchema = numberSchema.positive();
  else if (options.nonnegative) numberSchema = numberSchema.nonnegative();

  if (options.min !== undefined) numberSchema = numberSchema.min(options.min);
  if (options.max !== undefined) numberSchema = numberSchema.max(options.max);

  // Make the number schema optional *before* preprocessing. This allows it to correctly handle
  // the `undefined` value that our preprocessor generates from `null`.
  const optionalNumberSchema = numberSchema.optional();

  // This is crucial because z.coerce.number(null) results in 0, which bypasses
  // the .optional() and .default() logic for null inputs. We want null to be
  // treated as "not provided", just like undefined.
  const schema = z.preprocess((val) => (val === null ? undefined : val), optionalNumberSchema);

  if (options.default !== undefined) return schema.default(options.default);

  return schema;
};

/**
 * Creates a Zod schema for an optional date string in YYYY-MM-DD format.
 * @param message Optional custom error message.
 * @returns A Zod schema for the date string.
 */
export const optionalDate = (message?: string) => z.string().date(message).optional();

/**
 * Creates a Zod schema for an optional boolean query parameter that is coerced from a string.
 * Handles 'true', '1' as true and 'false', '0' as false.
 * @param options Configuration for the validation like default value.
 * @returns A Zod schema for the boolean.
 */
export const optionalBoolean = (
  options: {
    default?: boolean;
  } = {},
) => {
  const schema = z.preprocess((val) => {
    if (val === 'true' || val === '1') return true;
    if (val === 'false' || val === '0') return false;
    if (val === '' || val === null) return undefined; // Treat empty string and null as not present
    return val;
  }, z.boolean().optional());

  if (options.default !== undefined) {
    return schema.default(options.default);
  }

  return schema;
};
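To show how these helpers compose, here is a minimal sketch of a route validation schema; the route path, field names, and schema names below are illustrative assumptions, not definitions from this repository:

// Hypothetical composition of the zodUtils helpers into request schemas.
import { z } from 'zod';
import { requiredString, numericIdParam, optionalNumeric, optionalBoolean } from './zodUtils';

// GET /stores/:id?limit=20&includeInactive=true
export const getStoreFlyersSchema = numericIdParam('id').extend({
  query: z.object({
    limit: optionalNumeric({ default: 20, integer: true, positive: true, max: 100 }),
    includeInactive: optionalBoolean({ default: false }),
  }),
});

// POST body validation for a hypothetical rename endpoint.
export const renameListSchema = z.object({
  body: z.object({
    name: requiredString('A list name is required.'),
  }),
});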
26
vitest.config.e2e.ts
Normal file
@@ -0,0 +1,26 @@
import { defineConfig, mergeConfig } from 'vitest/config';
import integrationConfig from './vitest.config.integration';

const e2eConfig = mergeConfig(
  integrationConfig,
  defineConfig({
    test: {
      name: 'e2e',
      // Point specifically to E2E tests
      include: ['src/tests/e2e/**/*.e2e.test.ts'],
      // Increase timeout for E2E flows that involve AI or full API chains
      testTimeout: 120000,
      coverage: {
        reportsDirectory: '.coverage/e2e',
      },
    },
  }),
);

// Explicitly override the include array to ensure we don't inherit integration tests
// (mergeConfig might concatenate arrays by default)
if (e2eConfig.test) {
  e2eConfig.test.include = ['src/tests/e2e/**/*.e2e.test.ts'];
}

export default e2eConfig;