Compare commits

..

50 Commits

Author         SHA1        Message  Date  [deploy-to-test result]

Gitea Actions  08c320423c  ci: Bump version to 0.1.8 [skip ci]  2025-12-26 01:17:16 +05:00
               d2498065ed  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-25 12:16:33 -08:00  [deploy-to-test: cancelled]
               56dc96f418  more work on the BullMQ workers  2025-12-25 12:16:22 -08:00
Gitea Actions  4e9aa0efc3  ci: Bump version to 0.1.7 [skip ci]  2025-12-26 00:01:02 +05:00
               e5e4b1316c  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-25 11:00:21 -08:00  [deploy-to-test: failing after 39s]
               e8d511b4de  more work on the BullMQ workers  2025-12-25 10:59:35 -08:00
Gitea Actions  c4bbf5c251  ci: Bump version to 0.1.6 [skip ci]  2025-12-25 07:19:39 +05:00
               32a9e6732b  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 18:18:35 -08:00  [deploy-to-test: failing after 3h0m53s]
               e7c076e2ed  test repair  2025-12-24 18:18:28 -08:00
Gitea Actions  dbe8e72efe  ci: Bump version to 0.1.5 [skip ci]  2025-12-25 06:13:16 +05:00
               38bd193042  not sure why those errors got removed we'll see  2025-12-24 17:12:20 -08:00  [deploy-to-test: cancelled]
Gitea Actions  57215e2778  ci: Bump version to 0.1.4 [skip ci]  2025-12-25 06:04:17 +05:00
               2c1de24e9a  undo stupid logging change  2025-12-24 16:54:56 -08:00  [deploy-to-test: failing after 1m21s]
               c8baff7aac  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 16:17:45 -08:00
               de3f21a7ec  not sure why those errors got removed we'll see  2025-12-24 16:16:42 -08:00
Gitea Actions  c6adbf79e7  ci: Bump version to 0.1.3 [skip ci]  2025-12-25 02:26:17 +05:00
               7399a27600  add ai agent fallbacks  2025-12-24 13:25:18 -08:00  [deploy-to-test: failing after 3h14m13s]
Gitea Actions  68aadcaa4e  ci: Bump version to 0.1.2 [skip ci]  2025-12-25 01:41:06 +05:00
               971d2c3fa7  add ai agent fallbacks  2025-12-24 12:39:15 -08:00  [deploy-to-test: failing after 1m18s]
Gitea Actions  daaacfde5e  ci: Bump version to 0.1.1 [skip ci]  2025-12-24 23:53:27 +05:00
               7ac8fe1d29  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 10:52:13 -08:00  [deploy-to-test: running]
               a2462dfb6b  testing push to prod etc  2025-12-24 10:51:43 -08:00
Gitea Actions  a911224fb4  ci: Bump version to 0.1.0 for production release [skip ci]  2025-12-24 23:24:53 +05:00
Gitea Actions  bf4bcef890  ci: Bump version to 0.0.30 [skip ci]  2025-12-24 22:59:36 +05:00
               ac6cd2e0a1  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 09:58:45 -08:00  [deploy-to-test: successful in 16m10s]
               eea03880c1  exclude some dirs from coverage  2025-12-24 09:58:37 -08:00
Gitea Actions  7fc263691f  ci: Bump version to 0.0.29 [skip ci]  2025-12-24 22:41:17 +05:00
               c0912d36d5  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 09:39:56 -08:00  [deploy-to-test: successful in 15m59s]
               612c2b5943  deploy to test fix  2025-12-24 09:39:49 -08:00
Gitea Actions  8e787ddcf0  ci: Bump version to 0.0.28 [skip ci]  2025-12-24 22:18:18 +05:00
               11c52d284c  fixing unit tests  2025-12-24 09:17:09 -08:00  [deploy-to-test: successful in 15m13s]
Gitea Actions  b528bd3651  ci: Bump version to 0.0.27 [skip ci]  2025-12-24 22:06:03 +05:00
               4c5ceb1bd6  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 09:05:12 -08:00  [deploy-to-test: running]
               bcc4ad64dc  fixing unit tests  2025-12-24 09:04:10 -08:00
Gitea Actions  d520980322  ci: Bump version to 0.0.26 [skip ci]  2025-12-24 21:23:30 +05:00
               d79955aaa0  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 08:22:30 -08:00  [deploy-to-test: successful in 13m50s]
               e66027dc8e  fix e2e and deploy to prod  2025-12-24 08:21:35 -08:00
Gitea Actions  027df989a4  ci: Bump version to 0.0.25 [skip ci]  2025-12-24 12:50:52 +05:00
               d4d69caaf7  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-23 23:50:10 -08:00  [deploy-to-test: successful in 9m47s]
               03b5af39e1  consolidate some testing functions  2025-12-23 23:50:03 -08:00
Gitea Actions  8a86333f86  ci: Bump version to 0.0.24 [skip ci]  2025-12-24 10:50:48 +05:00
               f173f805ea  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-23 21:49:16 -08:00  [deploy-to-test: successful in 19m55s]
               d3b0996ad5  create a few initial E2E tests  2025-12-23 21:49:05 -08:00
Gitea Actions  b939262f0c  ci: Bump version to 0.0.23 [skip ci]  2025-12-24 10:18:28 +05:00
               9437f3d6c6  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-23 21:16:47 -08:00  [deploy-to-test: successful in 18m50s]
               f1e028d498  only one error left - huzzah !  2025-12-23 21:16:40 -08:00
Gitea Actions  5274650aea  ci: Bump version to 0.0.22 [skip ci]  2025-12-24 08:10:33 +05:00
               de5a9a565b  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-23 19:08:59 -08:00  [deploy-to-test: successful in 17m19s]
               10a379c5e3  fix for integration tests 404 ? not sure this is right  2025-12-23 19:08:53 -08:00
               a6a484d432  fix for integration tests 404 ? not sure this is right  2025-12-23 18:13:35 -08:00
81 changed files with 3776 additions and 1477 deletions

View File

@@ -47,6 +47,19 @@ jobs:
       - name: Install Dependencies
         run: npm ci
+      - name: Bump Minor Version and Push
+        run: |
+          # Configure git for the commit.
+          git config --global user.name 'Gitea Actions'
+          git config --global user.email 'actions@gitea.projectium.com'
+          # Bump the minor version number. This creates a new commit and a new tag.
+          # The commit message includes [skip ci] to prevent this push from triggering another workflow run.
+          npm version minor -m "ci: Bump version to %s for production release [skip ci]"
+          # Push the new commit and the new tag back to the main branch.
+          git push --follow-tags
       - name: Check for Production Database Schema Changes
         env:
           DB_HOST: ${{ secrets.DB_HOST }}
@@ -61,9 +74,10 @@
           echo "--- Checking for production schema changes ---"
           CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
           echo "Current Git Schema Hash: $CURRENT_HASH"
-          DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A || echo "none")
+          # The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
+          DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
           echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
-          if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
+          if [ -z "$DEPLOYED_HASH" ]; then
             echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
           elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
             echo "ERROR: Database schema mismatch detected! A manual database migration is required."
@@ -79,8 +93,9 @@
             exit 1
           fi
           GITEA_SERVER_URL="https://gitea.projectium.com"
-          COMMIT_MESSAGE=$(git log -1 --pretty=%s)
-          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
+          COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
+          PACKAGE_VERSION=$(node -p "require('./package.json').version")
+          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
           VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
           VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
           VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
@@ -148,7 +163,12 @@
           echo "Updating schema hash in production database..."
           CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
           PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
-            "INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
+            "CREATE TABLE IF NOT EXISTS public.schema_info (
+               environment VARCHAR(50) PRIMARY KEY,
+               schema_hash VARCHAR(64) NOT NULL,
+               deployed_at TIMESTAMP DEFAULT NOW()
+             );
+             INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
              ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
           UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
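
Note on the schema check above: the hash both workflows compare is nothing more than a SHA-256 of the line-ending-normalized rollup file. A minimal TypeScript sketch of the same computation, using only Node built-ins (illustrative, not part of this diff):

    // Node-side equivalent of `cat sql/master_schema_rollup.sql | dos2unix | sha256sum`.
    import { createHash } from 'node:crypto';
    import { readFileSync } from 'node:fs';

    function schemaHash(path: string): string {
      // dos2unix step: normalize CRLF to LF so the hash is identical across OSes.
      const sql = readFileSync(path, 'utf8').replace(/\r\n/g, '\n');
      return createHash('sha256').update(sql).digest('hex');
    }

    console.log(schemaHash('sql/master_schema_rollup.sql'));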

View File

@@ -119,6 +119,11 @@ jobs:
           # --- JWT Secret for Passport authentication in tests ---
           JWT_SECRET: ${{ secrets.JWT_SECRET }}
+          # --- V8 Coverage for Server Process ---
+          # This variable tells the Node.js process (our server, started by globalSetup)
+          # where to output its raw V8 coverage data.
+          NODE_V8_COVERAGE: '.coverage/tmp/integration-server'
           # --- Increase Node.js memory limit to prevent heap out of memory errors ---
           # This is crucial for memory-intensive tasks like running tests and coverage.
           NODE_OPTIONS: '--max-old-space-size=8192'
@@ -137,10 +142,15 @@
           # The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
           echo "--- Running Unit Tests ---"
           # npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
-          npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
+          npm run test:unit -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
           echo "--- Running Integration Tests ---"
-          npm run test:integration -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
+          npm run test:integration -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
+          echo "--- Running E2E Tests ---"
+          # Run E2E tests using the dedicated E2E config which inherits from integration config.
+          # We still pass --coverage to enable it, but directory and timeout are now in the config.
+          npx vitest run --config vitest.config.e2e.ts --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --no-file-parallelism || true

           # Re-enable secret masking for subsequent steps.
           echo "::secret-masking::"
@@ -156,6 +166,7 @@
           echo "Checking for source coverage files..."
           ls -l .coverage/unit/coverage-final.json
           ls -l .coverage/integration/coverage-final.json
+          ls -l .coverage/e2e/coverage-final.json || echo "E2E coverage file not found"

           # --- V8 Coverage Processing for Backend Server ---
           # The integration tests start the server, which generates raw V8 coverage data.
@@ -168,7 +179,7 @@
           # Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
           # We only generate the 'json' report here because it's all nyc needs for merging.
           echo "Server coverage report about to be generated..."
-          npx c8 report --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
+          npx c8 report --exclude='**/*.test.ts' --exclude='**/tests/**' --exclude='**/mocks/**' --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
           echo "Server coverage report generated. Verifying existence:"
           ls -l .coverage/integration-server/coverage-final.json
@@ -187,6 +198,7 @@
           # We give them unique names to be safe, though it's not strictly necessary.
           cp .coverage/unit/coverage-final.json "$NYC_SOURCE_DIR/unit-coverage.json"
           cp .coverage/integration/coverage-final.json "$NYC_SOURCE_DIR/integration-coverage.json"
+          cp .coverage/e2e/coverage-final.json "$NYC_SOURCE_DIR/e2e-coverage.json" || echo "E2E coverage file not found, skipping."
           # This file might not exist if integration tests fail early, so we add `|| true`
           cp .coverage/integration-server/coverage-final.json "$NYC_SOURCE_DIR/integration-server-coverage.json" || echo "Server coverage file not found, skipping."
           echo "Copied coverage files to source directory. Contents:"
@@ -206,7 +218,10 @@
             --reporter=text \
             --reporter=html \
             --report-dir .coverage/ \
-            --temp-dir "$NYC_SOURCE_DIR"
+            --temp-dir "$NYC_SOURCE_DIR" \
+            --exclude "**/*.test.ts" \
+            --exclude "**/tests/**" \
+            --exclude "**/mocks/**"

           # Re-enable secret masking for subsequent steps.
           echo "::secret-masking::"
@@ -257,18 +272,19 @@
           # We normalize line endings to ensure the hash is consistent across different OS environments.
           CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
           echo "Current Git Schema Hash: $CURRENT_HASH"

           # Query the production database to get the hash of the deployed schema.
           # The `psql` command requires PGPASSWORD to be set.
           # `\t` sets tuples-only mode and `\A` unaligns output to get just the raw value.
-          # The `|| echo "none"` ensures the command doesn't fail if the table or row doesn't exist yet.
-          DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A || echo "none")
+          # The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
+          DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A)
           echo "Deployed DB Schema Hash: $DEPLOYED_HASH"

           # Check if the hash is "none" (command failed) OR if it's an empty string (table exists but is empty).
-          if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
+          if [ -z "$DEPLOYED_HASH" ]; then
             echo "WARNING: No schema hash found in the test database."
             echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
+            echo "--- Debug: Dumping schema_info table ---"
+            PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT * FROM public.schema_info;" || true
+            echo "----------------------------------------"
             # We allow the deployment to continue, but a manual schema update is required.
             # You could choose to fail here by adding `exit 1`.
           elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
@@ -292,8 +308,9 @@
           fi
           GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
-          COMMIT_MESSAGE=$(git log -1 --pretty=%s)
-          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
+          COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
+          PACKAGE_VERSION=$(node -p "require('./package.json').version")
+          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
           VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
           VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
           VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
@@ -355,7 +372,7 @@
           echo "Installing production dependencies and restarting test server..."
           cd /var/www/flyer-crawler-test.projectium.com
-          npm install --omit=dev # Install only production dependencies
+          npm install --omit=dev
           # Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
           # It will START the process if it's not running, or RELOAD it if it is.
           # We also add `&& pm2 save` to persist the process list across server reboots.
@@ -367,7 +384,12 @@
           echo "Updating schema hash in test database..."
           CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
           PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
-            "INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
+            "CREATE TABLE IF NOT EXISTS public.schema_info (
+               environment VARCHAR(50) PRIMARY KEY,
+               schema_hash VARCHAR(64) NOT NULL,
+               deployed_at TIMESTAMP DEFAULT NOW()
+             );
+             INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
              ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"

           # Verify the hash was updated

View File

@@ -0,0 +1,181 @@
# .gitea/workflows/manual-deploy-major.yml
#
# This workflow provides a MANUAL trigger to perform a MAJOR version bump
# and deploy the application to the PRODUCTION environment.
name: Manual - Deploy Major Version to Production

on:
  workflow_dispatch:
    inputs:
      confirmation:
        description: 'Type "deploy-major-to-prod" to confirm you want to deploy a new major version.'
        required: true
        default: 'do-not-run'
      force_reload:
        description: 'Force PM2 reload even if version matches (true/false).'
        required: false
        type: boolean
        default: false

jobs:
  deploy-production-major:
    runs-on: projectium.com
    steps:
      - name: Verify Confirmation Phrase
        run: |
          if [ "${{ gitea.event.inputs.confirmation }}" != "deploy-major-to-prod" ]; then
            echo "ERROR: Confirmation phrase did not match. Aborting deployment."
            exit 1
          fi
          echo "✅ Confirmation accepted. Proceeding with major version production deployment."

      - name: Checkout Code from 'main' branch
        uses: actions/checkout@v3
        with:
          ref: 'main' # Explicitly check out the main branch for production deployment
          fetch-depth: 0

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '20'
          cache: 'npm'
          cache-dependency-path: '**/package-lock.json'

      - name: Install Dependencies
        run: npm ci

      - name: Bump Major Version and Push
        run: |
          # Configure git for the commit.
          git config --global user.name 'Gitea Actions'
          git config --global user.email 'actions@gitea.projectium.com'
          # Bump the major version number. This creates a new commit and a new tag.
          # The commit message includes [skip ci] to prevent this push from triggering another workflow run.
          npm version major -m "ci: Bump version to %s for major release [skip ci]"
          # Push the new commit and the new tag back to the main branch.
          git push --follow-tags

      - name: Check for Production Database Schema Changes
        env:
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
            echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
            exit 1
          fi
          echo "--- Checking for production schema changes ---"
          CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
          echo "Current Git Schema Hash: $CURRENT_HASH"
          # The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
          DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
          echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
          if [ -z "$DEPLOYED_HASH" ]; then
            echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
          elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
            echo "ERROR: Database schema mismatch detected! A manual database migration is required."
            exit 1
          else
            echo "✅ Schema is up to date. No changes detected."
          fi

      - name: Build React Application for Production
        run: |
          if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
            echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
            exit 1
          fi
          GITEA_SERVER_URL="https://gitea.projectium.com"
          COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
          PACKAGE_VERSION=$(node -p "require('./package.json').version")
          VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
          VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
          VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
          VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build

      - name: Deploy Application to Production Server
        run: |
          echo "Deploying application files to /var/www/flyer-crawler.projectium.com..."
          APP_PATH="/var/www/flyer-crawler.projectium.com"
          mkdir -p "$APP_PATH"
          mkdir -p "$APP_PATH/flyer-images/icons" "$APP_PATH/flyer-images/archive"
          rsync -avz --delete --exclude 'node_modules' --exclude '.git' --exclude 'dist' --exclude 'flyer-images' ./ "$APP_PATH/"
          rsync -avz dist/ "$APP_PATH"
          echo "Application deployment complete."

      - name: Install Backend Dependencies and Restart Production Server
        env:
          # --- Production Secrets Injection ---
          DB_HOST: ${{ secrets.DB_HOST }}
          DB_USER: ${{ secrets.DB_USER }}
          DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
          DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
          REDIS_URL: 'redis://localhost:6379'
          REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
          FRONTEND_URL: 'https://flyer-crawler.projectium.com'
          JWT_SECRET: ${{ secrets.JWT_SECRET }}
          GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
          GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
          SMTP_HOST: 'localhost'
          SMTP_PORT: '1025'
          SMTP_SECURE: 'false'
          SMTP_USER: ''
          SMTP_PASS: ''
          SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
        run: |
          if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
            echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
            exit 1
          fi
          echo "Installing production dependencies and restarting server..."
          cd /var/www/flyer-crawler.projectium.com
          npm install --omit=dev
          # --- Version Check Logic ---
          # Get the version from the newly deployed package.json
          NEW_VERSION=$(node -p "require('./package.json').version")
          echo "Deployed Package Version: $NEW_VERSION"
          # Get the running version from PM2 for the main API process
          # We use a small node script to parse the JSON output from pm2 jlist
          RUNNING_VERSION=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.version : ''); } catch(e) { console.log(''); }")
          echo "Running PM2 Version: $RUNNING_VERSION"
          if [ "${{ gitea.event.inputs.force_reload }}" == "true" ] || [ "$NEW_VERSION" != "$RUNNING_VERSION" ] || [ -z "$RUNNING_VERSION" ]; then
            if [ "${{ gitea.event.inputs.force_reload }}" == "true" ]; then
              echo "Force reload triggered by manual input. Reloading PM2..."
            else
              echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
            fi
            pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
            echo "Production backend server reloaded successfully."
          else
            echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
          fi
          echo "Updating schema hash in production database..."
          CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
          PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
            "INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
             ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
          UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
          if [ "$CURRENT_HASH" = "$UPDATED_HASH" ]; then
            echo "✅ Schema hash successfully updated in the database to: $UPDATED_HASH"
          else
            echo "ERROR: Failed to update schema hash in the database."
          fi

      - name: Show PM2 Environment for Production
        run: |
          echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
          sleep 5
          pm2 describe flyer-crawler-api || echo "Could not find production pm2 process."
          pm2 logs flyer-crawler-api --lines 20 --nostream || echo "Could not find production pm2 process."
          pm2 env flyer-crawler-api || echo "Could not find production pm2 process."
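
The version check in this workflow shells out to a node one-liner to read the running version from `pm2 jlist`. Expanded into readable TypeScript, the one-liner does exactly this (same logic, reformatted for clarity; the pm2_env shape is what pm2 jlist emits):

    // Readable expansion of the `pm2 jlist | node -e ...` one-liner above.
    import { readFileSync } from 'node:fs';

    interface Pm2Process {
      name: string;
      pm2_env: { version?: string };
    }

    try {
      // pm2 jlist prints a JSON array of process descriptions; read it from stdin (fd 0).
      const list: Pm2Process[] = JSON.parse(readFileSync(0, 'utf-8'));
      const app = list.find((p) => p.name === 'flyer-crawler-api');
      console.log(app ? app.pm2_env.version : '');
    } catch {
      console.log(''); // any parse failure falls back to "no running version", which forces a reload
    }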

View File

@@ -18,48 +18,222 @@ module.exports = {
         NODE_ENV: 'production', // Set the Node.js environment to production
         name: 'flyer-crawler-api',
         cwd: '/var/www/flyer-crawler.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
       // Test Environment Settings
       env_test: {
-        NODE_ENV: 'development', // Use 'development' for test to enable more verbose logging if needed
+        NODE_ENV: 'test', // Set to 'test' to match the environment purpose and disable pino-pretty
         name: 'flyer-crawler-api-test',
         cwd: '/var/www/flyer-crawler-test.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+      },
+      // Development Environment Settings
+      env_development: {
+        NODE_ENV: 'development',
+        name: 'flyer-crawler-api-dev',
+        watch: true,
+        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
     },
     {
       // --- General Worker ---
       name: 'flyer-crawler-worker',
       script: './node_modules/.bin/tsx',
-      args: 'src/services/queueService.server.ts', // tsx will execute this file
+      args: 'src/worker.ts', // tsx will execute this file
       // Production Environment Settings
       env_production: {
         NODE_ENV: 'production',
         name: 'flyer-crawler-worker',
         cwd: '/var/www/flyer-crawler.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
       // Test Environment Settings
       env_test: {
-        NODE_ENV: 'development',
+        NODE_ENV: 'test',
         name: 'flyer-crawler-worker-test',
         cwd: '/var/www/flyer-crawler-test.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+      },
+      // Development Environment Settings
+      env_development: {
+        NODE_ENV: 'development',
+        name: 'flyer-crawler-worker-dev',
+        watch: true,
+        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
     },
     {
       // --- Analytics Worker ---
       name: 'flyer-crawler-analytics-worker',
       script: './node_modules/.bin/tsx',
-      args: 'src/services/queueService.server.ts', // tsx will execute this file
+      args: 'src/worker.ts', // tsx will execute this file
       // Production Environment Settings
       env_production: {
         NODE_ENV: 'production',
         name: 'flyer-crawler-analytics-worker',
         cwd: '/var/www/flyer-crawler.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
       // Test Environment Settings
       env_test: {
-        NODE_ENV: 'development',
+        NODE_ENV: 'test',
         name: 'flyer-crawler-analytics-worker-test',
         cwd: '/var/www/flyer-crawler-test.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+      },
+      // Development Environment Settings
+      env_development: {
+        NODE_ENV: 'development',
+        name: 'flyer-crawler-analytics-worker-dev',
+        watch: true,
+        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
     },
   ],
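
The nine env blocks above repeat the same sixteen process.env passthroughs verbatim. If that duplication ever becomes a maintenance burden, the blocks could share one object via spread; a sketch only, this is not what the config does today:

    // Sketch: hoisting the repeated passthroughs into a shared object.
    const sharedEnv = {
      DB_HOST: process.env.DB_HOST,
      DB_USER: process.env.DB_USER,
      DB_PASSWORD: process.env.DB_PASSWORD,
      DB_NAME: process.env.DB_NAME,
      REDIS_URL: process.env.REDIS_URL,
      REDIS_PASSWORD: process.env.REDIS_PASSWORD,
      FRONTEND_URL: process.env.FRONTEND_URL,
      JWT_SECRET: process.env.JWT_SECRET,
      GEMINI_API_KEY: process.env.GEMINI_API_KEY,
      GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
      SMTP_HOST: process.env.SMTP_HOST,
      SMTP_PORT: process.env.SMTP_PORT,
      SMTP_SECURE: process.env.SMTP_SECURE,
      SMTP_USER: process.env.SMTP_USER,
      SMTP_PASS: process.env.SMTP_PASS,
      SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
    };

    module.exports = {
      apps: [
        {
          name: 'flyer-crawler-api',
          env_production: {
            NODE_ENV: 'production',
            name: 'flyer-crawler-api',
            cwd: '/var/www/flyer-crawler.projectium.com',
            ...sharedEnv,
          },
          // env_test and env_development would spread sharedEnv the same way.
        },
      ],
    };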

package-lock.json generated
View File

@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.0.21",
+  "version": "0.1.8",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.0.21",
+      "version": "0.1.8",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",
@@ -42,7 +42,7 @@
         "recharts": "^3.4.1",
         "sharp": "^0.34.5",
         "tsx": "^4.20.6",
-        "zod": "^4.1.13",
+        "zod": "^4.2.1",
         "zxcvbn": "^4.4.2"
       },
       "devDependencies": {

View File

@@ -1,7 +1,7 @@
{ {
"name": "flyer-crawler", "name": "flyer-crawler",
"private": true, "private": true,
"version": "0.0.21", "version": "0.1.8",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"", "dev": "concurrently \"npm:start:dev\" \"vite\"",
@@ -61,7 +61,7 @@
"recharts": "^3.4.1", "recharts": "^3.4.1",
"sharp": "^0.34.5", "sharp": "^0.34.5",
"tsx": "^4.20.6", "tsx": "^4.20.6",
"zod": "^4.1.13", "zod": "^4.2.1",
"zxcvbn": "^4.4.2" "zxcvbn": "^4.4.2"
}, },
"devDependencies": { "devDependencies": {

View File

@@ -36,7 +36,7 @@ vi.mock('pdfjs-dist', () => ({
 // Mock the new config module
 vi.mock('./config', () => ({
   default: {
-    app: { version: '1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
+    app: { version: '20250101-1200:abc1234:1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
     google: { mapsEmbedApiKey: 'mock-key' },
   },
 }));
@@ -588,11 +588,11 @@ describe('App Component', () => {
     // Mock the config module for this specific test
     vi.mock('./config', () => ({
       default: {
-        app: { version: '1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
+        app: { version: '20250101-1200:abc1234:1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
         google: { mapsEmbedApiKey: 'mock-key' },
       },
     }));
-    localStorageMock.setItem('lastSeenVersion', '1.0.0');
+    localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:1.0.0');
     renderApp();
     await expect(screen.findByTestId('whats-new-modal-mock')).resolves.toBeInTheDocument();
   });
@@ -741,7 +741,7 @@ describe('App Component', () => {
     vi.mock('./config', () => ({
       default: {
         app: {
-          version: '2.0.0',
+          version: '20250101-1200:abc1234:2.0.0',
           commitMessage: 'A new version!',
           commitUrl: 'http://example.com/commit/2.0.0',
         },
@@ -752,14 +752,14 @@ describe('App Component', () => {
   it('should display the version number and commit link', () => {
     renderApp();
-    const versionLink = screen.getByText(`Version: 2.0.0`);
+    const versionLink = screen.getByText(`Version: 20250101-1200:abc1234:2.0.0`);
     expect(versionLink).toBeInTheDocument();
     expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
   });

   it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {
     // Pre-set the localStorage to prevent the modal from opening automatically
-    localStorageMock.setItem('lastSeenVersion', '2.0.0');
+    localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:2.0.0');
     renderApp();
     expect(screen.queryByTestId('whats-new-modal-mock')).not.toBeInTheDocument();
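
These test changes pin down the "What's New" gating contract: the full composite string (build timestamp : short SHA : package version) is what gets stored in and compared against localStorage's lastSeenVersion, not the bare semver. A sketch of the comparison the tests imply (the component itself is not in this diff):

    // Hypothetical gating logic inferred from the App tests above.
    // config.app.version is the composite "YYYYMMDD-HHMM:<short-sha>:<pkg-version>"
    // string assembled in the CI build steps.
    function shouldShowWhatsNew(currentVersion: string): boolean {
      const lastSeen = localStorage.getItem('lastSeenVersion');
      // Any change to the composite (new build, new commit, or new package
      // version) differs from the stored value and re-opens the modal.
      return lastSeen !== currentVersion;
    }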

View File

@@ -44,7 +44,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
         })
         .catch((err) => {
           console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
-          logger.error('Failed to fetch image for correction tool', { error: err });
+          logger.error({ error: err }, 'Failed to fetch image for correction tool');
           notifyError('Could not load the image for correction.');
         });
     }
@@ -164,7 +164,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
       const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
       console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
       notifyError(msg);
-      logger.error('Error during rescan:', { error: err });
+      logger.error({ error: err }, 'Error during rescan:');
     } finally {
       console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
       setIsProcessing(false);

View File

@@ -73,12 +73,11 @@ describe('FlyerUploader', () => {
   it('should handle file upload and start polling', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Checking...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Checking...' },
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.');
     renderComponent();
@@ -131,12 +130,11 @@ describe('FlyerUploader', () => {
   it('should handle file upload via drag and drop', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-dnd' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Dropped...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-dnd' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Dropped...' },
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.');
     renderComponent();
@@ -159,16 +157,10 @@ describe('FlyerUploader', () => {
   it('should poll for status, complete successfully, and redirect', async () => {
     const onProcessingComplete = vi.fn();
     console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus
-      .mockResolvedValueOnce(
-        new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
-      )
-      .mockResolvedValueOnce(
-        new Response(JSON.stringify({ state: 'completed', returnValue: { flyerId: 42 } })),
-      );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
+    mockedAiApiClient.getJobStatus
+      .mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
+      .mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });

     console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
     renderComponent(onProcessingComplete);
@@ -229,12 +221,11 @@ describe('FlyerUploader', () => {
   it('should handle a failed job', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-fail' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'failed', failedReason: 'AI model exploded' })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'failed',
+      failedReason: 'AI model exploded',
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -260,11 +251,82 @@ describe('FlyerUploader', () => {
     console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.');
   });

+  it('should clear the polling timeout when a job fails', async () => {
+    const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
+    console.log('--- [TEST LOG] ---: 1. Setting up mocks for failed job timeout clearance.');
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail-timeout' });
+    // We need at least one 'active' response to establish a timeout loop so we have something to clear
+    mockedAiApiClient.getJobStatus
+      .mockResolvedValueOnce({ state: 'active', progress: { message: 'Working...' } })
+      .mockResolvedValueOnce({ state: 'failed', failedReason: 'Fatal Error' });
+
+    renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+    fireEvent.change(input, { target: { files: [file] } });
+
+    // Wait for the first poll to complete and UI to update to "Working..."
+    await screen.findByText('Working...');
+
+    // Advance time to trigger the second poll
+    await act(async () => {
+      vi.advanceTimersByTime(3000);
+    });
+
+    // Wait for the failure UI
+    await screen.findByText(/Processing failed: Fatal Error/i);
+
+    // Verify clearTimeout was called
+    expect(clearTimeoutSpy).toHaveBeenCalled();
+
+    // Verify no further polling occurs
+    const callsBefore = mockedAiApiClient.getJobStatus.mock.calls.length;
+    await act(async () => {
+      vi.advanceTimersByTime(10000);
+    });
+    expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBefore);
+
+    clearTimeoutSpy.mockRestore();
+  });
+
+  it('should clear the polling timeout when the component unmounts', async () => {
+    const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
+    console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount timeout clearance.');
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-unmount' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Polling...' },
+    });
+
+    const { unmount } = renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+    fireEvent.change(input, { target: { files: [file] } });
+
+    // Wait for the first poll to complete and the UI to show the polling state
+    await screen.findByText('Polling...');
+
+    // Now that we are in a polling state (and a timeout is set), unmount the component
+    console.log('--- [TEST LOG] ---: 2. Unmounting component to trigger cleanup effect.');
+    unmount();
+
+    // Verify that the cleanup function in the useEffect hook was called
+    expect(clearTimeoutSpy).toHaveBeenCalled();
+    console.log('--- [TEST LOG] ---: 3. clearTimeout confirmed.');
+
+    clearTimeoutSpy.mockRestore();
+  });
+
   it('should handle a duplicate flyer error (409)', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ flyerId: 99, message: 'Duplicate' }), { status: 409 }),
-    );
+    // The API client now throws a structured error for non-2xx responses.
+    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
+      status: 409,
+      body: { flyerId: 99, message: 'Duplicate' },
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -295,12 +357,11 @@ describe('FlyerUploader', () => {
   it('should allow the user to stop watching progress', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-stop' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-stop' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Analyzing...' },
+    } as any);

     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -362,9 +423,11 @@ describe('FlyerUploader', () => {
   it('should handle a generic network error during upload', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue(
-      new Error('Network Error During Upload'),
-    );
+    // Simulate a structured error from the API client
+    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
+      status: 500,
+      body: { message: 'Network Error During Upload' },
+    });

     renderComponent();
     const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
     const input = screen.getByLabelText(/click to select a file/i);
@@ -379,9 +442,7 @@ describe('FlyerUploader', () => {
   it('should handle a generic network error during polling', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-poll-fail' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-poll-fail' });
     mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error'));

     renderComponent();
@@ -398,11 +459,9 @@ describe('FlyerUploader', () => {
   it('should handle a completed job with a missing flyerId', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-no-flyerid' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-no-flyerid' });
     mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'completed', returnValue: {} })), // No flyerId
+      { state: 'completed', returnValue: {} }, // No flyerId
     );

     renderComponent();
@@ -419,6 +478,27 @@ describe('FlyerUploader', () => {
     console.log('--- [TEST LOG] ---: 4. Assertions passed.');
   });

+  it('should handle a non-JSON response during polling', async () => {
+    console.log('--- [TEST LOG] ---: 1. Setting up mock for non-JSON response.');
+    // The actual function would throw, so we mock the rejection.
+    // The new getJobStatus would throw an error like "Failed to parse JSON..."
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-bad-json' });
+    mockedAiApiClient.getJobStatus.mockRejectedValue(
+      new Error('Failed to parse JSON response from server. Body: <html>502 Bad Gateway</html>'),
+    );
+    renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+    console.log('--- [TEST LOG] ---: 2. Firing file change event.');
+    fireEvent.change(input, { target: { files: [file] } });
+    console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
+    expect(await screen.findByText(/Failed to parse JSON response from server/i)).toBeInTheDocument();
+    console.log('--- [TEST LOG] ---: 4. Assertions passed.');
+  });
+
   it('should do nothing if the file input is cancelled', () => {
     renderComponent();
     const input = screen.getByLabelText(/click to select a file/i);
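Note on the tests above: they assume an aiApiClient that parses JSON itself and rejects with a plain { status, body } object for non-2xx responses. A minimal sketch of such a wrapper, with illustrative names (postJson, StructuredApiError) that are not part of this changeset:

// Hypothetical sketch of the client behaviour the tests mock: parse JSON on
// success, reject with a structured { status, body } object on failure.
interface StructuredApiError {
  status: number;
  body: { message?: string; [key: string]: unknown };
}

async function postJson<T>(url: string, init: RequestInit): Promise<T> {
  const response = await fetch(url, init);
  const text = await response.text();
  let body: any;
  try {
    body = text ? JSON.parse(text) : {};
  } catch {
    // Mirrors the "non-JSON response" test: surface the raw body for debugging.
    throw new Error(`Failed to parse JSON response from server. Body: ${text}`);
  }
  if (!response.ok) {
    // Callers can branch on err.status (e.g. 409 duplicate) and err.body.
    const err: StructuredApiError = { status: response.status, body };
    throw err;
  }
  return body as T;
}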

View File

@@ -60,14 +60,8 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
     const pollStatus = async () => {
       console.debug(`[DEBUG] pollStatus(): Polling for jobId: ${jobId}`);
       try {
-        const statusResponse = await getJobStatus(jobId);
-        console.debug(`[DEBUG] pollStatus(): API response status: ${statusResponse.status}`);
-        if (!statusResponse.ok) {
-          throw new Error(`Failed to get job status (HTTP ${statusResponse.status})`);
-        }
-        const job = await statusResponse.json();
-        console.debug('[DEBUG] pollStatus(): Job status received:', job);
+        const job = await getJobStatus(jobId); // Now returns parsed JSON directly
+        console.debug('[DEBUG] pollStatus(): Job status received:', job);
+        // The rest of the logic remains the same
         if (job.progress) {
           setProcessingStages(job.progress.stages || []);
@@ -97,7 +91,13 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
             console.debug(
               `[DEBUG] pollStatus(): Job state is "failed". Reason: ${job.failedReason}`,
             );
+            // Explicitly clear any pending timeout to stop the polling loop immediately.
+            if (pollingTimeoutRef.current) {
+              clearTimeout(pollingTimeoutRef.current);
+            }
             setErrorMessage(`Processing failed: ${job.failedReason || 'Unknown error'}`);
+            // Clear any stale "in-progress" messages to avoid user confusion.
+            setStatusMessage(null);
             setProcessingState('error');
             break;
@@ -112,7 +112,7 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
             break;
         }
       } catch (error) {
-        logger.error('Error during polling:', { error });
+        logger.error({ error }, 'Error during polling:');
         setErrorMessage(
           error instanceof Error ? error.message : 'An unexpected error occurred during polling.',
         );
@@ -150,29 +150,24 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
         `[DEBUG] processFile(): Checksum generated: ${checksum}. Calling uploadAndProcessFlyer.`,
       );
-      const startResponse = await uploadAndProcessFlyer(file, checksum);
-      console.debug(`[DEBUG] processFile(): Upload response status: ${startResponse.status}`);
-      if (!startResponse.ok) {
-        const errorData = await startResponse.json();
-        console.debug('[DEBUG] processFile(): Upload failed. Error data:', errorData);
-        if (startResponse.status === 409 && errorData.flyerId) {
-          setErrorMessage(`This flyer has already been processed. You can view it here:`);
-          setDuplicateFlyerId(errorData.flyerId);
-        } else {
-          setErrorMessage(errorData.message || `Upload failed with status ${startResponse.status}`);
-        }
-        setProcessingState('error');
-        return;
-      }
-      const { jobId: newJobId } = await startResponse.json();
+      // The API client now returns parsed JSON on success or throws a structured error on failure.
+      const { jobId: newJobId } = await uploadAndProcessFlyer(file, checksum);
       console.debug(`[DEBUG] processFile(): Upload successful. Received jobId: ${newJobId}`);
       setJobId(newJobId);
       setProcessingState('polling');
-    } catch (error) {
-      logger.error('An unexpected error occurred during file upload:', { error });
-      setErrorMessage(error instanceof Error ? error.message : 'An unexpected error occurred.');
+    } catch (error: any) {
+      // Handle the structured error thrown by the API client.
+      logger.error({ error }, 'An error occurred during file upload:');
+      // Handle 409 Conflict for duplicate flyers
+      if (error?.status === 409 && error.body?.flyerId) {
+        setErrorMessage(`This flyer has already been processed. You can view it here:`);
+        setDuplicateFlyerId(error.body.flyerId);
+      } else {
+        // Handle other errors (e.g., validation, server errors)
+        const message =
+          error?.body?.message || error?.message || 'An unexpected error occurred during upload.';
+        setErrorMessage(message);
+      }
       setProcessingState('error');
     }
   }, []);
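The failed-job branch above cancels the pending timer through pollingTimeoutRef so the loop stops immediately. As a standalone illustration of that timeout-ref pattern (the hook name usePolling and its signature are hypothetical, not from this changeset):

import { useEffect, useRef } from 'react';

// Hypothetical sketch: keep the pending timer in a ref so that both a failed
// job and an unmount can clear it.
function usePolling(poll: () => Promise<boolean>, intervalMs = 2000) {
  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  useEffect(() => {
    let cancelled = false;
    const tick = async () => {
      const keepGoing = await poll(); // poll() returns false to stop (failed/completed)
      if (!cancelled && keepGoing) {
        timeoutRef.current = setTimeout(tick, intervalMs);
      }
    };
    tick();
    return () => {
      cancelled = true;
      // Same cleanup the component performs when a job fails: clear the pending timer.
      if (timeoutRef.current) clearTimeout(timeoutRef.current);
    };
  }, [poll, intervalMs]);
}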

View File

@@ -1,94 +1,68 @@
 // src/middleware/errorHandler.ts
 import { Request, Response, NextFunction } from 'express';
+import { ZodError } from 'zod';
 import {
-  DatabaseError,
-  UniqueConstraintError,
   ForeignKeyConstraintError,
   NotFoundError,
+  UniqueConstraintError,
   ValidationError,
-  ValidationIssue,
 } from '../services/db/errors.db';
-import crypto from 'crypto';
+import { logger } from '../services/logger.server';

-interface HttpError extends Error {
-  status?: number;
-}
-
-export const errorHandler = (err: HttpError, req: Request, res: Response, next: NextFunction) => {
-  // If the response headers have already been sent, we must delegate to the default Express error handler.
+/**
+ * A centralized error handling middleware for the Express application.
+ * This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
+ *
+ * It standardizes error responses and ensures consistent logging.
+ */
+export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
+  // If headers have already been sent, delegate to the default Express error handler.
   if (res.headersSent) {
     return next(err);
   }

-  // The pino-http middleware guarantees that `req.log` will be available.
-  const log = req.log;
+  // Use the request-scoped logger if available, otherwise fall back to the global logger.
+  const log = req.log || logger;

-  // --- 1. Determine Final Status Code and Message ---
-  let statusCode = err.status ?? 500;
-  const message = err.message;
-  let validationIssues: ValidationIssue[] | undefined;
-  let errorId: string | undefined;
+  // --- Handle Zod Validation Errors ---
+  if (err instanceof ZodError) {
+    log.warn({ err: err.flatten() }, 'Request validation failed');
+    return res.status(400).json({
+      message: 'The request data is invalid.',
+      errors: err.issues.map((e) => ({ path: e.path, message: e.message })),
+    });
+  }

+  // --- Handle Custom Operational Errors ---
+  if (err instanceof NotFoundError) {
+    log.info({ err }, 'Resource not found');
+    return res.status(404).json({ message: err.message });
+  }
+  if (err instanceof ValidationError) {
+    log.warn({ err }, 'Validation error occurred');
+    return res.status(400).json({ message: err.message, errors: err.validationErrors });
+  }
-  // Refine the status code for known error types. Check for most specific types first.
   if (err instanceof UniqueConstraintError) {
-    statusCode = 409; // Conflict
-  } else if (err instanceof NotFoundError) {
-    statusCode = 404;
-  } else if (err instanceof ForeignKeyConstraintError) {
-    statusCode = 400;
-  } else if (err instanceof ValidationError) {
-    statusCode = 400;
-    validationIssues = err.validationErrors;
-  } else if (err instanceof DatabaseError) {
-    // This is a generic fallback for other database errors that are not the specific subclasses above.
-    statusCode = err.status;
-  } else if (err.name === 'UnauthorizedError') {
-    statusCode = err.status || 401;
+    log.warn({ err }, 'Constraint error occurred');
+    return res.status(409).json({ message: err.message }); // Use 409 Conflict for unique constraints
   }
+  if (err instanceof ForeignKeyConstraintError) {
+    log.warn({ err }, 'Foreign key constraint violation');
+    return res.status(400).json({ message: err.message });
+  }

-  // --- 2. Log Based on Final Status Code ---
-  // Log the full error details for debugging, especially for server errors.
-  if (statusCode >= 500) {
-    errorId = crypto.randomBytes(4).toString('hex');
-    // The request-scoped logger already contains user, IP, and request_id.
-    // We add the full error and the request object itself.
-    // Pino's `redact` config will automatically sanitize sensitive fields in `req`.
-    log.error(
-      {
-        err,
-        errorId,
-        req: { method: req.method, url: req.originalUrl, headers: req.headers, body: req.body },
-      },
-      `Unhandled API Error (ID: ${errorId})`,
-    );
-  } else {
-    // For 4xx errors, log at a lower level (e.g., 'warn') to avoid flooding error trackers.
-    // We include the validation errors in the log context if they exist.
-    log.warn(
-      {
-        err,
-        validationErrors: validationIssues, // Add validation issues to the log object
-        statusCode,
-      },
-      `Client Error on ${req.method} ${req.path}: ${message}`,
-    );
-  }
-
-  // --- TEST ENVIRONMENT DEBUGGING ---
-  if (process.env.NODE_ENV === 'test') {
-    console.error('--- [TEST] UNHANDLED ERROR ---', err);
-  }
-
-  // --- 3. Send Response ---
-  // In production, send a generic message for 5xx errors.
-  // In dev/test, send the actual error message for easier debugging.
-  const responseMessage =
-    statusCode >= 500 && process.env.NODE_ENV === 'production'
-      ? `An unexpected server error occurred. Please reference error ID: ${errorId}`
-      : message;
-
-  res.status(statusCode).json({
-    message: responseMessage,
-    ...(validationIssues && { errors: validationIssues }), // Conditionally add the 'errors' array if it exists
-  });
-};
+  // --- Handle Generic Errors ---
+  // Log the full error object for debugging. The pino logger will handle redaction.
+  log.error({ err }, 'An unhandled error occurred in an Express route');
+  // In production, send a generic message to avoid leaking implementation details.
+  if (process.env.NODE_ENV === 'production') {
+    return res.status(500).json({ message: 'An internal server error occurred.' });
+  }
+  // In development, send more details for easier debugging.
+  return res.status(500).json({ message: err.message, stack: err.stack });
+};
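Because the rewritten handler now standardizes the responses that the per-test app.use shims used to fake (removed in the test files below), it must be mounted after every router. A wiring sketch under assumed file paths and router names:

// Hypothetical wiring sketch: the centralized errorHandler must be registered
// after all routes so that next(error) calls from any router land here.
import express from 'express';
import { errorHandler } from './middleware/errorHandler';
import adminRouter from './routes/admin.routes';
import aiRouter from './routes/ai.routes';

const app = express();
app.use(express.json());
app.use('/api/admin', adminRouter);
app.use('/api/ai', aiRouter);

// LAST: any error passed to next(err) in the routers above is handled here.
app.use(errorHandler);

export default app;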

View File

@@ -15,6 +15,11 @@ import { NotFoundError } from '../services/db/errors.db'; // This can stay, it's
 import { createTestApp } from '../tests/utils/createTestApp';
 import { mockLogger } from '../tests/utils/mockLogger';

+// Mock the file upload middleware to allow testing the controller's internal check
+vi.mock('../middleware/fileUpload.middleware', () => ({
+  requireFileUpload: () => (req: Request, res: Response, next: NextFunction) => next(),
+}));
+
 vi.mock('../lib/queue', () => ({
   serverAdapter: {
     getRouter: () => (req: Request, res: Response, next: NextFunction) => next(), // Return a dummy express handler
@@ -125,12 +130,6 @@ describe('Admin Content Management Routes (/api/admin)', () => {
     authenticatedUser: adminUser,
   });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
-
   beforeEach(() => {
     vi.clearAllMocks();
   });
@@ -262,7 +261,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
     const response = await supertest(app).post('/api/admin/brands/55/logo');
     expect(response.status).toBe(400);
     expect(response.body.message).toMatch(
-      /Logo image file is required|The request data is invalid/,
+      /Logo image file is required|The request data is invalid|Logo image file is missing./,
     );
   });

View File

@@ -97,12 +97,6 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
     authenticatedUser: adminUser,
   });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
-
   beforeEach(() => {
     vi.clearAllMocks();
   });
@@ -248,6 +242,17 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
     expect(response.status).toBe(400);
   });

+  it('should return 404 if the queue name is valid but not in the retry map', async () => {
+    const queueName = 'weekly-analytics-reporting'; // This is in the Zod enum but not the queueMap
+    const jobId = 'some-job-id';
+    const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
+    // The route throws a NotFoundError, which the error handler should convert to a 404.
+    expect(response.status).toBe(404);
+    expect(response.body.message).toBe(`Queue 'weekly-analytics-reporting' not found.`);
+  });
+
   it('should return 404 if the job ID is not found in the queue', async () => {
     vi.mocked(flyerQueue.getJob).mockResolvedValue(undefined);
     const response = await supertest(app).post(
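The new retry test targets the gap between the Zod enum of queue names and the route's queueMap. A plausible sketch of that lookup, with map contents inferred from the queues named elsewhere in this diff (not confirmed by the source):

import { NotFoundError } from '../services/db/errors.db';
import { flyerQueue, emailQueue, analyticsQueue, cleanupQueue } from '../services/queueService.server';

// Hypothetical sketch: weekly-analytics-reporting is deliberately absent, so a
// valid enum value can still miss the map and surface as a 404.
const queueMap = {
  'flyer-processing': flyerQueue,
  'email-sending': emailQueue,
  'analytics-reporting': analyticsQueue,
  'file-cleanup': cleanupQueue,
} as const;

function getQueueOrThrow(queueName: string) {
  const queue = queueMap[queueName as keyof typeof queueMap];
  if (!queue) {
    // The centralized errorHandler converts NotFoundError into a 404 response.
    throw new NotFoundError(`Queue '${queueName}' not found.`);
  }
  return queue;
}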

View File

@@ -5,7 +5,16 @@ import type { Request, Response, NextFunction } from 'express';
 import { createMockUserProfile, createMockActivityLogItem } from '../tests/utils/mockFactories';
 import type { UserProfile } from '../types';
 import { createTestApp } from '../tests/utils/createTestApp';
-import { mockLogger } from '../tests/utils/mockLogger';
+
+const { mockLogger } = vi.hoisted(() => ({
+  mockLogger: {
+    info: vi.fn(),
+    warn: vi.fn(),
+    error: vi.fn(),
+    debug: vi.fn(),
+    child: vi.fn().mockReturnThis(),
+  },
+}));

 vi.mock('../lib/queue', () => ({
   serverAdapter: {
@@ -27,19 +36,22 @@ vi.mock('../services/db/index.db', () => ({
   notificationRepo: {},
 }));

-// Mock the queue service to control worker statuses
+// Mock the queue service for queue status checks
 vi.mock('../services/queueService.server', () => ({
+  flyerQueue: { name: 'flyer-processing', getJobCounts: vi.fn() },
+  emailQueue: { name: 'email-sending', getJobCounts: vi.fn() },
+  analyticsQueue: { name: 'analytics-reporting', getJobCounts: vi.fn() },
+  cleanupQueue: { name: 'file-cleanup', getJobCounts: vi.fn() },
+  weeklyAnalyticsQueue: { name: 'weekly-analytics-reporting', getJobCounts: vi.fn() },
+}));
+
+// Mock the worker service for worker status checks
+vi.mock('../services/workers.server', () => ({
   flyerWorker: { name: 'flyer-processing', isRunning: vi.fn() },
   emailWorker: { name: 'email-sending', isRunning: vi.fn() },
   analyticsWorker: { name: 'analytics-reporting', isRunning: vi.fn() },
   cleanupWorker: { name: 'file-cleanup', isRunning: vi.fn() },
   weeklyAnalyticsWorker: { name: 'weekly-analytics-reporting', isRunning: vi.fn() },
-  flyerQueue: { name: 'flyer-processing', getJobCounts: vi.fn() },
-  emailQueue: { name: 'email-sending', getJobCounts: vi.fn() },
-  analyticsQueue: { name: 'analytics-reporting', getJobCounts: vi.fn() },
-  cleanupQueue: { name: 'file-cleanup', getJobCounts: vi.fn() },
-  // FIX: Add the missing weeklyAnalyticsQueue to prevent import errors in admin.routes.ts
-  weeklyAnalyticsQueue: { name: 'weekly-analytics-reporting', getJobCounts: vi.fn() },
 }));

 // Mock other dependencies that are part of the adminRouter setup but not directly tested here
@@ -67,8 +79,10 @@ import adminRouter from './admin.routes';
 // Import the mocked modules to control them
 import * as queueService from '../services/queueService.server';
+import * as workerService from '../services/workers.server';
 import { adminRepo } from '../services/db/index.db';

 const mockedQueueService = queueService as Mocked<typeof queueService>;
+const mockedWorkerService = workerService as Mocked<typeof workerService>;

 // Mock the logger
 vi.mock('../services/logger.server', () => ({
@@ -102,12 +116,6 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
     authenticatedUser: adminUser,
   });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
-
   beforeEach(() => {
     vi.clearAllMocks();
   });
@@ -143,11 +151,11 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
   describe('GET /workers/status', () => {
     it('should return the status of all registered workers', async () => {
       // Arrange: Set the mock status for each worker
-      vi.mocked(mockedQueueService.flyerWorker.isRunning).mockReturnValue(true);
-      vi.mocked(mockedQueueService.emailWorker.isRunning).mockReturnValue(true);
-      vi.mocked(mockedQueueService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
-      vi.mocked(mockedQueueService.cleanupWorker.isRunning).mockReturnValue(true);
-      vi.mocked(mockedQueueService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);
+      vi.mocked(mockedWorkerService.flyerWorker.isRunning).mockReturnValue(true);
+      vi.mocked(mockedWorkerService.emailWorker.isRunning).mockReturnValue(true);
+      vi.mocked(mockedWorkerService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
+      vi.mocked(mockedWorkerService.cleanupWorker.isRunning).mockReturnValue(true);
+      vi.mocked(mockedWorkerService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);

       // Act
       const response = await supertest(app).get('/api/admin/workers/status');
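The mock churn here mirrors splitting BullMQ queues (queueService.server) from workers (workers.server), consistent with the "more work on the BullMQ workers" commits. A rough sketch of the split, with connection details assumed:

// A sketch of the module split implied by this diff (connection config assumed).
import { Queue, Worker } from 'bullmq';

const connection = { host: process.env.REDIS_HOST ?? 'localhost', port: 6379 };

// services/queueService.server.ts would export only queues:
export const flyerQueue = new Queue('flyer-processing', { connection });
export const weeklyAnalyticsQueue = new Queue('weekly-analytics-reporting', { connection });

// services/workers.server.ts would export only workers, so that route modules
// importing queues never pull in worker code (and vice versa):
export const flyerWorker = new Worker(
  'flyer-processing',
  async (job) => {
    // Processor body omitted; the real one lives in the repo's worker setup.
    return { flyerId: job.data?.flyerId };
  },
  { connection },
);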

View File

@@ -2,12 +2,11 @@
 import { Router, NextFunction, Request, Response } from 'express';
 import passport from './passport.routes';
 import { isAdmin } from './passport.routes'; // Correctly imported
-import multer from 'multer'; // --- Zod Schemas for Admin Routes (as per ADR-003) ---
+import multer from 'multer';
 import { z } from 'zod';
 import * as db from '../services/db/index.db';
-import { logger } from '../services/logger.server';
-import { UserProfile } from '../types';
+import type { UserProfile } from '../types';
 import { geocodingService } from '../services/geocodingService.server';
 import { requireFileUpload } from '../middleware/fileUpload.middleware';
 import { NotFoundError, ValidationError } from '../services/db/errors.db';
@@ -26,52 +25,36 @@ import {
   analyticsQueue,
   cleanupQueue,
   weeklyAnalyticsQueue,
-  flyerWorker,
-  emailWorker,
-  analyticsWorker,
-  cleanupWorker,
-  weeklyAnalyticsWorker,
 } from '../services/queueService.server'; // Import your queues
+import {
+  analyticsWorker,
+  cleanupWorker,
+  emailWorker,
+  flyerWorker,
+  weeklyAnalyticsWorker,
+} from '../services/workers.server';
 import { getSimpleWeekAndYear } from '../utils/dateUtils';
+import {
+  requiredString,
+  numericIdParam,
+  uuidParamSchema,
+  optionalNumeric,
+} from '../utils/zodUtils';
+import { logger } from '../services/logger.server';

-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));
-
-/**
- * A factory for creating a Zod schema that validates a UUID in the request parameters.
- * @param key The name of the parameter key (e.g., 'userId').
- * @param message A custom error message for invalid UUIDs.
- */
-const uuidParamSchema = (key: string, message = `Invalid UUID for parameter '${key}'.`) =>
-  z.object({
-    params: z.object({ [key]: z.string().uuid({ message }) }),
-  });
-
-/**
- * A factory for creating a Zod schema that validates a numeric ID in the request parameters.
- */
-const numericIdParamSchema = (
-  key: string,
-  message = `Invalid ID for parameter '${key}'. Must be a positive integer.`,
-) =>
-  z.object({
-    params: z.object({ [key]: z.coerce.number().int({ message }).positive({ message }) }),
-  });
-
-const updateCorrectionSchema = numericIdParamSchema('id').extend({
+const updateCorrectionSchema = numericIdParam('id').extend({
   body: z.object({
     suggested_value: requiredString('A new suggested_value is required.'),
   }),
 });

-const updateRecipeStatusSchema = numericIdParamSchema('id').extend({
+const updateRecipeStatusSchema = numericIdParam('id').extend({
   body: z.object({
     status: z.enum(['private', 'pending_review', 'public', 'rejected']),
   }),
 });

-const updateCommentStatusSchema = numericIdParamSchema('id').extend({
+const updateCommentStatusSchema = numericIdParam('id').extend({
   body: z.object({
     status: z.enum(['visible', 'hidden', 'reported']),
   }),
 });
@@ -85,8 +68,8 @@ const updateUserRoleSchema = uuidParamSchema('id', 'A valid user ID is required.
 const activityLogSchema = z.object({
   query: z.object({
-    limit: z.coerce.number().int().positive().optional().default(50),
-    offset: z.coerce.number().int().nonnegative().optional().default(0),
+    limit: optionalNumeric({ default: 50, integer: true, positive: true }),
+    offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
   }),
 });
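The helpers now imported from ../utils/zodUtils are not shown in this diff. Judging from their call sites above and the local factories they replace, they plausibly look like the following (signatures inferred, not confirmed by the source):

import { z } from 'zod';

// Inferred sketches of the shared helpers; the real zodUtils may differ.
export const requiredString = (message: string) =>
  z.preprocess((val) => val ?? '', z.string().min(1, message));

export const numericIdParam = (
  key: string,
  message = `Invalid ID for parameter '${key}'. Must be a positive integer.`,
) =>
  z.object({
    params: z.object({ [key]: z.coerce.number().int({ message }).positive({ message }) }),
  });

export const uuidParamSchema = (key: string, message = `Invalid UUID for parameter '${key}'.`) =>
  z.object({
    params: z.object({ [key]: z.string().uuid({ message }) }),
  });

export const optionalNumeric = (opts: {
  default: number;
  integer?: boolean;
  positive?: boolean;
  nonnegative?: boolean;
}) => {
  let schema = z.coerce.number();
  if (opts.integer) schema = schema.int();
  if (opts.positive) schema = schema.positive();
  if (opts.nonnegative) schema = schema.nonnegative();
  return schema.optional().default(opts.default);
};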
@@ -154,6 +137,7 @@ router.get('/corrections', async (req, res, next: NextFunction) => {
     const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
     res.json(corrections);
   } catch (error) {
+    logger.error({ error }, 'Error fetching suggested corrections');
     next(error);
   }
 });
@@ -163,6 +147,7 @@ router.get('/brands', async (req, res, next: NextFunction) => {
     const brands = await db.flyerRepo.getAllBrands(req.log);
     res.json(brands);
   } catch (error) {
+    logger.error({ error }, 'Error fetching brands');
     next(error);
   }
 });
@@ -172,6 +157,7 @@ router.get('/stats', async (req, res, next: NextFunction) => {
     const stats = await db.adminRepo.getApplicationStats(req.log);
     res.json(stats);
   } catch (error) {
+    logger.error({ error }, 'Error fetching application stats');
     next(error);
   }
 });
@@ -181,20 +167,22 @@ router.get('/stats/daily', async (req, res, next: NextFunction) => {
     const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
     res.json(dailyStats);
   } catch (error) {
+    logger.error({ error }, 'Error fetching daily stats');
     next(error);
   }
 });

 router.post(
   '/corrections/:id/approve',
-  validateRequest(numericIdParamSchema('id')),
+  validateRequest(numericIdParam('id')),
   async (req: Request, res: Response, next: NextFunction) => {
     // Apply ADR-003 pattern for type safety
-    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
+    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
     try {
       await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
       res.status(200).json({ message: 'Correction approved successfully.' });
     } catch (error) {
+      logger.error({ error }, 'Error approving correction');
       next(error);
     }
   },
@@ -202,14 +190,15 @@ router.post(
 router.post(
   '/corrections/:id/reject',
-  validateRequest(numericIdParamSchema('id')),
+  validateRequest(numericIdParam('id')),
   async (req: Request, res: Response, next: NextFunction) => {
     // Apply ADR-003 pattern for type safety
-    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
+    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
     try {
       await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
       res.status(200).json({ message: 'Correction rejected successfully.' });
     } catch (error) {
+      logger.error({ error }, 'Error rejecting correction');
       next(error);
     }
   },
@@ -229,6 +218,7 @@ router.put(
     );
     res.status(200).json(updatedCorrection);
   } catch (error) {
+    logger.error({ error }, 'Error updating suggested correction');
     next(error);
   }
 },
@@ -244,6 +234,7 @@ router.put(
     const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
     res.status(200).json(updatedRecipe);
   } catch (error) {
+    logger.error({ error }, 'Error updating recipe status');
     next(error); // Pass all errors to the central error handler
   }
 },
@@ -251,12 +242,12 @@ router.put(
 router.post(
   '/brands/:id/logo',
-  validateRequest(numericIdParamSchema('id')),
+  validateRequest(numericIdParam('id')),
   upload.single('logoImage'),
   requireFileUpload('logoImage'),
   async (req: Request, res: Response, next: NextFunction) => {
     // Apply ADR-003 pattern for type safety
-    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
+    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
     try {
       // Although requireFileUpload middleware should ensure the file exists,
       // this check satisfies TypeScript and adds robustness.
@@ -269,6 +260,7 @@ router.post(
     logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
     res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
   } catch (error) {
+    logger.error({ error }, 'Error updating brand logo');
     next(error);
   }
 },
@@ -279,6 +271,7 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
     const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
     res.json(items);
   } catch (error) {
+    logger.error({ error }, 'Error fetching unmatched items');
     next(error);
   }
 });
@@ -288,16 +281,17 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
  */
 router.delete(
   '/recipes/:recipeId',
-  validateRequest(numericIdParamSchema('recipeId')),
+  validateRequest(numericIdParam('recipeId')),
   async (req: Request, res: Response, next: NextFunction) => {
     const userProfile = req.user as UserProfile;
-    // Infer the type directly from the schema generator function. // This was a duplicate, fixed.
-    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
+    // Infer the type directly from the schema generator function.
+    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
     try {
       // The isAdmin flag bypasses the ownership check in the repository method.
       await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
       res.status(204).send();
     } catch (error: unknown) {
+      logger.error({ error }, 'Error deleting recipe');
       next(error);
     }
   },
@@ -308,14 +302,15 @@ router.delete(
  */
 router.delete(
   '/flyers/:flyerId',
-  validateRequest(numericIdParamSchema('flyerId')),
+  validateRequest(numericIdParam('flyerId')),
   async (req: Request, res: Response, next: NextFunction) => {
     // Infer the type directly from the schema generator function.
-    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
+    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
     try {
       await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
       res.status(204).send();
     } catch (error: unknown) {
+      logger.error({ error }, 'Error deleting flyer');
       next(error);
     }
   },
@@ -335,6 +330,7 @@ router.put(
     ); // This is still a standalone function in admin.db.ts
     res.status(200).json(updatedComment);
   } catch (error: unknown) {
+    logger.error({ error }, 'Error updating comment status');
     next(error);
   }
 },
@@ -345,6 +341,7 @@ router.get('/users', async (req, res, next: NextFunction) => {
     const users = await db.adminRepo.getAllUsers(req.log);
     res.json(users);
   } catch (error) {
+    logger.error({ error }, 'Error fetching users');
     next(error);
   }
 });
@@ -364,6 +361,7 @@ router.get(
     const logs = await db.adminRepo.getActivityLog(limit, offset, req.log);
     res.json(logs);
   } catch (error) {
+    logger.error({ error }, 'Error fetching activity log');
     next(error);
   }
 },
@@ -379,6 +377,7 @@ router.get(
     const user = await db.userRepo.findUserProfileById(params.id, req.log);
     res.json(user);
   } catch (error) {
+    logger.error({ error }, 'Error fetching user profile');
     next(error);
   }
 },
@@ -414,6 +413,7 @@ router.delete(
     await db.userRepo.deleteUserById(params.id, req.log);
     res.status(204).send();
   } catch (error) {
+    logger.error({ error }, 'Error deleting user');
     next(error);
   }
 },
@@ -435,12 +435,10 @@ router.post(
     // We call the function but don't wait for it to finish (no `await`).
     // This is a "fire-and-forget" operation from the client's perspective.
     backgroundJobService.runDailyDealCheck();
-    res
-      .status(202)
-      .json({
-        message:
-          'Daily deal check job has been triggered successfully. It will run in the background.',
-      });
+    res.status(202).json({
+      message:
+        'Daily deal check job has been triggered successfully. It will run in the background.',
+    });
   } catch (error) {
     logger.error({ error }, '[Admin] Failed to trigger daily deal check job.');
     next(error);
@@ -467,11 +465,9 @@ router.post(
     const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });

-    res
-      .status(202)
-      .json({
-        message: `Analytics report generation job has been enqueued successfully. Job ID: ${job.id}`,
-      });
+    res.status(202).json({
+      message: `Analytics report generation job has been enqueued successfully. Job ID: ${job.id}`,
+    });
   } catch (error) {
     logger.error({ error }, '[Admin] Failed to enqueue analytics report job.');
     next(error);
@@ -485,11 +481,11 @@ router.post(
  */
 router.post(
   '/flyers/:flyerId/cleanup',
-  validateRequest(numericIdParamSchema('flyerId')),
+  validateRequest(numericIdParam('flyerId')),
   async (req: Request, res: Response, next: NextFunction) => {
     const userProfile = req.user as UserProfile;
     // Infer type from the schema generator for type safety, as per ADR-003.
-    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>; // This was a duplicate, fixed.
+    const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
     logger.info(
       `[Admin] Manual trigger for flyer file cleanup received from user: ${userProfile.user.user_id} for flyer ID: ${params.flyerId}`,
     );
@@ -501,6 +497,7 @@ router.post(
       .status(202)
       .json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
   } catch (error) {
+    logger.error({ error }, 'Error enqueuing cleanup job');
     next(error);
   }
 },
@@ -523,6 +520,7 @@ router.post('/trigger/failing-job', async (req: Request, res: Response, next: Ne
       .status(202)
       .json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
   } catch (error) {
+    logger.error({ error }, 'Error enqueuing failing job');
     next(error);
   }
 });
@@ -541,11 +539,9 @@ router.post(
   try {
     const keysDeleted = await geocodingService.clearGeocodeCache(req.log);

-    res
-      .status(200)
-      .json({
-        message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
-      });
+    res.status(200).json({
+      message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
+    });
   } catch (error) {
     logger.error({ error }, '[Admin] Failed to clear geocode cache.');
     next(error);
@@ -597,6 +593,7 @@ router.get('/queues/status', async (req: Request, res: Response, next: NextFunct
     );
     res.json(queueStatuses);
   } catch (error) {
+    logger.error({ error }, 'Error fetching queue statuses');
     next(error);
   }
 });
@@ -645,6 +642,7 @@ router.post(
     );
     res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
   } catch (error) {
+    logger.error({ error }, 'Error retrying job');
     next(error);
   }
 },
@@ -676,6 +674,7 @@ router.post(
       .status(202)
       .json({ message: 'Successfully enqueued weekly analytics job.', jobId: job.id });
   } catch (error) {
+    logger.error({ error }, 'Error enqueuing weekly analytics job');
     next(error);
   }
 },
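Every route above follows the same validateRequest + z.infer<ReturnType<typeof numericIdParam>> pattern from ADR-003. The validation.middleware itself is not part of this diff, so the following is a condensed sketch of what it plausibly does, not the verbatim implementation:

import { NextFunction, Request, Response } from 'express';
import { z } from 'zod';

// Assumed shape of validateRequest: parse { params, query, body } against the
// route schema and hand any ZodError to the centralized errorHandler (-> 400).
const validateRequest =
  (schema: z.ZodTypeAny) =>
  (req: Request, _res: Response, next: NextFunction) => {
    const result = schema.safeParse({ params: req.params, query: req.query, body: req.body });
    if (!result.success) return next(result.error);
    next();
  };

// After validation succeeds, the ADR-003 cast gives handlers typed access:
//   const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
// params.id is then a number, already coerced and range-checked by Zod.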

View File

@@ -73,12 +73,6 @@ describe('Admin Stats Routes (/api/admin/stats)', () => {
     authenticatedUser: adminUser,
   });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
-
   beforeEach(() => {
     vi.clearAllMocks();
   });

View File

@@ -79,12 +79,6 @@ describe('Admin System Routes (/api/admin/system)', () => {
     authenticatedUser: adminUser,
   });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
-
   beforeEach(() => {
     vi.clearAllMocks();
   });

View File

@@ -83,12 +83,6 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
     authenticatedUser: adminUser,
   });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
-
   beforeEach(() => {
     vi.clearAllMocks();
   });

View File

@@ -78,6 +78,7 @@ describe('AI Routes (/api/ai)', () => {
     vi.mocked(mockLogger.info).mockImplementation(() => {});
     vi.mocked(mockLogger.error).mockImplementation(() => {});
     vi.mocked(mockLogger.warn).mockImplementation(() => {});
+    vi.mocked(mockLogger.debug).mockImplementation(() => {}); // Ensure debug is also mocked
   });

   const app = createTestApp({ router: aiRouter, basePath: '/api/ai' });
@@ -111,10 +112,55 @@ describe('AI Routes (/api/ai)', () => {
     });
   });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
+  // New test to cover the router.use diagnostic middleware's catch block and errMsg branches
+  describe('Diagnostic Middleware Error Handling', () => {
+    it('should log an error if logger.debug throws an object with a message property', async () => {
+      const mockErrorObject = { message: 'Mock debug error' };
+      vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
+        throw mockErrorObject;
+      });
+
+      // Make any request to trigger the middleware
+      const response = await supertest(app).get('/api/ai/jobs/job-123/status');
+
+      expect(mockLogger.error).toHaveBeenCalledWith(
+        { error: mockErrorObject.message }, // errMsg should extract the message
+        'Failed to log incoming AI request headers',
+      );
+      // The request should still proceed, but might fail later if the original flow was interrupted.
+      // Here, it will likely hit the 404 for job not found.
+      expect(response.status).toBe(404);
+    });
+
+    it('should log an error if logger.debug throws a primitive string', async () => {
+      const mockErrorString = 'Mock debug error string';
+      vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
+        throw mockErrorString;
+      });
+
+      // Make any request to trigger the middleware
+      const response = await supertest(app).get('/api/ai/jobs/job-123/status');
+
+      expect(mockLogger.error).toHaveBeenCalledWith(
+        { error: mockErrorString }, // errMsg should convert to string
+        'Failed to log incoming AI request headers',
+      );
+      expect(response.status).toBe(404);
+    });
+
+    it('should log an error if logger.debug throws null/undefined', async () => {
+      vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
+        throw null; // Simulate throwing null
+      });
+
+      const response = await supertest(app).get('/api/ai/jobs/job-123/status');
+
+      expect(mockLogger.error).toHaveBeenCalledWith(
+        { error: 'An unknown error occurred.' }, // errMsg should handle null/undefined
+        'Failed to log incoming AI request headers',
+      );
+      expect(response.status).toBe(404);
+    });
+  });

   describe('POST /upload-and-process', () => {
@@ -307,10 +353,11 @@ describe('AI Routes (/api/ai)', () => {
     expect(response.status).toBe(400);
   });

-  it('should return 409 Conflict if flyer checksum already exists', async () => {
+  it('should return 409 Conflict and delete the uploaded file if flyer checksum already exists', async () => {
     // Arrange
     const mockExistingFlyer = createMockFlyer({ flyer_id: 99 });
     vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(mockExistingFlyer); // Duplicate found
+    const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);

     // Act
     const response = await supertest(app)
@@ -322,6 +369,10 @@ describe('AI Routes (/api/ai)', () => {
     expect(response.status).toBe(409);
     expect(response.body.message).toBe('This flyer has already been processed.');
     expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled();
+    // Assert that the file was deleted
+    expect(unlinkSpy).toHaveBeenCalledTimes(1);
+    // The filename is predictable in the test environment because of the multer config in ai.routes.ts
+    expect(unlinkSpy).toHaveBeenCalledWith(expect.stringContaining('flyerImage-test-flyer-image.jpg'));
   });

   it('should accept payload when extractedData.items is missing and save with empty items', async () => {
@@ -423,6 +474,52 @@ describe('AI Routes (/api/ai)', () => {
     expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
   });

+  it('should handle payload where extractedData is null', async () => {
+    const payloadWithNullExtractedData = {
+      checksum: 'null-extracted-data-checksum',
+      originalFileName: 'flyer-null.jpg',
+      extractedData: null,
+    };
+
+    const response = await supertest(app)
+      .post('/api/ai/flyers/process')
+      .field('data', JSON.stringify(payloadWithNullExtractedData))
+      .attach('flyerImage', imagePath);
+
+    expect(response.status).toBe(201);
+    expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
+    // Verify that extractedData was correctly defaulted to an empty object
+    const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
+    expect(flyerDataArg.store_name).toContain('Unknown Store'); // Fallback should be used
+    expect(mockLogger.warn).toHaveBeenCalledWith(
+      { bodyData: expect.any(Object) },
+      'Missing extractedData in /api/ai/flyers/process payload.',
+    );
+  });
+
+  it('should handle payload where extractedData is a string', async () => {
+    const payloadWithStringExtractedData = {
+      checksum: 'string-extracted-data-checksum',
+      originalFileName: 'flyer-string.jpg',
+      extractedData: 'not-an-object',
+    };
+
+    const response = await supertest(app)
+      .post('/api/ai/flyers/process')
+      .field('data', JSON.stringify(payloadWithStringExtractedData))
+      .attach('flyerImage', imagePath);
+
+    expect(response.status).toBe(201);
+    expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
+    // Verify that extractedData was correctly defaulted to an empty object
+    const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
+    expect(flyerDataArg.store_name).toContain('Unknown Store'); // Fallback should be used
+    expect(mockLogger.warn).toHaveBeenCalledWith(
+      { bodyData: expect.any(Object) },
+      'Missing extractedData in /api/ai/flyers/process payload.',
+    );
+  });
+
   it('should handle payload where extractedData is at the root of the body', async () => {
     // This simulates a client sending multipart fields for each property of extractedData
     const response = await supertest(app)
@@ -438,6 +535,27 @@ describe('AI Routes (/api/ai)', () => {
     const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
     expect(flyerDataArg.store_name).toBe('Root Store');
   });

+  it('should default item quantity to 1 if missing', async () => {
+    const payloadMissingQuantity = {
+      checksum: 'qty-checksum',
+      originalFileName: 'flyer-qty.jpg',
+      extractedData: {
+        store_name: 'Qty Store',
+        items: [{ name: 'Item without qty', price: 100 }],
+      },
+    };
+
+    const response = await supertest(app)
+      .post('/api/ai/flyers/process')
+      .field('data', JSON.stringify(payloadMissingQuantity))
+      .attach('flyerImage', imagePath);
+
+    expect(response.status).toBe(201);
+    expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
+    const itemsArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][1];
+    expect(itemsArg[0].quantity).toBe(1);
+  });
 });

 describe('POST /check-flyer', () => {
@@ -557,10 +675,11 @@ describe('AI Routes (/api/ai)', () => {
   const mockUser = createMockUserProfile({
     user: { user_id: 'user-123', email: 'user-123@test.com' },
   });
+  const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUser });

   beforeEach(() => {
     // Inject an authenticated user for this test block
-    app.use((req, res, next) => {
+    authenticatedApp.use((req, res, next) => {
       req.user = mockUser;
       next();
     });
@@ -575,7 +694,7 @@ describe('AI Routes (/api/ai)', () => {
       .field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
       .field('extractionType', 'item_details')
       .attach('image', imagePath);
+    // Use the authenticatedApp instance for requests in this block

     expect(response.status).toBe(200);
     expect(response.body).toEqual(mockResult);
     expect(aiService.aiService.extractTextFromImageArea).toHaveBeenCalled();
@@ -586,7 +705,7 @@ describe('AI Routes (/api/ai)', () => {
       new Error('AI API is down'),
     );

-    const response = await supertest(app)
+    const response = await supertest(authenticatedApp)
       .post('/api/ai/rescan-area')
       .field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
       .field('extractionType', 'item_details')
@@ -602,15 +721,12 @@ describe('AI Routes (/api/ai)', () => {
   const mockUserProfile = createMockUserProfile({
     user: { user_id: 'user-123', email: 'user-123@test.com' },
   });
+  const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUserProfile });

   beforeEach(() => {
-    // For this block, simulate an authenticated request by attaching the user.
-    app.use((req, res, next) => {
-      req.user = mockUserProfile;
-      next();
-    });
+    // The authenticatedApp instance is already set up with mockUserProfile
   });

   it('POST /quick-insights should return the stubbed response', async () => {
     const response = await supertest(app)
       .post('/api/ai/quick-insights')
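These tests lean on createTestApp accepting an authenticatedUser option. The helper itself is not in this diff; the following is an inferred sketch of what it likely provides, and the real utility may differ:

import express, { Router } from 'express';
import { errorHandler } from '../../middleware/errorHandler';
import type { UserProfile } from '../../types';

// Inferred sketch of the test-app factory used throughout these suites.
export function createTestApp(opts: {
  router: Router;
  basePath: string;
  authenticatedUser?: UserProfile;
}) {
  const app = express();
  app.use(express.json());
  if (opts.authenticatedUser) {
    // Simulate passport: attach the user before the router runs.
    app.use((req, _res, next) => {
      req.user = opts.authenticatedUser;
      next();
    });
  }
  app.use(opts.basePath, opts.router);
  // The centralized errorHandler replaces the ad-hoc per-test handlers removed in this diff.
  app.use(errorHandler);
  return app;
}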

View File

@@ -15,6 +15,7 @@ import { logger } from '../services/logger.server';
 import { UserProfile, ExtractedCoreData, ExtractedFlyerItem } from '../types';
 import { flyerQueue } from '../services/queueService.server';
 import { validateRequest } from '../middleware/validation.middleware';
+import { requiredString } from '../utils/zodUtils';

 const router = Router();
@@ -26,9 +27,6 @@ interface FlyerProcessPayload extends Partial<ExtractedCoreData> {
 }

 // --- Zod Schemas for AI Routes (as per ADR-003) ---
-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));

 const uploadAndProcessSchema = z.object({
   body: z.object({
@@ -52,6 +50,15 @@ const errMsg = (e: unknown) => {
   return String(e || 'An unknown error occurred.');
 };

+const cleanupUploadedFile = async (file?: Express.Multer.File) => {
+  if (!file) return;
+  try {
+    await fs.promises.unlink(file.path);
+  } catch (err) {
+    // Ignore cleanup errors (e.g. file already deleted)
+  }
+};
+
 const cropAreaObjectSchema = z.object({
   x: z.number(),
   y: z.number(),
@@ -187,7 +194,7 @@ router.use((req: Request, res: Response, next: NextFunction) => {
       '[API /ai] Incoming request',
     );
   } catch (e: unknown) {
-    logger.error({ error: e }, 'Failed to log incoming AI request headers');
+    logger.error({ error: errMsg(e) }, 'Failed to log incoming AI request headers');
   }
   next();
 });
@@ -318,7 +325,7 @@ router.post(
     // Try several ways to obtain the payload so we are tolerant to client variations.
     let parsed: FlyerProcessPayload = {};
-    let extractedData: Partial<ExtractedCoreData> = {};
+    let extractedData: Partial<ExtractedCoreData> | null | undefined = {};
     try {
       // If the client sent a top-level `data` field (stringified JSON), parse it.
       if (req.body && (req.body.data || req.body.extractedData)) {
@@ -339,7 +346,7 @@ router.post(
         ) as FlyerProcessPayload;
       }
       // If parsed itself contains an `extractedData` field, use that, otherwise assume parsed is the extractedData
-      extractedData = parsed.extractedData ?? (parsed as Partial<ExtractedCoreData>);
+      extractedData = 'extractedData' in parsed ? parsed.extractedData : (parsed as Partial<ExtractedCoreData>);
     } else {
       // No explicit `data` field found. Attempt to interpret req.body as an object (Express may have parsed multipart fields differently).
       try {
@@ -385,6 +392,12 @@ router.post(
// Pull common metadata fields (checksum, originalFileName) from whichever shape we parsed. // Pull common metadata fields (checksum, originalFileName) from whichever shape we parsed.
const checksum = parsed.checksum ?? parsed?.data?.checksum ?? ''; const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
if (!checksum) {
await cleanupUploadedFile(req.file);
return res.status(400).json({ message: 'Checksum is required.' });
}
const originalFileName = const originalFileName =
parsed.originalFileName ?? parsed?.data?.originalFileName ?? req.file.originalname; parsed.originalFileName ?? parsed?.data?.originalFileName ?? req.file.originalname;
const userProfile = req.user as UserProfile | undefined; const userProfile = req.user as UserProfile | undefined;
@@ -411,6 +424,7 @@ router.post(
const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({ const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
...item, ...item,
master_item_id: item.master_item_id === null ? undefined : item.master_item_id, master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
quantity: item.quantity ?? 1, // Default to 1 to satisfy DB constraint
view_count: 0, view_count: 0,
click_count: 0, click_count: 0,
updated_at: new Date().toISOString(), updated_at: new Date().toISOString(),
@@ -431,6 +445,7 @@ router.post(
const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, req.log); const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, req.log);
if (existingFlyer) { if (existingFlyer) {
logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${checksum}`); logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${checksum}`);
await cleanupUploadedFile(req.file);
return res.status(409).json({ message: 'This flyer has already been processed.' }); return res.status(409).json({ message: 'This flyer has already been processed.' });
} }
@@ -478,6 +493,7 @@ router.post(
res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer }); res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
} catch (error) { } catch (error) {
await cleanupUploadedFile(req.file);
next(error); next(error);
} }
}, },
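
Note: several route files in this compare replace per-file helpers with imports from a new shared src/utils/zodUtils module whose own diff is not shown here. A minimal sketch of what that module plausibly exports, reconstructed from the helpers deleted in each route file and the option names used at the call sites (default, integer, positive, nonnegative, min, max); the exact signatures are assumptions:

// src/utils/zodUtils.ts: hypothetical reconstruction, not part of this diff.
import { z } from 'zod';

// Required string that treats missing/null as '' so .min(1) fires with the custom message.
export const requiredString = (message: string) =>
  z.preprocess((val) => val ?? '', z.string().min(1, message));

// Schema for a numeric route parameter, e.g. numericIdParam('recipeId').
export const numericIdParam = (key: string, message?: string) =>
  z.object({
    params: z.object({
      [key]: z.coerce
        .number()
        .int()
        .positive(message ?? `Invalid ID for parameter '${key}'. Must be a number.`),
    }),
  });

interface OptionalNumericOpts {
  default?: number;
  integer?: boolean;
  positive?: boolean;
  nonnegative?: boolean;
  min?: number;
  max?: number;
}

// Optional coerced number with constraints, mirroring the inline schemas it replaces.
export const optionalNumeric = (opts: OptionalNumericOpts = {}) => {
  let schema = z.coerce.number();
  if (opts.integer) schema = schema.int();
  if (opts.positive) schema = schema.positive();
  if (opts.nonnegative) schema = schema.nonnegative();
  if (opts.min !== undefined) schema = schema.min(opts.min);
  if (opts.max !== undefined) schema = schema.max(opts.max);
  return opts.default !== undefined ? schema.optional().default(opts.default) : schema.optional();
};

// Optional boolean that coerces query-string values like 'true'/'false'.
export const optionalBoolean = (opts: { default?: boolean } = {}) => {
  const schema = z.preprocess(
    (val) => (typeof val === 'string' ? val === 'true' : val),
    z.boolean(),
  );
  return opts.default !== undefined ? schema.optional().default(opts.default) : schema.optional();
};

The `!` non-null assertions at the call sites below suggest the project's actual optionalNumeric leaves the inferred type optional even when a default is supplied; the sketch does not try to reproduce that nuance.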

View File

@@ -1,13 +1,12 @@
 // src/routes/auth.routes.ts
 import { Router, Request, Response, NextFunction } from 'express';
 import * as bcrypt from 'bcrypt';
-import zxcvbn from 'zxcvbn';
 import { z } from 'zod';
 import jwt from 'jsonwebtoken';
 import crypto from 'crypto';
 import rateLimit from 'express-rate-limit';
-import passport from './passport.routes'; // Corrected import path
+import passport from './passport.routes';
 import { userRepo, adminRepo } from '../services/db/index.db';
 import { UniqueConstraintError } from '../services/db/errors.db';
 import { getPool } from '../services/db/connection.db';
@@ -15,38 +14,13 @@ import { logger } from '../services/logger.server';
 import { sendPasswordResetEmail } from '../services/emailService.server';
 import { validateRequest } from '../middleware/validation.middleware';
 import type { UserProfile } from '../types';
+import { validatePasswordStrength } from '../utils/authUtils';
+import { requiredString } from '../utils/zodUtils';

 const router = Router();
 const JWT_SECRET = process.env.JWT_SECRET!;

-/**
- * Validates the strength of a password using zxcvbn.
- * @param password The password to check.
- * @returns An object with `isValid` and an optional `feedback` message.
- */
-const validatePasswordStrength = (password: string): { isValid: boolean; feedback?: string } => {
-  const MIN_PASSWORD_SCORE = 3; // Require a 'Good' or 'Strong' password (score 3 or 4)
-  const strength = zxcvbn(password);
-  if (strength.score < MIN_PASSWORD_SCORE) {
-    const feedbackMessage =
-      strength.feedback.warning ||
-      (strength.feedback.suggestions && strength.feedback.suggestions[0]);
-    return {
-      isValid: false,
-      feedback:
-        `Password is too weak. ${feedbackMessage || 'Please choose a stronger password.'}`.trim(),
-    };
-  }
-  return { isValid: true };
-};
-
-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));

 // Conditionally disable rate limiting for the test environment
 const isTestEnv = process.env.NODE_ENV === 'test';
@@ -69,8 +43,6 @@ const resetPasswordLimiter = rateLimit({
   skip: () => isTestEnv, // Skip this middleware if in test environment
 });

-// --- Zod Schemas for Auth Routes (as per ADR-003) ---

 const registerSchema = z.object({
   body: z.object({
     email: z.string().email('A valid email is required.'),
@@ -162,8 +134,8 @@ router.post(
       // If the email is a duplicate, return a 409 Conflict status.
       return res.status(409).json({ message: error.message });
     }
-    // The createUser method now handles its own transaction logging, so we just log the route failure.
     logger.error({ error }, `User registration route failed for email: ${email}.`);
-    // Pass the error to the centralized handler
     return next(error);
   }
 },
@@ -213,7 +185,7 @@ router.post('/login', (req: Request, res: Response, next: NextFunction) => {
   const accessToken = jwt.sign(payload, JWT_SECRET, { expiresIn: '15m' });

   try {
-    const refreshToken = crypto.randomBytes(64).toString('hex'); // This was a duplicate, fixed.
+    const refreshToken = crypto.randomBytes(64).toString('hex');
     await userRepo.saveRefreshToken(userProfile.user.user_id, refreshToken, req.log);
     req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
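
Note: validatePasswordStrength was deleted here (and from user.routes.ts further down) in favour of an import from ../utils/authUtils. The new module's diff is not shown in this compare, but since the removed code was identical in both files it was presumably moved over verbatim, roughly:

// src/utils/authUtils.ts: assumed contents, mirroring the code removed from both route files.
import zxcvbn from 'zxcvbn';

/**
 * Validates the strength of a password using zxcvbn.
 * @param password The password to check.
 * @returns An object with `isValid` and an optional `feedback` message.
 */
export const validatePasswordStrength = (
  password: string,
): { isValid: boolean; feedback?: string } => {
  const MIN_PASSWORD_SCORE = 3; // Require a 'Good' or 'Strong' password (score 3 or 4)
  const strength = zxcvbn(password);
  if (strength.score < MIN_PASSWORD_SCORE) {
    const feedbackMessage =
      strength.feedback.warning ||
      (strength.feedback.suggestions && strength.feedback.suggestions[0]);
    return {
      isValid: false,
      feedback:
        `Password is too weak. ${feedbackMessage || 'Please choose a stronger password.'}`.trim(),
    };
  }
  return { isValid: true };
};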

View File

@@ -69,17 +69,7 @@ describe('Budget Routes (/api/budgets)', () => {
     vi.mocked(db.budgetRepo.getSpendingByCategory).mockResolvedValue([]);
   });

-  const app = createTestApp({
-    router: budgetRouter,
-    basePath: '/api/budgets',
-    authenticatedUser: mockUser,
-  });
-
-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });
+  const app = createTestApp({ router: budgetRouter, basePath: '/api/budgets', authenticatedUser: mockUserProfile });

   describe('GET /', () => {
     it('should return a list of budgets for the user', async () => {
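
Note: the same inline JSON error handler is deleted from every route test in this compare, which suggests createTestApp now installs the error handler and the authenticated-user shim itself. A hypothetical sketch of the updated helper; its real implementation is not part of this diff:

// src/tests/utils/createTestApp.ts: hypothetical sketch; the actual change is not shown here.
import express, { Router, Request, Response, NextFunction } from 'express';
import type { UserProfile } from '../../types';

interface TestAppOptions {
  router: Router;
  basePath: string;
  authenticatedUser?: UserProfile;
}

export const createTestApp = ({ router, basePath, authenticatedUser }: TestAppOptions) => {
  const app = express();
  app.use(express.json());

  // Attach the mock user before the router so protected handlers see req.user.
  if (authenticatedUser) {
    app.use((req: Request, _res: Response, next: NextFunction) => {
      (req as Request & { user?: UserProfile }).user = authenticatedUser;
      next();
    });
  }

  app.use(basePath, router);

  // Centralized JSON error handler, so individual test files no longer add their own.
  app.use((err: any, _req: Request, res: Response, _next: NextFunction) => {
    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
  });

  return app;
};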

View File

@@ -5,20 +5,12 @@ import passport from './passport.routes';
 import { budgetRepo } from '../services/db/index.db';
 import type { UserProfile } from '../types';
 import { validateRequest } from '../middleware/validation.middleware';
+import { requiredString, numericIdParam } from '../utils/zodUtils';

 const router = express.Router();

-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));

 // --- Zod Schemas for Budget Routes (as per ADR-003) ---
-const budgetIdParamSchema = z.object({
-  params: z.object({
-    id: z.coerce.number().int().positive("Invalid ID for parameter 'id'. Must be a number."),
-  }),
-});
+const budgetIdParamSchema = numericIdParam('id', "Invalid ID for parameter 'id'. Must be a number.");

 const createBudgetSchema = z.object({
   body: z.object({

View File

@@ -54,13 +54,6 @@ describe('Deals Routes (/api/users/deals)', () => {
     authenticatedUser: mockUser,
   });
   const unauthenticatedApp = createTestApp({ router: dealsRouter, basePath });

-  const errorHandler = (err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  };
-
-  // Apply the handler to both app instances
-  authenticatedApp.use(errorHandler);
-  unauthenticatedApp.use(errorHandler);

   beforeEach(() => {
     vi.clearAllMocks();

View File

@@ -40,12 +40,6 @@ describe('Flyer Routes (/api/flyers)', () => {
   const app = createTestApp({ router: flyerRouter, basePath: '/api/flyers' });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });

   describe('GET /', () => {
     it('should return a list of flyers on success', async () => {
       const mockFlyers = [createMockFlyer({ flyer_id: 1 }), createMockFlyer({ flyer_id: 2 })];

View File

@@ -3,6 +3,7 @@ import { Router } from 'express';
 import * as db from '../services/db/index.db';
 import { z } from 'zod';
 import { validateRequest } from '../middleware/validation.middleware';
+import { optionalNumeric } from '../utils/zodUtils';

 const router = Router();
@@ -10,8 +11,8 @@ const router = Router();
 const getFlyersSchema = z.object({
   query: z.object({
-    limit: z.coerce.number().int().positive().optional().default(20),
-    offset: z.coerce.number().int().nonnegative().optional().default(0),
+    limit: optionalNumeric({ default: 20, integer: true, positive: true }),
+    offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
   }),
 });
@@ -107,6 +108,7 @@ router.post(
     const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
     res.json(items);
   } catch (error) {
+    req.log.error({ error }, 'Error fetching batch flyer items');
     next(error);
   }
 },
@@ -126,6 +128,7 @@ router.post(
     const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log);
     res.json({ count });
   } catch (error) {
+    req.log.error({ error }, 'Error counting batch flyer items');
     next(error);
   }
 },

View File

@@ -86,12 +86,6 @@ describe('Gamification Routes (/api/achievements)', () => {
     basePath,
     authenticatedUser: mockAdminProfile,
   });

-  const errorHandler = (err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  };
-
-  unauthenticatedApp.use(errorHandler);
-  authenticatedApp.use(errorHandler);
-  adminApp.use(errorHandler);

   describe('GET /', () => {
     it('should return a list of all achievements (public endpoint)', async () => {

View File

@@ -7,19 +7,16 @@ import { logger } from '../services/logger.server';
 import { UserProfile } from '../types';
 import { ForeignKeyConstraintError } from '../services/db/errors.db';
 import { validateRequest } from '../middleware/validation.middleware';
+import { requiredString, optionalNumeric } from '../utils/zodUtils';

 const router = express.Router();
 const adminGamificationRouter = express.Router(); // Create a new router for admin-only routes.

-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));

 // --- Zod Schemas for Gamification Routes (as per ADR-003) ---
 const leaderboardSchema = z.object({
   query: z.object({
-    limit: z.coerce.number().int().positive().max(50).optional().default(10),
+    limit: optionalNumeric({ default: 10, integer: true, positive: true, max: 50 }),
   }),
 });

View File

@@ -46,12 +46,6 @@ const { logger } = await import('../services/logger.server');

 // 2. Create a minimal Express app to host the router for testing.
 const app = createTestApp({ router: healthRouter, basePath: '/api/health' });

-// Add a basic error handler to capture errors passed to next(err) and return JSON.
-// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-app.use((err: any, req: any, res: any, next: any) => {
-  res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-});

 describe('Health Routes (/api/health)', () => {
   beforeEach(() => {
     // Clear mock history before each test to ensure isolation.

View File

@@ -39,10 +39,7 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
     }
     return res.status(200).json({ success: true, message: 'All required database tables exist.' });
   } catch (error: unknown) {
-    logger.error(
-      { error: error instanceof Error ? error.message : error },
-      'Error during DB schema check:',
-    );
+    logger.error({ error }, 'Error during DB schema check:');
     next(error);
   }
 });
@@ -133,6 +130,7 @@ router.get(
       }
       throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
     } catch (error: unknown) {
+      logger.error({ error }, 'Error checking Redis health');
       next(error);
     }
   },

View File

@@ -30,12 +30,6 @@ vi.mock('../services/logger.server', () => ({
 describe('Personalization Routes (/api/personalization)', () => {
   const app = createTestApp({ router: personalizationRouter, basePath: '/api/personalization' });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });

   beforeEach(() => {
     vi.clearAllMocks();
   });

View File

@@ -4,8 +4,21 @@ import supertest from 'supertest';
 import { createTestApp } from '../tests/utils/createTestApp';
 import { mockLogger } from '../tests/utils/mockLogger';

+// Mock the price repository
+vi.mock('../services/db/price.db', () => ({
+  priceRepo: {
+    getPriceHistory: vi.fn(),
+  },
+}));
+
+// Mock the logger to keep test output clean
+vi.mock('../services/logger.server', () => ({
+  logger: mockLogger,
+}));
+
 // Import the router AFTER other setup.
 import priceRouter from './price.routes';
+import { priceRepo } from '../services/db/price.db';

 describe('Price Routes (/api/price-history)', () => {
   const app = createTestApp({ router: priceRouter, basePath: '/api/price-history' });
@@ -14,32 +27,106 @@ describe('Price Routes (/api/price-history)', () => {
   });

   describe('POST /', () => {
-    it('should return 200 OK with an empty array for a valid request', async () => {
-      const masterItemIds = [1, 2, 3];
-      const response = await supertest(app).post('/api/price-history').send({ masterItemIds });
+    it('should return 200 OK with price history data for a valid request', async () => {
+      const mockHistory = [
+        { master_item_id: 1, price_in_cents: 199, date: '2024-01-01T00:00:00.000Z' },
+        { master_item_id: 2, price_in_cents: 299, date: '2024-01-08T00:00:00.000Z' },
+      ];
+      vi.mocked(priceRepo.getPriceHistory).mockResolvedValue(mockHistory);
+
+      const response = await supertest(app)
+        .post('/api/price-history')
+        .send({ masterItemIds: [1, 2] });

       expect(response.status).toBe(200);
-      expect(response.body).toEqual([]);
+      expect(response.body).toEqual(mockHistory);
+      expect(priceRepo.getPriceHistory).toHaveBeenCalledWith([1, 2], expect.any(Object), 1000, 0);
+    });
+
+    it('should pass limit and offset from the body to the repository', async () => {
+      vi.mocked(priceRepo.getPriceHistory).mockResolvedValue([]);
+
+      await supertest(app)
+        .post('/api/price-history')
+        .send({ masterItemIds: [1, 2, 3], limit: 50, offset: 10 });
+
+      expect(priceRepo.getPriceHistory).toHaveBeenCalledWith(
+        [1, 2, 3],
+        expect.any(Object),
+        50,
+        10,
+      );
+    });
+
+    it('should log the request info', async () => {
+      vi.mocked(priceRepo.getPriceHistory).mockResolvedValue([]);
+
+      await supertest(app)
+        .post('/api/price-history')
+        .send({ masterItemIds: [1, 2, 3], limit: 25, offset: 5 });

       expect(mockLogger.info).toHaveBeenCalledWith(
-        { itemCount: masterItemIds.length },
+        { itemCount: 3, limit: 25, offset: 5 },
         '[API /price-history] Received request for historical price data.',
       );
     });

+    it('should return 500 if the database call fails', async () => {
+      const dbError = new Error('Database connection failed');
+      vi.mocked(priceRepo.getPriceHistory).mockRejectedValue(dbError);
+
+      const response = await supertest(app)
+        .post('/api/price-history')
+        .send({ masterItemIds: [1, 2, 3] });
+
+      expect(response.status).toBe(500);
+      expect(response.body.message).toBe('Database connection failed');
+    });
+
+    it('should return 400 if masterItemIds is an empty array', async () => {
+      const response = await supertest(app).post('/api/price-history').send({ masterItemIds: [] });
+
+      expect(response.status).toBe(400);
+      expect(response.body.errors[0].message).toBe(
+        'masterItemIds must be a non-empty array of positive integers.',
+      );
+    });
+
     it('should return 400 if masterItemIds is not an array', async () => {
       const response = await supertest(app)
         .post('/api/price-history')
         .send({ masterItemIds: 'not-an-array' });

       expect(response.status).toBe(400);
-      expect(response.body.errors[0].message).toMatch(/Expected array, received string/i);
+      // The actual message is "Invalid input: expected array, received string"
+      expect(response.body.errors[0].message).toBe('Invalid input: expected array, received string');
     });

-    it('should return 400 if masterItemIds is an empty array', async () => {
-      const response = await supertest(app).post('/api/price-history').send({ masterItemIds: [] });
+    it('should return 400 if masterItemIds contains non-positive integers', async () => {
+      const response = await supertest(app)
+        .post('/api/price-history')
+        .send({ masterItemIds: [1, -2, 3] });

       expect(response.status).toBe(400);
-      expect(response.body.errors[0].message).toBe(
-        'masterItemIds must be a non-empty array of positive integers.',
-      );
+      expect(response.body.errors[0].message).toBe('Number must be greater than 0');
+    });
+
+    it('should return 400 if masterItemIds is missing', async () => {
+      const response = await supertest(app).post('/api/price-history').send({});
+
+      expect(response.status).toBe(400);
+      // The actual message is "Invalid input: expected array, received undefined"
+      expect(response.body.errors[0].message).toBe('Invalid input: expected array, received undefined');
+    });
+
+    it('should return 400 for invalid limit and offset', async () => {
+      const response = await supertest(app)
+        .post('/api/price-history')
+        .send({ masterItemIds: [1], limit: -1, offset: 'abc' });
+
+      expect(response.status).toBe(400);
+      expect(response.body.errors).toHaveLength(2);
+      // The actual message is "Too small: expected number to be >0"
+      expect(response.body.errors[0].message).toBe('Too small: expected number to be >0');
+      expect(response.body.errors[1].message).toBe('Invalid input: expected number, received NaN');
+    });
   });
 });

View File

@@ -1,15 +1,21 @@
 // src/routes/price.routes.ts
-import { Router, Request, Response } from 'express';
+import { Router, Request, Response, NextFunction } from 'express';
 import { z } from 'zod';
 import { validateRequest } from '../middleware/validation.middleware';
+import { priceRepo } from '../services/db/price.db';
+import { optionalNumeric } from '../utils/zodUtils';

 const router = Router();

 const priceHistorySchema = z.object({
   body: z.object({
-    masterItemIds: z.array(z.number().int().positive()).nonempty({
-      message: 'masterItemIds must be a non-empty array of positive integers.',
-    }),
+    masterItemIds: z
+      .array(z.number().int().positive('Number must be greater than 0'))
+      .nonempty({
+        message: 'masterItemIds must be a non-empty array of positive integers.',
+      }),
+    limit: optionalNumeric({ default: 1000, integer: true, positive: true }),
+    offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
   }),
 });
@@ -18,18 +24,23 @@ type PriceHistoryRequest = z.infer<typeof priceHistorySchema>;

 /**
  * POST /api/price-history - Fetches historical price data for a given list of master item IDs.
- * This is a placeholder implementation.
+ * This endpoint retrieves price points over time for specified master grocery items.
  */
-router.post('/', validateRequest(priceHistorySchema), async (req: Request, res: Response) => {
+router.post('/', validateRequest(priceHistorySchema), async (req: Request, res: Response, next: NextFunction) => {
   // Cast 'req' to the inferred type for full type safety.
   const {
-    body: { masterItemIds },
+    body: { masterItemIds, limit, offset },
   } = req as unknown as PriceHistoryRequest;

   req.log.info(
-    { itemCount: masterItemIds.length },
+    { itemCount: masterItemIds.length, limit, offset },
     '[API /price-history] Received request for historical price data.',
   );

-  res.status(200).json([]);
+  try {
+    const priceHistory = await priceRepo.getPriceHistory(masterItemIds, req.log, limit, offset);
+    res.status(200).json(priceHistory);
+  } catch (error) {
+    next(error);
+  }
 });

 export default router;
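
Note: with the placeholder replaced by a real priceRepo.getPriceHistory call, a client can request paged history. A minimal caller sketch; the endpoint path, body shape, and row shape are taken from the route and tests above, and the fetch wiring is illustrative only:

// Minimal client-side sketch; assumes a fetch-compatible environment and a valid session.
interface PricePoint {
  master_item_id: number;
  price_in_cents: number;
  date: string; // ISO timestamp, as in the tests above
}

export const fetchPriceHistory = async (
  masterItemIds: number[],
  limit = 1000,
  offset = 0,
): Promise<PricePoint[]> => {
  const res = await fetch('/api/price-history', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ masterItemIds, limit, offset }),
  });
  if (!res.ok) throw new Error(`Price history request failed: ${res.status}`);
  return res.json();
};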

View File

@@ -35,12 +35,6 @@ const expectLogger = expect.objectContaining({
 describe('Recipe Routes (/api/recipes)', () => {
   const app = createTestApp({ router: recipeRouter, basePath: '/api/recipes' });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });

   beforeEach(() => {
     vi.clearAllMocks();
   });

View File

@@ -3,24 +3,19 @@ import { Router } from 'express';
 import { z } from 'zod';
 import * as db from '../services/db/index.db';
 import { validateRequest } from '../middleware/validation.middleware';
+import { requiredString, numericIdParam, optionalNumeric } from '../utils/zodUtils';

 const router = Router();

-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));
-
-// --- Zod Schemas for Recipe Routes (as per ADR-003) ---

 const bySalePercentageSchema = z.object({
   query: z.object({
-    minPercentage: z.coerce.number().min(0).max(100).optional().default(50),
+    minPercentage: optionalNumeric({ default: 50, min: 0, max: 100 }),
   }),
 });

 const bySaleIngredientsSchema = z.object({
   query: z.object({
-    minIngredients: z.coerce.number().int().positive().optional().default(3),
+    minIngredients: optionalNumeric({ default: 3, integer: true, positive: true }),
   }),
 });
@@ -31,11 +26,7 @@ const byIngredientAndTagSchema = z.object({
   }),
 });

-const recipeIdParamsSchema = z.object({
-  params: z.object({
-    recipeId: z.coerce.number().int().positive(),
-  }),
-});
+const recipeIdParamsSchema = numericIdParam('recipeId');

 /**
  * GET /api/recipes/by-sale-percentage - Get recipes based on the percentage of their ingredients on sale.
@@ -47,7 +38,7 @@ router.get(
   try {
     // Explicitly parse req.query to apply coercion (string -> number) and default values
     const { query } = bySalePercentageSchema.parse({ query: req.query });
-    const recipes = await db.recipeRepo.getRecipesBySalePercentage(query.minPercentage, req.log);
+    const recipes = await db.recipeRepo.getRecipesBySalePercentage(query.minPercentage!, req.log);
     res.json(recipes);
   } catch (error) {
     req.log.error({ error }, 'Error fetching recipes in /api/recipes/by-sale-percentage:');
@@ -67,7 +58,7 @@ router.get(
     // Explicitly parse req.query to apply coercion (string -> number) and default values
     const { query } = bySaleIngredientsSchema.parse({ query: req.query });
     const recipes = await db.recipeRepo.getRecipesByMinSaleIngredients(
-      query.minIngredients,
+      query.minIngredients!,
       req.log,
     );
     res.json(recipes);

View File

@@ -28,12 +28,6 @@ const expectLogger = expect.objectContaining({
 describe('Stats Routes (/api/stats)', () => {
   const app = createTestApp({ router: statsRouter, basePath: '/api/stats' });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });

   beforeEach(() => {
     vi.clearAllMocks();
   });

View File

@@ -3,6 +3,7 @@ import { Router, Request, Response, NextFunction } from 'express';
 import { z } from 'zod';
 import * as db from '../services/db/index.db';
 import { validateRequest } from '../middleware/validation.middleware';
+import { optionalNumeric } from '../utils/zodUtils';

 const router = Router();
@@ -10,8 +11,8 @@ const router = Router();
 // Define the query schema separately so we can use it to parse req.query in the handler
 const statsQuerySchema = z.object({
-  days: z.coerce.number().int().min(1).max(365).optional().default(30),
-  limit: z.coerce.number().int().min(1).max(50).optional().default(10),
+  days: optionalNumeric({ default: 30, min: 1, max: 365, integer: true }),
+  limit: optionalNumeric({ default: 10, min: 1, max: 50, integer: true }),
 });

 const mostFrequentSalesSchema = z.object({
@@ -31,7 +32,7 @@ router.get(
     // Even though validateRequest checks validity, it may not mutate req.query with the parsed result.
     const { days, limit } = statsQuerySchema.parse(req.query);
-    const items = await db.adminRepo.getMostFrequentSaleItems(days, limit, req.log);
+    const items = await db.adminRepo.getMostFrequentSaleItems(days!, limit!, req.log);
     res.json(items);
   } catch (error) {
     req.log.error(

View File

@@ -42,11 +42,6 @@ vi.mock('../services/logger.server', () => ({
 describe('System Routes (/api/system)', () => {
   const app = createTestApp({ router: systemRouter, basePath: '/api/system' });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });

   beforeEach(() => {
     // We cast here to get type-safe access to mock functions like .mockImplementation
     vi.clearAllMocks();

View File

@@ -5,13 +5,10 @@ import { z } from 'zod';
 import { logger } from '../services/logger.server';
 import { geocodingService } from '../services/geocodingService.server';
 import { validateRequest } from '../middleware/validation.middleware';
+import { requiredString } from '../utils/zodUtils';

 const router = Router();

-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));

 const geocodeSchema = z.object({
   body: z.object({
     address: requiredString('An address string is required.'),
@@ -49,7 +46,6 @@ router.get(
     }

     // Check if there was output to stderr, even if the exit code was 0 (success).
-    // This handles warnings or non-fatal errors that should arguably be treated as failures in this context.
     if (stderr && stderr.trim().length > 0) {
       logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr:');
       return next(new Error(`PM2 command produced an error: ${stderr}`));
@@ -89,6 +85,7 @@ router.post(
     res.json(coordinates);
   } catch (error) {
+    logger.error({ error }, 'Error geocoding address');
     next(error);
   }
 },

View File

@@ -173,12 +173,6 @@ describe('User Routes (/api/users)', () => {
   });

   const app = createTestApp({ router: userRouter, basePath, authenticatedUser: mockUserProfile });

-  // Add a basic error handler to capture errors passed to next(err) and return JSON.
-  // This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
-  app.use((err: any, req: any, res: any, next: any) => {
-    res.status(err.status || 500).json({ message: err.message, errors: err.errors });
-  });

   beforeEach(() => {
     // All tests in this block will use the authenticated app
   });
@@ -883,20 +877,41 @@ describe('User Routes (/api/users)', () => {
   });

   describe('Notification Routes', () => {
-    it('GET /notifications should return notifications for the user', async () => {
+    it('GET /notifications should return only unread notifications by default', async () => {
       const mockNotifications: Notification[] = [
         createMockNotification({ user_id: 'user-123', content: 'Test' }),
       ];
       vi.mocked(db.notificationRepo.getNotificationsForUser).mockResolvedValue(mockNotifications);

-      const response = await supertest(app).get('/api/users/notifications?limit=10&offset=0');
+      const response = await supertest(app).get('/api/users/notifications?limit=10');

       expect(response.status).toBe(200);
       expect(response.body).toEqual(mockNotifications);
       expect(db.notificationRepo.getNotificationsForUser).toHaveBeenCalledWith(
         'user-123',
         10,
-        0,
+        0, // default offset
+        false, // default includeRead
+        expectLogger,
+      );
+    });
+
+    it('GET /notifications?includeRead=true should return all notifications', async () => {
+      const mockNotifications: Notification[] = [
+        createMockNotification({ user_id: 'user-123', content: 'Read', is_read: true }),
+        createMockNotification({ user_id: 'user-123', content: 'Unread', is_read: false }),
+      ];
+      vi.mocked(db.notificationRepo.getNotificationsForUser).mockResolvedValue(mockNotifications);
+
+      const response = await supertest(app).get('/api/users/notifications?includeRead=true');
+
+      expect(response.status).toBe(200);
+      expect(response.body).toEqual(mockNotifications);
+      expect(db.notificationRepo.getNotificationsForUser).toHaveBeenCalledWith(
+        'user-123',
+        20, // default limit
+        0, // default offset
+        true, // includeRead from query param
         expectLogger,
       );
     });

View File

@@ -4,57 +4,24 @@ import passport from './passport.routes';
 import multer from 'multer';
 import path from 'path';
 import fs from 'node:fs/promises';
-import * as bcrypt from 'bcrypt';
-import zxcvbn from 'zxcvbn';
+import * as bcrypt from 'bcrypt'; // This was a duplicate, fixed.
 import { z } from 'zod';
-import * as db from '../services/db/index.db';
 import { logger } from '../services/logger.server';
 import { UserProfile } from '../types';
 import { userService } from '../services/userService';
 import { ForeignKeyConstraintError } from '../services/db/errors.db';
 import { validateRequest } from '../middleware/validation.middleware';
+import { validatePasswordStrength } from '../utils/authUtils';
+import {
+  requiredString,
+  numericIdParam,
+  optionalNumeric,
+  optionalBoolean,
+} from '../utils/zodUtils';
+import * as db from '../services/db/index.db';

 const router = express.Router();

-/**
- * Validates the strength of a password using zxcvbn.
- * @param password The password to check.
- * @returns An object with `isValid` and an optional `feedback` message.
- */
-const validatePasswordStrength = (password: string): { isValid: boolean; feedback?: string } => {
-  const MIN_PASSWORD_SCORE = 3; // Require a 'Good' or 'Strong' password (score 3 or 4)
-  const strength = zxcvbn(password);
-  if (strength.score < MIN_PASSWORD_SCORE) {
-    const feedbackMessage =
-      strength.feedback.warning ||
-      (strength.feedback.suggestions && strength.feedback.suggestions[0]);
-    return {
-      isValid: false,
-      feedback:
-        `Password is too weak. ${feedbackMessage || 'Please choose a stronger password.'}`.trim(),
-    };
-  }
-  return { isValid: true };
-};
-
-// Helper for consistent required string validation (handles missing/null/empty)
-const requiredString = (message: string) =>
-  z.preprocess((val) => val ?? '', z.string().min(1, message));
-
-// --- Zod Schemas for User Routes (as per ADR-003) ---
-const numericIdParam = (key: string) =>
-  z.object({
-    params: z.object({
-      [key]: z.coerce
-        .number()
-        .int()
-        .positive(`Invalid ID for parameter '${key}'. Must be a number.`),
-    }),
-  });

 const updateProfileSchema = z.object({
   body: z
     .object({ full_name: z.string().optional(), avatar_url: z.string().url().optional() })
@@ -93,8 +60,9 @@ const createShoppingListSchema = z.object({
 // Apply the JWT authentication middleware to all routes in this file.
 const notificationQuerySchema = z.object({
   query: z.object({
-    limit: z.coerce.number().int().positive().optional().default(20),
-    offset: z.coerce.number().int().nonnegative().optional().default(0),
+    limit: optionalNumeric({ default: 20, integer: true, positive: true }),
+    offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
+    includeRead: optionalBoolean({ default: false }),
   }),
 });
@@ -109,7 +77,7 @@ router.use(passport.authenticate('jwt', { session: false }));
 // Ensure the directory for avatar uploads exists.
 const avatarUploadDir = path.join(process.cwd(), 'public', 'uploads', 'avatars');
 fs.mkdir(avatarUploadDir, { recursive: true }).catch((err) => {
-  logger.error('Failed to create avatar upload directory:', err);
+  logger.error({ err }, 'Failed to create avatar upload directory');
 });

 // Define multer storage configuration. The `req.user` object will be available
@@ -154,6 +122,7 @@ router.post(
     );
     res.json(updatedProfile);
   } catch (error) {
+    logger.error({ error }, 'Error uploading avatar');
     next(error);
   }
 },
@@ -173,17 +142,17 @@ router.get(
   // Apply ADR-003 pattern for type safety
   try {
     const { query } = req as unknown as GetNotificationsRequest;
-    // Explicitly convert to numbers to ensure the repo receives correct types
-    const limit = query.limit ? Number(query.limit) : 20;
-    const offset = query.offset ? Number(query.offset) : 0;
+    const parsedQuery = notificationQuerySchema.parse({ query: req.query }).query;
     const notifications = await db.notificationRepo.getNotificationsForUser(
       userProfile.user.user_id,
-      limit,
-      offset,
+      parsedQuery.limit!,
+      parsedQuery.offset!,
+      parsedQuery.includeRead!,
       req.log,
     );
     res.json(notifications);
   } catch (error) {
+    logger.error({ error }, 'Error fetching notifications');
     next(error);
   }
 },
@@ -201,6 +170,7 @@ router.post(
     await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log);
     res.status(204).send(); // No Content
   } catch (error) {
+    logger.error({ error }, 'Error marking all notifications as read');
     next(error);
   }
 },
@@ -226,6 +196,7 @@ router.post(
     );
     res.status(204).send(); // Success, no content to return
   } catch (error) {
+    logger.error({ error }, 'Error marking notification as read');
     next(error);
   }
 },
@@ -378,11 +349,7 @@ router.post(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-    logger.error({
-      errorMessage,
-      body: req.body,
-    });
+    logger.error({ error, body: req.body }, 'Failed to add watched item');
     next(error);
   }
 },
@@ -486,11 +453,7 @@ router.post(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-    logger.error({
-      errorMessage,
-      body: req.body,
-    });
+    logger.error({ error, body: req.body }, 'Failed to create shopping list');
     next(error);
   }
 },
@@ -549,12 +512,7 @@ router.post(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-    logger.error({
-      errorMessage,
-      params: req.params,
-      body: req.body,
-    });
+    logger.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
    next(error);
   }
 },
@@ -694,11 +652,7 @@ router.put(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-    logger.error({
-      errorMessage,
-      body: req.body,
-    });
+    logger.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
     next(error);
   }
 },
@@ -742,11 +696,7 @@ router.put(
     if (error instanceof ForeignKeyConstraintError) {
       return res.status(400).json({ message: error.message });
     }
-    const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-    logger.error({
-      errorMessage,
-      body: req.body,
-    });
+    logger.error({ error, body: req.body }, 'Failed to set user appliances');
     next(error);
   }
 },
@@ -776,6 +726,7 @@ router.get(
     const address = await db.addressRepo.getAddressById(addressId, req.log); // This will throw NotFoundError if not found
     res.json(address);
   } catch (error) {
+    logger.error({ error }, 'Error fetching user address');
     next(error);
   }
 },
@@ -814,6 +765,7 @@ router.put(
     const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed.
     res.status(200).json({ message: 'Address updated successfully', address_id: addressId });
   } catch (error) {
+    logger.error({ error }, 'Error updating user address');
     next(error);
   }
 },
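
Note: the notification call sites above now pass an includeRead flag, but the repository diff itself is not in this compare. A hypothetical sketch of what getNotificationsForUser might now look like; the table and column names (notifications, is_read, created_at) and the pino Logger type are assumptions:

// Hypothetical shape of the updated repository method; the actual change is outside this diff.
import { getPool } from './connection.db';
import type { Logger } from 'pino';
import type { Notification } from '../../types';

export const getNotificationsForUser = async (
  userId: string,
  limit: number,
  offset: number,
  includeRead: boolean,
  log: Logger,
): Promise<Notification[]> => {
  // Only filter on is_read when the caller did not opt in to read notifications.
  const readFilter = includeRead ? '' : 'AND is_read = FALSE';
  const { rows } = await getPool().query(
    `SELECT * FROM notifications
      WHERE user_id = $1 ${readFilter}
      ORDER BY created_at DESC
      LIMIT $2 OFFSET $3`,
    [userId, limit, offset],
  );
  log.debug({ userId, count: rows.length }, 'Fetched notifications for user');
  return rows;
};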

View File

@@ -51,9 +51,7 @@ export class AiAnalysisService {
     // Normalize sources to a consistent format.
     const mappedSources = (response.sources || []).map(
       (s: RawSource) =>
-        (s.web
-          ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
-          : { uri: '', title: 'Untitled' }) as Source,
+        (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
     );
     return { ...response, sources: mappedSources };
   }
@@ -84,9 +82,7 @@ export class AiAnalysisService {
     // Normalize sources to a consistent format.
     const mappedSources = (response.sources || []).map(
       (s: RawSource) =>
-        (s.web
-          ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
-          : { uri: '', title: 'Untitled' }) as Source,
+        (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
     );
     return { ...response, sources: mappedSources };
   }

View File

@@ -4,7 +4,13 @@
* It communicates with the application's own backend endpoints, which then securely * It communicates with the application's own backend endpoints, which then securely
* call the Google AI services. This ensures no API keys are exposed on the client. * call the Google AI services. This ensures no API keys are exposed on the client.
*/ */
import type { FlyerItem, Store, MasterGroceryItem } from '../types'; import type {
FlyerItem,
Store,
MasterGroceryItem,
ProcessingStage,
GroundedResponse,
} from '../types';
import { logger } from './logger.client'; import { logger } from './logger.client';
import { apiFetch } from './apiClient'; import { apiFetch } from './apiClient';
@@ -20,14 +26,14 @@ export const uploadAndProcessFlyer = async (
file: File, file: File,
checksum: string, checksum: string,
tokenOverride?: string, tokenOverride?: string,
): Promise<Response> => { ): Promise<{ jobId: string }> => {
const formData = new FormData(); const formData = new FormData();
formData.append('flyerFile', file); formData.append('flyerFile', file);
formData.append('checksum', checksum); formData.append('checksum', checksum);
logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`); logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
return apiFetch( const response = await apiFetch(
'/ai/upload-and-process', '/ai/upload-and-process',
{ {
method: 'POST', method: 'POST',
@@ -35,20 +41,73 @@ export const uploadAndProcessFlyer = async (
}, },
{ tokenOverride }, { tokenOverride },
); );
if (!response.ok) {
let errorBody;
try {
errorBody = await response.json();
} catch (e) {
errorBody = { message: await response.text() };
}
// Throw a structured error so the component can inspect the status and body
throw { status: response.status, body: errorBody };
}
return response.json();
}; };
// Define the expected shape of the job status response
export interface JobStatus {
id: string;
state: 'completed' | 'failed' | 'active' | 'waiting' | 'delayed' | 'paused';
progress: {
stages?: ProcessingStage[];
estimatedTimeRemaining?: number;
message?: string;
} | null;
returnValue: {
flyerId?: number;
} | null;
failedReason: string | null;
}
/** /**
* Fetches the status of a background processing job. * Fetches the status of a background processing job.
* This is the second step in the new background processing flow. * This is the second step in the new background processing flow.
* @param jobId The ID of the job to check. * @param jobId The ID of the job to check.
* @param tokenOverride Optional token for testing. * @param tokenOverride Optional token for testing.
* @returns A promise that resolves to the API response with the job's status. * @returns A promise that resolves to the parsed job status object.
* @throws An error if the network request fails or if the response is not valid JSON.
*/ */
export const getJobStatus = async (jobId: string, tokenOverride?: string): Promise<Response> => { export const getJobStatus = async (
return apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride }); jobId: string,
tokenOverride?: string,
): Promise<JobStatus> => {
const response = await apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
if (!response.ok) {
let errorText = `API Error: ${response.status} ${response.statusText}`;
try {
const errorBody = await response.text();
if (errorBody) errorText = `API Error ${response.status}: ${errorBody}`;
} catch (e) {
// ignore if reading body fails
}
throw new Error(errorText);
}
try {
return await response.json();
} catch (error) {
const rawText = await response.text();
throw new Error(`Failed to parse JSON response from server. Body: ${rawText}`);
}
}; };
export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Promise<Response> => { export const isImageAFlyer = (
imageFile: File,
tokenOverride?: string,
): Promise<Response> => {
const formData = new FormData(); const formData = new FormData();
formData.append('image', imageFile); formData.append('image', imageFile);
@@ -64,7 +123,7 @@ export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Pr
); );
}; };
export const extractAddressFromImage = async ( export const extractAddressFromImage = (
imageFile: File, imageFile: File,
tokenOverride?: string, tokenOverride?: string,
): Promise<Response> => { ): Promise<Response> => {
@@ -81,7 +140,7 @@ export const extractAddressFromImage = async (
); );
}; };
export const extractLogoFromImage = async ( export const extractLogoFromImage = (
imageFiles: File[], imageFiles: File[],
tokenOverride?: string, tokenOverride?: string,
): Promise<Response> => {
@@ -100,7 +159,7 @@ export const extractLogoFromImage = async (
);
};
-export const getQuickInsights = async (
+export const getQuickInsights = (
items: Partial<FlyerItem>[],
signal?: AbortSignal,
tokenOverride?: string,
@@ -117,7 +176,7 @@ export const getQuickInsights = async (
);
};
-export const getDeepDiveAnalysis = async (
+export const getDeepDiveAnalysis = (
items: Partial<FlyerItem>[],
signal?: AbortSignal,
tokenOverride?: string,
@@ -134,7 +193,7 @@ export const getDeepDiveAnalysis = async (
);
};
-export const searchWeb = async (
+export const searchWeb = (
query: string,
signal?: AbortSignal,
tokenOverride?: string,
@@ -179,7 +238,7 @@ export const planTripWithMaps = async (
* @param prompt A description of the image to generate (e.g., a meal plan).
* @returns A base64-encoded string of the generated PNG image.
*/
-export const generateImageFromText = async (
+export const generateImageFromText = (
prompt: string,
signal?: AbortSignal,
tokenOverride?: string,
@@ -202,7 +261,7 @@ export const generateImageFromText = async (
* @param text The text to be spoken.
* @returns A base64-encoded string of the raw audio data.
*/
-export const generateSpeechFromText = async (
+export const generateSpeechFromText = (
text: string,
signal?: AbortSignal,
tokenOverride?: string,
@@ -259,7 +318,7 @@ export const startVoiceSession = (callbacks: {
* @param tokenOverride Optional token for testing.
* @returns A promise that resolves to the API response containing the extracted text.
*/
-export const rescanImageArea = async (
+export const rescanImageArea = (
imageFile: File,
cropArea: { x: number; y: number; width: number; height: number },
extractionType: 'store_name' | 'dates' | 'item_details',
@@ -270,7 +329,11 @@ export const rescanImageArea = async (
formData.append('cropArea', JSON.stringify(cropArea));
formData.append('extractionType', extractionType);
-return apiFetch('/ai/rescan-area', { method: 'POST', body: formData }, { tokenOverride });
+return apiFetch(
+  '/ai/rescan-area',
+  { method: 'POST', body: formData },
+  { tokenOverride },
+);
};
/**
@@ -278,7 +341,7 @@ export const rescanImageArea = async (
* @param watchedItems An array of the user's watched master grocery items.
* @returns A promise that resolves to the raw `Response` object from the API.
*/
-export const compareWatchedItemPrices = async (
+export const compareWatchedItemPrices = (
watchedItems: MasterGroceryItem[],
signal?: AbortSignal,
): Promise<Response> => {
@@ -292,5 +355,4 @@ export const compareWatchedItemPrices = async (
body: JSON.stringify({ items: watchedItems }),
},
{ signal },
-);
-};
+)};
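A side note on the pattern in this file's hunks: dropping `async` from a function that directly returns a promise removes one layer of promise wrapping without changing the resolved value. The one behavioral nuance is that a synchronous throw before the `return` now surfaces as a thrown exception rather than a rejected promise. A minimal sketch, with `fetchJson` as a hypothetical stand-in for `apiFetch`:

const fetchJson = (url: string): Promise<Response> => fetch(url);

// Redundant: `async` re-wraps a value that is already a promise.
const getInsightsWrapped = async (query: string): Promise<Response> => {
  return fetchJson(`/ai/insights?q=${encodeURIComponent(query)}`);
};

// Equivalent and leaner, matching the diff above: no `await`, no `async`.
const getInsights = (query: string): Promise<Response> => {
  return fetchJson(`/ai/insights?q=${encodeURIComponent(query)}`);
};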

View File

@@ -166,6 +166,127 @@ describe('AI Service (Server)', () => {
});
});
describe('Model Fallback Logic', () => {
const originalEnv = process.env;
beforeEach(() => {
vi.unstubAllEnvs();
process.env = { ...originalEnv, GEMINI_API_KEY: 'test-key' };
vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
});
afterEach(() => {
process.env = originalEnv;
vi.unstubAllEnvs();
});
it('should try the next model if the first one fails with a quota error', async () => {
// Arrange
const { AIService } = await import('./aiService.server');
const { logger } = await import('./logger.server');
const serviceWithFallback = new AIService(logger);
const quotaError = new Error('User rate limit exceeded due to quota');
const successResponse = { text: 'Success from fallback model', candidates: [] };
// Mock the generateContent function to fail on the first call and succeed on the second
mockGenerateContent.mockRejectedValueOnce(quotaError).mockResolvedValueOnce(successResponse);
const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
// Act
const result = await (serviceWithFallback as any).aiClient.generateContent(request);
// Assert
expect(result).toEqual(successResponse);
expect(mockGenerateContent).toHaveBeenCalledTimes(2);
// Check first call
expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
model: 'gemini-2.5-flash',
...request,
});
// Check second call
expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
model: 'gemini-3-flash',
...request,
});
// Check that a warning was logged
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining(
"Model 'gemini-2.5-flash' failed due to quota/rate limit. Trying next model.",
),
);
});
it('should throw immediately for non-retriable errors', async () => {
// Arrange
const { AIService } = await import('./aiService.server');
const { logger } = await import('./logger.server');
const serviceWithFallback = new AIService(logger);
const nonRetriableError = new Error('Invalid API Key');
mockGenerateContent.mockRejectedValueOnce(nonRetriableError);
const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
// Act & Assert
await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
'Invalid API Key',
);
expect(mockGenerateContent).toHaveBeenCalledTimes(1);
expect(logger.error).toHaveBeenCalledWith(
{ error: nonRetriableError },
`[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
);
});
it('should throw the last error if all models fail', async () => {
// Arrange
const { AIService } = await import('./aiService.server');
const { logger } = await import('./logger.server');
const serviceWithFallback = new AIService(logger);
const quotaError1 = new Error('Quota exhausted for model 1');
const quotaError2 = new Error('429 Too Many Requests for model 2');
const quotaError3 = new Error('RESOURCE_EXHAUSTED for model 3');
mockGenerateContent
.mockRejectedValueOnce(quotaError1)
.mockRejectedValueOnce(quotaError2)
.mockRejectedValueOnce(quotaError3);
const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
// Act & Assert
await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
quotaError3,
);
expect(mockGenerateContent).toHaveBeenCalledTimes(3);
expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
model: 'gemini-2.5-flash',
...request,
});
expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
model: 'gemini-3-flash',
...request,
});
expect(mockGenerateContent).toHaveBeenNthCalledWith(3, {
model: 'gemini-2.5-flash-lite',
...request,
});
expect(logger.error).toHaveBeenCalledWith(
{ lastError: quotaError3 },
'[AIService Adapter] All AI models failed. Throwing last known error.',
);
});
});
describe('extractItemsFromReceiptImage', () => {
it('should extract items from a valid AI response', async () => {
const mockAiResponseText = `[

View File

@@ -72,6 +72,7 @@ export class AIService {
private fs: IFileSystem;
private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
private logger: Logger;
+private readonly models = ['gemini-2.5-flash', 'gemini-3-flash', 'gemini-2.5-flash-lite'];
constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
this.logger = logger;
@@ -121,17 +122,11 @@ export class AIService {
);
}
-// do not change "gemini-2.5-flash" - this is correct
-const modelName = 'gemini-2.5-flash';
// We create a shim/adapter that matches the old structure but uses the new SDK call pattern.
// This preserves the dependency injection pattern used throughout the class.
this.aiClient = genAI
? {
generateContent: async (request) => {
-// The model name is now injected here, into every call, as the new SDK requires.
-// Architectural guard clause: All requests from this service must have content.
-// This prevents sending invalid requests to the API and satisfies TypeScript's strictness.
if (!request.contents || request.contents.length === 0) {
this.logger.error(
{ request },
@@ -140,14 +135,7 @@ export class AIService {
throw new Error('AIService.generateContent requires at least one content element.');
}
-// Architectural Fix: After the guard clause, assign the guaranteed-to-exist element
-// to a new constant. This provides a definitive type-safe variable for the compiler.
-const firstContent = request.contents[0];
-this.logger.debug(
-{ modelName, requestParts: firstContent.parts?.length ?? 0 },
-'[AIService] Calling actual generateContent via adapter.',
-);
-return genAI.models.generateContent({ model: modelName, ...request });
+return this._generateWithFallback(genAI, request);
},
}
: {
@@ -182,6 +170,54 @@ export class AIService {
this.logger.info('---------------- [AIService] Constructor End ----------------'); this.logger.info('---------------- [AIService] Constructor End ----------------');
} }
private async _generateWithFallback(
genAI: GoogleGenAI,
request: { contents: Content[]; tools?: Tool[] },
): Promise<GenerateContentResponse> {
let lastError: Error | null = null;
for (const modelName of this.models) {
try {
this.logger.info(
`[AIService Adapter] Attempting to generate content with model: ${modelName}`,
);
const result = await genAI.models.generateContent({ model: modelName, ...request });
// If the call succeeds, return the result immediately.
return result;
} catch (error: unknown) {
lastError = error instanceof Error ? error : new Error(String(error));
const errorMessage = (lastError.message || '').toLowerCase(); // Make case-insensitive
// Check for specific error messages indicating quota issues or model unavailability.
if (
errorMessage.includes('quota') ||
errorMessage.includes('429') || // HTTP 429 Too Many Requests
errorMessage.includes('resource_exhausted') || // Make case-insensitive
errorMessage.includes('model is overloaded')
) {
this.logger.warn(
`[AIService Adapter] Model '${modelName}' failed due to quota/rate limit. Trying next model. Error: ${errorMessage}`,
);
continue; // Try the next model in the list.
} else {
// For other errors (e.g., invalid input, safety settings), fail immediately.
this.logger.error(
{ error: lastError },
`[AIService Adapter] Model '${modelName}' failed with a non-retriable error.`,
);
throw lastError;
}
}
}
// If all models in the list have failed, throw the last error encountered.
this.logger.error(
{ lastError },
'[AIService Adapter] All AI models failed. Throwing last known error.',
);
throw lastError || new Error('All AI models failed to generate content.');
}
private async serverFileToGenerativePart(path: string, mimeType: string) {
const fileData = await this.fs.readFile(path);
return {
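The fallback adapter above is SDK-specific, but the pattern generalizes. A minimal sketch, decoupled from the Gemini client, with illustrative names (`withModelFallback`, `callModel`) that are not part of the service's actual API:

// `isRetriable` mirrors the quota/429/RESOURCE_EXHAUSTED checks in _generateWithFallback.
const isRetriable = (err: Error): boolean => {
  const msg = (err.message || '').toLowerCase();
  return (
    msg.includes('quota') ||
    msg.includes('429') ||
    msg.includes('resource_exhausted') ||
    msg.includes('model is overloaded')
  );
};

// Try each model in order; fail fast on non-retriable errors.
async function withModelFallback<T>(
  models: string[],
  callModel: (model: string) => Promise<T>,
): Promise<T> {
  let lastError: Error | null = null;
  for (const model of models) {
    try {
      return await callModel(model);
    } catch (err) {
      lastError = err instanceof Error ? err : new Error(String(err));
      if (!isRetriable(lastError)) throw lastError;
    }
  }
  throw lastError ?? new Error('All models failed.');
}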

View File

@@ -176,15 +176,13 @@ describe('API Client', () => {
// We expect the promise to still resolve with the bad response, but log an error.
await apiClient.apiFetch('/some/failing/endpoint');
-// FIX: Use stringContaining to be resilient to port numbers (e.g., localhost:3001)
-// This checks for the essential parts of the log message without being brittle.
expect(logger.error).toHaveBeenCalledWith(
-expect.stringContaining('apiFetch: Request to http://'),
-'Internal Server Error',
-);
-expect(logger.error).toHaveBeenCalledWith(
-expect.stringContaining('/api/some/failing/endpoint failed with status 500'),
-'Internal Server Error',
+expect.objectContaining({
+status: 500,
+body: 'Internal Server Error',
+url: expect.stringContaining('/some/failing/endpoint'),
+}),
+'apiFetch: Request failed',
);
});
@@ -242,10 +240,6 @@ describe('API Client', () => {
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
error: apiError,
});
-expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
-error: apiError,
-});
});
it('logSearchQuery should log a warning on failure', async () => {
@@ -259,8 +253,6 @@ describe('API Client', () => {
was_successful: false,
});
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
-expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
});
});

View File

@@ -1,6 +1,7 @@
// src/services/apiClient.ts
import { Profile, ShoppingListItem, SearchQuery, Budget, Address } from '../types';
import { logger } from './logger.client';
+import { eventBus } from './eventBus';
// This constant should point to your backend API.
// It's often a good practice to store this in an environment variable.
@@ -62,12 +63,12 @@ const refreshToken = async (): Promise<string> => {
logger.info('Successfully refreshed access token.');
return data.token;
} catch (error) {
-logger.error('Failed to refresh token. User will be logged out.', { error });
+logger.error({ error }, 'Failed to refresh token. User session has expired.');
// Only perform browser-specific actions if in the browser environment.
if (typeof window !== 'undefined') {
localStorage.removeItem('authToken');
-// A hard redirect is a simple way to reset the app state to logged-out.
-// window.location.href = '/'; // Removed to allow the caller to handle session expiry.
+// Dispatch a global event that the UI layer can listen for to handle session expiry.
+eventBus.dispatch('sessionExpired');
}
throw error;
}
@@ -144,9 +145,8 @@ export const apiFetch = async (
// --- DEBUG LOGGING for failed requests ---
if (!response.ok) {
const responseText = await response.clone().text();
-logger.error(
-`apiFetch: Request to ${fullUrl} failed with status ${response.status}. Response body:`,
-responseText,
+logger.error(
+{ url: fullUrl, status: response.status, body: responseText },
+'apiFetch: Request failed',
);
}
// --- END DEBUG LOGGING ---
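A hedged sketch of how a UI layer might consume the 'sessionExpired' event dispatched above, using the eventBus module introduced later in this diff; `onSessionExpired` and the `/login` route are illustrative, not part of the diff:

import { eventBus } from './eventBus';

// A hypothetical top-level handler; the real app decides what "logout" means.
const onSessionExpired = () => {
  window.location.assign('/login');
};

eventBus.on('sessionExpired', onSessionExpired);
// And when the listening component goes away:
eventBus.off('sessionExpired', onSessionExpired);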

View File

@@ -32,7 +32,7 @@ describe('Notification DB Service', () => {
});
describe('getNotificationsForUser', () => {
-it('should execute the correct query with limit and offset and return notifications', async () => {
+it('should only return unread notifications by default', async () => {
const mockNotifications: Notification[] = [
createMockNotification({
notification_id: 1,
@@ -43,30 +43,59 @@ describe('Notification DB Service', () => {
];
mockPoolInstance.query.mockResolvedValue({ rows: mockNotifications });
-const result = await notificationRepo.getNotificationsForUser('user-123', 10, 5, mockLogger);
+const result = await notificationRepo.getNotificationsForUser(
+'user-123',
+10,
+5,
+false,
+mockLogger,
+);
expect(mockPoolInstance.query).toHaveBeenCalledWith(
-expect.stringContaining('SELECT * FROM public.notifications'),
+expect.stringContaining('is_read = false'),
['user-123', 10, 5],
);
expect(result).toEqual(mockNotifications);
});
it('should return all notifications when includeRead is true', async () => {
const mockNotifications: Notification[] = [
createMockNotification({ is_read: true }),
createMockNotification({ is_read: false }),
];
mockPoolInstance.query.mockResolvedValue({ rows: mockNotifications });
await notificationRepo.getNotificationsForUser('user-123', 10, 0, true, mockLogger);
// The query should NOT contain the is_read filter
expect(mockPoolInstance.query.mock.calls[0][0]).not.toContain('is_read = false');
expect(mockPoolInstance.query).toHaveBeenCalledWith(expect.any(String), ['user-123', 10, 0]);
});
it('should return an empty array if the user has no notifications', async () => {
mockPoolInstance.query.mockResolvedValue({ rows: [] });
-const result = await notificationRepo.getNotificationsForUser('user-456', 10, 0, mockLogger);
+const result = await notificationRepo.getNotificationsForUser(
+'user-456',
+10,
+0,
+false,
+mockLogger,
+);
expect(result).toEqual([]);
-expect(mockPoolInstance.query).toHaveBeenCalledWith(expect.any(String), ['user-456', 10, 0]);
+expect(mockPoolInstance.query).toHaveBeenCalledWith(
+expect.stringContaining('is_read = false'),
+['user-456', 10, 0],
+);
});
it('should throw an error if the database query fails', async () => {
const dbError = new Error('DB Error');
mockPoolInstance.query.mockRejectedValue(dbError);
await expect(
-notificationRepo.getNotificationsForUser('user-123', 10, 5, mockLogger),
+notificationRepo.getNotificationsForUser('user-123', 10, 5, false, mockLogger),
).rejects.toThrow('Failed to retrieve notifications.');
expect(mockLogger.error).toHaveBeenCalledWith(
-{ err: dbError, userId: 'user-123', limit: 10, offset: 5 },
+{ err: dbError, userId: 'user-123', limit: 10, offset: 5, includeRead: false },
'Database error in getNotificationsForUser',
);
});

View File

@@ -95,20 +95,24 @@ export class NotificationRepository {
userId: string,
limit: number,
offset: number,
+includeRead: boolean,
logger: Logger,
): Promise<Notification[]> {
try {
-const res = await this.db.query<Notification>(
-`SELECT * FROM public.notifications
-WHERE user_id = $1
-ORDER BY created_at DESC
-LIMIT $2 OFFSET $3`,
-[userId, limit, offset],
-);
+const params: (string | number)[] = [userId, limit, offset];
+let query = `SELECT * FROM public.notifications WHERE user_id = $1`;
+
+if (!includeRead) {
+query += ` AND is_read = false`;
+}
+
+query += ` ORDER BY created_at DESC LIMIT $2 OFFSET $3`;
+
+const res = await this.db.query<Notification>(query, params);
return res.rows;
} catch (error) {
logger.error(
-{ err: error, userId, limit, offset },
+{ err: error, userId, limit, offset, includeRead },
'Database error in getNotificationsForUser',
);
throw new Error('Failed to retrieve notifications.');
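A hedged usage sketch for the new signature; `repo` and `log` stand in for an instantiated NotificationRepository and a pino logger from the surrounding application:

async function loadInbox(repo: NotificationRepository, log: Logger) {
  const unreadOnly = await repo.getNotificationsForUser('user-123', 20, 0, false, log);
  const fullHistory = await repo.getNotificationsForUser('user-123', 20, 0, true, log);
  return { unreadOnly, fullHistory };
}

Note the design choice: because the `is_read = false` filter adds no bind parameters, the `$2`/`$3` placeholders for LIMIT/OFFSET remain valid in both branches of the dynamically built query.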

View File

@@ -0,0 +1,53 @@
// src/services/db/price.db.ts
import type { Logger } from 'pino';
import type { PriceHistoryData } from '../../types';
import { getPool } from './connection.db';
/**
* Repository for fetching price-related data.
*/
export const priceRepo = {
/**
* Fetches the historical price data for a given list of master item IDs.
* It retrieves the price in cents and the start date of the flyer for each item.
*
* @param masterItemIds An array of master grocery item IDs.
* @param logger The pino logger instance.
* @param limit The maximum number of records to return.
* @param offset The number of records to skip.
* @returns A promise that resolves to an array of price history data points.
*/
async getPriceHistory(
masterItemIds: number[],
logger: Logger,
limit: number = 1000,
offset: number = 0,
): Promise<PriceHistoryData[]> {
if (masterItemIds.length === 0) {
return [];
}
const query = `
SELECT
fi.master_item_id,
fi.price_in_cents,
f.valid_from AS date
FROM public.flyer_items fi
JOIN public.flyers f ON fi.flyer_id = f.flyer_id
WHERE
fi.master_item_id = ANY($1::int[])
AND f.valid_from IS NOT NULL
AND fi.price_in_cents IS NOT NULL
ORDER BY
fi.master_item_id, f.valid_from ASC
LIMIT $2 OFFSET $3;
`;
const result = await getPool().query(query, [masterItemIds, limit, offset]);
logger.debug(
{ count: result.rows.length, itemIds: masterItemIds.length, limit, offset },
'Fetched price history from database.',
);
return result.rows;
},
};
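A hedged sketch of paging through the new limit/offset parameters; the caller location (and therefore the `'../types'` import path) and the helper name are assumptions:

import type { Logger } from 'pino';
import type { PriceHistoryData } from '../types'; // path assumes a caller in src/services
import { priceRepo } from './db/price.db';

// Pages through price history in chunks; the 1000-row page size mirrors the
// repository's own default limit.
export async function fetchAllPriceHistory(
  itemIds: number[],
  log: Logger,
): Promise<PriceHistoryData[]> {
  const pageSize = 1000;
  const all: PriceHistoryData[] = [];
  for (let offset = 0; ; offset += pageSize) {
    const page = await priceRepo.getPriceHistory(itemIds, log, pageSize, offset);
    all.push(...page);
    if (page.length < pageSize) break; // a short page means we've reached the end
  }
  return all;
}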

src/services/eventBus.ts Normal file
View File

@@ -0,0 +1,31 @@
// src/services/eventBus.ts
/**
* A simple, generic event bus for cross-component communication without direct coupling.
* This is particularly useful for broadcasting application-wide events, such as session expiry.
*/
type EventCallback = (data?: any) => void;
class EventBus {
private listeners: { [key: string]: EventCallback[] } = {};
on(event: string, callback: EventCallback): void {
if (!this.listeners[event]) {
this.listeners[event] = [];
}
this.listeners[event].push(callback);
}
off(event: string, callback: EventCallback): void {
if (!this.listeners[event]) return;
this.listeners[event] = this.listeners[event].filter((l) => l !== callback);
}
dispatch(event: string, data?: any): void {
if (!this.listeners[event]) return;
this.listeners[event].forEach((callback) => callback(data));
}
}
export const eventBus = new EventBus();
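An optional hardening sketch, not part of the diff: a typed event map would catch event-name typos like 'sessionExpred' at compile time. The `AppEvents` shape and payload are illustrative assumptions.

type AppEvents = {
  sessionExpired: { reason?: string };
};

class TypedEventBus<Events extends Record<string, unknown>> {
  private listeners: { [K in keyof Events]?: Array<(data: Events[K]) => void> } = {};

  on<K extends keyof Events>(event: K, cb: (data: Events[K]) => void): void {
    const list = this.listeners[event] ?? [];
    list.push(cb);
    this.listeners[event] = list;
  }

  dispatch<K extends keyof Events>(event: K, data: Events[K]): void {
    this.listeners[event]?.forEach((cb) => cb(data));
  }
}

const typedBus = new TypedEventBus<AppEvents>();
typedBus.on('sessionExpired', ({ reason }) => console.log('session expired:', reason));
typedBus.dispatch('sessionExpired', { reason: 'refresh-failed' });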

View File

@@ -87,7 +87,7 @@ describe('Geocoding Service', () => {
// Assert
expect(result).toEqual(coordinates);
expect(logger.error).toHaveBeenCalledWith(
-{ err: 'Redis down', cacheKey: expect.any(String) },
+{ err: expect.any(Error), cacheKey: expect.any(String) },
'Redis GET or JSON.parse command failed. Proceeding without cache.',
);
expect(mockGoogleService.geocode).toHaveBeenCalled(); // Should still proceed to fetch
@@ -107,7 +107,7 @@ describe('Geocoding Service', () => {
expect(mocks.mockRedis.get).toHaveBeenCalledWith(cacheKey);
// The service should log the JSON parsing error and continue
expect(logger.error).toHaveBeenCalledWith(
-{ err: expect.any(String), cacheKey: expect.any(String) },
+{ err: expect.any(SyntaxError), cacheKey: expect.any(String) },
'Redis GET or JSON.parse command failed. Proceeding without cache.',
);
expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1);
@@ -185,7 +185,7 @@ describe('Geocoding Service', () => {
// Assert
expect(result).toEqual(coordinates);
expect(logger.error).toHaveBeenCalledWith(
-{ err: 'Network Error' },
+{ err: expect.any(Error) },
expect.stringContaining('An error occurred while calling the Google Maps Geocoding API'),
);
expect(mockNominatimService.geocode).toHaveBeenCalledWith(address, logger);
@@ -223,7 +223,7 @@ describe('Geocoding Service', () => {
expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1);
expect(mocks.mockRedis.set).toHaveBeenCalledTimes(1);
expect(logger.error).toHaveBeenCalledWith(
-{ err: 'Redis SET failed', cacheKey: expect.any(String) },
+{ err: expect.any(Error), cacheKey: expect.any(String) },
'Redis SET command failed. Result will not be cached.',
);
});
@@ -271,7 +271,7 @@ describe('Geocoding Service', () => {
// Act & Assert
await expect(geocodingService.clearGeocodeCache(logger)).rejects.toThrow(redisError);
expect(logger.error).toHaveBeenCalledWith(
-{ err: redisError.message },
+{ err: expect.any(Error) },
'Failed to clear geocode cache from Redis.',
);
expect(mocks.mockRedis.del).not.toHaveBeenCalled();

View File

@@ -25,10 +25,7 @@ export class GeocodingService {
return JSON.parse(cached);
}
} catch (error) {
-logger.error(
-{ err: error instanceof Error ? error.message : error, cacheKey },
-'Redis GET or JSON.parse command failed. Proceeding without cache.',
-);
+logger.error({ err: error, cacheKey }, 'Redis GET or JSON.parse command failed. Proceeding without cache.');
}
if (process.env.GOOGLE_MAPS_API_KEY) {
@@ -44,8 +41,8 @@ export class GeocodingService {
);
} catch (error) {
logger.error(
-{ err: error instanceof Error ? error.message : error },
+{ err: error },
'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.',
);
}
} else {
@@ -72,10 +69,7 @@ export class GeocodingService {
try {
await redis.set(cacheKey, JSON.stringify(result), 'EX', 60 * 60 * 24 * 30); // Cache for 30 days
} catch (error) {
-logger.error(
-{ err: error instanceof Error ? error.message : error, cacheKey },
-'Redis SET command failed. Result will not be cached.',
-);
+logger.error({ err: error, cacheKey }, 'Redis SET command failed. Result will not be cached.');
}
}
@@ -98,10 +92,7 @@ export class GeocodingService {
logger.info(`Successfully deleted ${totalDeleted} geocode cache entries.`);
return totalDeleted;
} catch (error) {
-logger.error(
-{ err: error instanceof Error ? error.message : error },
-'Failed to clear geocode cache from Redis.',
-);
+logger.error({ err: error }, 'Failed to clear geocode cache from Redis.');
throw error;
}
}

View File

@@ -34,6 +34,9 @@ export const logger = pino({
'*.body.password',
'*.body.newPassword',
'*.body.currentPassword',
+'*.body.confirmPassword',
+'*.body.refreshToken',
+'*.body.token',
],
censor: '[REDACTED]',
},
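For context, a minimal sketch of how pino applies these redact paths; the `*` wildcard matches a single key level, so the rule covers any top-level object that carries a `body`:

import pino from 'pino';

const demoLogger = pino({
  redact: {
    paths: ['*.body.password', '*.body.token', '*.body.refreshToken'],
    censor: '[REDACTED]',
  },
});

demoLogger.info({ req: { body: { password: 'hunter2', token: 'abc' } } }, 'login attempt');
// => logs { req: { body: { password: '[REDACTED]', token: '[REDACTED]' } } }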

View File

@@ -1,7 +1,7 @@
// src/services/queueService.server.test.ts
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { logger as mockLogger } from './logger.server';
-import { EventEmitter } from 'node:events';
+import { EventEmitter } from 'node:events'; // This was a duplicate, fixed.
import type { Job, Worker } from 'bullmq';
import type { Mock } from 'vitest';
@@ -31,6 +31,7 @@ mockRedisConnection.quit = vi.fn().mockResolvedValue('OK');
// We make it a mock function that returns our shared `mockRedisConnection` instance.
vi.mock('ioredis', () => ({
default: vi.fn(function () {
+// This was a duplicate, fixed.
return mockRedisConnection;
}),
}));
@@ -51,26 +52,35 @@ vi.mock('bullmq', () => ({
this.add = vi.fn();
this.close = vi.fn().mockResolvedValue(undefined);
return this;
-}),
+}), // This was a duplicate, fixed.
+UnrecoverableError: class UnrecoverableError extends Error {},
}));
vi.mock('./logger.server', () => ({
logger: {
info: vi.fn(),
error: vi.fn(),
-warn: vi.fn(),
+warn: vi.fn(), // This was a duplicate, fixed.
debug: vi.fn(),
+child: vi.fn().mockReturnThis(),
},
}));
// Mock other dependencies that are not the focus of this test file.
vi.mock('./aiService.server');
vi.mock('./emailService.server');
-vi.mock('./db/index.db');
+vi.mock('./db/index.db'); // This was a duplicate, fixed.
+vi.mock('./flyerProcessingService.server');
+vi.mock('./flyerDataTransformer');
-describe('Queue Service Setup and Lifecycle', () => {
+describe('Worker Service Lifecycle', () => {
-let gracefulShutdown: (signal: string) => Promise<void>;
+let gracefulShutdown: (signal: string) => Promise<void>; // This was a duplicate, fixed.
-let flyerWorker: Worker, emailWorker: Worker, analyticsWorker: Worker, cleanupWorker: Worker;
+let flyerWorker: Worker,
+emailWorker: Worker,
+analyticsWorker: Worker,
+cleanupWorker: Worker,
+weeklyAnalyticsWorker: Worker,
+tokenCleanupWorker: Worker;
beforeEach(async () => {
vi.clearAllMocks();
@@ -79,22 +89,27 @@ describe('Queue Service Setup and Lifecycle', () => {
vi.resetModules();
// Dynamically import the modules after mocks are set up
-const queueService = await import('./queueService.server');
+const workerService = await import('./workers.server');
// Capture the imported instances for use in tests
-gracefulShutdown = queueService.gracefulShutdown;
-flyerWorker = queueService.flyerWorker;
-emailWorker = queueService.emailWorker;
-analyticsWorker = queueService.analyticsWorker;
-cleanupWorker = queueService.cleanupWorker;
+gracefulShutdown = workerService.gracefulShutdown;
+flyerWorker = workerService.flyerWorker;
+emailWorker = workerService.emailWorker;
+analyticsWorker = workerService.analyticsWorker;
+cleanupWorker = workerService.cleanupWorker;
+weeklyAnalyticsWorker = workerService.weeklyAnalyticsWorker;
+tokenCleanupWorker = workerService.tokenCleanupWorker;
});
afterEach(() => {
// Clean up all event listeners on the mock connection to prevent open handles.
mockRedisConnection.removeAllListeners();
+vi.useRealTimers();
});
it('should log a success message when Redis connects', () => {
+// Re-import redis.server to trigger its event listeners with the mock
+import('./redis.server');
// Act: Simulate the 'connect' event on the mock Redis connection
mockRedisConnection.emit('connect');
@@ -103,6 +118,7 @@ describe('Queue Service Setup and Lifecycle', () => {
});
it('should log an error message when Redis connection fails', () => {
+import('./redis.server');
const redisError = new Error('Connection refused');
mockRedisConnection.emit('error', redisError);
expect(mockLogger.error).toHaveBeenCalledWith({ err: redisError }, '[Redis] Connection error.');
@@ -111,7 +127,14 @@ describe('Queue Service Setup and Lifecycle', () => {
it('should attach completion and failure listeners to all workers', () => {
// The workers are instantiated when the module is imported in beforeEach.
// We just need to check that the 'on' method was called for each event.
-const workers = [flyerWorker, emailWorker, analyticsWorker, cleanupWorker];
+const workers = [
+flyerWorker,
+emailWorker,
+analyticsWorker,
+cleanupWorker,
+weeklyAnalyticsWorker,
+tokenCleanupWorker,
+];
for (const worker of workers) {
expect(worker.on).toHaveBeenCalledWith('completed', expect.any(Function));
expect(worker.on).toHaveBeenCalledWith('failed', expect.any(Function));
@@ -171,15 +194,40 @@ describe('Queue Service Setup and Lifecycle', () => {
});
it('should close all workers, queues, the redis connection, and exit the process', async () => {
+// We need to import the queues to check if their close methods are called.
+const {
+flyerQueue,
+emailQueue,
+analyticsQueue,
+cleanupQueue,
+weeklyAnalyticsQueue,
+tokenCleanupQueue,
+} = await import('./queues.server');
await gracefulShutdown('SIGINT');
-expect((flyerWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
-expect((emailWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
-expect((analyticsWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
-expect((cleanupWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
+// Verify workers are closed
+expect((flyerWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
+expect((emailWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
+expect((analyticsWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
+expect((cleanupWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
+expect((weeklyAnalyticsWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
+expect((tokenCleanupWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
+// Verify queues are closed
+expect((flyerQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
+expect((emailQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
+expect((analyticsQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
+expect((cleanupQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
+expect((weeklyAnalyticsQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
+expect((tokenCleanupQueue as unknown as MockQueueInstance).close).toHaveBeenCalled();
// Verify the redis connection is also closed
expect(mockRedisConnection.quit).toHaveBeenCalledTimes(1);
+// Check for the correct success log message from workers.server.ts
expect(mockLogger.info).toHaveBeenCalledWith(
-'[Shutdown] All workers, queues, and connections closed successfully.',
+'[Shutdown] All resources closed successfully.',
);
expect(processExitSpy).toHaveBeenCalledWith(0);
});
@@ -192,12 +240,34 @@ describe('Queue Service Setup and Lifecycle', () => {
await gracefulShutdown('SIGTERM');
// It should still attempt to close all workers
-expect((emailWorker as unknown as MockQueueInstance).close).toHaveBeenCalled();
+expect((emailWorker as unknown as MockWorkerInstance).close).toHaveBeenCalled();
expect(mockLogger.error).toHaveBeenCalledWith(
{ err: closeError, resource: 'flyerWorker' },
-'[Shutdown] Error closing resource.',
+`[Shutdown] Error closing flyerWorker.`,
);
expect(processExitSpy).toHaveBeenCalledWith(1);
});
it('should timeout if shutdown takes too long', async () => {
vi.useFakeTimers();
// Make one of the close calls hang indefinitely
(flyerWorker.close as Mock).mockReturnValue(new Promise(() => {}));
// Run shutdown but don't await it fully, as it will hang
const shutdownPromise = gracefulShutdown('SIGTERM');
// Advance timers past the timeout threshold
await vi.advanceTimersByTimeAsync(31000);
// Now await the promise to see the timeout result
await shutdownPromise;
expect(mockLogger.error).toHaveBeenCalledWith(
`[Shutdown] Graceful shutdown timed out after 30 seconds. Forcing exit.`,
);
expect(processExitSpy).toHaveBeenCalledWith(1);
vi.useRealTimers();
});
});
});
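The timeout test above implies a race between cleanup and a 30-second timer. A minimal sketch of that shape, assuming the real workers.server.ts wires the exit code into process.exit; the function name is illustrative:

const SHUTDOWN_TIMEOUT_MS = 30_000;

// Race the real cleanup against a 30-second timer; if the timer wins, force-exit.
async function shutdownWithTimeout(closeAll: () => Promise<void>): Promise<number> {
  const timedOut = new Promise<'timeout'>((resolve) => {
    setTimeout(() => resolve('timeout'), SHUTDOWN_TIMEOUT_MS);
  });
  const winner = await Promise.race([closeAll().then(() => 'done' as const), timedOut]);
  if (winner === 'timeout') {
    console.error('[Shutdown] Graceful shutdown timed out after 30 seconds. Forcing exit.');
    return 1; // the caller would pass this to process.exit
  }
  return 0;
}

This is why the test advances fake timers by 31000ms: it pushes the clock past the 30-second threshold so the timeout branch wins the race.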

View File

@@ -1,420 +1,32 @@
// src/services/queueService.server.ts
+import { logger } from './logger.server';
+import { connection } from './redis.server';
+import {
+flyerQueue,
+emailQueue,
+analyticsQueue,
+weeklyAnalyticsQueue,
+cleanupQueue,
+tokenCleanupQueue,
+} from './queues.server';
+
+// Re-export everything for backward compatibility where possible
+export { connection } from './redis.server';
+export * from './queues.server';
+
+// We do NOT export workers here anymore to prevent side effects.
+// Consumers needing workers must import from './workers.server'.
import { Queue, Worker, Job } from 'bullmq';
import IORedis from 'ioredis'; // Correctly imported
import fsPromises from 'node:fs/promises';
import { exec } from 'child_process';
import { promisify } from 'util';
import { logger } from './logger.server';
import { aiService } from './aiService.server';
import * as emailService from './emailService.server';
import * as db from './db/index.db';
import {
FlyerProcessingService,
type FlyerJobData,
type IFileSystem,
} from './flyerProcessingService.server';
import { FlyerDataTransformer } from './flyerDataTransformer';
export const connection = new IORedis(process.env.REDIS_URL!, {
maxRetriesPerRequest: null, // Important for BullMQ
password: process.env.REDIS_PASSWORD, // Add the password from environment variables
});
// --- Redis Connection Event Listeners ---
connection.on('connect', () => {
logger.info('[Redis] Connection established successfully.');
});
connection.on('error', (err) => {
// This is crucial for diagnosing Redis connection issues.
logger.error({ err }, '[Redis] Connection error.');
});
const execAsync = promisify(exec);
// --- Queues ---
export const flyerQueue = new Queue<FlyerJobData>('flyer-processing', {
connection,
defaultJobOptions: {
attempts: 3, // Attempt a job 3 times before marking it as failed.
backoff: {
type: 'exponential',
delay: 5000, // Start with a 5-second delay for the first retry
},
},
});
export const emailQueue = new Queue<EmailJobData>('email-sending', {
connection,
defaultJobOptions: {
attempts: 5, // Emails can be retried more aggressively
backoff: {
type: 'exponential',
delay: 10000, // Start with a 10-second delay
},
},
});
export const analyticsQueue = new Queue<AnalyticsJobData>('analytics-reporting', {
connection,
defaultJobOptions: {
attempts: 2, // Analytics can be intensive, so fewer retries might be desired.
backoff: {
type: 'exponential',
delay: 60000, // Wait a minute before retrying.
},
// Remove job from queue on completion to save space, as results are in the DB.
removeOnComplete: true,
removeOnFail: 50, // Keep the last 50 failed jobs for inspection.
},
});
export const weeklyAnalyticsQueue = new Queue<WeeklyAnalyticsJobData>(
'weekly-analytics-reporting',
{
connection,
defaultJobOptions: {
attempts: 2,
backoff: {
type: 'exponential',
delay: 3600000, // 1 hour delay for retries
},
removeOnComplete: true,
removeOnFail: 50,
},
},
);
export const cleanupQueue = new Queue<CleanupJobData>('file-cleanup', {
connection,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'exponential',
delay: 30000, // Retry cleanup after 30 seconds
},
removeOnComplete: true, // No need to keep successful cleanup jobs
},
});
export const tokenCleanupQueue = new Queue<TokenCleanupJobData>('token-cleanup', {
connection,
defaultJobOptions: {
attempts: 2,
backoff: {
type: 'exponential',
delay: 3600000, // 1 hour delay
},
removeOnComplete: true,
removeOnFail: 10,
},
});
// --- Job Data Interfaces ---
interface EmailJobData {
to: string;
subject: string;
text: string;
html: string;
}
/**
* Defines the data for an analytics job.
*/
interface AnalyticsJobData {
reportDate: string; // e.g., '2024-10-26'
}
/**
* Defines the data for a weekly analytics job.
*/
interface WeeklyAnalyticsJobData {
reportYear: number;
reportWeek: number; // ISO week number (1-53)
}
interface CleanupJobData {
flyerId: number;
// An array of absolute file paths to be deleted. Made optional for manual cleanup triggers.
paths?: string[];
}
/**
* Defines the data for a token cleanup job.
*/
interface TokenCleanupJobData {
timestamp: string; // ISO string to ensure the job is unique per run
}
// --- Worker Instantiation ---
// Create an adapter for fsPromises to match the IFileSystem interface.
const fsAdapter: IFileSystem = {
readdir: (path: string, options: { withFileTypes: true }) => fsPromises.readdir(path, options),
unlink: (path: string) => fsPromises.unlink(path),
};
// Instantiate the service with its real dependencies
const flyerProcessingService = new FlyerProcessingService(
aiService,
db,
fsAdapter,
execAsync,
cleanupQueue, // Inject the cleanup queue to break the circular dependency
new FlyerDataTransformer(), // Inject the new transformer
);
/**
* A generic function to attach logging event listeners to any worker.
* This centralizes logging for job completion and final failure.
* @param worker The BullMQ worker instance.
*/
const attachWorkerEventListeners = (worker: Worker) => {
worker.on('completed', (job: Job, returnValue: unknown) => {
logger.info({ returnValue }, `[${worker.name}] Job ${job.id} completed successfully.`);
});
worker.on('failed', (job: Job | undefined, error: Error) => {
// This event fires after all retries have failed.
logger.error(
{ err: error, jobData: job?.data },
`[${worker.name}] Job ${job?.id} has ultimately failed after all attempts.`,
);
});
};
export const flyerWorker = new Worker<FlyerJobData>(
'flyer-processing', // Must match the queue name
(job) => {
// The processJob method creates its own job-specific logger internally.
return flyerProcessingService.processJob(job);
},
{
connection,
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
},
);
/**
* A dedicated worker process for sending emails.
*/
export const emailWorker = new Worker<EmailJobData>(
'email-sending',
async (job: Job<EmailJobData>) => {
const { to, subject } = job.data;
// Create a job-specific logger instance
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
jobLogger.info({ to, subject }, `[EmailWorker] Sending email for job ${job.id}`);
try {
await emailService.sendEmail(job.data, jobLogger);
} catch (error: unknown) {
// Standardize error logging to capture the full error object, including the stack trace.
// This provides more context for debugging than just logging the message.
logger.error(
{
// Log the full error object for better diagnostics.
err: error instanceof Error ? error : new Error(String(error)),
// Also include the job data for context.
jobData: job.data,
},
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
// Re-throw to let BullMQ handle the failure and retry.
throw error;
}
},
{
connection,
concurrency: parseInt(process.env.EMAIL_WORKER_CONCURRENCY || '10', 10),
},
);
/**
* A dedicated worker for generating daily analytics reports.
* This is a placeholder for the actual report generation logic.
*/
export const analyticsWorker = new Worker<AnalyticsJobData>(
'analytics-reporting',
async (job: Job<AnalyticsJobData>) => {
const { reportDate } = job.data;
logger.info({ reportDate }, `[AnalyticsWorker] Starting report generation for job ${job.id}`);
try {
// Special case for testing the retry mechanism
if (reportDate === 'FAIL') {
throw new Error('This is a test failure for the analytics job.');
}
// In a real implementation, you would call a database function here.
// For example: await db.generateDailyAnalyticsReport(reportDate);
await new Promise((resolve) => setTimeout(resolve, 10000)); // Simulate a 10-second task
logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
} catch (error: unknown) {
// Standardize error logging.
logger.error(
{
err: error instanceof Error ? error : new Error(String(error)),
jobData: job.data,
},
`[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
throw error; // Re-throw to let BullMQ handle the failure and retry.
}
},
{
connection,
concurrency: parseInt(process.env.ANALYTICS_WORKER_CONCURRENCY || '1', 10),
},
);
/**
* A dedicated worker for cleaning up flyer-related files from the filesystem.
* This is triggered manually by an admin after a flyer has been reviewed.
*/
export const cleanupWorker = new Worker<CleanupJobData>(
// This worker now handles two types of cleanup jobs.
'file-cleanup', // The queue name
async (job: Job<CleanupJobData>) => {
// Destructure the data from the job payload.
const { flyerId, paths } = job.data;
logger.info(
{ paths },
`[CleanupWorker] Starting file cleanup for job ${job.id} (Flyer ID: ${flyerId})`,
);
try {
if (!paths || paths.length === 0) {
logger.warn(
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} received no paths to clean. Skipping.`,
);
return;
}
// Iterate over the file paths provided in the job data and delete each one.
for (const filePath of paths) {
try {
await fsAdapter.unlink(filePath);
logger.info(`[CleanupWorker] Deleted temporary file: ${filePath}`);
} catch (unlinkError: unknown) {
// If the file doesn't exist, it's a success from our perspective.
// We can log it as a warning and continue without failing the job.
if (
unlinkError instanceof Error &&
'code' in unlinkError &&
unlinkError.code === 'ENOENT'
) {
logger.warn(
`[CleanupWorker] File not found during cleanup (already deleted?): ${filePath}`,
);
} else {
throw unlinkError; // For any other error (e.g., permissions), re-throw to fail the job.
}
}
}
logger.info(
`[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`,
);
} catch (error: unknown) {
// Standardize error logging.
logger.error(
{
err: error instanceof Error ? error : new Error(String(error)),
},
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
throw error; // Re-throw to let BullMQ handle the failure and retry.
}
},
{
connection,
concurrency: parseInt(process.env.CLEANUP_WORKER_CONCURRENCY || '10', 10),
},
);
/**
* A dedicated worker for generating weekly analytics reports.
* This is a placeholder for the actual report generation logic.
*/
export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
'weekly-analytics-reporting',
async (job: Job<WeeklyAnalyticsJobData>) => {
const { reportYear, reportWeek } = job.data;
logger.info(
{ reportYear, reportWeek },
`[WeeklyAnalyticsWorker] Starting weekly report generation for job ${job.id}`,
);
try {
// Simulate a longer-running task for weekly reports
await new Promise((resolve) => setTimeout(resolve, 30000)); // Simulate 30-second task
logger.info(
`[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`,
);
} catch (error: unknown) {
// Standardize error logging.
logger.error(
{
err: error instanceof Error ? error : new Error(String(error)),
jobData: job.data,
},
`[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
throw error; // Re-throw to let BullMQ handle the failure and retry.
}
},
{
connection,
concurrency: parseInt(process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY || '1', 10),
},
);
/**
* A dedicated worker for cleaning up expired password reset tokens.
*/
export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
'token-cleanup',
async (job: Job<TokenCleanupJobData>) => {
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
jobLogger.info('[TokenCleanupWorker] Starting cleanup of expired password reset tokens.');
try {
const deletedCount = await db.userRepo.deleteExpiredResetTokens(jobLogger);
jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`);
return { deletedCount };
} catch (error: unknown) {
jobLogger.error({ err: error }, `[TokenCleanupWorker] Job ${job.id} failed.`);
throw error;
}
},
{
connection,
concurrency: 1, // This is a low-priority, non-intensive task.
},
);
// --- Attach Event Listeners to All Workers ---
attachWorkerEventListeners(flyerWorker);
attachWorkerEventListeners(emailWorker);
attachWorkerEventListeners(analyticsWorker);
attachWorkerEventListeners(cleanupWorker);
attachWorkerEventListeners(weeklyAnalyticsWorker);
attachWorkerEventListeners(tokenCleanupWorker);
logger.info('All workers started and listening for jobs.');
/**
* A function to gracefully shut down all queue workers and connections.
* This is essential for preventing jobs from getting stuck in an 'active' state
* when the application process is terminated.
* @param signal The signal that triggered the shutdown (e.g., 'SIGINT').
*/
+/**
+ * A function to gracefully shut down all queues and connections.
+ * This is for the API process which only uses queues.
+ * For worker processes, use the gracefulShutdown from workers.server.ts
+ */
export const gracefulShutdown = async (signal: string) => {
-logger.info(`[Shutdown] Received ${signal}. Closing all workers and queues...`);
+logger.info(`[Shutdown] Received ${signal}. Closing all queues...`);
let exitCode = 0; // Default to success
const resources = [
-{ name: 'flyerWorker', close: () => flyerWorker.close() },
-{ name: 'emailWorker', close: () => emailWorker.close() },
-{ name: 'analyticsWorker', close: () => analyticsWorker.close() },
-{ name: 'cleanupWorker', close: () => cleanupWorker.close() },
-{ name: 'weeklyAnalyticsWorker', close: () => weeklyAnalyticsWorker.close() },
-{ name: 'tokenCleanupWorker', close: () => tokenCleanupWorker.close() },
{ name: 'flyerQueue', close: () => flyerQueue.close() },
{ name: 'emailQueue', close: () => emailQueue.close() },
{ name: 'analyticsQueue', close: () => analyticsQueue.close() },
@@ -437,7 +49,7 @@ export const gracefulShutdown = async (signal: string) => {
});
if (exitCode === 0) {
-logger.info('[Shutdown] All workers, queues, and connections closed successfully.');
+logger.info('[Shutdown] All queues and connections closed successfully.');
} else {
logger.warn('[Shutdown] Graceful shutdown completed with errors.');
}
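The `resources` array and per-resource error logs above imply a close loop that keeps going when one close fails. A hedged sketch of that shape; the `Promise.allSettled` variant is an assumption, the actual code may well close resources sequentially:

import type { Logger } from 'pino';

type Closeable = { name: string; close: () => Promise<unknown> };

// Close every resource even if some fail, recording a non-zero exit code.
async function closeResources(resources: Closeable[], log: Logger): Promise<number> {
  let exitCode = 0;
  const results = await Promise.allSettled(resources.map((r) => r.close()));
  results.forEach((result, i) => {
    if (result.status === 'rejected') {
      log.error({ err: result.reason, resource: resources[i].name }, '[Shutdown] Error closing resource.');
      exitCode = 1;
    }
  });
  return exitCode;
}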

View File

@@ -175,7 +175,7 @@ describe('Queue Workers', () => {
const emailError = 'SMTP server is down'; // Reject with a string
mocks.sendEmail.mockRejectedValue(emailError);
-await expect(emailProcessor(job)).rejects.toBe(emailError);
+await expect(emailProcessor(job)).rejects.toThrow(emailError);
// The worker should wrap the string in an Error object for logging
expect(mockLogger.error).toHaveBeenCalledWith(

View File

@@ -0,0 +1,96 @@
import { Queue } from 'bullmq';
import { connection } from './redis.server';
import type { FlyerJobData } from './flyerProcessingService.server';
// --- Job Data Interfaces ---
export interface EmailJobData {
to: string;
subject: string;
text: string;
html: string;
}
export interface AnalyticsJobData {
reportDate: string; // e.g., '2024-10-26'
}
export interface WeeklyAnalyticsJobData {
reportYear: number;
reportWeek: number; // ISO week number (1-53)
}
export interface CleanupJobData {
flyerId: number;
paths?: string[];
}
export interface TokenCleanupJobData {
timestamp: string;
}
// --- Queues ---
export const flyerQueue = new Queue<FlyerJobData>('flyer-processing', {
connection,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'exponential',
delay: 5000,
},
},
});
export const emailQueue = new Queue<EmailJobData>('email-sending', {
connection,
defaultJobOptions: {
attempts: 5,
backoff: {
type: 'exponential',
delay: 10000,
},
},
});
export const analyticsQueue = new Queue<AnalyticsJobData>('analytics-reporting', {
connection,
defaultJobOptions: {
attempts: 2,
backoff: {
type: 'exponential',
delay: 60000,
},
removeOnComplete: true,
removeOnFail: 50,
},
});
export const weeklyAnalyticsQueue = new Queue<WeeklyAnalyticsJobData>('weekly-analytics-reporting', {
connection,
defaultJobOptions: {
attempts: 2,
backoff: { type: 'exponential', delay: 3600000 },
removeOnComplete: true,
removeOnFail: 50,
},
});
export const cleanupQueue = new Queue<CleanupJobData>('file-cleanup', {
connection,
defaultJobOptions: {
attempts: 3,
backoff: { type: 'exponential', delay: 30000 },
removeOnComplete: true,
},
});
export const tokenCleanupQueue = new Queue<TokenCleanupJobData>('token-cleanup', {
connection,
defaultJobOptions: {
attempts: 2,
backoff: { type: 'exponential', delay: 3600000 },
removeOnComplete: true,
removeOnFail: 10,
},
});
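A hedged usage sketch for the extracted queues; the job names ('send-email', 'weekly-report') are illustrative, since BullMQ only requires a string name, and retry/backoff behavior comes from the defaultJobOptions configured above:

import { emailQueue, weeklyAnalyticsQueue } from './queues.server';

export async function enqueueExamples(): Promise<void> {
  await emailQueue.add('send-email', {
    to: 'user@example.com',
    subject: 'Your weekly deals',
    text: 'Plain-text body',
    html: '<p>HTML body</p>',
  });

  await weeklyAnalyticsQueue.add('weekly-report', { reportYear: 2025, reportWeek: 52 });
}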

View File

@@ -0,0 +1,16 @@
import IORedis from 'ioredis';
import { logger } from './logger.server';
export const connection = new IORedis(process.env.REDIS_URL!, {
maxRetriesPerRequest: null, // Important for BullMQ
password: process.env.REDIS_PASSWORD,
});
// --- Redis Connection Event Listeners ---
connection.on('connect', () => {
logger.info('[Redis] Connection established successfully.');
});
connection.on('error', (err) => {
logger.error({ err }, '[Redis] Connection error.');
});

src/services/worker.ts Normal file
View File

@@ -0,0 +1,30 @@
import { gracefulShutdown } from './workers.server';
import { logger } from './logger.server';
logger.info('[Worker] Initializing worker process...');
// The workers are instantiated as side effects of importing workers.server.ts.
// This pattern ensures they start immediately upon import.
// Handle graceful shutdown
const handleShutdown = (signal: string) => {
logger.info(`[Worker] Received ${signal}. Initiating graceful shutdown...`);
gracefulShutdown(signal).catch((error: unknown) => {
logger.error({ err: error }, '[Worker] Error during shutdown.');
process.exit(1);
});
};
process.on('SIGINT', () => handleShutdown('SIGINT'));
process.on('SIGTERM', () => handleShutdown('SIGTERM'));
// Catch unhandled errors to log them before crashing
process.on('uncaughtException', (err) => {
logger.error({ err }, '[Worker] Uncaught exception');
});
process.on('unhandledRejection', (reason, promise) => {
logger.error({ reason, promise }, '[Worker] Unhandled Rejection');
});
logger.info('[Worker] Worker process is running and listening for jobs.');

View File

@@ -0,0 +1,346 @@
// src/services/workers.server.test.ts
import { describe, it, expect, vi, beforeEach } from 'vitest';
import type { Job } from 'bullmq';
// --- Hoisted Mocks ---
const mocks = vi.hoisted(() => {
// This object will store the processor functions captured from the worker constructors.
const capturedProcessors: Record<string, (job: Job) => Promise<unknown>> = {};
return {
sendEmail: vi.fn(),
unlink: vi.fn(),
processFlyerJob: vi.fn(),
capturedProcessors,
deleteExpiredResetTokens: vi.fn(),
// Mock the Worker constructor to capture the processor function. It must be a
// `function` and not an arrow function so it can be called with `new`.
MockWorker: vi.fn(function (name: string, processor: (job: Job) => Promise<unknown>) {
if (processor) {
capturedProcessors[name] = processor;
}
// Return a mock worker instance, though it's not used in this test file.
return { on: vi.fn(), close: vi.fn() };
}),
};
});
// --- Mock Modules ---
vi.mock('./emailService.server', async (importOriginal) => {
const actual = await importOriginal<typeof import('./emailService.server')>();
return {
...actual,
// We only need to mock the specific function being called by the worker.
// The rest of the module can retain its original implementation if needed elsewhere.
sendEmail: mocks.sendEmail,
};
});
// The workers use an `fsAdapter`. We can mock the underlying `fsPromises`
// that the adapter is built from in queueService.server.ts.
vi.mock('node:fs/promises', () => ({
default: {
unlink: mocks.unlink,
// Add other fs functions if needed by other tests
readdir: vi.fn(),
},
}));
vi.mock('./logger.server', () => ({
logger: {
info: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
debug: vi.fn(),
child: vi.fn().mockReturnThis(),
},
}));
vi.mock('./db/index.db', () => ({
userRepo: {
deleteExpiredResetTokens: mocks.deleteExpiredResetTokens,
},
}));
// Mock bullmq to capture the processor functions passed to the Worker constructor
import { logger as mockLogger } from './logger.server';
vi.mock('bullmq', () => ({
Worker: mocks.MockWorker,
// FIX: Use a standard function for the mock constructor to allow `new Queue(...)` to work.
Queue: vi.fn(function () {
return { add: vi.fn() };
}),
}));
// Mock flyerProcessingService.server as flyerWorker depends on it
vi.mock('./flyerProcessingService.server', () => ({
FlyerProcessingService: class {
processJob = mocks.processFlyerJob;
},
}));
// Mock flyerDataTransformer as it's a dependency of FlyerProcessingService
vi.mock('./flyerDataTransformer', () => ({
FlyerDataTransformer: class {
transform = vi.fn(); // Mock transform method
},
}));
// Helper to create a mock BullMQ Job object
const createMockJob = <T>(data: T): Job<T> => {
return {
id: 'job-1',
data,
updateProgress: vi.fn().mockResolvedValue(undefined),
log: vi.fn().mockResolvedValue(undefined),
opts: { attempts: 3 },
attemptsMade: 1,
trace: vi.fn().mockResolvedValue(undefined),
moveToCompleted: vi.fn().mockResolvedValue(undefined),
moveToFailed: vi.fn().mockResolvedValue(undefined),
} as unknown as Job<T>;
};
describe('Queue Workers', () => {
// These will hold the captured processor functions for each test.
let flyerProcessor: (job: Job) => Promise<unknown>;
let emailProcessor: (job: Job) => Promise<unknown>;
let analyticsProcessor: (job: Job) => Promise<unknown>;
let cleanupProcessor: (job: Job) => Promise<unknown>;
let weeklyAnalyticsProcessor: (job: Job) => Promise<unknown>;
let tokenCleanupProcessor: (job: Job) => Promise<unknown>;
beforeEach(async () => {
vi.clearAllMocks();
// Reset default mock implementations for hoisted mocks
mocks.sendEmail.mockResolvedValue(undefined);
mocks.unlink.mockResolvedValue(undefined);
mocks.processFlyerJob.mockResolvedValue({ flyerId: 123 }); // Default success for flyer processing
mocks.deleteExpiredResetTokens.mockResolvedValue(5);
// Reset modules to re-evaluate the workers.server.ts file with fresh mocks.
// This ensures that new worker instances are created and their processors are captured for each test.
vi.resetModules();
// Dynamically import the module under test AFTER mocks are reset.
// This will trigger the instantiation of the workers, and our mocked Worker constructor will capture the processors.
await import('./workers.server');
// Re-capture the processors for each test to ensure isolation.
flyerProcessor = mocks.capturedProcessors['flyer-processing'];
emailProcessor = mocks.capturedProcessors['email-sending'];
analyticsProcessor = mocks.capturedProcessors['analytics-reporting'];
cleanupProcessor = mocks.capturedProcessors['file-cleanup'];
weeklyAnalyticsProcessor = mocks.capturedProcessors['weekly-analytics-reporting'];
tokenCleanupProcessor = mocks.capturedProcessors['token-cleanup'];
});
describe('flyerWorker', () => {
it('should call flyerProcessingService.processJob with the job data', async () => {
const jobData = {
filePath: '/tmp/flyer.pdf',
originalFileName: 'flyer.pdf',
checksum: 'abc',
};
const job = createMockJob(jobData);
await flyerProcessor(job);
expect(mocks.processFlyerJob).toHaveBeenCalledTimes(1);
expect(mocks.processFlyerJob).toHaveBeenCalledWith(job);
});
it('should re-throw an error if flyerProcessingService.processJob fails', async () => {
const job = createMockJob({
filePath: '/tmp/fail.pdf',
originalFileName: 'fail.pdf',
checksum: 'def',
});
const processingError = new Error('Flyer processing failed');
mocks.processFlyerJob.mockRejectedValue(processingError);
await expect(flyerProcessor(job)).rejects.toThrow('Flyer processing failed');
});
});
describe('emailWorker', () => {
it('should call emailService.sendEmail with the job data', async () => {
const jobData = {
to: 'test@example.com',
subject: 'Test Email',
html: '<p>Hello</p>',
text: 'Hello',
};
const job = createMockJob(jobData);
await emailProcessor(job);
expect(mocks.sendEmail).toHaveBeenCalledTimes(1);
// The implementation passes the logger as the second argument
expect(mocks.sendEmail).toHaveBeenCalledWith(jobData, expect.anything());
});
it('should log and re-throw an error if sendEmail fails with a non-Error object', async () => {
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
const emailError = 'SMTP server is down'; // Reject with a string
mocks.sendEmail.mockRejectedValue(emailError);
await expect(emailProcessor(job)).rejects.toThrow(emailError);
// The worker should wrap the string in an Error object for logging
expect(mockLogger.error).toHaveBeenCalledWith(
{ err: new Error(emailError), jobData: job.data },
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
});
it('should re-throw an error if sendEmail fails', async () => {
const job = createMockJob({ to: 'fail@example.com', subject: 'fail', html: '', text: '' });
const emailError = new Error('SMTP server is down');
mocks.sendEmail.mockRejectedValue(emailError);
await expect(emailProcessor(job)).rejects.toThrow('SMTP server is down');
expect(mockLogger.error).toHaveBeenCalledWith(
{ err: emailError, jobData: job.data },
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
});
});
describe('analyticsWorker', () => {
it('should complete successfully for a valid report date', async () => {
vi.useFakeTimers();
const job = createMockJob({ reportDate: '2024-01-01' });
const promise = analyticsProcessor(job);
// Advance timers to simulate the 10-second task completing
await vi.advanceTimersByTimeAsync(10000);
await promise; // Wait for the promise to resolve
// No error should be thrown
expect(true).toBe(true);
vi.useRealTimers();
});
it('should throw an error if reportDate is "FAIL"', async () => {
const job = createMockJob({ reportDate: 'FAIL' });
await expect(analyticsProcessor(job)).rejects.toThrow(
'This is a test failure for the analytics job.',
);
});
});
describe('cleanupWorker', () => {
it('should call unlink for each path provided in the job data', async () => {
const jobData = {
flyerId: 123,
paths: ['/tmp/file1.jpg', '/tmp/file2.pdf'],
};
const job = createMockJob(jobData);
mocks.unlink.mockResolvedValue(undefined);
await cleanupProcessor(job);
expect(mocks.unlink).toHaveBeenCalledTimes(2);
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file1.jpg');
expect(mocks.unlink).toHaveBeenCalledWith('/tmp/file2.pdf');
});
it('should not throw an error if a file is already deleted (ENOENT)', async () => {
const jobData = {
flyerId: 123,
paths: ['/tmp/existing.jpg', '/tmp/already-deleted.jpg'],
};
const job = createMockJob(jobData);
// Use the built-in NodeJS.ErrnoException type for mock system errors.
const enoentError: NodeJS.ErrnoException = new Error('File not found');
enoentError.code = 'ENOENT';
// First call succeeds, second call fails with ENOENT
mocks.unlink.mockResolvedValueOnce(undefined).mockRejectedValueOnce(enoentError);
// The processor should complete without throwing
await expect(cleanupProcessor(job)).resolves.toBeUndefined();
expect(mocks.unlink).toHaveBeenCalledTimes(2);
});
it('should re-throw an error for issues other than ENOENT (e.g., permissions)', async () => {
const jobData = {
flyerId: 123,
paths: ['/tmp/protected-file.jpg'],
};
const job = createMockJob(jobData);
// Use the built-in NodeJS.ErrnoException type for mock system errors.
const permissionError: NodeJS.ErrnoException = new Error('Permission denied');
permissionError.code = 'EACCES';
mocks.unlink.mockRejectedValue(permissionError);
await expect(cleanupProcessor(job)).rejects.toThrow('Permission denied');
// Verify the error was logged by the worker's catch block
expect(mockLogger.error).toHaveBeenCalledWith(
{ err: permissionError },
expect.stringContaining(
`[CleanupWorker] Job ${job.id} for flyer ${job.data.flyerId} failed.`,
),
);
});
});
describe('weeklyAnalyticsWorker', () => {
it('should complete successfully for a valid report date', async () => {
vi.useFakeTimers();
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
const promise = weeklyAnalyticsProcessor(job);
// Advance timers to simulate the 30-second task completing
await vi.advanceTimersByTimeAsync(30000);
await promise; // Wait for the promise to resolve
// No error should be thrown
expect(true).toBe(true);
vi.useRealTimers();
});
it('should re-throw an error if the job fails', async () => {
vi.useFakeTimers();
const job = createMockJob({ reportYear: 2024, reportWeek: 1 });
// Mock the internal logic to throw an error
const originalSetTimeout = setTimeout;
vi.spyOn(global, 'setTimeout').mockImplementation((callback, ms) => {
if (ms === 30000) {
// Target the simulated delay
throw new Error('Weekly analytics job failed');
}
return originalSetTimeout(callback, ms);
});
await expect(weeklyAnalyticsProcessor(job)).rejects.toThrow('Weekly analytics job failed');
vi.useRealTimers();
vi.restoreAllMocks(); // Restore setTimeout mock
});
});
describe('tokenCleanupWorker', () => {
it('should call userRepo.deleteExpiredResetTokens and return the count', async () => {
const job = createMockJob({ timestamp: new Date().toISOString() });
mocks.deleteExpiredResetTokens.mockResolvedValue(10);
const result = await tokenCleanupProcessor(job);
expect(mocks.deleteExpiredResetTokens).toHaveBeenCalledTimes(1);
expect(result).toEqual({ deletedCount: 10 });
});
it('should re-throw an error if the database call fails', async () => {
const job = createMockJob({ timestamp: new Date().toISOString() });
const dbError = new Error('DB cleanup failed');
mocks.deleteExpiredResetTokens.mockRejectedValue(dbError);
await expect(tokenCleanupProcessor(job)).rejects.toThrow(dbError);
});
});
});
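The core technique in this file is capturing the processor callback that workers.server.ts hands to each Worker constructor, so the processors can be driven directly with fake jobs. Reduced to its essentials, the pattern looks roughly like this (a minimal sketch, independent of this repo's queue names):

// vi.hoisted() runs before the hoisted vi.mock() factories, so the shared
// `captured` map already exists when the mocked constructor is invoked.
import { vi } from 'vitest';

const captured = vi.hoisted(() => ({}) as Record<string, (job: unknown) => Promise<unknown>>);

vi.mock('bullmq', () => ({
  // Must be a `function` (not an arrow) so `new Worker(...)` works.
  Worker: vi.fn(function (name: string, processor: (job: unknown) => Promise<unknown>) {
    captured[name] = processor;
    return { on: vi.fn(), close: vi.fn() };
  }),
}));

// After `await import('./workers.server')`, captured['flyer-processing'] (etc.)
// can be invoked directly to unit-test each processor in isolation.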

src/services/workers.server.ts Normal file

@@ -0,0 +1,344 @@
import { Worker, Job, UnrecoverableError } from 'bullmq';
import fsPromises from 'node:fs/promises';
import { exec } from 'child_process';
import { promisify } from 'util';
import { logger } from './logger.server';
import { connection } from './redis.server';
import { aiService } from './aiService.server';
import * as emailService from './emailService.server';
import * as db from './db/index.db';
import {
FlyerProcessingService,
type FlyerJobData,
type IFileSystem,
} from './flyerProcessingService.server';
import { FlyerDataTransformer } from './flyerDataTransformer';
import {
flyerQueue,
emailQueue,
analyticsQueue,
weeklyAnalyticsQueue,
cleanupQueue,
tokenCleanupQueue,
type EmailJobData,
type AnalyticsJobData,
type CleanupJobData,
type WeeklyAnalyticsJobData,
type TokenCleanupJobData,
} from './queues.server';
const execAsync = promisify(exec);
// --- Worker Instantiation ---
const fsAdapter: IFileSystem = {
readdir: (path: string, options: { withFileTypes: true }) => fsPromises.readdir(path, options),
unlink: (path: string) => fsPromises.unlink(path),
};
const flyerProcessingService = new FlyerProcessingService(
aiService,
db,
fsAdapter,
execAsync,
cleanupQueue,
new FlyerDataTransformer(),
);
const normalizeError = (error: unknown): Error => {
return error instanceof Error ? error : new Error(String(error));
};
const attachWorkerEventListeners = (worker: Worker) => {
worker.on('completed', (job: Job, returnValue: unknown) => {
logger.info({ returnValue }, `[${worker.name}] Job ${job.id} completed successfully.`);
});
worker.on('failed', (job: Job | undefined, error: Error) => {
logger.error(
{ err: error, jobData: job?.data },
`[${worker.name}] Job ${job?.id} has ultimately failed after all attempts.`,
);
});
};
export const flyerWorker = new Worker<FlyerJobData>(
'flyer-processing',
async (job) => {
try {
return await flyerProcessingService.processJob(job);
} catch (error: unknown) {
const wrappedError = normalizeError(error);
const errorMessage = wrappedError.message || '';
if (
errorMessage.includes('quota') ||
errorMessage.includes('429') ||
errorMessage.includes('RESOURCE_EXHAUSTED')
) {
logger.error(
{ err: wrappedError, jobId: job.id },
'[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
);
throw new UnrecoverableError(errorMessage);
}
throw error;
}
},
{
connection,
concurrency: parseInt(process.env.WORKER_CONCURRENCY || '1', 10),
},
);
export const emailWorker = new Worker<EmailJobData>(
'email-sending',
async (job: Job<EmailJobData>) => {
const { to, subject } = job.data;
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
jobLogger.info({ to, subject }, `[EmailWorker] Sending email for job ${job.id}`);
try {
await emailService.sendEmail(job.data, jobLogger);
} catch (error: unknown) {
const wrappedError = normalizeError(error);
logger.error(
{
err: wrappedError,
jobData: job.data,
},
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
throw wrappedError;
}
},
{
connection,
concurrency: parseInt(process.env.EMAIL_WORKER_CONCURRENCY || '10', 10),
},
);
export const analyticsWorker = new Worker<AnalyticsJobData>(
'analytics-reporting',
async (job: Job<AnalyticsJobData>) => {
const { reportDate } = job.data;
logger.info({ reportDate }, `[AnalyticsWorker] Starting report generation for job ${job.id}`);
try {
if (reportDate === 'FAIL') {
throw new Error('This is a test failure for the analytics job.');
}
await new Promise((resolve) => setTimeout(resolve, 10000));
logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
} catch (error: unknown) {
const wrappedError = normalizeError(error);
logger.error(
{ err: wrappedError, jobData: job.data },
`[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
throw wrappedError;
}
},
{
connection,
concurrency: parseInt(process.env.ANALYTICS_WORKER_CONCURRENCY || '1', 10),
},
);
export const cleanupWorker = new Worker<CleanupJobData>(
'file-cleanup',
async (job: Job<CleanupJobData>) => {
const { flyerId, paths } = job.data;
logger.info(
{ paths },
`[CleanupWorker] Starting file cleanup for job ${job.id} (Flyer ID: ${flyerId})`,
);
try {
if (!paths || paths.length === 0) {
logger.warn(
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} received no paths to clean. Skipping.`,
);
return;
}
for (const filePath of paths) {
try {
await fsAdapter.unlink(filePath);
logger.info(`[CleanupWorker] Deleted temporary file: ${filePath}`);
} catch (unlinkError: unknown) {
if (
unlinkError instanceof Error &&
'code' in unlinkError &&
(unlinkError as NodeJS.ErrnoException).code === 'ENOENT'
) {
logger.warn(
`[CleanupWorker] File not found during cleanup (already deleted?): ${filePath}`,
);
} else {
throw unlinkError;
}
}
}
logger.info(
`[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`,
);
} catch (error: unknown) {
const wrappedError = normalizeError(error);
logger.error(
{ err: wrappedError },
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
throw wrappedError;
}
},
{
connection,
concurrency: parseInt(process.env.CLEANUP_WORKER_CONCURRENCY || '10', 10),
},
);
export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
'weekly-analytics-reporting',
async (job: Job<WeeklyAnalyticsJobData>) => {
const { reportYear, reportWeek } = job.data;
logger.info(
{ reportYear, reportWeek },
`[WeeklyAnalyticsWorker] Starting weekly report generation for job ${job.id}`,
);
try {
await new Promise((resolve) => setTimeout(resolve, 30000));
logger.info(
`[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`,
);
} catch (error: unknown) {
const wrappedError = normalizeError(error);
logger.error(
{ err: wrappedError, jobData: job.data },
`[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
);
throw wrappedError;
}
},
{
connection,
concurrency: parseInt(process.env.WEEKLY_ANALYTICS_WORKER_CONCURRENCY || '1', 10),
},
);
export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
'token-cleanup',
async (job: Job<TokenCleanupJobData>) => {
const jobLogger = logger.child({ jobId: job.id, jobName: job.name });
jobLogger.info('[TokenCleanupWorker] Starting cleanup of expired password reset tokens.');
try {
const deletedCount = await db.userRepo.deleteExpiredResetTokens(jobLogger);
jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`);
return { deletedCount };
} catch (error: unknown) {
const wrappedError = normalizeError(error);
jobLogger.error({ err: wrappedError }, `[TokenCleanupWorker] Job ${job.id} failed.`);
throw wrappedError;
}
},
{
connection,
concurrency: 1,
},
);
attachWorkerEventListeners(flyerWorker);
attachWorkerEventListeners(emailWorker);
attachWorkerEventListeners(analyticsWorker);
attachWorkerEventListeners(cleanupWorker);
attachWorkerEventListeners(weeklyAnalyticsWorker);
attachWorkerEventListeners(tokenCleanupWorker);
logger.info('All workers started and listening for jobs.');
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds
export const gracefulShutdown = async (signal: string) => {
logger.info(
`[Shutdown] Received ${signal}. Initiating graceful shutdown (timeout: ${SHUTDOWN_TIMEOUT / 1000}s)...`,
);
const shutdownPromise = (async () => {
let hasErrors = false;
// Helper function to close a group of resources and log results
const closeResources = async (resources: { name: string; close: () => Promise<unknown> }[], type: string) => {
logger.info(`[Shutdown] Closing all ${type}...`);
const results = await Promise.allSettled(resources.map((r) => r.close()));
let groupHasErrors = false;
results.forEach((result, index) => {
if (result.status === 'rejected') {
groupHasErrors = true;
logger.error(
{ err: result.reason, resource: resources[index].name },
`[Shutdown] Error closing ${resources[index].name}.`,
);
}
});
if (!groupHasErrors) logger.info(`[Shutdown] All ${type} closed successfully.`);
return groupHasErrors;
};
// Define resource groups for sequential shutdown
const workerResources = [
{ name: 'flyerWorker', close: () => flyerWorker.close() },
{ name: 'emailWorker', close: () => emailWorker.close() },
{ name: 'analyticsWorker', close: () => analyticsWorker.close() },
{ name: 'cleanupWorker', close: () => cleanupWorker.close() },
{ name: 'weeklyAnalyticsWorker', close: () => weeklyAnalyticsWorker.close() },
{ name: 'tokenCleanupWorker', close: () => tokenCleanupWorker.close() },
];
const queueResources = [
{ name: 'flyerQueue', close: () => flyerQueue.close() },
{ name: 'emailQueue', close: () => emailQueue.close() },
{ name: 'analyticsQueue', close: () => analyticsQueue.close() },
{ name: 'cleanupQueue', close: () => cleanupQueue.close() },
{ name: 'weeklyAnalyticsQueue', close: () => weeklyAnalyticsQueue.close() },
{ name: 'tokenCleanupQueue', close: () => tokenCleanupQueue.close() },
];
// 1. Close workers first
if (await closeResources(workerResources, 'workers')) hasErrors = true;
// 2. Then close queues
if (await closeResources(queueResources, 'queues')) hasErrors = true;
// 3. Finally, close the Redis connection
logger.info('[Shutdown] Closing Redis connection...');
try {
await connection.quit();
logger.info('[Shutdown] Redis connection closed successfully.');
} catch (err) {
hasErrors = true;
logger.error({ err, resource: 'redisConnection' }, `[Shutdown] Error closing Redis connection.`);
}
return hasErrors;
})();
const timeoutPromise = new Promise<string>((resolve) =>
setTimeout(() => resolve('timeout'), SHUTDOWN_TIMEOUT),
);
const result = await Promise.race([shutdownPromise, timeoutPromise]);
if (result === 'timeout') {
logger.error(
`[Shutdown] Graceful shutdown timed out after ${SHUTDOWN_TIMEOUT / 1000} seconds. Forcing exit.`,
);
process.exit(1);
} else {
const hasErrors = result as boolean;
if (!hasErrors) {
logger.info('[Shutdown] All resources closed successfully.');
} else {
logger.warn('[Shutdown] Graceful shutdown completed with errors.');
}
process.exit(hasErrors ? 1 : 0);
}
};
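One detail worth calling out: UnrecoverableError is BullMQ's mechanism for skipping retries, so a job that throws it moves straight to the failed set regardless of its configured attempts, which is what makes the quota/429 branch above fail fast. For orientation, a sketch of enqueueing against the flyer queue (the FlyerJobData shape is taken from the tests above; the retry options are assumptions, not values from this commit):

// Sketch (inside an async context): with attempts: 3, a plain Error retries
// the job twice more, while UnrecoverableError (the quota branch above)
// fails it immediately.
import { flyerQueue } from './queues.server';

await flyerQueue.add(
  'process-flyer',
  { filePath: '/tmp/flyer.pdf', originalFileName: 'flyer.pdf', checksum: 'abc' },
  { attempts: 3, backoff: { type: 'exponential', delay: 5000 } },
);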

src/tests/e2e/admin-dashboard.e2e.test.ts Normal file

@@ -0,0 +1,96 @@
// src/tests/e2e/admin-dashboard.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E Admin Dashboard Flow', () => {
// Use a unique email for every run to avoid collisions
const uniqueId = Date.now();
const adminEmail = `e2e-admin-${uniqueId}@example.com`;
const adminPassword = 'StrongPassword123!';
let authToken: string;
let adminUserId: string | null = null;
afterAll(async () => {
// Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
if (adminUserId) {
try {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [adminUserId]);
} catch (err) {
console.error('Error cleaning up E2E admin user:', err);
}
}
});
it('should allow an admin to log in and access dashboard features', async () => {
// 1. Register a new user (initially a regular user)
const registerResponse = await request.post('/api/auth/register').send({
email: adminEmail,
password: adminPassword,
full_name: 'E2E Admin User',
});
expect(registerResponse.status).toBe(201);
const registeredUser = registerResponse.body.userprofile.user;
adminUserId = registeredUser.user_id;
expect(adminUserId).toBeDefined();
// 2. Promote the user to 'admin' via direct DB access
// (This simulates an existing admin or a manual promotion, as there is no public "register as admin" endpoint)
await getPool().query(`UPDATE public.profiles SET role = 'admin' WHERE user_id = $1`, [
adminUserId,
]);
// 3. Login to get the access token (now with admin privileges)
const loginResponse = await request.post('/api/auth/login').send({
email: adminEmail,
password: adminPassword,
});
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
expect(authToken).toBeDefined();
// Verify the role returned in the login response is now 'admin'
expect(loginResponse.body.userprofile.role).toBe('admin');
// 4. Fetch System Stats (Protected Admin Route)
const statsResponse = await request
.get('/api/admin/stats')
.set('Authorization', `Bearer ${authToken}`);
expect(statsResponse.status).toBe(200);
expect(statsResponse.body).toHaveProperty('userCount');
expect(statsResponse.body).toHaveProperty('flyerCount');
// 5. Fetch User List (Protected Admin Route)
const usersResponse = await request
.get('/api/admin/users')
.set('Authorization', `Bearer ${authToken}`);
expect(usersResponse.status).toBe(200);
expect(Array.isArray(usersResponse.body)).toBe(true);
// The list should contain the admin user we just created
const self = usersResponse.body.find((u: any) => u.user_id === adminUserId);
expect(self).toBeDefined();
// 6. Check Queue Status (Protected Admin Route)
const queueResponse = await request
.get('/api/admin/queues/status')
.set('Authorization', `Bearer ${authToken}`);
expect(queueResponse.status).toBe(200);
expect(Array.isArray(queueResponse.body)).toBe(true);
// Verify that the 'flyer-processing' queue is present in the status report
const flyerQueue = queueResponse.body.find((q: any) => q.name === 'flyer-processing');
expect(flyerQueue).toBeDefined();
expect(flyerQueue.counts).toBeDefined();
});
});
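Since there is no public endpoint for creating admins, step 2 reaches into the database directly; if other suites need the same setup, the UPDATE could live in a shared helper. A minimal sketch (the promoteToAdmin name is hypothetical):

// Sketch: promote a freshly registered user to admin for test setup.
// Mirrors the raw UPDATE in step 2 above.
import type { Pool } from 'pg';

export async function promoteToAdmin(pool: Pool, userId: string): Promise<void> {
  await pool.query(`UPDATE public.profiles SET role = 'admin' WHERE user_id = $1`, [userId]);
}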

src/tests/e2e/flyer-upload.e2e.test.ts Normal file

@@ -0,0 +1,110 @@
// src/tests/e2e/flyer-upload.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
import crypto from 'crypto';
import path from 'path';
import fs from 'fs';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E Flyer Upload and Processing Workflow', () => {
const uniqueId = Date.now();
const userEmail = `e2e-uploader-${uniqueId}@example.com`;
const userPassword = 'StrongPassword123!';
let authToken: string;
let userId: string | null = null;
let flyerId: number | null = null;
afterAll(async () => {
// Cleanup: Delete the flyer and user created during the test
const pool = getPool();
if (flyerId) {
await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [flyerId]);
}
if (userId) {
await pool.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
}
});
it('should allow a user to upload a flyer and wait for processing to complete', async () => {
// 1. Register a new user
const registerResponse = await request.post('/api/auth/register').send({
email: userEmail,
password: userPassword,
full_name: 'E2E Flyer Uploader',
});
expect(registerResponse.status).toBe(201);
// 2. Login to get the access token
const loginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
userId = loginResponse.body.userprofile.user.user_id;
expect(authToken).toBeDefined();
// 3. Prepare the flyer file
// We try to use the existing test asset if available, otherwise create a dummy buffer.
// Note: In a real E2E scenario against a live AI service, a valid image is required.
// If the AI service is mocked or stubbed in this environment, a dummy buffer might suffice.
let fileBuffer: Buffer;
const fileName = `e2e-test-flyer-${uniqueId}.jpg`;
const assetPath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
if (fs.existsSync(assetPath)) {
const rawBuffer = fs.readFileSync(assetPath);
// Append unique ID to ensure unique checksum for every test run
fileBuffer = Buffer.concat([rawBuffer, Buffer.from(uniqueId.toString())]);
} else {
// Fallback to a minimal valid JPEG header + random data if asset is missing
// (This might fail if the backend does strict image validation/processing)
fileBuffer = Buffer.concat([
Buffer.from([0xff, 0xd8, 0xff, 0xe0]), // JPEG Start of Image
Buffer.from(uniqueId.toString())
]);
}
// Calculate checksum (required by the API)
const checksum = crypto.createHash('sha256').update(fileBuffer).digest('hex');
// 4. Upload the flyer
const uploadResponse = await request
.post('/api/ai/upload-and-process')
.set('Authorization', `Bearer ${authToken}`)
.field('checksum', checksum)
.attach('flyerFile', fileBuffer, fileName);
expect(uploadResponse.status).toBe(202);
const jobId = uploadResponse.body.jobId;
expect(jobId).toBeDefined();
// 5. Poll for job completion
let jobStatus;
const maxRetries = 30; // Poll for up to 90 seconds
for (let i = 0; i < maxRetries; i++) {
await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3s
const statusResponse = await request
.get(`/api/ai/jobs/${jobId}/status`)
.set('Authorization', `Bearer ${authToken}`);
jobStatus = statusResponse.body;
if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
break;
}
}
expect(jobStatus.state).toBe('completed');
flyerId = jobStatus.returnValue?.flyerId;
expect(flyerId).toBeTypeOf('number');
}, 120000); // Extended timeout for AI processing
});
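The inline polling loop above is easy to get subtly wrong when copied between suites; a factored-out sketch (the waitForJob helper name is hypothetical, but the endpoint and terminal states match the test):

// Sketch: poll the job-status endpoint until the BullMQ job settles.
import supertest from 'supertest';
import app from '../../../server';

const request = supertest(app);

async function waitForJob(jobId: string, token: string, retries = 30, intervalMs = 3000) {
  for (let i = 0; i < retries; i += 1) {
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    const res = await request
      .get(`/api/ai/jobs/${jobId}/status`)
      .set('Authorization', `Bearer ${token}`);
    if (res.body.state === 'completed' || res.body.state === 'failed') {
      return res.body; // { state, returnValue?, ... } as asserted in the test above
    }
  }
  throw new Error(`Job ${jobId} did not settle after ${retries} polls`);
}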

src/tests/e2e/user-journey.e2e.test.ts Normal file

@@ -0,0 +1,111 @@
// src/tests/e2e/user-journey.e2e.test.ts
import { describe, it, expect, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('E2E User Journey', () => {
// Use a unique email for every run to avoid collisions
const uniqueId = Date.now();
const userEmail = `e2e-test-${uniqueId}@example.com`;
const userPassword = 'StrongPassword123!';
let authToken: string;
let userId: string | null = null;
let shoppingListId: number;
afterAll(async () => {
// Safety cleanup: Ensure the user is deleted from the DB if the test fails mid-way.
// If the test succeeds, the user deletes their own account, so this acts as a fallback.
if (userId) {
try {
await getPool().query('DELETE FROM public.users WHERE user_id = $1', [userId]);
} catch (err) {
console.error('Error cleaning up E2E test user:', err);
}
}
});
it('should complete a full user lifecycle: Register -> Login -> Manage List -> Delete Account', async () => {
// 1. Register a new user
const registerResponse = await request.post('/api/auth/register').send({
email: userEmail,
password: userPassword,
full_name: 'E2E Traveler',
});
expect(registerResponse.status).toBe(201);
expect(registerResponse.body.message).toBe('User registered successfully!');
// 2. Login to get the access token
const loginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
expect(loginResponse.status).toBe(200);
authToken = loginResponse.body.token;
userId = loginResponse.body.userprofile.user.user_id;
expect(authToken).toBeDefined();
expect(userId).toBeDefined();
// 3. Create a Shopping List
const createListResponse = await request
.post('/api/users/shopping-lists')
.set('Authorization', `Bearer ${authToken}`)
.send({ name: 'E2E Party List' });
expect(createListResponse.status).toBe(201);
shoppingListId = createListResponse.body.shopping_list_id;
expect(shoppingListId).toBeDefined();
// 4. Add an item to the list
const addItemResponse = await request
.post(`/api/users/shopping-lists/${shoppingListId}/items`)
.set('Authorization', `Bearer ${authToken}`)
.send({ customItemName: 'Chips' });
expect(addItemResponse.status).toBe(201);
expect(addItemResponse.body.custom_item_name).toBe('Chips');
// 5. Verify the list and item exist via GET
const getListsResponse = await request
.get('/api/users/shopping-lists')
.set('Authorization', `Bearer ${authToken}`);
expect(getListsResponse.status).toBe(200);
const myLists = getListsResponse.body;
const targetList = myLists.find((l: any) => l.shopping_list_id === shoppingListId);
expect(targetList).toBeDefined();
expect(targetList.items).toHaveLength(1);
expect(targetList.items[0].custom_item_name).toBe('Chips');
// 6. Delete the User Account (Self-Service)
const deleteAccountResponse = await request
.delete('/api/users/account')
.set('Authorization', `Bearer ${authToken}`)
.send({ password: userPassword });
expect(deleteAccountResponse.status).toBe(200);
expect(deleteAccountResponse.body.message).toBe('Account deleted successfully.');
// 7. Verify Login is no longer possible
const failLoginResponse = await request.post('/api/auth/login').send({
email: userEmail,
password: userPassword,
});
expect(failLoginResponse.status).toBe(401);
// Mark userId as null so afterAll doesn't attempt to delete it again
userId = null;
});
});

src/tests/integration/admin.integration.test.ts

@@ -1,10 +1,16 @@
 // src/tests/integration/admin.integration.test.ts
 import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';
-import * as apiClient from '../../services/apiClient';
+import supertest from 'supertest';
+import app from '../../../server';
 import { getPool } from '../../services/db/connection.db';
 import type { UserProfile } from '../../types';
 import { createAndLoginUser } from '../utils/testHelpers';
+/**
+ * @vitest-environment node
+ */
+const request = supertest(app);
 describe('Admin API Routes Integration Tests', () => {
   let adminToken: string;
   let adminUser: UserProfile;
@@ -42,8 +48,10 @@ describe('Admin API Routes Integration Tests', () => {
   describe('GET /api/admin/stats', () => {
     it('should allow an admin to fetch application stats', async () => {
-      const response = await apiClient.getApplicationStats(adminToken);
-      const stats = await response.json();
+      const response = await request
+        .get('/api/admin/stats')
+        .set('Authorization', `Bearer ${adminToken}`);
+      const stats = response.body;
       expect(stats).toBeDefined();
       expect(stats).toHaveProperty('flyerCount');
       expect(stats).toHaveProperty('userCount');
@@ -51,18 +59,21 @@ describe('Admin API Routes Integration Tests', () => {
     });
     it('should forbid a regular user from fetching application stats', async () => {
-      const response = await apiClient.getApplicationStats(regularUserToken);
-      expect(response.ok).toBe(false);
+      const response = await request
+        .get('/api/admin/stats')
+        .set('Authorization', `Bearer ${regularUserToken}`);
       expect(response.status).toBe(403);
-      const errorData = await response.json();
+      const errorData = response.body;
       expect(errorData.message).toBe('Forbidden: Administrator access required.');
     });
   });
   describe('GET /api/admin/stats/daily', () => {
     it('should allow an admin to fetch daily stats', async () => {
-      const response = await apiClient.getDailyStats(adminToken);
-      const dailyStats = await response.json();
+      const response = await request
+        .get('/api/admin/stats/daily')
+        .set('Authorization', `Bearer ${adminToken}`);
+      const dailyStats = response.body;
       expect(dailyStats).toBeDefined();
       expect(Array.isArray(dailyStats)).toBe(true);
       // We just created users in beforeAll, so we should have data
@@ -73,10 +84,11 @@ describe('Admin API Routes Integration Tests', () => {
     });
     it('should forbid a regular user from fetching daily stats', async () => {
-      const response = await apiClient.getDailyStats(regularUserToken);
-      expect(response.ok).toBe(false);
+      const response = await request
+        .get('/api/admin/stats/daily')
+        .set('Authorization', `Bearer ${regularUserToken}`);
       expect(response.status).toBe(403);
-      const errorData = await response.json();
+      const errorData = response.body;
       expect(errorData.message).toBe('Forbidden: Administrator access required.');
     });
   });
@@ -85,25 +97,30 @@ describe('Admin API Routes Integration Tests', () => {
     it('should allow an admin to fetch suggested corrections', async () => {
       // This test just verifies access and correct response shape.
       // More detailed tests would require seeding corrections.
-      const response = await apiClient.getSuggestedCorrections(adminToken);
-      const corrections = await response.json();
+      const response = await request
+        .get('/api/admin/corrections')
+        .set('Authorization', `Bearer ${adminToken}`);
+      const corrections = response.body;
       expect(corrections).toBeDefined();
       expect(Array.isArray(corrections)).toBe(true);
     });
     it('should forbid a regular user from fetching suggested corrections', async () => {
-      const response = await apiClient.getSuggestedCorrections(regularUserToken);
-      expect(response.ok).toBe(false);
+      const response = await request
+        .get('/api/admin/corrections')
+        .set('Authorization', `Bearer ${regularUserToken}`);
       expect(response.status).toBe(403);
-      const errorData = await response.json();
+      const errorData = response.body;
       expect(errorData.message).toBe('Forbidden: Administrator access required.');
     });
   });
   describe('GET /api/admin/brands', () => {
     it('should allow an admin to fetch all brands', async () => {
-      const response = await apiClient.fetchAllBrands(adminToken);
-      const brands = await response.json();
+      const response = await request
+        .get('/api/admin/brands')
+        .set('Authorization', `Bearer ${adminToken}`);
+      const brands = response.body;
       expect(brands).toBeDefined();
       expect(Array.isArray(brands)).toBe(true);
       // Even if no brands exist, it should return an array.
@@ -112,10 +129,11 @@ describe('Admin API Routes Integration Tests', () => {
     });
     it('should forbid a regular user from fetching all brands', async () => {
-      const response = await apiClient.fetchAllBrands(regularUserToken);
-      expect(response.ok).toBe(false);
+      const response = await request
+        .get('/api/admin/brands')
+        .set('Authorization', `Bearer ${regularUserToken}`);
       expect(response.status).toBe(403);
-      const errorData = await response.json();
+      const errorData = response.body;
       expect(errorData.message).toBe('Forbidden: Administrator access required.');
     });
   });
@@ -170,8 +188,10 @@ describe('Admin API Routes Integration Tests', () => {
     it('should allow an admin to approve a correction', async () => {
       // Act: Approve the correction.
-      const response = await apiClient.approveCorrection(testCorrectionId, adminToken);
-      expect(response.ok).toBe(true);
+      const response = await request
+        .post(`/api/admin/corrections/${testCorrectionId}/approve`)
+        .set('Authorization', `Bearer ${adminToken}`);
+      expect(response.status).toBe(200);
       // Assert: Verify the flyer item's price was updated and the correction status changed.
       const { rows: itemRows } = await getPool().query(
@@ -189,8 +209,10 @@ describe('Admin API Routes Integration Tests', () => {
     it('should allow an admin to reject a correction', async () => {
       // Act: Reject the correction.
-      const response = await apiClient.rejectCorrection(testCorrectionId, adminToken);
-      expect(response.ok).toBe(true);
+      const response = await request
+        .post(`/api/admin/corrections/${testCorrectionId}/reject`)
+        .set('Authorization', `Bearer ${adminToken}`);
+      expect(response.status).toBe(200);
       // Assert: Verify the correction status changed.
       const { rows: correctionRows } = await getPool().query(
@@ -202,12 +224,11 @@ describe('Admin API Routes Integration Tests', () => {
     it('should allow an admin to update a correction', async () => {
       // Act: Update the suggested value of the correction.
-      const response = await apiClient.updateSuggestedCorrection(
-        testCorrectionId,
-        '300',
-        adminToken,
-      );
-      const updatedCorrection = await response.json();
+      const response = await request
+        .put(`/api/admin/corrections/${testCorrectionId}`)
+        .set('Authorization', `Bearer ${adminToken}`)
+        .send({ suggested_value: '300' });
+      const updatedCorrection = response.body;
       // Assert: Verify the API response and the database state.
       expect(updatedCorrection.suggested_value).toBe('300');
@@ -227,8 +248,11 @@ describe('Admin API Routes Integration Tests', () => {
       const recipeId = recipeRes.rows[0].recipe_id;
       // Act: Update the status to 'public'.
-      const response = await apiClient.updateRecipeStatus(recipeId, 'public', adminToken);
-      expect(response.ok).toBe(true);
+      const response = await request
+        .put(`/api/admin/recipes/${recipeId}/status`)
+        .set('Authorization', `Bearer ${adminToken}`)
+        .send({ status: 'public' });
+      expect(response.status).toBe(200);
       // Assert: Verify the status was updated in the database.
       const { rows: updatedRecipeRows } = await getPool().query(
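The rewrite applied throughout this file is uniform: each fetch-based apiClient helper becomes a supertest call against the imported Express app, which supertest binds to an ephemeral port per request, so no separately running server is needed. In sketch form (request and adminToken as defined in this file):

// Before: const response = await apiClient.getApplicationStats(adminToken); // fetch wrapper
//         const stats = await response.json();
// After: superagent parses the JSON body, so response.body replaces response.json().
const response = await request
  .get('/api/admin/stats')
  .set('Authorization', `Bearer ${adminToken}`);
const stats = response.body;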

src/tests/integration/ai.integration.test.ts

@@ -1,6 +1,7 @@
 // src/tests/integration/ai.integration.test.ts
 import { describe, it, expect, beforeAll, afterAll } from 'vitest';
-import * as aiApiClient from '../../services/aiApiClient';
+import supertest from 'supertest';
+import app from '../../../server';
 import fs from 'node:fs/promises';
 import path from 'path';
 import { createAndLoginUser } from '../utils/testHelpers';
@@ -9,6 +10,8 @@ import { createAndLoginUser } from '../utils/testHelpers';
  * @vitest-environment node
  */
+const request = supertest(app);
 interface TestGeolocationCoordinates {
   latitude: number;
   longitude: number;
@@ -44,46 +47,63 @@ describe('AI API Routes Integration Tests', () => {
   });
   it('POST /api/ai/check-flyer should return a boolean', async () => {
-    const mockImageFile = new File(['content'], 'test.jpg', { type: 'image/jpeg' });
-    const response = await aiApiClient.isImageAFlyer(mockImageFile, authToken);
-    const result = await response.json();
+    const response = await request
+      .post('/api/ai/check-flyer')
+      .set('Authorization', `Bearer ${authToken}`)
+      .attach('image', Buffer.from('content'), 'test.jpg');
+    const result = response.body;
+    expect(response.status).toBe(200);
     // The backend is stubbed to always return true for this check
     expect(result.is_flyer).toBe(true);
   });
   it('POST /api/ai/extract-address should return a stubbed address', async () => {
-    const mockImageFile = new File(['content'], 'test.jpg', { type: 'image/jpeg' });
-    const response = await aiApiClient.extractAddressFromImage(mockImageFile, authToken);
-    const result = await response.json();
+    const response = await request
+      .post('/api/ai/extract-address')
+      .set('Authorization', `Bearer ${authToken}`)
+      .attach('image', Buffer.from('content'), 'test.jpg');
+    const result = response.body;
+    expect(response.status).toBe(200);
     expect(result.address).toBe('not identified');
   });
   it('POST /api/ai/extract-logo should return a stubbed response', async () => {
-    const mockImageFile = new File(['content'], 'test.jpg', { type: 'image/jpeg' });
-    const response = await aiApiClient.extractLogoFromImage([mockImageFile], authToken);
-    const result = await response.json();
+    const response = await request
+      .post('/api/ai/extract-logo')
+      .set('Authorization', `Bearer ${authToken}`)
+      .attach('images', Buffer.from('content'), 'test.jpg');
+    const result = response.body;
+    expect(response.status).toBe(200);
     expect(result).toEqual({ store_logo_base_64: null });
   });
   it('POST /api/ai/quick-insights should return a stubbed insight', async () => {
-    const response = await aiApiClient.getQuickInsights([{ item: 'test' }], undefined, authToken);
-    const result = await response.json();
+    const response = await request
+      .post('/api/ai/quick-insights')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ items: [{ item: 'test' }] });
+    const result = response.body;
+    expect(response.status).toBe(200);
     expect(result.text).toBe('This is a server-generated quick insight: buy the cheap stuff!');
   });
   it('POST /api/ai/deep-dive should return a stubbed analysis', async () => {
-    const response = await aiApiClient.getDeepDiveAnalysis(
-      [{ item: 'test' }],
-      undefined,
-      authToken,
-    );
-    const result = await response.json();
+    const response = await request
+      .post('/api/ai/deep-dive')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ items: [{ item: 'test' }] });
+    const result = response.body;
+    expect(response.status).toBe(200);
     expect(result.text).toBe('This is a server-generated deep dive analysis. It is very detailed.');
   });
   it('POST /api/ai/search-web should return a stubbed search result', async () => {
-    const response = await aiApiClient.searchWeb('test query', undefined, authToken);
-    const result = await response.json();
+    const response = await request
+      .post('/api/ai/search-web')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ query: 'test query' });
+    const result = response.body;
+    expect(response.status).toBe(200);
     expect(result).toEqual({ text: 'The web says this is good.', sources: [] });
   });
@@ -116,36 +136,32 @@ describe('AI API Routes Integration Tests', () => {
       created_at: new Date().toISOString(),
       updated_at: new Date().toISOString(),
     };
-    const response = await aiApiClient.planTripWithMaps(
-      [],
-      mockStore,
-      mockLocation,
-      undefined,
-      authToken,
-    );
+    const response = await request
+      .post('/api/ai/plan-trip')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ items: [], store: mockStore, userLocation: mockLocation });
     // The service for this endpoint is disabled and throws an error, which results in a 500.
-    expect(response.ok).toBe(false);
     expect(response.status).toBe(500);
-    const errorResult = await response.json();
+    const errorResult = response.body;
    expect(errorResult.message).toContain('planTripWithMaps');
   });
   it('POST /api/ai/generate-image should reject because it is not implemented', async () => {
     // The backend for this is not stubbed and will throw an error.
     // This test confirms that the endpoint is protected and responds as expected to a failure.
-    const response = await aiApiClient.generateImageFromText('a test prompt', undefined, authToken);
-    expect(response.ok).toBe(false);
+    const response = await request
+      .post('/api/ai/generate-image')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ prompt: 'a test prompt' });
     expect(response.status).toBe(501);
   });
   it('POST /api/ai/generate-speech should reject because it is not implemented', async () => {
     // The backend for this is not stubbed and will throw an error.
-    const response = await aiApiClient.generateSpeechFromText(
-      'a test prompt',
-      undefined,
-      authToken,
-    );
-    expect(response.ok).toBe(false);
+    const response = await request
+      .post('/api/ai/generate-speech')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send({ text: 'a test prompt' });
     expect(response.status).toBe(501);
   });
 });

src/tests/integration/auth.integration.test.ts

@@ -1,6 +1,7 @@
 // src/tests/integration/auth.integration.test.ts
 import { describe, it, expect, beforeAll, afterAll } from 'vitest';
-import { loginUser } from '../../services/apiClient';
+import supertest from 'supertest';
+import app from '../../../server';
 import { getPool } from '../../services/db/connection.db';
 import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
 import type { UserProfile } from '../../types';
@@ -9,6 +10,8 @@ import type { UserProfile } from '../../types';
  * @vitest-environment node
  */
+const request = supertest(app);
 /**
  * These are integration tests that verify the authentication flow against a running backend server.
  * Make sure your Express server is running before executing these tests.
@@ -16,30 +19,6 @@ import type { UserProfile } from '../../types';
  * To run only these tests: `vitest run src/tests/auth.integration.test.ts`
  */
 describe('Authentication API Integration', () => {
-  // --- START DEBUG LOGGING ---
-  // Query the DB from within the test file to see its state.
-  getPool()
-    .query(
-      'SELECT u.user_id, u.email, p.role FROM public.users u JOIN public.profiles p ON u.user_id = p.user_id',
-    )
-    .then((res) => {
-      console.log('\n--- [auth.integration.test.ts] Users found in DB from TEST perspective: ---');
-      console.table(res.rows);
-      console.log('--------------------------------------------------------------------------\n');
-    })
-    .catch((err) => console.error('--- [auth.integration.test.ts] DB QUERY FAILED ---', err));
-  // --- END DEBUG LOGGING ---
-  // --- START DEBUG LOGGING ---
-  // Log the database connection details as seen by an individual TEST FILE.
-  console.log('\n\n--- [AUTH.INTEGRATION.TEST LOG] DATABASE CONNECTION ---');
-  console.log(`  Host: ${process.env.DB_HOST}`);
-  console.log(`  Port: ${process.env.DB_PORT}`);
-  console.log(`  User: ${process.env.DB_USER}`);
-  console.log(`  Database: ${process.env.DB_NAME}`);
-  console.log('-----------------------------------------------------\n');
-  // --- END DEBUG LOGGING ---
   let testUserEmail: string;
   let testUser: UserProfile;
@@ -57,11 +36,14 @@
   // This test migrates the logic from the old DevTestRunner.tsx component.
   it('should successfully log in a registered user', async () => {
     // The `rememberMe` parameter is required. For a test, `false` is a safe default.
-    const response = await loginUser(testUserEmail, TEST_PASSWORD, false);
-    const data = await response.json();
+    const response = await request
+      .post('/api/auth/login')
+      .send({ email: testUserEmail, password: TEST_PASSWORD, rememberMe: false });
+    const data = response.body;
     // Assert that the API returns the expected structure
     expect(data).toBeDefined();
+    expect(response.status).toBe(200);
     expect(data.userprofile).toBeDefined();
     expect(data.userprofile.user.email).toBe(testUserEmail);
     expect(data.userprofile.user.user_id).toBeTypeOf('string');
@@ -74,9 +56,11 @@
     const wrongPassword = 'wrongpassword';
     // The loginUser function returns a Response object. We check its status.
-    const response = await loginUser(adminEmail, wrongPassword, false);
-    expect(response.ok).toBe(false);
-    const errorData = await response.json();
+    const response = await request
+      .post('/api/auth/login')
+      .send({ email: adminEmail, password: wrongPassword, rememberMe: false });
+    expect(response.status).toBe(401);
+    const errorData = response.body;
     expect(errorData.message).toBe('Incorrect email or password.');
   });
@@ -85,9 +69,11 @@
     const anyPassword = 'any-password';
     // The loginUser function returns a Response object. We check its status.
-    const response = await loginUser(nonExistentEmail, anyPassword, false);
-    expect(response.ok).toBe(false);
-    const errorData = await response.json();
+    const response = await request
+      .post('/api/auth/login')
+      .send({ email: nonExistentEmail, password: anyPassword, rememberMe: false });
+    expect(response.status).toBe(401);
+    const errorData = response.body;
     // Security best practice: the error message should be identical for wrong password and wrong email
     // to prevent user enumeration attacks.
     expect(errorData.message).toBe('Incorrect email or password.');
@@ -96,24 +82,21 @@
   it('should successfully refresh an access token using a refresh token cookie', async () => {
     // Arrange: Log in to get a fresh, valid refresh token cookie for this specific test.
     // This ensures the test is self-contained and not affected by other tests.
-    const loginResponse = await loginUser(testUserEmail, TEST_PASSWORD, true);
-    const setCookieHeader = loginResponse.headers.get('set-cookie');
-    const refreshTokenCookie = setCookieHeader?.split(';')[0];
+    const loginResponse = await request
+      .post('/api/auth/login')
+      .send({ email: testUserEmail, password: TEST_PASSWORD, rememberMe: true });
+    const refreshTokenCookie = loginResponse.headers['set-cookie'][0].split(';')[0];
     expect(refreshTokenCookie).toBeDefined();
     // Act: Make a request to the refresh-token endpoint, including the cookie.
-    const apiUrl = process.env.VITE_API_BASE_URL || 'http://localhost:3001/api';
-    const response = await fetch(`${apiUrl}/auth/refresh-token`, {
-      method: 'POST',
-      headers: {
-        Cookie: refreshTokenCookie!,
-      },
-    });
+    const response = await request
+      .post('/api/auth/refresh-token')
+      .set('Cookie', refreshTokenCookie!);
     // Assert: Check for a successful response and a new access token.
-    expect(response.ok).toBe(true);
-    const data = await response.json();
+    expect(response.status).toBe(200);
+    const data = response.body;
     expect(data.token).toBeTypeOf('string');
   });
@@ -122,40 +105,30 @@
     const invalidRefreshTokenCookie = 'refreshToken=this-is-not-a-valid-token';
     // Act: Make a request to the refresh-token endpoint with the invalid cookie.
-    const apiUrl = process.env.VITE_API_BASE_URL || 'http://localhost:3001/api';
-    const response = await fetch(`${apiUrl}/auth/refresh-token`, {
-      method: 'POST',
-      headers: {
-        Cookie: invalidRefreshTokenCookie,
-      },
-    });
+    const response = await request
+      .post('/api/auth/refresh-token')
+      .set('Cookie', invalidRefreshTokenCookie);
     // Assert: Check for a 403 Forbidden response.
-    expect(response.ok).toBe(false);
     expect(response.status).toBe(403);
-    const data = await response.json();
+    const data = response.body;
     expect(data.message).toBe('Invalid or expired refresh token.');
   });
   it('should successfully log out and clear the refresh token cookie', async () => {
     // Arrange: Log in to get a valid refresh token cookie.
-    const loginResponse = await loginUser(testUserEmail, TEST_PASSWORD, true);
-    const setCookieHeader = loginResponse.headers.get('set-cookie');
-    const refreshTokenCookie = setCookieHeader?.split(';')[0];
+    const loginResponse = await request
+      .post('/api/auth/login')
+      .send({ email: testUserEmail, password: TEST_PASSWORD, rememberMe: true });
+    const refreshTokenCookie = loginResponse.headers['set-cookie'][0].split(';')[0];
     expect(refreshTokenCookie).toBeDefined();
     // Act: Make a request to the new logout endpoint, including the cookie.
-    const apiUrl = process.env.VITE_API_BASE_URL || 'http://localhost:3001/api';
-    const response = await fetch(`${apiUrl}/auth/logout`, {
-      method: 'POST',
-      headers: {
-        Cookie: refreshTokenCookie!,
-      },
-    });
+    const response = await request.post('/api/auth/logout').set('Cookie', refreshTokenCookie!);
     // Assert: Check for a successful response and a cookie-clearing header.
-    expect(response.ok).toBe(true);
-    const logoutSetCookieHeader = response.headers.get('set-cookie');
+    expect(response.status).toBe(200);
+    const logoutSetCookieHeader = response.headers['set-cookie'][0];
     expect(logoutSetCookieHeader).toContain('refreshToken=;');
     expect(logoutSetCookieHeader).toContain('Max-Age=0');
   });


@@ -1,8 +1,9 @@
 // src/tests/integration/flyer-processing.integration.test.ts
 import { describe, it, expect, beforeAll, afterAll } from 'vitest';
+import supertest from 'supertest';
+import app from '../../../server';
 import fs from 'node:fs/promises';
 import path from 'path';
-import * as aiApiClient from '../../services/aiApiClient';
 import * as db from '../../services/db/index.db';
 import { getPool } from '../../services/db/connection.db';
 import { generateFileChecksum } from '../../utils/checksum';
@@ -14,6 +15,8 @@ import { createAndLoginUser } from '../utils/testHelpers';
  * @vitest-environment node
  */

+const request = supertest(app);
+
 describe('Flyer Processing Background Job Integration Test', () => {
   const createdUserIds: string[] = [];
   const createdFlyerIds: number[] = [];
@@ -68,19 +71,30 @@ describe('Flyer Processing Background Job Integration Test', () => {
     const checksum = await generateFileChecksum(mockImageFile);

     // Act 1: Upload the file to start the background job.
-    const uploadResponse = await aiApiClient.uploadAndProcessFlyer(mockImageFile, checksum, token);
-    const { jobId } = await uploadResponse.json();
+    const uploadReq = request
+      .post('/api/ai/upload-and-process')
+      .field('checksum', checksum)
+      .attach('flyerFile', uniqueContent, uniqueFileName);
+    if (token) {
+      uploadReq.set('Authorization', `Bearer ${token}`);
+    }
+    const uploadResponse = await uploadReq;
+    const { jobId } = uploadResponse.body;

     // Assert 1: Check that a job ID was returned.
     expect(jobId).toBeTypeOf('string');

     // Act 2: Poll for the job status until it completes.
     let jobStatus;
-    const maxRetries = 20; // Poll for up to 60 seconds (20 * 3s)
+    const maxRetries = 30; // Poll for up to 90 seconds (30 * 3s)
     for (let i = 0; i < maxRetries; i++) {
       await new Promise((resolve) => setTimeout(resolve, 3000)); // Wait 3 seconds between polls
-      const statusResponse = await aiApiClient.getJobStatus(jobId, token);
-      jobStatus = await statusResponse.json();
+      const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
+      if (token) {
+        statusReq.set('Authorization', `Bearer ${token}`);
+      }
+      const statusResponse = await statusReq;
+      jobStatus = statusResponse.body;
       if (jobStatus.state === 'completed' || jobStatus.state === 'failed') {
         break;
       }
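The poll-and-wait loop above is a pattern the other job-based tests could reuse. One possible extraction, assuming the same endpoint shape and the module-level supertest request used in this file:

// Hypothetical extraction of the polling loop above: resolves with the final
// job status, or with the last polled status once retries are exhausted.
async function pollJobStatus(
  jobId: string,
  token?: string,
  opts: { maxRetries: number; intervalMs: number } = { maxRetries: 30, intervalMs: 3000 },
): Promise<{ state?: string } | undefined> {
  let jobStatus: { state?: string } | undefined;
  for (let i = 0; i < opts.maxRetries; i++) {
    await new Promise((resolve) => setTimeout(resolve, opts.intervalMs));
    const statusReq = request.get(`/api/ai/jobs/${jobId}/status`);
    if (token) {
      statusReq.set('Authorization', `Bearer ${token}`);
    }
    jobStatus = (await statusReq).body;
    if (jobStatus?.state === 'completed' || jobStatus?.state === 'failed') {
      break;
    }
  }
  return jobStatus;
}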


@@ -1,7 +1,8 @@
 // src/tests/integration/flyer.integration.test.ts
 import { describe, it, expect, beforeAll } from 'vitest';
-import * as apiClient from '../../services/apiClient';
+import supertest from 'supertest';
 import { getPool } from '../../services/db/connection.db';
+import app from '../../../server';
 import type { Flyer, FlyerItem } from '../../types';

 /**
@@ -10,6 +11,8 @@ import type { Flyer, FlyerItem } from '../../types';
 describe('Public Flyer API Routes Integration Tests', () => {
   let flyers: Flyer[] = [];
+  // Use a supertest instance for all requests in this file
+  const request = supertest(app);
   let createdFlyerId: number;

   // Fetch flyers once before all tests in this suite to use in subsequent tests.
@@ -34,18 +37,16 @@ describe('Public Flyer API Routes Integration Tests', () => {
       [createdFlyerId],
     );

-    const response = await apiClient.fetchFlyers();
-    flyers = await response.json();
+    const response = await request.get('/api/flyers');
+    flyers = response.body;
   });

   describe('GET /api/flyers', () => {
     it('should return a list of flyers', async () => {
       // Act: Call the API endpoint using the client function.
-      const response = await apiClient.fetchFlyers();
-      const flyers: Flyer[] = await response.json();
-
-      // Assert: Verify the response is successful and contains the expected data structure.
-      expect(response.ok).toBe(true);
+      const response = await request.get('/api/flyers');
+      const flyers: Flyer[] = response.body;
+      expect(response.status).toBe(200);
       expect(flyers).toBeInstanceOf(Array);
       // We created a flyer in beforeAll, so we expect the array not to be empty.
@@ -69,11 +70,10 @@ describe('Public Flyer API Routes Integration Tests', () => {
       const testFlyer = flyers[0];

       // Act: Fetch items for the first flyer.
-      const response = await apiClient.fetchFlyerItems(testFlyer.flyer_id);
-      const items: FlyerItem[] = await response.json();
-
-      // Assert: Verify the response and data structure.
-      expect(response.ok).toBe(true);
+      const response = await request.get(`/api/flyers/${testFlyer.flyer_id}/items`);
+      const items: FlyerItem[] = response.body;
+      expect(response.status).toBe(200);
       expect(items).toBeInstanceOf(Array);

       // If there are items, check the shape of the first one.
@@ -87,18 +87,16 @@ describe('Public Flyer API Routes Integration Tests', () => {
     });
   });

-  describe('POST /api/flyer-items/batch-fetch', () => {
+  describe('POST /api/flyers/items/batch-fetch', () => {
     it('should return items for multiple flyer IDs', async () => {
       // Arrange: Get IDs from the flyers fetched in beforeAll.
       const flyerIds = flyers.map((f) => f.flyer_id);
       expect(flyerIds.length).toBeGreaterThan(0);

       // Act: Fetch items for all available flyers.
-      const response = await apiClient.fetchFlyerItemsForFlyers(flyerIds);
-      const items: FlyerItem[] = await response.json();
-
-      // Assert
-      expect(response.ok).toBe(true);
+      const response = await request.post('/api/flyers/items/batch-fetch').send({ flyerIds });
+      const items: FlyerItem[] = response.body;
+      expect(response.status).toBe(200);
       expect(items).toBeInstanceOf(Array);
       // The total number of items should be greater than or equal to the number of flyers (assuming at least one item per flyer).
       if (items.length > 0) {
@@ -107,15 +105,15 @@ describe('Public Flyer API Routes Integration Tests', () => {
     });
   });

-  describe('POST /api/flyer-items/batch-count', () => {
+  describe('POST /api/flyers/items/batch-count', () => {
     it('should return the total count of items for multiple flyer IDs', async () => {
       // Arrange
       const flyerIds = flyers.map((f) => f.flyer_id);
       expect(flyerIds.length).toBeGreaterThan(0);

       // Act
-      const response = await apiClient.countFlyerItemsForFlyers(flyerIds);
-      const result = await response.json();
+      const response = await request.post('/api/flyers/items/batch-count').send({ flyerIds });
+      const result = response.body;

       // Assert
       expect(result.count).toBeTypeOf('number');


@@ -0,0 +1,141 @@
// src/tests/integration/price.integration.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import supertest from 'supertest';
import app from '../../../server';
import { getPool } from '../../services/db/connection.db';
/**
* @vitest-environment node
*/
const request = supertest(app);
describe('Price History API Integration Test (/api/price-history)', () => {
let masterItemId: number;
let storeId: number;
let flyerId1: number;
let flyerId2: number;
let flyerId3: number;
beforeAll(async () => {
const pool = getPool();
// 1. Create a master grocery item
const masterItemRes = await pool.query(
`INSERT INTO public.master_grocery_items (name, category_id) VALUES ('Integration Test Apples', (SELECT category_id FROM categories WHERE name = 'Fruits & Vegetables' LIMIT 1)) RETURNING master_grocery_item_id`,
);
masterItemId = masterItemRes.rows[0].master_grocery_item_id;
// 2. Create a store
const storeRes = await pool.query(
`INSERT INTO public.stores (name) VALUES ('Integration Price Test Store') RETURNING store_id`,
);
storeId = storeRes.rows[0].store_id;
// 3. Create two flyers with different dates
const flyerRes1 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-1.jpg', 'http://test.com/price-1.jpg', 1, $2, '2025-01-01') RETURNING flyer_id`,
[storeId, `checksum-price-1-${Date.now()}`],
);
flyerId1 = flyerRes1.rows[0].flyer_id;
const flyerRes2 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-2.jpg', 'http://test.com/price-2.jpg', 1, $2, '2025-01-08') RETURNING flyer_id`,
[storeId, `checksum-price-2-${Date.now()}`],
);
flyerId2 = flyerRes2.rows[0].flyer_id; // This was a duplicate, fixed.
const flyerRes3 = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum, valid_from)
VALUES ($1, 'price-test-3.jpg', 'http://test.com/price-3.jpg', 1, $2, '2025-01-15') RETURNING flyer_id`,
[storeId, `checksum-price-3-${Date.now()}`],
);
flyerId3 = flyerRes3.rows[0].flyer_id;
// 4. Create flyer items linking the master item to the flyers with prices
await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 199, '$1.99', '1')`,
[flyerId1, masterItemId],
);
await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 249, '$2.49', '1')`,
[flyerId2, masterItemId],
);
await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 299, '$2.99', '1')`,
[flyerId3, masterItemId],
);
});
afterAll(async () => {
const pool = getPool();
// The CASCADE on the tables should handle flyer_items.
// We just need to delete the flyers, store, and master item.
const flyerIds = [flyerId1, flyerId2, flyerId3].filter(Boolean);
if (flyerIds.length > 0) {
await pool.query('DELETE FROM public.flyers WHERE flyer_id = ANY($1::int[])', [flyerIds]);
}
if (storeId) await pool.query('DELETE FROM public.stores WHERE store_id = $1', [storeId]);
if (masterItemId)
await pool.query('DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = $1', [
masterItemId,
]);
});
it('should return the correct price history for a given master item ID', async () => {
const response = await request.post('/api/price-history').send({ masterItemIds: [masterItemId] });
expect(response.status).toBe(200);
expect(response.body).toBeInstanceOf(Array);
expect(response.body).toHaveLength(3);
expect(response.body[0]).toMatchObject({ master_item_id: masterItemId, price_in_cents: 199 });
expect(response.body[1]).toMatchObject({ master_item_id: masterItemId, price_in_cents: 249 });
expect(response.body[2]).toMatchObject({ master_item_id: masterItemId, price_in_cents: 299 });
});
it('should respect the limit parameter', async () => {
const response = await request
.post('/api/price-history')
.send({ masterItemIds: [masterItemId], limit: 2 });
expect(response.status).toBe(200);
expect(response.body).toHaveLength(2);
expect(response.body[0].price_in_cents).toBe(199);
expect(response.body[1].price_in_cents).toBe(249);
});
it('should respect the offset parameter', async () => {
const response = await request
.post('/api/price-history')
.send({ masterItemIds: [masterItemId], limit: 2, offset: 1 });
expect(response.status).toBe(200);
expect(response.body).toHaveLength(2);
expect(response.body[0].price_in_cents).toBe(249);
expect(response.body[1].price_in_cents).toBe(299);
});
it('should return price history sorted by date in ascending order', async () => {
const response = await request.post('/api/price-history').send({ masterItemIds: [masterItemId] });
expect(response.status).toBe(200);
const history = response.body;
expect(history).toHaveLength(3);
const date1 = new Date(history[0].date).getTime();
const date2 = new Date(history[1].date).getTime();
const date3 = new Date(history[2].date).getTime();
expect(date1).toBeLessThan(date2);
expect(date2).toBeLessThan(date3);
});
it('should return an empty array for a master item ID with no price history', async () => {
const response = await request.post('/api/price-history').send({ masterItemIds: [999999] });
expect(response.status).toBe(200);
expect(response.body).toEqual([]);
});
});
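The assertions above pin down the endpoint's contract: rows shaped like { master_item_id, price_in_cents, date }, sorted by date ascending, with limit/offset paging. A SQL shape consistent with that contract, offered as an assumption about the route rather than its actual implementation:

// Assumed query shape (the real route's SQL may differ): flyer_items joined to
// flyers so valid_from can serve as the price date, ordered ascending, paged.
import { getPool } from '../../services/db/connection.db';

export async function fetchPriceHistory(masterItemIds: number[], limit = 100, offset = 0) {
  const sql = `
    SELECT fi.master_item_id, fi.price_in_cents, f.valid_from AS date
    FROM public.flyer_items fi
    JOIN public.flyers f ON f.flyer_id = fi.flyer_id
    WHERE fi.master_item_id = ANY($1::int[])
    ORDER BY f.valid_from ASC
    LIMIT $2 OFFSET $3`;
  const { rows } = await getPool().query(sql, [masterItemIds, limit, offset]);
  return rows; // each row matches the PriceHistoryData interface added in types
}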


@@ -1,108 +0,0 @@
// src/tests/integration/public.integration.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import * as apiClient from '../../services/apiClient';
import { getPool } from '../../services/db/connection.db';
/**
* @vitest-environment node
*/
describe('Public API Routes Integration Tests', () => {
let createdFlyerId: number;
let createdMasterItemId: number;
beforeAll(async () => {
const pool = getPool();
// Create a store for the flyer
const storeRes = await pool.query(
`INSERT INTO public.stores (name) VALUES ('Public Test Store') RETURNING store_id`,
);
const storeId = storeRes.rows[0].store_id;
// Create a flyer
const flyerRes = await pool.query(
`INSERT INTO public.flyers (store_id, file_name, image_url, item_count, checksum)
VALUES ($1, 'public-test.jpg', 'http://test.com/public.jpg', 0, $2) RETURNING flyer_id`,
[storeId, `checksum-public-${Date.now()}`],
);
createdFlyerId = flyerRes.rows[0].flyer_id;
// Create a master item. Assumes a category with ID 1 exists from static seeds.
const masterItemRes = await pool.query(
`INSERT INTO public.master_grocery_items (name, category_id) VALUES ('Public Test Item', 1) RETURNING master_grocery_item_id`,
);
createdMasterItemId = masterItemRes.rows[0].master_grocery_item_id;
});
afterAll(async () => {
const pool = getPool();
// Cleanup in reverse order of creation
if (createdMasterItemId) {
await pool.query(
'DELETE FROM public.master_grocery_items WHERE master_grocery_item_id = $1',
[createdMasterItemId],
);
}
if (createdFlyerId) {
await pool.query('DELETE FROM public.flyers WHERE flyer_id = $1', [createdFlyerId]);
}
});
describe('Health Check Endpoints', () => {
it('GET /api/health/ping should return "pong"', async () => {
const response = await apiClient.pingBackend();
expect(response.ok).toBe(true);
expect(await response.text()).toBe('pong');
});
it('GET /api/health/db-schema should return success', async () => {
const response = await apiClient.checkDbSchema();
const result = await response.json();
expect(result.success).toBe(true);
expect(result.message).toBe('All required database tables exist.');
});
it('GET /api/health/storage should return success', async () => {
// This assumes the STORAGE_PATH is correctly set up for the test environment
const response = await apiClient.checkStorage();
const result = await response.json();
expect(result.success).toBe(true);
expect(result.message).toContain('is accessible and writable');
});
it('GET /api/health/db-pool should return success', async () => {
const response = await apiClient.checkDbPoolHealth();
// The pingBackend function returns a boolean directly, so no .json() call is needed.
// However, checkDbPoolHealth returns a Response, so we need to parse it.
const result = await response.json();
expect(result.success).toBe(true);
expect(result.message).toContain('Pool Status:');
});
});
describe('Public Data Endpoints', () => {
it('GET /api/flyers should return a list of flyers', async () => {
const response = await apiClient.fetchFlyers();
const flyers = await response.json();
expect(flyers).toBeInstanceOf(Array);
// We created a flyer, so we expect it to be in the list.
expect(flyers.length).toBeGreaterThan(0);
const foundFlyer = flyers.find((f: { flyer_id: number }) => f.flyer_id === createdFlyerId);
expect(foundFlyer).toBeDefined();
expect(foundFlyer).toHaveProperty('store');
});
it('GET /api/master-items should return a list of master items', async () => {
const response = await apiClient.fetchMasterItems();
const masterItems = await response.json();
expect(masterItems).toBeInstanceOf(Array);
// We created a master item, so we expect it to be in the list.
expect(masterItems.length).toBeGreaterThan(0);
const foundItem = masterItems.find(
(i: { master_grocery_item_id: number }) => i.master_grocery_item_id === createdMasterItemId,
);
expect(foundItem).toBeDefined();
expect(foundItem).toHaveProperty('category_name');
});
});
});


@@ -1,6 +1,7 @@
 // src/tests/integration/public.routes.integration.test.ts
 import { describe, it, expect, beforeAll, afterAll } from 'vitest';
 import supertest from 'supertest';
+import app from '../../../server';
 import type {
   Flyer,
   FlyerItem,
@@ -13,8 +14,11 @@ import type {
 import { getPool } from '../../services/db/connection.db';
 import { createAndLoginUser } from '../utils/testHelpers';

-const API_URL = process.env.VITE_API_BASE_URL || 'http://localhost:3001/api';
-const request = supertest(API_URL.replace('/api', '')); // supertest needs the server's base URL
+/**
+ * @vitest-environment node
+ */
+
+const request = supertest(app);

 describe('Public API Routes Integration Tests', () => {
   // Shared state for tests
@@ -97,17 +101,17 @@ describe('Public API Routes Integration Tests', () => {
       expect(response.status).toBe(200);
       expect(response.body.success).toBe(true);
     });
-  });

-  describe('Public Data Endpoints', () => {
-    it('GET /api/time should return the server time', async () => {
-      const response = await request.get('/api/time');
+    it('GET /api/health/time should return the server time', async () => {
+      const response = await request.get('/api/health/time');
       expect(response.status).toBe(200);
       expect(response.body).toHaveProperty('currentTime');
       expect(response.body).toHaveProperty('year');
       expect(response.body).toHaveProperty('week');
     });
+  });

+  describe('Public Data Endpoints', () => {
     it('GET /api/flyers should return a list of flyers', async () => {
       const response = await request.get('/api/flyers');
       const flyers: Flyer[] = response.body;
@@ -126,25 +130,25 @@ describe('Public API Routes Integration Tests', () => {
       expect(items[0].flyer_id).toBe(testFlyer.flyer_id);
     });

-    it('POST /api/flyer-items/batch-fetch should return items for multiple flyers', async () => {
+    it('POST /api/flyers/items/batch-fetch should return items for multiple flyers', async () => {
       const flyerIds = [testFlyer.flyer_id];
-      const response = await request.post('/api/flyer-items/batch-fetch').send({ flyerIds });
+      const response = await request.post('/api/flyers/items/batch-fetch').send({ flyerIds });
       const items: FlyerItem[] = response.body;
       expect(response.status).toBe(200);
       expect(items).toBeInstanceOf(Array);
       expect(items.length).toBeGreaterThan(0);
     });

-    it('POST /api/flyer-items/batch-count should return a count for multiple flyers', async () => {
+    it('POST /api/flyers/items/batch-count should return a count for multiple flyers', async () => {
       const flyerIds = [testFlyer.flyer_id];
-      const response = await request.post('/api/flyer-items/batch-count').send({ flyerIds });
+      const response = await request.post('/api/flyers/items/batch-count').send({ flyerIds });
       expect(response.status).toBe(200);
       expect(response.body.count).toBeTypeOf('number');
       expect(response.body.count).toBeGreaterThan(0);
     });

-    it('GET /api/master-items should return a list of master grocery items', async () => {
-      const response = await request.get('/api/master-items');
+    it('GET /api/personalization/master-items should return a list of master grocery items', async () => {
+      const response = await request.get('/api/personalization/master-items');
       const masterItems = response.body;
       expect(response.status).toBe(200);
       expect(masterItems).toBeInstanceOf(Array);
@@ -190,9 +194,9 @@ describe('Public API Routes Integration Tests', () => {
       expect(items).toBeInstanceOf(Array);
     });

-    it('GET /api/dietary-restrictions should return a list of restrictions', async () => {
+    it('GET /api/personalization/dietary-restrictions should return a list of restrictions', async () => {
       // This test relies on static seed data for a lookup table, which is acceptable.
-      const response = await request.get('/api/dietary-restrictions');
+      const response = await request.get('/api/personalization/dietary-restrictions');
       const restrictions: DietaryRestriction[] = response.body;
       expect(response.status).toBe(200);
       expect(restrictions).toBeInstanceOf(Array);
@@ -200,8 +204,8 @@ describe('Public API Routes Integration Tests', () => {
       expect(restrictions[0]).toHaveProperty('dietary_restriction_id');
     });

-    it('GET /api/appliances should return a list of appliances', async () => {
-      const response = await request.get('/api/appliances');
+    it('GET /api/personalization/appliances should return a list of appliances', async () => {
+      const response = await request.get('/api/personalization/appliances');
       const appliances: Appliance[] = response.body;
       expect(response.status).toBe(200);
       expect(appliances).toBeInstanceOf(Array);


@@ -1,6 +1,7 @@
 // src/tests/integration/system.integration.test.ts
 import { describe, it, expect } from 'vitest';
-import * as apiClient from '../../services/apiClient';
+import supertest from 'supertest';
+import app from '../../../server';

 /**
  * @vitest-environment node
@@ -9,15 +10,16 @@ import * as apiClient from '../../services/apiClient';
 describe('System API Routes Integration Tests', () => {
   describe('GET /api/system/pm2-status', () => {
     it('should return a status for PM2', async () => {
+      const request = supertest(app);
       // In a typical CI environment without PM2, this will fail gracefully.
       // The test verifies that the endpoint responds correctly, even if PM2 isn't running.
-      const response = await apiClient.checkPm2Status();
-      const result = await response.json();
+      const response = await request.get('/api/system/pm2-status');
+      const result = response.body;
       expect(result).toBeDefined();
       expect(result).toHaveProperty('message');
       // If the response is successful (200 OK), it must have a 'success' property.
       // If it's an error (e.g., 500 because pm2 command not found), it will only have 'message'.
-      if (response.ok) {
+      if (response.status === 200) {
         expect(result).toHaveProperty('success');
       }
     });
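The "fail gracefully" contract the test encodes (every response carries a message; only 200 responses carry success) implies a handler along these lines. This is a sketch under that assumption; the repo's actual route may be implemented differently:

// Assumed handler shape: errors (e.g. pm2 missing in CI) yield a 500 with only
// a message; healthy responses add success plus the parsed process list.
import { Router } from 'express';
import { exec } from 'node:child_process';

const router = Router();

router.get('/pm2-status', (_req, res) => {
  exec('pm2 jlist', (err, stdout) => {
    if (err) {
      return res.status(500).json({ message: `pm2 unavailable: ${err.message}` });
    }
    res.json({ success: true, message: 'pm2 is running', processes: JSON.parse(stdout) });
  });
});

export default router;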


@@ -1,6 +1,7 @@
 // src/tests/integration/user.integration.test.ts
 import { describe, it, expect, beforeAll, afterAll } from 'vitest';
-import * as apiClient from '../../services/apiClient';
+import supertest from 'supertest';
+import app from '../../../server';
 import { logger } from '../../services/logger.server';
 import { getPool } from '../../services/db/connection.db';
 import type { UserProfile, MasterGroceryItem, ShoppingList } from '../../types';
@@ -10,25 +11,12 @@ import { createAndLoginUser, TEST_PASSWORD } from '../utils/testHelpers';
  * @vitest-environment node
  */

+const request = supertest(app);
+
 describe('User API Routes Integration Tests', () => {
   let testUser: UserProfile;
   let authToken: string;

-  // --- START DEBUG LOGGING ---
-  // Query the DB from within the test file to see its state.
-  beforeAll(async () => {
-    const res = await getPool().query(
-      'SELECT u.user_id, u.email, p.role FROM public.users u JOIN public.profiles p ON u.user_id = p.user_id',
-    );
-    console.log(
-      '\n--- [user.integration.test.ts] Users found in DB from TEST perspective (beforeAll): ---',
-    );
-    console.table(res.rows);
-    console.log(
-      '-------------------------------------------------------------------------------------\n',
-    );
-  });
-  // --- END DEBUG LOGGING ---
-
   // Before any tests run, create a new user and log them in.
   // The token will be used for all subsequent API calls in this test suite.
   beforeAll(async () => {
@@ -62,11 +50,13 @@ describe('User API Routes Integration Tests', () => {
   it('should fetch the authenticated user profile via GET /api/users/profile', async () => {
     // Act: Call the API endpoint using the authenticated token.
-    const response = await apiClient.getAuthenticatedUserProfile({ tokenOverride: authToken });
-    const profile = await response.json();
+    const response = await request
+      .get('/api/users/profile')
+      .set('Authorization', `Bearer ${authToken}`);
+    const profile = response.body;

     // Assert: Verify the profile data matches the created user.
-    expect(profile).toBeDefined();
+    expect(response.status).toBe(200);
     expect(profile.user.user_id).toBe(testUser.user.user_id);
     expect(profile.user.email).toBe(testUser.user.email); // This was already correct
     expect(profile.full_name).toBe('Test User');
@@ -80,20 +70,21 @@ describe('User API Routes Integration Tests', () => {
     };

     // Act: Call the update endpoint with the new data and the auth token.
-    const response = await apiClient.updateUserProfile(profileUpdates, {
-      tokenOverride: authToken,
-    });
-    const updatedProfile = await response.json();
+    const response = await request
+      .put('/api/users/profile')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send(profileUpdates);
+    const updatedProfile = response.body;

     // Assert: Check that the returned profile reflects the changes.
-    expect(updatedProfile).toBeDefined();
+    expect(response.status).toBe(200);
     expect(updatedProfile.full_name).toBe('Updated Test User');

     // Also, fetch the profile again to ensure the change was persisted.
-    const refetchResponse = await apiClient.getAuthenticatedUserProfile({
-      tokenOverride: authToken,
-    });
-    const refetchedProfile = await refetchResponse.json();
+    const refetchResponse = await request
+      .get('/api/users/profile')
+      .set('Authorization', `Bearer ${authToken}`);
+    const refetchedProfile = refetchResponse.body;
     expect(refetchedProfile.full_name).toBe('Updated Test User');
   });
@@ -104,14 +95,14 @@ describe('User API Routes Integration Tests', () => {
     };

     // Act: Call the update endpoint.
-    const response = await apiClient.updateUserPreferences(preferenceUpdates, {
-      tokenOverride: authToken,
-    });
-    const updatedProfile = await response.json();
+    const response = await request
+      .put('/api/users/profile/preferences')
+      .set('Authorization', `Bearer ${authToken}`)
+      .send(preferenceUpdates);
+    const updatedProfile = response.body;

     // Assert: Check that the preferences object in the returned profile is updated.
-    expect(updatedProfile).toBeDefined();
-    expect(updatedProfile.preferences).toBeDefined();
+    expect(response.status).toBe(200);
     expect(updatedProfile.preferences?.darkMode).toBe(true);
   });
@@ -122,9 +113,14 @@ describe('User API Routes Integration Tests', () => {
     // Act & Assert: Attempt to register and expect the promise to reject
     // with an error message indicating the password is too weak.
-    const response = await apiClient.registerUser(email, weakPassword, 'Weak Password User');
-    expect(response.ok).toBe(false);
-    const errorData = (await response.json()) as { message: string; errors: { message: string }[] };
+    const response = await request.post('/api/auth/register').send({
+      email,
+      password: weakPassword,
+      full_name: 'Weak Password User',
+    });
+    expect(response.status).toBe(400);
+    const errorData = response.body as { message: string; errors: { message: string }[] };

     // For validation errors, the detailed messages are in the `errors` array.
     // We join them to check for the specific feedback from the password strength checker.
     const detailedErrorMessage = errorData.errors?.map((e) => e.message).join(' ');
@@ -137,18 +133,22 @@ describe('User API Routes Integration Tests', () => {
     const { token: deletionToken } = await createAndLoginUser({ email: deletionEmail });

     // Act: Call the delete endpoint with the correct password and token.
-    const response = await apiClient.deleteUserAccount(TEST_PASSWORD, {
-      tokenOverride: deletionToken,
-    });
-    const deleteResponse = await response.json();
+    const response = await request
+      .delete('/api/users/account')
+      .set('Authorization', `Bearer ${deletionToken}`)
+      .send({ password: TEST_PASSWORD });
+    const deleteResponse = response.body;

     // Assert: Check for a successful deletion message.
+    expect(response.status).toBe(200);
     expect(deleteResponse.message).toBe('Account deleted successfully.');

     // Assert (Verification): Attempting to log in again with the same credentials should now fail.
-    const loginResponse = await apiClient.loginUser(deletionEmail, TEST_PASSWORD, false);
-    expect(loginResponse.ok).toBe(false);
-    const errorData = await loginResponse.json();
+    const loginResponse = await request
+      .post('/api/auth/login')
+      .send({ email: deletionEmail, password: TEST_PASSWORD });
+    expect(loginResponse.status).toBe(401);
+    const errorData = loginResponse.body;
     expect(errorData.message).toBe('Incorrect email or password.');
   });
@@ -158,12 +158,14 @@ describe('User API Routes Integration Tests', () => {
     const { user: resetUser } = await createAndLoginUser({ email: resetEmail });

     // Act 1: Request a password reset. In our test environment, the token is returned in the response.
-    const resetRequestRawResponse = await apiClient.requestPasswordReset(resetEmail);
-    if (!resetRequestRawResponse.ok) {
-      const errorData = await resetRequestRawResponse.json();
+    const resetRequestRawResponse = await request
+      .post('/api/auth/forgot-password')
+      .send({ email: resetEmail });
+    if (resetRequestRawResponse.status !== 200) {
+      const errorData = resetRequestRawResponse.body;
       throw new Error(errorData.message || 'Password reset request failed');
     }
-    const resetRequestResponse = await resetRequestRawResponse.json();
+    const resetRequestResponse = resetRequestRawResponse.body;
     const resetToken = resetRequestResponse.token;

     // Assert 1: Check that we received a token.
@@ -172,19 +174,23 @@ describe('User API Routes Integration Tests', () => {
     // Act 2: Use the token to set a new password.
     const newPassword = 'my-new-secure-password-!@#$';
-    const resetRawResponse = await apiClient.resetPassword(resetToken!, newPassword);
-    if (!resetRawResponse.ok) {
-      const errorData = await resetRawResponse.json();
+    const resetRawResponse = await request
+      .post('/api/auth/reset-password')
+      .send({ token: resetToken!, newPassword });
+    if (resetRawResponse.status !== 200) {
+      const errorData = resetRawResponse.body;
       throw new Error(errorData.message || 'Password reset failed');
     }
-    const resetResponse = await resetRawResponse.json();
+    const resetResponse = resetRawResponse.body;

     // Assert 2: Check for a successful password reset message.
     expect(resetResponse.message).toBe('Password has been reset successfully.');

     // Act 3 & Assert 3 (Verification): Log in with the NEW password to confirm the change.
-    const loginResponse = await apiClient.loginUser(resetEmail, newPassword, false);
-    const loginData = await loginResponse.json();
+    const loginResponse = await request
+      .post('/api/auth/login')
+      .send({ email: resetEmail, password: newPassword });
+    const loginData = loginResponse.body;
     expect(loginData.userprofile).toBeDefined();
     expect(loginData.userprofile.user.user_id).toBe(resetUser.user.user_id);
   });
@@ -192,20 +198,21 @@ describe('User API Routes Integration Tests', () => {
   describe('User Data Routes (Watched Items & Shopping Lists)', () => {
     it('should allow a user to add and remove a watched item', async () => {
       // Act 1: Add a new watched item. The API returns the created master item.
-      const addResponse = await apiClient.addWatchedItem(
-        'Integration Test Item',
-        'Other/Miscellaneous',
-        authToken,
-      );
-      const newItem = await addResponse.json();
+      const addResponse = await request
+        .post('/api/users/watched-items')
+        .set('Authorization', `Bearer ${authToken}`)
+        .send({ itemName: 'Integration Test Item', category: 'Other/Miscellaneous' });
+      const newItem = addResponse.body;

       // Assert 1: Check that the item was created correctly.
-      expect(newItem).toBeDefined();
+      expect(addResponse.status).toBe(201);
       expect(newItem.name).toBe('Integration Test Item');

       // Act 2: Fetch all watched items for the user.
-      const watchedItemsResponse = await apiClient.fetchWatchedItems(authToken);
-      const watchedItems = await watchedItemsResponse.json();
+      const watchedItemsResponse = await request
+        .get('/api/users/watched-items')
+        .set('Authorization', `Bearer ${authToken}`);
+      const watchedItems = watchedItemsResponse.body;

       // Assert 2: Verify the new item is in the user's watched list.
       expect(
@@ -216,11 +223,16 @@ describe('User API Routes Integration Tests', () => {
       ).toBe(true);

       // Act 3: Remove the watched item.
-      await apiClient.removeWatchedItem(newItem.master_grocery_item_id, authToken);
+      const removeResponse = await request
+        .delete(`/api/users/watched-items/${newItem.master_grocery_item_id}`)
+        .set('Authorization', `Bearer ${authToken}`);
+      expect(removeResponse.status).toBe(204);

       // Assert 3: Fetch again and verify the item is gone.
-      const finalWatchedItemsResponse = await apiClient.fetchWatchedItems(authToken);
-      const finalWatchedItems = await finalWatchedItemsResponse.json();
+      const finalWatchedItemsResponse = await request
+        .get('/api/users/watched-items')
+        .set('Authorization', `Bearer ${authToken}`);
+      const finalWatchedItems = finalWatchedItemsResponse.body;
       expect(
         finalWatchedItems.some(
           (item: MasterGroceryItem) =>
@@ -231,31 +243,33 @@ describe('User API Routes Integration Tests', () => {
     it('should allow a user to manage a shopping list', async () => {
       // Act 1: Create a new shopping list.
-      const createListResponse = await apiClient.createShoppingList(
-        'My Integration Test List',
-        authToken,
-      );
-      const newList = await createListResponse.json();
+      const createListResponse = await request
+        .post('/api/users/shopping-lists')
+        .set('Authorization', `Bearer ${authToken}`)
+        .send({ name: 'My Integration Test List' });
+      const newList = createListResponse.body;

       // Assert 1: Check that the list was created.
-      expect(newList).toBeDefined();
+      expect(createListResponse.status).toBe(201);
       expect(newList.name).toBe('My Integration Test List');

       // Act 2: Add an item to the new list.
-      const addItemResponse = await apiClient.addShoppingListItem(
-        newList.shopping_list_id,
-        { customItemName: 'Custom Test Item' },
-        authToken,
-      );
-      const addedItem = await addItemResponse.json();
+      const addItemResponse = await request
+        .post(`/api/users/shopping-lists/${newList.shopping_list_id}/items`)
+        .set('Authorization', `Bearer ${authToken}`)
+        .send({ customItemName: 'Custom Test Item' });
+      const addedItem = addItemResponse.body;

       // Assert 2: Check that the item was added.
-      expect(addedItem).toBeDefined();
+      expect(addItemResponse.status).toBe(201);
       expect(addedItem.custom_item_name).toBe('Custom Test Item');

       // Assert 3: Fetch all lists and verify the new item is present in the correct list.
-      const fetchResponse = await apiClient.fetchShoppingLists(authToken);
-      const lists = await fetchResponse.json();
+      const fetchResponse = await request
+        .get('/api/users/shopping-lists')
+        .set('Authorization', `Bearer ${authToken}`);
+      const lists = fetchResponse.body;
+      expect(fetchResponse.status).toBe(200);
       const updatedList = lists.find(
         (l: ShoppingList) => l.shopping_list_id === newList.shopping_list_id,
       );


@@ -1,42 +1,30 @@
 // src/tests/integration/user.routes.integration.test.ts
 import { describe, it, expect, beforeAll, afterAll } from 'vitest';
 import supertest from 'supertest';
+import app from '../../../server';
 import { getPool } from '../../services/db/connection.db';
 import type { UserProfile } from '../../types';
+import { createAndLoginUser } from '../utils/testHelpers';

-const API_URL = process.env.VITE_API_BASE_URL || 'http://localhost:3001/api';
-const request = supertest(API_URL.replace('/api', '')); // supertest needs the server's base URL
+/**
+ * @vitest-environment node
+ */
+
+const request = supertest(app);

 let authToken = '';
 let createdListId: number;
 let testUser: UserProfile;
-const testPassword = 'password-for-user-routes-test';

 describe('User Routes Integration Tests (/api/users)', () => {
   // Authenticate once before all tests in this suite to get a JWT.
   beforeAll(async () => {
-    // Create a new user for this test suite to avoid dependency on seeded data
-    const testEmail = `user-routes-test-${Date.now()}@example.com`;
-
-    // 1. Register the user
-    const registerResponse = await request
-      .post('/api/auth/register')
-      .send({ email: testEmail, password: testPassword, full_name: 'User Routes Test User' });
-    expect(registerResponse.status).toBe(201);
-
-    // 2. Log in as the new user
-    const loginResponse = await request
-      .post('/api/auth/login')
-      .send({ email: testEmail, password: testPassword });
-    if (loginResponse.status !== 200) {
-      console.error('Login failed in beforeAll hook:', loginResponse.body);
-    }
-    expect(loginResponse.status).toBe(200);
-    expect(loginResponse.body.token).toBeDefined();
-    authToken = loginResponse.body.token;
-    testUser = loginResponse.body.userprofile;
+    // Use the helper to create and log in a user in one step.
+    const { user, token } = await createAndLoginUser({
+      fullName: 'User Routes Test User',
+    });
+    testUser = user;
+    authToken = token;
   });

   afterAll(async () => {
afterAll(async () => { afterAll(async () => {


@@ -955,3 +955,9 @@ export interface AdminUserView {
   full_name: string | null;
   avatar_url: string | null;
 }
+
+export interface PriceHistoryData {
+  master_item_id: number;
+  price_in_cents: number;
+  date: string; // ISO date string
+}

src/utils/authUtils.ts (new file)

@@ -0,0 +1,20 @@
// src/utils/authUtils.ts
import zxcvbn from 'zxcvbn';
/**
* Validates the strength of a password using zxcvbn.
* @param password The password to validate.
* @returns An object with `isValid` and a feedback message.
*/
export function validatePasswordStrength(password: string): {
isValid: boolean;
feedback: string;
} {
const result = zxcvbn(password);
// Score: 0-4. We require at least 3.
if (result.score < 3) {
const suggestions = result.feedback.suggestions.join(' ');
return { isValid: false, feedback: `Password is too weak. ${suggestions}` };
}
return { isValid: true, feedback: '' };
}
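A usage sketch showing how this helper can back the 400-with-errors behavior exercised by the weak-password registration test; the wiring is an assumption, and only validatePasswordStrength comes from the file above:

// Assumed wiring: turn the helper's feedback into the `errors` array shape
// that the integration test joins and inspects.
import { validatePasswordStrength } from './authUtils';

export function checkRegistrationPassword(password: string): { message: string }[] {
  const { isValid, feedback } = validatePasswordStrength(password);
  return isValid ? [] : [{ message: feedback }];
}

// checkRegistrationPassword('123456') -> [{ message: 'Password is too weak. ...' }]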

src/utils/zodUtils.test.ts (new file)

@@ -0,0 +1,387 @@
// src/utils/zodUtils.test.ts
import { describe, it, expect } from 'vitest';
import {
requiredString,
numericIdParam,
uuidParamSchema,
optionalBoolean,
optionalNumeric,
optionalDate,
} from './zodUtils';
describe('Zod Utilities', () => {
describe('requiredString', () => {
const customMessage = 'This field is required and cannot be empty.';
const schema = requiredString(customMessage);
it('should pass for a valid non-empty string', () => {
const result = schema.safeParse('hello world');
expect(result.success).toBe(true);
if (result.success) {
expect(result.data).toBe('hello world');
}
});
it('should fail for an empty string with the custom message', () => {
const result = schema.safeParse('');
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe(customMessage);
}
});
it('should fail for a null value with the custom message', () => {
const result = schema.safeParse(null);
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe(customMessage);
}
});
it('should fail for an undefined value with the custom message', () => {
const result = schema.safeParse(undefined);
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe(customMessage);
}
});
it('should pass for a string containing only whitespace', () => {
const result = schema.safeParse(' ');
expect(result.success).toBe(true);
if (result.success) {
expect(result.data).toBe(' ');
}
});
it('should fail for a non-string value like a number with a Zod type error', () => {
const result = schema.safeParse(123);
expect(result.success).toBe(false);
if (!result.success) {
// z.string() will throw its own error message before min(1) is checked.
expect(result.error.issues[0].message).toBe('Invalid input: expected string, received number');
}
});
it('should fail for a non-string value like an object with a Zod type error', () => {
const result = schema.safeParse({ a: 1 });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe('Invalid input: expected string, received object');
}
});
});
describe('numericIdParam', () => {
const schema = numericIdParam('id');
it('should pass for a valid numeric string in params', () => {
const result = schema.safeParse({ params: { id: '123' } });
expect(result.success).toBe(true);
if (result.success) {
expect(result.data.params.id).toBe(123);
}
});
it('should pass for a valid number in params', () => {
const result = schema.safeParse({ params: { id: 456 } });
expect(result.success).toBe(true);
if (result.success) {
expect(result.data.params.id).toBe(456);
}
});
it('should fail for a non-numeric string', () => {
const result = schema.safeParse({ params: { id: 'abc' } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe('Invalid input: expected number, received NaN');
}
});
it('should fail for a negative number', () => {
const result = schema.safeParse({ params: { id: -1 } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
}
});
it('should fail for a floating point number', () => {
const result = schema.safeParse({ params: { id: 1.5 } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
}
});
it('should fail for zero', () => {
const result = schema.safeParse({ params: { id: 0 } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
}
});
it('should use a custom error message if provided', () => {
const customMessage = 'A valid numeric ID is required.';
const customSchema = numericIdParam('id', customMessage);
const result = customSchema.safeParse({ params: { id: -5 } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe(customMessage);
}
});
});
describe('uuidParamSchema', () => {
const customMessage = 'A valid UUID is required for the user ID.';
const schema = uuidParamSchema('userId', customMessage);
it('should pass for a valid UUID string', () => {
const validUuid = '123e4567-e89b-12d3-a456-426614174000';
const result = schema.safeParse({ params: { userId: validUuid } });
expect(result.success).toBe(true);
});
it('should fail for an invalid UUID string', () => {
const invalidUuid = 'not-a-uuid';
const result = schema.safeParse({ params: { userId: invalidUuid } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe(customMessage);
}
});
it('should fail for a non-string value', () => {
const result = schema.safeParse({ params: { userId: 12345 } });
expect(result.success).toBe(false);
});
});
describe('optionalNumeric', () => {
it('should return the default value if input is undefined', () => {
const schema = optionalNumeric({ default: 10 });
const result = schema.safeParse(undefined);
expect(result.success).toBe(true);
if (result.success) {
expect(result.data).toBe(10);
}
});
it('should parse a valid numeric string', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse('123.45');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(123.45);
      }
    });
    it('should parse an empty string as 0', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse('');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(0);
      }
    });
    it('should parse a whitespace string as 0', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse(' ');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(0);
      }
    });
    it('should treat null as undefined, returning default value or undefined', () => {
      const schemaWithDefault = optionalNumeric({ default: 99 });
      const resultWithDefault = schemaWithDefault.safeParse(null);
      expect(resultWithDefault.success).toBe(true);
      if (resultWithDefault.success) {
        expect(resultWithDefault.data).toBe(99);
      }
      const schemaWithoutDefault = optionalNumeric();
      const resultWithoutDefault = schemaWithoutDefault.safeParse(null);
      expect(resultWithoutDefault.success).toBe(true);
      if (resultWithoutDefault.success) {
        expect(resultWithoutDefault.data).toBeUndefined();
      }
    });
    it('should fail for a non-numeric string', () => {
      const schema = optionalNumeric();
      const result = schema.safeParse('abc');
      expect(result.success).toBe(false);
    });
    it('should enforce integer constraint', () => {
      const schema = optionalNumeric({ integer: true });
      expect(schema.safeParse('123').success).toBe(true);
      const floatResult = schema.safeParse('123.45');
      expect(floatResult.success).toBe(false);
      if (!floatResult.success) {
        expect(floatResult.error.issues[0].message).toBe('Invalid input: expected int, received number');
      }
    });
    it('should enforce positive constraint', () => {
      const schema = optionalNumeric({ positive: true });
      expect(schema.safeParse('1').success).toBe(true);
      const zeroResult = schema.safeParse('0');
      expect(zeroResult.success).toBe(false);
      if (!zeroResult.success) {
        expect(zeroResult.error.issues[0].message).toBe('Too small: expected number to be >0');
      }
    });
    it('should enforce non-negative constraint', () => {
      const schema = optionalNumeric({ nonnegative: true });
      expect(schema.safeParse('0').success).toBe(true);
      const negativeResult = schema.safeParse('-1');
      expect(negativeResult.success).toBe(false);
      if (!negativeResult.success) {
        expect(negativeResult.error.issues[0].message).toBe('Too small: expected number to be >=0');
      }
    });
    it('should enforce min and max constraints', () => {
      const schema = optionalNumeric({ min: 10, max: 20 });
      expect(schema.safeParse('15').success).toBe(true);
      const tooSmallResult = schema.safeParse('9');
      expect(tooSmallResult.success).toBe(false);
      if (!tooSmallResult.success) {
        expect(tooSmallResult.error.issues[0].message).toBe('Too small: expected number to be >=10');
      }
      const tooLargeResult = schema.safeParse('21');
      expect(tooLargeResult.success).toBe(false);
      if (!tooLargeResult.success) {
        expect(tooLargeResult.error.issues[0].message).toBe('Too big: expected number to be <=20');
      }
    });
  });
  describe('optionalDate', () => {
    const schema = optionalDate('Invalid date format');
    it('should pass for a valid YYYY-MM-DD date string', () => {
      const result = schema.safeParse('2023-12-25');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe('2023-12-25');
      }
    });
    it('should pass for undefined (optional)', () => {
      expect(schema.safeParse(undefined).success).toBe(true);
    });
    it('should fail for an invalid date string', () => {
      expect(schema.safeParse('not-a-date').success).toBe(false);
    });
  });
  describe('optionalBoolean', () => {
    it('should return the default value if input is undefined', () => {
      const schema = optionalBoolean({ default: true });
      const result = schema.safeParse(undefined);
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(true);
      }
    });
    it('should return undefined if input is undefined and no default is set', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse(undefined);
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBeUndefined();
      }
    });
    it('should parse "true" string as true', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('true');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(true);
      }
    });
    it('should parse "false" string as false', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('false');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(false);
      }
    });
    it('should parse "1" as true', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('1');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(true);
      }
    });
    it('should parse "0" as false', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('0');
      expect(result.success).toBe(true);
      if (result.success) {
        expect(result.data).toBe(false);
      }
    });
    it('should fail for other strings', () => {
      const schema = optionalBoolean();
      const result = schema.safeParse('not-a-boolean');
      expect(result.success).toBe(false);
    });
    it('should handle null input, returning default or undefined', () => {
      const schemaWithDefault = optionalBoolean({ default: false });
      const resultWithDefault = schemaWithDefault.safeParse(null);
      expect(resultWithDefault.success).toBe(true);
      if (resultWithDefault.success) {
        expect(resultWithDefault.data).toBe(false);
      }
      const schemaWithoutDefault = optionalBoolean();
      const resultWithoutDefault = schemaWithoutDefault.safeParse(null);
      expect(resultWithoutDefault.success).toBe(true);
      if (resultWithoutDefault.success) {
        expect(resultWithoutDefault.data).toBeUndefined();
      }
    });
    it('should handle empty string input, returning default or undefined', () => {
      const schemaWithDefault = optionalBoolean({ default: true });
      const resultWithDefault = schemaWithDefault.safeParse('');
      expect(resultWithDefault.success).toBe(true);
      if (resultWithDefault.success) {
        expect(resultWithDefault.data).toBe(true);
      }
      const schemaWithoutDefault = optionalBoolean();
      const resultWithoutDefault = schemaWithoutDefault.safeParse('');
      expect(resultWithoutDefault.success).toBe(true);
      if (resultWithoutDefault.success) {
        expect(resultWithoutDefault.data).toBeUndefined();
      }
    });
    it('should pass for an actual boolean value', () => {
      const schema = optionalBoolean();
      expect(schema.safeParse(true).success).toBe(true);
      expect(schema.safeParse(false).success).toBe(true);
    });
  });
});

116
src/utils/zodUtils.ts Normal file

@@ -0,0 +1,116 @@
// src/utils/zodUtils.ts
import { z } from 'zod';
/**
 * A Zod schema for a required, non-empty string.
 * @param message The error message to display if the string is empty or missing.
 * @returns A Zod string schema.
 */
export const requiredString = (message: string) =>
  z.preprocess(
    // If the value is null or undefined, preprocess it to an empty string.
    // This ensures that the subsequent `.min(1)` check will catch missing required fields.
    (val) => val ?? '',
    // Now, validate that the (potentially preprocessed) value is a string with at least 1 character.
    z.string().min(1, message),
  );
/**
 * Creates a Zod schema for a numeric ID in request parameters.
 * @param paramName The name of the parameter (e.g., 'id').
 * @param message The error message for invalid input.
 * @returns A Zod object schema for the params.
 */
export const numericIdParam = (
  paramName: string,
  message = `Invalid ID for parameter '${paramName}'. Must be a number.`,
) =>
  z.object({
    params: z.object({
      [paramName]: z.coerce.number().int(message).positive(message),
    }),
  });
/**
 * Creates a Zod schema for a UUID in request parameters.
 * @param paramName The name of the parameter (e.g., 'id').
 * @param message The error message for invalid input.
 * @returns A Zod object schema for the params.
 */
export const uuidParamSchema = (paramName: string, message: string) =>
  z.object({
    params: z.object({
      [paramName]: z.string().uuid(message),
    }),
  });
/**
 * Creates a Zod schema for an optional, numeric query parameter that is coerced from a string.
 * @param options Configuration options such as a default value, min/max bounds, and integer/sign constraints.
 * @returns A Zod schema for the number.
 */
export const optionalNumeric = (
  options: {
    default?: number;
    min?: number;
    max?: number;
    integer?: boolean;
    positive?: boolean;
    nonnegative?: boolean;
  } = {},
) => {
  let numberSchema = z.coerce.number();
  if (options.integer) numberSchema = numberSchema.int();
  if (options.positive) numberSchema = numberSchema.positive();
  else if (options.nonnegative) numberSchema = numberSchema.nonnegative();
  if (options.min !== undefined) numberSchema = numberSchema.min(options.min);
  if (options.max !== undefined) numberSchema = numberSchema.max(options.max);
  // Make the number schema optional *before* preprocessing. This allows it to correctly handle
  // the `undefined` value that our preprocessor generates from `null`.
  const optionalNumberSchema = numberSchema.optional();
  // Map null to undefined before coercion. This is crucial because coercing null directly
  // yields Number(null) === 0, which would bypass the .optional() and .default() logic.
  // We want null to be treated as "not provided", just like undefined.
  const schema = z.preprocess((val) => (val === null ? undefined : val), optionalNumberSchema);
  if (options.default !== undefined) return schema.default(options.default);
  return schema;
};
/**
 * Creates a Zod schema for an optional date string in YYYY-MM-DD format.
 * @param message Optional custom error message.
 * @returns A Zod schema for the date string.
 */
export const optionalDate = (message?: string) => z.string().date(message).optional();
/**
 * Creates a Zod schema for an optional boolean query parameter that is coerced from a string.
 * Treats 'true' and '1' as true, and 'false' and '0' as false.
 * @param options Configuration options such as a default value.
 * @returns A Zod schema for the boolean.
 */
export const optionalBoolean = (
  options: {
    default?: boolean;
  } = {},
) => {
  const schema = z.preprocess((val) => {
    if (val === 'true' || val === '1') return true;
    if (val === 'false' || val === '0') return false;
    if (val === '' || val === null) return undefined; // Treat empty string and null as "not present".
    return val;
  }, z.boolean().optional());
  if (options.default !== undefined) {
    return schema.default(options.default);
  }
  return schema;
};
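
A short usage sketch (not part of this diff) shows how these helpers might compose into request schemas for an Express-style route. The file name, schema names, routes, and field names below are illustrative assumptions; only the imported helpers come from zodUtils.ts.

// usage-sketch.ts (hypothetical example, not in the repository)
import { z } from 'zod';
import { numericIdParam, optionalBoolean, optionalDate, optionalNumeric, requiredString } from './zodUtils';

// GET /flyers/:id validation: params must contain a positive integer id; '42' coerces to 42.
const getFlyerSchema = numericIdParam('id');
console.log(getFlyerSchema.safeParse({ params: { id: '42' } }).success); // true

// GET /flyers validation: raw query-string values are coerced to typed values.
const listFlyersSchema = z.object({
  query: z.object({
    page: optionalNumeric({ default: 1, integer: true, positive: true }),
    limit: optionalNumeric({ default: 25, integer: true, min: 1, max: 100 }),
    includeExpired: optionalBoolean({ default: false }),
    validFrom: optionalDate('validFrom must be a YYYY-MM-DD date'),
  }),
});
const parsed = listFlyersSchema.safeParse({ query: { page: '2', limit: '50', includeExpired: '1' } });
// On success: parsed.data.query => { page: 2, limit: 50, includeExpired: true }

// requiredString maps null/undefined to '' so .min(1) reports the custom message.
console.log(requiredString('Name is required').safeParse(null).success); // false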

26
vitest.config.e2e.ts Normal file

@@ -0,0 +1,26 @@
import { defineConfig, mergeConfig } from 'vitest/config';
import integrationConfig from './vitest.config.integration';
const e2eConfig = mergeConfig(
  integrationConfig,
  defineConfig({
    test: {
      name: 'e2e',
      // Point specifically to E2E tests.
      include: ['src/tests/e2e/**/*.e2e.test.ts'],
      // Increase the timeout for E2E flows that involve AI calls or full API chains.
      testTimeout: 120000,
      coverage: {
        reportsDirectory: '.coverage/e2e',
      },
    },
  }),
);
// Explicitly override the include array so we don't inherit the integration tests:
// mergeConfig concatenates arrays rather than replacing them.
if (e2eConfig.test) {
  e2eConfig.test.include = ['src/tests/e2e/**/*.e2e.test.ts'];
}
export default e2eConfig;
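
The defensive override above matters because Vitest's mergeConfig (re-exported from Vite) concatenates array options instead of replacing them. A minimal sketch of the failure mode, assuming the integration config uses a glob like the first one below (the real glob lives in vitest.config.integration and is not shown in this diff):

import { defineConfig, mergeConfig } from 'vitest/config';

const merged = mergeConfig(
  defineConfig({ test: { include: ['src/tests/integration/**/*.test.ts'] } }), // assumed glob
  defineConfig({ test: { include: ['src/tests/e2e/**/*.e2e.test.ts'] } }),
);
// merged.test.include now contains BOTH globs, so without the explicit
// override the e2e run would also pick up every integration test.

The suite can then be run with Vitest's --config flag, for example `vitest run --config vitest.config.e2e.ts`, typically wired up as an npm script (a name like "test:e2e" is illustrative, not from this diff).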