Compare commits

25 Commits

Author / SHA1 / Message / Date / [status of the "Deploy to Test Environment / deploy-to-test (push)" check, where one ran]

Gitea Actions  dbe8e72efe  ci: Bump version to 0.1.5 [skip ci]  2025-12-25 06:13:16 +05:00
38bd193042  not sure why those errors got removed we'll see  2025-12-24 17:12:20 -08:00  [cancelled]
Gitea Actions  57215e2778  ci: Bump version to 0.1.4 [skip ci]  2025-12-25 06:04:17 +05:00
2c1de24e9a  undo stupid logging change  2025-12-24 16:54:56 -08:00  [failing after 1m21s]
c8baff7aac  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 16:17:45 -08:00
de3f21a7ec  not sure why those errors got removed we'll see  2025-12-24 16:16:42 -08:00
Gitea Actions  c6adbf79e7  ci: Bump version to 0.1.3 [skip ci]  2025-12-25 02:26:17 +05:00
7399a27600  add ai agent fallbacks  2025-12-24 13:25:18 -08:00  [failing after 3h14m13s]
Gitea Actions  68aadcaa4e  ci: Bump version to 0.1.2 [skip ci]  2025-12-25 01:41:06 +05:00
971d2c3fa7  add ai agent fallbacks  2025-12-24 12:39:15 -08:00  [failing after 1m18s]
Gitea Actions  daaacfde5e  ci: Bump version to 0.1.1 [skip ci]  2025-12-24 23:53:27 +05:00
7ac8fe1d29  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 10:52:13 -08:00  [running]
a2462dfb6b  testing push to prod etc  2025-12-24 10:51:43 -08:00
Gitea Actions  a911224fb4  ci: Bump version to 0.1.0 for production release [skip ci]  2025-12-24 23:24:53 +05:00
Gitea Actions  bf4bcef890  ci: Bump version to 0.0.30 [skip ci]  2025-12-24 22:59:36 +05:00
ac6cd2e0a1  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 09:58:45 -08:00  [successful in 16m10s]
eea03880c1  exclude some dirs from coverage  2025-12-24 09:58:37 -08:00
Gitea Actions  7fc263691f  ci: Bump version to 0.0.29 [skip ci]  2025-12-24 22:41:17 +05:00
c0912d36d5  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 09:39:56 -08:00  [successful in 15m59s]
612c2b5943  deploy to test fix  2025-12-24 09:39:49 -08:00
Gitea Actions  8e787ddcf0  ci: Bump version to 0.0.28 [skip ci]  2025-12-24 22:18:18 +05:00
11c52d284c  fixing unit tests  2025-12-24 09:17:09 -08:00  [successful in 15m13s]
Gitea Actions  b528bd3651  ci: Bump version to 0.0.27 [skip ci]  2025-12-24 22:06:03 +05:00
4c5ceb1bd6  Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com  2025-12-24 09:05:12 -08:00  [running]
bcc4ad64dc  fixing unit tests  2025-12-24 09:04:10 -08:00
32 changed files with 870 additions and 326 deletions

View File

@@ -93,8 +93,9 @@ jobs:
   exit 1
 fi
 GITEA_SERVER_URL="https://gitea.projectium.com"
-COMMIT_MESSAGE=$(git log -1 --pretty=%s)
-VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
+COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
+PACKAGE_VERSION=$(node -p "require('./package.json').version")
+VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
 VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
 VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
 VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
@@ -162,7 +163,12 @@ jobs:
 echo "Updating schema hash in production database..."
 CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
 PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
-  "INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
+  "CREATE TABLE IF NOT EXISTS public.schema_info (
+    environment VARCHAR(50) PRIMARY KEY,
+    schema_hash VARCHAR(64) NOT NULL,
+    deployed_at TIMESTAMP DEFAULT NOW()
+  );
+  INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
   ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
 UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
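The new VITE_APP_VERSION is a composite of build timestamp, short commit hash, and the package.json version. As a minimal TypeScript sketch of how a client could split that string back apart (the helper and field names are illustrative, not from the repo):

// Hypothetical helper: splits the "YYYYMMDD-HHMM:shorthash:semver" string
// that the workflow above bakes into VITE_APP_VERSION.
interface BuildVersion {
  builtAt: string; // e.g. "20251225-0613"
  commit: string;  // e.g. "dbe8e72"
  semver: string;  // e.g. "0.1.5"
}

export function parseBuildVersion(raw: string): BuildVersion | null {
  const [builtAt, commit, semver] = raw.split(':');
  // All three segments must be present for the string to be well-formed.
  if (!builtAt || !commit || !semver) return null;
  return { builtAt, commit, semver };
}

// parseBuildVersion('20250101-1200:abc1234:1.0.0')
//   => { builtAt: '20250101-1200', commit: 'abc1234', semver: '1.0.0' }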

View File

@@ -119,6 +119,11 @@ jobs:
       # --- JWT Secret for Passport authentication in tests ---
       JWT_SECRET: ${{ secrets.JWT_SECRET }}
+      # --- V8 Coverage for Server Process ---
+      # This variable tells the Node.js process (our server, started by globalSetup)
+      # where to output its raw V8 coverage data.
+      NODE_V8_COVERAGE: '.coverage/tmp/integration-server'
       # --- Increase Node.js memory limit to prevent heap out of memory errors ---
       # This is crucial for memory-intensive tasks like running tests and coverage.
       NODE_OPTIONS: '--max-old-space-size=8192'
@@ -137,15 +142,15 @@ jobs:
       # The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
       echo "--- Running Unit Tests ---"
       # npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
-      npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
+      npm run test:unit -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true

       echo "--- Running Integration Tests ---"
-      npm run test:integration -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
+      npm run test:integration -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true

       echo "--- Running E2E Tests ---"
       # Run E2E tests using the dedicated E2E config which inherits from integration config.
       # We still pass --coverage to enable it, but directory and timeout are now in the config.
-      npx vitest run --config vitest.config.e2e.ts --coverage --reporter=verbose --no-file-parallelism || true
+      npx vitest run --config vitest.config.e2e.ts --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --no-file-parallelism || true

       # Re-enable secret masking for subsequent steps.
       echo "::secret-masking::"
@@ -174,7 +179,7 @@ jobs:
       # Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
       # We only generate the 'json' report here because it's all nyc needs for merging.
       echo "Server coverage report about to be generated..."
-      npx c8 report --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
+      npx c8 report --exclude='**/*.test.ts' --exclude='**/tests/**' --exclude='**/mocks/**' --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server

       echo "Server coverage report generated. Verifying existence:"
       ls -l .coverage/integration-server/coverage-final.json
@@ -213,7 +218,10 @@ jobs:
         --reporter=text \
         --reporter=html \
         --report-dir .coverage/ \
-        --temp-dir "$NYC_SOURCE_DIR"
+        --temp-dir "$NYC_SOURCE_DIR" \
+        --exclude "**/*.test.ts" \
+        --exclude "**/tests/**" \
+        --exclude "**/mocks/**"

       # Re-enable secret masking for subsequent steps.
       echo "::secret-masking::"
@@ -274,6 +282,9 @@ jobs:
       if [ -z "$DEPLOYED_HASH" ]; then
         echo "WARNING: No schema hash found in the test database."
         echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
+        echo "--- Debug: Dumping schema_info table ---"
+        PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT * FROM public.schema_info;" || true
+        echo "----------------------------------------"
         # We allow the deployment to continue, but a manual schema update is required.
         # You could choose to fail here by adding `exit 1`.
       elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
@@ -297,8 +308,9 @@ jobs:
       fi
       GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
-      COMMIT_MESSAGE=$(git log -1 --pretty=%s)
-      VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
+      COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
+      PACKAGE_VERSION=$(node -p "require('./package.json').version")
+      VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
       VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
       VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
       VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
@@ -360,7 +372,7 @@ jobs:
       echo "Installing production dependencies and restarting test server..."
       cd /var/www/flyer-crawler-test.projectium.com
-      npm install --omit=dev # Install only production dependencies
+      npm install --omit=dev

       # Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
       # It will START the process if it's not running, or RELOAD it if it is.
       # We also add `&& pm2 save` to persist the process list across server reboots.
@@ -372,7 +384,12 @@ jobs:
       echo "Updating schema hash in test database..."
       CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
       PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
-        "INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
+        "CREATE TABLE IF NOT EXISTS public.schema_info (
+          environment VARCHAR(50) PRIMARY KEY,
+          schema_hash VARCHAR(64) NOT NULL,
+          deployed_at TIMESTAMP DEFAULT NOW()
+        );
+        INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
         ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"

       # Verify the hash was updated
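The same three exclude globs are repeated on every CLI invocation above. Vitest also accepts coverage excludes in its config file, so one way to centralize them, sketched here under the assumption that the repo's vitest configs extend a shared base:

// vitest.config.ts (sketch) - a central home for the coverage excludes that
// the workflow currently repeats as --coverage.exclude CLI flags.
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    coverage: {
      exclude: ['**/*.test.ts', '**/tests/**', '**/mocks/**'],
    },
  },
});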

View File

@@ -92,8 +92,9 @@ jobs:
   exit 1
 fi
 GITEA_SERVER_URL="https://gitea.projectium.com"
-COMMIT_MESSAGE=$(git log -1 --pretty=%s)
-VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
+COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
+PACKAGE_VERSION=$(node -p "require('./package.json').version")
+VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
 VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
 VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
 VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build

View File

@@ -18,12 +18,70 @@ module.exports = {
         NODE_ENV: 'production', // Set the Node.js environment to production
         name: 'flyer-crawler-api',
         cwd: '/var/www/flyer-crawler.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
       // Test Environment Settings
       env_test: {
-        NODE_ENV: 'development', // Use 'development' for test to enable more verbose logging if needed
+        NODE_ENV: 'test', // Set to 'test' to match the environment purpose and disable pino-pretty
         name: 'flyer-crawler-api-test',
         cwd: '/var/www/flyer-crawler-test.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+      },
+      // Development Environment Settings
+      env_development: {
+        NODE_ENV: 'development',
+        name: 'flyer-crawler-api-dev',
+        watch: true,
+        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
     },
     {
@@ -36,12 +94,70 @@ module.exports = {
         NODE_ENV: 'production',
         name: 'flyer-crawler-worker',
         cwd: '/var/www/flyer-crawler.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
       // Test Environment Settings
      env_test: {
-        NODE_ENV: 'development',
+        NODE_ENV: 'test',
         name: 'flyer-crawler-worker-test',
         cwd: '/var/www/flyer-crawler-test.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+      },
+      // Development Environment Settings
+      env_development: {
+        NODE_ENV: 'development',
+        name: 'flyer-crawler-worker-dev',
+        watch: true,
+        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
     },
     {
@@ -54,12 +170,70 @@ module.exports = {
         NODE_ENV: 'production',
         name: 'flyer-crawler-analytics-worker',
         cwd: '/var/www/flyer-crawler.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
       // Test Environment Settings
       env_test: {
-        NODE_ENV: 'development',
+        NODE_ENV: 'test',
         name: 'flyer-crawler-analytics-worker-test',
         cwd: '/var/www/flyer-crawler-test.projectium.com',
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
+      },
+      // Development Environment Settings
+      env_development: {
+        NODE_ENV: 'development',
+        name: 'flyer-crawler-analytics-worker-dev',
+        watch: true,
+        ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
+        // Inherit secrets from the deployment environment
+        DB_HOST: process.env.DB_HOST,
+        DB_USER: process.env.DB_USER,
+        DB_PASSWORD: process.env.DB_PASSWORD,
+        DB_NAME: process.env.DB_NAME,
+        REDIS_URL: process.env.REDIS_URL,
+        REDIS_PASSWORD: process.env.REDIS_PASSWORD,
+        FRONTEND_URL: process.env.FRONTEND_URL,
+        JWT_SECRET: process.env.JWT_SECRET,
+        GEMINI_API_KEY: process.env.GEMINI_API_KEY,
+        GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
+        SMTP_HOST: process.env.SMTP_HOST,
+        SMTP_PORT: process.env.SMTP_PORT,
+        SMTP_SECURE: process.env.SMTP_SECURE,
+        SMTP_USER: process.env.SMTP_USER,
+        SMTP_PASS: process.env.SMTP_PASS,
+        SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
       },
     },
   ],
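Each of the nine env blocks above repeats the same sixteen process.env passthroughs. A hypothetical way to flatten that repetition (PM2 ecosystem files are plain CommonJS, so this sketch is ordinary JavaScript that also type-checks as TypeScript; it is not part of this diff):

// Hypothetical refactor: build the repeated passthrough block once.
const SECRET_KEYS = [
  'DB_HOST', 'DB_USER', 'DB_PASSWORD', 'DB_NAME',
  'REDIS_URL', 'REDIS_PASSWORD', 'FRONTEND_URL', 'JWT_SECRET',
  'GEMINI_API_KEY', 'GOOGLE_MAPS_API_KEY',
  'SMTP_HOST', 'SMTP_PORT', 'SMTP_SECURE', 'SMTP_USER', 'SMTP_PASS', 'SMTP_FROM_EMAIL',
];

// Inherit each listed secret from the deployment environment.
const sharedEnv = Object.fromEntries(SECRET_KEYS.map((k) => [k, process.env[k]]));

// Usage inside an app definition:
// env_test: { NODE_ENV: 'test', name: 'flyer-crawler-api-test', ...sharedEnv },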

package-lock.json generated
View File

@@ -1,12 +1,12 @@
 {
   "name": "flyer-crawler",
-  "version": "0.0.26",
+  "version": "0.1.5",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "flyer-crawler",
-      "version": "0.0.26",
+      "version": "0.1.5",
       "dependencies": {
         "@bull-board/api": "^6.14.2",
         "@bull-board/express": "^6.14.2",

View File

@@ -1,7 +1,7 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.0.26",
+  "version": "0.1.5",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",

View File

@@ -36,7 +36,7 @@ vi.mock('pdfjs-dist', () => ({
 // Mock the new config module
 vi.mock('./config', () => ({
   default: {
-    app: { version: '1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
+    app: { version: '20250101-1200:abc1234:1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
     google: { mapsEmbedApiKey: 'mock-key' },
   },
 }));
@@ -588,11 +588,11 @@ describe('App Component', () => {
     // Mock the config module for this specific test
     vi.mock('./config', () => ({
       default: {
-        app: { version: '1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
+        app: { version: '20250101-1200:abc1234:1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
         google: { mapsEmbedApiKey: 'mock-key' },
       },
     }));
-    localStorageMock.setItem('lastSeenVersion', '1.0.0');
+    localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:1.0.0');
     renderApp();
     await expect(screen.findByTestId('whats-new-modal-mock')).resolves.toBeInTheDocument();
   });
@@ -741,7 +741,7 @@ describe('App Component', () => {
     vi.mock('./config', () => ({
       default: {
         app: {
-          version: '2.0.0',
+          version: '20250101-1200:abc1234:2.0.0',
           commitMessage: 'A new version!',
           commitUrl: 'http://example.com/commit/2.0.0',
         },
@@ -752,14 +752,14 @@ describe('App Component', () => {
   it('should display the version number and commit link', () => {
     renderApp();
-    const versionLink = screen.getByText(`Version: 2.0.0`);
+    const versionLink = screen.getByText(`Version: 20250101-1200:abc1234:2.0.0`);
     expect(versionLink).toBeInTheDocument();
     expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
   });

   it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {
     // Pre-set the localStorage to prevent the modal from opening automatically
-    localStorageMock.setItem('lastSeenVersion', '2.0.0');
+    localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:2.0.0');
     renderApp();
     expect(screen.queryByTestId('whats-new-modal-mock')).not.toBeInTheDocument();

View File

@@ -44,7 +44,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
       })
       .catch((err) => {
         console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
-        logger.error('Failed to fetch image for correction tool', { error: err });
+        logger.error({ error: err }, 'Failed to fetch image for correction tool');
         notifyError('Could not load the image for correction.');
       });
   }
@@ -164,7 +164,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
       const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
       console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
       notifyError(msg);
-      logger.error('Error during rescan:', { error: err });
+      logger.error({ error: err }, 'Error during rescan:');
     } finally {
       console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
       setIsProcessing(false);
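The argument swaps above align these calls with pino's signature, which takes the merge object first and the message string second; with the old order the object is treated as an interpolation argument and never merged into the structured log line. A minimal illustration:

import pino from 'pino';

const logger = pino();

// pino: merge object first, message second - the object's fields are
// merged into the emitted JSON line alongside "msg".
logger.error({ error: new Error('boom') }, 'Failed to fetch image');

// logger.error('Failed to fetch image', { error }) would NOT merge the
// object; it is only used to fill printf-style placeholders in the message.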

View File

@@ -73,12 +73,11 @@ describe('FlyerUploader', () => {
   it('should handle file upload and start polling', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Checking...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Checking...' },
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.');
     renderComponent();
@@ -131,12 +130,11 @@ describe('FlyerUploader', () => {
   it('should handle file upload via drag and drop', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-dnd' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Dropped...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-dnd' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Dropped...' },
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.');
     renderComponent();
@@ -159,16 +157,10 @@ describe('FlyerUploader', () => {
   it('should poll for status, complete successfully, and redirect', async () => {
     const onProcessingComplete = vi.fn();
     console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
     mockedAiApiClient.getJobStatus
-      .mockResolvedValueOnce(
-        new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
-      )
-      .mockResolvedValueOnce(
-        new Response(JSON.stringify({ state: 'completed', returnValue: { flyerId: 42 } })),
-      );
+      .mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
+      .mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });

     console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
     renderComponent(onProcessingComplete);
@@ -229,12 +221,11 @@ describe('FlyerUploader', () => {
   it('should handle a failed job', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-fail' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'failed', failedReason: 'AI model exploded' })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'failed',
+      failedReason: 'AI model exploded',
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -260,11 +251,82 @@ describe('FlyerUploader', () => {
     console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.');
   });

+  it('should clear the polling timeout when a job fails', async () => {
+    const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
+    console.log('--- [TEST LOG] ---: 1. Setting up mocks for failed job timeout clearance.');
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail-timeout' });
+    // We need at least one 'active' response to establish a timeout loop so we have something to clear
+    mockedAiApiClient.getJobStatus
+      .mockResolvedValueOnce({ state: 'active', progress: { message: 'Working...' } })
+      .mockResolvedValueOnce({ state: 'failed', failedReason: 'Fatal Error' });
+
+    renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+    fireEvent.change(input, { target: { files: [file] } });
+
+    // Wait for the first poll to complete and UI to update to "Working..."
+    await screen.findByText('Working...');
+
+    // Advance time to trigger the second poll
+    await act(async () => {
+      vi.advanceTimersByTime(3000);
+    });
+
+    // Wait for the failure UI
+    await screen.findByText(/Processing failed: Fatal Error/i);
+
+    // Verify clearTimeout was called
+    expect(clearTimeoutSpy).toHaveBeenCalled();
+
+    // Verify no further polling occurs
+    const callsBefore = mockedAiApiClient.getJobStatus.mock.calls.length;
+    await act(async () => {
+      vi.advanceTimersByTime(10000);
+    });
+    expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBefore);
+
+    clearTimeoutSpy.mockRestore();
+  });
+
+  it('should clear the polling timeout when the component unmounts', async () => {
+    const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
+    console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount timeout clearance.');
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-unmount' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Polling...' },
+    });
+
+    const { unmount } = renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+    fireEvent.change(input, { target: { files: [file] } });
+
+    // Wait for the first poll to complete and the UI to show the polling state
+    await screen.findByText('Polling...');
+
+    // Now that we are in a polling state (and a timeout is set), unmount the component
+    console.log('--- [TEST LOG] ---: 2. Unmounting component to trigger cleanup effect.');
+    unmount();
+
+    // Verify that the cleanup function in the useEffect hook was called
+    expect(clearTimeoutSpy).toHaveBeenCalled();
+    console.log('--- [TEST LOG] ---: 3. clearTimeout confirmed.');
+
+    clearTimeoutSpy.mockRestore();
+  });
+
   it('should handle a duplicate flyer error (409)', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ flyerId: 99, message: 'Duplicate' }), { status: 409 }),
-    );
+    // The API client now throws a structured error for non-2xx responses.
+    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
+      status: 409,
+      body: { flyerId: 99, message: 'Duplicate' },
+    });

     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -295,12 +357,11 @@ describe('FlyerUploader', () => {
   it('should allow the user to stop watching progress', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-stop' }), { status: 200 }),
-    );
-    mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-stop' });
+    mockedAiApiClient.getJobStatus.mockResolvedValue({
+      state: 'active',
+      progress: { message: 'Analyzing...' },
+    } as any);

     console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
     renderComponent();
@@ -362,9 +423,11 @@ describe('FlyerUploader', () => {
   it('should handle a generic network error during upload', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue(
-      new Error('Network Error During Upload'),
-    );
+    // Simulate a structured error from the API client
+    mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
+      status: 500,
+      body: { message: 'Network Error During Upload' },
+    });

     renderComponent();
     const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
     const input = screen.getByLabelText(/click to select a file/i);
@@ -379,9 +442,7 @@ describe('FlyerUploader', () => {
   it('should handle a generic network error during polling', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-poll-fail' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-poll-fail' });
     mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error'));

     renderComponent();
@@ -398,11 +459,9 @@ describe('FlyerUploader', () => {
   it('should handle a completed job with a missing flyerId', async () => {
     console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.');
-    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
-      new Response(JSON.stringify({ jobId: 'job-no-flyerid' }), { status: 200 }),
-    );
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-no-flyerid' });
     mockedAiApiClient.getJobStatus.mockResolvedValue(
-      new Response(JSON.stringify({ state: 'completed', returnValue: {} })), // No flyerId
+      { state: 'completed', returnValue: {} }, // No flyerId
     );

     renderComponent();
@@ -419,6 +478,27 @@ describe('FlyerUploader', () => {
     console.log('--- [TEST LOG] ---: 4. Assertions passed.');
   });

+  it('should handle a non-JSON response during polling', async () => {
+    console.log('--- [TEST LOG] ---: 1. Setting up mock for non-JSON response.');
+    // The actual function would throw, so we mock the rejection.
+    // The new getJobStatus would throw an error like "Failed to parse JSON..."
+    mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-bad-json' });
+    mockedAiApiClient.getJobStatus.mockRejectedValue(
+      new Error('Failed to parse JSON response from server. Body: <html>502 Bad Gateway</html>'),
+    );
+
+    renderComponent();
+    const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
+    const input = screen.getByLabelText(/click to select a file/i);
+
+    console.log('--- [TEST LOG] ---: 2. Firing file change event.');
+    fireEvent.change(input, { target: { files: [file] } });
+
+    console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
+    expect(await screen.findByText(/Failed to parse JSON response from server/i)).toBeInTheDocument();
+    console.log('--- [TEST LOG] ---: 4. Assertions passed.');
+  });
+
   it('should do nothing if the file input is cancelled', () => {
     renderComponent();
     const input = screen.getByLabelText(/click to select a file/i);
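The two new timeout tests pin down the cleanup behavior the component needs: stop scheduling polls once a job fails, and clear any pending timer on unmount. A stripped-down sketch of that effect shape (the hook name and types are assumptions inferred from the tests, not copied from the component):

import { useEffect, useRef } from 'react';

// Minimal sketch of a polling loop whose timer is cleared both on failure
// and on unmount - the two cases the tests above assert via clearTimeout spies.
export function usePolling(poll: () => Promise<'active' | 'failed'>, intervalMs = 3000): void {
  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  useEffect(() => {
    let cancelled = false;

    const tick = async () => {
      const state = await poll();
      // Stop scheduling further polls on failure or after unmount.
      if (cancelled || state === 'failed') return;
      timeoutRef.current = setTimeout(tick, intervalMs);
    };
    void tick();

    return () => {
      // Unmount cleanup: cancel the in-flight loop and clear any pending timer.
      cancelled = true;
      if (timeoutRef.current) clearTimeout(timeoutRef.current);
    };
  }, [poll, intervalMs]);
}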

View File

@@ -60,14 +60,8 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
     const pollStatus = async () => {
       console.debug(`[DEBUG] pollStatus(): Polling for jobId: ${jobId}`);
       try {
-        const statusResponse = await getJobStatus(jobId);
-        console.debug(`[DEBUG] pollStatus(): API response status: ${statusResponse.status}`);
-        if (!statusResponse.ok) {
-          throw new Error(`Failed to get job status (HTTP ${statusResponse.status})`);
-        }
-        const job = await statusResponse.json();
-        console.debug('[DEBUG] pollStatus(): Job status received:', job);
+        const job = await getJobStatus(jobId); // Now returns parsed JSON directly
+        console.debug('[DEBUG] pollStatus(): Job status received:', job); // The rest of the logic remains the same

         if (job.progress) {
           setProcessingStages(job.progress.stages || []);
@@ -97,7 +91,13 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
             console.debug(
               `[DEBUG] pollStatus(): Job state is "failed". Reason: ${job.failedReason}`,
             );
+            // Explicitly clear any pending timeout to stop the polling loop immediately.
+            if (pollingTimeoutRef.current) {
+              clearTimeout(pollingTimeoutRef.current);
+            }
             setErrorMessage(`Processing failed: ${job.failedReason || 'Unknown error'}`);
+            // Clear any stale "in-progress" messages to avoid user confusion.
+            setStatusMessage(null);
             setProcessingState('error');
             break;
@@ -112,7 +112,7 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
             break;
         }
       } catch (error) {
-        logger.error('Error during polling:', { error });
+        logger.error({ error }, 'Error during polling:');
         setErrorMessage(
           error instanceof Error ? error.message : 'An unexpected error occurred during polling.',
         );
@@ -150,29 +150,24 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
        `[DEBUG] processFile(): Checksum generated: ${checksum}. Calling uploadAndProcessFlyer.`,
      );

-      const startResponse = await uploadAndProcessFlyer(file, checksum);
-      console.debug(`[DEBUG] processFile(): Upload response status: ${startResponse.status}`);
-
-      if (!startResponse.ok) {
-        const errorData = await startResponse.json();
-        console.debug('[DEBUG] processFile(): Upload failed. Error data:', errorData);
-        if (startResponse.status === 409 && errorData.flyerId) {
-          setErrorMessage(`This flyer has already been processed. You can view it here:`);
-          setDuplicateFlyerId(errorData.flyerId);
-        } else {
-          setErrorMessage(errorData.message || `Upload failed with status ${startResponse.status}`);
-        }
-        setProcessingState('error');
-        return;
-      }
-
-      const { jobId: newJobId } = await startResponse.json();
+      // The API client now returns parsed JSON on success or throws a structured error on failure.
+      const { jobId: newJobId } = await uploadAndProcessFlyer(file, checksum);
       console.debug(`[DEBUG] processFile(): Upload successful. Received jobId: ${newJobId}`);
       setJobId(newJobId);
       setProcessingState('polling');
-    } catch (error) {
-      logger.error('An unexpected error occurred during file upload:', { error });
-      setErrorMessage(error instanceof Error ? error.message : 'An unexpected error occurred.');
+    } catch (error: any) {
+      // Handle the structured error thrown by the API client.
+      logger.error({ error }, 'An error occurred during file upload:');

+      // Handle 409 Conflict for duplicate flyers
+      if (error?.status === 409 && error.body?.flyerId) {
+        setErrorMessage(`This flyer has already been processed. You can view it here:`);
+        setDuplicateFlyerId(error.body.flyerId);
+      } else {
+        // Handle other errors (e.g., validation, server errors)
+        const message =
+          error?.body?.message || error?.message || 'An unexpected error occurred during upload.';
+        setErrorMessage(message);
+      }
       setProcessingState('error');
     }
   }, []);
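Both the component and its tests now assume an API client that resolves with parsed JSON and rejects with a `{ status, body }` object for non-2xx responses. The client itself is not shown in this diff; a sketch of the contract they depend on (the function name is an assumption, and the parse-failure message mirrors the one the tests expect):

// Sketch of the assumed client contract: resolve with parsed JSON on success,
// throw { status, body } on any non-2xx response. The repo's real client may
// differ in detail.
export async function fetchJson<T>(input: RequestInfo, init?: RequestInit): Promise<T> {
  const response = await fetch(input, init);
  const text = await response.text();

  let body: unknown;
  try {
    body = text ? JSON.parse(text) : null;
  } catch {
    throw new Error(`Failed to parse JSON response from server. Body: ${text}`);
  }

  if (!response.ok) {
    // Structured error: callers can branch on status (e.g. 409 duplicates).
    throw { status: response.status, body };
  }
  return body as T;
}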

View File

@@ -1,94 +1,58 @@
// src/middleware/errorHandler.ts // src/middleware/errorHandler.ts
import { Request, Response, NextFunction } from 'express'; import { Request, Response, NextFunction } from 'express';
import { import { ZodError } from 'zod';
DatabaseError, import { NotFoundError, UniqueConstraintError, ValidationError } from '../services/db/errors.db';
UniqueConstraintError, import { logger } from '../services/logger.server';
ForeignKeyConstraintError,
NotFoundError,
ValidationError,
ValidationIssue,
} from '../services/db/errors.db';
import crypto from 'crypto';
interface HttpError extends Error { /**
status?: number; * A centralized error handling middleware for the Express application.
} * This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
*
export const errorHandler = (err: HttpError, req: Request, res: Response, next: NextFunction) => { * It standardizes error responses and ensures consistent logging.
// If the response headers have already been sent, we must delegate to the default Express error handler. */
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
// If headers have already been sent, delegate to the default Express error handler.
if (res.headersSent) { if (res.headersSent) {
return next(err); return next(err);
} }
// The pino-http middleware guarantees that `req.log` will be available. // Use the request-scoped logger if available, otherwise fall back to the global logger.
const log = req.log; const log = req.log || logger;
// --- 1. Determine Final Status Code and Message --- // --- Handle Zod Validation Errors ---
let statusCode = err.status ?? 500; if (err instanceof ZodError) {
const message = err.message; log.warn({ err: err.flatten() }, 'Request validation failed');
let validationIssues: ValidationIssue[] | undefined; return res.status(400).json({
let errorId: string | undefined; message: 'The request data is invalid.',
errors: err.issues.map((e) => ({ path: e.path, message: e.message })),
});
}
// --- Handle Custom Operational Errors ---
if (err instanceof NotFoundError) {
log.info({ err }, 'Resource not found');
return res.status(404).json({ message: err.message });
}
if (err instanceof ValidationError) {
log.warn({ err }, 'Validation error occurred');
return res.status(400).json({ message: err.message, errors: err.validationErrors });
}
// Refine the status code for known error types. Check for most specific types first.
if (err instanceof UniqueConstraintError) { if (err instanceof UniqueConstraintError) {
statusCode = 409; // Conflict log.warn({ err }, 'Constraint error occurred');
} else if (err instanceof NotFoundError) { return res.status(400).json({ message: err.message });
statusCode = 404;
} else if (err instanceof ForeignKeyConstraintError) {
statusCode = 400;
} else if (err instanceof ValidationError) {
statusCode = 400;
validationIssues = err.validationErrors;
} else if (err instanceof DatabaseError) {
// This is a generic fallback for other database errors that are not the specific subclasses above.
statusCode = err.status;
} else if (err.name === 'UnauthorizedError') {
statusCode = err.status || 401;
} }
// --- 2. Log Based on Final Status Code --- // --- Handle Generic Errors ---
// Log the full error details for debugging, especially for server errors. // Log the full error object for debugging. The pino logger will handle redaction.
if (statusCode >= 500) { log.error({ err }, 'An unhandled error occurred in an Express route');
errorId = crypto.randomBytes(4).toString('hex');
// The request-scoped logger already contains user, IP, and request_id. // In production, send a generic message to avoid leaking implementation details.
// We add the full error and the request object itself. if (process.env.NODE_ENV === 'production') {
// Pino's `redact` config will automatically sanitize sensitive fields in `req`. return res.status(500).json({ message: 'An internal server error occurred.' });
log.error(
{
err,
errorId,
req: { method: req.method, url: req.originalUrl, headers: req.headers, body: req.body },
},
`Unhandled API Error (ID: ${errorId})`,
);
} else {
// For 4xx errors, log at a lower level (e.g., 'warn') to avoid flooding error trackers.
// We include the validation errors in the log context if they exist.
log.warn(
{
err,
validationErrors: validationIssues, // Add validation issues to the log object
statusCode,
},
`Client Error on ${req.method} ${req.path}: ${message}`,
);
} }
// --- TEST ENVIRONMENT DEBUGGING --- // In development, send more details for easier debugging.
if (process.env.NODE_ENV === 'test') { return res.status(500).json({ message: err.message, stack: err.stack });
console.error('--- [TEST] UNHANDLED ERROR ---', err); };
}
// --- 3. Send Response ---
// In production, send a generic message for 5xx errors.
// In dev/test, send the actual error message for easier debugging.
const responseMessage =
statusCode >= 500 && process.env.NODE_ENV === 'production'
? `An unexpected server error occurred. Please reference error ID: ${errorId}`
: message;
res.status(statusCode).json({
message: responseMessage,
...(validationIssues && { errors: validationIssues }), // Conditionally add the 'errors' array if it exists
});
};
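Since the rewritten handler documents itself as the last `app.use()` call, the wiring would look roughly like this (the server entry point and router paths are assumptions for illustration):

import express from 'express';
import { errorHandler } from './middleware/errorHandler';
// import adminRouter from './routes/admin.routes'; // illustrative path

const app = express();
app.use(express.json());
// app.use('/api/admin', adminRouter);

// Must be registered AFTER all routes and middleware so that both thrown
// errors and explicit next(error) calls land here.
app.use(errorHandler);

app.listen(3000);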

View File

@@ -135,6 +135,7 @@ router.get('/corrections', async (req, res, next: NextFunction) => {
     const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
     res.json(corrections);
   } catch (error) {
+    logger.error({ error }, 'Error fetching suggested corrections');
     next(error);
   }
 });
@@ -144,6 +145,7 @@ router.get('/brands', async (req, res, next: NextFunction) => {
     const brands = await db.flyerRepo.getAllBrands(req.log);
     res.json(brands);
   } catch (error) {
+    logger.error({ error }, 'Error fetching brands');
     next(error);
   }
 });
@@ -153,6 +155,7 @@ router.get('/stats', async (req, res, next: NextFunction) => {
     const stats = await db.adminRepo.getApplicationStats(req.log);
     res.json(stats);
   } catch (error) {
+    logger.error({ error }, 'Error fetching application stats');
     next(error);
   }
 });
@@ -162,6 +165,7 @@ router.get('/stats/daily', async (req, res, next: NextFunction) => {
     const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
     res.json(dailyStats);
   } catch (error) {
+    logger.error({ error }, 'Error fetching daily stats');
     next(error);
   }
 });
@@ -176,6 +180,7 @@ router.post(
       await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
       res.status(200).json({ message: 'Correction approved successfully.' });
     } catch (error) {
+      logger.error({ error }, 'Error approving correction');
       next(error);
     }
   },
@@ -191,6 +196,7 @@ router.post(
       await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
       res.status(200).json({ message: 'Correction rejected successfully.' });
     } catch (error) {
+      logger.error({ error }, 'Error rejecting correction');
       next(error);
     }
   },
@@ -210,6 +216,7 @@ router.put(
       );
       res.status(200).json(updatedCorrection);
     } catch (error) {
+      logger.error({ error }, 'Error updating suggested correction');
       next(error);
     }
   },
@@ -225,6 +232,7 @@ router.put(
       const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
       res.status(200).json(updatedRecipe);
     } catch (error) {
+      logger.error({ error }, 'Error updating recipe status');
       next(error); // Pass all errors to the central error handler
     }
   },
@@ -250,6 +258,7 @@ router.post(
       logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
       res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
     } catch (error) {
+      logger.error({ error }, 'Error updating brand logo');
       next(error);
     }
   },
@@ -260,6 +269,7 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
     const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
     res.json(items);
   } catch (error) {
+    logger.error({ error }, 'Error fetching unmatched items');
     next(error);
   }
 });
@@ -279,6 +289,7 @@ router.delete(
       await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
       res.status(204).send();
     } catch (error: unknown) {
+      logger.error({ error }, 'Error deleting recipe');
       next(error);
     }
   },
@@ -297,6 +308,7 @@ router.delete(
       await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
       res.status(204).send();
     } catch (error: unknown) {
+      logger.error({ error }, 'Error deleting flyer');
       next(error);
     }
   },
@@ -316,6 +328,7 @@ router.put(
       ); // This is still a standalone function in admin.db.ts
       res.status(200).json(updatedComment);
     } catch (error: unknown) {
+      logger.error({ error }, 'Error updating comment status');
       next(error);
     }
   },
@@ -326,6 +339,7 @@ router.get('/users', async (req, res, next: NextFunction) => {
     const users = await db.adminRepo.getAllUsers(req.log);
     res.json(users);
   } catch (error) {
+    logger.error({ error }, 'Error fetching users');
     next(error);
   }
 });
@@ -345,6 +359,7 @@ router.get(
      const logs = await db.adminRepo.getActivityLog(limit, offset, req.log);
      res.json(logs);
    } catch (error) {
+      logger.error({ error }, 'Error fetching activity log');
      next(error);
    }
  },
@@ -360,6 +375,7 @@ router.get(
      const user = await db.userRepo.findUserProfileById(params.id, req.log);
      res.json(user);
    } catch (error) {
+      logger.error({ error }, 'Error fetching user profile');
      next(error);
    }
  },
@@ -395,6 +411,7 @@ router.delete(
      await db.userRepo.deleteUserById(params.id, req.log);
      res.status(204).send();
    } catch (error) {
+      logger.error({ error }, 'Error deleting user');
      next(error);
    }
  },
@@ -478,6 +495,7 @@ router.post(
        .status(202)
        .json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
    } catch (error) {
+      logger.error({ error }, 'Error enqueuing cleanup job');
      next(error);
    }
  },
@@ -500,6 +518,7 @@ router.post('/trigger/failing-job', async (req: Request, res: Response, next: Ne
      .status(202)
      .json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
  } catch (error) {
+    logger.error({ error }, 'Error enqueuing failing job');
    next(error);
  }
 });
@@ -572,6 +591,7 @@ router.get('/queues/status', async (req: Request, res: Response, next: NextFunct
    );
    res.json(queueStatuses);
  } catch (error) {
+    logger.error({ error }, 'Error fetching queue statuses');
    next(error);
  }
 });
@@ -620,6 +640,7 @@ router.post(
      );
      res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
    } catch (error) {
+      logger.error({ error }, 'Error retrying job');
      next(error);
    }
  },
@@ -651,6 +672,7 @@ router.post(
        .status(202)
        .json({ message: 'Successfully enqueued weekly analytics job.', jobId: job.id });
    } catch (error) {
+      logger.error({ error }, 'Error enqueuing weekly analytics job');
      next(error);
    }
  },


@@ -353,10 +353,11 @@ describe('AI Routes (/api/ai)', () => {
   expect(response.status).toBe(400);
 });
-it('should return 409 Conflict if flyer checksum already exists', async () => {
+it('should return 409 Conflict and delete the uploaded file if flyer checksum already exists', async () => {
   // Arrange
   const mockExistingFlyer = createMockFlyer({ flyer_id: 99 });
   vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(mockExistingFlyer); // Duplicate found
+  const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
   // Act
   const response = await supertest(app)
@@ -368,6 +369,10 @@ describe('AI Routes (/api/ai)', () => {
   expect(response.status).toBe(409);
   expect(response.body.message).toBe('This flyer has already been processed.');
   expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled();
+  // Assert that the file was deleted
+  expect(unlinkSpy).toHaveBeenCalledTimes(1);
+  // The filename is predictable in the test environment because of the multer config in ai.routes.ts
+  expect(unlinkSpy).toHaveBeenCalledWith(expect.stringContaining('flyerImage-test-flyer-image.jpg'));
 });
 it('should accept payload when extractedData.items is missing and save with empty items', async () => {
@@ -530,6 +535,27 @@ describe('AI Routes (/api/ai)', () => {
   const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
   expect(flyerDataArg.store_name).toBe('Root Store');
 });
+it('should default item quantity to 1 if missing', async () => {
+  const payloadMissingQuantity = {
+    checksum: 'qty-checksum',
+    originalFileName: 'flyer-qty.jpg',
+    extractedData: {
+      store_name: 'Qty Store',
+      items: [{ name: 'Item without qty', price: 100 }],
+    },
+  };
+  const response = await supertest(app)
+    .post('/api/ai/flyers/process')
+    .field('data', JSON.stringify(payloadMissingQuantity))
+    .attach('flyerImage', imagePath);
+  expect(response.status).toBe(201);
+  expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
+  const itemsArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][1];
+  expect(itemsArg[0].quantity).toBe(1);
+});
 });
 describe('POST /check-flyer', () => {


@@ -50,6 +50,15 @@ const errMsg = (e: unknown) => {
   return String(e || 'An unknown error occurred.');
 };
+const cleanupUploadedFile = async (file?: Express.Multer.File) => {
+  if (!file) return;
+  try {
+    await fs.promises.unlink(file.path);
+  } catch (err) {
+    // Ignore cleanup errors (e.g. file already deleted)
+  }
+};
 const cropAreaObjectSchema = z.object({
   x: z.number(),
   y: z.number(),
@@ -185,7 +194,7 @@ router.use((req: Request, res: Response, next: NextFunction) => {
     '[API /ai] Incoming request',
   );
 } catch (e: unknown) {
-  logger.error({ error: e }, 'Failed to log incoming AI request headers');
+  logger.error({ error: errMsg(e) }, 'Failed to log incoming AI request headers');
 }
 next();
});
@@ -316,7 +325,7 @@ router.post(
 // Try several ways to obtain the payload so we are tolerant to client variations.
 let parsed: FlyerProcessPayload = {};
-let extractedData: Partial<ExtractedCoreData> = {};
+let extractedData: Partial<ExtractedCoreData> | null | undefined = {};
 try {
   // If the client sent a top-level `data` field (stringified JSON), parse it.
   if (req.body && (req.body.data || req.body.extractedData)) {
@@ -337,7 +346,7 @@
   ) as FlyerProcessPayload;
 }
 // If parsed itself contains an `extractedData` field, use that, otherwise assume parsed is the extractedData
-extractedData = parsed.extractedData ?? (parsed as Partial<ExtractedCoreData>);
+extractedData = 'extractedData' in parsed ? parsed.extractedData : (parsed as Partial<ExtractedCoreData>);
 } else {
 // No explicit `data` field found. Attempt to interpret req.body as an object (Express may have parsed multipart fields differently).
 try {
@@ -383,6 +392,12 @@
 // Pull common metadata fields (checksum, originalFileName) from whichever shape we parsed.
 const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
+if (!checksum) {
+  await cleanupUploadedFile(req.file);
+  return res.status(400).json({ message: 'Checksum is required.' });
+}
 const originalFileName =
   parsed.originalFileName ?? parsed?.data?.originalFileName ?? req.file.originalname;
 const userProfile = req.user as UserProfile | undefined;
@@ -409,6 +424,7 @@
 const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
   ...item,
   master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
+  quantity: item.quantity ?? 1, // Default to 1 to satisfy DB constraint
   view_count: 0,
   click_count: 0,
   updated_at: new Date().toISOString(),
@@ -429,6 +445,7 @@
 const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, req.log);
 if (existingFlyer) {
   logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${checksum}`);
+  await cleanupUploadedFile(req.file);
   return res.status(409).json({ message: 'This flyer has already been processed.' });
 }
@@ -476,6 +493,7 @@
 res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
 } catch (error) {
+  await cleanupUploadedFile(req.file);
   next(error);
 }
},
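
The route now deletes the uploaded temp file on each early return (missing checksum, duplicate flyer) and again in the final catch block. For endpoints where the upload is only an input and is never persisted on success, the same guarantee could be centralized; a rough sketch under that assumption (withUploadCleanup is hypothetical, not part of this change):

import fs from 'node:fs/promises';
import type { Request, Response, NextFunction } from 'express';

type UploadHandler = (req: Request, res: Response) => Promise<void>;

// Hypothetical wrapper: always remove the multer temp file once the handler settles.
const withUploadCleanup =
  (handler: UploadHandler) => async (req: Request, res: Response, next: NextFunction) => {
    try {
      await handler(req, res);
    } catch (err) {
      next(err);
    } finally {
      // Assumes multer populated req.file; ignore races where the file is already gone.
      if (req.file) await fs.unlink(req.file.path).catch(() => {});
    }
  };

This trades the explicit per-branch cleanup calls above for a single choke point, at the cost of not being able to keep the file on success.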


@@ -134,8 +134,8 @@ router.post(
   // If the email is a duplicate, return a 409 Conflict status.
   return res.status(409).json({ message: error.message });
 }
-// The createUser method now handles its own transaction logging, so we just log the route failure.
 logger.error({ error }, `User registration route failed for email: ${email}.`);
+// Pass the error to the centralized handler
 return next(error);
 }
},


@@ -108,6 +108,7 @@ router.post(
   const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
   res.json(items);
 } catch (error) {
+  req.log.error({ error }, 'Error fetching batch flyer items');
   next(error);
 }
},
@@ -127,6 +128,7 @@ router.post(
   const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log);
   res.json({ count });
 } catch (error) {
+  req.log.error({ error }, 'Error counting batch flyer items');
   next(error);
 }
},


@@ -39,10 +39,7 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
   }
   return res.status(200).json({ success: true, message: 'All required database tables exist.' });
 } catch (error: unknown) {
-  logger.error(
-    { error: error instanceof Error ? error.message : error },
-    'Error during DB schema check:',
-  );
+  logger.error({ error }, 'Error during DB schema check:');
   next(error);
 }
});
@@ -133,6 +130,7 @@ router.get(
   }
   throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
 } catch (error: unknown) {
+  logger.error({ error }, 'Error checking Redis health');
   next(error);
 }
},


@@ -96,7 +96,8 @@ describe('Price Routes (/api/price-history)', () => {
   .send({ masterItemIds: 'not-an-array' });
   expect(response.status).toBe(400);
-  expect(response.body.errors[0].message).toContain('Expected array, received string');
+  // The actual message is "Invalid input: expected array, received string"
+  expect(response.body.errors[0].message).toBe('Invalid input: expected array, received string');
 });
 it('should return 400 if masterItemIds contains non-positive integers', async () => {
@@ -112,7 +113,8 @@ describe('Price Routes (/api/price-history)', () => {
   const response = await supertest(app).post('/api/price-history').send({});
   expect(response.status).toBe(400);
-  expect(response.body.errors[0].message).toBe('Required');
+  // The actual message is "Invalid input: expected array, received undefined"
+  expect(response.body.errors[0].message).toBe('Invalid input: expected array, received undefined');
 });
 it('should return 400 for invalid limit and offset', async () => {
@@ -122,8 +124,9 @@ describe('Price Routes (/api/price-history)', () => {
   expect(response.status).toBe(400);
   expect(response.body.errors).toHaveLength(2);
-  expect(response.body.errors[0].message).toBe('Number must be greater than 0');
-  expect(response.body.errors[1].message).toBe('Expected number, received string');
+  // The actual message is "Too small: expected number to be >0"
+  expect(response.body.errors[0].message).toBe('Too small: expected number to be >0');
+  expect(response.body.errors[1].message).toBe('Invalid input: expected number, received NaN');
 });
 });
});


@@ -46,7 +46,6 @@ router.get(
   }
   // Check if there was output to stderr, even if the exit code was 0 (success).
-  // This handles warnings or non-fatal errors that should arguably be treated as failures in this context.
   if (stderr && stderr.trim().length > 0) {
     logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr:');
     return next(new Error(`PM2 command produced an error: ${stderr}`));
@@ -86,6 +85,7 @@ router.post(
   res.json(coordinates);
 } catch (error) {
+  logger.error({ error }, 'Error geocoding address');
   next(error);
 }
},


@@ -77,7 +77,7 @@ router.use(passport.authenticate('jwt', { session: false }));
 // Ensure the directory for avatar uploads exists.
 const avatarUploadDir = path.join(process.cwd(), 'public', 'uploads', 'avatars');
 fs.mkdir(avatarUploadDir, { recursive: true }).catch((err) => {
-  logger.error('Failed to create avatar upload directory:', err);
+  logger.error({ err }, 'Failed to create avatar upload directory');
 });
 // Define multer storage configuration. The `req.user` object will be available
@@ -122,6 +122,7 @@ router.post(
   );
   res.json(updatedProfile);
 } catch (error) {
+  logger.error({ error }, 'Error uploading avatar');
   next(error);
 }
},
@@ -151,6 +152,7 @@ router.get(
   );
   res.json(notifications);
 } catch (error) {
+  logger.error({ error }, 'Error fetching notifications');
   next(error);
 }
},
@@ -168,6 +170,7 @@ router.post(
   await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log);
   res.status(204).send(); // No Content
 } catch (error) {
+  logger.error({ error }, 'Error marking all notifications as read');
   next(error);
 }
},
@@ -193,6 +196,7 @@ router.post(
   );
   res.status(204).send(); // Success, no content to return
 } catch (error) {
+  logger.error({ error }, 'Error marking notification as read');
   next(error);
 }
},
@@ -345,11 +349,7 @@ router.post(
 if (error instanceof ForeignKeyConstraintError) {
   return res.status(400).json({ message: error.message });
 }
-const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-logger.error({
-  errorMessage,
-  body: req.body,
-});
+logger.error({ error, body: req.body }, 'Failed to add watched item');
 next(error);
 }
},
@@ -453,11 +453,7 @@ router.post(
 if (error instanceof ForeignKeyConstraintError) {
   return res.status(400).json({ message: error.message });
 }
-const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-logger.error({
-  errorMessage,
-  body: req.body,
-});
+logger.error({ error, body: req.body }, 'Failed to create shopping list');
 next(error);
 }
},
@@ -516,12 +512,7 @@ router.post(
 if (error instanceof ForeignKeyConstraintError) {
   return res.status(400).json({ message: error.message });
 }
-const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-logger.error({
-  errorMessage,
-  params: req.params,
-  body: req.body,
-});
+logger.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
 next(error);
 }
},
@@ -661,11 +652,7 @@ router.put(
 if (error instanceof ForeignKeyConstraintError) {
   return res.status(400).json({ message: error.message });
 }
-const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-logger.error({
-  errorMessage,
-  body: req.body,
-});
+logger.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
 next(error);
 }
},
@@ -709,11 +696,7 @@ router.put(
 if (error instanceof ForeignKeyConstraintError) {
   return res.status(400).json({ message: error.message });
 }
-const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred';
-logger.error({
-  errorMessage,
-  body: req.body,
-});
+logger.error({ error, body: req.body }, 'Failed to set user appliances');
 next(error);
 }
},
@@ -743,6 +726,7 @@ router.get(
   const address = await db.addressRepo.getAddressById(addressId, req.log); // This will throw NotFoundError if not found
   res.json(address);
 } catch (error) {
+  logger.error({ error }, 'Error fetching user address');
   next(error);
 }
},
@@ -781,6 +765,7 @@ router.put(
   const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed.
   res.status(200).json({ message: 'Address updated successfully', address_id: addressId });
 } catch (error) {
+  logger.error({ error }, 'Error updating user address');
   next(error);
 }
},


@@ -51,9 +51,7 @@ export class AiAnalysisService {
 // Normalize sources to a consistent format.
 const mappedSources = (response.sources || []).map(
   (s: RawSource) =>
-    (s.web
-      ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
-      : { uri: '', title: 'Untitled' }) as Source,
+    (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
 );
 return { ...response, sources: mappedSources };
 }
@@ -84,9 +82,7 @@ export class AiAnalysisService {
 // Normalize sources to a consistent format.
 const mappedSources = (response.sources || []).map(
   (s: RawSource) =>
-    (s.web
-      ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
-      : { uri: '', title: 'Untitled' }) as Source,
+    (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
 );
 return { ...response, sources: mappedSources };
 }


@@ -4,7 +4,13 @@
  * It communicates with the application's own backend endpoints, which then securely
  * call the Google AI services. This ensures no API keys are exposed on the client.
  */
-import type { FlyerItem, Store, MasterGroceryItem } from '../types';
+import type {
+  FlyerItem,
+  Store,
+  MasterGroceryItem,
+  ProcessingStage,
+  GroundedResponse,
+} from '../types';
 import { logger } from './logger.client';
 import { apiFetch } from './apiClient';
@@ -20,14 +26,14 @@ export const uploadAndProcessFlyer = async (
   file: File,
   checksum: string,
   tokenOverride?: string,
-): Promise<Response> => {
+): Promise<{ jobId: string }> => {
   const formData = new FormData();
   formData.append('flyerFile', file);
   formData.append('checksum', checksum);
   logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
-  return apiFetch(
+  const response = await apiFetch(
     '/ai/upload-and-process',
     {
       method: 'POST',
@@ -35,20 +41,73 @@
   },
   { tokenOverride },
 );
+if (!response.ok) {
+  // Read the body once as text; calling response.json() first would consume
+  // the stream and make a fallback response.text() call throw.
+  const rawBody = await response.text();
+  let errorBody;
+  try {
+    errorBody = JSON.parse(rawBody);
+  } catch (e) {
+    errorBody = { message: rawBody };
+  }
+  // Throw a structured error so the component can inspect the status and body
+  throw { status: response.status, body: errorBody };
+}
+return response.json();
 };
+// Define the expected shape of the job status response
+export interface JobStatus {
+  id: string;
+  state: 'completed' | 'failed' | 'active' | 'waiting' | 'delayed' | 'paused';
+  progress: {
+    stages?: ProcessingStage[];
+    estimatedTimeRemaining?: number;
+    message?: string;
+  } | null;
+  returnValue: {
+    flyerId?: number;
+  } | null;
+  failedReason: string | null;
+}
 /**
  * Fetches the status of a background processing job.
  * This is the second step in the new background processing flow.
  * @param jobId The ID of the job to check.
  * @param tokenOverride Optional token for testing.
- * @returns A promise that resolves to the API response with the job's status.
+ * @returns A promise that resolves to the parsed job status object.
+ * @throws An error if the network request fails or if the response is not valid JSON.
  */
-export const getJobStatus = async (jobId: string, tokenOverride?: string): Promise<Response> => {
-  return apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
+export const getJobStatus = async (
+  jobId: string,
+  tokenOverride?: string,
+): Promise<JobStatus> => {
+  const response = await apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
+  if (!response.ok) {
+    let errorText = `API Error: ${response.status} ${response.statusText}`;
+    try {
+      const errorBody = await response.text();
+      if (errorBody) errorText = `API Error ${response.status}: ${errorBody}`;
+    } catch (e) {
+      // ignore if reading body fails
+    }
+    throw new Error(errorText);
+  }
+  // Read the body once and parse manually; response.json() would consume the
+  // stream, so a follow-up response.text() in the catch would throw instead of
+  // reporting the raw payload.
+  const rawText = await response.text();
+  try {
+    return JSON.parse(rawText) as JobStatus;
+  } catch (error) {
+    throw new Error(`Failed to parse JSON response from server. Body: ${rawText}`);
+  }
 };
-export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Promise<Response> => {
+export const isImageAFlyer = (
+  imageFile: File,
+  tokenOverride?: string,
+): Promise<Response> => {
   const formData = new FormData();
   formData.append('image', imageFile);
@@ -64,7 +123,7 @@
   );
 };
-export const extractAddressFromImage = async (
+export const extractAddressFromImage = (
   imageFile: File,
   tokenOverride?: string,
 ): Promise<Response> => {
@@ -81,7 +140,7 @@
   );
 };
-export const extractLogoFromImage = async (
+export const extractLogoFromImage = (
   imageFiles: File[],
   tokenOverride?: string,
 ): Promise<Response> => {
@@ -100,7 +159,7 @@
   );
 };
-export const getQuickInsights = async (
+export const getQuickInsights = (
   items: Partial<FlyerItem>[],
   signal?: AbortSignal,
   tokenOverride?: string,
@@ -117,7 +176,7 @@
   );
 };
-export const getDeepDiveAnalysis = async (
+export const getDeepDiveAnalysis = (
   items: Partial<FlyerItem>[],
   signal?: AbortSignal,
   tokenOverride?: string,
@@ -134,7 +193,7 @@
   );
 };
-export const searchWeb = async (
+export const searchWeb = (
   query: string,
   signal?: AbortSignal,
   tokenOverride?: string,
@@ -179,7 +238,7 @@ export const planTripWithMaps = async (
  * @param prompt A description of the image to generate (e.g., a meal plan).
  * @returns A base64-encoded string of the generated PNG image.
  */
-export const generateImageFromText = async (
+export const generateImageFromText = (
   prompt: string,
   signal?: AbortSignal,
   tokenOverride?: string,
@@ -202,7 +261,7 @@
  * @param text The text to be spoken.
  * @returns A base64-encoded string of the raw audio data.
  */
-export const generateSpeechFromText = async (
+export const generateSpeechFromText = (
   text: string,
   signal?: AbortSignal,
   tokenOverride?: string,
@@ -259,7 +318,7 @@ export const startVoiceSession = (callbacks: {
  * @param tokenOverride Optional token for testing.
  * @returns A promise that resolves to the API response containing the extracted text.
  */
-export const rescanImageArea = async (
+export const rescanImageArea = (
   imageFile: File,
   cropArea: { x: number; y: number; width: number; height: number },
   extractionType: 'store_name' | 'dates' | 'item_details',
@@ -270,7 +329,11 @@ export const rescanImageArea = (
   formData.append('cropArea', JSON.stringify(cropArea));
   formData.append('extractionType', extractionType);
-  return apiFetch('/ai/rescan-area', { method: 'POST', body: formData }, { tokenOverride });
+  return apiFetch(
+    '/ai/rescan-area',
+    { method: 'POST', body: formData },
+    { tokenOverride },
+  );
 };
 /**
@@ -278,7 +341,7 @@ export const rescanImageArea = (
  * @param watchedItems An array of the user's watched master grocery items.
  * @returns A promise that resolves to the raw `Response` object from the API.
  */
-export const compareWatchedItemPrices = async (
+export const compareWatchedItemPrices = (
   watchedItems: MasterGroceryItem[],
   signal?: AbortSignal,
 ): Promise<Response> => {
@@ -292,5 +355,4 @@ export const compareWatchedItemPrices = (
   body: JSON.stringify({ items: watchedItems }),
   },
   { signal },
-);
-};
+)};
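
With uploadAndProcessFlyer now resolving to { jobId } and getJobStatus returning a typed JobStatus, the expected client flow is upload-then-poll. A minimal polling sketch under those assumptions (the interval and timeout values are illustrative, not from this change):

// Hypothetical helper built on the two exported functions above.
export const waitForFlyerJob = async (jobId: string, timeoutMs = 120_000): Promise<number> => {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const status = await getJobStatus(jobId);
    if (status.state === 'completed' && status.returnValue?.flyerId) {
      return status.returnValue.flyerId;
    }
    if (status.state === 'failed') {
      throw new Error(status.failedReason ?? 'Flyer processing failed.');
    }
    await new Promise((resolve) => setTimeout(resolve, 2000)); // illustrative poll interval
  }
  throw new Error(`Timed out waiting for job ${jobId}.`);
};

// Usage: const { jobId } = await uploadAndProcessFlyer(file, checksum);
//        const flyerId = await waitForFlyerJob(jobId);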


@@ -166,6 +166,127 @@ describe('AI Service (Server)', () => {
   });
 });
+describe('Model Fallback Logic', () => {
+  const originalEnv = process.env;
+  beforeEach(() => {
+    vi.unstubAllEnvs();
+    process.env = { ...originalEnv, GEMINI_API_KEY: 'test-key' };
+    vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
+  });
+  afterEach(() => {
+    process.env = originalEnv;
+    vi.unstubAllEnvs();
+  });
+  it('should try the next model if the first one fails with a quota error', async () => {
+    // Arrange
+    const { AIService } = await import('./aiService.server');
+    const { logger } = await import('./logger.server');
+    const serviceWithFallback = new AIService(logger);
+    const quotaError = new Error('User rate limit exceeded due to quota');
+    const successResponse = { text: 'Success from fallback model', candidates: [] };
+    // Mock the generateContent function to fail on the first call and succeed on the second
+    mockGenerateContent.mockRejectedValueOnce(quotaError).mockResolvedValueOnce(successResponse);
+    const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
+    // Act
+    const result = await (serviceWithFallback as any).aiClient.generateContent(request);
+    // Assert
+    expect(result).toEqual(successResponse);
+    expect(mockGenerateContent).toHaveBeenCalledTimes(2);
+    // Check first call
+    expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
+      model: 'gemini-2.5-flash',
+      ...request,
+    });
+    // Check second call
+    expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
+      model: 'gemini-3-flash',
+      ...request,
+    });
+    // Check that a warning was logged
+    expect(logger.warn).toHaveBeenCalledWith(
+      expect.stringContaining(
+        "Model 'gemini-2.5-flash' failed due to quota/rate limit. Trying next model.",
+      ),
+    );
+  });
+  it('should throw immediately for non-retriable errors', async () => {
+    // Arrange
+    const { AIService } = await import('./aiService.server');
+    const { logger } = await import('./logger.server');
+    const serviceWithFallback = new AIService(logger);
+    const nonRetriableError = new Error('Invalid API Key');
+    mockGenerateContent.mockRejectedValueOnce(nonRetriableError);
+    const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
+    // Act & Assert
+    await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
+      'Invalid API Key',
+    );
+    expect(mockGenerateContent).toHaveBeenCalledTimes(1);
+    expect(logger.error).toHaveBeenCalledWith(
+      { error: nonRetriableError },
+      `[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
+    );
+  });
+  it('should throw the last error if all models fail', async () => {
+    // Arrange
+    const { AIService } = await import('./aiService.server');
+    const { logger } = await import('./logger.server');
+    const serviceWithFallback = new AIService(logger);
+    const quotaError1 = new Error('Quota exhausted for model 1');
+    const quotaError2 = new Error('429 Too Many Requests for model 2');
+    const quotaError3 = new Error('RESOURCE_EXHAUSTED for model 3');
+    mockGenerateContent
+      .mockRejectedValueOnce(quotaError1)
+      .mockRejectedValueOnce(quotaError2)
+      .mockRejectedValueOnce(quotaError3);
+    const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };
+    // Act & Assert
+    await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
+      quotaError3,
+    );
+    expect(mockGenerateContent).toHaveBeenCalledTimes(3);
+    expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
+      model: 'gemini-2.5-flash',
+      ...request,
+    });
+    expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
+      model: 'gemini-3-flash',
+      ...request,
+    });
+    expect(mockGenerateContent).toHaveBeenNthCalledWith(3, {
+      model: 'gemini-2.5-flash-lite',
+      ...request,
+    });
+    expect(logger.error).toHaveBeenCalledWith(
+      { lastError: quotaError3 },
+      '[AIService Adapter] All AI models failed. Throwing last known error.',
+    );
+  });
+});
 describe('extractItemsFromReceiptImage', () => {
   it('should extract items from a valid AI response', async () => {
     const mockAiResponseText = `[


@@ -72,6 +72,7 @@ export class AIService {
   private fs: IFileSystem;
   private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
   private logger: Logger;
+  private readonly models = ['gemini-2.5-flash', 'gemini-3-flash', 'gemini-2.5-flash-lite'];
   constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
     this.logger = logger;
@@ -121,17 +122,11 @@
   );
 }
-// do not change "gemini-2.5-flash" - this is correct
-const modelName = 'gemini-2.5-flash';
 // We create a shim/adapter that matches the old structure but uses the new SDK call pattern.
 // This preserves the dependency injection pattern used throughout the class.
 this.aiClient = genAI
   ? {
       generateContent: async (request) => {
-        // The model name is now injected here, into every call, as the new SDK requires.
-        // Architectural guard clause: All requests from this service must have content.
-        // This prevents sending invalid requests to the API and satisfies TypeScript's strictness.
         if (!request.contents || request.contents.length === 0) {
           this.logger.error(
             { request },
@@ -140,14 +135,7 @@
           throw new Error('AIService.generateContent requires at least one content element.');
         }
-        // Architectural Fix: After the guard clause, assign the guaranteed-to-exist element
-        // to a new constant. This provides a definitive type-safe variable for the compiler.
-        const firstContent = request.contents[0];
-        this.logger.debug(
-          { modelName, requestParts: firstContent.parts?.length ?? 0 },
-          '[AIService] Calling actual generateContent via adapter.',
-        );
-        return genAI.models.generateContent({ model: modelName, ...request });
+        return this._generateWithFallback(genAI, request);
       },
     }
   : {
@@ -182,6 +170,54 @@
   this.logger.info('---------------- [AIService] Constructor End ----------------');
 }
+private async _generateWithFallback(
+  genAI: GoogleGenAI,
+  request: { contents: Content[]; tools?: Tool[] },
+): Promise<GenerateContentResponse> {
+  let lastError: Error | null = null;
+  for (const modelName of this.models) {
+    try {
+      this.logger.info(
+        `[AIService Adapter] Attempting to generate content with model: ${modelName}`,
+      );
+      const result = await genAI.models.generateContent({ model: modelName, ...request });
+      // If the call succeeds, return the result immediately.
+      return result;
+    } catch (error: unknown) {
+      lastError = error instanceof Error ? error : new Error(String(error));
+      const errorMessage = (lastError.message || '').toLowerCase(); // normalize case for matching
+      // Check for specific error messages indicating quota issues or model unavailability.
+      if (
+        errorMessage.includes('quota') ||
+        errorMessage.includes('429') || // HTTP 429 Too Many Requests
+        errorMessage.includes('resource_exhausted') ||
+        errorMessage.includes('model is overloaded')
+      ) {
+        this.logger.warn(
+          `[AIService Adapter] Model '${modelName}' failed due to quota/rate limit. Trying next model. Error: ${errorMessage}`,
+        );
+        continue; // Try the next model in the list.
+      } else {
+        // For other errors (e.g., invalid input, safety settings), fail immediately.
+        this.logger.error(
+          { error: lastError },
+          `[AIService Adapter] Model '${modelName}' failed with a non-retriable error.`,
+        );
+        throw lastError;
+      }
+    }
+  }
+  // If all models in the list have failed, throw the last error encountered.
+  this.logger.error(
+    { lastError },
+    '[AIService Adapter] All AI models failed. Throwing last known error.',
+  );
+  throw lastError || new Error('All AI models failed to generate content.');
+}
 private async serverFileToGenerativePart(path: string, mimeType: string) {
   const fileData = await this.fs.readFile(path);
   return {
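
Note that the quota detection here lowercases the message and matches 'resource_exhausted', while the flyer worker in queueService.server.ts matches 'RESOURCE_EXHAUSTED' against the raw message; the two heuristics can drift apart. A small shared predicate (hypothetical, not introduced by this change) would keep them consistent:

// Hypothetical shared helper; this change inlines equivalent logic in two files.
export const isQuotaOrRateLimitError = (err: unknown): boolean => {
  const msg = (err instanceof Error ? err.message : String(err)).toLowerCase();
  return (
    msg.includes('quota') ||
    msg.includes('429') ||
    msg.includes('resource_exhausted') ||
    msg.includes('model is overloaded')
  );
};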


@@ -1,6 +1,7 @@
 // src/services/apiClient.ts
 import { Profile, ShoppingListItem, SearchQuery, Budget, Address } from '../types';
 import { logger } from './logger.client';
+import { eventBus } from './eventBus';
 // This constant should point to your backend API.
 // It's often a good practice to store this in an environment variable.
@@ -62,12 +63,12 @@ const refreshToken = async (): Promise<string> => {
   logger.info('Successfully refreshed access token.');
   return data.token;
 } catch (error) {
-  logger.error('Failed to refresh token. User will be logged out.', { error });
+  logger.error({ error }, 'Failed to refresh token. User session has expired.');
   // Only perform browser-specific actions if in the browser environment.
   if (typeof window !== 'undefined') {
     localStorage.removeItem('authToken');
-    // A hard redirect is a simple way to reset the app state to logged-out.
-    // window.location.href = '/'; // Removed to allow the caller to handle session expiry.
+    // Dispatch a global event that the UI layer can listen for to handle session expiry.
+    eventBus.dispatch('sessionExpired');
   }
   throw error;
 }
@@ -144,9 +145,8 @@ export const apiFetch = async (
 // --- DEBUG LOGGING for failed requests ---
 if (!response.ok) {
   const responseText = await response.clone().text();
-  logger.error(
-    `apiFetch: Request to ${fullUrl} failed with status ${response.status}. Response body:`,
-    responseText,
-  );
+  logger.error({ url: fullUrl, status: response.status, body: responseText },
+    'apiFetch: Request failed',
+  );
 }
 // --- END DEBUG LOGGING ---

src/services/eventBus.ts (new file)

@@ -0,0 +1,31 @@
+// src/services/eventBus.ts
+/**
+ * A simple, generic event bus for cross-component communication without direct coupling.
+ * This is particularly useful for broadcasting application-wide events, such as session expiry.
+ */
+type EventCallback = (data?: any) => void;
+class EventBus {
+  private listeners: { [key: string]: EventCallback[] } = {};
+  on(event: string, callback: EventCallback): void {
+    if (!this.listeners[event]) {
+      this.listeners[event] = [];
+    }
+    this.listeners[event].push(callback);
+  }
+  off(event: string, callback: EventCallback): void {
+    if (!this.listeners[event]) return;
+    this.listeners[event] = this.listeners[event].filter((l) => l !== callback);
+  }
+  dispatch(event: string, data?: any): void {
+    if (!this.listeners[event]) return;
+    this.listeners[event].forEach((callback) => callback(data));
+  }
+}
+export const eventBus = new EventBus();
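
A usage sketch for the bus as wired into apiClient.ts above; the subscribing side (the component names are hypothetical) would look like:

import { eventBus } from './eventBus';

// e.g. in a top-level auth provider: react to the 'sessionExpired' event
// dispatched by refreshToken() when the refresh call fails.
const onSessionExpired = () => {
  // Clear client-side auth state and surface the login screen.
};
eventBus.on('sessionExpired', onSessionExpired);

// Unsubscribe (e.g. on unmount) to avoid leaking listeners:
eventBus.off('sessionExpired', onSessionExpired);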


@@ -25,10 +25,7 @@ export class GeocodingService {
     return JSON.parse(cached);
   }
 } catch (error) {
-  logger.error(
-    { err: error instanceof Error ? error.message : error, cacheKey },
-    'Redis GET or JSON.parse command failed. Proceeding without cache.',
-  );
+  logger.error({ err: error, cacheKey }, 'Redis GET or JSON.parse command failed. Proceeding without cache.');
 }
 if (process.env.GOOGLE_MAPS_API_KEY) {
@@ -44,8 +41,8 @@ export class GeocodingService {
   );
 } catch (error) {
   logger.error(
-    { err: error instanceof Error ? error.message : error },
-    'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.',
+    { err: error },
+    'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.'
   );
 }
 } else {
@@ -72,10 +69,7 @@ export class GeocodingService {
 try {
   await redis.set(cacheKey, JSON.stringify(result), 'EX', 60 * 60 * 24 * 30); // Cache for 30 days
 } catch (error) {
-  logger.error(
-    { err: error instanceof Error ? error.message : error, cacheKey },
-    'Redis SET command failed. Result will not be cached.',
-  );
+  logger.error({ err: error, cacheKey }, 'Redis SET command failed. Result will not be cached.');
 }
 }
@@ -98,10 +92,7 @@ export class GeocodingService {
   logger.info(`Successfully deleted ${totalDeleted} geocode cache entries.`);
   return totalDeleted;
 } catch (error) {
-  logger.error(
-    { err: error instanceof Error ? error.message : error },
-    'Failed to clear geocode cache from Redis.',
-  );
+  logger.error({ err: error }, 'Failed to clear geocode cache from Redis.');
   throw error;
 }
}


@@ -34,6 +34,9 @@ export const logger = pino({
   '*.body.password',
   '*.body.newPassword',
   '*.body.currentPassword',
+  '*.body.confirmPassword',
+  '*.body.refreshToken',
+  '*.body.token',
 ],
 censor: '[REDACTED]',
},
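
These entries use pino's redact paths, where `*` matches a single key level, so '*.body.token' masks e.g. req.body.token. A quick sketch of the effect, assuming the configuration above:

// Assuming the logger defined above:
logger.info({ req: { body: { email: 'a@b.co', token: 'eyJhbGciOi...' } } }, 'login attempt');
// The emitted JSON line contains: "body":{"email":"a@b.co","token":"[REDACTED]"}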


@@ -1,5 +1,5 @@
 // src/services/queueService.server.ts
-import { Queue, Worker, Job } from 'bullmq';
+import { Queue, Worker, Job, UnrecoverableError } from 'bullmq';
 import IORedis from 'ioredis'; // Correctly imported
 import fsPromises from 'node:fs/promises';
 import { exec } from 'child_process';
@@ -185,9 +185,26 @@ const attachWorkerEventListeners = (worker: Worker) => {
 export const flyerWorker = new Worker<FlyerJobData>(
   'flyer-processing', // Must match the queue name
-  (job) => {
-    // The processJob method creates its own job-specific logger internally.
-    return flyerProcessingService.processJob(job);
+  async (job) => {
+    try {
+      // The processJob method creates its own job-specific logger internally.
+      return await flyerProcessingService.processJob(job);
+    } catch (error: any) {
+      // Check for quota errors or other unrecoverable errors from the AI service
+      const errorMessage = error?.message || '';
+      if (
+        errorMessage.includes('quota') ||
+        errorMessage.includes('429') ||
+        errorMessage.includes('RESOURCE_EXHAUSTED')
+      ) {
+        logger.error(
+          { err: error, jobId: job.id },
+          '[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
+        );
+        throw new UnrecoverableError(errorMessage);
+      }
+      throw error;
+    }
   },
   {
     connection,
@@ -207,13 +224,9 @@ export const emailWorker = new Worker<EmailJobData>(
 try {
   await emailService.sendEmail(job.data, jobLogger);
 } catch (error: unknown) {
-  // Standardize error logging to capture the full error object, including the stack trace.
-  // This provides more context for debugging than just logging the message.
   logger.error(
     {
-      // Log the full error object for better diagnostics.
-      err: error instanceof Error ? error : new Error(String(error)),
-      // Also include the job data for context.
+      err: error,
       jobData: job.data,
     },
     `[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
@@ -249,11 +262,7 @@ export const analyticsWorker = new Worker<AnalyticsJobData>(
   logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
 } catch (error: unknown) {
   // Standardize error logging.
-  logger.error(
-    {
-      err: error instanceof Error ? error : new Error(String(error)),
-      jobData: job.data,
-    },
+  logger.error({ err: error, jobData: job.data },
     `[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
   );
   throw error; // Re-throw to let BullMQ handle the failure and retry.
@@ -315,9 +324,7 @@ export const cleanupWorker = new Worker<CleanupJobData>(
 } catch (error: unknown) {
   // Standardize error logging.
   logger.error(
-    {
-      err: error instanceof Error ? error : new Error(String(error)),
-    },
+    { err: error },
     `[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
   );
   throw error; // Re-throw to let BullMQ handle the failure and retry.
@@ -350,10 +357,7 @@ export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
 } catch (error: unknown) {
   // Standardize error logging.
   logger.error(
-    {
-      err: error instanceof Error ? error : new Error(String(error)),
-      jobData: job.data,
-    },
+    { err: error, jobData: job.data },
     `[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
   );
   throw error; // Re-throw to let BullMQ handle the failure and retry.
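
BullMQ treats a thrown UnrecoverableError as terminal: the job moves straight to the failed set and any remaining attempts/backoff configured on the job are skipped, which is why the flyer worker above uses it for quota errors. A minimal standalone sketch (queue name and connection are illustrative):

import { Worker, UnrecoverableError } from 'bullmq';

const worker = new Worker(
  'demo-queue',
  async (job) => {
    if (job.data.quotaExhausted) {
      // Fails the job immediately; BullMQ will not retry even if attempts > 1.
      throw new UnrecoverableError('Provider quota exhausted');
    }
    throw new Error('Transient failure'); // retried according to attempts/backoff
  },
  { connection: { host: 'localhost', port: 6379 } }, // illustrative connection
);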


@@ -56,15 +56,15 @@ describe('Price History API Integration Test (/api/price-history)', () => {
 // 4. Create flyer items linking the master item to the flyers with prices
 await pool.query(
-  `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 199, '$1.99')`,
+  `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 199, '$1.99', '1')`,
   [flyerId1, masterItemId],
 );
 await pool.query(
-  `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 249, '$2.49')`,
+  `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 249, '$2.49', '1')`,
   [flyerId2, masterItemId],
 );
 await pool.query(
-  `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 299, '$2.99')`,
+  `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 299, '$2.99', '1')`,
   [flyerId3, masterItemId],
 );
});


@@ -59,7 +59,7 @@ describe('Zod Utilities', () => {
 expect(result.success).toBe(false);
 if (!result.success) {
   // z.string() will throw its own error message before min(1) is checked.
-  expect(result.error.issues[0].message).toBe('Expected string, received number');
+  expect(result.error.issues[0].message).toBe('Invalid input: expected string, received number');
 }
});
@@ -67,7 +67,7 @@ describe('Zod Utilities', () => {
 const result = schema.safeParse({ a: 1 });
 expect(result.success).toBe(false);
 if (!result.success) {
-  expect(result.error.issues[0].message).toBe('Expected string, received object');
+  expect(result.error.issues[0].message).toBe('Invalid input: expected string, received object');
 }
});
});
@@ -95,7 +95,7 @@ describe('Zod Utilities', () => {
 const result = schema.safeParse({ params: { id: 'abc' } });
 expect(result.success).toBe(false);
 if (!result.success) {
-  expect(result.error.issues[0].message).toContain('Expected number, received nan');
+  expect(result.error.issues[0].message).toBe('Invalid input: expected number, received NaN');
 }
});
@@ -103,7 +103,7 @@ describe('Zod Utilities', () => {
 const result = schema.safeParse({ params: { id: -1 } });
 expect(result.success).toBe(false);
 if (!result.success) {
-  expect(result.error.issues[0].message).toContain('Must be a number');
+  expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
 }
});
@@ -111,7 +111,7 @@ describe('Zod Utilities', () => {
 const result = schema.safeParse({ params: { id: 1.5 } });
 expect(result.success).toBe(false);
 if (!result.success) {
-  expect(result.error.issues[0].message).toContain('Must be a number');
+  expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
 }
});
@@ -119,7 +119,7 @@ describe('Zod Utilities', () => {
 const result = schema.safeParse({ params: { id: 0 } });
 expect(result.success).toBe(false);
 if (!result.success) {
-  expect(result.error.issues[0].message).toContain('Must be a number');
+  expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
 }
});
@@ -224,7 +224,7 @@ describe('Zod Utilities', () => {
 const floatResult = schema.safeParse('123.45');
 expect(floatResult.success).toBe(false);
 if (!floatResult.success) {
-  expect(floatResult.error.issues[0].message).toBe('Expected integer, received float');
+  expect(floatResult.error.issues[0].message).toBe('Invalid input: expected int, received number');
 }
});
@@ -234,7 +234,7 @@ describe('Zod Utilities', () => {
 const zeroResult = schema.safeParse('0');
 expect(zeroResult.success).toBe(false);
 if (!zeroResult.success) {
-  expect(zeroResult.error.issues[0].message).toBe('Number must be greater than 0');
+  expect(zeroResult.error.issues[0].message).toBe('Too small: expected number to be >0');
 }
});
@@ -244,7 +244,7 @@ describe('Zod Utilities', () => {
 const negativeResult = schema.safeParse('-1');
 expect(negativeResult.success).toBe(false);
 if (!negativeResult.success) {
-  expect(negativeResult.error.issues[0].message).toBe('Number must be greater than or equal to 0');
+  expect(negativeResult.error.issues[0].message).toBe('Too small: expected number to be >=0');
 }
});
@@ -254,12 +254,12 @@ describe('Zod Utilities', () => {
 const tooSmallResult = schema.safeParse('9');
 expect(tooSmallResult.success).toBe(false);
 if (!tooSmallResult.success) {
-  expect(tooSmallResult.error.issues[0].message).toBe('Number must be greater than or equal to 10');
+  expect(tooSmallResult.error.issues[0].message).toBe('Too small: expected number to be >=10');
 }
 const tooLargeResult = schema.safeParse('21');
 expect(tooLargeResult.success).toBe(false);
 if (!tooLargeResult.success) {
-  expect(tooLargeResult.error.issues[0].message).toBe('Number must be less than or equal to 20');
+  expect(tooLargeResult.error.issues[0].message).toBe('Too big: expected number to be <=20');
 }
});
});


@@ -59,18 +59,27 @@ export const optionalNumeric = (
   nonnegative?: boolean;
 } = {},
) => {
-  let schema = z.coerce.number();
-  if (options.integer) schema = schema.int();
-  if (options.positive) schema = schema.positive();
-  else if (options.nonnegative) schema = schema.nonnegative();
-  if (options.min !== undefined) schema = schema.min(options.min);
-  if (options.max !== undefined) schema = schema.max(options.max);
-  if (options.default !== undefined) return schema.optional().default(options.default);
-  return schema.optional();
+  let numberSchema = z.coerce.number();
+  if (options.integer) numberSchema = numberSchema.int();
+  if (options.positive) numberSchema = numberSchema.positive();
+  else if (options.nonnegative) numberSchema = numberSchema.nonnegative();
+  if (options.min !== undefined) numberSchema = numberSchema.min(options.min);
+  if (options.max !== undefined) numberSchema = numberSchema.max(options.max);
+  // Make the number schema optional *before* preprocessing. This allows it to correctly handle
+  // the `undefined` value that our preprocessor generates from `null`.
+  const optionalNumberSchema = numberSchema.optional();
+  // This is crucial because z.coerce.number(null) results in 0, which bypasses
+  // the .optional() and .default() logic for null inputs. We want null to be
+  // treated as "not provided", just like undefined.
+  const schema = z.preprocess((val) => (val === null ? undefined : val), optionalNumberSchema);
+  if (options.default !== undefined) return schema.default(options.default);
+  return schema;
};
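
The preprocess wrapper is the important part: z.coerce.number() runs Number(null), which yields 0 and silently passes validation. A small illustration of the difference (standalone, using zod directly rather than the helper):

import { z } from 'zod';

const plain = z.coerce.number().optional();
plain.parse(null); // 0, because null is coerced via Number(null); this is the bug being avoided

const guarded = z.preprocess((val) => (val === null ? undefined : val), plain);
guarded.parse(null); // undefined, so null now behaves like "not provided"
guarded.parse('42'); // 42, string inputs are still coerced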
 /**