Compare commits

...

18 Commits

Author SHA1 Message Date
Gitea Actions
daaacfde5e ci: Bump version to 0.1.1 [skip ci] 2025-12-24 23:53:27 +05:00
7ac8fe1d29 Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com 2025-12-24 10:52:13 -08:00
a2462dfb6b testing push to prod etc 2025-12-24 10:51:43 -08:00
Gitea Actions
a911224fb4 ci: Bump version to 0.1.0 for production release [skip ci] 2025-12-24 23:24:53 +05:00
Gitea Actions
bf4bcef890 ci: Bump version to 0.0.30 [skip ci] 2025-12-24 22:59:36 +05:00
ac6cd2e0a1 Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com 2025-12-24 09:58:45 -08:00
eea03880c1 exclude some dirs from coverage 2025-12-24 09:58:37 -08:00
Gitea Actions
7fc263691f ci: Bump version to 0.0.29 [skip ci] 2025-12-24 22:41:17 +05:00
c0912d36d5 Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com 2025-12-24 09:39:56 -08:00
612c2b5943 deploy to test fix 2025-12-24 09:39:49 -08:00
Gitea Actions
8e787ddcf0 ci: Bump version to 0.0.28 [skip ci] 2025-12-24 22:18:18 +05:00
11c52d284c fixing unit tests 2025-12-24 09:17:09 -08:00
Gitea Actions
b528bd3651 ci: Bump version to 0.0.27 [skip ci] 2025-12-24 22:06:03 +05:00
4c5ceb1bd6 Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com 2025-12-24 09:05:12 -08:00
bcc4ad64dc fixing unit tests 2025-12-24 09:04:10 -08:00
Gitea Actions
d520980322 ci: Bump version to 0.0.26 [skip ci] 2025-12-24 21:23:30 +05:00
d79955aaa0 Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com 2025-12-24 08:22:30 -08:00
e66027dc8e fix e2e and deploy to prod 2025-12-24 08:21:35 -08:00
15 changed files with 538 additions and 63 deletions

View File

@@ -47,6 +47,19 @@ jobs:
- name: Install Dependencies
run: npm ci
- name: Bump Minor Version and Push
run: |
# Configure git for the commit.
git config --global user.name 'Gitea Actions'
git config --global user.email 'actions@gitea.projectium.com'
# Bump the minor version number. This creates a new commit and a new tag.
# The commit message includes [skip ci] to prevent this push from triggering another workflow run.
npm version minor -m "ci: Bump version to %s for production release [skip ci]"
# Push the new commit and the new tag back to the main branch.
git push --follow-tags
- name: Check for Production Database Schema Changes
env:
DB_HOST: ${{ secrets.DB_HOST }}
@@ -61,9 +74,10 @@ jobs:
echo "--- Checking for production schema changes ---"
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
echo "Current Git Schema Hash: $CURRENT_HASH"
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A || echo "none")
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
if [ -z "$DEPLOYED_HASH" ]; then
echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
echo "ERROR: Database schema mismatch detected! A manual database migration is required."
@@ -79,8 +93,9 @@ jobs:
exit 1
fi
GITEA_SERVER_URL="https://gitea.projectium.com"
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
@@ -148,7 +163,12 @@ jobs:
echo "Updating schema hash in production database..."
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
"CREATE TABLE IF NOT EXISTS public.schema_info (
environment VARCHAR(50) PRIMARY KEY,
schema_hash VARCHAR(64) NOT NULL,
deployed_at TIMESTAMP DEFAULT NOW()
);
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
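
Worth noting for reviewers: deployment is now gated on a schema hash. The workflow hashes sql/master_schema_rollup.sql (normalizing line endings first) and compares it to the hash stored in public.schema_info, refusing to deploy on a mismatch. A minimal sketch of the same check as a standalone Node script; connection settings come from the standard PG* environment variables, and everything beyond the query and hashing shown in the workflow is illustrative:

// check-schema-hash.ts -- illustrative sketch of the workflow's schema gate, not its actual code.
import { createHash } from 'crypto';
import { readFileSync } from 'fs';
import { Client } from 'pg';

async function main(): Promise<void> {
  // Normalize CRLF to LF so the hash matches what `dos2unix | sha256sum` produces.
  const sql = readFileSync('sql/master_schema_rollup.sql', 'utf-8').replace(/\r\n/g, '\n');
  const currentHash = createHash('sha256').update(sql).digest('hex');

  const client = new Client(); // reads PGHOST / PGUSER / PGPASSWORD / PGDATABASE from the environment
  await client.connect();
  const res = await client.query(
    "SELECT schema_hash FROM public.schema_info WHERE environment = 'production'",
  );
  await client.end();

  const deployedHash: string | undefined = res.rows[0]?.schema_hash;
  if (!deployedHash) {
    console.warn('No schema hash found; expected on a first-time deployment.');
  } else if (deployedHash !== currentHash) {
    console.error('Schema mismatch: manual migration required.');
    process.exit(1);
  }
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});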

View File

@@ -119,6 +119,11 @@ jobs:
# --- JWT Secret for Passport authentication in tests ---
JWT_SECRET: ${{ secrets.JWT_SECRET }}
# --- V8 Coverage for Server Process ---
# This variable tells the Node.js process (our server, started by globalSetup)
# where to output its raw V8 coverage data.
NODE_V8_COVERAGE: '.coverage/tmp/integration-server'
# --- Increase Node.js memory limit to prevent heap out of memory errors ---
# This is crucial for memory-intensive tasks like running tests and coverage.
NODE_OPTIONS: '--max-old-space-size=8192'
@@ -137,15 +142,15 @@ jobs:
# The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
echo "--- Running Unit Tests ---"
# npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
npm run test:unit -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
echo "--- Running Integration Tests ---"
npm run test:integration -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
npm run test:integration -- --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
echo "--- Running E2E Tests ---"
# Run E2E tests using the integration config (for DB setup) but output coverage separately.
# We increase timeout significantly (120s) for E2E flows that involve AI processing.
npx vitest run src/tests/e2e --config vitest.config.integration.ts --coverage --coverage.reportsDirectory=.coverage/e2e --reporter=verbose --testTimeout=120000 --no-file-parallelism || true
# Run E2E tests using the dedicated E2E config which inherits from integration config.
# We still pass --coverage to enable it, but directory and timeout are now in the config.
npx vitest run --config vitest.config.e2e.ts --coverage --coverage.exclude='**/*.test.ts' --coverage.exclude='**/tests/**' --coverage.exclude='**/mocks/**' --reporter=verbose --no-file-parallelism || true
# Re-enable secret masking for subsequent steps.
echo "::secret-masking::"
@@ -174,7 +179,7 @@ jobs:
# Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
# We only generate the 'json' report here because it's all nyc needs for merging.
echo "Server coverage report about to be generated..."
npx c8 report --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
npx c8 report --exclude='**/*.test.ts' --exclude='**/tests/**' --exclude='**/mocks/**' --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
echo "Server coverage report generated. Verifying existence:"
ls -l .coverage/integration-server/coverage-final.json
@@ -213,7 +218,10 @@ jobs:
--reporter=text \
--reporter=html \
--report-dir .coverage/ \
--temp-dir "$NYC_SOURCE_DIR"
--temp-dir "$NYC_SOURCE_DIR" \
--exclude "**/*.test.ts" \
--exclude "**/tests/**" \
--exclude "**/mocks/**"
# Re-enable secret masking for subsequent steps.
echo "::secret-masking::"
@@ -264,18 +272,19 @@ jobs:
# We normalize line endings to ensure the hash is consistent across different OS environments.
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
echo "Current Git Schema Hash: $CURRENT_HASH"
# Query the production database to get the hash of the deployed schema.
# The `psql` command requires PGPASSWORD to be set.
# `\t` sets tuples-only mode and `\A` unaligns output to get just the raw value.
# The `|| echo "none"` ensures the command doesn't fail if the table or row doesn't exist yet.
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A || echo "none")
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A)
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
# Check if the hash is "none" (command failed) OR if it's an empty string (table exists but is empty).
if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
if [ -z "$DEPLOYED_HASH" ]; then
echo "WARNING: No schema hash found in the test database."
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
echo "--- Debug: Dumping schema_info table ---"
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT * FROM public.schema_info;" || true
echo "----------------------------------------"
# We allow the deployment to continue, but a manual schema update is required.
# You could choose to fail here by adding `exit 1`.
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
@@ -299,8 +308,9 @@ jobs:
fi
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
@@ -362,7 +372,7 @@ jobs:
echo "Installing production dependencies and restarting test server..."
cd /var/www/flyer-crawler-test.projectium.com
npm install --omit=dev # Install only production dependencies
npm install --omit=dev
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
# It will START the process if it's not running, or RELOAD it if it is.
# We also add `&& pm2 save` to persist the process list across server reboots.
@@ -374,7 +384,12 @@ jobs:
echo "Updating schema hash in test database..."
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
"CREATE TABLE IF NOT EXISTS public.schema_info (
environment VARCHAR(50) PRIMARY KEY,
schema_hash VARCHAR(64) NOT NULL,
deployed_at TIMESTAMP DEFAULT NOW()
);
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
# Verify the hash was updated

View File

@@ -0,0 +1,181 @@
# .gitea/workflows/manual-deploy-major.yml
#
# This workflow provides a MANUAL trigger to perform a MAJOR version bump
# and deploy the application to the PRODUCTION environment.
name: Manual - Deploy Major Version to Production
on:
workflow_dispatch:
inputs:
confirmation:
description: 'Type "deploy-major-to-prod" to confirm you want to deploy a new major version.'
required: true
default: 'do-not-run'
force_reload:
description: 'Force PM2 reload even if version matches (true/false).'
required: false
type: boolean
default: false
jobs:
deploy-production-major:
runs-on: projectium.com
steps:
- name: Verify Confirmation Phrase
run: |
if [ "${{ gitea.event.inputs.confirmation }}" != "deploy-major-to-prod" ]; then
echo "ERROR: Confirmation phrase did not match. Aborting deployment."
exit 1
fi
echo "✅ Confirmation accepted. Proceeding with major version production deployment."
- name: Checkout Code from 'main' branch
uses: actions/checkout@v3
with:
ref: 'main' # Explicitly check out the main branch for production deployment
fetch-depth: 0
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: '20'
cache: 'npm'
cache-dependency-path: '**/package-lock.json'
- name: Install Dependencies
run: npm ci
- name: Bump Major Version and Push
run: |
# Configure git for the commit.
git config --global user.name 'Gitea Actions'
git config --global user.email 'actions@gitea.projectium.com'
# Bump the major version number. This creates a new commit and a new tag.
# The commit message includes [skip ci] to prevent this push from triggering another workflow run.
npm version major -m "ci: Bump version to %s for major release [skip ci]"
# Push the new commit and the new tag back to the main branch.
git push --follow-tags
- name: Check for Production Database Schema Changes
env:
DB_HOST: ${{ secrets.DB_HOST }}
DB_USER: ${{ secrets.DB_USER }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
run: |
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
exit 1
fi
echo "--- Checking for production schema changes ---"
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
echo "Current Git Schema Hash: $CURRENT_HASH"
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
if [ -z "$DEPLOYED_HASH" ]; then
echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
echo "ERROR: Database schema mismatch detected! A manual database migration is required."
exit 1
else
echo "✅ Schema is up to date. No changes detected."
fi
- name: Build React Application for Production
run: |
if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
exit 1
fi
GITEA_SERVER_URL="https://gitea.projectium.com"
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
- name: Deploy Application to Production Server
run: |
echo "Deploying application files to /var/www/flyer-crawler.projectium.com..."
APP_PATH="/var/www/flyer-crawler.projectium.com"
mkdir -p "$APP_PATH"
mkdir -p "$APP_PATH/flyer-images/icons" "$APP_PATH/flyer-images/archive"
rsync -avz --delete --exclude 'node_modules' --exclude '.git' --exclude 'dist' --exclude 'flyer-images' ./ "$APP_PATH/"
rsync -avz dist/ "$APP_PATH"
echo "Application deployment complete."
- name: Install Backend Dependencies and Restart Production Server
env:
# --- Production Secrets Injection ---
DB_HOST: ${{ secrets.DB_HOST }}
DB_USER: ${{ secrets.DB_USER }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
REDIS_URL: 'redis://localhost:6379'
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
JWT_SECRET: ${{ secrets.JWT_SECRET }}
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
SMTP_HOST: 'localhost'
SMTP_PORT: '1025'
SMTP_SECURE: 'false'
SMTP_USER: ''
SMTP_PASS: ''
SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
run: |
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
exit 1
fi
echo "Installing production dependencies and restarting server..."
cd /var/www/flyer-crawler.projectium.com
npm install --omit=dev
# --- Version Check Logic ---
# Get the version from the newly deployed package.json
NEW_VERSION=$(node -p "require('./package.json').version")
echo "Deployed Package Version: $NEW_VERSION"
# Get the running version from PM2 for the main API process
# We use a small node script to parse the JSON output from pm2 jlist
RUNNING_VERSION=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.version : ''); } catch(e) { console.log(''); }")
echo "Running PM2 Version: $RUNNING_VERSION"
if [ "${{ gitea.event.inputs.force_reload }}" == "true" ] || [ "$NEW_VERSION" != "$RUNNING_VERSION" ] || [ -z "$RUNNING_VERSION" ]; then
if [ "${{ gitea.event.inputs.force_reload }}" == "true" ]; then
echo "Force reload triggered by manual input. Reloading PM2..."
else
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
fi
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
echo "Production backend server reloaded successfully."
else
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
fi
echo "Updating schema hash in production database..."
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
if [ "$CURRENT_HASH" = "$UPDATED_HASH" ]; then
echo "✅ Schema hash successfully updated in the database to: $UPDATED_HASH"
else
echo "ERROR: Failed to update schema hash in the database."
fi
- name: Show PM2 Environment for Production
run: |
echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
sleep 5
pm2 describe flyer-crawler-api || echo "Could not find production pm2 process."
pm2 logs flyer-crawler-api --lines 20 --nostream || echo "Could not find production pm2 process."
pm2 env flyer-crawler-api || echo "Could not find production pm2 process."
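
The reload logic in this workflow keys off PM2's recorded version: PM2 reads the app's package.json version at start time and exposes it in pm2 jlist output, so comparing it against the freshly deployed package.json tells us whether a reload is actually needed. The inline node -e parser is equivalent to a small script along these lines (process name from the workflow, the rest is a sketch):

// pm2-running-version.ts -- sketch of the inline `pm2 jlist` parser used above.
import { execSync } from 'child_process';

interface Pm2Process {
  name: string;
  pm2_env: { version?: string };
}

function runningVersion(appName: string): string {
  try {
    const list: Pm2Process[] = JSON.parse(execSync('pm2 jlist').toString());
    const app = list.find((p) => p.name === appName);
    return app?.pm2_env.version ?? '';
  } catch {
    return ''; // treat exec/parse failures as "not running", which forces a reload
  }
}

console.log(runningVersion('flyer-crawler-api'));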

View File

@@ -18,12 +18,70 @@ module.exports = {
NODE_ENV: 'production', // Set the Node.js environment to production
name: 'flyer-crawler-api',
cwd: '/var/www/flyer-crawler.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'development', // Use 'development' for test to enable more verbose logging if needed
NODE_ENV: 'test', // Set to 'test' to match the environment purpose and disable pino-pretty
name: 'flyer-crawler-api-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
// Development Environment Settings
env_development: {
NODE_ENV: 'development',
name: 'flyer-crawler-api-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
},
{
@@ -36,12 +94,70 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-worker',
cwd: '/var/www/flyer-crawler.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'development',
NODE_ENV: 'test',
name: 'flyer-crawler-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
// Development Environment Settings
env_development: {
NODE_ENV: 'development',
name: 'flyer-crawler-worker-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
},
{
@@ -54,12 +170,70 @@ module.exports = {
NODE_ENV: 'production',
name: 'flyer-crawler-analytics-worker',
cwd: '/var/www/flyer-crawler.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
// Test Environment Settings
env_test: {
NODE_ENV: 'development',
NODE_ENV: 'test',
name: 'flyer-crawler-analytics-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
// Development Environment Settings
env_development: {
NODE_ENV: 'development',
name: 'flyer-crawler-analytics-worker-dev',
watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
},
},
],
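
One observation on this file: the identical block of sixteen process.env pass-throughs is now repeated for every app and every environment, nine times in total. Since ecosystem.config.cjs is plain CommonJS, the block could be hoisted once and spread where needed. A possible refactor, sketched under the assumption that every app really does want the same variables (the script field is assumed; it is not shown in this diff):

// ecosystem.config.cjs -- sketch of hoisting the repeated env block.
const sharedEnv = {
  DB_HOST: process.env.DB_HOST,
  DB_USER: process.env.DB_USER,
  DB_PASSWORD: process.env.DB_PASSWORD,
  DB_NAME: process.env.DB_NAME,
  REDIS_URL: process.env.REDIS_URL,
  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  FRONTEND_URL: process.env.FRONTEND_URL,
  JWT_SECRET: process.env.JWT_SECRET,
  GEMINI_API_KEY: process.env.GEMINI_API_KEY,
  GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
  SMTP_HOST: process.env.SMTP_HOST,
  SMTP_PORT: process.env.SMTP_PORT,
  SMTP_SECURE: process.env.SMTP_SECURE,
  SMTP_USER: process.env.SMTP_USER,
  SMTP_PASS: process.env.SMTP_PASS,
  SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
};

module.exports = {
  apps: [
    {
      script: 'server/index.js', // assumed entry point
      env_production: {
        NODE_ENV: 'production',
        name: 'flyer-crawler-api',
        cwd: '/var/www/flyer-crawler.projectium.com',
        ...sharedEnv,
      },
      env_test: {
        NODE_ENV: 'test',
        name: 'flyer-crawler-api-test',
        cwd: '/var/www/flyer-crawler-test.projectium.com',
        ...sharedEnv,
      },
      // ...env_development and the two worker apps spread sharedEnv the same way
    },
  ],
};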

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "flyer-crawler",
"version": "0.0.25",
"version": "0.1.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "flyer-crawler",
"version": "0.0.25",
"version": "0.1.1",
"dependencies": {
"@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2",

View File

@@ -1,7 +1,7 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.0.25",
"version": "0.1.1",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",

View File

@@ -36,7 +36,7 @@ vi.mock('pdfjs-dist', () => ({
// Mock the new config module
vi.mock('./config', () => ({
default: {
app: { version: '1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
app: { version: '20250101-1200:abc1234:1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
google: { mapsEmbedApiKey: 'mock-key' },
},
}));
@@ -588,11 +588,11 @@ describe('App Component', () => {
// Mock the config module for this specific test
vi.mock('./config', () => ({
default: {
app: { version: '1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
app: { version: '20250101-1200:abc1234:1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
google: { mapsEmbedApiKey: 'mock-key' },
},
}));
localStorageMock.setItem('lastSeenVersion', '1.0.0');
localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:1.0.0');
renderApp();
await expect(screen.findByTestId('whats-new-modal-mock')).resolves.toBeInTheDocument();
});
@@ -741,7 +741,7 @@ describe('App Component', () => {
vi.mock('./config', () => ({
default: {
app: {
version: '2.0.0',
version: '20250101-1200:abc1234:2.0.0',
commitMessage: 'A new version!',
commitUrl: 'http://example.com/commit/2.0.0',
},
@@ -752,14 +752,14 @@ describe('App Component', () => {
it('should display the version number and commit link', () => {
renderApp();
const versionLink = screen.getByText(`Version: 2.0.0`);
const versionLink = screen.getByText(`Version: 20250101-1200:abc1234:2.0.0`);
expect(versionLink).toBeInTheDocument();
expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
});
it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {
// Pre-set the localStorage to prevent the modal from opening automatically
localStorageMock.setItem('lastSeenVersion', '2.0.0');
localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:2.0.0');
renderApp();
expect(screen.queryByTestId('whats-new-modal-mock')).not.toBeInTheDocument();

View File

@@ -353,10 +353,11 @@ describe('AI Routes (/api/ai)', () => {
expect(response.status).toBe(400);
});
it('should return 409 Conflict if flyer checksum already exists', async () => {
it('should return 409 Conflict and delete the uploaded file if flyer checksum already exists', async () => {
// Arrange
const mockExistingFlyer = createMockFlyer({ flyer_id: 99 });
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(mockExistingFlyer); // Duplicate found
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
// Act
const response = await supertest(app)
@@ -368,6 +369,10 @@ describe('AI Routes (/api/ai)', () => {
expect(response.status).toBe(409);
expect(response.body.message).toBe('This flyer has already been processed.');
expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled();
// Assert that the file was deleted
expect(unlinkSpy).toHaveBeenCalledTimes(1);
// The filename is predictable in the test environment because of the multer config in ai.routes.ts
expect(unlinkSpy).toHaveBeenCalledWith(expect.stringContaining('flyerImage-test-flyer-image.jpg'));
});
it('should accept payload when extractedData.items is missing and save with empty items', async () => {
@@ -530,6 +535,27 @@ describe('AI Routes (/api/ai)', () => {
const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
expect(flyerDataArg.store_name).toBe('Root Store');
});
it('should default item quantity to 1 if missing', async () => {
const payloadMissingQuantity = {
checksum: 'qty-checksum',
originalFileName: 'flyer-qty.jpg',
extractedData: {
store_name: 'Qty Store',
items: [{ name: 'Item without qty', price: 100 }],
},
};
const response = await supertest(app)
.post('/api/ai/flyers/process')
.field('data', JSON.stringify(payloadMissingQuantity))
.attach('flyerImage', imagePath);
expect(response.status).toBe(201);
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
const itemsArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][1];
expect(itemsArg[0].quantity).toBe(1);
});
});
describe('POST /check-flyer', () => {

View File

@@ -50,6 +50,15 @@ const errMsg = (e: unknown) => {
return String(e || 'An unknown error occurred.');
};
const cleanupUploadedFile = async (file?: Express.Multer.File) => {
if (!file) return;
try {
await fs.promises.unlink(file.path);
} catch (err) {
// Ignore cleanup errors (e.g. file already deleted)
}
};
const cropAreaObjectSchema = z.object({
x: z.number(),
y: z.number(),
@@ -185,7 +194,7 @@ router.use((req: Request, res: Response, next: NextFunction) => {
'[API /ai] Incoming request',
);
} catch (e: unknown) {
logger.error({ error: e }, 'Failed to log incoming AI request headers');
logger.error({ error: errMsg(e) }, 'Failed to log incoming AI request headers');
}
next();
});
@@ -316,7 +325,7 @@ router.post(
// Try several ways to obtain the payload so we are tolerant to client variations.
let parsed: FlyerProcessPayload = {};
let extractedData: Partial<ExtractedCoreData> = {};
let extractedData: Partial<ExtractedCoreData> | null | undefined = {};
try {
// If the client sent a top-level `data` field (stringified JSON), parse it.
if (req.body && (req.body.data || req.body.extractedData)) {
@@ -337,7 +346,7 @@ router.post(
) as FlyerProcessPayload;
}
// If parsed itself contains an `extractedData` field, use that, otherwise assume parsed is the extractedData
extractedData = parsed.extractedData ?? (parsed as Partial<ExtractedCoreData>);
extractedData = 'extractedData' in parsed ? parsed.extractedData : (parsed as Partial<ExtractedCoreData>);
} else {
// No explicit `data` field found. Attempt to interpret req.body as an object (Express may have parsed multipart fields differently).
try {
@@ -383,6 +392,12 @@ router.post(
// Pull common metadata fields (checksum, originalFileName) from whichever shape we parsed.
const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
if (!checksum) {
await cleanupUploadedFile(req.file);
return res.status(400).json({ message: 'Checksum is required.' });
}
const originalFileName =
parsed.originalFileName ?? parsed?.data?.originalFileName ?? req.file.originalname;
const userProfile = req.user as UserProfile | undefined;
@@ -409,6 +424,7 @@ router.post(
const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
...item,
master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
quantity: item.quantity ?? 1, // Default to 1 to satisfy DB constraint
view_count: 0,
click_count: 0,
updated_at: new Date().toISOString(),
@@ -429,6 +445,7 @@ router.post(
const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, req.log);
if (existingFlyer) {
logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${checksum}`);
await cleanupUploadedFile(req.file);
return res.status(409).json({ message: 'This flyer has already been processed.' });
}
@@ -476,6 +493,7 @@ router.post(
res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
} catch (error) {
await cleanupUploadedFile(req.file);
next(error);
}
},
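
A design note on the cleanup calls added in this route: the uploaded temp file must now be deleted on the missing-checksum 400, on the duplicate 409, and in the generic catch, which is three call sites a future early return could miss. One way to centralize it is a wrapper that always cleans up on failure; a sketch (names are hypothetical, and the statusCode >= 400 heuristic assumes success paths keep or move the file):

// with-upload-cleanup.ts -- hypothetical wrapper, not code from this PR.
import type { NextFunction, Request, Response } from 'express';
import * as fs from 'fs';

type UploadRequest = Request & { file?: Express.Multer.File };

const withUploadCleanup =
  (handler: (req: UploadRequest, res: Response) => Promise<void>) =>
  async (req: UploadRequest, res: Response, next: NextFunction) => {
    let failed = false;
    try {
      await handler(req, res);
    } catch (err) {
      failed = true;
      next(err);
    } finally {
      // Remove the multer temp file whenever the request did not succeed.
      if (req.file && (failed || res.statusCode >= 400)) {
        await fs.promises.unlink(req.file.path).catch(() => {});
      }
    }
  };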

View File

@@ -96,7 +96,8 @@ describe('Price Routes (/api/price-history)', () => {
.send({ masterItemIds: 'not-an-array' });
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toContain('Expected array, received string');
// The actual message is "Invalid input: expected array, received string"
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received string');
});
it('should return 400 if masterItemIds contains non-positive integers', async () => {
@@ -112,7 +113,8 @@ describe('Price Routes (/api/price-history)', () => {
const response = await supertest(app).post('/api/price-history').send({});
expect(response.status).toBe(400);
expect(response.body.errors[0].message).toBe('Required');
// The actual message is "Invalid input: expected array, received undefined"
expect(response.body.errors[0].message).toBe('Invalid input: expected array, received undefined');
});
it('should return 400 for invalid limit and offset', async () => {
@@ -122,8 +124,9 @@ describe('Price Routes (/api/price-history)', () => {
expect(response.status).toBe(400);
expect(response.body.errors).toHaveLength(2);
expect(response.body.errors[0].message).toBe('Number must be greater than 0');
expect(response.body.errors[1].message).toBe('Expected number, received string');
// The actual message is "Too small: expected number to be >0"
expect(response.body.errors[0].message).toBe('Too small: expected number to be >0');
expect(response.body.errors[1].message).toBe('Invalid input: expected number, received NaN');
});
});
});

View File

@@ -34,6 +34,9 @@ export const logger = pino({
'*.body.password',
'*.body.newPassword',
'*.body.currentPassword',
'*.body.confirmPassword',
'*.body.refreshToken',
'*.body.token',
],
censor: '[REDACTED]',
},
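
Context for the logger change: pino's redact option masks matching paths before a line is written, so the three new paths ensure confirmPassword, refreshToken, and token values never reach the logs. A minimal sketch of the mechanism (paths taken from this diff, the logged object is made up):

// Sketch: pino replaces matching values with the censor string at serialization time.
import pino from 'pino';

const logger = pino({
  redact: {
    paths: ['*.body.password', '*.body.refreshToken', '*.body.token'],
    censor: '[REDACTED]',
  },
});

logger.info({ req: { body: { token: 'abc123' } } }, 'login attempt');
// => {"req":{"body":{"token":"[REDACTED]"}},"msg":"login attempt",...}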

View File

@@ -56,15 +56,15 @@ describe('Price History API Integration Test (/api/price-history)', () => {
// 4. Create flyer items linking the master item to the flyers with prices
await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 199, '$1.99')`,
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 199, '$1.99', '1')`,
[flyerId1, masterItemId],
);
await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 249, '$2.49')`,
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 249, '$2.49', '1')`,
[flyerId2, masterItemId],
);
await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 299, '$2.99')`,
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 299, '$2.99', '1')`,
[flyerId3, masterItemId],
);
});

View File

@@ -59,7 +59,7 @@ describe('Zod Utilities', () => {
expect(result.success).toBe(false);
if (!result.success) {
// z.string() will throw its own error message before min(1) is checked.
expect(result.error.issues[0].message).toBe('Expected string, received number');
expect(result.error.issues[0].message).toBe('Invalid input: expected string, received number');
}
});
@@ -67,7 +67,7 @@ describe('Zod Utilities', () => {
const result = schema.safeParse({ a: 1 });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toBe('Expected string, received object');
expect(result.error.issues[0].message).toBe('Invalid input: expected string, received object');
}
});
});
@@ -95,7 +95,7 @@ describe('Zod Utilities', () => {
const result = schema.safeParse({ params: { id: 'abc' } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toContain('Expected number, received nan');
expect(result.error.issues[0].message).toBe('Invalid input: expected number, received NaN');
}
});
@@ -103,7 +103,7 @@ describe('Zod Utilities', () => {
const result = schema.safeParse({ params: { id: -1 } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toContain('Must be a number');
expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
}
});
@@ -111,7 +111,7 @@ describe('Zod Utilities', () => {
const result = schema.safeParse({ params: { id: 1.5 } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toContain('Must be a number');
expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
}
});
@@ -119,7 +119,7 @@ describe('Zod Utilities', () => {
const result = schema.safeParse({ params: { id: 0 } });
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.issues[0].message).toContain('Must be a number');
expect(result.error.issues[0].message).toBe("Invalid ID for parameter 'id'. Must be a number.");
}
});
@@ -224,7 +224,7 @@ describe('Zod Utilities', () => {
const floatResult = schema.safeParse('123.45');
expect(floatResult.success).toBe(false);
if (!floatResult.success) {
expect(floatResult.error.issues[0].message).toBe('Expected integer, received float');
expect(floatResult.error.issues[0].message).toBe('Invalid input: expected int, received number');
}
});
@@ -234,7 +234,7 @@ describe('Zod Utilities', () => {
const zeroResult = schema.safeParse('0');
expect(zeroResult.success).toBe(false);
if (!zeroResult.success) {
expect(zeroResult.error.issues[0].message).toBe('Number must be greater than 0');
expect(zeroResult.error.issues[0].message).toBe('Too small: expected number to be >0');
}
});
@@ -244,7 +244,7 @@ describe('Zod Utilities', () => {
const negativeResult = schema.safeParse('-1');
expect(negativeResult.success).toBe(false);
if (!negativeResult.success) {
expect(negativeResult.error.issues[0].message).toBe('Number must be greater than or equal to 0');
expect(negativeResult.error.issues[0].message).toBe('Too small: expected number to be >=0');
}
});
@@ -254,12 +254,12 @@ describe('Zod Utilities', () => {
const tooSmallResult = schema.safeParse('9');
expect(tooSmallResult.success).toBe(false);
if (!tooSmallResult.success) {
expect(tooSmallResult.error.issues[0].message).toBe('Number must be greater than or equal to 10');
expect(tooSmallResult.error.issues[0].message).toBe('Too small: expected number to be >=10');
}
const tooLargeResult = schema.safeParse('21');
expect(tooLargeResult.success).toBe(false);
if (!tooLargeResult.success) {
expect(tooLargeResult.error.issues[0].message).toBe('Number must be less than or equal to 20');
expect(tooLargeResult.error.issues[0].message).toBe('Too big: expected number to be <=20');
}
});
});
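
All of the assertion updates in these test files follow a single pattern: newer Zod releases rephrased the built-in error strings ("Expected array, received string" became "Invalid input: expected array, received string", "Number must be greater than 0" became "Too small: expected number to be >0", and so on). When in doubt, the quickest way to get the exact string for the installed version is to print it; a sketch:

// Print the built-in message for a failing constraint. The assertions above were
// updated to match whatever the installed Zod version emits here.
import { z } from 'zod';

const result = z.number().positive().safeParse(0);
if (!result.success) {
  console.log(result.error.issues[0].message); // e.g. "Too small: expected number to be >0"
}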

View File

@@ -59,18 +59,27 @@ export const optionalNumeric = (
nonnegative?: boolean;
} = {},
) => {
let schema = z.coerce.number();
let numberSchema = z.coerce.number();
if (options.integer) schema = schema.int();
if (options.positive) schema = schema.positive();
else if (options.nonnegative) schema = schema.nonnegative();
if (options.integer) numberSchema = numberSchema.int();
if (options.positive) numberSchema = numberSchema.positive();
else if (options.nonnegative) numberSchema = numberSchema.nonnegative();
if (options.min !== undefined) schema = schema.min(options.min);
if (options.max !== undefined) schema = schema.max(options.max);
if (options.min !== undefined) numberSchema = numberSchema.min(options.min);
if (options.max !== undefined) numberSchema = numberSchema.max(options.max);
if (options.default !== undefined) return schema.optional().default(options.default);
// Make the number schema optional *before* preprocessing. This allows it to correctly handle
// the `undefined` value that our preprocessor generates from `null`.
const optionalNumberSchema = numberSchema.optional();
return schema.optional();
// This is crucial because z.coerce.number() coerces null to 0, which bypasses
// the .optional() and .default() logic for null inputs. We want null to be
// treated as "not provided", just like undefined.
const schema = z.preprocess((val) => (val === null ? undefined : val), optionalNumberSchema);
if (options.default !== undefined) return schema.default(options.default);
return schema;
};
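
The behavioral point of this rewrite: Number(null) is 0, so z.coerce.number() turns a null input into a valid 0 that sails past .optional() and .default(). Preprocessing null to undefined makes null behave like "not provided". An illustrative usage, assuming optionalNumeric is exported from this module:

// Sketch: how the rewritten helper treats null vs. undefined vs. string input.
const limit = optionalNumeric({ integer: true, positive: true, default: 10 });

limit.parse(undefined); // 10, the default applies
limit.parse(null);      // 10, null is preprocessed to undefined so the default also applies
limit.parse('25');      // 25, coerced from the string
// Before this change, parse(null) coerced to 0 and then failed .positive()
// instead of falling back to the default.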
/**

vitest.config.e2e.ts (new file)
View File

@@ -0,0 +1,26 @@
import { defineConfig, mergeConfig } from 'vitest/config';
import integrationConfig from './vitest.config.integration';
const e2eConfig = mergeConfig(
integrationConfig,
defineConfig({
test: {
name: 'e2e',
// Point specifically to E2E tests
include: ['src/tests/e2e/**/*.e2e.test.ts'],
// Increase timeout for E2E flows that involve AI or full API chains
testTimeout: 120000,
coverage: {
reportsDirectory: '.coverage/e2e',
},
},
}),
);
// Explicitly override the include array to ensure we don't inherit integration tests
// (mergeConfig might concatenate arrays by default)
if (e2eConfig.test) {
e2eConfig.test.include = ['src/tests/e2e/**/*.e2e.test.ts'];
}
export default e2eConfig;
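
The explicit include override at the bottom exists because Vite's mergeConfig concatenates array fields rather than replacing them, so the integration config's include patterns would otherwise survive the merge and the E2E run would pick up integration tests too. A tiny check, sketched:

// Sketch: demonstrate that mergeConfig concatenates arrays, which is why the
// override above is needed.
import { defineConfig, mergeConfig } from 'vitest/config';

const base = defineConfig({ test: { include: ['src/tests/integration/**/*.test.ts'] } });
const merged = mergeConfig(
  base,
  defineConfig({ test: { include: ['src/tests/e2e/**/*.e2e.test.ts'] } }),
);

console.log(merged.test?.include);
// => ['src/tests/integration/**/*.test.ts', 'src/tests/e2e/**/*.e2e.test.ts']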