Compare commits
241 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d4557e13fb | ||
| 3e41130c69 | |||
|
|
d9034563d6 | ||
| 5836a75157 | |||
|
|
790008ae0d | ||
|
|
b5b91eb968 | ||
| 38eb810e7a | |||
|
|
458588a6e7 | ||
| 0b4113417f | |||
|
|
b59d2a9533 | ||
| 6740b35f8a | |||
|
|
92ad82a012 | ||
| 672e4ca597 | |||
|
|
e4d70a9b37 | ||
| c30f1c4162 | |||
|
|
44062a9f5b | ||
| 17fac8cf86 | |||
|
|
9fa8553486 | ||
|
|
f5b0b3b543 | ||
| e3ed5c7e63 | |||
|
|
ae0040e092 | ||
| 1f3f99d430 | |||
|
|
7be72f1758 | ||
| 0967c7a33d | |||
| 1f1c0fa6f3 | |||
|
|
728b1a20d3 | ||
| f248f7cbd0 | |||
|
|
0ad9bb16c2 | ||
| 510787bc5b | |||
|
|
9f696e7676 | ||
|
|
a77105316f | ||
| cadacb63f5 | |||
|
|
62592f707e | ||
| 023e48d99a | |||
|
|
99efca0371 | ||
| 1448950b81 | |||
|
|
a811fdac63 | ||
| 1201fe4d3c | |||
|
|
ba9228c9cb | ||
| b392b82c25 | |||
|
|
87825d13d6 | ||
| 21a6a796cf | |||
|
|
ecd0a73bc8 | ||
|
|
39d61dc7ad | ||
|
|
43491359d9 | ||
| 5ed2cea7e9 | |||
|
|
cbb16a8d52 | ||
| 70e94a6ce0 | |||
|
|
b61a00003a | ||
| 52dba6f890 | |||
| 4242678aab | |||
|
|
b2e086d5ba | ||
| 07a9787570 | |||
|
|
4bf5dc3d58 | ||
| be3d269928 | |||
|
|
80a53fae94 | ||
| e15d2b6c2f | |||
|
|
7a52bf499e | ||
| 2489ec8d2d | |||
|
|
4a4f349805 | ||
| 517a268307 | |||
|
|
a94b2a97b1 | ||
| 542cdfbb82 | |||
|
|
262062f468 | ||
| 0a14193371 | |||
|
|
7f665f5117 | ||
| 2782a8fb3b | |||
|
|
c182ef6d30 | ||
| fdb3b76cbd | |||
|
|
01e7c843cb | ||
| a0dbefbfa0 | |||
|
|
ab3fc318a0 | ||
| e658b35e43 | |||
|
|
67e106162a | ||
| b7f3182fd6 | |||
|
|
ac60072d88 | ||
| 9390f38bf6 | |||
|
|
236d5518c9 | ||
| fd52a79a72 | |||
|
|
f72819e343 | ||
| 1af8be3f15 | |||
|
|
28d03f4e21 | ||
| 2e72ee81dd | |||
|
|
ba67ace190 | ||
|
|
50782c30e5 | ||
| 4a2ff8afc5 | |||
|
|
7a1c14ce89 | ||
| 6fafc3d089 | |||
|
|
4316866bce | ||
| 356c1a1894 | |||
|
|
2a310648ca | ||
| 8592633c22 | |||
|
|
0a9cdb8709 | ||
| 0d21e098f8 | |||
| b6799ed167 | |||
|
|
be5bda169e | ||
| 4ede403356 | |||
| 5d31605b80 | |||
| ddd4ad024e | |||
|
|
4e927f48bd | ||
| af5644d17a | |||
|
|
016c0a883a | ||
| c6a5f889b4 | |||
|
|
c895ecdb28 | ||
| 05e3f8a61c | |||
|
|
f79a2abc65 | ||
| a726c270bb | |||
|
|
8a4965c45b | ||
| 93497bf7c7 | |||
|
|
20584af729 | ||
| be9f452656 | |||
| ef4b8e58fe | |||
|
|
a42f7d7007 | ||
| 768d02b9ed | |||
|
|
c4742959e4 | ||
| 97c54c0c5c | |||
| 7cc50907d1 | |||
|
|
b4199f7c48 | ||
| dda36f7bc5 | |||
| 27810bbb36 | |||
|
|
7a1421d5c2 | ||
| 1b52478f97 | |||
| fe8b000737 | |||
|
|
d2babbe3b0 | ||
|
|
684d81db2a | ||
| 59ffa65562 | |||
| 0c0dd852ac | |||
|
|
cde766872e | ||
| 604b543c12 | |||
| fd67fe2941 | |||
|
|
582035b60e | ||
| 44e7670a89 | |||
| 2abfb3ed6e | |||
|
|
219de4a25c | ||
| 1540d5051f | |||
| 9c978c26fa | |||
|
|
adb109d8e9 | ||
| c668c8785f | |||
|
|
695bbb61b9 | ||
| 877c971833 | |||
| ed3af07aab | |||
|
|
dd4b34edfa | ||
| 91fa2f0516 | |||
|
|
aefd57e57b | ||
| 2ca4eb47ac | |||
| a4fe30da22 | |||
|
|
abab7fd25e | ||
| 53dd26d2d9 | |||
| ab3da0336c | |||
|
|
ed6d6349a2 | ||
| d4db2a709a | |||
| 508583809b | |||
|
|
6b1f7e7590 | ||
| 07bb31f4fb | |||
| a42fb76da8 | |||
|
|
08c320423c | ||
| d2498065ed | |||
| 56dc96f418 | |||
|
|
4e9aa0efc3 | ||
| e5e4b1316c | |||
| e8d511b4de | |||
|
|
c4bbf5c251 | ||
| 32a9e6732b | |||
| e7c076e2ed | |||
|
|
dbe8e72efe | ||
| 38bd193042 | |||
|
|
57215e2778 | ||
| 2c1de24e9a | |||
| c8baff7aac | |||
| de3f21a7ec | |||
|
|
c6adbf79e7 | ||
| 7399a27600 | |||
|
|
68aadcaa4e | ||
| 971d2c3fa7 | |||
|
|
daaacfde5e | ||
| 7ac8fe1d29 | |||
| a2462dfb6b | |||
|
|
a911224fb4 | ||
|
|
bf4bcef890 | ||
| ac6cd2e0a1 | |||
| eea03880c1 | |||
|
|
7fc263691f | ||
| c0912d36d5 | |||
| 612c2b5943 | |||
|
|
8e787ddcf0 | ||
| 11c52d284c | |||
|
|
b528bd3651 | ||
| 4c5ceb1bd6 | |||
| bcc4ad64dc | |||
|
|
d520980322 | ||
| d79955aaa0 | |||
| e66027dc8e | |||
|
|
027df989a4 | ||
| d4d69caaf7 | |||
| 03b5af39e1 | |||
|
|
8a86333f86 | ||
| f173f805ea | |||
| d3b0996ad5 | |||
|
|
b939262f0c | ||
| 9437f3d6c6 | |||
| f1e028d498 | |||
|
|
5274650aea | ||
| de5a9a565b | |||
| 10a379c5e3 | |||
| a6a484d432 | |||
|
|
4b0a172c35 | ||
| e8c894d5cf | |||
| 6c8fd4b126 | |||
|
|
a1f52544d0 | ||
| 2334359756 | |||
| 406954ca06 | |||
|
|
95d441be98 | ||
| 186ed484b7 | |||
|
|
3669958e9d | ||
| 5f3daf0539 | |||
| ae7afaaf97 | |||
|
|
3ae7b9e0d4 | ||
| 921c48fc57 | |||
|
|
2571864b91 | ||
| 065d0c746a | |||
| 395f6c21a2 | |||
|
|
aec56dfc23 | ||
| a12a0e5207 | |||
| e337bd67b1 | |||
|
|
a8f5b4e51a | ||
| d0ce8021d6 | |||
| efbb162880 | |||
|
|
e353ce8a81 | ||
| b5cbf271b8 | |||
|
|
2041b4ac3c | ||
| e547363a65 | |||
| bddaf765fc | |||
|
|
3c0bebb65c | ||
| 265cc3ffd4 | |||
| 3d5767b60b | |||
|
|
e9cb45efe0 | ||
| 99a57f3a30 | |||
| e46f5eb7f6 | |||
|
|
034887069c | ||
| 84b5e0e15e | |||
| dc0f774699 |
18
.devcontainer/devcontainer.json
Normal file
18
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"name": "Flyer Crawler Dev (Ubuntu 22.04)",
|
||||||
|
"dockerComposeFile": ["../compose.dev.yml"],
|
||||||
|
"service": "app",
|
||||||
|
"workspaceFolder": "/app",
|
||||||
|
"customizations": {
|
||||||
|
"vscode": {
|
||||||
|
"extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"remoteUser": "root",
|
||||||
|
// Automatically install dependencies when the container is created.
|
||||||
|
// This runs inside the container, populating the isolated node_modules volume.
|
||||||
|
"postCreateCommand": "npm install",
|
||||||
|
"postAttachCommand": "npm run dev:container",
|
||||||
|
// Try to start podman machine, but exit with success (0) even if it's already running
|
||||||
|
"initializeCommand": "powershell -Command \"podman machine start; exit 0\""
|
||||||
|
}
|
||||||
@@ -47,6 +47,19 @@ jobs:
|
|||||||
- name: Install Dependencies
|
- name: Install Dependencies
|
||||||
run: npm ci
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Bump Minor Version and Push
|
||||||
|
run: |
|
||||||
|
# Configure git for the commit.
|
||||||
|
git config --global user.name 'Gitea Actions'
|
||||||
|
git config --global user.email 'actions@gitea.projectium.com'
|
||||||
|
|
||||||
|
# Bump the minor version number. This creates a new commit and a new tag.
|
||||||
|
# The commit message includes [skip ci] to prevent this push from triggering another workflow run.
|
||||||
|
npm version minor -m "ci: Bump version to %s for production release [skip ci]"
|
||||||
|
|
||||||
|
# Push the new commit and the new tag back to the main branch.
|
||||||
|
git push --follow-tags
|
||||||
|
|
||||||
- name: Check for Production Database Schema Changes
|
- name: Check for Production Database Schema Changes
|
||||||
env:
|
env:
|
||||||
DB_HOST: ${{ secrets.DB_HOST }}
|
DB_HOST: ${{ secrets.DB_HOST }}
|
||||||
@@ -61,9 +74,10 @@ jobs:
|
|||||||
echo "--- Checking for production schema changes ---"
|
echo "--- Checking for production schema changes ---"
|
||||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
echo "Current Git Schema Hash: $CURRENT_HASH"
|
echo "Current Git Schema Hash: $CURRENT_HASH"
|
||||||
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A || echo "none")
|
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
|
||||||
|
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
||||||
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
|
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
|
||||||
if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
|
if [ -z "$DEPLOYED_HASH" ]; then
|
||||||
echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
|
echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
|
||||||
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
||||||
echo "ERROR: Database schema mismatch detected! A manual database migration is required."
|
echo "ERROR: Database schema mismatch detected! A manual database migration is required."
|
||||||
@@ -79,8 +93,9 @@ jobs:
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
GITEA_SERVER_URL="https://gitea.projectium.com"
|
GITEA_SERVER_URL="https://gitea.projectium.com"
|
||||||
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
|
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
|
||||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
|
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||||
|
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||||
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||||
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||||
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
||||||
@@ -123,6 +138,10 @@ jobs:
|
|||||||
cd /var/www/flyer-crawler.projectium.com
|
cd /var/www/flyer-crawler.projectium.com
|
||||||
npm install --omit=dev
|
npm install --omit=dev
|
||||||
|
|
||||||
|
# --- Cleanup Errored Processes ---
|
||||||
|
echo "Cleaning up errored or stopped PM2 processes..."
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
|
||||||
|
|
||||||
# --- Version Check Logic ---
|
# --- Version Check Logic ---
|
||||||
# Get the version from the newly deployed package.json
|
# Get the version from the newly deployed package.json
|
||||||
NEW_VERSION=$(node -p "require('./package.json').version")
|
NEW_VERSION=$(node -p "require('./package.json').version")
|
||||||
@@ -139,7 +158,7 @@ jobs:
|
|||||||
else
|
else
|
||||||
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
||||||
fi
|
fi
|
||||||
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
|
||||||
echo "Production backend server reloaded successfully."
|
echo "Production backend server reloaded successfully."
|
||||||
else
|
else
|
||||||
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
||||||
@@ -148,7 +167,12 @@ jobs:
|
|||||||
echo "Updating schema hash in production database..."
|
echo "Updating schema hash in production database..."
|
||||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
||||||
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
|
"CREATE TABLE IF NOT EXISTS public.schema_info (
|
||||||
|
environment VARCHAR(50) PRIMARY KEY,
|
||||||
|
schema_hash VARCHAR(64) NOT NULL,
|
||||||
|
deployed_at TIMESTAMP DEFAULT NOW()
|
||||||
|
);
|
||||||
|
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
|
||||||
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
||||||
|
|
||||||
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
||||||
@@ -161,7 +185,17 @@ jobs:
|
|||||||
- name: Show PM2 Environment for Production
|
- name: Show PM2 Environment for Production
|
||||||
run: |
|
run: |
|
||||||
echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
|
echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
|
||||||
sleep 5
|
sleep 5 # Wait a few seconds for the app to start and log its output.
|
||||||
pm2 describe flyer-crawler-api || echo "Could not find production pm2 process."
|
|
||||||
pm2 logs flyer-crawler-api --lines 20 --nostream || echo "Could not find production pm2 process."
|
# Resolve the PM2 ID dynamically to ensure we target the correct process
|
||||||
pm2 env flyer-crawler-api || echo "Could not find production pm2 process."
|
PM2_ID=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.pm_id : ''); } catch(e) { console.log(''); }")
|
||||||
|
|
||||||
|
if [ -n "$PM2_ID" ]; then
|
||||||
|
echo "Found process ID: $PM2_ID"
|
||||||
|
pm2 describe "$PM2_ID" || echo "Failed to describe process $PM2_ID"
|
||||||
|
pm2 logs "$PM2_ID" --lines 20 --nostream || echo "Failed to get logs for $PM2_ID"
|
||||||
|
pm2 env "$PM2_ID" || echo "Failed to get env for $PM2_ID"
|
||||||
|
else
|
||||||
|
echo "Could not find process 'flyer-crawler-api' in pm2 list."
|
||||||
|
pm2 list # Fallback to listing everything to help debug
|
||||||
|
fi
|
||||||
|
|||||||
@@ -90,10 +90,11 @@ jobs:
|
|||||||
# integration test suite can launch its own, fresh server instance.
|
# integration test suite can launch its own, fresh server instance.
|
||||||
# '|| true' ensures the workflow doesn't fail if the process isn't running.
|
# '|| true' ensures the workflow doesn't fail if the process isn't running.
|
||||||
run: |
|
run: |
|
||||||
pm2 stop flyer-crawler-api-test || true
|
echo "--- Stopping and deleting all test processes ---"
|
||||||
pm2 stop flyer-crawler-worker-test || true
|
# Use a script to parse pm2's JSON output and delete any process whose name ends with '-test'.
|
||||||
pm2 delete flyer-crawler-api-test || true
|
# This is safer than 'pm2 delete all' and more robust than naming each process individually.
|
||||||
pm2 delete flyer-crawler-worker-test || true
|
# It prevents the accumulation of duplicate processes from previous test runs.
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.name && p.name.endsWith('-test')) { console.log('Deleting test process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id, e.message); } } }); console.log('✅ Test process cleanup complete.'); } catch (e) { if (e.stdout.toString().includes('No process found')) { console.log('No PM2 processes running, cleanup not needed.'); } else { console.error('Error cleaning up test processes:', e.message); } }" || true
|
||||||
|
|
||||||
- name: Run All Tests and Generate Merged Coverage Report
|
- name: Run All Tests and Generate Merged Coverage Report
|
||||||
# This single step runs both unit and integration tests, then merges their
|
# This single step runs both unit and integration tests, then merges their
|
||||||
@@ -119,9 +120,14 @@ jobs:
|
|||||||
# --- JWT Secret for Passport authentication in tests ---
|
# --- JWT Secret for Passport authentication in tests ---
|
||||||
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||||
|
|
||||||
|
# --- V8 Coverage for Server Process ---
|
||||||
|
# This variable tells the Node.js process (our server, started by globalSetup)
|
||||||
|
# where to output its raw V8 coverage data.
|
||||||
|
NODE_V8_COVERAGE: '.coverage/tmp/integration-server'
|
||||||
|
|
||||||
# --- Increase Node.js memory limit to prevent heap out of memory errors ---
|
# --- Increase Node.js memory limit to prevent heap out of memory errors ---
|
||||||
# This is crucial for memory-intensive tasks like running tests and coverage.
|
# This is crucial for memory-intensive tasks like running tests and coverage.
|
||||||
NODE_OPTIONS: '--max-old-space-size=8192'
|
NODE_OPTIONS: '--max-old-space-size=8192 --trace-warnings --unhandled-rejections=strict'
|
||||||
|
|
||||||
run: |
|
run: |
|
||||||
# Fail-fast check to ensure secrets are configured in Gitea for testing.
|
# Fail-fast check to ensure secrets are configured in Gitea for testing.
|
||||||
@@ -136,10 +142,49 @@ jobs:
|
|||||||
# Run unit and integration tests as separate steps.
|
# Run unit and integration tests as separate steps.
|
||||||
# The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
|
# The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
|
||||||
echo "--- Running Unit Tests ---"
|
echo "--- Running Unit Tests ---"
|
||||||
npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
# npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||||
|
npm run test:unit -- --coverage \
|
||||||
|
--coverage.exclude='**/*.test.ts' \
|
||||||
|
--coverage.exclude='**/tests/**' \
|
||||||
|
--coverage.exclude='**/mocks/**' \
|
||||||
|
--coverage.exclude='src/components/icons/**' \
|
||||||
|
--coverage.exclude='src/db/**' \
|
||||||
|
--coverage.exclude='src/lib/**' \
|
||||||
|
--coverage.exclude='src/types/**' \
|
||||||
|
--coverage.exclude='**/index.tsx' \
|
||||||
|
--coverage.exclude='**/vite-env.d.ts' \
|
||||||
|
--coverage.exclude='**/vitest.setup.ts' \
|
||||||
|
--reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
|
||||||
|
|
||||||
echo "--- Running Integration Tests ---"
|
echo "--- Running Integration Tests ---"
|
||||||
npm run test:integration -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
npm run test:integration -- --coverage \
|
||||||
|
--coverage.exclude='**/*.test.ts' \
|
||||||
|
--coverage.exclude='**/tests/**' \
|
||||||
|
--coverage.exclude='**/mocks/**' \
|
||||||
|
--coverage.exclude='src/components/icons/**' \
|
||||||
|
--coverage.exclude='src/db/**' \
|
||||||
|
--coverage.exclude='src/lib/**' \
|
||||||
|
--coverage.exclude='src/types/**' \
|
||||||
|
--coverage.exclude='**/index.tsx' \
|
||||||
|
--coverage.exclude='**/vite-env.d.ts' \
|
||||||
|
--coverage.exclude='**/vitest.setup.ts' \
|
||||||
|
--reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
|
||||||
|
|
||||||
|
echo "--- Running E2E Tests ---"
|
||||||
|
# Run E2E tests using the dedicated E2E config which inherits from integration config.
|
||||||
|
# We still pass --coverage to enable it, but directory and timeout are now in the config.
|
||||||
|
npx vitest run --config vitest.config.e2e.ts --coverage \
|
||||||
|
--coverage.exclude='**/*.test.ts' \
|
||||||
|
--coverage.exclude='**/tests/**' \
|
||||||
|
--coverage.exclude='**/mocks/**' \
|
||||||
|
--coverage.exclude='src/components/icons/**' \
|
||||||
|
--coverage.exclude='src/db/**' \
|
||||||
|
--coverage.exclude='src/lib/**' \
|
||||||
|
--coverage.exclude='src/types/**' \
|
||||||
|
--coverage.exclude='**/index.tsx' \
|
||||||
|
--coverage.exclude='**/vite-env.d.ts' \
|
||||||
|
--coverage.exclude='**/vitest.setup.ts' \
|
||||||
|
--reporter=verbose --no-file-parallelism || true
|
||||||
|
|
||||||
# Re-enable secret masking for subsequent steps.
|
# Re-enable secret masking for subsequent steps.
|
||||||
echo "::secret-masking::"
|
echo "::secret-masking::"
|
||||||
@@ -155,6 +200,7 @@ jobs:
|
|||||||
echo "Checking for source coverage files..."
|
echo "Checking for source coverage files..."
|
||||||
ls -l .coverage/unit/coverage-final.json
|
ls -l .coverage/unit/coverage-final.json
|
||||||
ls -l .coverage/integration/coverage-final.json
|
ls -l .coverage/integration/coverage-final.json
|
||||||
|
ls -l .coverage/e2e/coverage-final.json || echo "E2E coverage file not found"
|
||||||
|
|
||||||
# --- V8 Coverage Processing for Backend Server ---
|
# --- V8 Coverage Processing for Backend Server ---
|
||||||
# The integration tests start the server, which generates raw V8 coverage data.
|
# The integration tests start the server, which generates raw V8 coverage data.
|
||||||
@@ -167,7 +213,7 @@ jobs:
|
|||||||
# Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
|
# Run c8: read raw files from the temp dir, and output an Istanbul JSON report.
|
||||||
# We only generate the 'json' report here because it's all nyc needs for merging.
|
# We only generate the 'json' report here because it's all nyc needs for merging.
|
||||||
echo "Server coverage report about to be generated..."
|
echo "Server coverage report about to be generated..."
|
||||||
npx c8 report --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
|
npx c8 report --exclude='**/*.test.ts' --exclude='**/tests/**' --exclude='**/mocks/**' --reporter=json --temp-directory .coverage/tmp/integration-server --reports-dir .coverage/integration-server
|
||||||
echo "Server coverage report generated. Verifying existence:"
|
echo "Server coverage report generated. Verifying existence:"
|
||||||
ls -l .coverage/integration-server/coverage-final.json
|
ls -l .coverage/integration-server/coverage-final.json
|
||||||
|
|
||||||
@@ -186,6 +232,7 @@ jobs:
|
|||||||
# We give them unique names to be safe, though it's not strictly necessary.
|
# We give them unique names to be safe, though it's not strictly necessary.
|
||||||
cp .coverage/unit/coverage-final.json "$NYC_SOURCE_DIR/unit-coverage.json"
|
cp .coverage/unit/coverage-final.json "$NYC_SOURCE_DIR/unit-coverage.json"
|
||||||
cp .coverage/integration/coverage-final.json "$NYC_SOURCE_DIR/integration-coverage.json"
|
cp .coverage/integration/coverage-final.json "$NYC_SOURCE_DIR/integration-coverage.json"
|
||||||
|
cp .coverage/e2e/coverage-final.json "$NYC_SOURCE_DIR/e2e-coverage.json" || echo "E2E coverage file not found, skipping."
|
||||||
# This file might not exist if integration tests fail early, so we add `|| true`
|
# This file might not exist if integration tests fail early, so we add `|| true`
|
||||||
cp .coverage/integration-server/coverage-final.json "$NYC_SOURCE_DIR/integration-server-coverage.json" || echo "Server coverage file not found, skipping."
|
cp .coverage/integration-server/coverage-final.json "$NYC_SOURCE_DIR/integration-server-coverage.json" || echo "Server coverage file not found, skipping."
|
||||||
echo "Copied coverage files to source directory. Contents:"
|
echo "Copied coverage files to source directory. Contents:"
|
||||||
@@ -205,7 +252,13 @@ jobs:
|
|||||||
--reporter=text \
|
--reporter=text \
|
||||||
--reporter=html \
|
--reporter=html \
|
||||||
--report-dir .coverage/ \
|
--report-dir .coverage/ \
|
||||||
--temp-dir "$NYC_SOURCE_DIR"
|
--temp-dir "$NYC_SOURCE_DIR" \
|
||||||
|
--exclude "**/*.test.ts" \
|
||||||
|
--exclude "**/tests/**" \
|
||||||
|
--exclude "**/mocks/**" \
|
||||||
|
--exclude "**/index.tsx" \
|
||||||
|
--exclude "**/vite-env.d.ts" \
|
||||||
|
--exclude "**/vitest.setup.ts"
|
||||||
|
|
||||||
# Re-enable secret masking for subsequent steps.
|
# Re-enable secret masking for subsequent steps.
|
||||||
echo "::secret-masking::"
|
echo "::secret-masking::"
|
||||||
@@ -218,16 +271,6 @@ jobs:
|
|||||||
if: always() # This step runs even if the previous test or coverage steps failed.
|
if: always() # This step runs even if the previous test or coverage steps failed.
|
||||||
run: echo "Skipping test artifact cleanup on runner; this is handled on the server."
|
run: echo "Skipping test artifact cleanup on runner; this is handled on the server."
|
||||||
|
|
||||||
- name: Deploy Coverage Report to Public URL
|
|
||||||
if: always()
|
|
||||||
run: |
|
|
||||||
TARGET_DIR="/var/www/flyer-crawler-test.projectium.com/coverage"
|
|
||||||
echo "Deploying HTML coverage report to $TARGET_DIR..."
|
|
||||||
mkdir -p "$TARGET_DIR"
|
|
||||||
rm -rf "$TARGET_DIR"/*
|
|
||||||
cp -r .coverage/* "$TARGET_DIR/"
|
|
||||||
echo "✅ Coverage report deployed to https://flyer-crawler-test.projectium.com/coverage"
|
|
||||||
|
|
||||||
- name: Archive Code Coverage Report
|
- name: Archive Code Coverage Report
|
||||||
# This action saves the generated HTML coverage report as a downloadable artifact.
|
# This action saves the generated HTML coverage report as a downloadable artifact.
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v3
|
||||||
@@ -256,18 +299,19 @@ jobs:
|
|||||||
# We normalize line endings to ensure the hash is consistent across different OS environments.
|
# We normalize line endings to ensure the hash is consistent across different OS environments.
|
||||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
echo "Current Git Schema Hash: $CURRENT_HASH"
|
echo "Current Git Schema Hash: $CURRENT_HASH"
|
||||||
|
|
||||||
# Query the production database to get the hash of the deployed schema.
|
# Query the production database to get the hash of the deployed schema.
|
||||||
# The `psql` command requires PGPASSWORD to be set.
|
# The `psql` command requires PGPASSWORD to be set.
|
||||||
# `\t` sets tuples-only mode and `\A` unaligns output to get just the raw value.
|
# `\t` sets tuples-only mode and `\A` unaligns output to get just the raw value.
|
||||||
# The `|| echo "none"` ensures the command doesn't fail if the table or row doesn't exist yet.
|
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
|
||||||
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A || echo "none")
|
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'test';" -t -A)
|
||||||
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
|
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
|
||||||
|
|
||||||
# Check if the hash is "none" (command failed) OR if it's an empty string (table exists but is empty).
|
# Check if the hash is "none" (command failed) OR if it's an empty string (table exists but is empty).
|
||||||
if [ "$DEPLOYED_HASH" = "none" ] || [ -z "$DEPLOYED_HASH" ]; then
|
if [ -z "$DEPLOYED_HASH" ]; then
|
||||||
echo "WARNING: No schema hash found in the test database."
|
echo "WARNING: No schema hash found in the test database."
|
||||||
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
|
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
|
||||||
|
echo "--- Debug: Dumping schema_info table ---"
|
||||||
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -P pager=off -c "SELECT * FROM public.schema_info;" || true
|
||||||
|
echo "----------------------------------------"
|
||||||
# We allow the deployment to continue, but a manual schema update is required.
|
# We allow the deployment to continue, but a manual schema update is required.
|
||||||
# You could choose to fail here by adding `exit 1`.
|
# You could choose to fail here by adding `exit 1`.
|
||||||
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
||||||
@@ -291,8 +335,9 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
|
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
|
||||||
COMMIT_MESSAGE=$(git log -1 --pretty=%s)
|
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
|
||||||
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \
|
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||||
|
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||||
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||||
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||||
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
|
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
|
||||||
@@ -315,6 +360,17 @@ jobs:
|
|||||||
rsync -avz dist/ "$APP_PATH"
|
rsync -avz dist/ "$APP_PATH"
|
||||||
echo "Application deployment complete."
|
echo "Application deployment complete."
|
||||||
|
|
||||||
|
- name: Deploy Coverage Report to Public URL
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
TARGET_DIR="/var/www/flyer-crawler-test.projectium.com/coverage"
|
||||||
|
echo "Deploying HTML coverage report to $TARGET_DIR..."
|
||||||
|
mkdir -p "$TARGET_DIR"
|
||||||
|
rm -rf "$TARGET_DIR"/*
|
||||||
|
# The merged nyc report is generated in the .coverage directory. We copy its contents.
|
||||||
|
cp -r .coverage/* "$TARGET_DIR/"
|
||||||
|
echo "✅ Coverage report deployed to https://flyer-crawler-test.projectium.com/coverage"
|
||||||
|
|
||||||
- name: Install Backend Dependencies and Restart Test Server
|
- name: Install Backend Dependencies and Restart Test Server
|
||||||
env:
|
env:
|
||||||
# --- Test Secrets Injection ---
|
# --- Test Secrets Injection ---
|
||||||
@@ -333,7 +389,7 @@ jobs:
|
|||||||
|
|
||||||
# Application Secrets
|
# Application Secrets
|
||||||
FRONTEND_URL: 'https://flyer-crawler-test.projectium.com'
|
FRONTEND_URL: 'https://flyer-crawler-test.projectium.com'
|
||||||
JWT_SECRET: ${{ secrets.JWT_SECRET_TEST }}
|
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||||
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }}
|
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }}
|
||||||
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
||||||
|
|
||||||
@@ -347,18 +403,30 @@ jobs:
|
|||||||
|
|
||||||
run: |
|
run: |
|
||||||
# Fail-fast check to ensure secrets are configured in Gitea.
|
# Fail-fast check to ensure secrets are configured in Gitea.
|
||||||
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
MISSING_SECRETS=""
|
||||||
echo "ERROR: One or more test database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_TEST) are not set in Gitea repository settings."
|
if [ -z "$DB_HOST" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_HOST"; fi
|
||||||
|
if [ -z "$DB_USER" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_USER"; fi
|
||||||
|
if [ -z "$DB_PASSWORD" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_PASSWORD"; fi
|
||||||
|
if [ -z "$DB_NAME" ]; then MISSING_SECRETS="${MISSING_SECRETS} DB_NAME"; fi
|
||||||
|
if [ -z "$JWT_SECRET" ]; then MISSING_SECRETS="${MISSING_SECRETS} JWT_SECRET"; fi
|
||||||
|
|
||||||
|
if [ ! -z "$MISSING_SECRETS" ]; then
|
||||||
|
echo "ERROR: The following required secrets are missing in Gitea:${MISSING_SECRETS}"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "Installing production dependencies and restarting test server..."
|
echo "Installing production dependencies and restarting test server..."
|
||||||
cd /var/www/flyer-crawler-test.projectium.com
|
cd /var/www/flyer-crawler-test.projectium.com
|
||||||
npm install --omit=dev # Install only production dependencies
|
npm install --omit=dev
|
||||||
|
|
||||||
|
# --- Cleanup Errored Processes ---
|
||||||
|
echo "Cleaning up errored or stopped PM2 processes..."
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
|
||||||
|
|
||||||
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
|
# Use `startOrReload` with the ecosystem file. This is the standard, idempotent way to deploy.
|
||||||
# It will START the process if it's not running, or RELOAD it if it is.
|
# It will START the process if it's not running, or RELOAD it if it is.
|
||||||
# We also add `&& pm2 save` to persist the process list across server reboots.
|
# We also add `&& pm2 save` to persist the process list across server reboots.
|
||||||
pm2 startOrReload ecosystem.config.cjs --env test && pm2 save
|
pm2 startOrReload ecosystem.config.cjs --env test --update-env && pm2 save
|
||||||
echo "Test backend server reloaded successfully."
|
echo "Test backend server reloaded successfully."
|
||||||
|
|
||||||
# After a successful deployment, update the schema hash in the database.
|
# After a successful deployment, update the schema hash in the database.
|
||||||
@@ -366,7 +434,12 @@ jobs:
|
|||||||
echo "Updating schema hash in test database..."
|
echo "Updating schema hash in test database..."
|
||||||
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
||||||
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
|
"CREATE TABLE IF NOT EXISTS public.schema_info (
|
||||||
|
environment VARCHAR(50) PRIMARY KEY,
|
||||||
|
schema_hash VARCHAR(64) NOT NULL,
|
||||||
|
deployed_at TIMESTAMP DEFAULT NOW()
|
||||||
|
);
|
||||||
|
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
|
||||||
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
||||||
|
|
||||||
# Verify the hash was updated
|
# Verify the hash was updated
|
||||||
@@ -388,7 +461,17 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
echo "--- Displaying recent PM2 logs for flyer-crawler-api-test ---"
|
echo "--- Displaying recent PM2 logs for flyer-crawler-api-test ---"
|
||||||
# After a reload, the server restarts. We'll show the last 20 lines of the log to see the startup messages.
|
# After a reload, the server restarts. We'll show the last 20 lines of the log to see the startup messages.
|
||||||
sleep 5 # Wait a few seconds for the app to start and log its output.
|
sleep 5
|
||||||
pm2 describe flyer-crawler-api-test || echo "Could not find test pm2 process."
|
|
||||||
pm2 logs flyer-crawler-api-test --lines 20 --nostream || echo "Could not find test pm2 process."
|
# Resolve the PM2 ID dynamically to ensure we target the correct process
|
||||||
pm2 env flyer-crawler-api-test || echo "Could not find test pm2 process."
|
PM2_ID=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api-test'); console.log(app ? app.pm2_env.pm_id : ''); } catch(e) { console.log(''); }")
|
||||||
|
|
||||||
|
if [ -n "$PM2_ID" ]; then
|
||||||
|
echo "Found process ID: $PM2_ID"
|
||||||
|
pm2 describe "$PM2_ID" || echo "Failed to describe process $PM2_ID"
|
||||||
|
pm2 logs "$PM2_ID" --lines 20 --nostream || echo "Failed to get logs for $PM2_ID"
|
||||||
|
pm2 env "$PM2_ID" || echo "Failed to get env for $PM2_ID"
|
||||||
|
else
|
||||||
|
echo "Could not find process 'flyer-crawler-api-test' in pm2 list."
|
||||||
|
pm2 list # Fallback to listing everything to help debug
|
||||||
|
fi
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ jobs:
|
|||||||
DB_USER: ${{ secrets.DB_USER }}
|
DB_USER: ${{ secrets.DB_USER }}
|
||||||
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||||
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
||||||
BACKUP_DIR: "/var/www/backups" # Define a dedicated directory for backups
|
BACKUP_DIR: '/var/www/backups' # Define a dedicated directory for backups
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Validate Secrets and Inputs
|
- name: Validate Secrets and Inputs
|
||||||
|
|||||||
185
.gitea/workflows/manual-deploy-major.yml
Normal file
185
.gitea/workflows/manual-deploy-major.yml
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
# .gitea/workflows/manual-deploy-major.yml
|
||||||
|
#
|
||||||
|
# This workflow provides a MANUAL trigger to perform a MAJOR version bump
|
||||||
|
# and deploy the application to the PRODUCTION environment.
|
||||||
|
name: Manual - Deploy Major Version to Production
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
confirmation:
|
||||||
|
description: 'Type "deploy-major-to-prod" to confirm you want to deploy a new major version.'
|
||||||
|
required: true
|
||||||
|
default: 'do-not-run'
|
||||||
|
force_reload:
|
||||||
|
description: 'Force PM2 reload even if version matches (true/false).'
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy-production-major:
|
||||||
|
runs-on: projectium.com
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Verify Confirmation Phrase
|
||||||
|
run: |
|
||||||
|
if [ "${{ gitea.event.inputs.confirmation }}" != "deploy-major-to-prod" ]; then
|
||||||
|
echo "ERROR: Confirmation phrase did not match. Aborting deployment."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "✅ Confirmation accepted. Proceeding with major version production deployment."
|
||||||
|
|
||||||
|
- name: Checkout Code from 'main' branch
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: 'main' # Explicitly check out the main branch for production deployment
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
cache: 'npm'
|
||||||
|
cache-dependency-path: '**/package-lock.json'
|
||||||
|
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: npm ci
|
||||||
|
|
||||||
|
- name: Bump Major Version and Push
|
||||||
|
run: |
|
||||||
|
# Configure git for the commit.
|
||||||
|
git config --global user.name 'Gitea Actions'
|
||||||
|
git config --global user.email 'actions@gitea.projectium.com'
|
||||||
|
|
||||||
|
# Bump the major version number. This creates a new commit and a new tag.
|
||||||
|
# The commit message includes [skip ci] to prevent this push from triggering another workflow run.
|
||||||
|
npm version major -m "ci: Bump version to %s for major release [skip ci]"
|
||||||
|
|
||||||
|
# Push the new commit and the new tag back to the main branch.
|
||||||
|
git push --follow-tags
|
||||||
|
|
||||||
|
- name: Check for Production Database Schema Changes
|
||||||
|
env:
|
||||||
|
DB_HOST: ${{ secrets.DB_HOST }}
|
||||||
|
DB_USER: ${{ secrets.DB_USER }}
|
||||||
|
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||||
|
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
||||||
|
run: |
|
||||||
|
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
||||||
|
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "--- Checking for production schema changes ---"
|
||||||
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
|
echo "Current Git Schema Hash: $CURRENT_HASH"
|
||||||
|
# The psql command will now fail the step if the query errors (e.g., column missing), preventing deployment on a bad schema.
|
||||||
|
DEPLOYED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
||||||
|
echo "Deployed DB Schema Hash: $DEPLOYED_HASH"
|
||||||
|
if [ -z "$DEPLOYED_HASH" ]; then
|
||||||
|
echo "WARNING: No schema hash found in the production database. This is expected for a first-time deployment."
|
||||||
|
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
|
||||||
|
echo "ERROR: Database schema mismatch detected! A manual database migration is required."
|
||||||
|
exit 1
|
||||||
|
else
|
||||||
|
echo "✅ Schema is up to date. No changes detected."
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Build React Application for Production
|
||||||
|
run: |
|
||||||
|
if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
|
||||||
|
echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
GITEA_SERVER_URL="https://gitea.projectium.com"
|
||||||
|
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
|
||||||
|
PACKAGE_VERSION=$(node -p "require('./package.json').version")
|
||||||
|
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
|
||||||
|
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
|
||||||
|
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
|
||||||
|
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
|
||||||
|
|
||||||
|
- name: Deploy Application to Production Server
|
||||||
|
run: |
|
||||||
|
echo "Deploying application files to /var/www/flyer-crawler.projectium.com..."
|
||||||
|
APP_PATH="/var/www/flyer-crawler.projectium.com"
|
||||||
|
mkdir -p "$APP_PATH"
|
||||||
|
mkdir -p "$APP_PATH/flyer-images/icons" "$APP_PATH/flyer-images/archive"
|
||||||
|
rsync -avz --delete --exclude 'node_modules' --exclude '.git' --exclude 'dist' --exclude 'flyer-images' ./ "$APP_PATH/"
|
||||||
|
rsync -avz dist/ "$APP_PATH"
|
||||||
|
echo "Application deployment complete."
|
||||||
|
|
||||||
|
- name: Install Backend Dependencies and Restart Production Server
|
||||||
|
env:
|
||||||
|
# --- Production Secrets Injection ---
|
||||||
|
DB_HOST: ${{ secrets.DB_HOST }}
|
||||||
|
DB_USER: ${{ secrets.DB_USER }}
|
||||||
|
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
|
||||||
|
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
|
||||||
|
REDIS_URL: 'redis://localhost:6379'
|
||||||
|
REDIS_PASSWORD: ${{ secrets.REDIS_PASSWORD_PROD }}
|
||||||
|
FRONTEND_URL: 'https://flyer-crawler.projectium.com'
|
||||||
|
JWT_SECRET: ${{ secrets.JWT_SECRET }}
|
||||||
|
GEMINI_API_KEY: ${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}
|
||||||
|
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
||||||
|
SMTP_HOST: 'localhost'
|
||||||
|
SMTP_PORT: '1025'
|
||||||
|
SMTP_SECURE: 'false'
|
||||||
|
SMTP_USER: ''
|
||||||
|
SMTP_PASS: ''
|
||||||
|
SMTP_FROM_EMAIL: 'noreply@flyer-crawler.projectium.com'
|
||||||
|
run: |
|
||||||
|
if [ -z "$DB_HOST" ] || [ -z "$DB_USER" ] || [ -z "$DB_PASSWORD" ] || [ -z "$DB_NAME" ]; then
|
||||||
|
echo "ERROR: One or more production database secrets (DB_HOST, DB_USER, DB_PASSWORD, DB_DATABASE_PROD) are not set."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
echo "Installing production dependencies and restarting server..."
|
||||||
|
cd /var/www/flyer-crawler.projectium.com
|
||||||
|
npm install --omit=dev
|
||||||
|
|
||||||
|
# --- Cleanup Errored Processes ---
|
||||||
|
echo "Cleaning up errored or stopped PM2 processes..."
|
||||||
|
node -e "const exec = require('child_process').execSync; try { const list = JSON.parse(exec('pm2 jlist').toString()); list.forEach(p => { if (p.pm2_env.status === 'errored' || p.pm2_env.status === 'stopped') { console.log('Deleting ' + p.pm2_env.status + ' process: ' + p.name + ' (' + p.pm2_env.pm_id + ')'); try { exec('pm2 delete ' + p.pm2_env.pm_id); } catch(e) { console.error('Failed to delete ' + p.pm2_env.pm_id); } } }); } catch (e) { console.error('Error cleaning up processes:', e); }"
|
||||||
|
|
||||||
|
# --- Version Check Logic ---
|
||||||
|
# Get the version from the newly deployed package.json
|
||||||
|
NEW_VERSION=$(node -p "require('./package.json').version")
|
||||||
|
echo "Deployed Package Version: $NEW_VERSION"
|
||||||
|
|
||||||
|
# Get the running version from PM2 for the main API process
|
||||||
|
# We use a small node script to parse the JSON output from pm2 jlist
|
||||||
|
RUNNING_VERSION=$(pm2 jlist | node -e "try { const list = JSON.parse(require('fs').readFileSync(0, 'utf-8')); const app = list.find(p => p.name === 'flyer-crawler-api'); console.log(app ? app.pm2_env.version : ''); } catch(e) { console.log(''); }")
|
||||||
|
echo "Running PM2 Version: $RUNNING_VERSION"
|
||||||
|
|
||||||
|
if [ "${{ gitea.event.inputs.force_reload }}" == "true" ] || [ "$NEW_VERSION" != "$RUNNING_VERSION" ] || [ -z "$RUNNING_VERSION" ]; then
|
||||||
|
if [ "${{ gitea.event.inputs.force_reload }}" == "true" ]; then
|
||||||
|
echo "Force reload triggered by manual input. Reloading PM2..."
|
||||||
|
else
|
||||||
|
echo "Version mismatch (Running: $RUNNING_VERSION -> Deployed: $NEW_VERSION) or app not running. Reloading PM2..."
|
||||||
|
fi
|
||||||
|
pm2 startOrReload ecosystem.config.cjs --env production --update-env && pm2 save
|
||||||
|
echo "Production backend server reloaded successfully."
|
||||||
|
else
|
||||||
|
echo "Version $NEW_VERSION is already running. Skipping PM2 reload."
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Updating schema hash in production database..."
|
||||||
|
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
|
||||||
|
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
|
||||||
|
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
|
||||||
|
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
|
||||||
|
|
||||||
|
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)
|
||||||
|
if [ "$CURRENT_HASH" = "$UPDATED_HASH" ]; then
|
||||||
|
echo "✅ Schema hash successfully updated in the database to: $UPDATED_HASH"
|
||||||
|
else
|
||||||
|
echo "ERROR: Failed to update schema hash in the database."
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Show PM2 Environment for Production
|
||||||
|
run: |
|
||||||
|
echo "--- Displaying recent PM2 logs for flyer-crawler-api ---"
|
||||||
|
sleep 5
|
||||||
|
pm2 describe flyer-crawler-api || echo "Could not find production pm2 process."
|
||||||
|
pm2 logs flyer-crawler-api --lines 20 --nostream || echo "Could not find production pm2 process."
|
||||||
|
pm2 env flyer-crawler-api || echo "Could not find production pm2 process."
|
||||||
31
Dockerfile.dev
Normal file
31
Dockerfile.dev
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# Use Ubuntu 22.04 (LTS) as the base image to match production
|
||||||
|
FROM ubuntu:22.04
|
||||||
|
|
||||||
|
# Set environment variables to non-interactive to avoid prompts during installation
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
# Update package lists and install essential tools
|
||||||
|
# - curl: for downloading Node.js setup script
|
||||||
|
# - git: for version control operations
|
||||||
|
# - build-essential: for compiling native Node.js modules (node-gyp)
|
||||||
|
# - python3: required by some Node.js build tools
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
curl \
|
||||||
|
git \
|
||||||
|
build-essential \
|
||||||
|
python3 \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install Node.js 20.x (LTS) from NodeSource
|
||||||
|
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
|
||||||
|
&& apt-get install -y nodejs
|
||||||
|
|
||||||
|
# Set the working directory inside the container
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Set default environment variables for development
|
||||||
|
ENV NODE_ENV=development
|
||||||
|
ENV NODE_OPTIONS='--max-old-space-size=8192'
|
||||||
|
|
||||||
|
# Default command keeps the container running so you can attach to it
|
||||||
|
CMD ["bash"]
|
||||||
80
README.md
80
README.md
@@ -95,7 +95,7 @@ actually the proper change was to do this in the /etc/nginx/sites-available/flye
|
|||||||
## for OAuth
|
## for OAuth
|
||||||
|
|
||||||
1. Get Google OAuth Credentials
|
1. Get Google OAuth Credentials
|
||||||
This is a crucial step that you must do outside the codebase:
|
This is a crucial step that you must do outside the codebase:
|
||||||
|
|
||||||
Go to the Google Cloud Console.
|
Go to the Google Cloud Console.
|
||||||
|
|
||||||
@@ -112,7 +112,7 @@ Under Authorized redirect URIs, click ADD URI and enter the URL where Google wil
|
|||||||
Click Create. You will be given a Client ID and a Client Secret.
|
Click Create. You will be given a Client ID and a Client Secret.
|
||||||
|
|
||||||
2. Get GitHub OAuth Credentials
|
2. Get GitHub OAuth Credentials
|
||||||
You'll need to obtain a Client ID and Client Secret from GitHub:
|
You'll need to obtain a Client ID and Client Secret from GitHub:
|
||||||
|
|
||||||
Go to your GitHub profile settings.
|
Go to your GitHub profile settings.
|
||||||
|
|
||||||
@@ -133,21 +133,23 @@ You will be given a Client ID and a Client Secret.
|
|||||||
|
|
||||||
psql -h localhost -U flyer_crawler_user -d "flyer-crawler-prod" -W
|
psql -h localhost -U flyer_crawler_user -d "flyer-crawler-prod" -W
|
||||||
|
|
||||||
|
|
||||||
## postgis
|
## postgis
|
||||||
|
|
||||||
flyer-crawler-prod=> SELECT version();
|
flyer-crawler-prod=> SELECT version();
|
||||||
version
|
version
|
||||||
------------------------------------------------------------------------------------------------------------------------------------------
|
|
||||||
PostgreSQL 14.19 (Ubuntu 14.19-0ubuntu0.22.04.1) on x86_64-pc-linux-gnu, compiled by gcc (Ubuntu 11.4.0-1ubuntu1~22.04.2) 11.4.0, 64-bit
|
---
|
||||||
|
|
||||||
|
PostgreSQL 14.19 (Ubuntu 14.19-0ubuntu0.22.04.1) on x86_64-pc-linux-gnu, compiled by gcc (Ubuntu 11.4.0-1ubuntu1~22.04.2) 11.4.0, 64-bit
|
||||||
(1 row)
|
(1 row)
|
||||||
|
|
||||||
flyer-crawler-prod=> SELECT PostGIS_Full_Version();
|
flyer-crawler-prod=> SELECT PostGIS_Full_Version();
|
||||||
postgis_full_version
|
postgis_full_version
|
||||||
--------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
|
||||||
POSTGIS="3.2.0 c3e3cc0" [EXTENSION] PGSQL="140" GEOS="3.10.2-CAPI-1.16.0" PROJ="8.2.1" LIBXML="2.9.12" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
|
|
||||||
(1 row)
|
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
POSTGIS="3.2.0 c3e3cc0" [EXTENSION] PGSQL="140" GEOS="3.10.2-CAPI-1.16.0" PROJ="8.2.1" LIBXML="2.9.12" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
|
||||||
|
(1 row)
|
||||||
|
|
||||||
## production postgres setup
|
## production postgres setup
|
||||||
|
|
||||||
@@ -201,9 +203,13 @@ Step 4: Seed the Admin Account (If Needed)
|
|||||||
Your application has a separate script to create the initial admin user. To run it, you must first set the required environment variables in your shell session.
|
Your application has a separate script to create the initial admin user. To run it, you must first set the required environment variables in your shell session.
|
||||||
|
|
||||||
bash
|
bash
|
||||||
|
|
||||||
# Set variables for the current session
|
# Set variables for the current session
|
||||||
|
|
||||||
export DB_USER=flyer_crawler_user DB_PASSWORD=your_password DB_NAME="flyer-crawler-prod" ...
|
export DB_USER=flyer_crawler_user DB_PASSWORD=your_password DB_NAME="flyer-crawler-prod" ...
|
||||||
|
|
||||||
# Run the seeding script
|
# Run the seeding script
|
||||||
|
|
||||||
npx tsx src/db/seed_admin_account.ts
|
npx tsx src/db/seed_admin_account.ts
|
||||||
Your production database is now ready!
|
Your production database is now ready!
|
||||||
|
|
||||||
@@ -284,8 +290,6 @@ Test Execution: Your tests run against this clean, isolated schema.
|
|||||||
|
|
||||||
This approach is faster, more reliable, and removes the need for sudo access within the CI pipeline.
|
This approach is faster, more reliable, and removes the need for sudo access within the CI pipeline.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
gitea-runner@projectium:~$ pm2 install pm2-logrotate
|
gitea-runner@projectium:~$ pm2 install pm2-logrotate
|
||||||
[PM2][Module] Installing NPM pm2-logrotate module
|
[PM2][Module] Installing NPM pm2-logrotate module
|
||||||
[PM2][Module] Calling [NPM] to install pm2-logrotate ...
|
[PM2][Module] Calling [NPM] to install pm2-logrotate ...
|
||||||
@@ -293,7 +297,7 @@ gitea-runner@projectium:~$ pm2 install pm2-logrotate
|
|||||||
added 161 packages in 5s
|
added 161 packages in 5s
|
||||||
|
|
||||||
21 packages are looking for funding
|
21 packages are looking for funding
|
||||||
run `npm fund` for details
|
run `npm fund` for details
|
||||||
npm notice
|
npm notice
|
||||||
npm notice New patch version of npm available! 11.6.3 -> 11.6.4
|
npm notice New patch version of npm available! 11.6.3 -> 11.6.4
|
||||||
npm notice Changelog: https://github.com/npm/cli/releases/tag/v11.6.4
|
npm notice Changelog: https://github.com/npm/cli/releases/tag/v11.6.4
|
||||||
@@ -308,7 +312,7 @@ $ pm2 set pm2-logrotate:retain 30
|
|||||||
$ pm2 set pm2-logrotate:compress false
|
$ pm2 set pm2-logrotate:compress false
|
||||||
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
||||||
$ pm2 set pm2-logrotate:workerInterval 30
|
$ pm2 set pm2-logrotate:workerInterval 30
|
||||||
$ pm2 set pm2-logrotate:rotateInterval 0 0 * * *
|
$ pm2 set pm2-logrotate:rotateInterval 0 0 \* \* _
|
||||||
$ pm2 set pm2-logrotate:rotateModule true
|
$ pm2 set pm2-logrotate:rotateModule true
|
||||||
Modules configuration. Copy/Paste line to edit values.
|
Modules configuration. Copy/Paste line to edit values.
|
||||||
[PM2][Module] Module successfully installed and launched
|
[PM2][Module] Module successfully installed and launched
|
||||||
@@ -335,7 +339,7 @@ $ pm2 set pm2-logrotate:retain 30
|
|||||||
$ pm2 set pm2-logrotate:compress false
|
$ pm2 set pm2-logrotate:compress false
|
||||||
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
||||||
$ pm2 set pm2-logrotate:workerInterval 30
|
$ pm2 set pm2-logrotate:workerInterval 30
|
||||||
$ pm2 set pm2-logrotate:rotateInterval 0 0 * * *
|
$ pm2 set pm2-logrotate:rotateInterval 0 0 _ \* _
|
||||||
$ pm2 set pm2-logrotate:rotateModule true
|
$ pm2 set pm2-logrotate:rotateModule true
|
||||||
gitea-runner@projectium:~$ pm2 set pm2-logrotate:retain 14
|
gitea-runner@projectium:~$ pm2 set pm2-logrotate:retain 14
|
||||||
[PM2] Module pm2-logrotate restarted
|
[PM2] Module pm2-logrotate restarted
|
||||||
@@ -346,31 +350,29 @@ $ pm2 set pm2-logrotate:retain 14
|
|||||||
$ pm2 set pm2-logrotate:compress false
|
$ pm2 set pm2-logrotate:compress false
|
||||||
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
$ pm2 set pm2-logrotate:dateFormat YYYY-MM-DD_HH-mm-ss
|
||||||
$ pm2 set pm2-logrotate:workerInterval 30
|
$ pm2 set pm2-logrotate:workerInterval 30
|
||||||
$ pm2 set pm2-logrotate:rotateInterval 0 0 * * *
|
$ pm2 set pm2-logrotate:rotateInterval 0 0 _ \* \*
|
||||||
$ pm2 set pm2-logrotate:rotateModule true
|
$ pm2 set pm2-logrotate:rotateModule true
|
||||||
gitea-runner@projectium:~$
|
gitea-runner@projectium:~$
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## dev server setup:
|
## dev server setup:
|
||||||
|
|
||||||
Here are the steps to set up the development environment on Windows using Podman with an Ubuntu container:
|
Here are the steps to set up the development environment on Windows using Podman with an Ubuntu container:
|
||||||
|
|
||||||
1. Install Prerequisites on Windows
|
1. Install Prerequisites on Windows
|
||||||
Install WSL 2: Podman on Windows relies on the Windows Subsystem for Linux. Install it by running wsl --install in an administrator PowerShell.
|
Install WSL 2: Podman on Windows relies on the Windows Subsystem for Linux. Install it by running wsl --install in an administrator PowerShell.
|
||||||
Install Podman Desktop: Download and install Podman Desktop for Windows.
|
Install Podman Desktop: Download and install Podman Desktop for Windows.
|
||||||
|
|
||||||
2. Set Up Podman
|
2. Set Up Podman
|
||||||
Initialize Podman: Launch Podman Desktop. It will automatically set up its WSL 2 machine.
|
Initialize Podman: Launch Podman Desktop. It will automatically set up its WSL 2 machine.
|
||||||
Start Podman: Ensure the Podman machine is running from the Podman Desktop interface.
|
Start Podman: Ensure the Podman machine is running from the Podman Desktop interface.
|
||||||
|
|
||||||
3. Set Up the Ubuntu Container
|
3. Set Up the Ubuntu Container
|
||||||
- Pull Ubuntu Image: Open a PowerShell or command prompt and pull the latest Ubuntu image:
|
|
||||||
|
- Pull Ubuntu Image: Open a PowerShell or command prompt and pull the latest Ubuntu image:
|
||||||
podman pull ubuntu:latest
|
podman pull ubuntu:latest
|
||||||
- Create a Podman Volume: Create a volume to persist node_modules and avoid installing them every time the container starts.
|
- Create a Podman Volume: Create a volume to persist node_modules and avoid installing them every time the container starts.
|
||||||
podman volume create node_modules_cache
|
podman volume create node_modules_cache
|
||||||
- Run the Ubuntu Container: Start a new container with the project directory mounted and the necessary ports forwarded.
|
- Run the Ubuntu Container: Start a new container with the project directory mounted and the necessary ports forwarded.
|
||||||
- Open a terminal in your project's root directory on Windows.
|
- Open a terminal in your project's root directory on Windows.
|
||||||
- Run the following command, replacing D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com with the full path to your project:
|
- Run the following command, replacing D:\gitea\flyer-crawler.projectium.com\flyer-crawler.projectium.com with the full path to your project:
|
||||||
|
|
||||||
@@ -385,16 +387,16 @@ podman run -it -p 3001:3001 -p 5173:5173 --name flyer-dev -v "D:\gitea\flyer-cra
|
|||||||
4. Configure the Ubuntu Environment
|
4. Configure the Ubuntu Environment
|
||||||
You are now inside the Ubuntu container's shell.
|
You are now inside the Ubuntu container's shell.
|
||||||
|
|
||||||
- Update Package Lists:
|
- Update Package Lists:
|
||||||
apt-get update
|
apt-get update
|
||||||
- Install Dependencies: Install curl, git, and nodejs (which includes npm).
|
- Install Dependencies: Install curl, git, and nodejs (which includes npm).
|
||||||
apt-get install -y curl git
|
apt-get install -y curl git
|
||||||
curl -sL https://deb.nodesource.com/setup_20.x | bash -
|
curl -sL https://deb.nodesource.com/setup_20.x | bash -
|
||||||
apt-get install -y nodejs
|
apt-get install -y nodejs
|
||||||
- Navigate to Project Directory:
|
- Navigate to Project Directory:
|
||||||
cd /app
|
cd /app
|
||||||
|
|
||||||
- Install Project Dependencies:
|
- Install Project Dependencies:
|
||||||
npm install
|
npm install
|
||||||
|
|
||||||
5. Run the Development Server
|
5. Run the Development Server
|
||||||
@@ -402,27 +404,21 @@ podman run -it -p 3001:3001 -p 5173:5173 --name flyer-dev -v "D:\gitea\flyer-cra
|
|||||||
npm run dev
|
npm run dev
|
||||||
|
|
||||||
6. Accessing the Application
|
6. Accessing the Application
|
||||||
- Frontend: Open your browser and go to http://localhost:5173.
|
|
||||||
- Backend: The frontend will make API calls to http://localhost:3001.
|
- Frontend: Open your browser and go to http://localhost:5173.
|
||||||
|
- Backend: The frontend will make API calls to http://localhost:3001.
|
||||||
|
|
||||||
Managing the Environment
|
Managing the Environment
|
||||||
- Stopping the Container: Press Ctrl+C in the container terminal, then type exit.
|
|
||||||
- Restarting the Container:
|
- Stopping the Container: Press Ctrl+C in the container terminal, then type exit.
|
||||||
|
- Restarting the Container:
|
||||||
podman start -a -i flyer-dev
|
podman start -a -i flyer-dev
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## for me:
|
## for me:
|
||||||
|
|
||||||
cd /mnt/d/gitea/flyer-crawler.projectium.com/flyer-crawler.projectium.com
|
cd /mnt/d/gitea/flyer-crawler.projectium.com/flyer-crawler.projectium.com
|
||||||
podman run -it -p 3001:3001 -p 5173:5173 --name flyer-dev -v "$(pwd):/app" -v "node_modules_cache:/app/node_modules" ubuntu:latest
|
podman run -it -p 3001:3001 -p 5173:5173 --name flyer-dev -v "$(pwd):/app" -v "node_modules_cache:/app/node_modules" ubuntu:latest
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
rate limiting
|
rate limiting
|
||||||
|
|
||||||
respect the AI service's rate limits, making it more stable and robust. You can adjust the GEMINI_RPM environment variable in your production environment as needed without changing the code.
|
respect the AI service's rate limits, making it more stable and robust. You can adjust the GEMINI_RPM environment variable in your production environment as needed without changing the code.
|
||||||
52
compose.dev.yml
Normal file
52
compose.dev.yml
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
version: '3.8'
|
||||||
|
|
||||||
|
services:
|
||||||
|
app:
|
||||||
|
container_name: flyer-crawler-dev
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile.dev
|
||||||
|
volumes:
|
||||||
|
# Mount the current directory to /app in the container
|
||||||
|
- .:/app
|
||||||
|
# Create a volume for node_modules to avoid conflicts with Windows host
|
||||||
|
# and improve performance.
|
||||||
|
- node_modules_data:/app/node_modules
|
||||||
|
ports:
|
||||||
|
- '3000:3000' # Frontend (Vite default)
|
||||||
|
- '3001:3001' # Backend API
|
||||||
|
environment:
|
||||||
|
- NODE_ENV=development
|
||||||
|
- DB_HOST=postgres
|
||||||
|
- DB_USER=postgres
|
||||||
|
- DB_PASSWORD=postgres
|
||||||
|
- DB_NAME=flyer_crawler_dev
|
||||||
|
- REDIS_URL=redis://redis:6379
|
||||||
|
# Add other secrets here or use a .env file
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
- redis
|
||||||
|
# Keep container running so VS Code can attach
|
||||||
|
command: tail -f /dev/null
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: docker.io/library/postgis/postgis:15-3.4
|
||||||
|
container_name: flyer-crawler-postgres
|
||||||
|
ports:
|
||||||
|
- '5432:5432'
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
POSTGRES_DB: flyer_crawler_dev
|
||||||
|
volumes:
|
||||||
|
- postgres_data:/var/lib/postgresql/data
|
||||||
|
|
||||||
|
redis:
|
||||||
|
image: docker.io/library/redis:alpine
|
||||||
|
container_name: flyer-crawler-redis
|
||||||
|
ports:
|
||||||
|
- '6379:6379'
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
postgres_data:
|
||||||
|
node_modules_data:
|
||||||
@@ -34,7 +34,7 @@ We will adopt a strict, consistent error-handling contract for the service and r
|
|||||||
**Robustness**: Eliminates an entire class of bugs where `undefined` is passed to `res.json()`, preventing incorrect `500` errors.
|
**Robustness**: Eliminates an entire class of bugs where `undefined` is passed to `res.json()`, preventing incorrect `500` errors.
|
||||||
**Consistency & Predictability**: All data-fetching methods now have a predictable contract. They either return the expected data or throw a specific, typed error.
|
**Consistency & Predictability**: All data-fetching methods now have a predictable contract. They either return the expected data or throw a specific, typed error.
|
||||||
**Developer Experience**: Route handlers become simpler, cleaner, and easier to write correctly. The cognitive load on developers is reduced as they no longer need to remember to check for `undefined`.
|
**Developer Experience**: Route handlers become simpler, cleaner, and easier to write correctly. The cognitive load on developers is reduced as they no longer need to remember to check for `undefined`.
|
||||||
**Improved Testability**: Tests become more reliable and realistic. Mocks can now throw the *exact* error type (`new NotFoundError()`) that the real implementation would, ensuring tests accurately reflect the application's behavior.
|
**Improved Testability**: Tests become more reliable and realistic. Mocks can now throw the _exact_ error type (`new NotFoundError()`) that the real implementation would, ensuring tests accurately reflect the application's behavior.
|
||||||
**Centralized Control**: Error-to-HTTP-status logic is centralized in the `errorHandler` middleware, making it easy to manage and modify error responses globally.
|
**Centralized Control**: Error-to-HTTP-status logic is centralized in the `errorHandler` middleware, making it easy to manage and modify error responses globally.
|
||||||
|
|
||||||
### Negative
|
### Negative
|
||||||
|
|||||||
@@ -10,21 +10,19 @@ Following the standardization of error handling in ADR-001, the next most common
|
|||||||
|
|
||||||
This manual approach has several drawbacks:
|
This manual approach has several drawbacks:
|
||||||
**Repetitive Boilerplate**: The `try/catch/finally` block for transaction management is duplicated across multiple files.
|
**Repetitive Boilerplate**: The `try/catch/finally` block for transaction management is duplicated across multiple files.
|
||||||
**Error-Prone**: It is easy to forget to `client.release()` in all code paths, which can lead to connection pool exhaustion and bring down the application.
|
**Error-Prone**: It is easy to forget to `client.release()` in all code paths, which can lead to connection pool exhaustion and bring down the application. 3. **Poor Composability**: It is difficult to compose multiple repository methods into a single, atomic "Unit of Work". For example, a service function that needs to update a user's points and create a budget in a single transaction cannot easily do so if both underlying repository methods create their own transactions.
|
||||||
3. **Poor Composability**: It is difficult to compose multiple repository methods into a single, atomic "Unit of Work". For example, a service function that needs to update a user's points and create a budget in a single transaction cannot easily do so if both underlying repository methods create their own transactions.
|
|
||||||
|
|
||||||
## Decision
|
## Decision
|
||||||
|
|
||||||
We will implement a standardized "Unit of Work" pattern through a high-level `withTransaction` helper function. This function will abstract away the complexity of transaction management.
|
We will implement a standardized "Unit of Work" pattern through a high-level `withTransaction` helper function. This function will abstract away the complexity of transaction management.
|
||||||
|
|
||||||
1. **`withTransaction` Helper**: A new helper function, `withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T>`, will be created. This function will be responsible for:
|
1. **`withTransaction` Helper**: A new helper function, `withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T>`, will be created. This function will be responsible for:
|
||||||
|
- Acquiring a client from the database pool.
|
||||||
* Acquiring a client from the database pool.
|
- Starting a transaction (`BEGIN`).
|
||||||
* Starting a transaction (`BEGIN`).
|
- Executing the `callback` function, passing the transactional client to it.
|
||||||
* Executing the `callback` function, passing the transactional client to it.
|
- If the callback succeeds, it will `COMMIT` the transaction.
|
||||||
* If the callback succeeds, it will `COMMIT` the transaction.
|
- If the callback throws an error, it will `ROLLBACK` the transaction and re-throw the error.
|
||||||
* If the callback throws an error, it will `ROLLBACK` the transaction and re-throw the error.
|
- In all cases, it will `RELEASE` the client back to the pool.
|
||||||
* In all cases, it will `RELEASE` the client back to the pool.
|
|
||||||
|
|
||||||
2. **Repository Method Signature**: Repository methods that need to be part of a transaction will be updated to optionally accept a `PoolClient` in their constructor or as a method parameter. By default, they will use the global pool. When called from within a `withTransaction` block, they will be passed the transactional client.
|
2. **Repository Method Signature**: Repository methods that need to be part of a transaction will be updated to optionally accept a `PoolClient` in their constructor or as a method parameter. By default, they will use the global pool. When called from within a `withTransaction` block, they will be passed the transactional client.
|
||||||
3. **Service Layer Orchestration**: Service-layer functions that orchestrate multi-step operations will use `withTransaction` to ensure atomicity. They will instantiate or call repository methods, providing them with the transactional client from the callback.
|
3. **Service Layer Orchestration**: Service-layer functions that orchestrate multi-step operations will use `withTransaction` to ensure atomicity. They will instantiate or call repository methods, providing them with the transactional client from the callback.
|
||||||
@@ -40,7 +38,7 @@ async function registerUserAndCreateDefaultList(userData) {
|
|||||||
const shoppingRepo = new ShoppingRepository(client);
|
const shoppingRepo = new ShoppingRepository(client);
|
||||||
|
|
||||||
const newUser = await userRepo.createUser(userData);
|
const newUser = await userRepo.createUser(userData);
|
||||||
await shoppingRepo.createShoppingList(newUser.user_id, "My First List");
|
await shoppingRepo.createShoppingList(newUser.user_id, 'My First List');
|
||||||
|
|
||||||
return newUser;
|
return newUser;
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -20,8 +20,8 @@ We will adopt a schema-based approach for input validation using the `zod` libra
|
|||||||
1. **Adopt `zod` for Schema Definition**: We will use `zod` to define clear, type-safe schemas for the `params`, `query`, and `body` of each API request. `zod` provides powerful and declarative validation rules and automatically infers TypeScript types.
|
1. **Adopt `zod` for Schema Definition**: We will use `zod` to define clear, type-safe schemas for the `params`, `query`, and `body` of each API request. `zod` provides powerful and declarative validation rules and automatically infers TypeScript types.
|
||||||
|
|
||||||
2. **Create a Reusable Validation Middleware**: A generic `validateRequest(schema)` middleware will be created. This middleware will take a `zod` schema, parse the incoming request against it, and handle success and error cases.
|
2. **Create a Reusable Validation Middleware**: A generic `validateRequest(schema)` middleware will be created. This middleware will take a `zod` schema, parse the incoming request against it, and handle success and error cases.
|
||||||
* On successful validation, the parsed and typed data will be attached to the `req` object (e.g., `req.body` will be replaced with the parsed body), and `next()` will be called.
|
- On successful validation, the parsed and typed data will be attached to the `req` object (e.g., `req.body` will be replaced with the parsed body), and `next()` will be called.
|
||||||
* On validation failure, the middleware will call `next()` with a custom `ValidationError` containing a structured list of issues, which `ADR-001`'s `errorHandler` can then format into a user-friendly `400 Bad Request` response.
|
- On validation failure, the middleware will call `next()` with a custom `ValidationError` containing a structured list of issues, which `ADR-001`'s `errorHandler` can then format into a user-friendly `400 Bad Request` response.
|
||||||
|
|
||||||
3. **Refactor Routes**: All route handlers will be refactored to use this new middleware, removing all manual validation logic.
|
3. **Refactor Routes**: All route handlers will be refactored to use this new middleware, removing all manual validation logic.
|
||||||
|
|
||||||
@@ -46,7 +46,7 @@ const getFlyerSchema = z.object({
|
|||||||
type GetFlyerRequest = z.infer<typeof getFlyerSchema>;
|
type GetFlyerRequest = z.infer<typeof getFlyerSchema>;
|
||||||
|
|
||||||
// 3. Apply the middleware and use an inline cast for the request
|
// 3. Apply the middleware and use an inline cast for the request
|
||||||
router.get('/:id', validateRequest(getFlyerSchema), (async (req, res, next) => {
|
router.get('/:id', validateRequest(getFlyerSchema), async (req, res, next) => {
|
||||||
// Cast 'req' to the inferred type.
|
// Cast 'req' to the inferred type.
|
||||||
// This provides full type safety for params, query, and body.
|
// This provides full type safety for params, query, and body.
|
||||||
const { params } = req as unknown as GetFlyerRequest;
|
const { params } = req as unknown as GetFlyerRequest;
|
||||||
@@ -57,7 +57,7 @@ router.get('/:id', validateRequest(getFlyerSchema), (async (req, res, next) => {
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
}));
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|||||||
@@ -20,9 +20,9 @@ We will adopt a standardized, application-wide structured logging policy. All lo
|
|||||||
|
|
||||||
**Request-Scoped Logger with Context**: We will create a middleware that runs at the beginning of the request lifecycle. This middleware will:
|
**Request-Scoped Logger with Context**: We will create a middleware that runs at the beginning of the request lifecycle. This middleware will:
|
||||||
|
|
||||||
* Generate a unique `request_id` for each incoming request.
|
- Generate a unique `request_id` for each incoming request.
|
||||||
* Create a request-scoped logger instance (a "child logger") that automatically includes the `request_id`, `user_id` (if authenticated), and `ip_address` in every log message it generates.
|
- Create a request-scoped logger instance (a "child logger") that automatically includes the `request_id`, `user_id` (if authenticated), and `ip_address` in every log message it generates.
|
||||||
* Attach this child logger to the `req` object (e.g., `req.log`).
|
- Attach this child logger to the `req` object (e.g., `req.log`).
|
||||||
|
|
||||||
**Mandatory Use of Request-Scoped Logger**: All route handlers and any service functions called by them **MUST** use the request-scoped logger (`req.log`) instead of the global logger instance. This ensures all logs for a given request are automatically correlated.
|
**Mandatory Use of Request-Scoped Logger**: All route handlers and any service functions called by them **MUST** use the request-scoped logger (`req.log`) instead of the global logger instance. This ensures all logs for a given request are automatically correlated.
|
||||||
|
|
||||||
@@ -32,9 +32,9 @@ We will adopt a standardized, application-wide structured logging policy. All lo
|
|||||||
|
|
||||||
**Standardized Logging Practices**:
|
**Standardized Logging Practices**:
|
||||||
**INFO**: Log key business events, such as `User logged in` or `Flyer processed`.
|
**INFO**: Log key business events, such as `User logged in` or `Flyer processed`.
|
||||||
**WARN**: Log recoverable errors or unusual situations that do not break the request, such as `Client Error: 404 on GET /api/non-existent-route` or `Retrying failed database connection`.
|
**WARN**: Log recoverable errors or unusual situations that do not break the request, such as `Client Error: 404 on GET /api/non-existent-route` or `Retrying failed database connection`.
|
||||||
**ERROR**: Log only unhandled or server-side errors that cause a request to fail (typically handled by the `errorHandler`). Avoid logging expected client errors (like 4xx) at this level.
|
**ERROR**: Log only unhandled or server-side errors that cause a request to fail (typically handled by the `errorHandler`). Avoid logging expected client errors (like 4xx) at this level.
|
||||||
**DEBUG**: Log detailed diagnostic information useful during development, such as function entry/exit points or variable states.
|
**DEBUG**: Log detailed diagnostic information useful during development, such as function entry/exit points or variable states.
|
||||||
|
|
||||||
### Example Usage
|
### Example Usage
|
||||||
|
|
||||||
|
|||||||
@@ -14,5 +14,5 @@ We will formalize a centralized Role-Based Access Control (RBAC) or Attribute-Ba
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Ensures authorization logic is consistent, easy to audit, and decoupled from business logic. Improves security by centralizing access control.
|
- **Positive**: Ensures authorization logic is consistent, easy to audit, and decoupled from business logic. Improves security by centralizing access control.
|
||||||
* **Negative**: Requires a significant refactoring effort to integrate the new authorization system across all protected routes and features. Introduces a new dependency if an external library is chosen.
|
- **Negative**: Requires a significant refactoring effort to integrate the new authorization system across all protected routes and features. Introduces a new dependency if an external library is chosen.
|
||||||
|
|||||||
@@ -14,5 +14,5 @@ We will establish a formal Design System and Component Library. This will involv
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
|
- **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
|
||||||
* **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
|
- **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
|
||||||
|
|||||||
@@ -14,5 +14,5 @@ We will adopt a dedicated database migration tool, such as **`node-pg-migrate`**
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Provides a safe, repeatable, and reversible way to evolve the database schema. Improves team collaboration on database changes. Reduces the risk of data loss or downtime during deployments.
|
- **Positive**: Provides a safe, repeatable, and reversible way to evolve the database schema. Improves team collaboration on database changes. Reduces the risk of data loss or downtime during deployments.
|
||||||
* **Negative**: Requires an initial setup and learning curve for the chosen migration tool. All future schema changes must adhere to the migration workflow.
|
- **Negative**: Requires an initial setup and learning curve for the chosen migration tool. All future schema changes must adhere to the migration workflow.
|
||||||
|
|||||||
@@ -14,5 +14,5 @@ We will standardize the deployment process by containerizing the application usi
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers. Improves portability and scalability of the application.
|
- **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers. Improves portability and scalability of the application.
|
||||||
* **Negative**: Requires learning Docker and containerization concepts. Adds `Dockerfile` and `docker-compose.yml` to the project's configuration.
|
- **Negative**: Requires learning Docker and containerization concepts. Adds `Dockerfile` and `docker-compose.yml` to the project's configuration.
|
||||||
|
|||||||
@@ -18,5 +18,5 @@ We will implement a multi-layered security approach for the API:
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
|
- **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
|
||||||
* **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.
|
- **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.
|
||||||
|
|||||||
@@ -14,5 +14,5 @@ We will formalize the end-to-end CI/CD process. This ADR will define the project
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
|
- **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
|
||||||
* **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.
|
- **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.
|
||||||
|
|||||||
@@ -14,5 +14,5 @@ We will adopt **OpenAPI (Swagger)** for API documentation. We will use tools (e.
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Creates a single source of truth for API documentation that stays in sync with the code. Enables auto-generation of client SDKs and simplifies testing.
|
- **Positive**: Creates a single source of truth for API documentation that stays in sync with the code. Enables auto-generation of client SDKs and simplifies testing.
|
||||||
* **Negative**: Requires developers to maintain JSDoc annotations on all routes. Adds a build step and new dependencies to the project.
|
- **Negative**: Requires developers to maintain JSDoc annotations on all routes. Adds a build step and new dependencies to the project.
|
||||||
|
|||||||
@@ -14,5 +14,5 @@ We will implement a formal data backup and recovery strategy. This will involve
|
|||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
|
- **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
|
||||||
* **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.
|
- **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.
|
||||||
|
|||||||
@@ -12,11 +12,11 @@ When the application is containerized (`ADR-014`), the container orchestrator (e
|
|||||||
|
|
||||||
We will implement dedicated health check endpoints in the Express application.
|
We will implement dedicated health check endpoints in the Express application.
|
||||||
|
|
||||||
* A **Liveness Probe** (`/api/health/live`) will return a `200 OK` to indicate the server is running. If it fails, the orchestrator should restart the container.
|
- A **Liveness Probe** (`/api/health/live`) will return a `200 OK` to indicate the server is running. If it fails, the orchestrator should restart the container.
|
||||||
|
|
||||||
* A **Readiness Probe** (`/api/health/ready`) will return a `200 OK` only if the application is ready to accept traffic (e.g., database connection is established). If it fails, the orchestrator will temporarily remove the container from the load balancer.
|
- A **Readiness Probe** (`/api/health/ready`) will return a `200 OK` only if the application is ready to accept traffic (e.g., database connection is established). If it fails, the orchestrator will temporarily remove the container from the load balancer.
|
||||||
|
|
||||||
## Consequences
|
## Consequences
|
||||||
|
|
||||||
* **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
|
- **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
|
||||||
* **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.
|
- **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.
|
||||||
|
|||||||
@@ -24,8 +24,8 @@ We will adopt a standardized, application-wide structured logging policy for all
|
|||||||
|
|
||||||
**2. Pino-like API for Structured Logging**: The client logger mimics the `pino` API, which is the standard on the backend. It supports two primary call signatures:
|
**2. Pino-like API for Structured Logging**: The client logger mimics the `pino` API, which is the standard on the backend. It supports two primary call signatures:
|
||||||
|
|
||||||
* `logger.info('A simple message');`
|
- `logger.info('A simple message');`
|
||||||
* `logger.info({ key: 'value' }, 'A message with a structured data payload');`
|
- `logger.info({ key: 'value' }, 'A message with a structured data payload');`
|
||||||
|
|
||||||
The second signature, which includes a data object as the first argument, is **strongly preferred**, especially for logging errors or complex state.
|
The second signature, which includes a data object as the first argument, is **strongly preferred**, especially for logging errors or complex state.
|
||||||
|
|
||||||
@@ -79,7 +79,7 @@ describe('MyComponent', () => {
|
|||||||
// Assert that the logger was called with the expected structure
|
// Assert that the logger was called with the expected structure
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({ err: expect.any(Error) }), // Check for the error object
|
expect.objectContaining({ err: expect.any(Error) }), // Check for the error object
|
||||||
'Failed to fetch component data' // Check for the message
|
'Failed to fetch component data', // Check for the message
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -3,63 +3,260 @@
|
|||||||
// It allows us to define all the settings for our application in one place.
|
// It allows us to define all the settings for our application in one place.
|
||||||
// The .cjs extension is required because the project's package.json has "type": "module".
|
// The .cjs extension is required because the project's package.json has "type": "module".
|
||||||
|
|
||||||
|
// --- Environment Variable Validation ---
|
||||||
|
const requiredSecrets = ['DB_HOST', 'JWT_SECRET', 'GEMINI_API_KEY'];
|
||||||
|
const missingSecrets = requiredSecrets.filter(key => !process.env[key]);
|
||||||
|
|
||||||
|
if (missingSecrets.length > 0) {
|
||||||
|
console.warn('\n[ecosystem.config.cjs] ⚠️ WARNING: The following environment variables are MISSING in the shell:');
|
||||||
|
missingSecrets.forEach(key => console.warn(` - ${key}`));
|
||||||
|
console.warn('[ecosystem.config.cjs] The application may crash if these are required for startup.\n');
|
||||||
|
process.exit(1); // Fail fast so PM2 doesn't attempt to start a broken app
|
||||||
|
} else {
|
||||||
|
console.log('[ecosystem.config.cjs] ✅ Critical environment variables are present.');
|
||||||
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
apps: [
|
apps: [
|
||||||
{
|
{
|
||||||
// --- API Server ---
|
// --- API Server ---
|
||||||
// The name is now dynamically set based on the environment.
|
|
||||||
// This is a common pattern but requires you to call pm2 with the correct name.
|
|
||||||
// The deploy script handles this by using 'flyer-crawler-api' for prod and 'flyer-crawler-api-test' for test.
|
|
||||||
name: 'flyer-crawler-api',
|
name: 'flyer-crawler-api',
|
||||||
|
// Note: The process names below are referenced in .gitea/workflows/ for status checks.
|
||||||
script: './node_modules/.bin/tsx',
|
script: './node_modules/.bin/tsx',
|
||||||
args: 'server.ts', // tsx will execute this file
|
args: 'server.ts',
|
||||||
|
max_memory_restart: '500M',
|
||||||
|
|
||||||
|
// Restart Logic
|
||||||
|
max_restarts: 40,
|
||||||
|
exp_backoff_restart_delay: 100,
|
||||||
|
min_uptime: '10s',
|
||||||
|
|
||||||
// Production Environment Settings
|
// Production Environment Settings
|
||||||
env_production: {
|
env_production: {
|
||||||
NODE_ENV: 'production', // Set the Node.js environment to production
|
NODE_ENV: 'production',
|
||||||
name: 'flyer-crawler-api',
|
name: 'flyer-crawler-api',
|
||||||
cwd: '/var/www/flyer-crawler.projectium.com',
|
cwd: '/var/www/flyer-crawler.projectium.com',
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
},
|
},
|
||||||
// Test Environment Settings
|
// Test Environment Settings
|
||||||
env_test: {
|
env_test: {
|
||||||
NODE_ENV: 'development', // Use 'development' for test to enable more verbose logging if needed
|
NODE_ENV: 'test',
|
||||||
name: 'flyer-crawler-api-test',
|
name: 'flyer-crawler-api-test',
|
||||||
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
|
},
|
||||||
|
// Development Environment Settings
|
||||||
|
env_development: {
|
||||||
|
NODE_ENV: 'development',
|
||||||
|
name: 'flyer-crawler-api-dev',
|
||||||
|
watch: true,
|
||||||
|
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
// --- General Worker ---
|
// --- General Worker ---
|
||||||
name: 'flyer-crawler-worker',
|
name: 'flyer-crawler-worker',
|
||||||
script: './node_modules/.bin/tsx',
|
script: './node_modules/.bin/tsx',
|
||||||
args: 'src/services/queueService.server.ts', // tsx will execute this file
|
args: 'src/services/worker.ts',
|
||||||
|
max_memory_restart: '1G',
|
||||||
|
|
||||||
|
// Restart Logic
|
||||||
|
max_restarts: 40,
|
||||||
|
exp_backoff_restart_delay: 100,
|
||||||
|
min_uptime: '10s',
|
||||||
|
|
||||||
// Production Environment Settings
|
// Production Environment Settings
|
||||||
env_production: {
|
env_production: {
|
||||||
NODE_ENV: 'production',
|
NODE_ENV: 'production',
|
||||||
name: 'flyer-crawler-worker',
|
name: 'flyer-crawler-worker',
|
||||||
cwd: '/var/www/flyer-crawler.projectium.com',
|
cwd: '/var/www/flyer-crawler.projectium.com',
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
},
|
},
|
||||||
// Test Environment Settings
|
// Test Environment Settings
|
||||||
env_test: {
|
env_test: {
|
||||||
NODE_ENV: 'development',
|
NODE_ENV: 'test',
|
||||||
name: 'flyer-crawler-worker-test',
|
name: 'flyer-crawler-worker-test',
|
||||||
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
|
},
|
||||||
|
// Development Environment Settings
|
||||||
|
env_development: {
|
||||||
|
NODE_ENV: 'development',
|
||||||
|
name: 'flyer-crawler-worker-dev',
|
||||||
|
watch: true,
|
||||||
|
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
// --- Analytics Worker ---
|
// --- Analytics Worker ---
|
||||||
name: 'flyer-crawler-analytics-worker',
|
name: 'flyer-crawler-analytics-worker',
|
||||||
script: './node_modules/.bin/tsx',
|
script: './node_modules/.bin/tsx',
|
||||||
args: 'src/services/queueService.server.ts', // tsx will execute this file
|
args: 'src/services/worker.ts',
|
||||||
|
max_memory_restart: '1G',
|
||||||
|
|
||||||
|
// Restart Logic
|
||||||
|
max_restarts: 40,
|
||||||
|
exp_backoff_restart_delay: 100,
|
||||||
|
min_uptime: '10s',
|
||||||
|
|
||||||
// Production Environment Settings
|
// Production Environment Settings
|
||||||
env_production: {
|
env_production: {
|
||||||
NODE_ENV: 'production',
|
NODE_ENV: 'production',
|
||||||
name: 'flyer-crawler-analytics-worker',
|
name: 'flyer-crawler-analytics-worker',
|
||||||
cwd: '/var/www/flyer-crawler.projectium.com',
|
cwd: '/var/www/flyer-crawler.projectium.com',
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
},
|
},
|
||||||
// Test Environment Settings
|
// Test Environment Settings
|
||||||
env_test: {
|
env_test: {
|
||||||
NODE_ENV: 'development',
|
NODE_ENV: 'test',
|
||||||
name: 'flyer-crawler-analytics-worker-test',
|
name: 'flyer-crawler-analytics-worker-test',
|
||||||
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
cwd: '/var/www/flyer-crawler-test.projectium.com',
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
|
},
|
||||||
|
// Development Environment Settings
|
||||||
|
env_development: {
|
||||||
|
NODE_ENV: 'development',
|
||||||
|
name: 'flyer-crawler-analytics-worker-dev',
|
||||||
|
watch: true,
|
||||||
|
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
|
||||||
|
DB_HOST: process.env.DB_HOST,
|
||||||
|
DB_USER: process.env.DB_USER,
|
||||||
|
DB_PASSWORD: process.env.DB_PASSWORD,
|
||||||
|
DB_NAME: process.env.DB_NAME,
|
||||||
|
REDIS_URL: process.env.REDIS_URL,
|
||||||
|
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||||
|
FRONTEND_URL: process.env.FRONTEND_URL,
|
||||||
|
JWT_SECRET: process.env.JWT_SECRET,
|
||||||
|
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
|
||||||
|
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
|
||||||
|
SMTP_HOST: process.env.SMTP_HOST,
|
||||||
|
SMTP_PORT: process.env.SMTP_PORT,
|
||||||
|
SMTP_SECURE: process.env.SMTP_SECURE,
|
||||||
|
SMTP_USER: process.env.SMTP_USER,
|
||||||
|
SMTP_PASS: process.env.SMTP_PASS,
|
||||||
|
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
|||||||
@@ -1,21 +1,21 @@
|
|||||||
import globals from "globals";
|
import globals from 'globals';
|
||||||
import tseslint from "typescript-eslint";
|
import tseslint from 'typescript-eslint';
|
||||||
import pluginReact from "eslint-plugin-react";
|
import pluginReact from 'eslint-plugin-react';
|
||||||
import pluginReactHooks from "eslint-plugin-react-hooks";
|
import pluginReactHooks from 'eslint-plugin-react-hooks';
|
||||||
import pluginReactRefresh from "eslint-plugin-react-refresh";
|
import pluginReactRefresh from 'eslint-plugin-react-refresh';
|
||||||
|
|
||||||
export default tseslint.config(
|
export default tseslint.config(
|
||||||
{
|
{
|
||||||
// Global ignores
|
// Global ignores
|
||||||
ignores: ["dist", ".gitea", "node_modules", "*.cjs"],
|
ignores: ['dist', '.gitea', 'node_modules', '*.cjs'],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
// All files
|
// All files
|
||||||
files: ["**/*.{js,mjs,cjs,ts,jsx,tsx}"],
|
files: ['**/*.{js,mjs,cjs,ts,jsx,tsx}'],
|
||||||
plugins: {
|
plugins: {
|
||||||
react: pluginReact,
|
react: pluginReact,
|
||||||
"react-hooks": pluginReactHooks,
|
'react-hooks': pluginReactHooks,
|
||||||
"react-refresh": pluginReactRefresh,
|
'react-refresh': pluginReactRefresh,
|
||||||
},
|
},
|
||||||
languageOptions: {
|
languageOptions: {
|
||||||
globals: {
|
globals: {
|
||||||
@@ -24,10 +24,7 @@ export default tseslint.config(
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
rules: {
|
rules: {
|
||||||
"react-refresh/only-export-components": [
|
'react-refresh/only-export-components': ['warn', { allowConstantExport: true }],
|
||||||
"warn",
|
|
||||||
{ allowConstantExport: true },
|
|
||||||
],
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
// TypeScript files
|
// TypeScript files
|
||||||
|
|||||||
2
express.d.ts
vendored
2
express.d.ts
vendored
@@ -1,4 +1,4 @@
|
|||||||
// src/types/express.d.ts
|
// express.d.ts
|
||||||
import { Logger } from 'pino';
|
import { Logger } from 'pino';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
14
index.html
14
index.html
@@ -1,8 +1,8 @@
|
|||||||
<!DOCTYPE html>
|
<!doctype html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8">
|
<meta charset="UTF-8" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<title>Grocery Flyer AI Analyzer</title>
|
<title>Grocery Flyer AI Analyzer</title>
|
||||||
<style>
|
<style>
|
||||||
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap');
|
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap');
|
||||||
@@ -11,10 +11,10 @@
|
|||||||
}
|
}
|
||||||
</style>
|
</style>
|
||||||
<!-- The stylesheet will be injected here by Vite during the build process -->
|
<!-- The stylesheet will be injected here by Vite during the build process -->
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div id="root"></div>
|
<div id="root"></div>
|
||||||
<!-- Vite will inject the correct <script> tag here during the build process -->
|
<!-- Vite will inject the correct <script> tag here during the build process -->
|
||||||
<script type="module" src="/src/index.tsx"></script>
|
<script type="module" src="/src/index.tsx"></script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
@@ -1,8 +1,5 @@
|
|||||||
{
|
{
|
||||||
"name": "Flyer Crawler",
|
"name": "Flyer Crawler",
|
||||||
"description": "Upload a grocery store flyer image to extract item details, prices, and quantities using AI. Get insights, meal plans, and compare prices to save money on your shopping.",
|
"description": "Upload a grocery store flyer image to extract item details, prices, and quantities using AI. Get insights, meal plans, and compare prices to save money on your shopping.",
|
||||||
"requestFramePermissions": [
|
"requestFramePermissions": ["geolocation", "microphone"]
|
||||||
"geolocation",
|
|
||||||
"microphone"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
118
notes-to-ai4.txt
Normal file
118
notes-to-ai4.txt
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
RULES:
|
||||||
|
1) if you do not have a file that you need, stop, and request it immediately.
|
||||||
|
2) never remove logging or comments
|
||||||
|
3) you cannot ever use 'any' or 'unknown' to solve possible typescript issues
|
||||||
|
4) when creating new files, output there entire path in your explanation, to make it easier to know where to save those new files and directories to
|
||||||
|
5) add comments when you can, as that will help ensure ideas persist into the app
|
||||||
|
6) Your knowledge of package version, like nodejs, is always old, like a year or more old - ask me for the best version to use, as your knowledge is incomplete
|
||||||
|
7) Stop making predictions and/or guessing at solutions. Focus on adding logging and debugging to issues that are not solved right away.
|
||||||
|
8) Do not make obsequious statements - we're here to do a job, not get patted on the shoulder for insignificant achievements.
|
||||||
|
9) Provide me with the npm command to execute rather than wanting to edit the package.json file. That is not the correct way to handle a package update.
|
||||||
|
10) Provide the code solution in DIFF format for brevity.
|
||||||
|
11) Always add logging and debugging to prove a solution works, and only remove logging and debugging when it is not possible to adapt it to the new conditions.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
UPC SCANNING !
|
||||||
|
|
||||||
|
EXPIRY DATES !
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
are there any other unit test files that should be refactored :
|
||||||
|
|
||||||
|
Extract Shared Utility Functions Several utility functions are duplicated across multiple route files.
|
||||||
|
|
||||||
|
Update Route Files to Use New Utilities route files will be updated to import these new utility functions
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Review the `getJobStatus` API client function and add more robust error handling for non-JSON responses.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Review the `src/services/apiClient.test.ts` file. It seems to have a lot of manual data creation for API payloads. Can we refactor it to use factories where appropriate, or create new factories for API payloads if needed?
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
The `useApi` hook has complex state management. Can you suggest a way to simplify it, perhaps by using a reducer or a state machine?
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Are there any opportunities to improve the mock factories themselves, for example, by adding more realistic default data?
|
||||||
|
|
||||||
|
Now that the user creation is centralized, can we add more complex test user scenarios to the helper, like a user with a pre-existing shopping list?
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
How can I add a new "Recipes Created" stat to the AdminStatsPage and its tests?
|
||||||
|
|
||||||
|
NO CODE just outline the steps
|
||||||
|
|
||||||
|
|
||||||
|
Gemini
|
||||||
|
Outlining the Implementation Path
|
||||||
|
Gemini 2.5 generated this response due to high traffic on Gemini 3
|
||||||
|
|
||||||
|
Of course! Here is a step-by-step outline of how to add a new "Recipes Created" statistic to your AdminStatsPage.
|
||||||
|
|
||||||
|
Backend Steps
|
||||||
|
|
||||||
|
|
||||||
|
Update Database Query:
|
||||||
|
|
||||||
|
In the database repository file responsible for fetching admin statistics (e.g., admin.db.ts), you'll need to add a query that performs a COUNT(*) on your recipes table.
|
||||||
|
|
||||||
|
|
||||||
|
Update Service Layer:
|
||||||
|
|
||||||
|
The service function that aggregates the statistics (which is called by your API controller) needs to be modified to execute the new recipe count query.
|
||||||
|
The result of this query should be added to the statistics object that is returned.
|
||||||
|
|
||||||
|
|
||||||
|
Update API Endpoint:
|
||||||
|
|
||||||
|
The controller for the /api/admin/stats endpoint must be updated to include the new recipeCount field in the JSON object it sends to the frontend.
|
||||||
|
Frontend Steps
|
||||||
|
|
||||||
|
|
||||||
|
Update AppStats Type:
|
||||||
|
|
||||||
|
In src/services/apiClient.ts, where the AppStats type is defined, add the new property: recipeCount: number;.
|
||||||
|
Update Mock Factory:
|
||||||
|
|
||||||
|
In src/tests/utils/mockFactories.ts, modify the createMockAppStats function. Add recipeCount to the defaultStats object with a default value (e.g., recipeCount: 500). This ensures any test that uses the factory will not break due to the missing property.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Update AdminStatsPage Component (AdminStatsPage.tsx):
|
||||||
|
|
||||||
|
Import a suitable icon for recipes, like BookOpenIcon, at the top of the file.
|
||||||
|
Inside the main div where the other StatCard components are rendered, add a new one for the recipe count:
|
||||||
|
jsx
|
||||||
|
<StatCard title="Total Recipes" value={stats.recipeCount.toLocaleString()} icon={<BookOpenIcon className="w-6 h-6" />} />
|
||||||
|
Update AdminStatsPage Tests (AdminStatsPage.test.tsx):
|
||||||
|
|
||||||
|
In the test 'should display stats cards when data is fetched successfully', update the mockStats object to include a value for recipeCount. Add a new expect assertion to verify that the "Total Recipes" title and its corresponding value are rendered.
|
||||||
|
In the test 'should format large numbers with commas for readability', add a large number for recipeCount in the mock data and assert that it is rendered with commas.
|
||||||
|
In the test 'should correctly display zero values for all stats', add recipeCount: 0 to the mock data and increase the expected count of "0" elements found in the document from 5 to 6.
|
||||||
4331
package-lock.json
generated
4331
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
18
package.json
18
package.json
@@ -1,17 +1,19 @@
|
|||||||
{
|
{
|
||||||
"name": "flyer-crawler",
|
"name": "flyer-crawler",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "0.0.8",
|
"version": "0.7.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
"dev": "concurrently \"npm:start:dev\" \"vite\"",
|
||||||
|
"dev:container": "concurrently \"npm:start:dev\" \"vite --host\"",
|
||||||
"start": "npm run start:prod",
|
"start": "npm run start:prod",
|
||||||
"build": "vite build",
|
"build": "vite build",
|
||||||
"preview": "vite preview",
|
"preview": "vite preview",
|
||||||
"test": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
|
"test": "cross-env NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
|
||||||
|
"test-wsl": "cross-env NODE_ENV=test vitest run",
|
||||||
"test:coverage": "npm run clean && npm run test:unit -- --coverage && npm run test:integration -- --coverage",
|
"test:coverage": "npm run clean && npm run test:unit -- --coverage && npm run test:integration -- --coverage",
|
||||||
"test:unit": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
|
"test:unit": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
|
||||||
"test:integration": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
|
"test:integration": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
|
||||||
"format": "prettier --write .",
|
"format": "prettier --write .",
|
||||||
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
|
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
|
||||||
"type-check": "tsc --noEmit",
|
"type-check": "tsc --noEmit",
|
||||||
@@ -20,6 +22,7 @@
|
|||||||
"start:dev": "NODE_ENV=development tsx watch server.ts",
|
"start:dev": "NODE_ENV=development tsx watch server.ts",
|
||||||
"start:prod": "NODE_ENV=production tsx server.ts",
|
"start:prod": "NODE_ENV=production tsx server.ts",
|
||||||
"start:test": "NODE_ENV=test NODE_V8_COVERAGE=.coverage/tmp/integration-server tsx server.ts",
|
"start:test": "NODE_ENV=test NODE_V8_COVERAGE=.coverage/tmp/integration-server tsx server.ts",
|
||||||
|
"db:reset:dev": "NODE_ENV=development tsx src/db/seed.ts",
|
||||||
"db:reset:test": "NODE_ENV=test tsx src/db/seed.ts",
|
"db:reset:test": "NODE_ENV=test tsx src/db/seed.ts",
|
||||||
"worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts"
|
"worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts"
|
||||||
},
|
},
|
||||||
@@ -27,12 +30,14 @@
|
|||||||
"@bull-board/api": "^6.14.2",
|
"@bull-board/api": "^6.14.2",
|
||||||
"@bull-board/express": "^6.14.2",
|
"@bull-board/express": "^6.14.2",
|
||||||
"@google/genai": "^1.30.0",
|
"@google/genai": "^1.30.0",
|
||||||
|
"@tanstack/react-query": "^5.90.12",
|
||||||
"@types/connect-timeout": "^1.9.0",
|
"@types/connect-timeout": "^1.9.0",
|
||||||
"bcrypt": "^5.1.1",
|
"bcrypt": "^5.1.1",
|
||||||
"bullmq": "^5.65.1",
|
"bullmq": "^5.65.1",
|
||||||
"connect-timeout": "^1.9.1",
|
"connect-timeout": "^1.9.1",
|
||||||
"cookie-parser": "^1.4.7",
|
"cookie-parser": "^1.4.7",
|
||||||
"date-fns": "^4.1.0",
|
"date-fns": "^4.1.0",
|
||||||
|
"exif-parser": "^0.1.12",
|
||||||
"express": "^5.1.0",
|
"express": "^5.1.0",
|
||||||
"express-list-endpoints": "^7.1.1",
|
"express-list-endpoints": "^7.1.1",
|
||||||
"express-rate-limit": "^8.2.1",
|
"express-rate-limit": "^8.2.1",
|
||||||
@@ -50,6 +55,7 @@
|
|||||||
"passport-local": "^1.0.0",
|
"passport-local": "^1.0.0",
|
||||||
"pdfjs-dist": "^5.4.394",
|
"pdfjs-dist": "^5.4.394",
|
||||||
"pg": "^8.16.3",
|
"pg": "^8.16.3",
|
||||||
|
"piexifjs": "^1.0.6",
|
||||||
"pino": "^10.1.0",
|
"pino": "^10.1.0",
|
||||||
"react": "^19.2.0",
|
"react": "^19.2.0",
|
||||||
"react-dom": "^19.2.0",
|
"react-dom": "^19.2.0",
|
||||||
@@ -58,7 +64,7 @@
|
|||||||
"recharts": "^3.4.1",
|
"recharts": "^3.4.1",
|
||||||
"sharp": "^0.34.5",
|
"sharp": "^0.34.5",
|
||||||
"tsx": "^4.20.6",
|
"tsx": "^4.20.6",
|
||||||
"zod": "^4.1.13",
|
"zod": "^4.2.1",
|
||||||
"zxcvbn": "^4.4.2"
|
"zxcvbn": "^4.4.2"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@@ -81,6 +87,7 @@
|
|||||||
"@types/passport-jwt": "^4.0.1",
|
"@types/passport-jwt": "^4.0.1",
|
||||||
"@types/passport-local": "^1.0.38",
|
"@types/passport-local": "^1.0.38",
|
||||||
"@types/pg": "^8.15.6",
|
"@types/pg": "^8.15.6",
|
||||||
|
"@types/piexifjs": "^1.0.0",
|
||||||
"@types/pino": "^7.0.4",
|
"@types/pino": "^7.0.4",
|
||||||
"@types/react": "^19.2.7",
|
"@types/react": "^19.2.7",
|
||||||
"@types/react-dom": "^19.2.3",
|
"@types/react-dom": "^19.2.3",
|
||||||
@@ -95,6 +102,7 @@
|
|||||||
"autoprefixer": "^10.4.22",
|
"autoprefixer": "^10.4.22",
|
||||||
"c8": "^10.1.3",
|
"c8": "^10.1.3",
|
||||||
"concurrently": "^9.2.1",
|
"concurrently": "^9.2.1",
|
||||||
|
"cross-env": "^10.1.0",
|
||||||
"eslint": "9.39.1",
|
"eslint": "9.39.1",
|
||||||
"eslint-config-prettier": "^9.1.0",
|
"eslint-config-prettier": "^9.1.0",
|
||||||
"eslint-plugin-react": "7.37.5",
|
"eslint-plugin-react": "7.37.5",
|
||||||
|
|||||||
@@ -10,7 +10,10 @@ const tailwindConfigPath = path.resolve(process.cwd(), 'tailwind.config.js');
|
|||||||
console.log(`[POSTCSS] Attempting to use Tailwind config at: ${tailwindConfigPath}`);
|
console.log(`[POSTCSS] Attempting to use Tailwind config at: ${tailwindConfigPath}`);
|
||||||
|
|
||||||
// Log to prove the imported config object is what we expect
|
// Log to prove the imported config object is what we expect
|
||||||
console.log('[POSTCSS] Imported tailwind.config.js object:', JSON.stringify(tailwindConfig, null, 2));
|
console.log(
|
||||||
|
'[POSTCSS] Imported tailwind.config.js object:',
|
||||||
|
JSON.stringify(tailwindConfig, null, 2),
|
||||||
|
);
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
plugins: {
|
plugins: {
|
||||||
|
|||||||
@@ -1030,11 +1030,61 @@ DROP FUNCTION IF EXISTS public.fork_recipe(UUID, BIGINT);
|
|||||||
|
|
||||||
CREATE OR REPLACE FUNCTION public.fork_recipe(p_user_id UUID, p_original_recipe_id BIGINT)
|
CREATE OR REPLACE FUNCTION public.fork_recipe(p_user_id UUID, p_original_recipe_id BIGINT)
|
||||||
RETURNS SETOF public.recipes
|
RETURNS SETOF public.recipes
|
||||||
LANGUAGE sql
|
LANGUAGE plpgsql
|
||||||
SECURITY INVOKER
|
SECURITY INVOKER
|
||||||
AS $$
|
AS $$
|
||||||
-- The entire forking logic is now encapsulated in a single, atomic database function.
|
DECLARE
|
||||||
SELECT * FROM public.fork_recipe(p_user_id, p_original_recipe_id);
|
new_recipe_id BIGINT;
|
||||||
|
BEGIN
|
||||||
|
-- 1. Create a copy of the recipe, linking it to the new user and the original recipe.
|
||||||
|
INSERT INTO public.recipes (
|
||||||
|
user_id,
|
||||||
|
original_recipe_id,
|
||||||
|
name,
|
||||||
|
description,
|
||||||
|
instructions,
|
||||||
|
prep_time_minutes,
|
||||||
|
cook_time_minutes,
|
||||||
|
servings,
|
||||||
|
photo_url,
|
||||||
|
calories_per_serving,
|
||||||
|
protein_grams,
|
||||||
|
fat_grams,
|
||||||
|
carb_grams,
|
||||||
|
status -- Forked recipes should be private by default
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
p_user_id,
|
||||||
|
p_original_recipe_id,
|
||||||
|
original.name || ' (Fork)', -- Append '(Fork)' to distinguish it
|
||||||
|
original.description,
|
||||||
|
original.instructions,
|
||||||
|
original.prep_time_minutes,
|
||||||
|
original.cook_time_minutes,
|
||||||
|
original.servings,
|
||||||
|
original.photo_url,
|
||||||
|
original.calories_per_serving,
|
||||||
|
original.protein_grams,
|
||||||
|
original.fat_grams,
|
||||||
|
original.carb_grams,
|
||||||
|
'private'
|
||||||
|
FROM public.recipes AS original
|
||||||
|
WHERE original.recipe_id = p_original_recipe_id
|
||||||
|
RETURNING recipe_id INTO new_recipe_id;
|
||||||
|
|
||||||
|
-- If the original recipe didn't exist, new_recipe_id will be null.
|
||||||
|
IF new_recipe_id IS NULL THEN
|
||||||
|
RETURN;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- 2. Copy all ingredients, tags, and appliances from the original recipe to the new one.
|
||||||
|
INSERT INTO public.recipe_ingredients (recipe_id, master_item_id, quantity, unit) SELECT new_recipe_id, master_item_id, quantity, unit FROM public.recipe_ingredients WHERE recipe_id = p_original_recipe_id;
|
||||||
|
INSERT INTO public.recipe_tags (recipe_id, tag_id) SELECT new_recipe_id, tag_id FROM public.recipe_tags WHERE recipe_id = p_original_recipe_id;
|
||||||
|
INSERT INTO public.recipe_appliances (recipe_id, appliance_id) SELECT new_recipe_id, appliance_id FROM public.recipe_appliances WHERE recipe_id = p_original_recipe_id;
|
||||||
|
|
||||||
|
-- 3. Return the newly created recipe record.
|
||||||
|
RETURN QUERY SELECT * FROM public.recipes WHERE recipe_id = new_recipe_id;
|
||||||
|
END;
|
||||||
$$;
|
$$;
|
||||||
|
|
||||||
|
|
||||||
@@ -1566,4 +1616,3 @@ BEGIN
|
|||||||
bp.price_rank = 1;
|
bp.price_rank = 1;
|
||||||
END;
|
END;
|
||||||
$$ LANGUAGE plpgsql;
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,23 @@
|
|||||||
-- It is idempotent, meaning it can be run multiple times without causing errors.
|
-- It is idempotent, meaning it can be run multiple times without causing errors.
|
||||||
|
|
||||||
-- 1. Pre-populate the master grocery items dictionary.
|
-- 1. Pre-populate the master grocery items dictionary.
|
||||||
-- This block links generic items to their respective categories.
|
-- This MUST run after populating categories.
|
||||||
|
-- Renumbered to 2.
|
||||||
|
|
||||||
|
-- 2. Pre-populate the categories table from a predefined list.
|
||||||
|
-- Renumbered to 1. This MUST run before populating master_grocery_items.
|
||||||
|
DO $$
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO public.categories (name) VALUES
|
||||||
|
('Fruits & Vegetables'), ('Meat & Seafood'), ('Dairy & Eggs'), ('Bakery & Bread'),
|
||||||
|
('Pantry & Dry Goods'), ('Beverages'), ('Frozen Foods'), ('Snacks'), ('Household & Cleaning'),
|
||||||
|
('Personal Care & Health'), ('Baby & Child'), ('Pet Supplies'), ('Deli & Prepared Foods'),
|
||||||
|
('Canned Goods'), ('Condiments & Spices'), ('Breakfast & Cereal'), ('Organic'),
|
||||||
|
('International Foods'), ('Other/Miscellaneous')
|
||||||
|
ON CONFLICT (name) DO NOTHING;
|
||||||
|
END $$;
|
||||||
|
|
||||||
|
-- 2. Pre-populate the master grocery items dictionary.
|
||||||
DO $$
|
DO $$
|
||||||
DECLARE
|
DECLARE
|
||||||
fv_cat_id BIGINT; ms_cat_id BIGINT; de_cat_id BIGINT; bb_cat_id BIGINT; pdg_cat_id BIGINT;
|
fv_cat_id BIGINT; ms_cat_id BIGINT; de_cat_id BIGINT; bb_cat_id BIGINT; pdg_cat_id BIGINT;
|
||||||
@@ -53,18 +69,6 @@ BEGIN
|
|||||||
ON CONFLICT (name) DO NOTHING;
|
ON CONFLICT (name) DO NOTHING;
|
||||||
END $$;
|
END $$;
|
||||||
|
|
||||||
-- 2. Pre-populate the categories table from a predefined list.
|
|
||||||
DO $$
|
|
||||||
BEGIN
|
|
||||||
INSERT INTO public.categories (name) VALUES
|
|
||||||
('Fruits & Vegetables'), ('Meat & Seafood'), ('Dairy & Eggs'), ('Bakery & Bread'),
|
|
||||||
('Pantry & Dry Goods'), ('Beverages'), ('Frozen Foods'), ('Snacks'), ('Household & Cleaning'),
|
|
||||||
('Personal Care & Health'), ('Baby & Child'), ('Pet Supplies'), ('Deli & Prepared Foods'),
|
|
||||||
('Canned Goods'), ('Condiments & Spices'), ('Breakfast & Cereal'), ('Organic'),
|
|
||||||
('International Foods'), ('Other/Miscellaneous')
|
|
||||||
ON CONFLICT (name) DO NOTHING;
|
|
||||||
END $$;
|
|
||||||
|
|
||||||
-- 3. Pre-populate the brands and products tables.
|
-- 3. Pre-populate the brands and products tables.
|
||||||
-- This block adds common brands and links them to specific products.
|
-- This block adds common brands and links them to specific products.
|
||||||
DO $$
|
DO $$
|
||||||
|
|||||||
@@ -92,6 +92,7 @@ CREATE TABLE IF NOT EXISTS public.stores (
|
|||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
|
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
|
||||||
|
);
|
||||||
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';
|
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';
|
||||||
|
|
||||||
-- 5. The 'categories' table for normalized category data.
|
-- 5. The 'categories' table for normalized category data.
|
||||||
@@ -110,10 +111,11 @@ CREATE TABLE IF NOT EXISTS public.flyers (
|
|||||||
image_url TEXT NOT NULL,
|
image_url TEXT NOT NULL,
|
||||||
icon_url TEXT,
|
icon_url TEXT,
|
||||||
checksum TEXT UNIQUE,
|
checksum TEXT UNIQUE,
|
||||||
store_id BIGINT REFERENCES public.stores(store_id),
|
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||||
valid_from DATE,
|
valid_from DATE,
|
||||||
valid_to DATE,
|
valid_to DATE,
|
||||||
store_address TEXT,
|
store_address TEXT,
|
||||||
|
status TEXT DEFAULT 'processed' NOT NULL CHECK (status IN ('processed', 'needs_review', 'archived')),
|
||||||
item_count INTEGER DEFAULT 0 NOT NULL,
|
item_count INTEGER DEFAULT 0 NOT NULL,
|
||||||
uploaded_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
|
uploaded_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
@@ -129,16 +131,18 @@ COMMENT ON COLUMN public.flyers.store_id IS 'Foreign key linking this flyer to a
|
|||||||
COMMENT ON COLUMN public.flyers.valid_from IS 'The start date of the sale period for this flyer, extracted by the AI.';
|
COMMENT ON COLUMN public.flyers.valid_from IS 'The start date of the sale period for this flyer, extracted by the AI.';
|
||||||
COMMENT ON COLUMN public.flyers.valid_to IS 'The end date of the sale period for this flyer, extracted by the AI.';
|
COMMENT ON COLUMN public.flyers.valid_to IS 'The end date of the sale period for this flyer, extracted by the AI.';
|
||||||
COMMENT ON COLUMN public.flyers.store_address IS 'The physical store address if it was successfully extracted from the flyer image.';
|
COMMENT ON COLUMN public.flyers.store_address IS 'The physical store address if it was successfully extracted from the flyer image.';
|
||||||
|
COMMENT ON COLUMN public.flyers.status IS 'The processing status of the flyer, e.g., if it needs manual review.';
|
||||||
COMMENT ON COLUMN public.flyers.item_count IS 'A cached count of the number of items in this flyer, maintained by a trigger.';
|
COMMENT ON COLUMN public.flyers.item_count IS 'A cached count of the number of items in this flyer, maintained by a trigger.';
|
||||||
COMMENT ON COLUMN public.flyers.uploaded_by IS 'The user who uploaded the flyer. Can be null for anonymous or system uploads.';
|
COMMENT ON COLUMN public.flyers.uploaded_by IS 'The user who uploaded the flyer. Can be null for anonymous or system uploads.';
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_flyers_created_at ON public.flyers (created_at DESC);
|
CREATE INDEX IF NOT EXISTS idx_flyers_created_at ON public.flyers (created_at DESC);
|
||||||
CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid_to DESC, file_name ASC);
|
CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid_to DESC, file_name ASC);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_flyers_status ON public.flyers(status);
|
||||||
-- 7. The 'master_grocery_items' table. This is the master dictionary.
|
-- 7. The 'master_grocery_items' table. This is the master dictionary.
|
||||||
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
|
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
|
||||||
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
name TEXT NOT NULL UNIQUE,
|
name TEXT NOT NULL UNIQUE,
|
||||||
category_id BIGINT REFERENCES public.categories(category_id),
|
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
|
||||||
is_allergen BOOLEAN DEFAULT false,
|
is_allergen BOOLEAN DEFAULT false,
|
||||||
allergy_info JSONB,
|
allergy_info JSONB,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
@@ -161,6 +165,38 @@ CREATE TABLE IF NOT EXISTS public.user_watched_items (
|
|||||||
COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';
|
COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_watched_items_master_item_id ON public.user_watched_items(master_item_id);
|
CREATE INDEX IF NOT EXISTS idx_user_watched_items_master_item_id ON public.user_watched_items(master_item_id);
|
||||||
|
|
||||||
|
-- 23. Store brand information. (Moved up due to dependency in flyer_items)
|
||||||
|
CREATE TABLE IF NOT EXISTS public.brands (
|
||||||
|
brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
|
name TEXT NOT NULL UNIQUE,
|
||||||
|
logo_url TEXT,
|
||||||
|
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
|
||||||
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
|
);
|
||||||
|
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
|
||||||
|
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
|
||||||
|
|
||||||
|
-- 24. For specific products, linking a master item with a brand and size. (Moved up due to dependency in flyer_items)
|
||||||
|
CREATE TABLE IF NOT EXISTS public.products (
|
||||||
|
product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
|
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
|
brand_id BIGINT REFERENCES public.brands(brand_id) ON DELETE SET NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
description TEXT,
|
||||||
|
size TEXT,
|
||||||
|
upc_code TEXT UNIQUE,
|
||||||
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
|
);
|
||||||
|
COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
|
||||||
|
COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
|
||||||
|
COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
|
||||||
|
COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
|
||||||
|
COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
|
||||||
|
|
||||||
-- 9. The 'flyer_items' table. This stores individual items from flyers.
|
-- 9. The 'flyer_items' table. This stores individual items from flyers.
|
||||||
CREATE TABLE IF NOT EXISTS public.flyer_items (
|
CREATE TABLE IF NOT EXISTS public.flyer_items (
|
||||||
flyer_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
flyer_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
@@ -170,13 +206,13 @@ CREATE TABLE IF NOT EXISTS public.flyer_items (
|
|||||||
price_in_cents INTEGER,
|
price_in_cents INTEGER,
|
||||||
quantity_num NUMERIC,
|
quantity_num NUMERIC,
|
||||||
quantity TEXT NOT NULL,
|
quantity TEXT NOT NULL,
|
||||||
category_id BIGINT REFERENCES public.categories(category_id),
|
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
|
||||||
category_name TEXT,
|
category_name TEXT,
|
||||||
unit_price JSONB,
|
unit_price JSONB,
|
||||||
view_count INTEGER DEFAULT 0 NOT NULL,
|
view_count INTEGER DEFAULT 0 NOT NULL,
|
||||||
click_count INTEGER DEFAULT 0 NOT NULL,
|
click_count INTEGER DEFAULT 0 NOT NULL,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||||
product_id BIGINT,
|
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
);
|
);
|
||||||
@@ -293,7 +329,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_lists_user_id ON public.shopping_lists(u
|
|||||||
CREATE TABLE IF NOT EXISTS public.shopping_list_items (
|
CREATE TABLE IF NOT EXISTS public.shopping_list_items (
|
||||||
shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
|
shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
custom_item_name TEXT,
|
custom_item_name TEXT,
|
||||||
quantity NUMERIC DEFAULT 1 NOT NULL,
|
quantity NUMERIC DEFAULT 1 NOT NULL,
|
||||||
is_purchased BOOLEAN DEFAULT false NOT NULL,
|
is_purchased BOOLEAN DEFAULT false NOT NULL,
|
||||||
@@ -358,7 +394,7 @@ CREATE INDEX IF NOT EXISTS idx_shared_menu_plans_shared_with_user_id ON public.s
|
|||||||
CREATE TABLE IF NOT EXISTS public.suggested_corrections (
|
CREATE TABLE IF NOT EXISTS public.suggested_corrections (
|
||||||
suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
|
flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
|
||||||
user_id UUID NOT NULL REFERENCES public.users(user_id),
|
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||||
correction_type TEXT NOT NULL,
|
correction_type TEXT NOT NULL,
|
||||||
suggested_value TEXT NOT NULL,
|
suggested_value TEXT NOT NULL,
|
||||||
status TEXT DEFAULT 'pending' NOT NULL,
|
status TEXT DEFAULT 'pending' NOT NULL,
|
||||||
@@ -378,9 +414,9 @@ CREATE INDEX IF NOT EXISTS idx_suggested_corrections_pending ON public.suggested
|
|||||||
-- 21. For prices submitted directly by users from in-store.
|
-- 21. For prices submitted directly by users from in-store.
|
||||||
CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
|
CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
|
||||||
user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
user_id UUID NOT NULL REFERENCES public.users(user_id),
|
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
store_id BIGINT NOT NULL REFERENCES public.stores(store_id),
|
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||||
price_in_cents INTEGER NOT NULL,
|
price_in_cents INTEGER NOT NULL,
|
||||||
photo_url TEXT,
|
photo_url TEXT,
|
||||||
upvotes INTEGER DEFAULT 0 NOT NULL,
|
upvotes INTEGER DEFAULT 0 NOT NULL,
|
||||||
@@ -408,38 +444,6 @@ COMMENT ON TABLE public.unmatched_flyer_items IS 'A queue for reviewing flyer it
|
|||||||
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_flyer_item_id ON public.unmatched_flyer_items(flyer_item_id);
|
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_flyer_item_id ON public.unmatched_flyer_items(flyer_item_id);
|
||||||
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_pending ON public.unmatched_flyer_items (created_at) WHERE status = 'pending';
|
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_pending ON public.unmatched_flyer_items (created_at) WHERE status = 'pending';
|
||||||
|
|
||||||
-- 23. Store brand information.
|
|
||||||
CREATE TABLE IF NOT EXISTS public.brands (
|
|
||||||
brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
|
||||||
name TEXT NOT NULL UNIQUE,
|
|
||||||
logo_url TEXT,
|
|
||||||
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
|
||||||
);
|
|
||||||
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
|
|
||||||
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
|
|
||||||
|
|
||||||
-- 24. For specific products, linking a master item with a brand and size.
|
|
||||||
CREATE TABLE IF NOT EXISTS public.products (
|
|
||||||
product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
|
||||||
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
|
|
||||||
brand_id BIGINT REFERENCES public.brands(brand_id),
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
description TEXT,
|
|
||||||
size TEXT,
|
|
||||||
upc_code TEXT UNIQUE,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
|
||||||
);
|
|
||||||
COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
|
|
||||||
COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
|
|
||||||
COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
|
|
||||||
COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
|
|
||||||
COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
|
|
||||||
|
|
||||||
-- 25. Linking table for when one flyer is valid for multiple locations.
|
-- 25. Linking table for when one flyer is valid for multiple locations.
|
||||||
CREATE TABLE IF NOT EXISTS public.flyer_locations (
|
CREATE TABLE IF NOT EXISTS public.flyer_locations (
|
||||||
flyer_id BIGINT NOT NULL REFERENCES public.flyers(flyer_id) ON DELETE CASCADE,
|
flyer_id BIGINT NOT NULL REFERENCES public.flyers(flyer_id) ON DELETE CASCADE,
|
||||||
@@ -495,7 +499,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_recipes_unique_system_recipe_name ON publi
|
|||||||
CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
|
CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
|
||||||
recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
|
recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
quantity NUMERIC NOT NULL,
|
quantity NUMERIC NOT NULL,
|
||||||
unit TEXT NOT NULL,
|
unit TEXT NOT NULL,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
@@ -779,7 +783,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_trips_shopping_list_id ON public.shoppin
|
|||||||
CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
|
CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
|
||||||
shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
|
shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||||
custom_item_name TEXT,
|
custom_item_name TEXT,
|
||||||
quantity NUMERIC NOT NULL,
|
quantity NUMERIC NOT NULL,
|
||||||
price_paid_cents INTEGER,
|
price_paid_cents INTEGER,
|
||||||
@@ -843,7 +847,7 @@ CREATE INDEX IF NOT EXISTS idx_user_follows_following_id ON public.user_follows(
|
|||||||
CREATE TABLE IF NOT EXISTS public.receipts (
|
CREATE TABLE IF NOT EXISTS public.receipts (
|
||||||
receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||||
store_id BIGINT REFERENCES public.stores(store_id),
|
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||||
receipt_image_url TEXT NOT NULL,
|
receipt_image_url TEXT NOT NULL,
|
||||||
transaction_date TIMESTAMPTZ,
|
transaction_date TIMESTAMPTZ,
|
||||||
total_amount_cents INTEGER,
|
total_amount_cents INTEGER,
|
||||||
@@ -864,8 +868,8 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
|
|||||||
raw_item_description TEXT NOT NULL,
|
raw_item_description TEXT NOT NULL,
|
||||||
quantity NUMERIC DEFAULT 1 NOT NULL,
|
quantity NUMERIC DEFAULT 1 NOT NULL,
|
||||||
price_paid_cents INTEGER NOT NULL,
|
price_paid_cents INTEGER NOT NULL,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||||
product_id BIGINT REFERENCES public.products(product_id),
|
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||||
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
|
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
|
|||||||
@@ -127,10 +127,11 @@ CREATE TABLE IF NOT EXISTS public.flyers (
|
|||||||
image_url TEXT NOT NULL,
|
image_url TEXT NOT NULL,
|
||||||
icon_url TEXT,
|
icon_url TEXT,
|
||||||
checksum TEXT UNIQUE,
|
checksum TEXT UNIQUE,
|
||||||
store_id BIGINT REFERENCES public.stores(store_id),
|
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||||
valid_from DATE,
|
valid_from DATE,
|
||||||
valid_to DATE,
|
valid_to DATE,
|
||||||
store_address TEXT,
|
store_address TEXT,
|
||||||
|
status TEXT DEFAULT 'processed' NOT NULL CHECK (status IN ('processed', 'needs_review', 'archived')),
|
||||||
item_count INTEGER DEFAULT 0 NOT NULL,
|
item_count INTEGER DEFAULT 0 NOT NULL,
|
||||||
uploaded_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
|
uploaded_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
@@ -146,16 +147,18 @@ COMMENT ON COLUMN public.flyers.store_id IS 'Foreign key linking this flyer to a
|
|||||||
COMMENT ON COLUMN public.flyers.valid_from IS 'The start date of the sale period for this flyer, extracted by the AI.';
|
COMMENT ON COLUMN public.flyers.valid_from IS 'The start date of the sale period for this flyer, extracted by the AI.';
|
||||||
COMMENT ON COLUMN public.flyers.valid_to IS 'The end date of the sale period for this flyer, extracted by the AI.';
|
COMMENT ON COLUMN public.flyers.valid_to IS 'The end date of the sale period for this flyer, extracted by the AI.';
|
||||||
COMMENT ON COLUMN public.flyers.store_address IS 'The physical store address if it was successfully extracted from the flyer image.';
|
COMMENT ON COLUMN public.flyers.store_address IS 'The physical store address if it was successfully extracted from the flyer image.';
|
||||||
|
COMMENT ON COLUMN public.flyers.status IS 'The processing status of the flyer, e.g., if it needs manual review.';
|
||||||
COMMENT ON COLUMN public.flyers.item_count IS 'A cached count of the number of items in this flyer, maintained by a trigger.';
|
COMMENT ON COLUMN public.flyers.item_count IS 'A cached count of the number of items in this flyer, maintained by a trigger.';
|
||||||
COMMENT ON COLUMN public.flyers.uploaded_by IS 'The user who uploaded the flyer. Can be null for anonymous or system uploads.';
|
COMMENT ON COLUMN public.flyers.uploaded_by IS 'The user who uploaded the flyer. Can be null for anonymous or system uploads.';
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_flyers_created_at ON public.flyers (created_at DESC);
|
CREATE INDEX IF NOT EXISTS idx_flyers_created_at ON public.flyers (created_at DESC);
|
||||||
CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid_to DESC, file_name ASC);
|
CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid_to DESC, file_name ASC);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_flyers_status ON public.flyers(status);
|
||||||
-- 7. The 'master_grocery_items' table. This is the master dictionary.
|
-- 7. The 'master_grocery_items' table. This is the master dictionary.
|
||||||
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
|
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
|
||||||
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
name TEXT NOT NULL UNIQUE,
|
name TEXT NOT NULL UNIQUE,
|
||||||
category_id BIGINT REFERENCES public.categories(category_id),
|
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
|
||||||
is_allergen BOOLEAN DEFAULT false,
|
is_allergen BOOLEAN DEFAULT false,
|
||||||
allergy_info JSONB,
|
allergy_info JSONB,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
@@ -178,6 +181,38 @@ CREATE TABLE IF NOT EXISTS public.user_watched_items (
|
|||||||
COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';
|
COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_watched_items_master_item_id ON public.user_watched_items(master_item_id);
|
CREATE INDEX IF NOT EXISTS idx_user_watched_items_master_item_id ON public.user_watched_items(master_item_id);
|
||||||
|
|
||||||
|
-- 23. Store brand information. (Moved up due to dependency in flyer_items)
|
||||||
|
CREATE TABLE IF NOT EXISTS public.brands (
|
||||||
|
brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
|
name TEXT NOT NULL UNIQUE,
|
||||||
|
logo_url TEXT,
|
||||||
|
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
|
||||||
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
|
);
|
||||||
|
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
|
||||||
|
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
|
||||||
|
|
||||||
|
-- 24. For specific products, linking a master item with a brand and size. (Moved up due to dependency in flyer_items)
|
||||||
|
CREATE TABLE IF NOT EXISTS public.products (
|
||||||
|
product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
|
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
|
brand_id BIGINT REFERENCES public.brands(brand_id) ON DELETE SET NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
description TEXT,
|
||||||
|
size TEXT,
|
||||||
|
upc_code TEXT UNIQUE,
|
||||||
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
|
);
|
||||||
|
COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
|
||||||
|
COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
|
||||||
|
COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
|
||||||
|
COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
|
||||||
|
COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
|
||||||
|
|
||||||
-- 9. The 'flyer_items' table. This stores individual items from flyers.
|
-- 9. The 'flyer_items' table. This stores individual items from flyers.
|
||||||
CREATE TABLE IF NOT EXISTS public.flyer_items (
|
CREATE TABLE IF NOT EXISTS public.flyer_items (
|
||||||
flyer_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
flyer_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
@@ -187,13 +222,13 @@ CREATE TABLE IF NOT EXISTS public.flyer_items (
|
|||||||
price_in_cents INTEGER,
|
price_in_cents INTEGER,
|
||||||
quantity_num NUMERIC,
|
quantity_num NUMERIC,
|
||||||
quantity TEXT NOT NULL,
|
quantity TEXT NOT NULL,
|
||||||
category_id BIGINT REFERENCES public.categories(category_id),
|
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
|
||||||
category_name TEXT,
|
category_name TEXT,
|
||||||
unit_price JSONB,
|
unit_price JSONB,
|
||||||
view_count INTEGER DEFAULT 0 NOT NULL,
|
view_count INTEGER DEFAULT 0 NOT NULL,
|
||||||
click_count INTEGER DEFAULT 0 NOT NULL,
|
click_count INTEGER DEFAULT 0 NOT NULL,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||||
product_id BIGINT,
|
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
);
|
);
|
||||||
@@ -310,7 +345,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_lists_user_id ON public.shopping_lists(u
|
|||||||
CREATE TABLE IF NOT EXISTS public.shopping_list_items (
|
CREATE TABLE IF NOT EXISTS public.shopping_list_items (
|
||||||
shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
|
shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
custom_item_name TEXT,
|
custom_item_name TEXT,
|
||||||
quantity NUMERIC DEFAULT 1 NOT NULL,
|
quantity NUMERIC DEFAULT 1 NOT NULL,
|
||||||
is_purchased BOOLEAN DEFAULT false NOT NULL,
|
is_purchased BOOLEAN DEFAULT false NOT NULL,
|
||||||
@@ -375,7 +410,7 @@ CREATE INDEX IF NOT EXISTS idx_shared_menu_plans_shared_with_user_id ON public.s
|
|||||||
CREATE TABLE IF NOT EXISTS public.suggested_corrections (
|
CREATE TABLE IF NOT EXISTS public.suggested_corrections (
|
||||||
suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
|
flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
|
||||||
user_id UUID NOT NULL REFERENCES public.users(user_id),
|
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||||
correction_type TEXT NOT NULL,
|
correction_type TEXT NOT NULL,
|
||||||
suggested_value TEXT NOT NULL,
|
suggested_value TEXT NOT NULL,
|
||||||
status TEXT DEFAULT 'pending' NOT NULL,
|
status TEXT DEFAULT 'pending' NOT NULL,
|
||||||
@@ -395,9 +430,9 @@ CREATE INDEX IF NOT EXISTS idx_suggested_corrections_pending ON public.suggested
|
|||||||
-- 21. For prices submitted directly by users from in-store.
|
-- 21. For prices submitted directly by users from in-store.
|
||||||
CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
|
CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
|
||||||
user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
user_id UUID NOT NULL REFERENCES public.users(user_id),
|
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
store_id BIGINT NOT NULL REFERENCES public.stores(store_id),
|
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||||
price_in_cents INTEGER NOT NULL,
|
price_in_cents INTEGER NOT NULL,
|
||||||
photo_url TEXT,
|
photo_url TEXT,
|
||||||
upvotes INTEGER DEFAULT 0 NOT NULL,
|
upvotes INTEGER DEFAULT 0 NOT NULL,
|
||||||
@@ -424,38 +459,6 @@ COMMENT ON TABLE public.unmatched_flyer_items IS 'A queue for reviewing flyer it
|
|||||||
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_flyer_item_id ON public.unmatched_flyer_items(flyer_item_id);
|
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_flyer_item_id ON public.unmatched_flyer_items(flyer_item_id);
|
||||||
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_pending ON public.unmatched_flyer_items (created_at) WHERE status = 'pending';
|
CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_pending ON public.unmatched_flyer_items (created_at) WHERE status = 'pending';
|
||||||
|
|
||||||
-- 23. Store brand information.
|
|
||||||
CREATE TABLE IF NOT EXISTS public.brands (
|
|
||||||
brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
|
||||||
name TEXT NOT NULL UNIQUE,
|
|
||||||
logo_url TEXT,
|
|
||||||
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
|
||||||
);
|
|
||||||
COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
|
|
||||||
COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
|
|
||||||
|
|
||||||
-- 24. For specific products, linking a master item with a brand and size.
|
|
||||||
CREATE TABLE IF NOT EXISTS public.products (
|
|
||||||
product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
|
||||||
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
|
|
||||||
brand_id BIGINT REFERENCES public.brands(brand_id),
|
|
||||||
name TEXT NOT NULL,
|
|
||||||
description TEXT,
|
|
||||||
size TEXT,
|
|
||||||
upc_code TEXT UNIQUE,
|
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
|
||||||
);
|
|
||||||
COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
|
|
||||||
COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
|
|
||||||
COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
|
|
||||||
COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
|
|
||||||
COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
|
|
||||||
|
|
||||||
-- 25. Linking table for when one flyer is valid for multiple locations.
|
-- 25. Linking table for when one flyer is valid for multiple locations.
|
||||||
CREATE TABLE IF NOT EXISTS public.flyer_locations (
|
CREATE TABLE IF NOT EXISTS public.flyer_locations (
|
||||||
flyer_id BIGINT NOT NULL REFERENCES public.flyers(flyer_id) ON DELETE CASCADE,
|
flyer_id BIGINT NOT NULL REFERENCES public.flyers(flyer_id) ON DELETE CASCADE,
|
||||||
@@ -510,7 +513,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_recipes_unique_system_recipe_name ON publi
|
|||||||
CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
|
CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
|
||||||
recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
|
recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
|
||||||
quantity NUMERIC NOT NULL,
|
quantity NUMERIC NOT NULL,
|
||||||
unit TEXT NOT NULL,
|
unit TEXT NOT NULL,
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
@@ -796,7 +799,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_trips_shopping_list_id ON public.shoppin
|
|||||||
CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
|
CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
|
||||||
shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
|
shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||||
custom_item_name TEXT,
|
custom_item_name TEXT,
|
||||||
quantity NUMERIC NOT NULL,
|
quantity NUMERIC NOT NULL,
|
||||||
price_paid_cents INTEGER,
|
price_paid_cents INTEGER,
|
||||||
@@ -862,7 +865,7 @@ CREATE INDEX IF NOT EXISTS idx_user_follows_following_id ON public.user_follows(
|
|||||||
CREATE TABLE IF NOT EXISTS public.receipts (
|
CREATE TABLE IF NOT EXISTS public.receipts (
|
||||||
receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||||
store_id BIGINT REFERENCES public.stores(store_id),
|
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||||
receipt_image_url TEXT NOT NULL,
|
receipt_image_url TEXT NOT NULL,
|
||||||
transaction_date TIMESTAMPTZ,
|
transaction_date TIMESTAMPTZ,
|
||||||
total_amount_cents INTEGER,
|
total_amount_cents INTEGER,
|
||||||
@@ -883,8 +886,8 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
|
|||||||
raw_item_description TEXT NOT NULL,
|
raw_item_description TEXT NOT NULL,
|
||||||
quantity NUMERIC DEFAULT 1 NOT NULL,
|
quantity NUMERIC DEFAULT 1 NOT NULL,
|
||||||
price_paid_cents INTEGER NOT NULL,
|
price_paid_cents INTEGER NOT NULL,
|
||||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||||
product_id BIGINT REFERENCES public.products(product_id),
|
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||||
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
|
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
|
||||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||||
@@ -2128,11 +2131,61 @@ DROP FUNCTION IF EXISTS public.fork_recipe(UUID, BIGINT);
|
|||||||
|
|
||||||
CREATE OR REPLACE FUNCTION public.fork_recipe(p_user_id UUID, p_original_recipe_id BIGINT)
|
CREATE OR REPLACE FUNCTION public.fork_recipe(p_user_id UUID, p_original_recipe_id BIGINT)
|
||||||
RETURNS SETOF public.recipes
|
RETURNS SETOF public.recipes
|
||||||
LANGUAGE sql
|
LANGUAGE plpgsql
|
||||||
SECURITY INVOKER
|
SECURITY INVOKER
|
||||||
AS $$
|
AS $$
|
||||||
-- The entire forking logic is now encapsulated in a single, atomic database function.
|
DECLARE
|
||||||
SELECT * FROM public.fork_recipe(p_user_id, p_original_recipe_id);
|
new_recipe_id BIGINT;
|
||||||
|
BEGIN
|
||||||
|
-- 1. Create a copy of the recipe, linking it to the new user and the original recipe.
|
||||||
|
INSERT INTO public.recipes (
|
||||||
|
user_id,
|
||||||
|
original_recipe_id,
|
||||||
|
name,
|
||||||
|
description,
|
||||||
|
instructions,
|
||||||
|
prep_time_minutes,
|
||||||
|
cook_time_minutes,
|
||||||
|
servings,
|
||||||
|
photo_url,
|
||||||
|
calories_per_serving,
|
||||||
|
protein_grams,
|
||||||
|
fat_grams,
|
||||||
|
carb_grams,
|
||||||
|
status -- Forked recipes should be private by default
|
||||||
|
)
|
||||||
|
SELECT
|
||||||
|
p_user_id,
|
||||||
|
p_original_recipe_id,
|
||||||
|
original.name || ' (Fork)', -- Append '(Fork)' to distinguish it
|
||||||
|
original.description,
|
||||||
|
original.instructions,
|
||||||
|
original.prep_time_minutes,
|
||||||
|
original.cook_time_minutes,
|
||||||
|
original.servings,
|
||||||
|
original.photo_url,
|
||||||
|
original.calories_per_serving,
|
||||||
|
original.protein_grams,
|
||||||
|
original.fat_grams,
|
||||||
|
original.carb_grams,
|
||||||
|
'private'
|
||||||
|
FROM public.recipes AS original
|
||||||
|
WHERE original.recipe_id = p_original_recipe_id
|
||||||
|
RETURNING recipe_id INTO new_recipe_id;
|
||||||
|
|
||||||
|
-- If the original recipe didn't exist, new_recipe_id will be null.
|
||||||
|
IF new_recipe_id IS NULL THEN
|
||||||
|
RETURN;
|
||||||
|
END IF;
|
||||||
|
|
||||||
|
-- 2. Copy all ingredients, tags, and appliances from the original recipe to the new one.
|
||||||
|
INSERT INTO public.recipe_ingredients (recipe_id, master_item_id, quantity, unit) SELECT new_recipe_id, master_item_id, quantity, unit FROM public.recipe_ingredients WHERE recipe_id = p_original_recipe_id;
|
||||||
|
INSERT INTO public.recipe_tags (recipe_id, tag_id) SELECT new_recipe_id, tag_id FROM public.recipe_tags WHERE recipe_id = p_original_recipe_id;
|
||||||
|
INSERT INTO public.recipe_appliances (recipe_id, appliance_id) SELECT new_recipe_id, appliance_id FROM public.recipe_appliances WHERE recipe_id = p_original_recipe_id;
|
||||||
|
|
||||||
|
-- 3. Return the newly created recipe record.
|
||||||
|
RETURN QUERY SELECT * FROM public.recipes WHERE recipe_id = new_recipe_id;
|
||||||
|
END;
|
||||||
$$;
|
$$;
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
362
src/App.test.tsx
362
src/App.test.tsx
@@ -20,6 +20,7 @@ import {
|
|||||||
mockUseUserData,
|
mockUseUserData,
|
||||||
mockUseFlyerItems,
|
mockUseFlyerItems,
|
||||||
} from './tests/setup/mockHooks';
|
} from './tests/setup/mockHooks';
|
||||||
|
import { useAppInitialization } from './hooks/useAppInitialization';
|
||||||
|
|
||||||
// Mock top-level components rendered by App's routes
|
// Mock top-level components rendered by App's routes
|
||||||
|
|
||||||
@@ -36,7 +37,7 @@ vi.mock('pdfjs-dist', () => ({
|
|||||||
// Mock the new config module
|
// Mock the new config module
|
||||||
vi.mock('./config', () => ({
|
vi.mock('./config', () => ({
|
||||||
default: {
|
default: {
|
||||||
app: { version: '1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
|
app: { version: '20250101-1200:abc1234:1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
|
||||||
google: { mapsEmbedApiKey: 'mock-key' },
|
google: { mapsEmbedApiKey: 'mock-key' },
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
@@ -52,6 +53,9 @@ vi.mock('./hooks/useFlyerItems', async () => {
|
|||||||
return { useFlyerItems: hooks.mockUseFlyerItems };
|
return { useFlyerItems: hooks.mockUseFlyerItems };
|
||||||
});
|
});
|
||||||
|
|
||||||
|
vi.mock('./hooks/useAppInitialization');
|
||||||
|
const mockedUseAppInitialization = vi.mocked(useAppInitialization);
|
||||||
|
|
||||||
vi.mock('./hooks/useAuth', async () => {
|
vi.mock('./hooks/useAuth', async () => {
|
||||||
const hooks = await import('./tests/setup/mockHooks');
|
const hooks = await import('./tests/setup/mockHooks');
|
||||||
return { useAuth: hooks.mockUseAuth };
|
return { useAuth: hooks.mockUseAuth };
|
||||||
@@ -122,7 +126,23 @@ vi.mock('./layouts/MainLayout', async () => {
|
|||||||
return { MainLayout: MockMainLayout };
|
return { MainLayout: MockMainLayout };
|
||||||
});
|
});
|
||||||
|
|
||||||
const mockedAiApiClient = vi.mocked(aiApiClient); // Mock aiApiClient
|
vi.mock('./components/AppGuard', async () => {
|
||||||
|
// We need to use the real useModal hook inside our mock AppGuard
|
||||||
|
const { useModal } = await vi.importActual<typeof import('./hooks/useModal')>('./hooks/useModal');
|
||||||
|
return {
|
||||||
|
AppGuard: ({ children }: { children: React.ReactNode }) => {
|
||||||
|
const { isModalOpen } = useModal();
|
||||||
|
return (
|
||||||
|
<div data-testid="app-guard-mock">
|
||||||
|
{children}
|
||||||
|
{isModalOpen('whatsNew') && <div data-testid="whats-new-modal-mock" />}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
const mockedAiApiClient = vi.mocked(aiApiClient);
|
||||||
const mockedApiClient = vi.mocked(apiClient);
|
const mockedApiClient = vi.mocked(apiClient);
|
||||||
|
|
||||||
const mockFlyers: Flyer[] = [
|
const mockFlyers: Flyer[] = [
|
||||||
@@ -131,33 +151,6 @@ const mockFlyers: Flyer[] = [
|
|||||||
];
|
];
|
||||||
|
|
||||||
describe('App Component', () => {
|
describe('App Component', () => {
|
||||||
// Mock localStorage
|
|
||||||
let storage: { [key: string]: string } = {};
|
|
||||||
const localStorageMock = {
|
|
||||||
getItem: vi.fn((key: string) => storage[key] || null),
|
|
||||||
setItem: vi.fn((key: string, value: string) => {
|
|
||||||
storage[key] = value;
|
|
||||||
}),
|
|
||||||
removeItem: vi.fn((key: string) => {
|
|
||||||
delete storage[key];
|
|
||||||
}),
|
|
||||||
clear: vi.fn(() => {
|
|
||||||
storage = {};
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Mock matchMedia
|
|
||||||
const matchMediaMock = vi.fn().mockImplementation((query) => ({
|
|
||||||
matches: false, // Default to light mode
|
|
||||||
media: query,
|
|
||||||
onchange: null,
|
|
||||||
addListener: vi.fn(), // deprecated
|
|
||||||
removeListener: vi.fn(), // deprecated
|
|
||||||
addEventListener: vi.fn(),
|
|
||||||
removeEventListener: vi.fn(),
|
|
||||||
dispatchEvent: vi.fn(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
console.log('[TEST DEBUG] beforeEach: Clearing mocks and setting up defaults');
|
console.log('[TEST DEBUG] beforeEach: Clearing mocks and setting up defaults');
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
@@ -205,11 +198,9 @@ describe('App Component', () => {
|
|||||||
mockUseFlyerItems.mockReturnValue({
|
mockUseFlyerItems.mockReturnValue({
|
||||||
flyerItems: [],
|
flyerItems: [],
|
||||||
isLoading: false,
|
isLoading: false,
|
||||||
|
error: null,
|
||||||
});
|
});
|
||||||
// Clear local storage to prevent state from leaking between tests.
|
mockedUseAppInitialization.mockReturnValue({ isDarkMode: false, unitSystem: 'imperial' });
|
||||||
localStorage.clear();
|
|
||||||
Object.defineProperty(window, 'localStorage', { value: localStorageMock, configurable: true });
|
|
||||||
Object.defineProperty(window, 'matchMedia', { value: matchMediaMock, configurable: true });
|
|
||||||
|
|
||||||
// Default mocks for API calls
|
// Default mocks for API calls
|
||||||
// Use mockImplementation to create a new Response object for each call,
|
// Use mockImplementation to create a new Response object for each call,
|
||||||
@@ -261,6 +252,7 @@ describe('App Component', () => {
|
|||||||
|
|
||||||
it('should render the main layout and header', async () => {
|
it('should render the main layout and header', async () => {
|
||||||
// Simulate the auth hook finishing its initial check
|
// Simulate the auth hook finishing its initial check
|
||||||
|
mockedUseAppInitialization.mockReturnValue({ isDarkMode: false, unitSystem: 'imperial' });
|
||||||
mockUseAuth.mockReturnValue({
|
mockUseAuth.mockReturnValue({
|
||||||
userProfile: null,
|
userProfile: null,
|
||||||
authStatus: 'SIGNED_OUT',
|
authStatus: 'SIGNED_OUT',
|
||||||
@@ -272,6 +264,7 @@ describe('App Component', () => {
|
|||||||
|
|
||||||
renderApp();
|
renderApp();
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
|
expect(screen.getByTestId('app-guard-mock')).toBeInTheDocument();
|
||||||
expect(screen.getByTestId('header-mock')).toBeInTheDocument();
|
expect(screen.getByTestId('header-mock')).toBeInTheDocument();
|
||||||
// Check that the main layout and home page are rendered for the root path
|
// Check that the main layout and home page are rendered for the root path
|
||||||
expect(screen.getByTestId('main-layout-mock')).toBeInTheDocument();
|
expect(screen.getByTestId('main-layout-mock')).toBeInTheDocument();
|
||||||
@@ -364,193 +357,6 @@ describe('App Component', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Theme and Unit System Synchronization', () => {
|
|
||||||
it('should set dark mode based on user profile preferences', async () => {
|
|
||||||
console.log(
|
|
||||||
'[TEST DEBUG] Test Start: should set dark mode based on user profile preferences',
|
|
||||||
);
|
|
||||||
const profileWithDarkMode: UserProfile = createMockUserProfile({
|
|
||||||
user: createMockUser({ user_id: 'user-1', email: 'dark@mode.com' }),
|
|
||||||
role: 'user',
|
|
||||||
points: 0,
|
|
||||||
preferences: { darkMode: true },
|
|
||||||
});
|
|
||||||
mockUseAuth.mockReturnValue({
|
|
||||||
userProfile: profileWithDarkMode,
|
|
||||||
authStatus: 'AUTHENTICATED',
|
|
||||||
isLoading: false,
|
|
||||||
login: vi.fn(),
|
|
||||||
logout: vi.fn(),
|
|
||||||
updateProfile: vi.fn(),
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('[TEST DEBUG] Rendering App');
|
|
||||||
renderApp();
|
|
||||||
// The useEffect that sets the theme is asynchronous. We must wait for the update.
|
|
||||||
await waitFor(() => {
|
|
||||||
console.log(
|
|
||||||
'[TEST DEBUG] Checking for dark class. Current classes:',
|
|
||||||
document.documentElement.className,
|
|
||||||
);
|
|
||||||
expect(document.documentElement).toHaveClass('dark');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set light mode based on user profile preferences', async () => {
|
|
||||||
const profileWithLightMode: UserProfile = createMockUserProfile({
|
|
||||||
user: createMockUser({ user_id: 'user-1', email: 'light@mode.com' }),
|
|
||||||
role: 'user',
|
|
||||||
points: 0,
|
|
||||||
preferences: { darkMode: false },
|
|
||||||
});
|
|
||||||
mockUseAuth.mockReturnValue({
|
|
||||||
userProfile: profileWithLightMode,
|
|
||||||
authStatus: 'AUTHENTICATED',
|
|
||||||
isLoading: false,
|
|
||||||
login: vi.fn(),
|
|
||||||
logout: vi.fn(),
|
|
||||||
updateProfile: vi.fn(),
|
|
||||||
});
|
|
||||||
renderApp();
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(document.documentElement).not.toHaveClass('dark');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set dark mode based on localStorage if profile has no preference', async () => {
|
|
||||||
localStorageMock.setItem('darkMode', 'true');
|
|
||||||
renderApp();
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(document.documentElement).toHaveClass('dark');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set dark mode based on system preference if no other setting exists', async () => {
|
|
||||||
matchMediaMock.mockImplementationOnce((query) => ({ matches: true, media: query }));
|
|
||||||
renderApp();
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(document.documentElement).toHaveClass('dark');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set unit system based on user profile preferences', async () => {
|
|
||||||
const profileWithMetric: UserProfile = createMockUserProfile({
|
|
||||||
user: createMockUser({ user_id: 'user-1', email: 'metric@user.com' }),
|
|
||||||
role: 'user',
|
|
||||||
points: 0,
|
|
||||||
preferences: { unitSystem: 'metric' },
|
|
||||||
});
|
|
||||||
mockUseAuth.mockReturnValue({
|
|
||||||
userProfile: profileWithMetric,
|
|
||||||
authStatus: 'AUTHENTICATED',
|
|
||||||
isLoading: false,
|
|
||||||
login: vi.fn(),
|
|
||||||
logout: vi.fn(),
|
|
||||||
updateProfile: vi.fn(),
|
|
||||||
});
|
|
||||||
|
|
||||||
renderApp();
|
|
||||||
// The unit system is passed as a prop to Header, which is mocked.
|
|
||||||
// We can't directly see the result in the DOM easily, so we trust the state is set.
|
|
||||||
// A more integrated test would be needed to verify the Header receives the prop.
|
|
||||||
// For now, this test ensures the useEffect logic runs without crashing.
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByTestId('header-mock')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('OAuth Token Handling', () => {
|
|
||||||
it('should call login when a googleAuthToken is in the URL', async () => {
|
|
||||||
console.log(
|
|
||||||
'[TEST DEBUG] Test Start: should call login when a googleAuthToken is in the URL',
|
|
||||||
);
|
|
||||||
const mockLogin = vi.fn().mockResolvedValue(undefined);
|
|
||||||
mockUseAuth.mockReturnValue({
|
|
||||||
userProfile: null,
|
|
||||||
authStatus: 'SIGNED_OUT',
|
|
||||||
isLoading: false,
|
|
||||||
login: mockLogin,
|
|
||||||
logout: vi.fn(),
|
|
||||||
updateProfile: vi.fn(),
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('[TEST DEBUG] Rendering App with googleAuthToken');
|
|
||||||
renderApp(['/?googleAuthToken=test-google-token']);
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
console.log('[TEST DEBUG] Checking mockLogin calls:', mockLogin.mock.calls);
|
|
||||||
expect(mockLogin).toHaveBeenCalledWith('test-google-token');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should call login when a githubAuthToken is in the URL', async () => {
|
|
||||||
console.log(
|
|
||||||
'[TEST DEBUG] Test Start: should call login when a githubAuthToken is in the URL',
|
|
||||||
);
|
|
||||||
const mockLogin = vi.fn().mockResolvedValue(undefined);
|
|
||||||
mockUseAuth.mockReturnValue({
|
|
||||||
userProfile: null,
|
|
||||||
authStatus: 'SIGNED_OUT',
|
|
||||||
isLoading: false,
|
|
||||||
login: mockLogin,
|
|
||||||
logout: vi.fn(),
|
|
||||||
updateProfile: vi.fn(),
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('[TEST DEBUG] Rendering App with githubAuthToken');
|
|
||||||
renderApp(['/?githubAuthToken=test-github-token']);
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
console.log('[TEST DEBUG] Checking mockLogin calls:', mockLogin.mock.calls);
|
|
||||||
expect(mockLogin).toHaveBeenCalledWith('test-github-token');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should log an error if login with a GitHub token fails', async () => {
|
|
||||||
console.log(
|
|
||||||
'[TEST DEBUG] Test Start: should log an error if login with a GitHub token fails',
|
|
||||||
);
|
|
||||||
const mockLogin = vi.fn().mockRejectedValue(new Error('GitHub login failed'));
|
|
||||||
mockUseAuth.mockReturnValue({
|
|
||||||
userProfile: null,
|
|
||||||
authStatus: 'SIGNED_OUT',
|
|
||||||
isLoading: false,
|
|
||||||
login: mockLogin,
|
|
||||||
logout: vi.fn(),
|
|
||||||
updateProfile: vi.fn(),
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('[TEST DEBUG] Rendering App with githubAuthToken');
|
|
||||||
renderApp(['/?githubAuthToken=bad-token']);
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
console.log('[TEST DEBUG] Checking mockLogin calls:', mockLogin.mock.calls);
|
|
||||||
expect(mockLogin).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should log an error if login with a token fails', async () => {
|
|
||||||
console.log('[TEST DEBUG] Test Start: should log an error if login with a token fails');
|
|
||||||
const mockLogin = vi.fn().mockRejectedValue(new Error('Token login failed'));
|
|
||||||
mockUseAuth.mockReturnValue({
|
|
||||||
userProfile: null,
|
|
||||||
authStatus: 'SIGNED_OUT',
|
|
||||||
isLoading: false,
|
|
||||||
login: mockLogin,
|
|
||||||
logout: vi.fn(),
|
|
||||||
updateProfile: vi.fn(),
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log('[TEST DEBUG] Rendering App with googleAuthToken');
|
|
||||||
renderApp(['/?googleAuthToken=bad-token']);
|
|
||||||
await waitFor(() => {
|
|
||||||
console.log('[TEST DEBUG] Checking mockLogin calls:', mockLogin.mock.calls);
|
|
||||||
expect(mockLogin).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('Flyer Selection from URL', () => {
|
describe('Flyer Selection from URL', () => {
|
||||||
it('should select a flyer when flyerId is present in the URL', async () => {
|
it('should select a flyer when flyerId is present in the URL', async () => {
|
||||||
renderApp(['/flyers/2']);
|
renderApp(['/flyers/2']);
|
||||||
@@ -583,23 +389,9 @@ describe('App Component', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Version and "What\'s New" Modal', () => {
|
|
||||||
it('should show the "What\'s New" modal if the app version is new', async () => {
|
|
||||||
// Mock the config module for this specific test
|
|
||||||
vi.mock('./config', () => ({
|
|
||||||
default: {
|
|
||||||
app: { version: '1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
|
|
||||||
google: { mapsEmbedApiKey: 'mock-key' },
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
localStorageMock.setItem('lastSeenVersion', '1.0.0');
|
|
||||||
renderApp();
|
|
||||||
await expect(screen.findByTestId('whats-new-modal-mock')).resolves.toBeInTheDocument();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('Modal Interactions', () => {
|
describe('Modal Interactions', () => {
|
||||||
it('should open and close the ProfileManager modal', async () => {
|
it('should open and close the ProfileManager modal', async () => {
|
||||||
|
console.log('[TEST DEBUG] Test Start: should open and close the ProfileManager modal');
|
||||||
renderApp();
|
renderApp();
|
||||||
expect(screen.queryByTestId('profile-manager-mock')).not.toBeInTheDocument();
|
expect(screen.queryByTestId('profile-manager-mock')).not.toBeInTheDocument();
|
||||||
|
|
||||||
@@ -607,11 +399,13 @@ describe('App Component', () => {
|
|||||||
fireEvent.click(screen.getByText('Open Profile'));
|
fireEvent.click(screen.getByText('Open Profile'));
|
||||||
expect(await screen.findByTestId('profile-manager-mock')).toBeInTheDocument();
|
expect(await screen.findByTestId('profile-manager-mock')).toBeInTheDocument();
|
||||||
|
|
||||||
|
console.log('[TEST DEBUG] ProfileManager modal opened. Now closing...');
|
||||||
// Close modal
|
// Close modal
|
||||||
fireEvent.click(screen.getByText('Close Profile'));
|
fireEvent.click(screen.getByText('Close Profile'));
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(screen.queryByTestId('profile-manager-mock')).not.toBeInTheDocument();
|
expect(screen.queryByTestId('profile-manager-mock')).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
console.log('[TEST DEBUG] ProfileManager modal closed.');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should open and close the VoiceAssistant modal for authenticated users', async () => {
|
it('should open and close the VoiceAssistant modal for authenticated users', async () => {
|
||||||
@@ -636,7 +430,7 @@ describe('App Component', () => {
|
|||||||
fireEvent.click(screen.getByText('Open Voice Assistant'));
|
fireEvent.click(screen.getByText('Open Voice Assistant'));
|
||||||
|
|
||||||
console.log('[TEST DEBUG] Waiting for voice-assistant-mock');
|
console.log('[TEST DEBUG] Waiting for voice-assistant-mock');
|
||||||
expect(await screen.findByTestId('voice-assistant-mock')).toBeInTheDocument();
|
expect(await screen.findByTestId('voice-assistant-mock', {}, { timeout: 3000 })).toBeInTheDocument();
|
||||||
|
|
||||||
// Close modal
|
// Close modal
|
||||||
fireEvent.click(screen.getByText('Close Voice Assistant'));
|
fireEvent.click(screen.getByText('Close Voice Assistant'));
|
||||||
@@ -735,64 +529,6 @@ describe('App Component', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("Version Display and What's New", () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
// Also mock the config module to reflect this change
|
|
||||||
vi.mock('./config', () => ({
|
|
||||||
default: {
|
|
||||||
app: {
|
|
||||||
version: '2.0.0',
|
|
||||||
commitMessage: 'A new version!',
|
|
||||||
commitUrl: 'http://example.com/commit/2.0.0',
|
|
||||||
},
|
|
||||||
google: { mapsEmbedApiKey: 'mock-key' },
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should display the version number and commit link', () => {
|
|
||||||
renderApp();
|
|
||||||
const versionLink = screen.getByText(`Version: 2.0.0`);
|
|
||||||
expect(versionLink).toBeInTheDocument();
|
|
||||||
expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {
|
|
||||||
// Pre-set the localStorage to prevent the modal from opening automatically
|
|
||||||
localStorageMock.setItem('lastSeenVersion', '2.0.0');
|
|
||||||
|
|
||||||
renderApp();
|
|
||||||
expect(screen.queryByTestId('whats-new-modal-mock')).not.toBeInTheDocument();
|
|
||||||
|
|
||||||
const openButton = await screen.findByTitle("Show what's new in this version");
|
|
||||||
fireEvent.click(openButton);
|
|
||||||
|
|
||||||
expect(await screen.findByTestId('whats-new-modal-mock')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('Dynamic Toaster Styles', () => {
|
|
||||||
it('should render the correct CSS variables for toast styling in light mode', async () => {
|
|
||||||
renderApp();
|
|
||||||
await waitFor(() => {
|
|
||||||
const styleTag = document.querySelector('style');
|
|
||||||
expect(styleTag).not.toBeNull();
|
|
||||||
expect(styleTag!.innerHTML).toContain('--toast-bg: #FFFFFF');
|
|
||||||
expect(styleTag!.innerHTML).toContain('--toast-color: #1F2937');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should render the correct CSS variables for toast styling in dark mode', async () => {
|
|
||||||
localStorageMock.setItem('darkMode', 'true');
|
|
||||||
renderApp();
|
|
||||||
await waitFor(() => {
|
|
||||||
const styleTag = document.querySelector('style');
|
|
||||||
expect(styleTag).not.toBeNull();
|
|
||||||
expect(styleTag!.innerHTML).toContain('--toast-bg: #4B5563');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('Profile and Login Handlers', () => {
|
describe('Profile and Login Handlers', () => {
|
||||||
it('should call updateProfile when handleProfileUpdate is triggered', async () => {
|
it('should call updateProfile when handleProfileUpdate is triggered', async () => {
|
||||||
console.log(
|
console.log(
|
||||||
@@ -841,12 +577,19 @@ describe('App Component', () => {
|
|||||||
logout: vi.fn(),
|
logout: vi.fn(),
|
||||||
updateProfile: vi.fn(),
|
updateProfile: vi.fn(),
|
||||||
});
|
});
|
||||||
|
// Mock the login function to simulate a successful login. Signature: (token, profile)
|
||||||
|
const mockLoginSuccess = vi.fn(async (_token: string, _profile?: UserProfile) => {
|
||||||
|
// Simulate fetching profile after login
|
||||||
|
const profileResponse = await mockedApiClient.getAuthenticatedUserProfile();
|
||||||
|
const userProfileData: UserProfile = await profileResponse.json();
|
||||||
|
mockUseAuth.mockReturnValue({ ...mockUseAuth(), userProfile: userProfileData, authStatus: 'AUTHENTICATED' });
|
||||||
|
});
|
||||||
|
|
||||||
console.log('[TEST DEBUG] Rendering App');
|
console.log('[TEST DEBUG] Rendering App');
|
||||||
renderApp();
|
renderApp();
|
||||||
console.log('[TEST DEBUG] Opening Profile');
|
console.log('[TEST DEBUG] Opening Profile');
|
||||||
fireEvent.click(screen.getByText('Open Profile'));
|
fireEvent.click(screen.getByText('Open Profile'));
|
||||||
const loginButton = await screen.findByText('Login');
|
const loginButton = await screen.findByRole('button', { name: 'Login' });
|
||||||
console.log('[TEST DEBUG] Clicking Login');
|
console.log('[TEST DEBUG] Clicking Login');
|
||||||
fireEvent.click(loginButton);
|
fireEvent.click(loginButton);
|
||||||
|
|
||||||
@@ -857,4 +600,33 @@ describe('App Component', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("Version Display and What's New", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.mock('./config', () => ({
|
||||||
|
default: {
|
||||||
|
app: {
|
||||||
|
version: '2.0.0',
|
||||||
|
commitMessage: 'A new version!',
|
||||||
|
commitUrl: 'http://example.com/commit/2.0.0',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should display the version number and commit link', () => {
|
||||||
|
renderApp();
|
||||||
|
const versionLink = screen.getByText(`Version: 2.0.0`);
|
||||||
|
expect(versionLink).toBeInTheDocument();
|
||||||
|
expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {
|
||||||
|
renderApp();
|
||||||
|
const openButton = await screen.findByTitle("Show what's new in this version");
|
||||||
|
fireEvent.click(openButton);
|
||||||
|
// The mock AppGuard now renders the modal when it's open
|
||||||
|
expect(await screen.findByTestId('whats-new-modal-mock')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
132
src/App.tsx
132
src/App.tsx
@@ -1,9 +1,9 @@
|
|||||||
// src/App.tsx
|
// src/App.tsx
|
||||||
import React, { useState, useCallback, useEffect } from 'react';
|
import React, { useState, useCallback, useEffect } from 'react';
|
||||||
import { Routes, Route, useParams, useLocation, useNavigate } from 'react-router-dom';
|
import { Routes, Route, useParams } from 'react-router-dom';
|
||||||
import { Toaster } from 'react-hot-toast';
|
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||||
import * as pdfjsLib from 'pdfjs-dist';
|
import * as pdfjsLib from 'pdfjs-dist';
|
||||||
import { Footer } from './components/Footer'; // Assuming this is where your Footer component will live
|
import { Footer } from './components/Footer';
|
||||||
import { Header } from './components/Header';
|
import { Header } from './components/Header';
|
||||||
import { logger } from './services/logger.client';
|
import { logger } from './services/logger.client';
|
||||||
import type { Flyer, Profile, UserProfile } from './types';
|
import type { Flyer, Profile, UserProfile } from './types';
|
||||||
@@ -13,18 +13,20 @@ import { AdminPage } from './pages/admin/AdminPage';
|
|||||||
import { AdminRoute } from './components/AdminRoute';
|
import { AdminRoute } from './components/AdminRoute';
|
||||||
import { CorrectionsPage } from './pages/admin/CorrectionsPage';
|
import { CorrectionsPage } from './pages/admin/CorrectionsPage';
|
||||||
import { AdminStatsPage } from './pages/admin/AdminStatsPage';
|
import { AdminStatsPage } from './pages/admin/AdminStatsPage';
|
||||||
|
import { FlyerReviewPage } from './pages/admin/FlyerReviewPage';
|
||||||
import { ResetPasswordPage } from './pages/ResetPasswordPage';
|
import { ResetPasswordPage } from './pages/ResetPasswordPage';
|
||||||
import { VoiceLabPage } from './pages/VoiceLabPage';
|
import { VoiceLabPage } from './pages/VoiceLabPage';
|
||||||
import { WhatsNewModal } from './components/WhatsNewModal';
|
|
||||||
import { FlyerCorrectionTool } from './components/FlyerCorrectionTool';
|
import { FlyerCorrectionTool } from './components/FlyerCorrectionTool';
|
||||||
import { QuestionMarkCircleIcon } from './components/icons/QuestionMarkCircleIcon';
|
import { QuestionMarkCircleIcon } from './components/icons/QuestionMarkCircleIcon';
|
||||||
import { useAuth } from './hooks/useAuth';
|
import { useAuth } from './hooks/useAuth';
|
||||||
import { useFlyers } from './hooks/useFlyers'; // Assuming useFlyers fetches all flyers
|
import { useFlyers } from './hooks/useFlyers';
|
||||||
import { useFlyerItems } from './hooks/useFlyerItems'; // Import the new hook for flyer items
|
import { useFlyerItems } from './hooks/useFlyerItems';
|
||||||
import { useModal } from './hooks/useModal';
|
import { useModal } from './hooks/useModal';
|
||||||
import { MainLayout } from './layouts/MainLayout';
|
import { MainLayout } from './layouts/MainLayout';
|
||||||
import config from './config';
|
import config from './config';
|
||||||
import { HomePage } from './pages/HomePage';
|
import { HomePage } from './pages/HomePage';
|
||||||
|
import { AppGuard } from './components/AppGuard';
|
||||||
|
import { useAppInitialization } from './hooks/useAppInitialization';
|
||||||
|
|
||||||
// pdf.js worker configuration
|
// pdf.js worker configuration
|
||||||
// This is crucial for allowing pdf.js to process PDFs in a separate thread, preventing the UI from freezing.
|
// This is crucial for allowing pdf.js to process PDFs in a separate thread, preventing the UI from freezing.
|
||||||
@@ -35,15 +37,20 @@ pdfjsLib.GlobalWorkerOptions.workerSrc = new URL(
|
|||||||
import.meta.url,
|
import.meta.url,
|
||||||
).toString();
|
).toString();
|
||||||
|
|
||||||
|
// Create a client
|
||||||
|
const queryClient = new QueryClient();
|
||||||
|
|
||||||
function App() {
|
function App() {
|
||||||
const { userProfile, authStatus, login, logout, updateProfile } = useAuth();
|
const { userProfile, authStatus, login, logout, updateProfile } = useAuth();
|
||||||
const { flyers } = useFlyers();
|
const { flyers } = useFlyers();
|
||||||
const [selectedFlyer, setSelectedFlyer] = useState<Flyer | null>(null);
|
const [selectedFlyer, setSelectedFlyer] = useState<Flyer | null>(null);
|
||||||
const { openModal, closeModal, isModalOpen } = useModal();
|
const { openModal, closeModal, isModalOpen } = useModal();
|
||||||
const location = useLocation();
|
|
||||||
const navigate = useNavigate();
|
|
||||||
const params = useParams<{ flyerId?: string }>();
|
const params = useParams<{ flyerId?: string }>();
|
||||||
|
|
||||||
|
// This hook now handles initialization effects (OAuth, version check, theme)
|
||||||
|
// and returns the theme/unit state needed by other components.
|
||||||
|
const { isDarkMode, unitSystem } = useAppInitialization();
|
||||||
|
|
||||||
// Debugging: Log renders to identify infinite loops
|
// Debugging: Log renders to identify infinite loops
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (process.env.NODE_ENV === 'test') {
|
if (process.env.NODE_ENV === 'test') {
|
||||||
@@ -53,14 +60,11 @@ function App() {
|
|||||||
paramsFlyerId: params?.flyerId, // This was a duplicate, fixed.
|
paramsFlyerId: params?.flyerId, // This was a duplicate, fixed.
|
||||||
authStatus,
|
authStatus,
|
||||||
profileId: userProfile?.user.user_id,
|
profileId: userProfile?.user.user_id,
|
||||||
locationSearch: location.search,
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
const [isDarkMode, setIsDarkMode] = useState(false);
|
|
||||||
const { flyerItems } = useFlyerItems(selectedFlyer);
|
const { flyerItems } = useFlyerItems(selectedFlyer);
|
||||||
const [unitSystem, setUnitSystem] = useState<'metric' | 'imperial'>('imperial');
|
|
||||||
|
|
||||||
// Define modal handlers with useCallback at the top level to avoid Rules of Hooks violations
|
// Define modal handlers with useCallback at the top level to avoid Rules of Hooks violations
|
||||||
const handleOpenProfile = useCallback(() => openModal('profile'), [openModal]);
|
const handleOpenProfile = useCallback(() => openModal('profile'), [openModal]);
|
||||||
@@ -105,37 +109,6 @@ function App() {
|
|||||||
|
|
||||||
// --- State Synchronization and Error Handling ---
|
// --- State Synchronization and Error Handling ---
|
||||||
|
|
||||||
// Effect to set initial theme based on user profile, local storage, or system preference
|
|
||||||
useEffect(() => {
|
|
||||||
if (process.env.NODE_ENV === 'test')
|
|
||||||
console.log('[App] Effect: Theme Update', { profileId: userProfile?.user.user_id });
|
|
||||||
if (userProfile && userProfile.preferences?.darkMode !== undefined) {
|
|
||||||
// Preference from DB
|
|
||||||
const dbDarkMode = userProfile.preferences.darkMode;
|
|
||||||
setIsDarkMode(dbDarkMode);
|
|
||||||
document.documentElement.classList.toggle('dark', dbDarkMode);
|
|
||||||
} else {
|
|
||||||
// Fallback to local storage or system preference
|
|
||||||
const savedMode = localStorage.getItem('darkMode');
|
|
||||||
const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
|
|
||||||
const initialDarkMode = savedMode !== null ? savedMode === 'true' : prefersDark;
|
|
||||||
setIsDarkMode(initialDarkMode);
|
|
||||||
document.documentElement.classList.toggle('dark', initialDarkMode);
|
|
||||||
}
|
|
||||||
}, [userProfile?.preferences?.darkMode, userProfile?.user.user_id]);
|
|
||||||
|
|
||||||
// Effect to set initial unit system based on user profile or local storage
|
|
||||||
useEffect(() => {
|
|
||||||
if (userProfile && userProfile.preferences?.unitSystem) {
|
|
||||||
setUnitSystem(userProfile.preferences.unitSystem);
|
|
||||||
} else {
|
|
||||||
const savedSystem = localStorage.getItem('unitSystem') as 'metric' | 'imperial' | null;
|
|
||||||
if (savedSystem) {
|
|
||||||
setUnitSystem(savedSystem);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}, [userProfile?.preferences?.unitSystem, userProfile?.user.user_id]);
|
|
||||||
|
|
||||||
// This is the login handler that will be passed to the ProfileManager component.
|
// This is the login handler that will be passed to the ProfileManager component.
|
||||||
const handleLoginSuccess = useCallback(
|
const handleLoginSuccess = useCallback(
|
||||||
async (userProfile: UserProfile, token: string, _rememberMe: boolean) => {
|
async (userProfile: UserProfile, token: string, _rememberMe: boolean) => {
|
||||||
@@ -153,36 +126,6 @@ function App() {
|
|||||||
[login],
|
[login],
|
||||||
);
|
);
|
||||||
|
|
||||||
// Effect to handle the token from Google OAuth redirect
|
|
||||||
useEffect(() => {
|
|
||||||
const urlParams = new URLSearchParams(location.search);
|
|
||||||
const googleToken = urlParams.get('googleAuthToken');
|
|
||||||
|
|
||||||
if (googleToken) {
|
|
||||||
logger.info('Received Google Auth token from URL. Authenticating...');
|
|
||||||
// The login flow is now handled by the useAuth hook. We just need to trigger it.
|
|
||||||
// We pass only the token; the AuthProvider will fetch the user profile.
|
|
||||||
login(googleToken).catch((err) =>
|
|
||||||
logger.error('Failed to log in with Google token', { error: err }),
|
|
||||||
);
|
|
||||||
// Clean the token from the URL
|
|
||||||
navigate(location.pathname, { replace: true });
|
|
||||||
}
|
|
||||||
|
|
||||||
const githubToken = urlParams.get('githubAuthToken');
|
|
||||||
if (githubToken) {
|
|
||||||
logger.info('Received GitHub Auth token from URL. Authenticating...');
|
|
||||||
login(githubToken).catch((err) => {
|
|
||||||
logger.error('Failed to log in with GitHub token', { error: err });
|
|
||||||
// Optionally, redirect to a page with an error message
|
|
||||||
// navigate('/login?error=github_auth_failed');
|
|
||||||
});
|
|
||||||
|
|
||||||
// Clean the token from the URL
|
|
||||||
navigate(location.pathname, { replace: true });
|
|
||||||
}
|
|
||||||
}, [login, location.search, navigate, location.pathname]);
|
|
||||||
|
|
||||||
const handleFlyerSelect = useCallback(async (flyer: Flyer) => {
|
const handleFlyerSelect = useCallback(async (flyer: Flyer) => {
|
||||||
setSelectedFlyer(flyer);
|
setSelectedFlyer(flyer);
|
||||||
}, []);
|
}, []);
|
||||||
@@ -210,31 +153,10 @@ function App() {
|
|||||||
// Read the application version injected at build time.
|
// Read the application version injected at build time.
|
||||||
// This will only be available in the production build, not during local development.
|
// This will only be available in the production build, not during local development.
|
||||||
const appVersion = config.app.version;
|
const appVersion = config.app.version;
|
||||||
const commitMessage = config.app.commitMessage;
|
|
||||||
useEffect(() => {
|
|
||||||
if (appVersion) {
|
|
||||||
logger.info(`Application version: ${appVersion}`);
|
|
||||||
const lastSeenVersion = localStorage.getItem('lastSeenVersion');
|
|
||||||
// If the current version is new, show the "What's New" modal.
|
|
||||||
if (appVersion !== lastSeenVersion) {
|
|
||||||
openModal('whatsNew');
|
|
||||||
localStorage.setItem('lastSeenVersion', appVersion);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}, [appVersion]);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="bg-gray-100 dark:bg-gray-950 min-h-screen font-sans text-gray-800 dark:text-gray-200">
|
// AppGuard now handles the main page wrapper, theme styles, and "What's New" modal
|
||||||
{/* Toaster component for displaying notifications. It's placed at the top level. */}
|
<AppGuard>
|
||||||
<Toaster position="top-center" reverseOrder={false} />
|
|
||||||
{/* Add CSS variables for toast theming based on dark mode */}
|
|
||||||
<style>{`
|
|
||||||
:root {
|
|
||||||
--toast-bg: ${isDarkMode ? '#4B5563' : '#FFFFFF'};
|
|
||||||
--toast-color: ${isDarkMode ? '#F9FAFB' : '#1F2937'};
|
|
||||||
}
|
|
||||||
`}</style>
|
|
||||||
|
|
||||||
<Header
|
<Header
|
||||||
isDarkMode={isDarkMode}
|
isDarkMode={isDarkMode}
|
||||||
unitSystem={unitSystem}
|
unitSystem={unitSystem}
|
||||||
@@ -261,15 +183,6 @@ function App() {
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{appVersion && commitMessage && (
|
|
||||||
<WhatsNewModal
|
|
||||||
isOpen={isModalOpen('whatsNew')}
|
|
||||||
onClose={handleCloseWhatsNew}
|
|
||||||
version={appVersion}
|
|
||||||
commitMessage={commitMessage}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{selectedFlyer && (
|
{selectedFlyer && (
|
||||||
<FlyerCorrectionTool
|
<FlyerCorrectionTool
|
||||||
isOpen={isModalOpen('correctionTool')}
|
isOpen={isModalOpen('correctionTool')}
|
||||||
@@ -316,6 +229,7 @@ function App() {
|
|||||||
<Route path="/admin" element={<AdminPage />} />
|
<Route path="/admin" element={<AdminPage />} />
|
||||||
<Route path="/admin/corrections" element={<CorrectionsPage />} />
|
<Route path="/admin/corrections" element={<CorrectionsPage />} />
|
||||||
<Route path="/admin/stats" element={<AdminStatsPage />} />
|
<Route path="/admin/stats" element={<AdminStatsPage />} />
|
||||||
|
<Route path="/admin/flyer-review" element={<FlyerReviewPage />} />
|
||||||
<Route path="/admin/voice-lab" element={<VoiceLabPage />} />
|
<Route path="/admin/voice-lab" element={<VoiceLabPage />} />
|
||||||
</Route>
|
</Route>
|
||||||
<Route path="/reset-password/:token" element={<ResetPasswordPage />} />
|
<Route path="/reset-password/:token" element={<ResetPasswordPage />} />
|
||||||
@@ -341,8 +255,14 @@ function App() {
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
<Footer />
|
<Footer />
|
||||||
</div>
|
</AppGuard>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export default App;
|
const WrappedApp = () => (
|
||||||
|
<QueryClientProvider client={queryClient}>
|
||||||
|
<App />
|
||||||
|
</QueryClientProvider>
|
||||||
|
);
|
||||||
|
|
||||||
|
export default WrappedApp;
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import { describe, it, expect, vi } from 'vitest';
|
|||||||
import { AnonymousUserBanner } from './AnonymousUserBanner';
|
import { AnonymousUserBanner } from './AnonymousUserBanner';
|
||||||
|
|
||||||
// Mock the icon to ensure it is rendered correctly
|
// Mock the icon to ensure it is rendered correctly
|
||||||
vi.mock('../../../components/icons/InformationCircleIcon', () => ({
|
vi.mock('./icons/InformationCircleIcon', () => ({
|
||||||
InformationCircleIcon: (props: React.SVGProps<SVGSVGElement>) => (
|
InformationCircleIcon: (props: React.SVGProps<SVGSVGElement>) => (
|
||||||
<svg data-testid="info-icon" {...props} />
|
<svg data-testid="info-icon" {...props} />
|
||||||
),
|
),
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
// src/pages/admin/components/AnonymousUserBanner.tsx
|
// src/components/AnonymousUserBanner.tsx
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
import { InformationCircleIcon } from '../../../components/icons/InformationCircleIcon';
|
import { InformationCircleIcon } from './icons/InformationCircleIcon';
|
||||||
|
|
||||||
interface AnonymousUserBannerProps {
|
interface AnonymousUserBannerProps {
|
||||||
/**
|
/**
|
||||||
93
src/components/AppGuard.test.tsx
Normal file
93
src/components/AppGuard.test.tsx
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
// src/components/AppGuard.test.tsx
|
||||||
|
import React from 'react';
|
||||||
|
import { render, screen, waitFor } from '@testing-library/react';
|
||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { AppGuard } from './AppGuard';
|
||||||
|
import { useAppInitialization } from '../hooks/useAppInitialization';
|
||||||
|
import { useModal } from '../hooks/useModal';
|
||||||
|
|
||||||
|
// Mock dependencies
|
||||||
|
vi.mock('../hooks/useAppInitialization');
|
||||||
|
vi.mock('../hooks/useModal');
|
||||||
|
vi.mock('./WhatsNewModal', () => ({
|
||||||
|
WhatsNewModal: ({ isOpen }: { isOpen: boolean }) =>
|
||||||
|
isOpen ? <div data-testid="whats-new-modal-mock" /> : null,
|
||||||
|
}));
|
||||||
|
vi.mock('../config', () => ({
|
||||||
|
default: {
|
||||||
|
app: { version: '1.0.0', commitMessage: 'Test commit' },
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
const mockedUseAppInitialization = vi.mocked(useAppInitialization);
|
||||||
|
const mockedUseModal = vi.mocked(useModal);
|
||||||
|
|
||||||
|
describe('AppGuard', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
// Default mocks
|
||||||
|
mockedUseAppInitialization.mockReturnValue({
|
||||||
|
isDarkMode: false,
|
||||||
|
unitSystem: 'imperial',
|
||||||
|
});
|
||||||
|
mockedUseModal.mockReturnValue({
|
||||||
|
isModalOpen: vi.fn().mockReturnValue(false),
|
||||||
|
openModal: vi.fn(),
|
||||||
|
closeModal: vi.fn(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should render children', () => {
|
||||||
|
render(
|
||||||
|
<AppGuard>
|
||||||
|
<div>Child Content</div>
|
||||||
|
</AppGuard>,
|
||||||
|
);
|
||||||
|
expect(screen.getByText('Child Content')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should render WhatsNewModal when it is open', () => {
|
||||||
|
mockedUseModal.mockReturnValue({
|
||||||
|
...mockedUseModal(),
|
||||||
|
isModalOpen: (modalId) => modalId === 'whatsNew',
|
||||||
|
});
|
||||||
|
render(
|
||||||
|
<AppGuard>
|
||||||
|
<div>Child</div>
|
||||||
|
</AppGuard>,
|
||||||
|
);
|
||||||
|
expect(screen.getByTestId('whats-new-modal-mock')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set dark mode styles for toaster', async () => {
|
||||||
|
mockedUseAppInitialization.mockReturnValue({
|
||||||
|
isDarkMode: true,
|
||||||
|
unitSystem: 'imperial',
|
||||||
|
});
|
||||||
|
render(
|
||||||
|
<AppGuard>
|
||||||
|
<div>Child</div>
|
||||||
|
</AppGuard>,
|
||||||
|
);
|
||||||
|
await waitFor(() => {
|
||||||
|
const styleTag = document.querySelector('style');
|
||||||
|
expect(styleTag).not.toBeNull();
|
||||||
|
expect(styleTag!.innerHTML).toContain('--toast-bg: #4B5563');
|
||||||
|
expect(styleTag!.innerHTML).toContain('--toast-color: #F9FAFB');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set light mode styles for toaster', async () => {
|
||||||
|
render(
|
||||||
|
<AppGuard>
|
||||||
|
<div>Child</div>
|
||||||
|
</AppGuard>,
|
||||||
|
);
|
||||||
|
await waitFor(() => {
|
||||||
|
const styleTag = document.querySelector('style');
|
||||||
|
expect(styleTag).not.toBeNull();
|
||||||
|
expect(styleTag!.innerHTML).toContain('--toast-bg: #FFFFFF');
|
||||||
|
expect(styleTag!.innerHTML).toContain('--toast-color: #1F2937');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
47
src/components/AppGuard.tsx
Normal file
47
src/components/AppGuard.tsx
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
// src/components/AppGuard.tsx
|
||||||
|
import React, { useCallback } from 'react';
|
||||||
|
import { Toaster } from 'react-hot-toast';
|
||||||
|
import { useAppInitialization } from '../hooks/useAppInitialization';
|
||||||
|
import { useModal } from '../hooks/useModal';
|
||||||
|
import { WhatsNewModal } from './WhatsNewModal';
|
||||||
|
import config from '../config';
|
||||||
|
|
||||||
|
interface AppGuardProps {
|
||||||
|
children: React.ReactNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const AppGuard: React.FC<AppGuardProps> = ({ children }) => {
|
||||||
|
// This hook handles OAuth tokens, version checks, and returns theme state.
|
||||||
|
const { isDarkMode } = useAppInitialization();
|
||||||
|
const { isModalOpen, closeModal } = useModal();
|
||||||
|
|
||||||
|
const handleCloseWhatsNew = useCallback(() => closeModal('whatsNew'), [closeModal]);
|
||||||
|
|
||||||
|
const appVersion = config.app.version;
|
||||||
|
const commitMessage = config.app.commitMessage;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="bg-gray-100 dark:bg-gray-950 min-h-screen font-sans text-gray-800 dark:text-gray-200">
|
||||||
|
{/* Toaster component for displaying notifications. It's placed at the top level. */}
|
||||||
|
<Toaster position="top-center" reverseOrder={false} />
|
||||||
|
{/* Add CSS variables for toast theming based on dark mode */}
|
||||||
|
<style>{`
|
||||||
|
:root {
|
||||||
|
--toast-bg: ${isDarkMode ? '#4B5563' : '#FFFFFF'};
|
||||||
|
--toast-color: ${isDarkMode ? '#F9FAFB' : '#1F2937'};
|
||||||
|
}
|
||||||
|
`}</style>
|
||||||
|
|
||||||
|
{appVersion && commitMessage && (
|
||||||
|
<WhatsNewModal
|
||||||
|
isOpen={isModalOpen('whatsNew')}
|
||||||
|
onClose={handleCloseWhatsNew}
|
||||||
|
version={appVersion}
|
||||||
|
commitMessage={commitMessage}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{children}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
@@ -44,7 +44,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
|
|||||||
})
|
})
|
||||||
.catch((err) => {
|
.catch((err) => {
|
||||||
console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
|
console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
|
||||||
logger.error('Failed to fetch image for correction tool', { error: err });
|
logger.error({ error: err }, 'Failed to fetch image for correction tool');
|
||||||
notifyError('Could not load the image for correction.');
|
notifyError('Could not load the image for correction.');
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -164,7 +164,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
|
|||||||
const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
|
const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
|
||||||
console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
|
console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
|
||||||
notifyError(msg);
|
notifyError(msg);
|
||||||
logger.error('Error during rescan:', { error: err });
|
logger.error({ error: err }, 'Error during rescan:');
|
||||||
} finally {
|
} finally {
|
||||||
console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
|
console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
|
||||||
setIsProcessing(false);
|
setIsProcessing(false);
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
// src/pages/admin/components/PasswordInput.tsx
|
// src/components/PasswordInput.tsx
|
||||||
import React, { useState } from 'react';
|
import React, { useState } from 'react';
|
||||||
import { EyeIcon } from '../../../components/icons/EyeIcon';
|
import { EyeIcon } from './icons/EyeIcon';
|
||||||
import { EyeSlashIcon } from '../../../components/icons/EyeSlashIcon';
|
import { EyeSlashIcon } from './icons/EyeSlashIcon';
|
||||||
import { PasswordStrengthIndicator } from './PasswordStrengthIndicator';
|
import { PasswordStrengthIndicator } from './PasswordStrengthIndicator';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
// src/pages/admin/components/PasswordStrengthIndicator.tsx
|
// src/pages/admin/components/PasswordStrengthIndicator.tsx
|
||||||
|
// src/components/PasswordStrengthIndicator.tsx
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
import zxcvbn from 'zxcvbn';
|
import zxcvbn from 'zxcvbn';
|
||||||
|
|
||||||
18
src/components/icons/DocumentMagnifyingGlassIcon.tsx
Normal file
18
src/components/icons/DocumentMagnifyingGlassIcon.tsx
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import React from 'react';
|
||||||
|
|
||||||
|
export const DocumentMagnifyingGlassIcon: React.FC<React.SVGProps<SVGSVGElement>> = (props) => (
|
||||||
|
<svg
|
||||||
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
fill="none"
|
||||||
|
viewBox="0 0 24 24"
|
||||||
|
strokeWidth={1.5}
|
||||||
|
stroke="currentColor"
|
||||||
|
{...props}
|
||||||
|
>
|
||||||
|
<path
|
||||||
|
strokeLinecap="round"
|
||||||
|
strokeLinejoin="round"
|
||||||
|
d="M19.5 14.25v-2.625a3.375 3.375 0 0 0-3.375-3.375h-1.5A1.125 1.125 0 0 1 13.5 7.125v-1.5a3.375 3.375 0 0 0-3.375-3.375H8.25m5.231 13.481L15 17.25m-4.5 4.5L6.75 21.75m0 0L2.25 17.25m4.5 4.5v-4.5m13.5-3V9A2.25 2.25 0 0 0 16.5 6.75h-9A2.25 2.25 0 0 0 5.25 9v9.75m14.25-10.5a2.25 2.25 0 0 0-2.25-2.25H5.25a2.25 2.25 0 0 0-2.25 2.25v10.5a2.25 2.25 0 0 0 2.25 225h5.25"
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
);
|
||||||
202
src/db/seed.ts
202
src/db/seed.ts
@@ -6,10 +6,11 @@
|
|||||||
* DO NOT run this on a production database.
|
* DO NOT run this on a production database.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { Pool } from 'pg';
|
import { Pool, PoolClient } from 'pg';
|
||||||
|
import fs from 'node:fs/promises';
|
||||||
|
import path from 'node:path';
|
||||||
import bcrypt from 'bcrypt';
|
import bcrypt from 'bcrypt';
|
||||||
import { logger } from '../services/logger.server';
|
import { logger } from '../services/logger.server';
|
||||||
import { CATEGORIES } from '../types';
|
|
||||||
|
|
||||||
const pool = new Pool({
|
const pool = new Pool({
|
||||||
user: process.env.DB_USER,
|
user: process.env.DB_USER,
|
||||||
@@ -20,81 +21,55 @@ const pool = new Pool({
|
|||||||
});
|
});
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
// Declare client outside the try block so it's accessible in the finally block.
|
let client: PoolClient | undefined;
|
||||||
let client;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
client = await pool.connect();
|
client = await pool.connect();
|
||||||
logger.info('Connected to the database for seeding.');
|
logger.info('Connected to the database for seeding.');
|
||||||
await client.query('BEGIN');
|
await client.query('BEGIN');
|
||||||
|
|
||||||
// 1. Clean the database
|
// 1. Clean the database by dropping and recreating the schema
|
||||||
logger.info('--- Wiping existing data... ---');
|
logger.info('--- Wiping and rebuilding schema... ---');
|
||||||
// Using TRUNCATE ... RESTART IDENTITY CASCADE is a powerful way to clean all tables
|
const dropScriptPath = path.resolve(process.cwd(), 'sql/drop_tables.sql');
|
||||||
// and reset auto-incrementing keys, while respecting foreign key relationships.
|
const dropSql = await fs.readFile(dropScriptPath, 'utf-8');
|
||||||
const tablesRes = await client.query(`
|
await client.query(dropSql);
|
||||||
SELECT tablename
|
logger.info('All tables dropped successfully.');
|
||||||
FROM pg_tables
|
|
||||||
WHERE schemaname = 'public'
|
|
||||||
-- Exclude PostGIS system tables from truncation to avoid permission errors.
|
|
||||||
AND tablename NOT IN ('spatial_ref_sys', 'geometry_columns')
|
|
||||||
`);
|
|
||||||
const tables = tablesRes.rows.map((row) => `"${row.tablename}"`).join(', ');
|
|
||||||
if (tables) {
|
|
||||||
await client.query(`TRUNCATE ${tables} RESTART IDENTITY CASCADE`);
|
|
||||||
logger.info('All tables in public schema have been truncated.');
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Seed Categories
|
const schemaScriptPath = path.resolve(process.cwd(), 'sql/master_schema_rollup.sql');
|
||||||
logger.info('--- Seeding Categories... ---');
|
const schemaSql = await fs.readFile(schemaScriptPath, 'utf-8');
|
||||||
const categoryQuery = `INSERT INTO public.categories (name) VALUES ${CATEGORIES.map((_, i) => `($${i + 1})`).join(', ')} RETURNING category_id, name`;
|
await client.query(schemaSql);
|
||||||
const seededCategories = (
|
logger.info(
|
||||||
await client.query<{ category_id: number; name: string }>(categoryQuery, CATEGORIES)
|
'Schema rebuilt and static data seeded successfully from master_schema_rollup.sql.',
|
||||||
).rows;
|
);
|
||||||
const categoryMap = new Map(seededCategories.map((c) => [c.name, c.category_id]));
|
|
||||||
logger.info(`Seeded ${seededCategories.length} categories.`);
|
|
||||||
|
|
||||||
// 3. Seed Stores
|
// 2. Seed Additional Stores (if any beyond what's in the rollup)
|
||||||
logger.info('--- Seeding Stores... ---');
|
logger.info('--- Seeding Stores... ---');
|
||||||
const stores = ['Safeway', 'No Frills', 'Costco', 'Superstore'];
|
const stores = ['Safeway', 'No Frills', 'Costco', 'Superstore'];
|
||||||
const storeQuery = `INSERT INTO public.stores (name) VALUES ${stores.map((_, i) => `($${i + 1})`).join(', ')} RETURNING store_id, name`;
|
const storeQuery = `INSERT INTO public.stores (name) VALUES ${stores.map((_, i) => `($${i + 1})`).join(', ')} ON CONFLICT (name) DO NOTHING RETURNING store_id, name`;
|
||||||
const seededStores = (
|
await client.query<{ store_id: number; name: string }>(storeQuery, stores);
|
||||||
await client.query<{ store_id: number; name: string }>(storeQuery, stores)
|
const allStores = (
|
||||||
|
await client.query<{ store_id: number; name: string }>(
|
||||||
|
'SELECT store_id, name FROM public.stores',
|
||||||
|
)
|
||||||
).rows;
|
).rows;
|
||||||
const storeMap = new Map(seededStores.map((s) => [s.name, s.store_id]));
|
const storeMap = new Map(
|
||||||
logger.info(`Seeded ${seededStores.length} stores.`);
|
allStores.map((s: { name: string; store_id: number }) => [s.name, s.store_id]),
|
||||||
|
|
||||||
// 4. Seed Master Grocery Items
|
|
||||||
logger.info('--- Seeding Master Grocery Items... ---');
|
|
||||||
const masterItems = [
|
|
||||||
{ name: 'Chicken Breast, Boneless Skinless', category: 'Meat & Seafood' },
|
|
||||||
{ name: 'Ground Beef, Lean', category: 'Meat & Seafood' },
|
|
||||||
{ name: 'Avocado', category: 'Fruits & Vegetables' },
|
|
||||||
{ name: 'Bananas', category: 'Fruits & Vegetables' },
|
|
||||||
{ name: 'Broccoli', category: 'Fruits & Vegetables' },
|
|
||||||
{ name: 'Cheddar Cheese, Block', category: 'Dairy & Eggs' },
|
|
||||||
{ name: 'Milk, 2%', category: 'Dairy & Eggs' },
|
|
||||||
{ name: 'Eggs, Large', category: 'Dairy & Eggs' },
|
|
||||||
{ name: 'Whole Wheat Bread', category: 'Bakery & Bread' },
|
|
||||||
{ name: 'Pasta, Spaghetti', category: 'Pantry & Dry Goods' },
|
|
||||||
{ name: 'Canned Tomatoes, Diced', category: 'Canned Goods' },
|
|
||||||
{ name: 'Coca-Cola, 12-pack', category: 'Beverages' },
|
|
||||||
{ name: 'Frozen Pizza', category: 'Frozen Foods' },
|
|
||||||
{ name: 'Paper Towels', category: 'Household & Cleaning' },
|
|
||||||
];
|
|
||||||
const masterItemValues = masterItems
|
|
||||||
.map((item) => `('${item.name.replace(/'/g, "''")}', ${categoryMap.get(item.category)})`)
|
|
||||||
.join(', ');
|
|
||||||
const masterItemQuery = `INSERT INTO public.master_grocery_items (name, category_id) VALUES ${masterItemValues} RETURNING master_grocery_item_id, name`;
|
|
||||||
const seededMasterItems = (
|
|
||||||
await client.query<{ master_grocery_item_id: number; name: string }>(masterItemQuery)
|
|
||||||
).rows;
|
|
||||||
const masterItemMap = new Map(
|
|
||||||
seededMasterItems.map((item) => [item.name, item.master_grocery_item_id]),
|
|
||||||
);
|
);
|
||||||
logger.info(`Seeded ${seededMasterItems.length} master grocery items.`);
|
logger.info(`Seeded/verified ${allStores.length} total stores.`);
|
||||||
|
|
||||||
// 5. Seed Users & Profiles
|
// Fetch maps for items seeded by the master rollup script
|
||||||
|
const masterItemMap = new Map(
|
||||||
|
(
|
||||||
|
await client.query<{ master_grocery_item_id: number; name: string }>(
|
||||||
|
'SELECT master_grocery_item_id, name FROM public.master_grocery_items',
|
||||||
|
)
|
||||||
|
).rows.map((item: { name: string; master_grocery_item_id: number }) => [
|
||||||
|
item.name,
|
||||||
|
item.master_grocery_item_id,
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
|
||||||
|
// 3. Seed Users & Profiles
|
||||||
logger.info('--- Seeding Users & Profiles... ---');
|
logger.info('--- Seeding Users & Profiles... ---');
|
||||||
const saltRounds = 10;
|
const saltRounds = 10;
|
||||||
const adminPassHash = await bcrypt.hash('adminpass', saltRounds);
|
const adminPassHash = await bcrypt.hash('adminpass', saltRounds);
|
||||||
@@ -126,7 +101,7 @@ async function main() {
|
|||||||
const userId = userRes.rows[0].user_id;
|
const userId = userRes.rows[0].user_id;
|
||||||
logger.info('Seeded regular user (user@example.com / userpass)');
|
logger.info('Seeded regular user (user@example.com / userpass)');
|
||||||
|
|
||||||
// 6. Seed a Flyer
|
// 4. Seed a Flyer
|
||||||
logger.info('--- Seeding a Sample Flyer... ---');
|
logger.info('--- Seeding a Sample Flyer... ---');
|
||||||
const today = new Date();
|
const today = new Date();
|
||||||
const validFrom = new Date(today);
|
const validFrom = new Date(today);
|
||||||
@@ -146,29 +121,29 @@ async function main() {
|
|||||||
const flyerId = flyerRes.rows[0].flyer_id;
|
const flyerId = flyerRes.rows[0].flyer_id;
|
||||||
logger.info(`Seeded flyer for Safeway (ID: ${flyerId}).`);
|
logger.info(`Seeded flyer for Safeway (ID: ${flyerId}).`);
|
||||||
|
|
||||||
// 7. Seed Flyer Items
|
// 5. Seed Flyer Items
|
||||||
logger.info('--- Seeding Flyer Items... ---');
|
logger.info('--- Seeding Flyer Items... ---');
|
||||||
const flyerItems = [
|
const flyerItems = [
|
||||||
{
|
{
|
||||||
name: 'Chicken Breast, Boneless Skinless',
|
name: 'chicken breast',
|
||||||
price_display: '$3.99 /lb',
|
price_display: '$3.99 /lb',
|
||||||
price_in_cents: 399,
|
price_in_cents: 399,
|
||||||
quantity: 'per lb',
|
quantity: 'per lb',
|
||||||
master_item_id: masterItemMap.get('Chicken Breast, Boneless Skinless'),
|
master_item_id: masterItemMap.get('chicken breast'),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'Avocado',
|
name: 'avocados',
|
||||||
price_display: '2 for $5.00',
|
price_display: '2 for $5.00',
|
||||||
price_in_cents: 250,
|
price_in_cents: 250,
|
||||||
quantity: 'each',
|
quantity: 'each',
|
||||||
master_item_id: masterItemMap.get('Avocado'),
|
master_item_id: masterItemMap.get('avocados'),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'Coca-Cola 12-pack',
|
name: 'soda',
|
||||||
price_display: '$6.99',
|
price_display: '$6.99',
|
||||||
price_in_cents: 699,
|
price_in_cents: 699,
|
||||||
quantity: '12x355ml',
|
quantity: '12x355ml',
|
||||||
master_item_id: masterItemMap.get('Coca-Cola, 12-pack'),
|
master_item_id: masterItemMap.get('soda'),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'Unmatched Sample Item',
|
name: 'Unmatched Sample Item',
|
||||||
@@ -194,12 +169,12 @@ async function main() {
|
|||||||
}
|
}
|
||||||
logger.info(`Seeded ${flyerItems.length} items for the Safeway flyer.`);
|
logger.info(`Seeded ${flyerItems.length} items for the Safeway flyer.`);
|
||||||
|
|
||||||
// 8. Seed Watched Items for the user
|
// 6. Seed Watched Items for the user
|
||||||
logger.info('--- Seeding Watched Items... ---');
|
logger.info('--- Seeding Watched Items... ---');
|
||||||
const watchedItemIds = [
|
const watchedItemIds = [
|
||||||
masterItemMap.get('Chicken Breast, Boneless Skinless'),
|
masterItemMap.get('chicken breast'),
|
||||||
masterItemMap.get('Avocado'),
|
masterItemMap.get('avocados'),
|
||||||
masterItemMap.get('Ground Beef, Lean'),
|
masterItemMap.get('ground beef'),
|
||||||
];
|
];
|
||||||
for (const itemId of watchedItemIds) {
|
for (const itemId of watchedItemIds) {
|
||||||
if (itemId) {
|
if (itemId) {
|
||||||
@@ -211,7 +186,7 @@ async function main() {
|
|||||||
}
|
}
|
||||||
logger.info(`Seeded ${watchedItemIds.length} watched items for Test User.`);
|
logger.info(`Seeded ${watchedItemIds.length} watched items for Test User.`);
|
||||||
|
|
||||||
// 9. Seed a Shopping List
|
// 7. Seed a Shopping List
|
||||||
logger.info('--- Seeding a Shopping List... ---');
|
logger.info('--- Seeding a Shopping List... ---');
|
||||||
const listRes = await client.query<{ shopping_list_id: number }>(
|
const listRes = await client.query<{ shopping_list_id: number }>(
|
||||||
'INSERT INTO public.shopping_lists (user_id, name) VALUES ($1, $2) RETURNING shopping_list_id',
|
'INSERT INTO public.shopping_lists (user_id, name) VALUES ($1, $2) RETURNING shopping_list_id',
|
||||||
@@ -220,8 +195,8 @@ async function main() {
|
|||||||
const listId = listRes.rows[0].shopping_list_id;
|
const listId = listRes.rows[0].shopping_list_id;
|
||||||
|
|
||||||
const shoppingListItems = [
|
const shoppingListItems = [
|
||||||
{ master_item_id: masterItemMap.get('Milk, 2%'), quantity: 1 },
|
{ master_item_id: masterItemMap.get('milk'), quantity: 1 },
|
||||||
{ master_item_id: masterItemMap.get('Eggs, Large'), quantity: 1 },
|
{ master_item_id: masterItemMap.get('eggs'), quantity: 1 },
|
||||||
{ custom_item_name: 'Specialty Hot Sauce', quantity: 1 },
|
{ custom_item_name: 'Specialty Hot Sauce', quantity: 1 },
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -235,75 +210,6 @@ async function main() {
|
|||||||
`Seeded shopping list "Weekly Groceries" with ${shoppingListItems.length} items for Test User.`,
|
`Seeded shopping list "Weekly Groceries" with ${shoppingListItems.length} items for Test User.`,
|
||||||
);
|
);
|
||||||
|
|
||||||
// 10. Seed Brands
|
|
||||||
logger.info('--- Seeding Brands... ---');
|
|
||||||
const brands = [
|
|
||||||
'Coca-Cola',
|
|
||||||
'Kraft',
|
|
||||||
'Maple Leaf',
|
|
||||||
"Dempster's",
|
|
||||||
'No Name',
|
|
||||||
"President's Choice",
|
|
||||||
];
|
|
||||||
const brandQuery = `INSERT INTO public.brands (name) VALUES ${brands.map((_, i) => `($${i + 1})`).join(', ')} ON CONFLICT (name) DO NOTHING`;
|
|
||||||
await client.query(brandQuery, brands);
|
|
||||||
logger.info(`Seeded ${brands.length} brands.`);
|
|
||||||
|
|
||||||
// Link store-specific brands
|
|
||||||
const loblawsId = storeMap.get('Loblaws');
|
|
||||||
if (loblawsId) {
|
|
||||||
await client.query('UPDATE public.brands SET store_id = $1 WHERE name = $2 OR name = $3', [
|
|
||||||
loblawsId,
|
|
||||||
'No Name',
|
|
||||||
"President's Choice",
|
|
||||||
]);
|
|
||||||
logger.info('Linked store brands to Loblaws.');
|
|
||||||
}
|
|
||||||
|
|
||||||
// 11. Seed Recipes
|
|
||||||
logger.info('--- Seeding Recipes... ---');
|
|
||||||
const recipes = [
|
|
||||||
{
|
|
||||||
name: 'Simple Chicken and Rice',
|
|
||||||
description: 'A quick and healthy weeknight meal.',
|
|
||||||
instructions: '1. Cook rice. 2. Cook chicken. 3. Combine.',
|
|
||||||
prep: 10,
|
|
||||||
cook: 20,
|
|
||||||
servings: 4,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'Classic Spaghetti Bolognese',
|
|
||||||
description: 'A rich and hearty meat sauce.',
|
|
||||||
instructions: '1. Brown beef. 2. Add sauce. 3. Simmer.',
|
|
||||||
prep: 15,
|
|
||||||
cook: 45,
|
|
||||||
servings: 6,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'Vegetable Stir-fry',
|
|
||||||
description: 'A fast and flavorful vegetarian meal.',
|
|
||||||
instructions: '1. Chop veggies. 2. Stir-fry. 3. Add sauce.',
|
|
||||||
prep: 10,
|
|
||||||
cook: 10,
|
|
||||||
servings: 3,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
for (const recipe of recipes) {
|
|
||||||
await client.query(
|
|
||||||
`INSERT INTO public.recipes (name, description, instructions, prep_time_minutes, cook_time_minutes, servings, status)
|
|
||||||
VALUES ($1, $2, $3, $4, $5, $6, 'public') ON CONFLICT (name) WHERE user_id IS NULL DO NOTHING`,
|
|
||||||
[
|
|
||||||
recipe.name,
|
|
||||||
recipe.description,
|
|
||||||
recipe.instructions,
|
|
||||||
recipe.prep,
|
|
||||||
recipe.cook,
|
|
||||||
recipe.servings,
|
|
||||||
],
|
|
||||||
);
|
|
||||||
}
|
|
||||||
logger.info(`Seeded ${recipes.length} recipes.`);
|
|
||||||
|
|
||||||
// --- SEED SCRIPT DEBUG LOGGING ---
|
// --- SEED SCRIPT DEBUG LOGGING ---
|
||||||
// Corrected the query to be unambiguous by specifying the table alias for each column.
|
// Corrected the query to be unambiguous by specifying the table alias for each column.
|
||||||
// `id` and `email` come from the `users` table (u), and `role` comes from the `profiles` table (p).
|
// `id` and `email` come from the `users` table (u), and `role` comes from the `profiles` table (p).
|
||||||
|
|||||||
@@ -38,8 +38,26 @@ vi.mock('recharts', () => ({
|
|||||||
),
|
),
|
||||||
CartesianGrid: () => <div data-testid="cartesian-grid" />,
|
CartesianGrid: () => <div data-testid="cartesian-grid" />,
|
||||||
XAxis: () => <div data-testid="x-axis" />,
|
XAxis: () => <div data-testid="x-axis" />,
|
||||||
YAxis: () => <div data-testid="y-axis" />,
|
YAxis: ({ tickFormatter, domain }: any) => {
|
||||||
Tooltip: () => <div data-testid="tooltip" />,
|
// Execute functions for coverage
|
||||||
|
if (typeof tickFormatter === 'function') {
|
||||||
|
tickFormatter(1000);
|
||||||
|
}
|
||||||
|
if (Array.isArray(domain)) {
|
||||||
|
domain.forEach((d) => {
|
||||||
|
if (typeof d === 'function') d(100);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return <div data-testid="y-axis" />;
|
||||||
|
},
|
||||||
|
Tooltip: ({ formatter }: any) => {
|
||||||
|
// Execute formatter for coverage
|
||||||
|
if (typeof formatter === 'function') {
|
||||||
|
formatter(1000);
|
||||||
|
formatter(undefined);
|
||||||
|
}
|
||||||
|
return <div data-testid="tooltip" />;
|
||||||
|
},
|
||||||
Legend: () => <div data-testid="legend" />,
|
Legend: () => <div data-testid="legend" />,
|
||||||
// Fix: Use dataKey if name is not explicitly provided, as the component relies on dataKey
|
// Fix: Use dataKey if name is not explicitly provided, as the component relies on dataKey
|
||||||
Line: ({ name, dataKey }: { name?: string; dataKey?: string }) => (
|
Line: ({ name, dataKey }: { name?: string; dataKey?: string }) => (
|
||||||
@@ -301,4 +319,66 @@ describe('PriceHistoryChart', () => {
|
|||||||
expect(chartData).toHaveLength(2);
|
expect(chartData).toHaveLength(2);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should handle malformed data points and unmatched items gracefully', async () => {
|
||||||
|
const malformedData: any[] = [
|
||||||
|
{ master_item_id: null, summary_date: '2024-10-01', avg_price_in_cents: 100 }, // Missing ID
|
||||||
|
{ master_item_id: 1, summary_date: null, avg_price_in_cents: 100 }, // Missing date
|
||||||
|
{ master_item_id: 1, summary_date: '2024-10-01', avg_price_in_cents: null }, // Missing price
|
||||||
|
{ master_item_id: 999, summary_date: '2024-10-01', avg_price_in_cents: 100 }, // ID not in watchlist
|
||||||
|
];
|
||||||
|
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||||
|
new Response(JSON.stringify(malformedData)),
|
||||||
|
);
|
||||||
|
render(<PriceHistoryChart />);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
// Should show "Not enough historical data" because all points are invalid or filtered
|
||||||
|
expect(
|
||||||
|
screen.getByText(
|
||||||
|
'Not enough historical data for your watched items. Process more flyers to build a trend.',
|
||||||
|
),
|
||||||
|
).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should ignore higher prices for the same day', async () => {
|
||||||
|
const dataWithHigherPrice: HistoricalPriceDataPoint[] = [
|
||||||
|
createMockHistoricalPriceDataPoint({
|
||||||
|
master_item_id: 1,
|
||||||
|
summary_date: '2024-10-01',
|
||||||
|
avg_price_in_cents: 100,
|
||||||
|
}),
|
||||||
|
createMockHistoricalPriceDataPoint({
|
||||||
|
master_item_id: 1,
|
||||||
|
summary_date: '2024-10-01',
|
||||||
|
avg_price_in_cents: 150, // Higher price should be ignored
|
||||||
|
}),
|
||||||
|
createMockHistoricalPriceDataPoint({
|
||||||
|
master_item_id: 1,
|
||||||
|
summary_date: '2024-10-08',
|
||||||
|
avg_price_in_cents: 100,
|
||||||
|
}),
|
||||||
|
];
|
||||||
|
vi.mocked(apiClient.fetchHistoricalPriceData).mockResolvedValue(
|
||||||
|
new Response(JSON.stringify(dataWithHigherPrice)),
|
||||||
|
);
|
||||||
|
render(<PriceHistoryChart />);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
const chart = screen.getByTestId('line-chart');
|
||||||
|
const chartData = JSON.parse(chart.getAttribute('data-chartdata')!);
|
||||||
|
const dataPoint = chartData.find((d: any) => d.date === 'Oct 1');
|
||||||
|
expect(dataPoint['Organic Bananas']).toBe(100);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle non-Error objects thrown during fetch', async () => {
|
||||||
|
vi.mocked(apiClient.fetchHistoricalPriceData).mockRejectedValue('String Error');
|
||||||
|
render(<PriceHistoryChart />);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(screen.getByText('Failed to load price history.')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
// src/components/PriceHistoryChart.tsx
|
// src/features/charts/PriceHistoryChart.tsx
|
||||||
import React, { useState, useEffect, useMemo } from 'react';
|
import React, { useState, useEffect, useMemo } from 'react';
|
||||||
import {
|
import {
|
||||||
LineChart,
|
LineChart,
|
||||||
@@ -142,7 +142,7 @@ export const PriceHistoryChart: React.FC = () => {
|
|||||||
const renderContent = () => {
|
const renderContent = () => {
|
||||||
if (isLoading || isLoadingUserData) {
|
if (isLoading || isLoadingUserData) {
|
||||||
return (
|
return (
|
||||||
<div role="status" className="flex justify-center items-center h-full min-h-[200px]">
|
<div role="status" className="flex justify-center items-center h-full min-h-50]">
|
||||||
<LoadingSpinner /> <span className="ml-2">Loading Price History...</span>
|
<LoadingSpinner /> <span className="ml-2">Loading Price History...</span>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
@@ -198,7 +198,12 @@ export const PriceHistoryChart: React.FC = () => {
|
|||||||
borderRadius: '0.5rem',
|
borderRadius: '0.5rem',
|
||||||
}}
|
}}
|
||||||
labelStyle={{ color: '#F9FAFB' }}
|
labelStyle={{ color: '#F9FAFB' }}
|
||||||
formatter={(value: number) => `$${(value / 100).toFixed(2)}`}
|
formatter={(value: number | undefined) => {
|
||||||
|
if (typeof value === 'number') {
|
||||||
|
return [`$${(value / 100).toFixed(2)}`];
|
||||||
|
}
|
||||||
|
return [null];
|
||||||
|
}}
|
||||||
/>
|
/>
|
||||||
<Legend wrapperStyle={{ fontSize: '12px' }} />
|
<Legend wrapperStyle={{ fontSize: '12px' }} />
|
||||||
{availableItems.map((item, index) => (
|
{availableItems.map((item, index) => (
|
||||||
|
|||||||
@@ -406,6 +406,74 @@ describe('ExtractedDataTable', () => {
|
|||||||
render(<ExtractedDataTable {...defaultProps} items={singleCategoryItems} />);
|
render(<ExtractedDataTable {...defaultProps} items={singleCategoryItems} />);
|
||||||
expect(screen.queryByLabelText('Filter by category')).not.toBeInTheDocument();
|
expect(screen.queryByLabelText('Filter by category')).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should allow switching filter back to All Categories', () => {
|
||||||
|
render(<ExtractedDataTable {...defaultProps} />);
|
||||||
|
const categoryFilter = screen.getByLabelText('Filter by category');
|
||||||
|
|
||||||
|
// Filter to Dairy
|
||||||
|
fireEvent.change(categoryFilter, { target: { value: 'Dairy' } });
|
||||||
|
expect(screen.queryByText('Gala Apples')).not.toBeInTheDocument();
|
||||||
|
expect(screen.getByText('2% Milk')).toBeInTheDocument();
|
||||||
|
|
||||||
|
// Filter back to All
|
||||||
|
fireEvent.change(categoryFilter, { target: { value: 'all' } });
|
||||||
|
expect(screen.getByText('Gala Apples')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('2% Milk')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should sort items alphabetically within watched and unwatched groups', () => {
|
||||||
|
const items = [
|
||||||
|
createMockFlyerItem({
|
||||||
|
flyer_item_id: 1,
|
||||||
|
item: 'Yam',
|
||||||
|
master_item_id: 3,
|
||||||
|
category_name: 'Produce',
|
||||||
|
}), // Unwatched
|
||||||
|
createMockFlyerItem({
|
||||||
|
flyer_item_id: 2,
|
||||||
|
item: 'Zebra',
|
||||||
|
master_item_id: 1,
|
||||||
|
category_name: 'Produce',
|
||||||
|
}), // Watched
|
||||||
|
createMockFlyerItem({
|
||||||
|
flyer_item_id: 3,
|
||||||
|
item: 'Banana',
|
||||||
|
master_item_id: 4,
|
||||||
|
category_name: 'Produce',
|
||||||
|
}), // Unwatched
|
||||||
|
createMockFlyerItem({
|
||||||
|
flyer_item_id: 4,
|
||||||
|
item: 'Apple',
|
||||||
|
master_item_id: 2,
|
||||||
|
category_name: 'Produce',
|
||||||
|
}), // Watched
|
||||||
|
];
|
||||||
|
|
||||||
|
vi.mocked(useUserData).mockReturnValue({
|
||||||
|
watchedItems: [
|
||||||
|
createMockMasterGroceryItem({ master_grocery_item_id: 1, name: 'Zebra' }),
|
||||||
|
createMockMasterGroceryItem({ master_grocery_item_id: 2, name: 'Apple' }),
|
||||||
|
],
|
||||||
|
shoppingLists: [],
|
||||||
|
setWatchedItems: vi.fn(),
|
||||||
|
setShoppingLists: vi.fn(),
|
||||||
|
isLoading: false,
|
||||||
|
error: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
render(<ExtractedDataTable {...defaultProps} items={items} />);
|
||||||
|
|
||||||
|
const rows = screen.getAllByRole('row');
|
||||||
|
// Extract item names based on the bold/semibold classes used for names
|
||||||
|
const itemNames = rows.map((row) => {
|
||||||
|
const nameEl = row.querySelector('.font-bold, .font-semibold');
|
||||||
|
return nameEl?.textContent;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Expected: Watched items first (Apple, Zebra), then Unwatched (Banana, Yam)
|
||||||
|
expect(itemNames).toEqual(['Apple', 'Zebra', 'Banana', 'Yam']);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Data Edge Cases', () => {
|
describe('Data Edge Cases', () => {
|
||||||
@@ -460,5 +528,46 @@ describe('ExtractedDataTable', () => {
|
|||||||
// Check for the unit suffix, which might be in a separate element or part of the string
|
// Check for the unit suffix, which might be in a separate element or part of the string
|
||||||
expect(within(chickenItemRow).getAllByText(/\/kg/i).length).toBeGreaterThan(0);
|
expect(within(chickenItemRow).getAllByText(/\/kg/i).length).toBeGreaterThan(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should handle activeListId pointing to a non-existent list', () => {
|
||||||
|
vi.mocked(useShoppingLists).mockReturnValue({
|
||||||
|
activeListId: 999, // Non-existent
|
||||||
|
shoppingLists: mockShoppingLists,
|
||||||
|
addItemToList: mockAddItemToList,
|
||||||
|
setActiveListId: vi.fn(),
|
||||||
|
createList: vi.fn(),
|
||||||
|
deleteList: vi.fn(),
|
||||||
|
updateItemInList: vi.fn(),
|
||||||
|
removeItemFromList: vi.fn(),
|
||||||
|
isCreatingList: false,
|
||||||
|
isDeletingList: false,
|
||||||
|
isAddingItem: false,
|
||||||
|
isUpdatingItem: false,
|
||||||
|
isRemovingItem: false,
|
||||||
|
error: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
render(<ExtractedDataTable {...defaultProps} />);
|
||||||
|
|
||||||
|
// Should behave as if item is not in list (Add button enabled)
|
||||||
|
const appleItemRow = screen.getByText('Gala Apples').closest('tr')!;
|
||||||
|
const addToListButton = within(appleItemRow).getByTitle('Add Apples to list');
|
||||||
|
expect(addToListButton).toBeInTheDocument();
|
||||||
|
expect(addToListButton).not.toBeDisabled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should display numeric quantity in parentheses if available', () => {
|
||||||
|
const itemWithQtyNum = createMockFlyerItem({
|
||||||
|
flyer_item_id: 999,
|
||||||
|
item: 'Bulk Rice',
|
||||||
|
quantity: 'Bag',
|
||||||
|
quantity_num: 5,
|
||||||
|
unit_price: { value: 10, unit: 'kg' },
|
||||||
|
category_name: 'Pantry',
|
||||||
|
flyer_id: 1,
|
||||||
|
});
|
||||||
|
render(<ExtractedDataTable {...defaultProps} items={[itemWithQtyNum]} />);
|
||||||
|
expect(screen.getByText('(5)')).toBeInTheDocument();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
// src/features/flyer/FlyerList.test.tsx
|
// src/features/flyer/FlyerList.test.tsx
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
|
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
|
||||||
import { describe, it, expect, vi, beforeEach, type Mocked } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, afterEach, type Mocked } from 'vitest';
|
||||||
import { FlyerList } from './FlyerList';
|
import { FlyerList } from './FlyerList';
|
||||||
import { formatShortDate } from './dateUtils';
|
import { formatShortDate } from './dateUtils';
|
||||||
import type { Flyer, UserProfile } from '../../types';
|
import type { Flyer, UserProfile } from '../../types';
|
||||||
@@ -257,6 +257,73 @@ describe('FlyerList', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('Expiration Status Logic', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.useFakeTimers();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.useRealTimers();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should show "Expired" for past dates', () => {
|
||||||
|
// Flyer 1 valid_to is 2023-10-11
|
||||||
|
vi.setSystemTime(new Date('2023-10-12T12:00:00Z'));
|
||||||
|
render(
|
||||||
|
<FlyerList
|
||||||
|
flyers={[mockFlyers[0]]}
|
||||||
|
onFlyerSelect={mockOnFlyerSelect}
|
||||||
|
selectedFlyerId={null}
|
||||||
|
profile={mockProfile}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
expect(screen.getByText('• Expired')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('• Expired')).toHaveClass('text-red-500');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should show "Expires today" when valid_to is today', () => {
|
||||||
|
vi.setSystemTime(new Date('2023-10-11T12:00:00Z'));
|
||||||
|
render(
|
||||||
|
<FlyerList
|
||||||
|
flyers={[mockFlyers[0]]}
|
||||||
|
onFlyerSelect={mockOnFlyerSelect}
|
||||||
|
selectedFlyerId={null}
|
||||||
|
profile={mockProfile}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
expect(screen.getByText('• Expires today')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('• Expires today')).toHaveClass('text-orange-500');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should show "Expires in X days" (orange) for <= 3 days', () => {
|
||||||
|
vi.setSystemTime(new Date('2023-10-09T12:00:00Z')); // 2 days left
|
||||||
|
render(
|
||||||
|
<FlyerList
|
||||||
|
flyers={[mockFlyers[0]]}
|
||||||
|
onFlyerSelect={mockOnFlyerSelect}
|
||||||
|
selectedFlyerId={null}
|
||||||
|
profile={mockProfile}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
expect(screen.getByText('• Expires in 2 days')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('• Expires in 2 days')).toHaveClass('text-orange-500');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should show "Expires in X days" (green) for > 3 days', () => {
|
||||||
|
vi.setSystemTime(new Date('2023-10-05T12:00:00Z')); // 6 days left
|
||||||
|
render(
|
||||||
|
<FlyerList
|
||||||
|
flyers={[mockFlyers[0]]}
|
||||||
|
onFlyerSelect={mockOnFlyerSelect}
|
||||||
|
selectedFlyerId={null}
|
||||||
|
profile={mockProfile}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
expect(screen.getByText('• Expires in 6 days')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('• Expires in 6 days')).toHaveClass('text-green-600');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe('Admin Functionality', () => {
|
describe('Admin Functionality', () => {
|
||||||
const adminProfile: UserProfile = createMockUserProfile({
|
const adminProfile: UserProfile = createMockUserProfile({
|
||||||
user: { user_id: 'admin-1', email: 'admin@example.com' },
|
user: { user_id: 'admin-1', email: 'admin@example.com' },
|
||||||
|
|||||||
@@ -6,14 +6,24 @@ import { FlyerUploader } from './FlyerUploader';
|
|||||||
import * as aiApiClientModule from '../../services/aiApiClient';
|
import * as aiApiClientModule from '../../services/aiApiClient';
|
||||||
import * as checksumModule from '../../utils/checksum';
|
import * as checksumModule from '../../utils/checksum';
|
||||||
import { useNavigate, MemoryRouter } from 'react-router-dom';
|
import { useNavigate, MemoryRouter } from 'react-router-dom';
|
||||||
|
import { QueryClient, QueryClientProvider, onlineManager } from '@tanstack/react-query';
|
||||||
|
|
||||||
// Mock dependencies
|
// Mock dependencies
|
||||||
vi.mock('../../services/aiApiClient');
|
vi.mock('../../services/aiApiClient', async (importOriginal) => {
|
||||||
|
const actual = await importOriginal<typeof import('../../services/aiApiClient')>();
|
||||||
|
return {
|
||||||
|
...actual,
|
||||||
|
uploadAndProcessFlyer: vi.fn(),
|
||||||
|
getJobStatus: vi.fn(),
|
||||||
|
};
|
||||||
|
});
|
||||||
vi.mock('../../services/logger.client', () => ({
|
vi.mock('../../services/logger.client', () => ({
|
||||||
// Keep the original logger.info/error but also spy on it for test assertions if needed
|
// Keep the original logger.info/error but also spy on it for test assertions if needed
|
||||||
logger: {
|
logger: {
|
||||||
info: vi.fn((...args) => console.log('[LOGGER.INFO]', ...args)),
|
info: vi.fn((...args) => console.log('[LOGGER.INFO]', ...args)),
|
||||||
error: vi.fn((...args) => console.error('[LOGGER.ERROR]', ...args)),
|
error: vi.fn((...args) => console.error('[LOGGER.ERROR]', ...args)),
|
||||||
|
warn: vi.fn((...args) => console.warn('[LOGGER.WARN]', ...args)),
|
||||||
|
debug: vi.fn((...args) => console.debug('[LOGGER.DEBUG]', ...args)),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
vi.mock('../../utils/checksum', () => ({
|
vi.mock('../../utils/checksum', () => ({
|
||||||
@@ -39,10 +49,19 @@ const mockedChecksumModule = checksumModule as unknown as {
|
|||||||
|
|
||||||
const renderComponent = (onProcessingComplete = vi.fn()) => {
|
const renderComponent = (onProcessingComplete = vi.fn()) => {
|
||||||
console.log('--- [TEST LOG] ---: Rendering component inside MemoryRouter.');
|
console.log('--- [TEST LOG] ---: Rendering component inside MemoryRouter.');
|
||||||
|
const queryClient = new QueryClient({
|
||||||
|
defaultOptions: {
|
||||||
|
queries: {
|
||||||
|
retry: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
return render(
|
return render(
|
||||||
|
<QueryClientProvider client={queryClient}>
|
||||||
<MemoryRouter>
|
<MemoryRouter>
|
||||||
<FlyerUploader onProcessingComplete={onProcessingComplete} />
|
<FlyerUploader onProcessingComplete={onProcessingComplete} />
|
||||||
</MemoryRouter>,
|
</MemoryRouter>
|
||||||
|
</QueryClientProvider>,
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -50,10 +69,11 @@ describe('FlyerUploader', () => {
|
|||||||
const navigateSpy = vi.fn();
|
const navigateSpy = vi.fn();
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
|
// Disable react-query's online manager to prevent it from interfering with fake timers
|
||||||
|
onlineManager.setEventListener((setOnline) => {
|
||||||
|
return () => {};
|
||||||
|
});
|
||||||
console.log(`\n--- [TEST LOG] ---: Starting test: "${expect.getState().currentTestName}"`);
|
console.log(`\n--- [TEST LOG] ---: Starting test: "${expect.getState().currentTestName}"`);
|
||||||
// Use the 'modern' implementation of fake timers to handle promise microtasks correctly.
|
|
||||||
vi.useFakeTimers({ toFake: ['setTimeout'], shouldAdvanceTime: true });
|
|
||||||
console.log('--- [TEST LOG] ---: MODERN fake timers enabled.');
|
|
||||||
vi.resetAllMocks(); // Resets mock implementations AND call history.
|
vi.resetAllMocks(); // Resets mock implementations AND call history.
|
||||||
console.log('--- [TEST LOG] ---: Mocks reset.');
|
console.log('--- [TEST LOG] ---: Mocks reset.');
|
||||||
mockedChecksumModule.generateFileChecksum.mockResolvedValue('mock-checksum');
|
mockedChecksumModule.generateFileChecksum.mockResolvedValue('mock-checksum');
|
||||||
@@ -61,7 +81,6 @@ describe('FlyerUploader', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
vi.useRealTimers();
|
|
||||||
console.log(`--- [TEST LOG] ---: Finished test: "${expect.getState().currentTestName}"\n`);
|
console.log(`--- [TEST LOG] ---: Finished test: "${expect.getState().currentTestName}"\n`);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -73,12 +92,11 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
it('should handle file upload and start polling', async () => {
|
it('should handle file upload and start polling', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
|
||||||
new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
|
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||||
);
|
state: 'active',
|
||||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
progress: { message: 'Checking...' },
|
||||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Checking...' } })),
|
});
|
||||||
);
|
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.');
|
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.');
|
||||||
renderComponent();
|
renderComponent();
|
||||||
@@ -105,21 +123,18 @@ describe('FlyerUploader', () => {
|
|||||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
||||||
console.log('--- [TEST LOG] ---: 7. Mocks verified. Advancing timers now...');
|
console.log('--- [TEST LOG] ---: 7. Mocks verified. Advancing timers now...');
|
||||||
|
|
||||||
await act(async () => {
|
// With real timers, we now wait for the polling interval to elapse.
|
||||||
console.log('--- [TEST LOG] ---: 8a. vi.advanceTimersByTime(3000) starting...');
|
|
||||||
vi.advanceTimersByTime(3000);
|
|
||||||
console.log('--- [TEST LOG] ---: 8b. vi.advanceTimersByTime(3000) complete.');
|
|
||||||
});
|
|
||||||
console.log(
|
console.log(
|
||||||
`--- [TEST LOG] ---: 9. Act block finished. Now checking if getJobStatus was called again.`,
|
`--- [TEST LOG] ---: 9. Act block finished. Now checking if getJobStatus was called again.`,
|
||||||
);
|
);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
// The polling interval is 3s, so we wait for a bit longer.
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
const calls = mockedAiApiClient.getJobStatus.mock.calls.length;
|
const calls = mockedAiApiClient.getJobStatus.mock.calls.length;
|
||||||
console.log(`--- [TEST LOG] ---: 10. waitFor check: getJobStatus calls = ${calls}`);
|
console.log(`--- [TEST LOG] ---: 10. waitFor check: getJobStatus calls = ${calls}`);
|
||||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(2);
|
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(2);
|
||||||
});
|
}, { timeout: 4000 });
|
||||||
console.log('--- [TEST LOG] ---: 11. SUCCESS: Second poll confirmed.');
|
console.log('--- [TEST LOG] ---: 11. SUCCESS: Second poll confirmed.');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('--- [TEST LOG] ---: 11. ERROR: waitFor for second poll timed out.');
|
console.error('--- [TEST LOG] ---: 11. ERROR: waitFor for second poll timed out.');
|
||||||
@@ -131,12 +146,11 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
it('should handle file upload via drag and drop', async () => {
|
it('should handle file upload via drag and drop', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-dnd' });
|
||||||
new Response(JSON.stringify({ jobId: 'job-dnd' }), { status: 200 }),
|
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||||
);
|
state: 'active',
|
||||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
progress: { message: 'Dropped...' },
|
||||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Dropped...' } })),
|
});
|
||||||
);
|
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.');
|
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.');
|
||||||
renderComponent();
|
renderComponent();
|
||||||
@@ -159,16 +173,10 @@ describe('FlyerUploader', () => {
|
|||||||
it('should poll for status, complete successfully, and redirect', async () => {
|
it('should poll for status, complete successfully, and redirect', async () => {
|
||||||
const onProcessingComplete = vi.fn();
|
const onProcessingComplete = vi.fn();
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
|
||||||
new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
|
|
||||||
);
|
|
||||||
mockedAiApiClient.getJobStatus
|
mockedAiApiClient.getJobStatus
|
||||||
.mockResolvedValueOnce(
|
.mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
|
||||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
|
.mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });
|
||||||
)
|
|
||||||
.mockResolvedValueOnce(
|
|
||||||
new Response(JSON.stringify({ state: 'completed', returnValue: { flyerId: 42 } })),
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
|
console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
|
||||||
renderComponent(onProcessingComplete);
|
renderComponent(onProcessingComplete);
|
||||||
@@ -189,24 +197,21 @@ describe('FlyerUploader', () => {
|
|||||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
||||||
console.log('--- [TEST LOG] ---: 5. First poll confirmed. Now AWAITING timer advancement.');
|
console.log('--- [TEST LOG] ---: 5. First poll confirmed. Now AWAITING timer advancement.');
|
||||||
|
|
||||||
await act(async () => {
|
|
||||||
console.log(`--- [TEST LOG] ---: 6. Advancing timers by 4000ms for the second poll...`);
|
|
||||||
vi.advanceTimersByTime(4000);
|
|
||||||
});
|
|
||||||
console.log(`--- [TEST LOG] ---: 7. Timers advanced. Now AWAITING completion message.`);
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
console.log(
|
console.log(
|
||||||
'--- [TEST LOG] ---: 8a. waitFor check: Waiting for completion text and job status count.',
|
'--- [TEST LOG] ---: 8a. waitFor check: Waiting for completion text and job status count.',
|
||||||
);
|
);
|
||||||
|
// Wait for the second poll to occur and the UI to update.
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
console.log(
|
console.log(
|
||||||
`--- [TEST LOG] ---: 8b. waitFor interval: calls=${mockedAiApiClient.getJobStatus.mock.calls.length}`,
|
`--- [TEST LOG] ---: 8b. waitFor interval: calls=${
|
||||||
|
mockedAiApiClient.getJobStatus.mock.calls.length
|
||||||
|
}`,
|
||||||
);
|
);
|
||||||
expect(
|
expect(
|
||||||
screen.getByText('Processing complete! Redirecting to flyer 42...'),
|
screen.getByText('Processing complete! Redirecting to flyer 42...'),
|
||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
});
|
}, { timeout: 4000 });
|
||||||
console.log('--- [TEST LOG] ---: 9. SUCCESS: Completion message found.');
|
console.log('--- [TEST LOG] ---: 9. SUCCESS: Completion message found.');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('--- [TEST LOG] ---: 9. ERROR: waitFor for completion message timed out.');
|
console.error('--- [TEST LOG] ---: 9. ERROR: waitFor for completion message timed out.');
|
||||||
@@ -216,12 +221,9 @@ describe('FlyerUploader', () => {
|
|||||||
}
|
}
|
||||||
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(2);
|
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(2);
|
||||||
|
|
||||||
await act(async () => {
|
// Wait for the redirect timer (1.5s in component) to fire.
|
||||||
console.log(`--- [TEST LOG] ---: 10. Advancing timers by 2000ms for redirect...`);
|
await act(() => new Promise((r) => setTimeout(r, 2000)));
|
||||||
vi.advanceTimersByTime(2000);
|
|
||||||
});
|
|
||||||
console.log(`--- [TEST LOG] ---: 11. Timers advanced. Now asserting navigation.`);
|
console.log(`--- [TEST LOG] ---: 11. Timers advanced. Now asserting navigation.`);
|
||||||
|
|
||||||
expect(onProcessingComplete).toHaveBeenCalled();
|
expect(onProcessingComplete).toHaveBeenCalled();
|
||||||
expect(navigateSpy).toHaveBeenCalledWith('/flyers/42');
|
expect(navigateSpy).toHaveBeenCalledWith('/flyers/42');
|
||||||
console.log('--- [TEST LOG] ---: 12. Callback and navigation confirmed.');
|
console.log('--- [TEST LOG] ---: 12. Callback and navigation confirmed.');
|
||||||
@@ -229,12 +231,11 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
it('should handle a failed job', async () => {
|
it('should handle a failed job', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail' });
|
||||||
new Response(JSON.stringify({ jobId: 'job-fail' }), { status: 200 }),
|
// The getJobStatus function throws a specific error when the job fails,
|
||||||
);
|
// which is then caught by react-query and placed in the `error` state.
|
||||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
const jobFailedError = new aiApiClientModule.JobFailedError('AI model exploded', 'UNKNOWN_ERROR');
|
||||||
new Response(JSON.stringify({ state: 'failed', failedReason: 'AI model exploded' })),
|
mockedAiApiClient.getJobStatus.mockRejectedValue(jobFailedError);
|
||||||
);
|
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
||||||
renderComponent();
|
renderComponent();
|
||||||
@@ -247,7 +248,8 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
console.log('--- [TEST LOG] ---: 4. AWAITING failure message...');
|
console.log('--- [TEST LOG] ---: 4. AWAITING failure message...');
|
||||||
expect(await screen.findByText(/Processing failed: AI model exploded/i)).toBeInTheDocument();
|
// The UI should now display the error from the `pollError` state, which includes the "Polling failed" prefix.
|
||||||
|
expect(await screen.findByText(/Polling failed: AI model exploded/i)).toBeInTheDocument();
|
||||||
console.log('--- [TEST LOG] ---: 5. SUCCESS: Failure message found.');
|
console.log('--- [TEST LOG] ---: 5. SUCCESS: Failure message found.');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('--- [TEST LOG] ---: 5. ERROR: findByText for failure message timed out.');
|
console.error('--- [TEST LOG] ---: 5. ERROR: findByText for failure message timed out.');
|
||||||
@@ -260,11 +262,80 @@ describe('FlyerUploader', () => {
|
|||||||
console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.');
|
console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should clear the polling timeout when a job fails', async () => {
|
||||||
|
console.log('--- [TEST LOG] ---: 1. Setting up mocks for failed job timeout clearance.');
|
||||||
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail-timeout' });
|
||||||
|
|
||||||
|
// We need at least one 'active' response to establish a timeout loop so we have something to clear
|
||||||
|
// The second call should be a rejection, as this is how getJobStatus signals a failure.
|
||||||
|
mockedAiApiClient.getJobStatus
|
||||||
|
.mockResolvedValueOnce({
|
||||||
|
state: 'active',
|
||||||
|
progress: { message: 'Working...' },
|
||||||
|
} as aiApiClientModule.JobStatus)
|
||||||
|
.mockRejectedValueOnce(new aiApiClientModule.JobFailedError('Fatal Error', 'UNKNOWN_ERROR'));
|
||||||
|
|
||||||
|
renderComponent();
|
||||||
|
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||||
|
const input = screen.getByLabelText(/click to select a file/i);
|
||||||
|
|
||||||
|
fireEvent.change(input, { target: { files: [file] } });
|
||||||
|
|
||||||
|
// Wait for the first poll to complete and UI to update to "Working..."
|
||||||
|
await screen.findByText('Working...');
|
||||||
|
|
||||||
|
// Wait for the failure UI
|
||||||
|
await waitFor(() => expect(screen.getByText(/Polling failed: Fatal Error/i)).toBeInTheDocument(), { timeout: 4000 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should stop polling for job status when the component unmounts', async () => {
|
||||||
|
console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount polling stop.');
|
||||||
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-unmount' });
|
||||||
|
// Mock getJobStatus to always return 'active' to keep polling
|
||||||
|
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||||
|
state: 'active',
|
||||||
|
progress: { message: 'Polling...' },
|
||||||
|
});
|
||||||
|
|
||||||
|
const { unmount } = renderComponent();
|
||||||
|
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||||
|
const input = screen.getByLabelText(/click to select a file/i);
|
||||||
|
|
||||||
|
fireEvent.change(input, { target: { files: [file] } });
|
||||||
|
|
||||||
|
// Wait for the first poll to complete and UI to update
|
||||||
|
await screen.findByText('Polling...');
|
||||||
|
|
||||||
|
// Wait for exactly one call to be sure polling has started.
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
console.log('--- [TEST LOG] ---: 2. First poll confirmed.');
|
||||||
|
|
||||||
|
// Record the number of calls before unmounting.
|
||||||
|
const callsBeforeUnmount = mockedAiApiClient.getJobStatus.mock.calls.length;
|
||||||
|
|
||||||
|
// Now unmount the component, which should stop the polling.
|
||||||
|
console.log('--- [TEST LOG] ---: 3. Unmounting component.');
|
||||||
|
unmount();
|
||||||
|
|
||||||
|
// Wait for a duration longer than the polling interval (3s) to see if more calls are made.
|
||||||
|
console.log('--- [TEST LOG] ---: 4. Waiting for 4 seconds to check for further polling.');
|
||||||
|
await act(() => new Promise((resolve) => setTimeout(resolve, 4000)));
|
||||||
|
|
||||||
|
// Verify that getJobStatus was not called again after unmounting.
|
||||||
|
console.log('--- [TEST LOG] ---: 5. Asserting no new polls occurred.');
|
||||||
|
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBeforeUnmount);
|
||||||
|
});
|
||||||
|
|
||||||
it('should handle a duplicate flyer error (409)', async () => {
|
it('should handle a duplicate flyer error (409)', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
// The API client throws a structured error, which useFlyerUploader now parses
|
||||||
new Response(JSON.stringify({ flyerId: 99, message: 'Duplicate' }), { status: 409 }),
|
// to set both the errorMessage and the duplicateFlyerId.
|
||||||
);
|
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
|
||||||
|
status: 409,
|
||||||
|
body: { flyerId: 99, message: 'This flyer has already been processed.' },
|
||||||
|
});
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
||||||
renderComponent();
|
renderComponent();
|
||||||
@@ -277,9 +348,10 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
console.log('--- [TEST LOG] ---: 4. AWAITING duplicate flyer message...');
|
console.log('--- [TEST LOG] ---: 4. AWAITING duplicate flyer message...');
|
||||||
expect(
|
// With the fix, the duplicate error message and the link are combined into a single paragraph.
|
||||||
await screen.findByText('This flyer has already been processed. You can view it here:'),
|
// We now look for this combined message.
|
||||||
).toBeInTheDocument();
|
const errorMessage = await screen.findByText(/This flyer has already been processed. You can view it here:/i);
|
||||||
|
expect(errorMessage).toBeInTheDocument();
|
||||||
console.log('--- [TEST LOG] ---: 5. SUCCESS: Duplicate message found.');
|
console.log('--- [TEST LOG] ---: 5. SUCCESS: Duplicate message found.');
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error('--- [TEST LOG] ---: 5. ERROR: findByText for duplicate message timed out.');
|
console.error('--- [TEST LOG] ---: 5. ERROR: findByText for duplicate message timed out.');
|
||||||
@@ -295,12 +367,11 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
it('should allow the user to stop watching progress', async () => {
|
it('should allow the user to stop watching progress', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-stop' });
|
||||||
new Response(JSON.stringify({ jobId: 'job-stop' }), { status: 200 }),
|
mockedAiApiClient.getJobStatus.mockResolvedValue({
|
||||||
);
|
state: 'active',
|
||||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
progress: { message: 'Analyzing...' },
|
||||||
new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })),
|
} as any);
|
||||||
);
|
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
|
||||||
renderComponent();
|
renderComponent();
|
||||||
@@ -362,9 +433,11 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
it('should handle a generic network error during upload', async () => {
|
it('should handle a generic network error during upload', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue(
|
// Simulate a structured error from the API client
|
||||||
new Error('Network Error During Upload'),
|
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
|
||||||
);
|
status: 500,
|
||||||
|
body: { message: 'Network Error During Upload' },
|
||||||
|
});
|
||||||
renderComponent();
|
renderComponent();
|
||||||
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||||
const input = screen.getByLabelText(/click to select a file/i);
|
const input = screen.getByLabelText(/click to select a file/i);
|
||||||
@@ -379,9 +452,7 @@ describe('FlyerUploader', () => {
|
|||||||
|
|
||||||
it('should handle a generic network error during polling', async () => {
|
it('should handle a generic network error during polling', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-poll-fail' });
|
||||||
new Response(JSON.stringify({ jobId: 'job-poll-fail' }), { status: 200 }),
|
|
||||||
);
|
|
||||||
mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error'));
|
mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error'));
|
||||||
|
|
||||||
renderComponent();
|
renderComponent();
|
||||||
@@ -392,17 +463,15 @@ describe('FlyerUploader', () => {
|
|||||||
fireEvent.change(input, { target: { files: [file] } });
|
fireEvent.change(input, { target: { files: [file] } });
|
||||||
|
|
||||||
console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
|
console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
|
||||||
expect(await screen.findByText(/Polling Network Error/i)).toBeInTheDocument();
|
expect(await screen.findByText(/Polling failed: Polling Network Error/i)).toBeInTheDocument();
|
||||||
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
|
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a completed job with a missing flyerId', async () => {
|
it('should handle a completed job with a missing flyerId', async () => {
|
||||||
console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.');
|
console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.');
|
||||||
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue(
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-no-flyerid' });
|
||||||
new Response(JSON.stringify({ jobId: 'job-no-flyerid' }), { status: 200 }),
|
|
||||||
);
|
|
||||||
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
mockedAiApiClient.getJobStatus.mockResolvedValue(
|
||||||
new Response(JSON.stringify({ state: 'completed', returnValue: {} })), // No flyerId
|
{ state: 'completed', returnValue: {} }, // No flyerId
|
||||||
);
|
);
|
||||||
|
|
||||||
renderComponent();
|
renderComponent();
|
||||||
@@ -419,6 +488,29 @@ describe('FlyerUploader', () => {
|
|||||||
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
|
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should handle a non-JSON response during polling', async () => {
|
||||||
|
console.log('--- [TEST LOG] ---: 1. Setting up mock for non-JSON response.');
|
||||||
|
// The actual function would throw, so we mock the rejection.
|
||||||
|
// The new getJobStatus would throw an error like "Failed to parse JSON..."
|
||||||
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-bad-json' });
|
||||||
|
mockedAiApiClient.getJobStatus.mockRejectedValue(
|
||||||
|
new Error('Failed to parse JSON response from server. Body: <html>502 Bad Gateway</html>'),
|
||||||
|
);
|
||||||
|
|
||||||
|
renderComponent();
|
||||||
|
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
|
||||||
|
const input = screen.getByLabelText(/click to select a file/i);
|
||||||
|
|
||||||
|
console.log('--- [TEST LOG] ---: 2. Firing file change event.');
|
||||||
|
fireEvent.change(input, { target: { files: [file] } });
|
||||||
|
|
||||||
|
console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
|
||||||
|
expect(
|
||||||
|
await screen.findByText(/Polling failed: Failed to parse JSON response from server/i),
|
||||||
|
).toBeInTheDocument();
|
||||||
|
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
|
||||||
|
});
|
||||||
|
|
||||||
it('should do nothing if the file input is cancelled', () => {
|
it('should do nothing if the file input is cancelled', () => {
|
||||||
renderComponent();
|
renderComponent();
|
||||||
const input = screen.getByLabelText(/click to select a file/i);
|
const input = screen.getByLabelText(/click to select a file/i);
|
||||||
|
|||||||
@@ -1,213 +1,68 @@
|
|||||||
// src/features/flyer/FlyerUploader.tsx
|
// src/features/flyer/FlyerUploader.tsx
|
||||||
import React, { useState, useEffect, useRef, useCallback } from 'react';
|
import React, { useEffect, useCallback } from 'react';
|
||||||
import { useNavigate, Link } from 'react-router-dom';
|
import { useNavigate, Link } from 'react-router-dom';
|
||||||
import { uploadAndProcessFlyer, getJobStatus } from '../../services/aiApiClient';
|
|
||||||
import { generateFileChecksum } from '../../utils/checksum';
|
|
||||||
import { logger } from '../../services/logger.client';
|
import { logger } from '../../services/logger.client';
|
||||||
import { ProcessingStatus } from './ProcessingStatus';
|
import { ProcessingStatus } from './ProcessingStatus';
|
||||||
import type { ProcessingStage } from '../../types';
|
|
||||||
import { useDragAndDrop } from '../../hooks/useDragAndDrop';
|
import { useDragAndDrop } from '../../hooks/useDragAndDrop';
|
||||||
|
import { useFlyerUploader } from '../../hooks/useFlyerUploader';
|
||||||
type ProcessingState = 'idle' | 'uploading' | 'polling' | 'completed' | 'error';
|
|
||||||
|
|
||||||
interface FlyerUploaderProps {
|
interface FlyerUploaderProps {
|
||||||
onProcessingComplete: () => void;
|
onProcessingComplete: () => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComplete }) => {
|
export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComplete }) => {
|
||||||
const [processingState, setProcessingState] = useState<ProcessingState>('idle');
|
|
||||||
const [statusMessage, setStatusMessage] = useState<string | null>(null);
|
|
||||||
const [jobId, setJobId] = useState<string | null>(null);
|
|
||||||
const [errorMessage, setErrorMessage] = useState<string | null>(null);
|
|
||||||
const [duplicateFlyerId, setDuplicateFlyerId] = useState<number | null>(null);
|
|
||||||
const navigate = useNavigate();
|
const navigate = useNavigate();
|
||||||
|
|
||||||
const pollingTimeoutRef = useRef<number | null>(null);
|
const {
|
||||||
|
processingState,
|
||||||
const [processingStages, setProcessingStages] = useState<ProcessingStage[]>([]);
|
statusMessage,
|
||||||
const [estimatedTime, setEstimatedTime] = useState(0);
|
errorMessage,
|
||||||
const [currentFile, setCurrentFile] = useState<string | null>(null);
|
duplicateFlyerId,
|
||||||
|
processingStages,
|
||||||
// DEBUG: Log component mount and unmount
|
estimatedTime,
|
||||||
useEffect(() => {
|
currentFile,
|
||||||
console.debug('[DEBUG] FlyerUploader: Component did mount.');
|
flyerId,
|
||||||
return () => {
|
upload,
|
||||||
console.debug('[DEBUG] FlyerUploader: Component will unmount.');
|
resetUploaderState,
|
||||||
};
|
} = useFlyerUploader();
|
||||||
}, []);
|
|
||||||
|
|
||||||
// DEBUG: Log state changes
|
|
||||||
useEffect(() => {
|
|
||||||
console.debug(`[DEBUG] FlyerUploader: processingState changed to -> ${processingState}`);
|
|
||||||
}, [processingState]);
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (statusMessage) logger.info(`FlyerUploader Status: ${statusMessage}`);
|
if (statusMessage) logger.info(`FlyerUploader Status: ${statusMessage}`);
|
||||||
}, [statusMessage]);
|
}, [statusMessage]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
console.debug(`[DEBUG] Polling Effect Triggered: state=${processingState}, jobId=${jobId}`);
|
if (errorMessage) {
|
||||||
if (processingState !== 'polling' || !jobId) {
|
logger.error(`[FlyerUploader] Error encountered: ${errorMessage}`, { duplicateFlyerId });
|
||||||
if (pollingTimeoutRef.current) {
|
|
||||||
console.debug(
|
|
||||||
`[DEBUG] Polling Effect: Clearing timeout ID ${pollingTimeoutRef.current} because state is not 'polling' or no jobId exists.`,
|
|
||||||
);
|
|
||||||
clearTimeout(pollingTimeoutRef.current);
|
|
||||||
}
|
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
|
}, [errorMessage, duplicateFlyerId]);
|
||||||
|
|
||||||
const pollStatus = async () => {
|
// Handle completion and navigation
|
||||||
console.debug(`[DEBUG] pollStatus(): Polling for jobId: ${jobId}`);
|
useEffect(() => {
|
||||||
try {
|
if (processingState === 'completed' && flyerId) {
|
||||||
const statusResponse = await getJobStatus(jobId);
|
|
||||||
console.debug(`[DEBUG] pollStatus(): API response status: ${statusResponse.status}`);
|
|
||||||
if (!statusResponse.ok) {
|
|
||||||
throw new Error(`Failed to get job status (HTTP ${statusResponse.status})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const job = await statusResponse.json();
|
|
||||||
console.debug('[DEBUG] pollStatus(): Job status received:', job);
|
|
||||||
|
|
||||||
if (job.progress) {
|
|
||||||
setProcessingStages(job.progress.stages || []);
|
|
||||||
setEstimatedTime(job.progress.estimatedTimeRemaining || 0);
|
|
||||||
setStatusMessage(job.progress.message || null);
|
|
||||||
}
|
|
||||||
|
|
||||||
switch (job.state) {
|
|
||||||
case 'completed':
|
|
||||||
console.debug('[DEBUG] pollStatus(): Job state is "completed".');
|
|
||||||
const flyerId = job.returnValue?.flyerId;
|
|
||||||
if (flyerId) {
|
|
||||||
setStatusMessage(`Processing complete! Redirecting to flyer ${flyerId}...`);
|
|
||||||
setProcessingState('completed');
|
|
||||||
onProcessingComplete();
|
onProcessingComplete();
|
||||||
console.debug('[DEBUG] pollStatus(): Setting 1500ms timeout for redirect.');
|
// Small delay to show the "Complete" state before redirecting
|
||||||
setTimeout(() => {
|
const timer = setTimeout(() => {
|
||||||
console.debug(`[DEBUG] pollStatus(): Redirecting to /flyers/${flyerId}`);
|
|
||||||
navigate(`/flyers/${flyerId}`);
|
navigate(`/flyers/${flyerId}`);
|
||||||
}, 1500);
|
}, 1500);
|
||||||
} else {
|
return () => clearTimeout(timer);
|
||||||
throw new Error('Job completed but did not return a flyer ID.');
|
|
||||||
}
|
}
|
||||||
break;
|
}, [processingState, flyerId, onProcessingComplete, navigate]);
|
||||||
|
|
||||||
case 'failed':
|
|
||||||
console.debug(
|
|
||||||
`[DEBUG] pollStatus(): Job state is "failed". Reason: ${job.failedReason}`,
|
|
||||||
);
|
|
||||||
setErrorMessage(`Processing failed: ${job.failedReason || 'Unknown error'}`);
|
|
||||||
setProcessingState('error');
|
|
||||||
break;
|
|
||||||
|
|
||||||
case 'active':
|
|
||||||
case 'waiting':
|
|
||||||
default:
|
|
||||||
console.debug(
|
|
||||||
`[DEBUG] pollStatus(): Job state is "${job.state}". Setting timeout for next poll (3000ms).`,
|
|
||||||
);
|
|
||||||
pollingTimeoutRef.current = window.setTimeout(pollStatus, 3000);
|
|
||||||
console.debug(`[DEBUG] pollStatus(): Timeout ID ${pollingTimeoutRef.current} set.`);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error during polling:', { error });
|
|
||||||
setErrorMessage(
|
|
||||||
error instanceof Error ? error.message : 'An unexpected error occurred during polling.',
|
|
||||||
);
|
|
||||||
setProcessingState('error');
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
pollStatus();
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
if (pollingTimeoutRef.current) {
|
|
||||||
console.debug(
|
|
||||||
`[DEBUG] Polling Effect Cleanup: Clearing timeout ID ${pollingTimeoutRef.current}`,
|
|
||||||
);
|
|
||||||
clearTimeout(pollingTimeoutRef.current);
|
|
||||||
pollingTimeoutRef.current = null;
|
|
||||||
} else {
|
|
||||||
console.debug('[DEBUG] Polling Effect Cleanup: No active timeout to clear.');
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}, [processingState, jobId, onProcessingComplete, navigate]);
|
|
||||||
|
|
||||||
const processFile = useCallback(async (file: File) => {
|
|
||||||
console.debug('[DEBUG] processFile(): Starting file processing for', file.name);
|
|
||||||
setProcessingState('uploading');
|
|
||||||
setErrorMessage(null);
|
|
||||||
setDuplicateFlyerId(null);
|
|
||||||
setCurrentFile(file.name);
|
|
||||||
|
|
||||||
try {
|
|
||||||
console.debug('[DEBUG] processFile(): Generating file checksum.');
|
|
||||||
const checksum = await generateFileChecksum(file);
|
|
||||||
setStatusMessage('Uploading file...');
|
|
||||||
console.debug(
|
|
||||||
`[DEBUG] processFile(): Checksum generated: ${checksum}. Calling uploadAndProcessFlyer.`,
|
|
||||||
);
|
|
||||||
|
|
||||||
const startResponse = await uploadAndProcessFlyer(file, checksum);
|
|
||||||
console.debug(`[DEBUG] processFile(): Upload response status: ${startResponse.status}`);
|
|
||||||
|
|
||||||
if (!startResponse.ok) {
|
|
||||||
const errorData = await startResponse.json();
|
|
||||||
console.debug('[DEBUG] processFile(): Upload failed. Error data:', errorData);
|
|
||||||
if (startResponse.status === 409 && errorData.flyerId) {
|
|
||||||
setErrorMessage(`This flyer has already been processed. You can view it here:`);
|
|
||||||
setDuplicateFlyerId(errorData.flyerId);
|
|
||||||
} else {
|
|
||||||
setErrorMessage(errorData.message || `Upload failed with status ${startResponse.status}`);
|
|
||||||
}
|
|
||||||
setProcessingState('error');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const { jobId: newJobId } = await startResponse.json();
|
|
||||||
console.debug(`[DEBUG] processFile(): Upload successful. Received jobId: ${newJobId}`);
|
|
||||||
setJobId(newJobId);
|
|
||||||
setProcessingState('polling');
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('An unexpected error occurred during file upload:', { error });
|
|
||||||
setErrorMessage(error instanceof Error ? error.message : 'An unexpected error occurred.');
|
|
||||||
setProcessingState('error');
|
|
||||||
}
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
|
const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
|
||||||
console.debug('[DEBUG] handleFileChange(): File input changed.');
|
|
||||||
const file = event.target.files?.[0];
|
const file = event.target.files?.[0];
|
||||||
if (file) {
|
if (file) {
|
||||||
processFile(file);
|
upload(file);
|
||||||
}
|
}
|
||||||
event.target.value = '';
|
event.target.value = '';
|
||||||
};
|
};
|
||||||
|
|
||||||
const resetUploaderState = useCallback(() => {
|
|
||||||
console.debug(
|
|
||||||
`[DEBUG] resetUploaderState(): User triggered reset. Previous jobId was: ${jobId}`,
|
|
||||||
);
|
|
||||||
setProcessingState('idle');
|
|
||||||
setJobId(null);
|
|
||||||
setErrorMessage(null);
|
|
||||||
setDuplicateFlyerId(null);
|
|
||||||
setCurrentFile(null);
|
|
||||||
setProcessingStages([]);
|
|
||||||
setEstimatedTime(0);
|
|
||||||
logger.info('Uploader state has been reset. Previous job ID was:', jobId);
|
|
||||||
}, [jobId]);
|
|
||||||
|
|
||||||
const onFilesDropped = useCallback(
|
const onFilesDropped = useCallback(
|
||||||
(files: FileList) => {
|
(files: FileList) => {
|
||||||
console.debug('[DEBUG] onFilesDropped(): Files were dropped.');
|
|
||||||
if (files && files.length > 0) {
|
if (files && files.length > 0) {
|
||||||
processFile(files[0]);
|
upload(files[0]);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[processFile],
|
[upload],
|
||||||
);
|
);
|
||||||
|
|
||||||
const isProcessing = processingState === 'uploading' || processingState === 'polling';
|
const isProcessing = processingState === 'uploading' || processingState === 'polling';
|
||||||
@@ -221,11 +76,6 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
|||||||
? 'bg-brand-light/50 dark:bg-brand-dark/20'
|
? 'bg-brand-light/50 dark:bg-brand-dark/20'
|
||||||
: 'bg-gray-50/50 dark:bg-gray-800/20';
|
: 'bg-gray-50/50 dark:bg-gray-800/20';
|
||||||
|
|
||||||
// If processing, show the detailed status component. Otherwise, show the uploader.
|
|
||||||
console.debug(
|
|
||||||
`[DEBUG] FlyerUploader: Rendering. State=${processingState}, Msg=${statusMessage}, Err=${!!errorMessage}`,
|
|
||||||
);
|
|
||||||
|
|
||||||
if (isProcessing || processingState === 'completed' || processingState === 'error') {
|
if (isProcessing || processingState === 'completed' || processingState === 'error') {
|
||||||
return (
|
return (
|
||||||
<div className="max-w-4xl mx-auto">
|
<div className="max-w-4xl mx-auto">
|
||||||
@@ -235,22 +85,30 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
|
|||||||
currentFile={currentFile}
|
currentFile={currentFile}
|
||||||
/>
|
/>
|
||||||
<div className="mt-4 text-center">
|
<div className="mt-4 text-center">
|
||||||
{/* Display the current status message to the user and the test runner */}
|
{/* Display status message if not completed (completed has its own redirect logic) */}
|
||||||
{statusMessage && (
|
{statusMessage && processingState !== 'completed' && (
|
||||||
<p className="text-gray-600 dark:text-gray-400 mt-2 italic animate-pulse">
|
<p className="text-gray-600 dark:text-gray-400 mt-2 italic animate-pulse">
|
||||||
{statusMessage}
|
{statusMessage}
|
||||||
</p>
|
</p>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{processingState === 'completed' && (
|
||||||
|
<p className="text-green-600 dark:text-green-400 mt-2 font-bold">
|
||||||
|
Processing complete! Redirecting to flyer {flyerId}...
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
|
||||||
{errorMessage && (
|
{errorMessage && (
|
||||||
<div className="text-red-600 dark:text-red-400 font-semibold p-4 bg-red-100 dark:bg-red-900/30 rounded-md">
|
<div className="text-red-600 dark:text-red-400 font-semibold p-4 bg-red-100 dark:bg-red-900/30 rounded-md">
|
||||||
<p>{errorMessage}</p>
|
{duplicateFlyerId ? (
|
||||||
{duplicateFlyerId && (
|
|
||||||
<p>
|
<p>
|
||||||
<Link to={`/flyers/${duplicateFlyerId}`} className="text-blue-500 underline">
|
{errorMessage} You can view it here:{' '}
|
||||||
|
<Link to={`/flyers/${duplicateFlyerId}`} className="text-blue-500 underline" data-discover="true">
|
||||||
Flyer #{duplicateFlyerId}
|
Flyer #{duplicateFlyerId}
|
||||||
</Link>
|
</Link>
|
||||||
</p>
|
</p>
|
||||||
|
) : (
|
||||||
|
<p>{errorMessage}</p>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -236,6 +236,24 @@ describe('ShoppingListComponent (in shopping feature)', () => {
|
|||||||
alertSpy.mockRestore();
|
alertSpy.mockRestore();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should show a generic alert if reading aloud fails with a non-Error object', async () => {
|
||||||
|
const alertSpy = vi.spyOn(window, 'alert').mockImplementation(() => {});
|
||||||
|
vi.spyOn(aiApiClient, 'generateSpeechFromText').mockRejectedValue('A string error');
|
||||||
|
|
||||||
|
render(<ShoppingListComponent {...defaultProps} />);
|
||||||
|
const readAloudButton = screen.getByTitle(/read list aloud/i);
|
||||||
|
|
||||||
|
fireEvent.click(readAloudButton);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(alertSpy).toHaveBeenCalledWith(
|
||||||
|
'Could not read list aloud: An unknown error occurred while generating audio.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
alertSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
it('should handle interactions with purchased items', () => {
|
it('should handle interactions with purchased items', () => {
|
||||||
render(<ShoppingListComponent {...defaultProps} />);
|
render(<ShoppingListComponent {...defaultProps} />);
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
// src/features/shopping/ShoppingList.tsx
|
// src/features/shopping/ShoppingList.tsx
|
||||||
import React, { useState, useMemo, useCallback, useEffect } from 'react';
|
import React, { useState, useMemo, useCallback } from 'react';
|
||||||
import type { ShoppingList, ShoppingListItem, User } from '../../types';
|
import type { ShoppingList, ShoppingListItem, User } from '../../types';
|
||||||
import { UserIcon } from '../../components/icons/UserIcon';
|
import { UserIcon } from '../../components/icons/UserIcon';
|
||||||
import { ListBulletIcon } from '../../components/icons/ListBulletIcon';
|
import { ListBulletIcon } from '../../components/icons/ListBulletIcon';
|
||||||
@@ -56,28 +56,6 @@ export const ShoppingListComponent: React.FC<ShoppingListComponentProps> = ({
|
|||||||
return { neededItems, purchasedItems };
|
return { neededItems, purchasedItems };
|
||||||
}, [activeList]);
|
}, [activeList]);
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (activeList) {
|
|
||||||
console.log('ShoppingList Debug: Active List:', activeList.name);
|
|
||||||
console.log(
|
|
||||||
'ShoppingList Debug: Needed Items:',
|
|
||||||
neededItems.map((i) => ({
|
|
||||||
id: i.shopping_list_item_id,
|
|
||||||
name: i.custom_item_name || i.master_item?.name,
|
|
||||||
raw: i,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
console.log(
|
|
||||||
'ShoppingList Debug: Purchased Items:',
|
|
||||||
purchasedItems.map((i) => ({
|
|
||||||
id: i.shopping_list_item_id,
|
|
||||||
name: i.custom_item_name || i.master_item?.name,
|
|
||||||
raw: i,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}, [activeList, neededItems, purchasedItems]);
|
|
||||||
|
|
||||||
const handleCreateList = async () => {
|
const handleCreateList = async () => {
|
||||||
const name = prompt('Enter a name for your new shopping list:');
|
const name = prompt('Enter a name for your new shopping list:');
|
||||||
if (name && name.trim()) {
|
if (name && name.trim()) {
|
||||||
|
|||||||
@@ -164,6 +164,15 @@ describe('WatchedItemsList (in shopping feature)', () => {
|
|||||||
expect(itemsDesc[1]).toHaveTextContent('Eggs');
|
expect(itemsDesc[1]).toHaveTextContent('Eggs');
|
||||||
expect(itemsDesc[2]).toHaveTextContent('Bread');
|
expect(itemsDesc[2]).toHaveTextContent('Bread');
|
||||||
expect(itemsDesc[3]).toHaveTextContent('Apples');
|
expect(itemsDesc[3]).toHaveTextContent('Apples');
|
||||||
|
|
||||||
|
// Click again to sort ascending
|
||||||
|
fireEvent.click(sortButton);
|
||||||
|
|
||||||
|
const itemsAscAgain = screen.getAllByRole('listitem');
|
||||||
|
expect(itemsAscAgain[0]).toHaveTextContent('Apples');
|
||||||
|
expect(itemsAscAgain[1]).toHaveTextContent('Bread');
|
||||||
|
expect(itemsAscAgain[2]).toHaveTextContent('Eggs');
|
||||||
|
expect(itemsAscAgain[3]).toHaveTextContent('Milk');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should call onAddItemToList when plus icon is clicked', () => {
|
it('should call onAddItemToList when plus icon is clicked', () => {
|
||||||
@@ -222,6 +231,18 @@ describe('WatchedItemsList (in shopping feature)', () => {
|
|||||||
fireEvent.change(nameInput, { target: { value: 'Grapes' } });
|
fireEvent.change(nameInput, { target: { value: 'Grapes' } });
|
||||||
expect(addButton).toBeDisabled();
|
expect(addButton).toBeDisabled();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should not submit if form is submitted with invalid data', () => {
|
||||||
|
render(<WatchedItemsList {...defaultProps} />);
|
||||||
|
const nameInput = screen.getByPlaceholderText(/add item/i);
|
||||||
|
const form = nameInput.closest('form')!;
|
||||||
|
const categorySelect = screen.getByDisplayValue('Select a category');
|
||||||
|
fireEvent.change(categorySelect, { target: { value: 'Dairy & Eggs' } });
|
||||||
|
|
||||||
|
fireEvent.change(nameInput, { target: { value: ' ' } });
|
||||||
|
fireEvent.submit(form);
|
||||||
|
expect(mockOnAddItem).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Error Handling', () => {
|
describe('Error Handling', () => {
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { useState, useCallback, useRef, useEffect } from 'react';
|
|||||||
import { logger } from '../services/logger.client';
|
import { logger } from '../services/logger.client';
|
||||||
import { notifyError } from '../services/notificationService';
|
import { notifyError } from '../services/notificationService';
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A custom React hook to simplify API calls, including loading and error states.
|
* A custom React hook to simplify API calls, including loading and error states.
|
||||||
* It is designed to work with apiClient functions that return a `Promise<Response>`.
|
* It is designed to work with apiClient functions that return a `Promise<Response>`.
|
||||||
@@ -26,8 +27,17 @@ export function useApi<T, TArgs extends unknown[]>(
|
|||||||
const [isRefetching, setIsRefetching] = useState<boolean>(false);
|
const [isRefetching, setIsRefetching] = useState<boolean>(false);
|
||||||
const [error, setError] = useState<Error | null>(null);
|
const [error, setError] = useState<Error | null>(null);
|
||||||
const hasBeenExecuted = useRef(false);
|
const hasBeenExecuted = useRef(false);
|
||||||
|
const lastErrorMessageRef = useRef<string | null>(null);
|
||||||
const abortControllerRef = useRef<AbortController>(new AbortController());
|
const abortControllerRef = useRef<AbortController>(new AbortController());
|
||||||
|
|
||||||
|
// Use a ref to track the latest apiFunction. This allows us to keep `execute` stable
|
||||||
|
// even if `apiFunction` is recreated on every render (common with inline arrow functions).
|
||||||
|
const apiFunctionRef = useRef(apiFunction);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
apiFunctionRef.current = apiFunction;
|
||||||
|
}, [apiFunction]);
|
||||||
|
|
||||||
// This effect ensures that when the component using the hook unmounts,
|
// This effect ensures that when the component using the hook unmounts,
|
||||||
// any in-flight request is cancelled.
|
// any in-flight request is cancelled.
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -52,12 +62,13 @@ export function useApi<T, TArgs extends unknown[]>(
|
|||||||
async (...args: TArgs): Promise<T | null> => {
|
async (...args: TArgs): Promise<T | null> => {
|
||||||
setLoading(true);
|
setLoading(true);
|
||||||
setError(null);
|
setError(null);
|
||||||
|
lastErrorMessageRef.current = null;
|
||||||
if (hasBeenExecuted.current) {
|
if (hasBeenExecuted.current) {
|
||||||
setIsRefetching(true);
|
setIsRefetching(true);
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await apiFunction(...args, abortControllerRef.current.signal);
|
const response = await apiFunctionRef.current(...args, abortControllerRef.current.signal);
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
// Attempt to parse a JSON error response. This is aligned with ADR-003,
|
// Attempt to parse a JSON error response. This is aligned with ADR-003,
|
||||||
@@ -96,7 +107,17 @@ export function useApi<T, TArgs extends unknown[]>(
|
|||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
const err = e instanceof Error ? e : new Error('An unknown error occurred.');
|
let err: Error;
|
||||||
|
if (e instanceof Error) {
|
||||||
|
err = e;
|
||||||
|
} else if (typeof e === 'object' && e !== null && 'status' in e) {
|
||||||
|
// Handle structured errors (e.g. { status: 409, body: { ... } })
|
||||||
|
const structuredError = e as { status: number; body?: { message?: string } };
|
||||||
|
const message = structuredError.body?.message || `Request failed with status ${structuredError.status}`;
|
||||||
|
err = new Error(message);
|
||||||
|
} else {
|
||||||
|
err = new Error('An unknown error occurred.');
|
||||||
|
}
|
||||||
// If the error is an AbortError, it's an intentional cancellation, so we don't set an error state.
|
// If the error is an AbortError, it's an intentional cancellation, so we don't set an error state.
|
||||||
if (err.name === 'AbortError') {
|
if (err.name === 'AbortError') {
|
||||||
logger.info('API request was cancelled.', { functionName: apiFunction.name });
|
logger.info('API request was cancelled.', { functionName: apiFunction.name });
|
||||||
@@ -106,7 +127,13 @@ export function useApi<T, TArgs extends unknown[]>(
|
|||||||
error: err.message,
|
error: err.message,
|
||||||
functionName: apiFunction.name,
|
functionName: apiFunction.name,
|
||||||
});
|
});
|
||||||
|
// Only set a new error object if the message is different from the last one.
|
||||||
|
// This prevents creating new object references for the same error (e.g. repeated timeouts)
|
||||||
|
// and helps break infinite loops in components that depend on the `error` object.
|
||||||
|
if (err.message !== lastErrorMessageRef.current) {
|
||||||
setError(err);
|
setError(err);
|
||||||
|
lastErrorMessageRef.current = err.message;
|
||||||
|
}
|
||||||
notifyError(err.message); // Optionally notify the user automatically.
|
notifyError(err.message); // Optionally notify the user automatically.
|
||||||
return null; // Return null on failure.
|
return null; // Return null on failure.
|
||||||
} finally {
|
} finally {
|
||||||
@@ -114,7 +141,7 @@ export function useApi<T, TArgs extends unknown[]>(
|
|||||||
setIsRefetching(false);
|
setIsRefetching(false);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[apiFunction],
|
[], // execute is now stable because it uses apiFunctionRef
|
||||||
); // abortControllerRef is stable
|
); // abortControllerRef is stable
|
||||||
|
|
||||||
return { execute, loading, isRefetching, error, data, reset };
|
return { execute, loading, isRefetching, error, data, reset };
|
||||||
|
|||||||
174
src/hooks/useAppInitialization.test.tsx
Normal file
174
src/hooks/useAppInitialization.test.tsx
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
// src/hooks/useAppInitialization.test.tsx
|
||||||
|
import { renderHook, waitFor } from '@testing-library/react';
|
||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { MemoryRouter, useNavigate } from 'react-router-dom';
|
||||||
|
import { useAppInitialization } from './useAppInitialization';
|
||||||
|
import { useAuth } from './useAuth';
|
||||||
|
import { useModal } from './useModal';
|
||||||
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
|
|
||||||
|
// Mock dependencies
|
||||||
|
vi.mock('./useAuth');
|
||||||
|
vi.mock('./useModal');
|
||||||
|
vi.mock('react-router-dom', async (importOriginal) => {
|
||||||
|
const actual = await importOriginal<typeof import('react-router-dom')>();
|
||||||
|
return {
|
||||||
|
...actual,
|
||||||
|
useNavigate: vi.fn(),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
vi.mock('../services/logger.client');
|
||||||
|
vi.mock('../config', () => ({
|
||||||
|
default: {
|
||||||
|
app: { version: '1.0.1' },
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
const mockedUseAuth = vi.mocked(useAuth);
|
||||||
|
const mockedUseModal = vi.mocked(useModal);
|
||||||
|
const mockedUseNavigate = vi.mocked(useNavigate);
|
||||||
|
|
||||||
|
const mockLogin = vi.fn().mockResolvedValue(undefined);
|
||||||
|
const mockNavigate = vi.fn();
|
||||||
|
const mockOpenModal = vi.fn();
|
||||||
|
|
||||||
|
// Wrapper with MemoryRouter is needed because the hook uses useLocation and useNavigate
|
||||||
|
const wrapper = ({
|
||||||
|
children,
|
||||||
|
initialEntries = ['/'],
|
||||||
|
}: {
|
||||||
|
children: React.ReactNode;
|
||||||
|
initialEntries?: string[];
|
||||||
|
}) => <MemoryRouter initialEntries={initialEntries}>{children}</MemoryRouter>;
|
||||||
|
|
||||||
|
describe('useAppInitialization Hook', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
mockedUseNavigate.mockReturnValue(mockNavigate);
|
||||||
|
mockedUseAuth.mockReturnValue({
|
||||||
|
userProfile: null,
|
||||||
|
login: mockLogin,
|
||||||
|
authStatus: 'SIGNED_OUT',
|
||||||
|
isLoading: false,
|
||||||
|
logout: vi.fn(),
|
||||||
|
updateProfile: vi.fn(),
|
||||||
|
});
|
||||||
|
mockedUseModal.mockReturnValue({
|
||||||
|
openModal: mockOpenModal,
|
||||||
|
closeModal: vi.fn(),
|
||||||
|
isModalOpen: vi.fn(),
|
||||||
|
});
|
||||||
|
// Mock localStorage
|
||||||
|
Object.defineProperty(window, 'localStorage', {
|
||||||
|
value: {
|
||||||
|
getItem: vi.fn().mockReturnValue(null),
|
||||||
|
setItem: vi.fn(),
|
||||||
|
removeItem: vi.fn(),
|
||||||
|
clear: vi.fn(),
|
||||||
|
},
|
||||||
|
writable: true,
|
||||||
|
});
|
||||||
|
// Mock matchMedia
|
||||||
|
Object.defineProperty(window, 'matchMedia', {
|
||||||
|
value: vi.fn().mockImplementation((query) => ({
|
||||||
|
matches: false, // default to light mode
|
||||||
|
})),
|
||||||
|
writable: true,
|
||||||
|
configurable: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call login when googleAuthToken is in URL', async () => {
|
||||||
|
renderHook(() => useAppInitialization(), {
|
||||||
|
wrapper: (props) => wrapper({ ...props, initialEntries: ['/?googleAuthToken=test-token'] }),
|
||||||
|
});
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockLogin).toHaveBeenCalledWith('test-token');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call login when githubAuthToken is in URL', async () => {
|
||||||
|
renderHook(() => useAppInitialization(), {
|
||||||
|
wrapper: (props) => wrapper({ ...props, initialEntries: ['/?githubAuthToken=test-token'] }),
|
||||||
|
});
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockLogin).toHaveBeenCalledWith('test-token');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call navigate to clean the URL after processing a token', async () => {
|
||||||
|
renderHook(() => useAppInitialization(), {
|
||||||
|
wrapper: (props) => wrapper({ ...props, initialEntries: ['/some/path?googleAuthToken=test-token'] }),
|
||||||
|
});
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockLogin).toHaveBeenCalledWith('test-token');
|
||||||
|
});
|
||||||
|
expect(mockNavigate).toHaveBeenCalledWith('/some/path', { replace: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should open \"What's New\" modal if version is new", () => {
|
||||||
|
vi.spyOn(window.localStorage, 'getItem').mockReturnValue('1.0.0');
|
||||||
|
renderHook(() => useAppInitialization(), { wrapper });
|
||||||
|
expect(mockOpenModal).toHaveBeenCalledWith('whatsNew');
|
||||||
|
expect(window.localStorage.setItem).toHaveBeenCalledWith('lastSeenVersion', '1.0.1');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not open \"What's New\" modal if version is the same", () => {
|
||||||
|
vi.spyOn(window.localStorage, 'getItem').mockReturnValue('1.0.1');
|
||||||
|
renderHook(() => useAppInitialization(), { wrapper });
|
||||||
|
expect(mockOpenModal).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set dark mode from user profile', async () => {
|
||||||
|
mockedUseAuth.mockReturnValue({
|
||||||
|
...mockedUseAuth(),
|
||||||
|
userProfile: createMockUserProfile({ preferences: { darkMode: true } }),
|
||||||
|
});
|
||||||
|
const { result } = renderHook(() => useAppInitialization(), { wrapper });
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.isDarkMode).toBe(true);
|
||||||
|
expect(document.documentElement.classList.contains('dark')).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set dark mode from localStorage', async () => {
|
||||||
|
vi.spyOn(window.localStorage, 'getItem').mockImplementation((key) =>
|
||||||
|
key === 'darkMode' ? 'true' : null,
|
||||||
|
);
|
||||||
|
const { result } = renderHook(() => useAppInitialization(), { wrapper });
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.isDarkMode).toBe(true);
|
||||||
|
expect(document.documentElement.classList.contains('dark')).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set dark mode from system preference', async () => {
|
||||||
|
vi.spyOn(window, 'matchMedia').mockReturnValue({ matches: true } as any);
|
||||||
|
const { result } = renderHook(() => useAppInitialization(), { wrapper });
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.isDarkMode).toBe(true);
|
||||||
|
expect(document.documentElement.classList.contains('dark')).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set unit system from user profile', async () => {
|
||||||
|
mockedUseAuth.mockReturnValue({
|
||||||
|
...mockedUseAuth(),
|
||||||
|
userProfile: createMockUserProfile({ preferences: { unitSystem: 'metric' } }),
|
||||||
|
});
|
||||||
|
const { result } = renderHook(() => useAppInitialization(), { wrapper });
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.unitSystem).toBe('metric');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set unit system from localStorage', async () => {
|
||||||
|
vi.spyOn(window.localStorage, 'getItem').mockImplementation((key) =>
|
||||||
|
key === 'unitSystem' ? 'metric' : null,
|
||||||
|
);
|
||||||
|
const { result } = renderHook(() => useAppInitialization(), { wrapper });
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.unitSystem).toBe('metric');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
88
src/hooks/useAppInitialization.ts
Normal file
88
src/hooks/useAppInitialization.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
// src/hooks/useAppInitialization.ts
|
||||||
|
import { useState, useEffect } from 'react';
|
||||||
|
import { useLocation, useNavigate } from 'react-router-dom';
|
||||||
|
import { useAuth } from './useAuth';
|
||||||
|
import { useModal } from './useModal';
|
||||||
|
import { logger } from '../services/logger.client';
|
||||||
|
import config from '../config';
|
||||||
|
|
||||||
|
export const useAppInitialization = () => {
|
||||||
|
const { userProfile, login } = useAuth();
|
||||||
|
const { openModal } = useModal();
|
||||||
|
const location = useLocation();
|
||||||
|
const navigate = useNavigate();
|
||||||
|
|
||||||
|
const [isDarkMode, setIsDarkMode] = useState(false);
|
||||||
|
const [unitSystem, setUnitSystem] = useState<'metric' | 'imperial'>('imperial');
|
||||||
|
|
||||||
|
// Effect to handle the token from Google/GitHub OAuth redirect
|
||||||
|
useEffect(() => {
|
||||||
|
const urlParams = new URLSearchParams(location.search);
|
||||||
|
const googleToken = urlParams.get('googleAuthToken');
|
||||||
|
|
||||||
|
if (googleToken) {
|
||||||
|
logger.info('Received Google Auth token from URL. Authenticating...');
|
||||||
|
login(googleToken).catch((err) =>
|
||||||
|
logger.error('Failed to log in with Google token', { error: err }),
|
||||||
|
);
|
||||||
|
navigate(location.pathname, { replace: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
const githubToken = urlParams.get('githubAuthToken');
|
||||||
|
if (githubToken) {
|
||||||
|
logger.info('Received GitHub Auth token from URL. Authenticating...');
|
||||||
|
login(githubToken).catch((err) => {
|
||||||
|
logger.error('Failed to log in with GitHub token', { error: err });
|
||||||
|
});
|
||||||
|
navigate(location.pathname, { replace: true });
|
||||||
|
}
|
||||||
|
}, [login, location.search, navigate, location.pathname]);
|
||||||
|
|
||||||
|
// Effect to handle "What's New" modal
|
||||||
|
useEffect(() => {
|
||||||
|
const appVersion = config.app.version;
|
||||||
|
if (appVersion) {
|
||||||
|
logger.info(`Application version: ${appVersion}`);
|
||||||
|
const lastSeenVersion = localStorage.getItem('lastSeenVersion');
|
||||||
|
if (appVersion !== lastSeenVersion) {
|
||||||
|
openModal('whatsNew');
|
||||||
|
localStorage.setItem('lastSeenVersion', appVersion);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, [openModal]);
|
||||||
|
|
||||||
|
// Effect to set initial theme based on user profile, local storage, or system preference
|
||||||
|
useEffect(() => {
|
||||||
|
let darkModeValue: boolean;
|
||||||
|
if (userProfile && userProfile.preferences?.darkMode !== undefined) {
|
||||||
|
// Preference from DB
|
||||||
|
darkModeValue = userProfile.preferences.darkMode;
|
||||||
|
} else {
|
||||||
|
// Fallback to local storage or system preference
|
||||||
|
const savedMode = localStorage.getItem('darkMode');
|
||||||
|
const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||||
|
darkModeValue = savedMode !== null ? savedMode === 'true' : prefersDark;
|
||||||
|
}
|
||||||
|
setIsDarkMode(darkModeValue);
|
||||||
|
document.documentElement.classList.toggle('dark', darkModeValue);
|
||||||
|
// Also save to local storage if coming from profile, to persist on logout
|
||||||
|
if (userProfile && userProfile.preferences?.darkMode !== undefined) {
|
||||||
|
localStorage.setItem('darkMode', String(userProfile.preferences.darkMode));
|
||||||
|
}
|
||||||
|
}, [userProfile]);
|
||||||
|
|
||||||
|
// Effect to set initial unit system based on user profile or local storage
|
||||||
|
useEffect(() => {
|
||||||
|
if (userProfile && userProfile.preferences?.unitSystem) {
|
||||||
|
setUnitSystem(userProfile.preferences.unitSystem);
|
||||||
|
localStorage.setItem('unitSystem', userProfile.preferences.unitSystem);
|
||||||
|
} else {
|
||||||
|
const savedSystem = localStorage.getItem('unitSystem') as 'metric' | 'imperial' | null;
|
||||||
|
if (savedSystem) {
|
||||||
|
setUnitSystem(savedSystem);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, [userProfile?.preferences?.unitSystem, userProfile?.user.user_id]);
|
||||||
|
|
||||||
|
return { isDarkMode, unitSystem };
|
||||||
|
};
|
||||||
@@ -6,24 +6,28 @@ import { useAuth } from './useAuth';
|
|||||||
import { AuthProvider } from '../providers/AuthProvider';
|
import { AuthProvider } from '../providers/AuthProvider';
|
||||||
import * as apiClient from '../services/apiClient';
|
import * as apiClient from '../services/apiClient';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
|
import * as tokenStorage from '../services/tokenStorage';
|
||||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
|
import { logger } from '../services/logger.client';
|
||||||
|
|
||||||
// Mock the dependencies
|
// Mock the dependencies
|
||||||
vi.mock('../services/apiClient', () => ({
|
vi.mock('../services/apiClient', () => ({
|
||||||
// Mock other functions if needed
|
// Mock other functions if needed
|
||||||
getAuthenticatedUserProfile: vi.fn(),
|
getAuthenticatedUserProfile: vi.fn(),
|
||||||
}));
|
}));
|
||||||
|
vi.mock('../services/tokenStorage');
|
||||||
|
|
||||||
// Mock the logger to see auth provider logs during test execution
|
// Mock the logger to spy on its methods
|
||||||
vi.mock('../services/logger.client', () => ({
|
vi.mock('../services/logger.client', () => ({
|
||||||
logger: {
|
logger: {
|
||||||
info: vi.fn((...args) => console.log('[AUTH-INFO]', ...args)),
|
info: vi.fn(),
|
||||||
warn: vi.fn((...args) => console.warn('[AUTH-WARN]', ...args)),
|
warn: vi.fn(),
|
||||||
error: vi.fn((...args) => console.error('[AUTH-ERROR]', ...args)),
|
error: vi.fn(),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
const mockedApiClient = vi.mocked(apiClient);
|
const mockedApiClient = vi.mocked(apiClient);
|
||||||
|
const mockedTokenStorage = vi.mocked(tokenStorage);
|
||||||
|
|
||||||
const mockProfile: UserProfile = createMockUserProfile({
|
const mockProfile: UserProfile = createMockUserProfile({
|
||||||
full_name: 'Test User',
|
full_name: 'Test User',
|
||||||
@@ -36,26 +40,9 @@ const mockProfile: UserProfile = createMockUserProfile({
|
|||||||
const wrapper = ({ children }: { children: ReactNode }) => <AuthProvider>{children}</AuthProvider>;
|
const wrapper = ({ children }: { children: ReactNode }) => <AuthProvider>{children}</AuthProvider>;
|
||||||
|
|
||||||
describe('useAuth Hook and AuthProvider', () => {
|
describe('useAuth Hook and AuthProvider', () => {
|
||||||
// Mock localStorage
|
|
||||||
let storage: { [key: string]: string } = {};
|
|
||||||
const localStorageMock = {
|
|
||||||
getItem: vi.fn((key: string) => storage[key] || null),
|
|
||||||
setItem: vi.fn((key: string, value: string) => {
|
|
||||||
storage[key] = value;
|
|
||||||
}),
|
|
||||||
removeItem: vi.fn((key: string) => {
|
|
||||||
delete storage[key];
|
|
||||||
}),
|
|
||||||
clear: vi.fn(() => {
|
|
||||||
storage = {};
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
// Reset mocks and storage before each test
|
// Reset mocks and storage before each test
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
storage = {};
|
|
||||||
Object.defineProperty(window, 'localStorage', { value: localStorageMock, configurable: true });
|
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
@@ -85,7 +72,8 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe('Initial Auth Check (useEffect)', () => {
|
describe('Initial Auth Check (useEffect)', () => {
|
||||||
it('sets state to SIGNED_OUT if no token is found', async () => {
|
it('sets state to SIGNED_OUT if no token is found in storage', async () => {
|
||||||
|
mockedTokenStorage.getToken.mockReturnValue(null);
|
||||||
const { result } = renderHook(() => useAuth(), { wrapper });
|
const { result } = renderHook(() => useAuth(), { wrapper });
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
@@ -97,7 +85,7 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('sets state to AUTHENTICATED if a valid token is found', async () => {
|
it('sets state to AUTHENTICATED if a valid token is found', async () => {
|
||||||
localStorageMock.setItem('authToken', 'valid-token');
|
mockedTokenStorage.getToken.mockReturnValue('valid-token');
|
||||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||||
ok: true,
|
ok: true,
|
||||||
status: 200,
|
status: 200,
|
||||||
@@ -121,7 +109,7 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('sets state to SIGNED_OUT and removes token if validation fails', async () => {
|
it('sets state to SIGNED_OUT and removes token if validation fails', async () => {
|
||||||
localStorageMock.setItem('authToken', 'invalid-token');
|
mockedTokenStorage.getToken.mockReturnValue('invalid-token');
|
||||||
mockedApiClient.getAuthenticatedUserProfile.mockRejectedValue(new Error('Invalid token'));
|
mockedApiClient.getAuthenticatedUserProfile.mockRejectedValue(new Error('Invalid token'));
|
||||||
|
|
||||||
const { result } = renderHook(() => useAuth(), { wrapper });
|
const { result } = renderHook(() => useAuth(), { wrapper });
|
||||||
@@ -132,13 +120,40 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
|
|
||||||
expect(result.current.authStatus).toBe('SIGNED_OUT');
|
expect(result.current.authStatus).toBe('SIGNED_OUT');
|
||||||
expect(result.current.userProfile).toBeNull();
|
expect(result.current.userProfile).toBeNull();
|
||||||
expect(localStorageMock.removeItem).toHaveBeenCalledWith('authToken');
|
expect(mockedTokenStorage.removeToken).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('sets state to SIGNED_OUT and removes token if profile fetch returns null after token validation', async () => {
|
||||||
|
mockedTokenStorage.getToken.mockReturnValue('valid-token');
|
||||||
|
// Mock getAuthenticatedUserProfile to return a 200 OK response with a null body
|
||||||
|
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
json: () => Promise.resolve(null), // Simulate API returning no profile data
|
||||||
|
} as unknown as Response);
|
||||||
|
|
||||||
|
const { result } = renderHook(() => useAuth(), { wrapper });
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.isLoading).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.current.authStatus).toBe('SIGNED_OUT');
|
||||||
|
expect(result.current.userProfile).toBeNull();
|
||||||
|
expect(mockedTokenStorage.removeToken).toHaveBeenCalled();
|
||||||
|
expect(logger.warn).toHaveBeenCalledWith(
|
||||||
|
'[AuthProvider-Effect] Token was present but validation returned no profile. Signing out.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
describe('login function', () => {
|
describe('login function', () => {
|
||||||
// This was the failing test
|
// This was the failing test
|
||||||
it('sets token, fetches profile, and updates state on successful login', async () => {
|
it('sets token, fetches profile, and updates state on successful login', async () => {
|
||||||
|
// --- FIX ---
|
||||||
|
// Explicitly mock that no token exists initially to prevent state leakage from other tests.
|
||||||
|
mockedTokenStorage.getToken.mockReturnValue(null);
|
||||||
|
|
||||||
// --- FIX ---
|
// --- FIX ---
|
||||||
// The mock for `getAuthenticatedUserProfile` must resolve to a `Response`-like object,
|
// The mock for `getAuthenticatedUserProfile` must resolve to a `Response`-like object,
|
||||||
// as this is the return type of the actual function. The `useApi` hook then
|
// as this is the return type of the actual function. The `useApi` hook then
|
||||||
@@ -172,7 +187,7 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
console.log('[TEST-DEBUG] State immediately after login `act` call:', result.current);
|
console.log('[TEST-DEBUG] State immediately after login `act` call:', result.current);
|
||||||
|
|
||||||
// 3. Assertions
|
// 3. Assertions
|
||||||
expect(localStorageMock.setItem).toHaveBeenCalledWith('authToken', 'new-valid-token');
|
expect(mockedTokenStorage.setToken).toHaveBeenCalledWith('new-valid-token');
|
||||||
|
|
||||||
// 4. We must wait for the state update inside the hook to propagate
|
// 4. We must wait for the state update inside the hook to propagate
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
@@ -202,16 +217,44 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// Should trigger the logout flow
|
// Should trigger the logout flow
|
||||||
expect(localStorageMock.removeItem).toHaveBeenCalledWith('authToken');
|
expect(mockedTokenStorage.removeToken).toHaveBeenCalled();
|
||||||
expect(result.current.authStatus).toBe('SIGNED_OUT'); // This was a duplicate, fixed.
|
expect(result.current.authStatus).toBe('SIGNED_OUT'); // This was a duplicate, fixed.
|
||||||
expect(result.current.userProfile).toBeNull();
|
expect(result.current.userProfile).toBeNull();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('logs out and throws an error if profile fetch returns null after login (no profileData)', async () => {
|
||||||
|
// Simulate successful token setting, but subsequent profile fetch returns null
|
||||||
|
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
json: () => Promise.resolve(null), // Simulate API returning no profile data
|
||||||
|
} as unknown as Response);
|
||||||
|
|
||||||
|
const { result } = renderHook(() => useAuth(), { wrapper });
|
||||||
|
await waitFor(() => expect(result.current.isLoading).toBe(false));
|
||||||
|
|
||||||
|
// Call login without profileData, forcing a profile fetch
|
||||||
|
await act(async () => {
|
||||||
|
await expect(result.current.login('new-token-no-profile-data')).rejects.toThrow(
|
||||||
|
'Login succeeded, but failed to fetch your data: Received null or undefined profile from API.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should trigger the logout flow
|
||||||
|
expect(mockedTokenStorage.removeToken).toHaveBeenCalled();
|
||||||
|
expect(result.current.authStatus).toBe('SIGNED_OUT');
|
||||||
|
expect(result.current.userProfile).toBeNull();
|
||||||
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
|
expect.any(String), // The error message
|
||||||
|
expect.objectContaining({ error: 'Received null or undefined profile from API.' }),
|
||||||
|
);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('logout function', () => {
|
describe('logout function', () => {
|
||||||
it('removes token and resets auth state', async () => {
|
it('removes token and resets auth state', async () => {
|
||||||
// Start in a logged-in state
|
// Start in a logged-in state by mocking the token storage
|
||||||
localStorageMock.setItem('authToken', 'valid-token');
|
mockedTokenStorage.getToken.mockReturnValue('valid-token');
|
||||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||||
ok: true,
|
ok: true,
|
||||||
status: 200,
|
status: 200,
|
||||||
@@ -227,16 +270,15 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
result.current.logout();
|
result.current.logout();
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(localStorageMock.removeItem).toHaveBeenCalledWith('authToken');
|
expect(mockedTokenStorage.removeToken).toHaveBeenCalled();
|
||||||
expect(result.current.authStatus).toBe('SIGNED_OUT');
|
expect(result.current.authStatus).toBe('SIGNED_OUT');
|
||||||
expect(result.current.userProfile).toBeNull();
|
expect(result.current.userProfile).toBeNull();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('updateProfile function', () => {
|
describe('updateProfile function', () => {
|
||||||
it('merges new data into the existing profile state', async () => {
|
it('merges new data into the existing profile state', async () => { // Start in a logged-in state
|
||||||
// Start in a logged-in state
|
mockedTokenStorage.getToken.mockReturnValue('valid-token');
|
||||||
localStorageMock.setItem('authToken', 'valid-token');
|
|
||||||
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
mockedApiClient.getAuthenticatedUserProfile.mockResolvedValue({
|
||||||
ok: true,
|
ok: true,
|
||||||
status: 200,
|
status: 200,
|
||||||
@@ -264,6 +306,10 @@ describe('useAuth Hook and AuthProvider', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should not update profile if user is not authenticated', async () => {
|
it('should not update profile if user is not authenticated', async () => {
|
||||||
|
// --- FIX ---
|
||||||
|
// Explicitly mock that no token exists initially to prevent state leakage from other tests.
|
||||||
|
mockedTokenStorage.getToken.mockReturnValue(null);
|
||||||
|
|
||||||
const { result } = renderHook(() => useAuth(), { wrapper });
|
const { result } = renderHook(() => useAuth(), { wrapper });
|
||||||
|
|
||||||
// Wait for initial check to complete
|
// Wait for initial check to complete
|
||||||
|
|||||||
136
src/hooks/useFlyerUploader.test.tsx
Normal file
136
src/hooks/useFlyerUploader.test.tsx
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
import { renderHook, act, waitFor } from '@testing-library/react';
|
||||||
|
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { useFlyerUploader } from './useFlyerUploader';
|
||||||
|
import * as aiApiClient from '../services/aiApiClient';
|
||||||
|
import * as checksumUtil from '../utils/checksum';
|
||||||
|
|
||||||
|
// Import the actual error class because the module is mocked
|
||||||
|
const { JobFailedError } = await vi.importActual<typeof import('../services/aiApiClient')>(
|
||||||
|
'../services/aiApiClient',
|
||||||
|
);
|
||||||
|
|
||||||
|
// Mock dependencies
|
||||||
|
vi.mock('../services/aiApiClient');
|
||||||
|
vi.mock('../utils/checksum');
|
||||||
|
vi.mock('../services/logger.client', () => ({
|
||||||
|
logger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
const mockedAiApiClient = vi.mocked(aiApiClient);
|
||||||
|
const mockedChecksumUtil = vi.mocked(checksumUtil);
|
||||||
|
|
||||||
|
// Helper to wrap the hook with QueryClientProvider, which is required by react-query
|
||||||
|
const createWrapper = () => {
|
||||||
|
const queryClient = new QueryClient({
|
||||||
|
defaultOptions: {
|
||||||
|
queries: {
|
||||||
|
retry: false, // Disable retries for tests for predictable behavior
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return ({ children }: { children: React.ReactNode }) => (
|
||||||
|
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('useFlyerUploader Hook with React Query', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.resetAllMocks();
|
||||||
|
mockedChecksumUtil.generateFileChecksum.mockResolvedValue('mock-checksum');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a successful upload and polling flow', async () => {
|
||||||
|
// Arrange
|
||||||
|
const mockJobId = 'job-123';
|
||||||
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: mockJobId });
|
||||||
|
mockedAiApiClient.getJobStatus
|
||||||
|
.mockResolvedValueOnce({
|
||||||
|
// First poll: active
|
||||||
|
id: mockJobId,
|
||||||
|
state: 'active',
|
||||||
|
progress: { message: 'Processing...' },
|
||||||
|
returnValue: null,
|
||||||
|
failedReason: null,
|
||||||
|
} as aiApiClient.JobStatus)
|
||||||
|
.mockResolvedValueOnce({
|
||||||
|
// Second poll: completed
|
||||||
|
id: mockJobId,
|
||||||
|
state: 'completed',
|
||||||
|
progress: { message: 'Complete!' },
|
||||||
|
returnValue: { flyerId: 777 },
|
||||||
|
failedReason: null,
|
||||||
|
} as aiApiClient.JobStatus);
|
||||||
|
|
||||||
|
const { result } = renderHook(() => useFlyerUploader(), { wrapper: createWrapper() });
|
||||||
|
const mockFile = new File([''], 'flyer.pdf');
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await act(async () => {
|
||||||
|
result.current.upload(mockFile);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Assert initial upload state
|
||||||
|
await waitFor(() => expect(result.current.processingState).toBe('polling'));
|
||||||
|
expect(result.current.jobId).toBe(mockJobId);
|
||||||
|
|
||||||
|
// Assert polling state
|
||||||
|
await waitFor(() => expect(result.current.statusMessage).toBe('Processing...'));
|
||||||
|
|
||||||
|
// Assert completed state
|
||||||
|
await waitFor(() => expect(result.current.processingState).toBe('completed'), { timeout: 5000 });
|
||||||
|
expect(result.current.flyerId).toBe(777);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle an upload failure', async () => {
|
||||||
|
// Arrange
|
||||||
|
const uploadError = {
|
||||||
|
status: 409,
|
||||||
|
body: { message: 'Duplicate flyer detected.', flyerId: 99 },
|
||||||
|
};
|
||||||
|
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue(uploadError);
|
||||||
|
|
||||||
|
const { result } = renderHook(() => useFlyerUploader(), { wrapper: createWrapper() });
|
||||||
|
const mockFile = new File([''], 'flyer.pdf');
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await act(async () => {
|
||||||
|
result.current.upload(mockFile);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Assert error state
|
||||||
|
await waitFor(() => expect(result.current.processingState).toBe('error'));
|
||||||
|
expect(result.current.errorMessage).toBe('Duplicate flyer detected.');
|
||||||
|
expect(result.current.duplicateFlyerId).toBe(99);
|
||||||
|
expect(result.current.jobId).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a job failure during polling', async () => {
|
||||||
|
// Arrange
|
||||||
|
const mockJobId = 'job-456';
|
||||||
|
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: mockJobId });
|
||||||
|
|
||||||
|
// Mock getJobStatus to throw a JobFailedError
|
||||||
|
mockedAiApiClient.getJobStatus.mockRejectedValue(
|
||||||
|
new JobFailedError('AI validation failed.', 'AI_VALIDATION_FAILED'),
|
||||||
|
);
|
||||||
|
|
||||||
|
const { result } = renderHook(() => useFlyerUploader(), { wrapper: createWrapper() });
|
||||||
|
const mockFile = new File([''], 'flyer.pdf');
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await act(async () => {
|
||||||
|
result.current.upload(mockFile);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Assert error state after polling fails
|
||||||
|
await waitFor(() => expect(result.current.processingState).toBe('error'));
|
||||||
|
expect(result.current.errorMessage).toBe('Polling failed: AI validation failed.');
|
||||||
|
expect(result.current.flyerId).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
171
src/hooks/useFlyerUploader.ts
Normal file
171
src/hooks/useFlyerUploader.ts
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
// src/hooks/useFlyerUploader.ts
|
||||||
|
// src/hooks/useFlyerUploader.ts
|
||||||
|
import { useState, useCallback, useMemo } from 'react';
|
||||||
|
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
|
||||||
|
import {
|
||||||
|
uploadAndProcessFlyer,
|
||||||
|
getJobStatus,
|
||||||
|
type JobStatus,
|
||||||
|
JobFailedError,
|
||||||
|
} from '../services/aiApiClient';
|
||||||
|
import { logger } from '../services/logger.client';
|
||||||
|
import { generateFileChecksum } from '../utils/checksum';
|
||||||
|
import type { ProcessingStage } from '../types';
|
||||||
|
|
||||||
|
export type ProcessingState = 'idle' | 'uploading' | 'polling' | 'completed' | 'error';
|
||||||
|
|
||||||
|
// Define a type for the structured error thrown by the API client
|
||||||
|
interface ApiError {
|
||||||
|
status: number;
|
||||||
|
body: {
|
||||||
|
message: string;
|
||||||
|
flyerId?: number;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Type guard to check if an error is a structured API error
|
||||||
|
function isApiError(error: unknown): error is ApiError {
|
||||||
|
return (
|
||||||
|
typeof error === 'object' &&
|
||||||
|
error !== null &&
|
||||||
|
'status' in error &&
|
||||||
|
typeof (error as { status: unknown }).status === 'number' &&
|
||||||
|
'body' in error &&
|
||||||
|
typeof (error as { body: unknown }).body === 'object' &&
|
||||||
|
(error as { body: unknown }).body !== null &&
|
||||||
|
'message' in ((error as { body: unknown }).body as object)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
export const useFlyerUploader = () => {
|
||||||
|
const queryClient = useQueryClient();
|
||||||
|
const [jobId, setJobId] = useState<string | null>(null);
|
||||||
|
const [currentFile, setCurrentFile] = useState<string | null>(null);
|
||||||
|
|
||||||
|
// Mutation for the initial file upload
|
||||||
|
const uploadMutation = useMutation({
|
||||||
|
mutationFn: async (file: File) => {
|
||||||
|
setCurrentFile(file.name);
|
||||||
|
const checksum = await generateFileChecksum(file);
|
||||||
|
return uploadAndProcessFlyer(file, checksum);
|
||||||
|
},
|
||||||
|
onSuccess: (data) => {
|
||||||
|
// When upload is successful, we get a jobId and can start polling.
|
||||||
|
setJobId(data.jobId);
|
||||||
|
},
|
||||||
|
// onError is handled automatically by react-query and exposed in `uploadMutation.error`
|
||||||
|
});
|
||||||
|
|
||||||
|
// Query for polling the job status
|
||||||
|
const { data: jobStatus, error: pollError } = useQuery({
|
||||||
|
queryKey: ['jobStatus', jobId],
|
||||||
|
queryFn: () => {
|
||||||
|
if (!jobId) throw new Error('No job ID to poll');
|
||||||
|
return getJobStatus(jobId);
|
||||||
|
},
|
||||||
|
// Only run this query if there is a jobId
|
||||||
|
enabled: !!jobId,
|
||||||
|
// Polling logic: react-query handles the interval
|
||||||
|
refetchInterval: (query) => {
|
||||||
|
const data = query.state.data as JobStatus | undefined;
|
||||||
|
// Stop polling if the job is completed or has failed
|
||||||
|
if (data?.state === 'completed' || data?.state === 'failed') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// Also stop polling if the query itself has errored (e.g. network error, or JobFailedError thrown from getJobStatus)
|
||||||
|
if (query.state.status === 'error') {
|
||||||
|
logger.warn('[useFlyerUploader] Polling stopped due to query error state.');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
// Otherwise, poll every 3 seconds
|
||||||
|
return 3000;
|
||||||
|
},
|
||||||
|
refetchOnWindowFocus: false, // No need to refetch on focus, interval is enough
|
||||||
|
// If a poll fails (e.g., network error), don't retry automatically.
|
||||||
|
// The user can see the error and choose to retry manually if we build that feature.
|
||||||
|
retry: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
const upload = useCallback(
|
||||||
|
(file: File) => {
|
||||||
|
// Reset previous state before a new upload
|
||||||
|
setJobId(null);
|
||||||
|
setCurrentFile(null);
|
||||||
|
queryClient.removeQueries({ queryKey: ['jobStatus'] });
|
||||||
|
uploadMutation.mutate(file);
|
||||||
|
},
|
||||||
|
[uploadMutation, queryClient],
|
||||||
|
);
|
||||||
|
|
||||||
|
const resetUploaderState = useCallback(() => {
|
||||||
|
setJobId(null);
|
||||||
|
setCurrentFile(null);
|
||||||
|
uploadMutation.reset();
|
||||||
|
queryClient.removeQueries({ queryKey: ['jobStatus'] });
|
||||||
|
}, [uploadMutation, queryClient]);
|
||||||
|
|
||||||
|
// Consolidate state derivation for the UI from the react-query hooks using useMemo.
|
||||||
|
// This improves performance by memoizing the derived state and makes the logic easier to follow.
|
||||||
|
const { processingState, errorMessage, duplicateFlyerId, flyerId, statusMessage } = useMemo(() => {
|
||||||
|
// The order of these checks is critical. Errors must be checked first to override
|
||||||
|
// any stale `jobStatus` from a previous successful poll.
|
||||||
|
const state: ProcessingState = (() => {
|
||||||
|
if (uploadMutation.isError || pollError) return 'error';
|
||||||
|
if (uploadMutation.isPending) return 'uploading';
|
||||||
|
if (jobStatus && (jobStatus.state === 'active' || jobStatus.state === 'waiting'))
|
||||||
|
return 'polling';
|
||||||
|
if (jobStatus?.state === 'completed') {
|
||||||
|
if (!jobStatus.returnValue?.flyerId) return 'error';
|
||||||
|
return 'completed';
|
||||||
|
}
|
||||||
|
return 'idle';
|
||||||
|
})();
|
||||||
|
|
||||||
|
let msg: string | null = null;
|
||||||
|
let dupId: number | null = null;
|
||||||
|
|
||||||
|
if (state === 'error') {
|
||||||
|
if (uploadMutation.isError) {
|
||||||
|
const uploadError = uploadMutation.error;
|
||||||
|
if (isApiError(uploadError)) {
|
||||||
|
msg = uploadError.body.message;
|
||||||
|
// Specifically handle 409 Conflict for duplicate flyers
|
||||||
|
if (uploadError.status === 409) {
|
||||||
|
dupId = uploadError.body.flyerId ?? null;
|
||||||
|
}
|
||||||
|
} else if (uploadError instanceof Error) {
|
||||||
|
msg = uploadError.message;
|
||||||
|
} else {
|
||||||
|
msg = 'An unknown upload error occurred.';
|
||||||
|
}
|
||||||
|
} else if (pollError) {
|
||||||
|
msg = `Polling failed: ${pollError.message}`;
|
||||||
|
} else if (jobStatus?.state === 'failed') {
|
||||||
|
msg = `Processing failed: ${jobStatus.progress?.message || jobStatus.failedReason || 'Unknown reason'}`;
|
||||||
|
} else if (jobStatus?.state === 'completed' && !jobStatus.returnValue?.flyerId) {
|
||||||
|
msg = 'Job completed but did not return a flyer ID.';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
processingState: state,
|
||||||
|
errorMessage: msg,
|
||||||
|
duplicateFlyerId: dupId,
|
||||||
|
flyerId: jobStatus?.state === 'completed' ? jobStatus.returnValue?.flyerId ?? null : null,
|
||||||
|
statusMessage: uploadMutation.isPending ? 'Uploading file...' : jobStatus?.progress?.message,
|
||||||
|
};
|
||||||
|
}, [uploadMutation, jobStatus, pollError]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
processingState,
|
||||||
|
statusMessage: uploadMutation.isPending ? 'Uploading file...' : jobStatus?.progress?.message,
|
||||||
|
errorMessage,
|
||||||
|
duplicateFlyerId,
|
||||||
|
processingStages: jobStatus?.progress?.stages || [],
|
||||||
|
estimatedTime: jobStatus?.progress?.estimatedTimeRemaining || 0,
|
||||||
|
currentFile,
|
||||||
|
flyerId,
|
||||||
|
upload,
|
||||||
|
resetUploaderState,
|
||||||
|
jobId,
|
||||||
|
};
|
||||||
|
};
|
||||||
@@ -47,6 +47,7 @@ export function useInfiniteQuery<T>(
|
|||||||
|
|
||||||
// Use a ref to store the cursor for the next page.
|
// Use a ref to store the cursor for the next page.
|
||||||
const nextCursorRef = useRef<number | string | null | undefined>(initialCursor);
|
const nextCursorRef = useRef<number | string | null | undefined>(initialCursor);
|
||||||
|
const lastErrorMessageRef = useRef<string | null>(null);
|
||||||
|
|
||||||
const fetchPage = useCallback(
|
const fetchPage = useCallback(
|
||||||
async (cursor?: number | string | null) => {
|
async (cursor?: number | string | null) => {
|
||||||
@@ -59,6 +60,7 @@ export function useInfiniteQuery<T>(
|
|||||||
setIsFetchingNextPage(true);
|
setIsFetchingNextPage(true);
|
||||||
}
|
}
|
||||||
setError(null);
|
setError(null);
|
||||||
|
lastErrorMessageRef.current = null;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const response = await apiFunction(cursor);
|
const response = await apiFunction(cursor);
|
||||||
@@ -99,7 +101,10 @@ export function useInfiniteQuery<T>(
|
|||||||
error: err.message,
|
error: err.message,
|
||||||
functionName: apiFunction.name,
|
functionName: apiFunction.name,
|
||||||
});
|
});
|
||||||
|
if (err.message !== lastErrorMessageRef.current) {
|
||||||
setError(err);
|
setError(err);
|
||||||
|
lastErrorMessageRef.current = err.message;
|
||||||
|
}
|
||||||
notifyError(err.message);
|
notifyError(err.message);
|
||||||
} finally {
|
} finally {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
@@ -125,6 +130,7 @@ export function useInfiniteQuery<T>(
|
|||||||
// Function to be called by the UI to refetch the entire query from the beginning.
|
// Function to be called by the UI to refetch the entire query from the beginning.
|
||||||
const refetch = useCallback(() => {
|
const refetch = useCallback(() => {
|
||||||
setIsRefetching(true);
|
setIsRefetching(true);
|
||||||
|
lastErrorMessageRef.current = null;
|
||||||
setData([]);
|
setData([]);
|
||||||
fetchPage(initialCursor);
|
fetchPage(initialCursor);
|
||||||
}, [fetchPage, initialCursor]);
|
}, [fetchPage, initialCursor]);
|
||||||
|
|||||||
@@ -495,6 +495,22 @@ describe('useShoppingLists Hook', () => {
|
|||||||
expect(currentLists[0].items).toHaveLength(1); // Length should remain 1
|
expect(currentLists[0].items).toHaveLength(1); // Length should remain 1
|
||||||
console.log(' LOG: SUCCESS! Duplicate was not added and API was not called.');
|
console.log(' LOG: SUCCESS! Duplicate was not added and API was not called.');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should log an error and not call the API if the listId does not exist', async () => {
|
||||||
|
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||||
|
const { result } = renderHook(() => useShoppingLists());
|
||||||
|
|
||||||
|
await act(async () => {
|
||||||
|
// Call with a non-existent list ID (mock lists have IDs 1 and 2)
|
||||||
|
await result.current.addItemToList(999, { customItemName: 'Wont be added' });
|
||||||
|
});
|
||||||
|
|
||||||
|
// The API should not have been called because the list was not found.
|
||||||
|
expect(mockAddItemApi).not.toHaveBeenCalled();
|
||||||
|
expect(consoleErrorSpy).toHaveBeenCalledWith('useShoppingLists: List with ID 999 not found.');
|
||||||
|
|
||||||
|
consoleErrorSpy.mockRestore();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('updateItemInList', () => {
|
describe('updateItemInList', () => {
|
||||||
@@ -656,24 +672,14 @@ describe('useShoppingLists Hook', () => {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'updateItemInList',
|
name: 'updateItemInList',
|
||||||
action: (hook: any) => {
|
action: (hook: any) => hook.updateItemInList(101, { is_purchased: true }),
|
||||||
act(() => {
|
|
||||||
hook.setActiveListId(1);
|
|
||||||
});
|
|
||||||
return hook.updateItemInList(101, { is_purchased: true });
|
|
||||||
},
|
|
||||||
apiMock: mockUpdateItemApi,
|
apiMock: mockUpdateItemApi,
|
||||||
mockIndex: 3,
|
mockIndex: 3,
|
||||||
errorMessage: 'Update failed',
|
errorMessage: 'Update failed',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'removeItemFromList',
|
name: 'removeItemFromList',
|
||||||
action: (hook: any) => {
|
action: (hook: any) => hook.removeItemFromList(101),
|
||||||
act(() => {
|
|
||||||
hook.setActiveListId(1);
|
|
||||||
});
|
|
||||||
return hook.removeItemFromList(101);
|
|
||||||
},
|
|
||||||
apiMock: mockRemoveItemApi,
|
apiMock: mockRemoveItemApi,
|
||||||
mockIndex: 4,
|
mockIndex: 4,
|
||||||
errorMessage: 'Removal failed',
|
errorMessage: 'Removal failed',
|
||||||
@@ -681,6 +687,17 @@ describe('useShoppingLists Hook', () => {
|
|||||||
])(
|
])(
|
||||||
'should set an error for $name if the API call fails',
|
'should set an error for $name if the API call fails',
|
||||||
async ({ action, apiMock, mockIndex, errorMessage }) => {
|
async ({ action, apiMock, mockIndex, errorMessage }) => {
|
||||||
|
// Setup a default list so activeListId is set automatically
|
||||||
|
const mockList = createMockShoppingList({ shopping_list_id: 1, name: 'List 1' });
|
||||||
|
mockedUseUserData.mockReturnValue({
|
||||||
|
shoppingLists: [mockList],
|
||||||
|
setShoppingLists: mockSetShoppingLists,
|
||||||
|
watchedItems: [],
|
||||||
|
setWatchedItems: vi.fn(),
|
||||||
|
isLoading: false,
|
||||||
|
error: null,
|
||||||
|
});
|
||||||
|
|
||||||
const apiMocksWithError = [...defaultApiMocks];
|
const apiMocksWithError = [...defaultApiMocks];
|
||||||
apiMocksWithError[mockIndex] = {
|
apiMocksWithError[mockIndex] = {
|
||||||
...apiMocksWithError[mockIndex],
|
...apiMocksWithError[mockIndex],
|
||||||
@@ -689,11 +706,25 @@ describe('useShoppingLists Hook', () => {
|
|||||||
setupApiMocks(apiMocksWithError);
|
setupApiMocks(apiMocksWithError);
|
||||||
apiMock.mockRejectedValue(new Error(errorMessage));
|
apiMock.mockRejectedValue(new Error(errorMessage));
|
||||||
|
|
||||||
|
// Spy on console.error to ensure the catch block is executed for logging
|
||||||
|
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
|
||||||
|
|
||||||
const { result } = renderHook(() => useShoppingLists());
|
const { result } = renderHook(() => useShoppingLists());
|
||||||
|
|
||||||
|
// Wait for the effect to set the active list ID
|
||||||
|
await waitFor(() => expect(result.current.activeListId).toBe(1));
|
||||||
|
|
||||||
await act(async () => {
|
await act(async () => {
|
||||||
await action(result.current);
|
await action(result.current);
|
||||||
});
|
});
|
||||||
await waitFor(() => expect(result.current.error).toBe(errorMessage));
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(result.current.error).toBe(errorMessage);
|
||||||
|
// Verify that our custom logging within the catch block was called
|
||||||
|
expect(consoleErrorSpy).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
consoleErrorSpy.mockRestore();
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -2,8 +2,8 @@
|
|||||||
import React, { ReactNode } from 'react';
|
import React, { ReactNode } from 'react';
|
||||||
import { renderHook, waitFor } from '@testing-library/react';
|
import { renderHook, waitFor } from '@testing-library/react';
|
||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
import { useUserData } from '../hooks/useUserData';
|
import { useUserData } from './useUserData';
|
||||||
import { useAuth } from '../hooks/useAuth';
|
import { useAuth } from './useAuth';
|
||||||
import { UserDataProvider } from '../providers/UserDataProvider';
|
import { UserDataProvider } from '../providers/UserDataProvider';
|
||||||
import { useApiOnMount } from './useApiOnMount';
|
import { useApiOnMount } from './useApiOnMount';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
This single directive replaces @tailwind base, components, and utilities.
|
This single directive replaces @tailwind base, components, and utilities.
|
||||||
It is the new entry point for all of Tailwind's generated CSS.
|
It is the new entry point for all of Tailwind's generated CSS.
|
||||||
*/
|
*/
|
||||||
@import "tailwindcss";
|
@import 'tailwindcss';
|
||||||
|
|
||||||
/*
|
/*
|
||||||
This is the new v4 directive that tells the @tailwindcss/postcss plugin
|
This is the new v4 directive that tells the @tailwindcss/postcss plugin
|
||||||
@@ -12,4 +12,3 @@
|
|||||||
Since tailwind.config.js is in the root and this is in src/, the path is '../tailwind.config.js'.
|
Since tailwind.config.js is in the root and this is in src/, the path is '../tailwind.config.js'.
|
||||||
*/
|
*/
|
||||||
@config '../tailwind.config.js';
|
@config '../tailwind.config.js';
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import './index.css';
|
|||||||
|
|
||||||
const rootElement = document.getElementById('root');
|
const rootElement = document.getElementById('root');
|
||||||
if (!rootElement) {
|
if (!rootElement) {
|
||||||
throw new Error("Could not find root element to mount to");
|
throw new Error('Could not find root element to mount to');
|
||||||
}
|
}
|
||||||
|
|
||||||
const root = ReactDOM.createRoot(rootElement);
|
const root = ReactDOM.createRoot(rootElement);
|
||||||
@@ -19,6 +19,5 @@ root.render(
|
|||||||
<App />
|
<App />
|
||||||
</AppProviders>
|
</AppProviders>
|
||||||
</BrowserRouter>
|
</BrowserRouter>
|
||||||
</React.StrictMode>
|
</React.StrictMode>,
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -79,7 +79,7 @@ vi.mock('../pages/admin/ActivityLog', async () => {
|
|||||||
),
|
),
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
vi.mock('../pages/admin/components/AnonymousUserBanner', () => ({
|
vi.mock('../components/AnonymousUserBanner', () => ({
|
||||||
AnonymousUserBanner: () => <div data-testid="anonymous-banner" />,
|
AnonymousUserBanner: () => <div data-testid="anonymous-banner" />,
|
||||||
}));
|
}));
|
||||||
vi.mock('../components/ErrorDisplay', () => ({
|
vi.mock('../components/ErrorDisplay', () => ({
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ import { PriceChart } from '../features/charts/PriceChart';
|
|||||||
import { PriceHistoryChart } from '../features/charts/PriceHistoryChart';
|
import { PriceHistoryChart } from '../features/charts/PriceHistoryChart';
|
||||||
import Leaderboard from '../components/Leaderboard';
|
import Leaderboard from '../components/Leaderboard';
|
||||||
import { ActivityLog, ActivityLogClickHandler } from '../pages/admin/ActivityLog';
|
import { ActivityLog, ActivityLogClickHandler } from '../pages/admin/ActivityLog';
|
||||||
import { AnonymousUserBanner } from '../pages/admin/components/AnonymousUserBanner';
|
import { AnonymousUserBanner } from '../components/AnonymousUserBanner';
|
||||||
import { ErrorDisplay } from '../components/ErrorDisplay';
|
import { ErrorDisplay } from '../components/ErrorDisplay';
|
||||||
|
|
||||||
export interface MainLayoutProps {
|
export interface MainLayoutProps {
|
||||||
|
|||||||
@@ -15,7 +15,8 @@ import type { Logger } from 'pino';
|
|||||||
// Create a mock logger that we can inject into requests and assert against.
|
// Create a mock logger that we can inject into requests and assert against.
|
||||||
// We only mock the methods we intend to spy on. The rest of the complex Pino
|
// We only mock the methods we intend to spy on. The rest of the complex Pino
|
||||||
// Logger type is satisfied by casting, which is a common and clean testing practice.
|
// Logger type is satisfied by casting, which is a common and clean testing practice.
|
||||||
const mockLogger = {
|
const { mockLogger } = vi.hoisted(() => {
|
||||||
|
const mockLogger = {
|
||||||
error: vi.fn(),
|
error: vi.fn(),
|
||||||
warn: vi.fn(),
|
warn: vi.fn(),
|
||||||
info: vi.fn(),
|
info: vi.fn(),
|
||||||
@@ -24,7 +25,9 @@ const mockLogger = {
|
|||||||
trace: vi.fn(),
|
trace: vi.fn(),
|
||||||
silent: vi.fn(),
|
silent: vi.fn(),
|
||||||
child: vi.fn().mockReturnThis(),
|
child: vi.fn().mockReturnThis(),
|
||||||
} as unknown as Logger;
|
};
|
||||||
|
return { mockLogger };
|
||||||
|
});
|
||||||
|
|
||||||
// Mock the global logger as a fallback, though our tests will focus on req.log
|
// Mock the global logger as a fallback, though our tests will focus on req.log
|
||||||
vi.mock('../services/logger.server', () => ({ logger: mockLogger }));
|
vi.mock('../services/logger.server', () => ({ logger: mockLogger }));
|
||||||
@@ -37,7 +40,7 @@ const app = express();
|
|||||||
app.use(express.json());
|
app.use(express.json());
|
||||||
// Add a middleware to inject our mock logger into each request as `req.log`
|
// Add a middleware to inject our mock logger into each request as `req.log`
|
||||||
app.use((req: Request, res: Response, next: NextFunction) => {
|
app.use((req: Request, res: Response, next: NextFunction) => {
|
||||||
req.log = mockLogger;
|
req.log = mockLogger as unknown as Logger;
|
||||||
next();
|
next();
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -106,17 +109,21 @@ describe('errorHandler Middleware', () => {
|
|||||||
it('should return a generic 500 error for a standard Error object', async () => {
|
it('should return a generic 500 error for a standard Error object', async () => {
|
||||||
const response = await supertest(app).get('/generic-error');
|
const response = await supertest(app).get('/generic-error');
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body).toEqual({ message: 'A generic server error occurred.' });
|
// In test/dev, we now expect a stack trace for 5xx errors.
|
||||||
|
expect(response.body.message).toBe('A generic server error occurred.');
|
||||||
|
expect(response.body.stack).toBeDefined();
|
||||||
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
|
console.log('[DEBUG] errorHandler.test.ts: Received 500 error response with ID:', response.body.errorId);
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
err: expect.any(Error),
|
err: expect.any(Error),
|
||||||
errorId: expect.any(String),
|
errorId: expect.any(String),
|
||||||
req: expect.objectContaining({ method: 'GET', url: '/generic-error' }),
|
req: expect.objectContaining({ method: 'GET', url: '/generic-error' }),
|
||||||
}),
|
}),
|
||||||
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
expect.stringMatching(/Unhandled API Error \(ID: [\w-]+\)/),
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
expect.stringMatching(/--- \[TEST\] UNHANDLED ERROR \(ID: \w+\) ---/),
|
||||||
expect.any(Error),
|
expect.any(Error),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -130,15 +137,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(Error),
|
err: expect.any(Error),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 404,
|
statusCode: 404,
|
||||||
},
|
},
|
||||||
'Client Error on GET /http-error-404: Resource not found',
|
'Client Error on GET /http-error-404: Resource not found',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(Error),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a NotFoundError with a 404 status', async () => {
|
it('should handle a NotFoundError with a 404 status', async () => {
|
||||||
@@ -150,15 +153,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(NotFoundError),
|
err: expect.any(NotFoundError),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 404,
|
statusCode: 404,
|
||||||
},
|
},
|
||||||
'Client Error on GET /not-found-error: Specific resource missing',
|
'Client Error on GET /not-found-error: Specific resource missing',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(NotFoundError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a ForeignKeyConstraintError with a 400 status and the specific error message', async () => {
|
it('should handle a ForeignKeyConstraintError with a 400 status and the specific error message', async () => {
|
||||||
@@ -170,15 +169,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(ForeignKeyConstraintError),
|
err: expect.any(ForeignKeyConstraintError),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 400,
|
statusCode: 400,
|
||||||
},
|
},
|
||||||
'Client Error on GET /fk-error: The referenced item does not exist.',
|
'Client Error on GET /fk-error: The referenced item does not exist.',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(ForeignKeyConstraintError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a UniqueConstraintError with a 409 status and the specific error message', async () => {
|
it('should handle a UniqueConstraintError with a 409 status and the specific error message', async () => {
|
||||||
@@ -190,15 +185,11 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
{
|
{
|
||||||
err: expect.any(UniqueConstraintError),
|
err: expect.any(UniqueConstraintError),
|
||||||
validationErrors: undefined,
|
|
||||||
statusCode: 409,
|
statusCode: 409,
|
||||||
},
|
},
|
||||||
'Client Error on GET /unique-error: This item already exists.',
|
'Client Error on GET /unique-error: This item already exists.',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(UniqueConstraintError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a ValidationError with a 400 status and include the validation errors array', async () => {
|
it('should handle a ValidationError with a 400 status and include the validation errors array', async () => {
|
||||||
@@ -219,27 +210,27 @@ describe('errorHandler Middleware', () => {
|
|||||||
},
|
},
|
||||||
'Client Error on GET /validation-error: Input validation failed',
|
'Client Error on GET /validation-error: Input validation failed',
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
|
||||||
expect.any(ValidationError),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a DatabaseError with a 500 status and a generic message', async () => {
|
it('should handle a DatabaseError with a 500 status and a generic message', async () => {
|
||||||
const response = await supertest(app).get('/db-error-500');
|
const response = await supertest(app).get('/db-error-500');
|
||||||
|
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
expect(response.body).toEqual({ message: 'A database connection issue occurred.' });
|
// In test/dev, we now expect a stack trace for 5xx errors.
|
||||||
|
expect(response.body.message).toBe('A database connection issue occurred.');
|
||||||
|
expect(response.body.stack).toBeDefined();
|
||||||
|
expect(response.body.errorId).toEqual(expect.any(String));
|
||||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
err: expect.any(DatabaseError),
|
err: expect.any(DatabaseError),
|
||||||
errorId: expect.any(String),
|
errorId: expect.any(String),
|
||||||
req: expect.objectContaining({ method: 'GET', url: '/db-error-500' }),
|
req: expect.objectContaining({ method: 'GET', url: '/db-error-500' }),
|
||||||
}),
|
}),
|
||||||
expect.stringMatching(/Unhandled API Error \(ID: \w+\)/),
|
expect.stringMatching(/Unhandled API Error \(ID: [\w-]+\)/),
|
||||||
);
|
);
|
||||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('--- [TEST] UNHANDLED ERROR ---'),
|
expect.stringMatching(/--- \[TEST\] UNHANDLED ERROR \(ID: \w+\) ---/),
|
||||||
expect.any(DatabaseError),
|
expect.any(DatabaseError),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -249,8 +240,14 @@ describe('errorHandler Middleware', () => {
|
|||||||
|
|
||||||
expect(response.status).toBe(401);
|
expect(response.status).toBe(401);
|
||||||
expect(response.body).toEqual({ message: 'Invalid Token' });
|
expect(response.body).toEqual({ message: 'Invalid Token' });
|
||||||
// 4xx errors log as warn
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
expect(mockLogger.warn).toHaveBeenCalled();
|
{
|
||||||
|
err: expect.any(Error),
|
||||||
|
statusCode: 401,
|
||||||
|
},
|
||||||
|
'Client Error on GET /unauthorized-error-no-status: Invalid Token',
|
||||||
|
);
|
||||||
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle an UnauthorizedError with explicit status', async () => {
|
it('should handle an UnauthorizedError with explicit status', async () => {
|
||||||
@@ -258,6 +255,14 @@ describe('errorHandler Middleware', () => {
|
|||||||
|
|
||||||
expect(response.status).toBe(401);
|
expect(response.status).toBe(401);
|
||||||
expect(response.body).toEqual({ message: 'Invalid Token' });
|
expect(response.body).toEqual({ message: 'Invalid Token' });
|
||||||
|
expect(mockLogger.warn).toHaveBeenCalledWith(
|
||||||
|
{
|
||||||
|
err: expect.any(Error),
|
||||||
|
statusCode: 401,
|
||||||
|
},
|
||||||
|
'Client Error on GET /unauthorized-error-with-status: Invalid Token',
|
||||||
|
);
|
||||||
|
expect(consoleErrorSpy).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should call next(err) if headers have already been sent', () => {
|
it('should call next(err) if headers have already been sent', () => {
|
||||||
@@ -302,6 +307,7 @@ describe('errorHandler Middleware', () => {
|
|||||||
expect(response.body.message).toMatch(
|
expect(response.body.message).toMatch(
|
||||||
/An unexpected server error occurred. Please reference error ID: \w+/,
|
/An unexpected server error occurred. Please reference error ID: \w+/,
|
||||||
);
|
);
|
||||||
|
expect(response.body.stack).toBeUndefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return the actual error message for client errors (4xx) in production', async () => {
|
it('should return the actual error message for client errors (4xx) in production', async () => {
|
||||||
|
|||||||
@@ -1,94 +1,101 @@
|
|||||||
// src/middleware/errorHandler.ts
|
// src/middleware/errorHandler.ts
|
||||||
import { Request, Response, NextFunction } from 'express';
|
import { Request, Response, NextFunction } from 'express';
|
||||||
|
import crypto from 'crypto';
|
||||||
|
import { ZodError } from 'zod';
|
||||||
import {
|
import {
|
||||||
DatabaseError,
|
|
||||||
UniqueConstraintError,
|
|
||||||
ForeignKeyConstraintError,
|
ForeignKeyConstraintError,
|
||||||
NotFoundError,
|
NotFoundError,
|
||||||
|
UniqueConstraintError,
|
||||||
ValidationError,
|
ValidationError,
|
||||||
ValidationIssue,
|
|
||||||
} from '../services/db/errors.db';
|
} from '../services/db/errors.db';
|
||||||
import crypto from 'crypto';
|
import { logger } from '../services/logger.server';
|
||||||
|
|
||||||
interface HttpError extends Error {
|
/**
|
||||||
status?: number;
|
* A centralized error handling middleware for the Express application.
|
||||||
}
|
* This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
|
||||||
|
*
|
||||||
export const errorHandler = (err: HttpError, req: Request, res: Response, next: NextFunction) => {
|
* It standardizes error responses and ensures consistent logging.
|
||||||
// If the response headers have already been sent, we must delegate to the default Express error handler.
|
*/
|
||||||
|
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
|
||||||
|
// If headers have already been sent, delegate to the default Express error handler.
|
||||||
if (res.headersSent) {
|
if (res.headersSent) {
|
||||||
return next(err);
|
return next(err);
|
||||||
}
|
}
|
||||||
|
|
||||||
// The pino-http middleware guarantees that `req.log` will be available.
|
// Use the request-scoped logger if available, otherwise fall back to the global logger.
|
||||||
const log = req.log;
|
const log = req.log || logger;
|
||||||
|
|
||||||
// --- 1. Determine Final Status Code and Message ---
|
// --- Handle Zod Validation Errors (from validateRequest middleware) ---
|
||||||
let statusCode = err.status ?? 500;
|
if (err instanceof ZodError) {
|
||||||
const message = err.message;
|
const statusCode = 400;
|
||||||
let validationIssues: ValidationIssue[] | undefined;
|
const message = 'The request data is invalid.';
|
||||||
let errorId: string | undefined;
|
const errors = err.issues.map((e) => ({ path: e.path, message: e.message }));
|
||||||
|
log.warn({ err, validationErrors: errors, statusCode }, `Client Error on ${req.method} ${req.path}: ${message}`);
|
||||||
// Refine the status code for known error types. Check for most specific types first.
|
return res.status(statusCode).json({ message, errors });
|
||||||
if (err instanceof UniqueConstraintError) {
|
|
||||||
statusCode = 409; // Conflict
|
|
||||||
} else if (err instanceof NotFoundError) {
|
|
||||||
statusCode = 404;
|
|
||||||
} else if (err instanceof ForeignKeyConstraintError) {
|
|
||||||
statusCode = 400;
|
|
||||||
} else if (err instanceof ValidationError) {
|
|
||||||
statusCode = 400;
|
|
||||||
validationIssues = err.validationErrors;
|
|
||||||
} else if (err instanceof DatabaseError) {
|
|
||||||
// This is a generic fallback for other database errors that are not the specific subclasses above.
|
|
||||||
statusCode = err.status;
|
|
||||||
} else if (err.name === 'UnauthorizedError') {
|
|
||||||
statusCode = err.status || 401;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- 2. Log Based on Final Status Code ---
|
// --- Handle Custom Operational Errors ---
|
||||||
// Log the full error details for debugging, especially for server errors.
|
if (err instanceof NotFoundError) {
|
||||||
if (statusCode >= 500) {
|
const statusCode = 404;
|
||||||
errorId = crypto.randomBytes(4).toString('hex');
|
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
// The request-scoped logger already contains user, IP, and request_id.
|
return res.status(statusCode).json({ message: err.message });
|
||||||
// We add the full error and the request object itself.
|
}
|
||||||
// Pino's `redact` config will automatically sanitize sensitive fields in `req`.
|
|
||||||
|
if (err instanceof ValidationError) {
|
||||||
|
const statusCode = 400;
|
||||||
|
log.warn(
|
||||||
|
{ err, validationErrors: err.validationErrors, statusCode },
|
||||||
|
`Client Error on ${req.method} ${req.path}: ${err.message}`,
|
||||||
|
);
|
||||||
|
return res.status(statusCode).json({ message: err.message, errors: err.validationErrors });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (err instanceof UniqueConstraintError) {
|
||||||
|
const statusCode = 409;
|
||||||
|
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
|
return res.status(statusCode).json({ message: err.message }); // Use 409 Conflict for unique constraints
|
||||||
|
}
|
||||||
|
|
||||||
|
if (err instanceof ForeignKeyConstraintError) {
|
||||||
|
const statusCode = 400;
|
||||||
|
log.warn({ err, statusCode }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
|
return res.status(statusCode).json({ message: err.message });
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Handle Generic Client Errors (e.g., from express-jwt, or manual status setting) ---
|
||||||
|
let status = (err as any).status || (err as any).statusCode;
|
||||||
|
// Default UnauthorizedError to 401 if no status is present, a common case for express-jwt.
|
||||||
|
if (err.name === 'UnauthorizedError' && !status) {
|
||||||
|
status = 401;
|
||||||
|
}
|
||||||
|
if (status && status >= 400 && status < 500) {
|
||||||
|
log.warn({ err, statusCode: status }, `Client Error on ${req.method} ${req.path}: ${err.message}`);
|
||||||
|
return res.status(status).json({ message: err.message });
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Handle All Other (500-level) Errors ---
|
||||||
|
const errorId = crypto.randomBytes(4).toString('hex');
|
||||||
log.error(
|
log.error(
|
||||||
{
|
{
|
||||||
err,
|
err,
|
||||||
errorId,
|
errorId,
|
||||||
req: { method: req.method, url: req.originalUrl, headers: req.headers, body: req.body },
|
req: { method: req.method, url: req.url, headers: req.headers, body: req.body },
|
||||||
},
|
},
|
||||||
`Unhandled API Error (ID: ${errorId})`,
|
`Unhandled API Error (ID: ${errorId})`,
|
||||||
);
|
);
|
||||||
} else {
|
|
||||||
// For 4xx errors, log at a lower level (e.g., 'warn') to avoid flooding error trackers.
|
|
||||||
// We include the validation errors in the log context if they exist.
|
|
||||||
log.warn(
|
|
||||||
{
|
|
||||||
err,
|
|
||||||
validationErrors: validationIssues, // Add validation issues to the log object
|
|
||||||
statusCode,
|
|
||||||
},
|
|
||||||
`Client Error on ${req.method} ${req.path}: ${message}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- TEST ENVIRONMENT DEBUGGING ---
|
// Also log to console in test environment for visibility in test runners
|
||||||
if (process.env.NODE_ENV === 'test') {
|
if (process.env.NODE_ENV === 'test') {
|
||||||
console.error('--- [TEST] UNHANDLED ERROR ---', err);
|
console.error(`--- [TEST] UNHANDLED ERROR (ID: ${errorId}) ---`, err);
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- 3. Send Response ---
|
// In production, send a generic message to avoid leaking implementation details.
|
||||||
// In production, send a generic message for 5xx errors.
|
if (process.env.NODE_ENV === 'production') {
|
||||||
// In dev/test, send the actual error message for easier debugging.
|
return res.status(500).json({
|
||||||
const responseMessage =
|
message: `An unexpected server error occurred. Please reference error ID: ${errorId}`,
|
||||||
statusCode >= 500 && process.env.NODE_ENV === 'production'
|
|
||||||
? `An unexpected server error occurred. Please reference error ID: ${errorId}`
|
|
||||||
: message;
|
|
||||||
|
|
||||||
res.status(statusCode).json({
|
|
||||||
message: responseMessage,
|
|
||||||
...(validationIssues && { errors: validationIssues }), // Conditionally add the 'errors' array if it exists
|
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// In non-production environments (dev, test, etc.), send more details for easier debugging.
|
||||||
|
return res.status(500).json({ message: err.message, stack: err.stack, errorId });
|
||||||
};
|
};
|
||||||
269
src/middleware/multer.middleware.test.ts
Normal file
269
src/middleware/multer.middleware.test.ts
Normal file
@@ -0,0 +1,269 @@
|
|||||||
|
// src/middleware/multer.middleware.test.ts
|
||||||
|
import { describe, it, expect, vi, beforeEach, afterEach, Mock } from 'vitest';
|
||||||
|
import multer from 'multer';
|
||||||
|
import type { Request, Response, NextFunction } from 'express';
|
||||||
|
import { createUploadMiddleware, handleMulterError } from './multer.middleware';
|
||||||
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
|
import { ValidationError } from '../services/db/errors.db';
|
||||||
|
|
||||||
|
// 1. Hoist the mocks so they can be referenced inside vi.mock factories.
|
||||||
|
const mocks = vi.hoisted(() => ({
|
||||||
|
mkdir: vi.fn(),
|
||||||
|
logger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
// 2. Mock node:fs/promises.
|
||||||
|
// We mock the default export because that's how it's imported in the source file.
|
||||||
|
vi.mock('node:fs/promises', () => ({
|
||||||
|
default: {
|
||||||
|
mkdir: mocks.mkdir,
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
// 3. Mock the logger service.
|
||||||
|
vi.mock('../services/logger.server', () => ({
|
||||||
|
logger: mocks.logger,
|
||||||
|
}));
|
||||||
|
|
||||||
|
// 4. Mock multer to prevent it from doing anything during import.
|
||||||
|
vi.mock('multer', () => {
|
||||||
|
const diskStorage = vi.fn((options) => options);
|
||||||
|
// A more realistic mock for MulterError that maps error codes to messages,
|
||||||
|
// similar to how the actual multer library works.
|
||||||
|
class MulterError extends Error {
|
||||||
|
code: string;
|
||||||
|
field?: string;
|
||||||
|
|
||||||
|
constructor(code: string, field?: string) {
|
||||||
|
const messages: { [key: string]: string } = {
|
||||||
|
LIMIT_FILE_SIZE: 'File too large',
|
||||||
|
LIMIT_UNEXPECTED_FILE: 'Unexpected file',
|
||||||
|
// Add other codes as needed for tests
|
||||||
|
};
|
||||||
|
const message = messages[code] || code;
|
||||||
|
super(message);
|
||||||
|
this.code = code;
|
||||||
|
this.name = 'MulterError';
|
||||||
|
if (field) {
|
||||||
|
this.field = field;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const multer = vi.fn(() => ({
|
||||||
|
single: vi.fn().mockImplementation(() => (req: any, res: any, next: any) => next()),
|
||||||
|
array: vi.fn().mockImplementation(() => (req: any, res: any, next: any) => next()),
|
||||||
|
}));
|
||||||
|
(multer as any).diskStorage = diskStorage;
|
||||||
|
(multer as any).MulterError = MulterError;
|
||||||
|
return {
|
||||||
|
default: multer,
|
||||||
|
diskStorage,
|
||||||
|
MulterError,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Multer Middleware Directory Creation', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
// Critical: Reset modules to ensure the top-level IIFE runs again for each test.
|
||||||
|
vi.resetModules();
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should attempt to create directories on module load and log success', async () => {
|
||||||
|
// Arrange
|
||||||
|
mocks.mkdir.mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
// Act: Dynamic import triggers the top-level code execution
|
||||||
|
await import('./multer.middleware');
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
// It should try to create both the flyer storage and avatar storage paths
|
||||||
|
expect(mocks.mkdir).toHaveBeenCalledTimes(2);
|
||||||
|
expect(mocks.mkdir).toHaveBeenCalledWith(expect.any(String), { recursive: true });
|
||||||
|
expect(mocks.logger.info).toHaveBeenCalledWith('Ensured multer storage directories exist.');
|
||||||
|
expect(mocks.logger.error).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log an error if directory creation fails', async () => {
|
||||||
|
// Arrange
|
||||||
|
const error = new Error('Permission denied');
|
||||||
|
mocks.mkdir.mockRejectedValue(error);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await import('./multer.middleware');
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mocks.mkdir).toHaveBeenCalled();
|
||||||
|
expect(mocks.logger.error).toHaveBeenCalledWith(
|
||||||
|
{ error },
|
||||||
|
'Failed to create multer storage directories on startup.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('createUploadMiddleware', () => {
|
||||||
|
const mockFile = { originalname: 'test.png' } as Express.Multer.File;
|
||||||
|
const mockUser = createMockUserProfile({ user: { user_id: 'user-123', email: 'test@user.com' } });
|
||||||
|
let originalNodeEnv: string | undefined;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
originalNodeEnv = process.env.NODE_ENV;
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
process.env.NODE_ENV = originalNodeEnv;
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Avatar Storage', () => {
|
||||||
|
it('should generate a unique filename for an authenticated user', () => {
|
||||||
|
process.env.NODE_ENV = 'production';
|
||||||
|
createUploadMiddleware({ storageType: 'avatar' });
|
||||||
|
const storageOptions = vi.mocked(multer.diskStorage).mock.calls[0][0];
|
||||||
|
const cb = vi.fn();
|
||||||
|
const mockReq = { user: mockUser } as unknown as Request;
|
||||||
|
|
||||||
|
storageOptions.filename!(mockReq, mockFile, cb);
|
||||||
|
|
||||||
|
expect(cb).toHaveBeenCalledWith(null, expect.stringContaining('user-123-'));
|
||||||
|
expect(cb).toHaveBeenCalledWith(null, expect.stringContaining('.png'));
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call the callback with an error for an unauthenticated user', () => {
|
||||||
|
// This test covers line 37
|
||||||
|
createUploadMiddleware({ storageType: 'avatar' });
|
||||||
|
const storageOptions = vi.mocked(multer.diskStorage).mock.calls[0][0];
|
||||||
|
const cb = vi.fn();
|
||||||
|
const mockReq = {} as Request; // No user on request
|
||||||
|
|
||||||
|
storageOptions.filename!(mockReq, mockFile, cb);
|
||||||
|
|
||||||
|
expect(cb).toHaveBeenCalledWith(
|
||||||
|
new Error('User not authenticated for avatar upload'),
|
||||||
|
expect.any(String),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should use a predictable filename in test environment', () => {
|
||||||
|
process.env.NODE_ENV = 'test';
|
||||||
|
createUploadMiddleware({ storageType: 'avatar' });
|
||||||
|
const storageOptions = vi.mocked(multer.diskStorage).mock.calls[0][0];
|
||||||
|
const cb = vi.fn();
|
||||||
|
const mockReq = { user: mockUser } as unknown as Request;
|
||||||
|
|
||||||
|
storageOptions.filename!(mockReq, mockFile, cb);
|
||||||
|
|
||||||
|
expect(cb).toHaveBeenCalledWith(null, 'test-avatar.png');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Flyer Storage', () => {
|
||||||
|
it('should generate a unique, sanitized filename in production environment', () => {
|
||||||
|
process.env.NODE_ENV = 'production';
|
||||||
|
const mockFlyerFile = {
|
||||||
|
fieldname: 'flyerFile',
|
||||||
|
originalname: 'My Flyer (Special!).pdf',
|
||||||
|
} as Express.Multer.File;
|
||||||
|
createUploadMiddleware({ storageType: 'flyer' });
|
||||||
|
const storageOptions = vi.mocked(multer.diskStorage).mock.calls[0][0];
|
||||||
|
const cb = vi.fn();
|
||||||
|
const mockReq = {} as Request;
|
||||||
|
|
||||||
|
storageOptions.filename!(mockReq, mockFlyerFile, cb);
|
||||||
|
|
||||||
|
expect(cb).toHaveBeenCalledWith(
|
||||||
|
null,
|
||||||
|
expect.stringMatching(/^flyerFile-\d+-\d+-my-flyer-special\.pdf$/i),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should generate a predictable filename in test environment', () => {
|
||||||
|
// This test covers lines 43-46
|
||||||
|
process.env.NODE_ENV = 'test';
|
||||||
|
const mockFlyerFile = {
|
||||||
|
fieldname: 'flyerFile',
|
||||||
|
originalname: 'test-flyer.jpg',
|
||||||
|
} as Express.Multer.File;
|
||||||
|
createUploadMiddleware({ storageType: 'flyer' });
|
||||||
|
const storageOptions = vi.mocked(multer.diskStorage).mock.calls[0][0];
|
||||||
|
const cb = vi.fn();
|
||||||
|
const mockReq = {} as Request;
|
||||||
|
|
||||||
|
storageOptions.filename!(mockReq, mockFlyerFile, cb);
|
||||||
|
|
||||||
|
expect(cb).toHaveBeenCalledWith(null, 'flyerFile-test-flyer-image.jpg');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Image File Filter', () => {
|
||||||
|
it('should accept files with an image mimetype', () => {
|
||||||
|
createUploadMiddleware({ storageType: 'flyer', fileFilter: 'image' });
|
||||||
|
const multerOptions = vi.mocked(multer).mock.calls[0][0];
|
||||||
|
const cb = vi.fn();
|
||||||
|
const mockImageFile = { mimetype: 'image/png' } as Express.Multer.File;
|
||||||
|
|
||||||
|
multerOptions!.fileFilter!({} as Request, mockImageFile, cb);
|
||||||
|
|
||||||
|
expect(cb).toHaveBeenCalledWith(null, true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject files without an image mimetype', () => {
|
||||||
|
createUploadMiddleware({ storageType: 'flyer', fileFilter: 'image' });
|
||||||
|
const multerOptions = vi.mocked(multer).mock.calls[0][0];
|
||||||
|
const cb = vi.fn();
|
||||||
|
const mockTextFile = { mimetype: 'text/plain' } as Express.Multer.File;
|
||||||
|
|
||||||
|
multerOptions!.fileFilter!({} as Request, { ...mockTextFile, fieldname: 'test' }, cb);
|
||||||
|
|
||||||
|
const error = (cb as Mock).mock.calls[0][0];
|
||||||
|
expect(error).toBeInstanceOf(ValidationError);
|
||||||
|
expect(error.validationErrors[0].message).toBe('Only image files are allowed!');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('handleMulterError Middleware', () => {
|
||||||
|
let mockRequest: Partial<Request>;
|
||||||
|
let mockResponse: Partial<Response>;
|
||||||
|
let mockNext: NextFunction;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
mockRequest = {};
|
||||||
|
mockResponse = {
|
||||||
|
status: vi.fn().mockReturnThis(),
|
||||||
|
json: vi.fn(),
|
||||||
|
};
|
||||||
|
mockNext = vi.fn();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle a MulterError (e.g., file too large)', () => {
|
||||||
|
const err = new multer.MulterError('LIMIT_FILE_SIZE');
|
||||||
|
handleMulterError(err, mockRequest as Request, mockResponse as Response, mockNext);
|
||||||
|
expect(mockResponse.status).toHaveBeenCalledWith(400);
|
||||||
|
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||||
|
message: 'File upload error: File too large',
|
||||||
|
});
|
||||||
|
expect(mockNext).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should pass on a ValidationError to the next handler', () => {
|
||||||
|
const err = new ValidationError([], 'Only image files are allowed!');
|
||||||
|
handleMulterError(err, mockRequest as Request, mockResponse as Response, mockNext);
|
||||||
|
// It should now pass the error to the global error handler
|
||||||
|
expect(mockNext).toHaveBeenCalledWith(err);
|
||||||
|
expect(mockResponse.status).not.toHaveBeenCalled();
|
||||||
|
expect(mockResponse.json).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should pass on non-multer errors to the next error handler', () => {
|
||||||
|
const err = new Error('A generic error');
|
||||||
|
handleMulterError(err, mockRequest as Request, mockResponse as Response, mockNext);
|
||||||
|
expect(mockNext).toHaveBeenCalledWith(err);
|
||||||
|
expect(mockResponse.status).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
122
src/middleware/multer.middleware.ts
Normal file
122
src/middleware/multer.middleware.ts
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
// src/middleware/multer.middleware.ts
|
||||||
|
import multer from 'multer';
|
||||||
|
import path from 'path';
|
||||||
|
import fs from 'node:fs/promises';
|
||||||
|
import { Request, Response, NextFunction } from 'express';
|
||||||
|
import { UserProfile } from '../types';
|
||||||
|
import { sanitizeFilename } from '../utils/stringUtils';
|
||||||
|
import { ValidationError } from '../services/db/errors.db';
|
||||||
|
import { logger } from '../services/logger.server';
|
||||||
|
|
||||||
|
export const flyerStoragePath =
|
||||||
|
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
||||||
|
export const avatarStoragePath = path.join(process.cwd(), 'public', 'uploads', 'avatars');
|
||||||
|
|
||||||
|
// Ensure directories exist at startup
|
||||||
|
(async () => {
|
||||||
|
try {
|
||||||
|
await fs.mkdir(flyerStoragePath, { recursive: true });
|
||||||
|
await fs.mkdir(avatarStoragePath, { recursive: true });
|
||||||
|
logger.info('Ensured multer storage directories exist.');
|
||||||
|
} catch (error) {
|
||||||
|
const err = error instanceof Error ? error : new Error(String(error));
|
||||||
|
logger.error({ error: err }, 'Failed to create multer storage directories on startup.');
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
|
||||||
|
type StorageType = 'flyer' | 'avatar';
|
||||||
|
|
||||||
|
const getStorageConfig = (type: StorageType) => {
|
||||||
|
switch (type) {
|
||||||
|
case 'avatar':
|
||||||
|
return multer.diskStorage({
|
||||||
|
destination: (req, file, cb) => cb(null, avatarStoragePath),
|
||||||
|
filename: (req, file, cb) => {
|
||||||
|
const user = req.user as UserProfile | undefined;
|
||||||
|
if (!user) {
|
||||||
|
// This should ideally not happen if auth middleware runs first.
|
||||||
|
return cb(new Error('User not authenticated for avatar upload'), '');
|
||||||
|
}
|
||||||
|
if (process.env.NODE_ENV === 'test') {
|
||||||
|
// Use a predictable filename for test avatars for easy cleanup.
|
||||||
|
return cb(null, `test-avatar${path.extname(file.originalname) || '.png'}`);
|
||||||
|
}
|
||||||
|
const uniqueSuffix = `${user.user.user_id}-${Date.now()}${path.extname(
|
||||||
|
file.originalname,
|
||||||
|
)}`;
|
||||||
|
cb(null, uniqueSuffix);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
case 'flyer':
|
||||||
|
default:
|
||||||
|
return multer.diskStorage({
|
||||||
|
destination: (req, file, cb) => cb(null, flyerStoragePath),
|
||||||
|
filename: (req, file, cb) => {
|
||||||
|
if (process.env.NODE_ENV === 'test') {
|
||||||
|
// Use a predictable filename for test flyers for easy cleanup.
|
||||||
|
const ext = path.extname(file.originalname);
|
||||||
|
return cb(null, `${file.fieldname}-test-flyer-image${ext || '.jpg'}`);
|
||||||
|
}
|
||||||
|
const uniqueSuffix = `${Date.now()}-${Math.round(Math.random() * 1e9)}`;
|
||||||
|
const sanitizedOriginalName = sanitizeFilename(file.originalname);
|
||||||
|
cb(null, `${file.fieldname}-${uniqueSuffix}-${sanitizedOriginalName}`);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const imageFileFilter = (req: Request, file: Express.Multer.File, cb: multer.FileFilterCallback) => {
|
||||||
|
if (file.mimetype.startsWith('image/')) {
|
||||||
|
cb(null, true);
|
||||||
|
} else {
|
||||||
|
// Reject the file with a specific error that can be caught by a middleware.
|
||||||
|
const validationIssue = { path: ['file', file.fieldname], message: 'Only image files are allowed!' };
|
||||||
|
const err = new ValidationError([validationIssue], 'Only image files are allowed!');
|
||||||
|
cb(err as Error); // Cast to Error to satisfy multer's type, though ValidationError extends Error.
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
interface MulterOptions {
|
||||||
|
storageType: StorageType;
|
||||||
|
fileSize?: number;
|
||||||
|
fileFilter?: 'image';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a configured multer instance for file uploads.
|
||||||
|
* @param options - Configuration for storage type, file size, and file filter.
|
||||||
|
* @returns A multer instance.
|
||||||
|
*/
|
||||||
|
export const createUploadMiddleware = (options: MulterOptions) => {
|
||||||
|
const multerOptions: multer.Options = {
|
||||||
|
storage: getStorageConfig(options.storageType),
|
||||||
|
};
|
||||||
|
|
||||||
|
if (options.fileSize) {
|
||||||
|
multerOptions.limits = { fileSize: options.fileSize };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.fileFilter === 'image') {
|
||||||
|
multerOptions.fileFilter = imageFileFilter;
|
||||||
|
}
|
||||||
|
|
||||||
|
return multer(multerOptions);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A general error handler for multer. Place this after all routes using multer in your router file.
|
||||||
|
* It catches errors from `fileFilter` and other multer issues (e.g., file size limits).
|
||||||
|
*/
|
||||||
|
export const handleMulterError = (
|
||||||
|
err: Error,
|
||||||
|
req: Request,
|
||||||
|
res: Response,
|
||||||
|
next: NextFunction,
|
||||||
|
) => {
|
||||||
|
if (err instanceof multer.MulterError) {
|
||||||
|
// A Multer error occurred when uploading (e.g., file too large).
|
||||||
|
return res.status(400).json({ message: `File upload error: ${err.message}` });
|
||||||
|
}
|
||||||
|
// If it's not a multer error, pass it on.
|
||||||
|
next(err);
|
||||||
|
};
|
||||||
@@ -4,7 +4,7 @@ import { useParams, useNavigate, Link } from 'react-router-dom';
|
|||||||
import * as apiClient from '../services/apiClient';
|
import * as apiClient from '../services/apiClient';
|
||||||
import { logger } from '../services/logger.client';
|
import { logger } from '../services/logger.client';
|
||||||
import { LoadingSpinner } from '../components/LoadingSpinner';
|
import { LoadingSpinner } from '../components/LoadingSpinner';
|
||||||
import { PasswordInput } from './admin/components/PasswordInput';
|
import { PasswordInput } from '../components/PasswordInput';
|
||||||
|
|
||||||
export const ResetPasswordPage: React.FC = () => {
|
export const ResetPasswordPage: React.FC = () => {
|
||||||
const { token } = useParams<{ token: string }>();
|
const { token } = useParams<{ token: string }>();
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { SystemCheck } from './components/SystemCheck';
|
|||||||
import { Link } from 'react-router-dom';
|
import { Link } from 'react-router-dom';
|
||||||
import { ShieldExclamationIcon } from '../../components/icons/ShieldExclamationIcon';
|
import { ShieldExclamationIcon } from '../../components/icons/ShieldExclamationIcon';
|
||||||
import { ChartBarIcon } from '../../components/icons/ChartBarIcon';
|
import { ChartBarIcon } from '../../components/icons/ChartBarIcon';
|
||||||
|
import { DocumentMagnifyingGlassIcon } from '../../components/icons/DocumentMagnifyingGlassIcon';
|
||||||
|
|
||||||
export const AdminPage: React.FC = () => {
|
export const AdminPage: React.FC = () => {
|
||||||
// The onReady prop for SystemCheck is present to allow for future UI changes,
|
// The onReady prop for SystemCheck is present to allow for future UI changes,
|
||||||
@@ -39,6 +40,13 @@ export const AdminPage: React.FC = () => {
|
|||||||
<ChartBarIcon className="w-6 h-6 mr-3 text-brand-primary" />
|
<ChartBarIcon className="w-6 h-6 mr-3 text-brand-primary" />
|
||||||
<span className="font-semibold">View Statistics</span>
|
<span className="font-semibold">View Statistics</span>
|
||||||
</Link>
|
</Link>
|
||||||
|
<Link
|
||||||
|
to="/admin/flyer-review"
|
||||||
|
className="flex items-center p-3 rounded-lg hover:bg-gray-100 dark:hover:bg-gray-700/50 transition-colors"
|
||||||
|
>
|
||||||
|
<DocumentMagnifyingGlassIcon className="w-6 h-6 mr-3 text-brand-primary" />
|
||||||
|
<span className="font-semibold">Flyer Review Queue</span>
|
||||||
|
</Link>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<SystemCheck />
|
<SystemCheck />
|
||||||
|
|||||||
179
src/pages/admin/FlyerReviewPage.test.tsx
Normal file
179
src/pages/admin/FlyerReviewPage.test.tsx
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
// src/pages/admin/FlyerReviewPage.test.tsx
// Unit tests for FlyerReviewPage covering loading, empty, populated,
// HTTP-error, thrown-error, and non-Error-rejection states.
import { render, screen, waitFor, within } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { FlyerReviewPage } from './FlyerReviewPage';
import { MemoryRouter } from 'react-router-dom';
import * as apiClient from '../../services/apiClient';
import { logger } from '../../services/logger.client';

// Mock dependencies
vi.mock('../../services/apiClient', () => ({
  getFlyersForReview: vi.fn(),
}));

vi.mock('../../services/logger.client', () => ({
  logger: {
    error: vi.fn(),
  },
}));

// Mock LoadingSpinner to simplify DOM and avoid potential issues
vi.mock('../../components/LoadingSpinner', () => ({
  LoadingSpinner: () => <div data-testid="loading-spinner">Loading...</div>,
}));

describe('FlyerReviewPage', () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it('renders loading spinner initially', () => {
    // Mock a promise that doesn't resolve immediately to check loading state
    vi.mocked(apiClient.getFlyersForReview).mockReturnValue(new Promise(() => {}));

    render(
      <MemoryRouter>
        <FlyerReviewPage />
      </MemoryRouter>
    );

    // The page exposes its loading container via role="status" + aria-label.
    expect(screen.getByRole('status', { name: /loading flyers for review/i })).toBeInTheDocument();
  });

  it('renders empty state when no flyers are returned', async () => {
    // `as Response` cast: only `ok` and `json()` are consumed by the component.
    vi.mocked(apiClient.getFlyersForReview).mockResolvedValue({
      ok: true,
      json: async () => [],
    } as Response);

    render(
      <MemoryRouter>
        <FlyerReviewPage />
      </MemoryRouter>
    );

    // Wait for the loading indicator to disappear before asserting content.
    await waitFor(() => {
      expect(screen.queryByRole('status')).not.toBeInTheDocument();
    });

    expect(screen.getByText(/the review queue is empty/i)).toBeInTheDocument();
  });

  it('renders a list of flyers when API returns data', async () => {
    // Third entry has null store/icon_url to exercise the fallback rendering.
    const mockFlyers = [
      {
        flyer_id: 1,
        file_name: 'flyer1.jpg',
        created_at: '2023-01-01T00:00:00Z',
        store: { name: 'Store A' },
        icon_url: 'icon1.jpg',
      },
      {
        flyer_id: 2,
        file_name: 'flyer2.jpg',
        created_at: '2023-01-02T00:00:00Z',
        store: { name: 'Store B' },
        icon_url: 'icon2.jpg',
      },
      {
        flyer_id: 3,
        file_name: 'flyer3.jpg',
        created_at: '2023-01-03T00:00:00Z',
        store: null,
        icon_url: null,
      },
    ];

    vi.mocked(apiClient.getFlyersForReview).mockResolvedValue({
      ok: true,
      json: async () => mockFlyers,
    } as Response);

    render(
      <MemoryRouter>
        <FlyerReviewPage />
      </MemoryRouter>
    );

    await waitFor(() => {
      expect(screen.queryByRole('status')).not.toBeInTheDocument();
    });

    expect(screen.getByText('Store A')).toBeInTheDocument();
    expect(screen.getByText('flyer1.jpg')).toBeInTheDocument();
    expect(screen.getByText('Store B')).toBeInTheDocument();
    expect(screen.getByText('flyer2.jpg')).toBeInTheDocument();

    // Test fallback for null store and icon_url
    expect(screen.getByText('Unknown Store')).toBeInTheDocument();
    expect(screen.getByText('flyer3.jpg')).toBeInTheDocument();
    // The component renders `src={icon_url || undefined}` and
    // `alt={store?.name}`, so both attributes should be absent here.
    const unknownStoreItem = screen.getByText('Unknown Store').closest('li');
    const unknownStoreImage = within(unknownStoreItem!).getByRole('img');
    expect(unknownStoreImage).not.toHaveAttribute('src');
    expect(unknownStoreImage).not.toHaveAttribute('alt');
  });

  it('renders error message when API response is not ok', async () => {
    vi.mocked(apiClient.getFlyersForReview).mockResolvedValue({
      ok: false,
      json: async () => ({ message: 'Server error' }),
    } as Response);

    render(
      <MemoryRouter>
        <FlyerReviewPage />
      </MemoryRouter>
    );

    await waitFor(() => {
      expect(screen.queryByRole('status')).not.toBeInTheDocument();
    });

    // The server-provided message is shown and the failure is logged.
    expect(screen.getByText('Server error')).toBeInTheDocument();
    expect(logger.error).toHaveBeenCalledWith(
      expect.objectContaining({ err: expect.any(Error) }),
      'Failed to fetch flyers for review'
    );
  });

  it('renders error message when API throws an error', async () => {
    const networkError = new Error('Network error');
    vi.mocked(apiClient.getFlyersForReview).mockRejectedValue(networkError);

    render(
      <MemoryRouter>
        <FlyerReviewPage />
      </MemoryRouter>
    );

    await waitFor(() => {
      expect(screen.queryByRole('status')).not.toBeInTheDocument();
    });

    expect(screen.getByText('Network error')).toBeInTheDocument();
    expect(logger.error).toHaveBeenCalledWith(
      { err: networkError },
      'Failed to fetch flyers for review'
    );
  });

  it('renders a generic error for non-Error rejections', async () => {
    // Rejecting with a plain object exercises the `err instanceof Error`
    // fallback branch in the component's catch block.
    const nonErrorRejection = { message: 'This is not an Error object' };
    vi.mocked(apiClient.getFlyersForReview).mockRejectedValue(nonErrorRejection);

    render(
      <MemoryRouter>
        <FlyerReviewPage />
      </MemoryRouter>,
    );

    await waitFor(() => {
      expect(screen.getByText('An unknown error occurred while fetching data.')).toBeInTheDocument();
    });

    expect(logger.error).toHaveBeenCalledWith(
      { err: nonErrorRejection },
      'Failed to fetch flyers for review',
    );
  });
});
||||||
93
src/pages/admin/FlyerReviewPage.tsx
Normal file
93
src/pages/admin/FlyerReviewPage.tsx
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
// src/pages/admin/FlyerReviewPage.tsx
|
||||||
|
import React, { useEffect, useState } from 'react';
|
||||||
|
import { Link } from 'react-router-dom';
|
||||||
|
import { getFlyersForReview } from '../../services/apiClient';
|
||||||
|
import { logger } from '../../services/logger.client';
|
||||||
|
import type { Flyer } from '../../types';
|
||||||
|
import { LoadingSpinner } from '../../components/LoadingSpinner';
|
||||||
|
import { format } from 'date-fns';
|
||||||
|
|
||||||
|
/**
 * Admin page listing flyers flagged for manual review (processed with low
 * confidence by the AI). Fetches the queue once on mount and renders one of
 * four states: loading, error, empty queue, or a linked list of flyers.
 */
export const FlyerReviewPage: React.FC = () => {
  // Flyers awaiting review; stays empty until the fetch resolves.
  const [flyers, setFlyers] = useState<Flyer[]>([]);
  const [isLoading, setIsLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  useEffect(() => {
    const fetchFlyers = async () => {
      setIsLoading(true);
      setError(null);
      try {
        const response = await getFlyersForReview();
        if (!response.ok) {
          // Prefer the server-provided message; fall back to a generic one.
          throw new Error((await response.json()).message || 'Failed to fetch flyers for review.');
        }
        setFlyers(await response.json());
      } catch (err) {
        // Non-Error rejections get a generic user-facing message.
        const errorMessage =
          err instanceof Error ? err.message : 'An unknown error occurred while fetching data.';
        logger.error({ err }, 'Failed to fetch flyers for review');
        setError(errorMessage);
      } finally {
        setIsLoading(false);
      }
    };

    fetchFlyers();
  }, []);

  return (
    <div className="max-w-7xl mx-auto py-8 px-4">
      <div className="mb-8">
        <Link to="/admin" className="text-brand-primary hover:underline">
          ← Back to Admin Dashboard
        </Link>
        <h1 className="text-3xl font-bold text-gray-800 dark:text-white mt-2">
          Flyer Review Queue
        </h1>
        <p className="text-gray-500 dark:text-gray-400">
          Review flyers that were processed with low confidence by the AI.
        </p>
      </div>

      {/* role="status" + aria-label lets tests and screen readers find the spinner */}
      {isLoading && (
        <div
          role="status"
          aria-label="Loading flyers for review"
          className="flex justify-center items-center h-64"
        >
          <LoadingSpinner />
        </div>
      )}
      {error && (
        <div className="text-red-500 bg-red-100 dark:bg-red-900/20 p-4 rounded-lg">{error}</div>
      )}

      {!isLoading && !error && (
        <div className="bg-white dark:bg-gray-800 rounded-lg border border-gray-200 dark:border-gray-700 overflow-hidden">
          <ul className="divide-y divide-gray-200 dark:divide-gray-700">
            {flyers.length === 0 ? (
              <li className="p-6 text-center text-gray-500">
                The review queue is empty. Great job!
              </li>
            ) : (
              flyers.map((flyer) => (
                <li key={flyer.flyer_id} className="p-4 hover:bg-gray-50 dark:hover:bg-gray-700/50">
                  <Link to={`/flyers/${flyer.flyer_id}`} className="flex items-center space-x-4">
                    {/* Both src and alt are intentionally omitted when the flyer
                        has no icon/store — tests assert the attributes are absent */}
                    <img src={flyer.icon_url || undefined} alt={flyer.store?.name} className="w-12 h-12 rounded-md object-cover" />
                    <div className="flex-1">
                      <p className="font-semibold text-gray-800 dark:text-white">{flyer.store?.name || 'Unknown Store'}</p>
                      <p className="text-sm text-gray-500 dark:text-gray-400">{flyer.file_name}</p>
                    </div>
                    <div className="text-right text-sm text-gray-500 dark:text-gray-400">
                      <p>Uploaded: {format(new Date(flyer.created_at), 'MMM d, yyyy')}</p>
                    </div>
                  </Link>
                </li>
              ))
            )}
          </ul>
        </div>
      )}
    </div>
  );
};
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
// src/pages/admin/components/AuthView.test.tsx
|
// src/pages/admin/components/AuthView.test.tsx
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
|
import { render, screen, fireEvent, waitFor, act } from '@testing-library/react';
|
||||||
import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, type Mock } from 'vitest';
|
||||||
import { AuthView } from './AuthView';
|
import { AuthView } from './AuthView';
|
||||||
import * as apiClient from '../../../services/apiClient';
|
import * as apiClient from '../../../services/apiClient';
|
||||||
@@ -12,6 +12,11 @@ const mockedApiClient = vi.mocked(apiClient, true);
|
|||||||
const mockOnClose = vi.fn();
|
const mockOnClose = vi.fn();
|
||||||
const mockOnLoginSuccess = vi.fn();
|
const mockOnLoginSuccess = vi.fn();
|
||||||
|
|
||||||
|
vi.mock('../../../components/PasswordInput', () => ({
|
||||||
|
// Mock the moved component
|
||||||
|
PasswordInput: (props: any) => <input {...props} data-testid="password-input" />,
|
||||||
|
}));
|
||||||
|
|
||||||
const defaultProps = {
|
const defaultProps = {
|
||||||
onClose: mockOnClose,
|
onClose: mockOnClose,
|
||||||
onLoginSuccess: mockOnLoginSuccess,
|
onLoginSuccess: mockOnLoginSuccess,
|
||||||
@@ -353,4 +358,27 @@ describe('AuthView', () => {
|
|||||||
expect(screen.queryByText('Send Reset Link')).not.toBeInTheDocument();
|
expect(screen.queryByText('Send Reset Link')).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should show loading state during registration submission', async () => {
|
||||||
|
// Mock a promise that doesn't resolve immediately
|
||||||
|
(mockedApiClient.registerUser as Mock).mockReturnValue(new Promise(() => {}));
|
||||||
|
render(<AuthView {...defaultProps} />);
|
||||||
|
|
||||||
|
// Switch to registration view
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: /don't have an account\? register/i }));
|
||||||
|
|
||||||
|
fireEvent.change(screen.getByLabelText(/email address/i), {
|
||||||
|
target: { value: 'test@example.com' },
|
||||||
|
});
|
||||||
|
fireEvent.change(screen.getByTestId('password-input'), { target: { value: 'password' } });
|
||||||
|
fireEvent.submit(screen.getByTestId('auth-form'));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
const submitButton = screen.getByTestId('auth-form').querySelector('button[type="submit"]');
|
||||||
|
expect(submitButton).toBeInTheDocument();
|
||||||
|
expect(submitButton).toBeDisabled();
|
||||||
|
// Verify the text 'Register' is gone from any button
|
||||||
|
expect(screen.queryByRole('button', { name: 'Register' })).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ import { notifySuccess } from '../../../services/notificationService';
|
|||||||
import { LoadingSpinner } from '../../../components/LoadingSpinner';
|
import { LoadingSpinner } from '../../../components/LoadingSpinner';
|
||||||
import { GoogleIcon } from '../../../components/icons/GoogleIcon';
|
import { GoogleIcon } from '../../../components/icons/GoogleIcon';
|
||||||
import { GithubIcon } from '../../../components/icons/GithubIcon';
|
import { GithubIcon } from '../../../components/icons/GithubIcon';
|
||||||
import { PasswordInput } from './PasswordInput';
|
import { PasswordInput } from '../../../components/PasswordInput';
|
||||||
|
|
||||||
interface AuthResponse {
|
interface AuthResponse {
|
||||||
userprofile: UserProfile;
|
userprofile: UserProfile;
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
// src/pages/admin/components/ProfileManager.test.tsx
|
// src/pages/admin/components/ProfileManager.test.tsx
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
import { render, screen, fireEvent, waitFor, cleanup, act } from '@testing-library/react';
|
import { render, screen, fireEvent, waitFor, cleanup, act } from '@testing-library/react';
|
||||||
import { describe, it, expect, vi, beforeEach, afterEach, type Mock } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, afterEach, type Mock, test } from 'vitest';
|
||||||
import { ProfileManager } from './ProfileManager';
|
import { ProfileManager } from './ProfileManager';
|
||||||
import * as apiClient from '../../../services/apiClient';
|
import * as apiClient from '../../../services/apiClient';
|
||||||
import { notifySuccess, notifyError } from '../../../services/notificationService';
|
import { notifySuccess, notifyError } from '../../../services/notificationService';
|
||||||
@@ -16,6 +16,11 @@ import {
|
|||||||
// Unmock the component to test the real implementation
|
// Unmock the component to test the real implementation
|
||||||
vi.unmock('./ProfileManager');
|
vi.unmock('./ProfileManager');
|
||||||
|
|
||||||
|
vi.mock('../../../components/PasswordInput', () => ({
|
||||||
|
// Mock the moved component
|
||||||
|
PasswordInput: (props: any) => <input {...props} data-testid="password-input" />,
|
||||||
|
}));
|
||||||
|
|
||||||
const mockedApiClient = vi.mocked(apiClient, true);
|
const mockedApiClient = vi.mocked(apiClient, true);
|
||||||
|
|
||||||
vi.mock('../../../services/notificationService');
|
vi.mock('../../../services/notificationService');
|
||||||
@@ -242,6 +247,17 @@ describe('ProfileManager', () => {
|
|||||||
expect(screen.queryByRole('heading', { name: /^sign in$/i })).not.toBeInTheDocument();
|
expect(screen.queryByRole('heading', { name: /^sign in$/i })).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should close the modal when clicking the backdrop', async () => {
|
||||||
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
|
// The backdrop is the element with role="dialog"
|
||||||
|
const backdrop = screen.getByRole('dialog');
|
||||||
|
fireEvent.click(backdrop);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockOnClose).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
it('should reset state when the modal is closed and reopened', async () => {
|
it('should reset state when the modal is closed and reopened', async () => {
|
||||||
const { rerender } = render(<ProfileManager {...defaultAuthenticatedProps} />);
|
const { rerender } = render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
await waitFor(() => expect(screen.getByLabelText(/full name/i)).toHaveValue('Test User'));
|
await waitFor(() => expect(screen.getByLabelText(/full name/i)).toHaveValue('Test User'));
|
||||||
@@ -308,6 +324,41 @@ describe('ProfileManager', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should handle partial success when saving profile and address', async () => {
|
||||||
|
const loggerSpy = vi.spyOn(logger.logger, 'warn');
|
||||||
|
// Mock profile update to succeed
|
||||||
|
mockedApiClient.updateUserProfile.mockResolvedValue(
|
||||||
|
new Response(JSON.stringify({ ...authenticatedProfile, full_name: 'New Name' })),
|
||||||
|
);
|
||||||
|
// Mock address update to fail (useApi will return null)
|
||||||
|
mockedApiClient.updateUserAddress.mockRejectedValue(new Error('Address update failed'));
|
||||||
|
|
||||||
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
|
await waitFor(() => expect(screen.getByLabelText(/city/i)).toHaveValue(mockAddress.city));
|
||||||
|
|
||||||
|
// Change both profile and address data
|
||||||
|
fireEvent.change(screen.getByLabelText(/full name/i), { target: { value: 'New Name' } });
|
||||||
|
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'NewCity' } });
|
||||||
|
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: /save profile/i }));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
// The useApi hook for the failed call will show its own error
|
||||||
|
expect(notifyError).toHaveBeenCalledWith('Address update failed');
|
||||||
|
// The profile update should still go through
|
||||||
|
expect(mockOnProfileUpdate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({ full_name: 'New Name' }),
|
||||||
|
);
|
||||||
|
// The specific warning for partial failure should be logged
|
||||||
|
expect(loggerSpy).toHaveBeenCalledWith(
|
||||||
|
'[handleProfileSave] One or more operations failed. The useApi hook should have shown an error. The modal will remain open.',
|
||||||
|
);
|
||||||
|
// The modal should remain open and no global success message shown
|
||||||
|
expect(mockOnClose).not.toHaveBeenCalled();
|
||||||
|
expect(notifySuccess).not.toHaveBeenCalledWith('Profile updated successfully!');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
it('should handle unexpected critical error during profile save', async () => {
|
it('should handle unexpected critical error during profile save', async () => {
|
||||||
const loggerSpy = vi.spyOn(logger.logger, 'error');
|
const loggerSpy = vi.spyOn(logger.logger, 'error');
|
||||||
mockedApiClient.updateUserProfile.mockRejectedValue(new Error('Catastrophic failure'));
|
mockedApiClient.updateUserProfile.mockRejectedValue(new Error('Catastrophic failure'));
|
||||||
@@ -324,6 +375,31 @@ describe('ProfileManager', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should handle unexpected Promise.allSettled rejection during save', async () => {
|
||||||
|
const allSettledSpy = vi
|
||||||
|
.spyOn(Promise, 'allSettled')
|
||||||
|
.mockRejectedValueOnce(new Error('AllSettled failed'));
|
||||||
|
const loggerSpy = vi.spyOn(logger.logger, 'error');
|
||||||
|
|
||||||
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
|
await waitFor(() => expect(screen.getByLabelText(/city/i)).toHaveValue(mockAddress.city));
|
||||||
|
|
||||||
|
fireEvent.change(screen.getByLabelText(/full name/i), { target: { value: 'New Name' } });
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: /save profile/i }));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(loggerSpy).toHaveBeenCalledWith(
|
||||||
|
{ err: new Error('AllSettled failed') },
|
||||||
|
"[CRITICAL] An unexpected error was caught directly in handleProfileSave's catch block.",
|
||||||
|
);
|
||||||
|
expect(notifyError).toHaveBeenCalledWith(
|
||||||
|
'An unexpected critical error occurred: AllSettled failed',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
allSettledSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
it('should show map view when address has coordinates', async () => {
|
it('should show map view when address has coordinates', async () => {
|
||||||
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
@@ -365,51 +441,52 @@ describe('ProfileManager', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should automatically geocode address after user stops typing', async () => {
|
it('should automatically geocode address after user stops typing (using fake timers)', async () => {
|
||||||
|
// Use fake timers for the entire test to control the debounce.
|
||||||
|
vi.useFakeTimers();
|
||||||
const addressWithoutCoords = { ...mockAddress, latitude: undefined, longitude: undefined };
|
const addressWithoutCoords = { ...mockAddress, latitude: undefined, longitude: undefined };
|
||||||
mockedApiClient.getUserAddress.mockResolvedValue(
|
mockedApiClient.getUserAddress.mockResolvedValue(
|
||||||
new Response(JSON.stringify(addressWithoutCoords)),
|
new Response(JSON.stringify(addressWithoutCoords)),
|
||||||
);
|
);
|
||||||
|
|
||||||
console.log('[TEST LOG] Rendering for automatic geocode test (Real Timers + Wait)');
|
|
||||||
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
|
|
||||||
console.log('[TEST LOG] Waiting for initial address load...');
|
// Wait for initial async address load to complete by flushing promises.
|
||||||
await waitFor(() => expect(screen.getByLabelText(/city/i)).toHaveValue('Anytown'));
|
await act(async () => {
|
||||||
|
await vi.runAllTimersAsync();
|
||||||
console.log('[TEST LOG] Initial address loaded. Changing city...');
|
});
|
||||||
|
expect(screen.getByLabelText(/city/i)).toHaveValue('Anytown');
|
||||||
|
|
||||||
// Change address, geocode should not be called immediately
|
// Change address, geocode should not be called immediately
|
||||||
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'NewCity' } });
|
fireEvent.change(screen.getByLabelText(/city/i), { target: { value: 'NewCity' } });
|
||||||
expect(mockedApiClient.geocodeAddress).not.toHaveBeenCalled();
|
expect(mockedApiClient.geocodeAddress).not.toHaveBeenCalled();
|
||||||
|
|
||||||
console.log('[TEST LOG] Waiting 1600ms for debounce...');
|
// Advance timers to fire the debounce and resolve the subsequent geocode promise.
|
||||||
// Wait for debounce (1500ms) + buffer using real timers to avoid freeze
|
|
||||||
await act(async () => {
|
await act(async () => {
|
||||||
await new Promise((resolve) => setTimeout(resolve, 1600));
|
await vi.runAllTimersAsync();
|
||||||
});
|
});
|
||||||
console.log('[TEST LOG] Wait complete. Checking results.');
|
|
||||||
|
|
||||||
await waitFor(() => {
|
// Now check the final result.
|
||||||
expect(mockedApiClient.geocodeAddress).toHaveBeenCalledWith(
|
expect(mockedApiClient.geocodeAddress).toHaveBeenCalledWith(
|
||||||
expect.stringContaining('NewCity'),
|
expect.stringContaining('NewCity'),
|
||||||
expect.anything(),
|
expect.anything(),
|
||||||
);
|
);
|
||||||
expect(toast.success).toHaveBeenCalledWith('Address geocoded successfully!');
|
expect(toast.success).toHaveBeenCalledWith('Address geocoded successfully!');
|
||||||
});
|
});
|
||||||
});
|
|
||||||
|
|
||||||
it('should not geocode if address already has coordinates', async () => {
|
it('should not geocode if address already has coordinates (using fake timers)', async () => {
|
||||||
console.log('[TEST LOG] Rendering for no-geocode test (Real Timers + Wait)');
|
// Use real timers for the initial async render and data fetch
|
||||||
|
vi.useRealTimers();
|
||||||
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
console.log('[TEST LOG] Waiting for initial address load...');
|
console.log('[TEST LOG] Waiting for initial address load...');
|
||||||
await waitFor(() => expect(screen.getByLabelText(/city/i)).toHaveValue('Anytown'));
|
await waitFor(() => expect(screen.getByLabelText(/city/i)).toHaveValue('Anytown'));
|
||||||
|
|
||||||
console.log(
|
// Switch to fake timers to control the debounce check
|
||||||
'[TEST LOG] Initial address loaded. Waiting 1600ms to ensure no geocode triggers...',
|
vi.useFakeTimers();
|
||||||
);
|
|
||||||
await act(async () => {
|
// Advance timers past the debounce threshold. Nothing should happen.
|
||||||
await new Promise((resolve) => setTimeout(resolve, 1600));
|
act(() => {
|
||||||
|
vi.advanceTimersByTime(1600);
|
||||||
});
|
});
|
||||||
console.log('[TEST LOG] Wait complete. Verifying no geocode call.');
|
console.log('[TEST LOG] Wait complete. Verifying no geocode call.');
|
||||||
|
|
||||||
@@ -434,6 +511,29 @@ describe('ProfileManager', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should switch between all tabs correctly', async () => {
|
||||||
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
|
|
||||||
|
// Initial state: Profile tab
|
||||||
|
expect(screen.getByLabelText('Profile Form')).toBeInTheDocument();
|
||||||
|
|
||||||
|
// Switch to Security
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: /security/i }));
|
||||||
|
expect(await screen.findByLabelText('New Password')).toBeInTheDocument();
|
||||||
|
|
||||||
|
// Switch to Data & Privacy
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: /data & privacy/i }));
|
||||||
|
expect(await screen.findByRole('heading', { name: /export your data/i })).toBeInTheDocument();
|
||||||
|
|
||||||
|
// Switch to Preferences
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: /preferences/i }));
|
||||||
|
expect(await screen.findByRole('heading', { name: /theme/i })).toBeInTheDocument();
|
||||||
|
|
||||||
|
// Switch back to Profile
|
||||||
|
fireEvent.click(screen.getByRole('button', { name: /^profile$/i }));
|
||||||
|
expect(await screen.findByLabelText('Profile Form')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
it('should show an error if password is too short', async () => {
|
it('should show an error if password is too short', async () => {
|
||||||
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
fireEvent.click(screen.getByRole('button', { name: /security/i }));
|
fireEvent.click(screen.getByRole('button', { name: /security/i }));
|
||||||
@@ -442,7 +542,7 @@ describe('ProfileManager', () => {
|
|||||||
fireEvent.change(screen.getByLabelText('Confirm New Password'), {
|
fireEvent.change(screen.getByLabelText('Confirm New Password'), {
|
||||||
target: { value: 'short' },
|
target: { value: 'short' },
|
||||||
});
|
});
|
||||||
fireEvent.submit(screen.getByTestId('update-password-form'));
|
fireEvent.submit(screen.getByTestId('update-password-form'), {});
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(notifyError).toHaveBeenCalledWith('Password must be at least 6 characters long.');
|
expect(notifyError).toHaveBeenCalledWith('Password must be at least 6 characters long.');
|
||||||
@@ -456,7 +556,7 @@ describe('ProfileManager', () => {
|
|||||||
fireEvent.click(screen.getByRole('button', { name: /data & privacy/i }));
|
fireEvent.click(screen.getByRole('button', { name: /data & privacy/i }));
|
||||||
fireEvent.click(screen.getByRole('button', { name: /delete my account/i }));
|
fireEvent.click(screen.getByRole('button', { name: /delete my account/i }));
|
||||||
|
|
||||||
fireEvent.change(screen.getByPlaceholderText(/enter your password/i), {
|
fireEvent.change(screen.getByTestId('password-input'), {
|
||||||
target: { value: 'password' },
|
target: { value: 'password' },
|
||||||
});
|
});
|
||||||
fireEvent.submit(screen.getByTestId('delete-account-form'));
|
fireEvent.submit(screen.getByTestId('delete-account-form'));
|
||||||
@@ -593,7 +693,7 @@ describe('ProfileManager', () => {
|
|||||||
fireEvent.change(screen.getByLabelText('Confirm New Password'), {
|
fireEvent.change(screen.getByLabelText('Confirm New Password'), {
|
||||||
target: { value: 'newpassword123' },
|
target: { value: 'newpassword123' },
|
||||||
});
|
});
|
||||||
fireEvent.submit(screen.getByTestId('update-password-form'));
|
fireEvent.submit(screen.getByTestId('update-password-form'), {});
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(mockedApiClient.updateUserPassword).toHaveBeenCalledWith(
|
expect(mockedApiClient.updateUserPassword).toHaveBeenCalledWith(
|
||||||
@@ -614,7 +714,7 @@ describe('ProfileManager', () => {
|
|||||||
fireEvent.change(screen.getByLabelText('Confirm New Password'), {
|
fireEvent.change(screen.getByLabelText('Confirm New Password'), {
|
||||||
target: { value: 'mismatch' },
|
target: { value: 'mismatch' },
|
||||||
});
|
});
|
||||||
fireEvent.submit(screen.getByTestId('update-password-form'));
|
fireEvent.submit(screen.getByTestId('update-password-form'), {});
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(notifyError).toHaveBeenCalledWith('Passwords do not match.');
|
expect(notifyError).toHaveBeenCalledWith('Passwords do not match.');
|
||||||
@@ -641,9 +741,10 @@ describe('ProfileManager', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should handle account deletion flow', async () => {
|
it('should handle account deletion flow', async () => {
|
||||||
// Use spy instead of fake timers to avoid blocking waitFor during async API calls
|
// Use fake timers to control the setTimeout call for the entire test.
|
||||||
const setTimeoutSpy = vi.spyOn(window, 'setTimeout');
|
vi.useFakeTimers();
|
||||||
const { unmount } = render(<ProfileManager {...defaultAuthenticatedProps} />);
|
|
||||||
|
render(<ProfileManager {...defaultAuthenticatedProps} />);
|
||||||
|
|
||||||
fireEvent.click(screen.getByRole('button', { name: /data & privacy/i }));
|
fireEvent.click(screen.getByRole('button', { name: /data & privacy/i }));
|
||||||
|
|
||||||
@@ -654,39 +755,28 @@ describe('ProfileManager', () => {
|
|||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
|
|
||||||
// Fill password and submit to open modal
|
// Fill password and submit to open modal
|
||||||
fireEvent.change(screen.getByPlaceholderText(/enter your password/i), {
|
fireEvent.change(screen.getByTestId('password-input'), {
|
||||||
target: { value: 'correctpassword' },
|
target: { value: 'correctpassword' },
|
||||||
});
|
});
|
||||||
fireEvent.submit(screen.getByTestId('delete-account-form'));
|
fireEvent.submit(screen.getByTestId('delete-account-form'));
|
||||||
|
|
||||||
// Confirm in the modal
|
// Confirm in the modal
|
||||||
const confirmButton = await screen.findByRole('button', { name: /yes, delete my account/i });
|
// Use getByRole since the modal appears synchronously after the form submit.
|
||||||
|
const confirmButton = screen.getByRole('button', { name: /yes, delete my account/i });
|
||||||
fireEvent.click(confirmButton);
|
fireEvent.click(confirmButton);
|
||||||
|
|
||||||
await waitFor(() => {
|
// The async deleteAccount call is now pending. We need to flush promises
|
||||||
expect(mockedApiClient.deleteUserAccount).toHaveBeenCalledWith(
|
// and then advance the timers to run the subsequent setTimeout.
|
||||||
'correctpassword',
|
// `runAllTimersAsync` will resolve pending promises and run timers recursively.
|
||||||
expect.objectContaining({ signal: expect.anything() }),
|
await act(async () => {
|
||||||
);
|
await vi.runAllTimersAsync();
|
||||||
expect(notifySuccess).toHaveBeenCalledWith(
|
|
||||||
'Account deleted successfully. You will be logged out shortly.',
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Verify setTimeout was called with 3000ms
|
|
||||||
const deletionTimeoutCall = setTimeoutSpy.mock.calls.find((call) => call[1] === 3000);
|
|
||||||
expect(deletionTimeoutCall).toBeDefined();
|
|
||||||
|
|
||||||
// Manually trigger the callback to verify cleanup
|
|
||||||
act(() => {
|
|
||||||
if (deletionTimeoutCall) (deletionTimeoutCall[0] as Function)();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Now that all timers and promises have been flushed, we can check the final state.
|
||||||
|
expect(mockedApiClient.deleteUserAccount).toHaveBeenCalled();
|
||||||
|
expect(notifySuccess).toHaveBeenCalled();
|
||||||
expect(mockOnClose).toHaveBeenCalled();
|
expect(mockOnClose).toHaveBeenCalled();
|
||||||
expect(mockOnSignOut).toHaveBeenCalled();
|
expect(mockOnSignOut).toHaveBeenCalled();
|
||||||
|
|
||||||
unmount();
|
|
||||||
setTimeoutSpy.mockRestore();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should allow toggling dark mode', async () => {
|
it('should allow toggling dark mode', async () => {
|
||||||
|
|||||||
@@ -9,8 +9,8 @@ import { LoadingSpinner } from '../../../components/LoadingSpinner';
|
|||||||
import { XMarkIcon } from '../../../components/icons/XMarkIcon';
|
import { XMarkIcon } from '../../../components/icons/XMarkIcon';
|
||||||
import { GoogleIcon } from '../../../components/icons/GoogleIcon';
|
import { GoogleIcon } from '../../../components/icons/GoogleIcon';
|
||||||
import { GithubIcon } from '../../../components/icons/GithubIcon';
|
import { GithubIcon } from '../../../components/icons/GithubIcon';
|
||||||
import { ConfirmationModal } from '../../../components/ConfirmationModal';
|
import { ConfirmationModal } from '../../../components/ConfirmationModal'; // This path is correct
|
||||||
import { PasswordInput } from './PasswordInput';
|
import { PasswordInput } from '../../../components/PasswordInput';
|
||||||
import { MapView } from '../../../components/MapView';
|
import { MapView } from '../../../components/MapView';
|
||||||
import type { AuthStatus } from '../../../hooks/useAuth';
|
import type { AuthStatus } from '../../../hooks/useAuth';
|
||||||
import { AuthView } from './AuthView';
|
import { AuthView } from './AuthView';
|
||||||
|
|||||||
55
src/providers/ApiProvider.test.tsx
Normal file
55
src/providers/ApiProvider.test.tsx
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
// src/providers/ApiProvider.test.tsx
|
||||||
|
import React, { useContext } from 'react';
|
||||||
|
import { render, screen } from '@testing-library/react';
|
||||||
|
import { describe, it, expect, vi } from 'vitest';
|
||||||
|
import { ApiProvider } from './ApiProvider';
|
||||||
|
import { ApiContext } from '../contexts/ApiContext';
|
||||||
|
import * as apiClient from '../services/apiClient';
|
||||||
|
|
||||||
|
// Mock the apiClient module.
|
||||||
|
// Since ApiProvider and ApiContext import * as apiClient, mocking it ensures
|
||||||
|
// we control the reference identity and can verify it's being passed correctly.
|
||||||
|
vi.mock('../services/apiClient', () => ({
|
||||||
|
fetchFlyers: vi.fn(),
|
||||||
|
fetchMasterItems: vi.fn(),
|
||||||
|
// Add other mocked methods as needed for the shape to be valid-ish
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe('ApiProvider & ApiContext', () => {
|
||||||
|
const TestConsumer = () => {
|
||||||
|
const contextValue = useContext(ApiContext);
|
||||||
|
// We check if the context value is strictly equal to the imported module
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<span data-testid="value-check">
|
||||||
|
{contextValue === apiClient ? 'Matches apiClient' : 'Does not match'}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
it('renders children correctly', () => {
|
||||||
|
render(
|
||||||
|
<ApiProvider>
|
||||||
|
<div data-testid="child">Child Content</div>
|
||||||
|
</ApiProvider>
|
||||||
|
);
|
||||||
|
expect(screen.getByTestId('child')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('Child Content')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('provides the apiClient module via context', () => {
|
||||||
|
render(
|
||||||
|
<ApiProvider>
|
||||||
|
<TestConsumer />
|
||||||
|
</ApiProvider>
|
||||||
|
);
|
||||||
|
expect(screen.getByTestId('value-check')).toHaveTextContent('Matches apiClient');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ApiContext has apiClient as the default value (when no provider is present)', () => {
|
||||||
|
// This verifies the logic in ApiContext.tsx: createContext(apiClient)
|
||||||
|
render(<TestConsumer />);
|
||||||
|
expect(screen.getByTestId('value-check')).toHaveTextContent('Matches apiClient');
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -4,6 +4,7 @@ import { AuthContext, AuthContextType } from '../contexts/AuthContext';
|
|||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
import * as apiClient from '../services/apiClient';
|
import * as apiClient from '../services/apiClient';
|
||||||
import { useApi } from '../hooks/useApi';
|
import { useApi } from '../hooks/useApi';
|
||||||
|
import { getToken, setToken, removeToken } from '../services/tokenStorage';
|
||||||
import { logger } from '../services/logger.client';
|
import { logger } from '../services/logger.client';
|
||||||
|
|
||||||
export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
||||||
@@ -14,7 +15,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
|
|||||||
// FIX: Stabilize the apiFunction passed to useApi.
|
// FIX: Stabilize the apiFunction passed to useApi.
|
||||||
// By wrapping this in useCallback, we ensure the same function instance is passed to
|
// By wrapping this in useCallback, we ensure the same function instance is passed to
|
||||||
// useApi on every render. This prevents the `execute` function returned by `useApi`
|
// useApi on every render. This prevents the `execute` function returned by `useApi`
|
||||||
// from being recreated, which in turn breaks the infinite re-render loop in the useEffect below.
|
// from being recreated, which in turn breaks the infinite re-render loop in the useEffect.
|
||||||
const getProfileCallback = useCallback(() => apiClient.getAuthenticatedUserProfile(), []);
|
const getProfileCallback = useCallback(() => apiClient.getAuthenticatedUserProfile(), []);
|
||||||
|
|
||||||
const { execute: checkTokenApi } = useApi<UserProfile, []>(getProfileCallback);
|
const { execute: checkTokenApi } = useApi<UserProfile, []>(getProfileCallback);
|
||||||
@@ -27,7 +28,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
|
|||||||
logger.info('[AuthProvider-Effect] Starting initial authentication check.');
|
logger.info('[AuthProvider-Effect] Starting initial authentication check.');
|
||||||
|
|
||||||
const checkAuthToken = async () => {
|
const checkAuthToken = async () => {
|
||||||
const token = localStorage.getItem('authToken');
|
const token = getToken();
|
||||||
if (token) {
|
if (token) {
|
||||||
logger.info('[AuthProvider-Effect] Found auth token. Validating...');
|
logger.info('[AuthProvider-Effect] Found auth token. Validating...');
|
||||||
try {
|
try {
|
||||||
@@ -41,7 +42,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
|
|||||||
logger.warn(
|
logger.warn(
|
||||||
'[AuthProvider-Effect] Token was present but validation returned no profile. Signing out.',
|
'[AuthProvider-Effect] Token was present but validation returned no profile. Signing out.',
|
||||||
);
|
);
|
||||||
localStorage.removeItem('authToken');
|
removeToken();
|
||||||
setUserProfile(null);
|
setUserProfile(null);
|
||||||
setAuthStatus('SIGNED_OUT');
|
setAuthStatus('SIGNED_OUT');
|
||||||
}
|
}
|
||||||
@@ -49,7 +50,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
|
|||||||
// This catch block is now primarily for unexpected errors, as useApi handles API errors.
|
// This catch block is now primarily for unexpected errors, as useApi handles API errors.
|
||||||
logger.warn('Auth token validation failed. Clearing token.', { error: e });
|
logger.warn('Auth token validation failed. Clearing token.', { error: e });
|
||||||
if (isMounted) {
|
if (isMounted) {
|
||||||
localStorage.removeItem('authToken');
|
removeToken();
|
||||||
setUserProfile(null);
|
setUserProfile(null);
|
||||||
setAuthStatus('SIGNED_OUT');
|
setAuthStatus('SIGNED_OUT');
|
||||||
}
|
}
|
||||||
@@ -79,7 +80,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
|
|||||||
|
|
||||||
const logout = useCallback(() => {
|
const logout = useCallback(() => {
|
||||||
logger.info('[AuthProvider-Logout] Clearing user data and auth token.');
|
logger.info('[AuthProvider-Logout] Clearing user data and auth token.');
|
||||||
localStorage.removeItem('authToken');
|
removeToken();
|
||||||
setUserProfile(null);
|
setUserProfile(null);
|
||||||
setAuthStatus('SIGNED_OUT');
|
setAuthStatus('SIGNED_OUT');
|
||||||
}, []);
|
}, []);
|
||||||
@@ -87,7 +88,7 @@ export const AuthProvider: React.FC<{ children: ReactNode }> = ({ children }) =>
|
|||||||
const login = useCallback(
|
const login = useCallback(
|
||||||
async (token: string, profileData?: UserProfile) => {
|
async (token: string, profileData?: UserProfile) => {
|
||||||
logger.info(`[AuthProvider-Login] Attempting login.`);
|
logger.info(`[AuthProvider-Login] Attempting login.`);
|
||||||
localStorage.setItem('authToken', token);
|
setToken(token);
|
||||||
|
|
||||||
if (profileData) {
|
if (profileData) {
|
||||||
// If profile is provided (e.g., from credential login), use it directly.
|
// If profile is provided (e.g., from credential login), use it directly.
|
||||||
|
|||||||
@@ -4,8 +4,12 @@ import { FlyersContext, FlyersContextType } from '../contexts/FlyersContext';
|
|||||||
import type { Flyer } from '../types';
|
import type { Flyer } from '../types';
|
||||||
import * as apiClient from '../services/apiClient';
|
import * as apiClient from '../services/apiClient';
|
||||||
import { useInfiniteQuery } from '../hooks/useInfiniteQuery';
|
import { useInfiniteQuery } from '../hooks/useInfiniteQuery';
|
||||||
|
import { useCallback } from 'react';
|
||||||
|
|
||||||
export const FlyersProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
export const FlyersProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
||||||
|
// Memoize the fetch function to ensure stability for the useInfiniteQuery hook.
|
||||||
|
const fetchFlyersFn = useCallback(apiClient.fetchFlyers, []);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
data: flyers,
|
data: flyers,
|
||||||
isLoading: isLoadingFlyers,
|
isLoading: isLoadingFlyers,
|
||||||
@@ -14,7 +18,7 @@ export const FlyersProvider: React.FC<{ children: ReactNode }> = ({ children })
|
|||||||
hasNextPage: hasNextFlyersPage,
|
hasNextPage: hasNextFlyersPage,
|
||||||
refetch: refetchFlyers,
|
refetch: refetchFlyers,
|
||||||
isRefetching: isRefetchingFlyers,
|
isRefetching: isRefetchingFlyers,
|
||||||
} = useInfiniteQuery<Flyer>(apiClient.fetchFlyers);
|
} = useInfiniteQuery<Flyer>(fetchFlyersFn);
|
||||||
|
|
||||||
const value: FlyersContextType = {
|
const value: FlyersContextType = {
|
||||||
flyers: flyers || [],
|
flyers: flyers || [],
|
||||||
|
|||||||
@@ -1,14 +1,22 @@
|
|||||||
// src/providers/MasterItemsProvider.tsx
|
// src/providers/MasterItemsProvider.tsx
|
||||||
import React, { ReactNode, useMemo } from 'react';
|
import React, { ReactNode, useMemo, useEffect, useCallback } from 'react';
|
||||||
import { MasterItemsContext } from '../contexts/MasterItemsContext';
|
import { MasterItemsContext } from '../contexts/MasterItemsContext';
|
||||||
import type { MasterGroceryItem } from '../types';
|
import type { MasterGroceryItem } from '../types';
|
||||||
import * as apiClient from '../services/apiClient';
|
import * as apiClient from '../services/apiClient';
|
||||||
import { useApiOnMount } from '../hooks/useApiOnMount';
|
import { useApiOnMount } from '../hooks/useApiOnMount';
|
||||||
|
import { logger } from '../services/logger.client';
|
||||||
|
|
||||||
export const MasterItemsProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
export const MasterItemsProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
||||||
const { data, loading, error } = useApiOnMount<MasterGroceryItem[], []>(() =>
|
// LOGGING: Check if the provider is unmounting/remounting repeatedly
|
||||||
apiClient.fetchMasterItems(),
|
useEffect(() => {
|
||||||
);
|
logger.debug('MasterItemsProvider: MOUNTED');
|
||||||
|
return () => logger.debug('MasterItemsProvider: UNMOUNTED');
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
// Memoize the fetch function to ensure stability for the useApiOnMount hook.
|
||||||
|
const fetchFn = useCallback(() => apiClient.fetchMasterItems(), []);
|
||||||
|
|
||||||
|
const { data, loading, error } = useApiOnMount<MasterGroceryItem[], []>(fetchFn);
|
||||||
|
|
||||||
const value = useMemo(
|
const value = useMemo(
|
||||||
() => ({
|
() => ({
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
// src/providers/UserDataProvider.tsx
|
// src/providers/UserDataProvider.tsx
|
||||||
import React, { useState, useEffect, useMemo, ReactNode } from 'react';
|
import { logger } from '../services/logger.client';
|
||||||
|
import React, { useState, useEffect, useMemo, ReactNode, useCallback } from 'react';
|
||||||
import { UserDataContext } from '../contexts/UserDataContext';
|
import { UserDataContext } from '../contexts/UserDataContext';
|
||||||
import type { MasterGroceryItem, ShoppingList } from '../types';
|
import type { MasterGroceryItem, ShoppingList } from '../types';
|
||||||
import * as apiClient from '../services/apiClient';
|
import * as apiClient from '../services/apiClient';
|
||||||
@@ -9,18 +10,25 @@ import { useAuth } from '../hooks/useAuth';
|
|||||||
export const UserDataProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
export const UserDataProvider: React.FC<{ children: ReactNode }> = ({ children }) => {
|
||||||
const { userProfile } = useAuth();
|
const { userProfile } = useAuth();
|
||||||
|
|
||||||
|
// Wrap the API calls in useCallback to prevent unnecessary re-renders.
|
||||||
|
const fetchWatchedItemsFn = useCallback(
|
||||||
|
() => apiClient.fetchWatchedItems(),
|
||||||
|
[],
|
||||||
|
);
|
||||||
|
const fetchShoppingListsFn = useCallback(() => apiClient.fetchShoppingLists(), []);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
data: watchedItemsData,
|
data: watchedItemsData,
|
||||||
loading: isLoadingWatched,
|
loading: isLoadingWatched,
|
||||||
error: watchedItemsError,
|
error: watchedItemsError,
|
||||||
} = useApiOnMount<MasterGroceryItem[], []>(() => apiClient.fetchWatchedItems(), [userProfile], {
|
} = useApiOnMount<MasterGroceryItem[], []>(fetchWatchedItemsFn, [userProfile], {
|
||||||
enabled: !!userProfile,
|
enabled: !!userProfile,
|
||||||
});
|
});
|
||||||
const {
|
const {
|
||||||
data: shoppingListsData,
|
data: shoppingListsData,
|
||||||
loading: isLoadingShoppingLists,
|
loading: isLoadingShoppingLists,
|
||||||
error: shoppingListsError,
|
error: shoppingListsError,
|
||||||
} = useApiOnMount<ShoppingList[], []>(() => apiClient.fetchShoppingLists(), [userProfile], {
|
} = useApiOnMount<ShoppingList[], []>(fetchShoppingListsFn, [userProfile], {
|
||||||
enabled: !!userProfile,
|
enabled: !!userProfile,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,19 +1,27 @@
|
|||||||
// src/routes/admin.content.routes.test.ts
|
// src/routes/admin.content.routes.test.ts
|
||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, afterAll } from 'vitest';
|
||||||
import supertest from 'supertest';
|
import supertest from 'supertest';
|
||||||
import type { Request, Response, NextFunction } from 'express';
|
import type { Request, Response, NextFunction } from 'express';
|
||||||
|
import path from 'path';
|
||||||
import {
|
import {
|
||||||
createMockUserProfile,
|
createMockUserProfile,
|
||||||
createMockSuggestedCorrection,
|
createMockSuggestedCorrection,
|
||||||
createMockBrand,
|
createMockBrand,
|
||||||
createMockRecipe,
|
createMockRecipe,
|
||||||
|
createMockFlyer,
|
||||||
createMockRecipeComment,
|
createMockRecipeComment,
|
||||||
createMockUnmatchedFlyerItem,
|
createMockUnmatchedFlyerItem,
|
||||||
} from '../tests/utils/mockFactories';
|
} from '../tests/utils/mockFactories';
|
||||||
import type { SuggestedCorrection, Brand, UserProfile, UnmatchedFlyerItem } from '../types';
|
import type { SuggestedCorrection, Brand, UserProfile, UnmatchedFlyerItem } from '../types';
|
||||||
import { NotFoundError } from '../services/db/errors.db'; // This can stay, it's a type/class not a module with side effects.
|
import { NotFoundError } from '../services/db/errors.db'; // This can stay, it's a type/class not a module with side effects.
|
||||||
|
import fs from 'node:fs/promises';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
import { cleanupFiles } from '../tests/utils/cleanupFiles';
|
||||||
|
|
||||||
|
// Mock the file upload middleware to allow testing the controller's internal check
|
||||||
|
vi.mock('../middleware/fileUpload.middleware', () => ({
|
||||||
|
requireFileUpload: () => (req: Request, res: Response, next: NextFunction) => next(),
|
||||||
|
}));
|
||||||
|
|
||||||
vi.mock('../lib/queue', () => ({
|
vi.mock('../lib/queue', () => ({
|
||||||
serverAdapter: {
|
serverAdapter: {
|
||||||
@@ -33,9 +41,11 @@ const { mockedDb } = vi.hoisted(() => {
|
|||||||
rejectCorrection: vi.fn(),
|
rejectCorrection: vi.fn(),
|
||||||
updateSuggestedCorrection: vi.fn(),
|
updateSuggestedCorrection: vi.fn(),
|
||||||
getUnmatchedFlyerItems: vi.fn(),
|
getUnmatchedFlyerItems: vi.fn(),
|
||||||
|
getFlyersForReview: vi.fn(), // Added for flyer review tests
|
||||||
updateRecipeStatus: vi.fn(),
|
updateRecipeStatus: vi.fn(),
|
||||||
updateRecipeCommentStatus: vi.fn(),
|
updateRecipeCommentStatus: vi.fn(),
|
||||||
updateBrandLogo: vi.fn(),
|
updateBrandLogo: vi.fn(),
|
||||||
|
getApplicationStats: vi.fn(),
|
||||||
},
|
},
|
||||||
flyerRepo: {
|
flyerRepo: {
|
||||||
getAllBrands: vi.fn(),
|
getAllBrands: vi.fn(),
|
||||||
@@ -68,10 +78,12 @@ vi.mock('node:fs/promises', () => ({
|
|||||||
// Named exports
|
// Named exports
|
||||||
writeFile: vi.fn().mockResolvedValue(undefined),
|
writeFile: vi.fn().mockResolvedValue(undefined),
|
||||||
unlink: vi.fn().mockResolvedValue(undefined),
|
unlink: vi.fn().mockResolvedValue(undefined),
|
||||||
|
mkdir: vi.fn().mockResolvedValue(undefined),
|
||||||
// FIX: Add default export to handle `import fs from ...` syntax.
|
// FIX: Add default export to handle `import fs from ...` syntax.
|
||||||
default: {
|
default: {
|
||||||
writeFile: vi.fn().mockResolvedValue(undefined),
|
writeFile: vi.fn().mockResolvedValue(undefined),
|
||||||
unlink: vi.fn().mockResolvedValue(undefined),
|
unlink: vi.fn().mockResolvedValue(undefined),
|
||||||
|
mkdir: vi.fn().mockResolvedValue(undefined),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
vi.mock('../services/backgroundJobService');
|
vi.mock('../services/backgroundJobService');
|
||||||
@@ -91,8 +103,9 @@ vi.mock('@bull-board/express', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
@@ -125,16 +138,30 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
|||||||
authenticatedUser: adminUser,
|
authenticatedUser: adminUser,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
|
||||||
});
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
// Safeguard to clean up any logo files created during tests.
|
||||||
|
const uploadDir = path.resolve(__dirname, '../../../flyer-images');
|
||||||
|
try {
|
||||||
|
const allFiles = await fs.readdir(uploadDir);
|
||||||
|
// Files are named like 'logoImage-timestamp-original.ext'
|
||||||
|
const testFiles = allFiles
|
||||||
|
.filter((f) => f.startsWith('logoImage-'))
|
||||||
|
.map((f) => path.join(uploadDir, f));
|
||||||
|
|
||||||
|
if (testFiles.length > 0) {
|
||||||
|
await cleanupFiles(testFiles);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof Error && (error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||||
|
console.error('Error during admin content test file cleanup:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
describe('Corrections Routes', () => {
|
describe('Corrections Routes', () => {
|
||||||
it('GET /corrections should return corrections data', async () => {
|
it('GET /corrections should return corrections data', async () => {
|
||||||
const mockCorrections: SuggestedCorrection[] = [
|
const mockCorrections: SuggestedCorrection[] = [
|
||||||
@@ -225,6 +252,39 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('Flyer Review Routes', () => {
|
||||||
|
it('GET /review/flyers should return flyers for review', async () => {
|
||||||
|
const mockFlyers = [
|
||||||
|
createMockFlyer({ flyer_id: 1, status: 'needs_review' }),
|
||||||
|
createMockFlyer({ flyer_id: 2, status: 'needs_review' }),
|
||||||
|
];
|
||||||
|
vi.mocked(mockedDb.adminRepo.getFlyersForReview).mockResolvedValue(mockFlyers);
|
||||||
|
const response = await supertest(app).get('/api/admin/review/flyers');
|
||||||
|
expect(response.status).toBe(200);
|
||||||
|
expect(response.body).toEqual(mockFlyers);
|
||||||
|
expect(vi.mocked(mockedDb.adminRepo.getFlyersForReview)).toHaveBeenCalledWith(
|
||||||
|
expect.anything(),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('GET /review/flyers should return 500 on DB error', async () => {
|
||||||
|
vi.mocked(mockedDb.adminRepo.getFlyersForReview).mockRejectedValue(new Error('DB Error'));
|
||||||
|
const response = await supertest(app).get('/api/admin/review/flyers');
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
expect(response.body.message).toBe('DB Error');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Stats Routes', () => {
|
||||||
|
// This test covers the error path for GET /stats
|
||||||
|
it('GET /stats should return 500 on DB error', async () => {
|
||||||
|
vi.mocked(mockedDb.adminRepo.getApplicationStats).mockRejectedValue(new Error('DB Error'));
|
||||||
|
const response = await supertest(app).get('/api/admin/stats');
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
expect(response.body.message).toBe('DB Error');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe('Brand Routes', () => {
|
describe('Brand Routes', () => {
|
||||||
it('GET /brands should return a list of all brands', async () => {
|
it('GET /brands should return a list of all brands', async () => {
|
||||||
const mockBrands: Brand[] = [createMockBrand({ brand_id: 1, name: 'Brand A' })];
|
const mockBrands: Brand[] = [createMockBrand({ brand_id: 1, name: 'Brand A' })];
|
||||||
@@ -244,7 +304,7 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
|||||||
expect(response.body.message).toBe('Brand logo updated successfully.');
|
expect(response.body.message).toBe('Brand logo updated successfully.');
|
||||||
expect(vi.mocked(mockedDb.adminRepo.updateBrandLogo)).toHaveBeenCalledWith(
|
expect(vi.mocked(mockedDb.adminRepo.updateBrandLogo)).toHaveBeenCalledWith(
|
||||||
brandId,
|
brandId,
|
||||||
expect.stringContaining('/assets/'),
|
expect.stringContaining('/flyer-images/'),
|
||||||
expect.anything(),
|
expect.anything(),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -262,10 +322,36 @@ describe('Admin Content Management Routes (/api/admin)', () => {
|
|||||||
const response = await supertest(app).post('/api/admin/brands/55/logo');
|
const response = await supertest(app).post('/api/admin/brands/55/logo');
|
||||||
expect(response.status).toBe(400);
|
expect(response.status).toBe(400);
|
||||||
expect(response.body.message).toMatch(
|
expect(response.body.message).toMatch(
|
||||||
/Logo image file is required|The request data is invalid/,
|
/Logo image file is required|The request data is invalid|Logo image file is missing./,
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should clean up the uploaded file if updating the brand logo fails', async () => {
|
||||||
|
const brandId = 55;
|
||||||
|
const dbError = new Error('DB Connection Failed');
|
||||||
|
vi.mocked(mockedDb.adminRepo.updateBrandLogo).mockRejectedValue(dbError);
|
||||||
|
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post(`/api/admin/brands/${brandId}/logo`)
|
||||||
|
.attach('logoImage', Buffer.from('dummy-logo-content'), 'test-logo.png');
|
||||||
|
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
// Verify that the cleanup function was called via the mocked fs module
|
||||||
|
expect(fs.unlink).toHaveBeenCalledTimes(1);
|
||||||
|
// The filename is predictable because of the multer config in admin.routes.ts
|
||||||
|
expect(fs.unlink).toHaveBeenCalledWith(expect.stringContaining('logoImage-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
it('POST /brands/:id/logo should return 400 if a non-image file is uploaded', async () => {
|
||||||
|
const brandId = 55;
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post(`/api/admin/brands/${brandId}/logo`)
|
||||||
|
.attach('logoImage', Buffer.from('this is not an image'), 'document.txt');
|
||||||
|
expect(response.status).toBe(400);
|
||||||
|
// This message comes from the handleMulterError middleware for the imageFileFilter
|
||||||
|
expect(response.body.message).toBe('Only image files are allowed!');
|
||||||
|
});
|
||||||
|
|
||||||
it('POST /brands/:id/logo should return 400 for an invalid brand ID', async () => {
|
it('POST /brands/:id/logo should return 400 for an invalid brand ID', async () => {
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/admin/brands/abc/logo')
|
.post('/api/admin/brands/abc/logo')
|
||||||
|
|||||||
@@ -6,12 +6,13 @@ import { createMockUserProfile } from '../tests/utils/mockFactories';
|
|||||||
import type { Job } from 'bullmq';
|
import type { Job } from 'bullmq';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// Mock the background job service to control its methods.
|
// Mock the background job service to control its methods.
|
||||||
vi.mock('../services/backgroundJobService', () => ({
|
vi.mock('../services/backgroundJobService', () => ({
|
||||||
backgroundJobService: {
|
backgroundJobService: {
|
||||||
runDailyDealCheck: vi.fn(),
|
runDailyDealCheck: vi.fn(),
|
||||||
|
triggerAnalyticsReport: vi.fn(),
|
||||||
|
triggerWeeklyAnalyticsReport: vi.fn(),
|
||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
@@ -66,8 +67,9 @@ import {
|
|||||||
} from '../services/queueService.server';
|
} from '../services/queueService.server';
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
@@ -97,12 +99,6 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
|||||||
authenticatedUser: adminUser,
|
authenticatedUser: adminUser,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
|
||||||
});
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
@@ -148,22 +144,17 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
|||||||
|
|
||||||
describe('POST /trigger/analytics-report', () => {
|
describe('POST /trigger/analytics-report', () => {
|
||||||
it('should trigger the analytics report job and return 202 Accepted', async () => {
|
it('should trigger the analytics report job and return 202 Accepted', async () => {
|
||||||
const mockJob = { id: 'manual-report-job-123' } as Job;
|
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockResolvedValue('manual-report-job-123');
|
||||||
vi.mocked(analyticsQueue.add).mockResolvedValue(mockJob);
|
|
||||||
|
|
||||||
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
|
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
|
||||||
|
|
||||||
expect(response.status).toBe(202);
|
expect(response.status).toBe(202);
|
||||||
expect(response.body.message).toContain('Analytics report generation job has been enqueued');
|
expect(response.body.message).toContain('Analytics report generation job has been enqueued');
|
||||||
expect(analyticsQueue.add).toHaveBeenCalledWith(
|
expect(backgroundJobService.triggerAnalyticsReport).toHaveBeenCalledTimes(1);
|
||||||
'generate-daily-report',
|
|
||||||
expect.objectContaining({ reportDate: expect.any(String) }),
|
|
||||||
expect.any(Object),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 500 if enqueuing the analytics job fails', async () => {
|
it('should return 500 if enqueuing the analytics job fails', async () => {
|
||||||
vi.mocked(analyticsQueue.add).mockRejectedValue(new Error('Queue error'));
|
vi.mocked(backgroundJobService.triggerAnalyticsReport).mockRejectedValue(new Error('Queue error'));
|
||||||
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
|
const response = await supertest(app).post('/api/admin/trigger/analytics-report');
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
});
|
});
|
||||||
@@ -171,22 +162,17 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
|||||||
|
|
||||||
describe('POST /trigger/weekly-analytics', () => {
|
describe('POST /trigger/weekly-analytics', () => {
|
||||||
it('should trigger the weekly analytics job and return 202 Accepted', async () => {
|
it('should trigger the weekly analytics job and return 202 Accepted', async () => {
|
||||||
const mockJob = { id: 'manual-weekly-report-job-123' } as Job;
|
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockResolvedValue('manual-weekly-report-job-123');
|
||||||
vi.mocked(weeklyAnalyticsQueue.add).mockResolvedValue(mockJob);
|
|
||||||
|
|
||||||
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
|
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
|
||||||
|
|
||||||
expect(response.status).toBe(202);
|
expect(response.status).toBe(202);
|
||||||
expect(response.body.message).toContain('Successfully enqueued weekly analytics job');
|
expect(response.body.message).toContain('Successfully enqueued weekly analytics job');
|
||||||
expect(weeklyAnalyticsQueue.add).toHaveBeenCalledWith(
|
expect(backgroundJobService.triggerWeeklyAnalyticsReport).toHaveBeenCalledTimes(1);
|
||||||
'generate-weekly-report',
|
|
||||||
expect.objectContaining({ reportYear: expect.any(Number), reportWeek: expect.any(Number) }),
|
|
||||||
expect.any(Object),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 500 if enqueuing the weekly analytics job fails', async () => {
|
it('should return 500 if enqueuing the weekly analytics job fails', async () => {
|
||||||
vi.mocked(weeklyAnalyticsQueue.add).mockRejectedValue(new Error('Queue error'));
|
vi.mocked(backgroundJobService.triggerWeeklyAnalyticsReport).mockRejectedValue(new Error('Queue error'));
|
||||||
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
|
const response = await supertest(app).post('/api/admin/trigger/weekly-analytics');
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
});
|
});
|
||||||
@@ -248,6 +234,19 @@ describe('Admin Job Trigger Routes (/api/admin/trigger)', () => {
|
|||||||
expect(response.status).toBe(400);
|
expect(response.status).toBe(400);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should return 404 if the job ID is not found in the weekly-analytics-reporting queue', async () => {
|
||||||
|
const queueName = 'weekly-analytics-reporting';
|
||||||
|
const jobId = 'some-job-id';
|
||||||
|
|
||||||
|
// Ensure getJob returns undefined (not found)
|
||||||
|
vi.mocked(weeklyAnalyticsQueue.getJob).mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
const response = await supertest(app).post(`/api/admin/jobs/${queueName}/${jobId}/retry`);
|
||||||
|
|
||||||
|
expect(response.status).toBe(404);
|
||||||
|
expect(response.body.message).toBe(`Job with ID '${jobId}' not found in queue '${queueName}'.`);
|
||||||
|
});
|
||||||
|
|
||||||
it('should return 404 if the job ID is not found in the queue', async () => {
|
it('should return 404 if the job ID is not found in the queue', async () => {
|
||||||
vi.mocked(flyerQueue.getJob).mockResolvedValue(undefined);
|
vi.mocked(flyerQueue.getJob).mockResolvedValue(undefined);
|
||||||
const response = await supertest(app).post(
|
const response = await supertest(app).post(
|
||||||
|
|||||||
@@ -5,7 +5,16 @@ import type { Request, Response, NextFunction } from 'express';
|
|||||||
import { createMockUserProfile, createMockActivityLogItem } from '../tests/utils/mockFactories';
|
import { createMockUserProfile, createMockActivityLogItem } from '../tests/utils/mockFactories';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
const { mockLogger } = vi.hoisted(() => ({
|
||||||
|
mockLogger: {
|
||||||
|
info: vi.fn(),
|
||||||
|
warn: vi.fn(),
|
||||||
|
error: vi.fn(),
|
||||||
|
debug: vi.fn(),
|
||||||
|
child: vi.fn().mockReturnThis(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
vi.mock('../lib/queue', () => ({
|
vi.mock('../lib/queue', () => ({
|
||||||
serverAdapter: {
|
serverAdapter: {
|
||||||
@@ -27,19 +36,22 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
notificationRepo: {},
|
notificationRepo: {},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the queue service to control worker statuses
|
// Mock the queue service for queue status checks
|
||||||
vi.mock('../services/queueService.server', () => ({
|
vi.mock('../services/queueService.server', () => ({
|
||||||
|
flyerQueue: { name: 'flyer-processing', getJobCounts: vi.fn() },
|
||||||
|
emailQueue: { name: 'email-sending', getJobCounts: vi.fn() },
|
||||||
|
analyticsQueue: { name: 'analytics-reporting', getJobCounts: vi.fn() },
|
||||||
|
cleanupQueue: { name: 'file-cleanup', getJobCounts: vi.fn() },
|
||||||
|
weeklyAnalyticsQueue: { name: 'weekly-analytics-reporting', getJobCounts: vi.fn() },
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock the worker service for worker status checks
|
||||||
|
vi.mock('../services/workers.server', () => ({
|
||||||
flyerWorker: { name: 'flyer-processing', isRunning: vi.fn() },
|
flyerWorker: { name: 'flyer-processing', isRunning: vi.fn() },
|
||||||
emailWorker: { name: 'email-sending', isRunning: vi.fn() },
|
emailWorker: { name: 'email-sending', isRunning: vi.fn() },
|
||||||
analyticsWorker: { name: 'analytics-reporting', isRunning: vi.fn() },
|
analyticsWorker: { name: 'analytics-reporting', isRunning: vi.fn() },
|
||||||
cleanupWorker: { name: 'file-cleanup', isRunning: vi.fn() },
|
cleanupWorker: { name: 'file-cleanup', isRunning: vi.fn() },
|
||||||
weeklyAnalyticsWorker: { name: 'weekly-analytics-reporting', isRunning: vi.fn() },
|
weeklyAnalyticsWorker: { name: 'weekly-analytics-reporting', isRunning: vi.fn() },
|
||||||
flyerQueue: { name: 'flyer-processing', getJobCounts: vi.fn() },
|
|
||||||
emailQueue: { name: 'email-sending', getJobCounts: vi.fn() },
|
|
||||||
analyticsQueue: { name: 'analytics-reporting', getJobCounts: vi.fn() },
|
|
||||||
cleanupQueue: { name: 'file-cleanup', getJobCounts: vi.fn() },
|
|
||||||
// FIX: Add the missing weeklyAnalyticsQueue to prevent import errors in admin.routes.ts
|
|
||||||
weeklyAnalyticsQueue: { name: 'weekly-analytics-reporting', getJobCounts: vi.fn() },
|
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock other dependencies that are part of the adminRouter setup but not directly tested here
|
// Mock other dependencies that are part of the adminRouter setup but not directly tested here
|
||||||
@@ -67,8 +79,10 @@ import adminRouter from './admin.routes';
|
|||||||
|
|
||||||
// Import the mocked modules to control them
|
// Import the mocked modules to control them
|
||||||
import * as queueService from '../services/queueService.server';
|
import * as queueService from '../services/queueService.server';
|
||||||
|
import * as workerService from '../services/workers.server';
|
||||||
import { adminRepo } from '../services/db/index.db';
|
import { adminRepo } from '../services/db/index.db';
|
||||||
const mockedQueueService = queueService as Mocked<typeof queueService>;
|
const mockedQueueService = queueService as Mocked<typeof queueService>;
|
||||||
|
const mockedWorkerService = workerService as Mocked<typeof workerService>;
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', () => ({
|
||||||
@@ -102,12 +116,6 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
|||||||
authenticatedUser: adminUser,
|
authenticatedUser: adminUser,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
|
||||||
});
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
@@ -143,11 +151,11 @@ describe('Admin Monitoring Routes (/api/admin)', () => {
|
|||||||
describe('GET /workers/status', () => {
|
describe('GET /workers/status', () => {
|
||||||
it('should return the status of all registered workers', async () => {
|
it('should return the status of all registered workers', async () => {
|
||||||
// Arrange: Set the mock status for each worker
|
// Arrange: Set the mock status for each worker
|
||||||
vi.mocked(mockedQueueService.flyerWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.flyerWorker.isRunning).mockReturnValue(true);
|
||||||
vi.mocked(mockedQueueService.emailWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.emailWorker.isRunning).mockReturnValue(true);
|
||||||
vi.mocked(mockedQueueService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
|
vi.mocked(mockedWorkerService.analyticsWorker.isRunning).mockReturnValue(false); // Simulate one worker being stopped
|
||||||
vi.mocked(mockedQueueService.cleanupWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.cleanupWorker.isRunning).mockReturnValue(true);
|
||||||
vi.mocked(mockedQueueService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);
|
vi.mocked(mockedWorkerService.weeklyAnalyticsWorker.isRunning).mockReturnValue(true);
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app).get('/api/admin/workers/status');
|
const response = await supertest(app).get('/api/admin/workers/status');
|
||||||
|
|||||||
@@ -2,14 +2,17 @@
|
|||||||
import { Router, NextFunction, Request, Response } from 'express';
|
import { Router, NextFunction, Request, Response } from 'express';
|
||||||
import passport from './passport.routes';
|
import passport from './passport.routes';
|
||||||
import { isAdmin } from './passport.routes'; // Correctly imported
|
import { isAdmin } from './passport.routes'; // Correctly imported
|
||||||
import multer from 'multer'; // --- Zod Schemas for Admin Routes (as per ADR-003) ---
|
import multer from 'multer';
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
import * as db from '../services/db/index.db';
|
import * as db from '../services/db/index.db';
|
||||||
import { logger } from '../services/logger.server';
|
import type { UserProfile } from '../types';
|
||||||
import { UserProfile } from '../types';
|
|
||||||
import { geocodingService } from '../services/geocodingService.server';
|
import { geocodingService } from '../services/geocodingService.server';
|
||||||
import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
|
import { requireFileUpload } from '../middleware/fileUpload.middleware'; // This was a duplicate, fixed.
|
||||||
|
import {
|
||||||
|
createUploadMiddleware,
|
||||||
|
handleMulterError,
|
||||||
|
} from '../middleware/multer.middleware';
|
||||||
import { NotFoundError, ValidationError } from '../services/db/errors.db';
|
import { NotFoundError, ValidationError } from '../services/db/errors.db';
|
||||||
import { validateRequest } from '../middleware/validation.middleware';
|
import { validateRequest } from '../middleware/validation.middleware';
|
||||||
|
|
||||||
@@ -17,61 +20,35 @@ import { validateRequest } from '../middleware/validation.middleware';
|
|||||||
import { createBullBoard } from '@bull-board/api';
|
import { createBullBoard } from '@bull-board/api';
|
||||||
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter';
|
import { BullMQAdapter } from '@bull-board/api/bullMQAdapter';
|
||||||
import { ExpressAdapter } from '@bull-board/express';
|
import { ExpressAdapter } from '@bull-board/express';
|
||||||
|
|
||||||
import type { Queue } from 'bullmq';
|
|
||||||
import { backgroundJobService } from '../services/backgroundJobService';
|
import { backgroundJobService } from '../services/backgroundJobService';
|
||||||
import {
|
import { flyerQueue, emailQueue, analyticsQueue, cleanupQueue, weeklyAnalyticsQueue } from '../services/queueService.server';
|
||||||
flyerQueue,
|
|
||||||
emailQueue,
|
|
||||||
analyticsQueue,
|
|
||||||
cleanupQueue,
|
|
||||||
weeklyAnalyticsQueue,
|
|
||||||
flyerWorker,
|
|
||||||
emailWorker,
|
|
||||||
analyticsWorker,
|
|
||||||
cleanupWorker,
|
|
||||||
weeklyAnalyticsWorker,
|
|
||||||
} from '../services/queueService.server'; // Import your queues
|
|
||||||
import { getSimpleWeekAndYear } from '../utils/dateUtils';
|
import { getSimpleWeekAndYear } from '../utils/dateUtils';
|
||||||
|
import {
|
||||||
|
requiredString,
|
||||||
|
numericIdParam,
|
||||||
|
uuidParamSchema,
|
||||||
|
optionalNumeric,
|
||||||
|
optionalString,
|
||||||
|
} from '../utils/zodUtils';
|
||||||
|
import { logger } from '../services/logger.server'; // This was a duplicate, fixed.
|
||||||
|
import { monitoringService } from '../services/monitoringService.server';
|
||||||
|
import { userService } from '../services/userService';
|
||||||
|
import { cleanupUploadedFile } from '../utils/fileUtils';
|
||||||
|
import { brandService } from '../services/brandService';
|
||||||
|
|
||||||
// Helper for consistent required string validation (handles missing/null/empty)
|
const updateCorrectionSchema = numericIdParam('id').extend({
|
||||||
const requiredString = (message: string) =>
|
|
||||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A factory for creating a Zod schema that validates a UUID in the request parameters.
|
|
||||||
* @param key The name of the parameter key (e.g., 'userId').
|
|
||||||
* @param message A custom error message for invalid UUIDs.
|
|
||||||
*/
|
|
||||||
const uuidParamSchema = (key: string, message = `Invalid UUID for parameter '${key}'.`) =>
|
|
||||||
z.object({
|
|
||||||
params: z.object({ [key]: z.string().uuid({ message }) }),
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A factory for creating a Zod schema that validates a numeric ID in the request parameters.
|
|
||||||
*/
|
|
||||||
const numericIdParamSchema = (
|
|
||||||
key: string,
|
|
||||||
message = `Invalid ID for parameter '${key}'. Must be a positive integer.`,
|
|
||||||
) =>
|
|
||||||
z.object({
|
|
||||||
params: z.object({ [key]: z.coerce.number().int({ message }).positive({ message }) }),
|
|
||||||
});
|
|
||||||
|
|
||||||
const updateCorrectionSchema = numericIdParamSchema('id').extend({
|
|
||||||
body: z.object({
|
body: z.object({
|
||||||
suggested_value: requiredString('A new suggested_value is required.'),
|
suggested_value: z.string().trim().min(1, 'A new suggested_value is required.'),
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
const updateRecipeStatusSchema = numericIdParamSchema('id').extend({
|
const updateRecipeStatusSchema = numericIdParam('id').extend({
|
||||||
body: z.object({
|
body: z.object({
|
||||||
status: z.enum(['private', 'pending_review', 'public', 'rejected']),
|
status: z.enum(['private', 'pending_review', 'public', 'rejected']),
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
const updateCommentStatusSchema = numericIdParamSchema('id').extend({
|
const updateCommentStatusSchema = numericIdParam('id').extend({
|
||||||
body: z.object({
|
body: z.object({
|
||||||
status: z.enum(['visible', 'hidden', 'reported']),
|
status: z.enum(['visible', 'hidden', 'reported']),
|
||||||
}),
|
}),
|
||||||
@@ -85,8 +62,8 @@ const updateUserRoleSchema = uuidParamSchema('id', 'A valid user ID is required.
|
|||||||
|
|
||||||
const activityLogSchema = z.object({
|
const activityLogSchema = z.object({
|
||||||
query: z.object({
|
query: z.object({
|
||||||
limit: z.coerce.number().int().positive().optional().default(50),
|
limit: optionalNumeric({ default: 50, integer: true, positive: true }),
|
||||||
offset: z.coerce.number().int().nonnegative().optional().default(0),
|
offset: optionalNumeric({ default: 0, integer: true, nonnegative: true }),
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -99,25 +76,19 @@ const jobRetrySchema = z.object({
|
|||||||
'file-cleanup',
|
'file-cleanup',
|
||||||
'weekly-analytics-reporting',
|
'weekly-analytics-reporting',
|
||||||
]),
|
]),
|
||||||
jobId: requiredString('A valid Job ID is required.'),
|
jobId: z.string().trim().min(1, 'A valid Job ID is required.'),
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const emptySchema = z.object({});
|
||||||
|
|
||||||
const router = Router();
|
const router = Router();
|
||||||
|
|
||||||
// --- Multer Configuration for File Uploads ---
|
const brandLogoUpload = createUploadMiddleware({
|
||||||
const storagePath =
|
storageType: 'flyer', // Using flyer storage path is acceptable for brand logos.
|
||||||
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
fileSize: 2 * 1024 * 1024, // 2MB limit for logos
|
||||||
const storage = multer.diskStorage({
|
fileFilter: 'image',
|
||||||
destination: function (req, file, cb) {
|
|
||||||
cb(null, storagePath);
|
|
||||||
},
|
|
||||||
filename: function (req, file, cb) {
|
|
||||||
const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1e9);
|
|
||||||
cb(null, file.fieldname + '-' + uniqueSuffix + '-' + file.originalname);
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
const upload = multer({ storage: storage });
|
|
||||||
|
|
||||||
// --- Bull Board (Job Queue UI) Setup ---
|
// --- Bull Board (Job Queue UI) Setup ---
|
||||||
const serverAdapter = new ExpressAdapter();
|
const serverAdapter = new ExpressAdapter();
|
||||||
@@ -149,52 +120,69 @@ router.use(passport.authenticate('jwt', { session: false }), isAdmin);
|
|||||||
|
|
||||||
// --- Admin Routes ---
|
// --- Admin Routes ---
|
||||||
|
|
||||||
router.get('/corrections', async (req, res, next: NextFunction) => {
|
router.get('/corrections', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
|
const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
|
||||||
res.json(corrections);
|
res.json(corrections);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching suggested corrections');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
router.get('/brands', async (req, res, next: NextFunction) => {
|
router.get('/review/flyers', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||||
|
try {
|
||||||
|
req.log.debug('Fetching flyers for review via adminRepo');
|
||||||
|
const flyers = await db.adminRepo.getFlyersForReview(req.log);
|
||||||
|
req.log.info({ count: Array.isArray(flyers) ? flyers.length : 'unknown' }, 'Successfully fetched flyers for review');
|
||||||
|
res.json(flyers);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching flyers for review');
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
router.get('/brands', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
const brands = await db.flyerRepo.getAllBrands(req.log);
|
const brands = await db.flyerRepo.getAllBrands(req.log);
|
||||||
res.json(brands);
|
res.json(brands);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching brands');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
router.get('/stats', async (req, res, next: NextFunction) => {
|
router.get('/stats', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
const stats = await db.adminRepo.getApplicationStats(req.log);
|
const stats = await db.adminRepo.getApplicationStats(req.log);
|
||||||
res.json(stats);
|
res.json(stats);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching application stats');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
router.get('/stats/daily', async (req, res, next: NextFunction) => {
|
router.get('/stats/daily', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
|
const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
|
||||||
res.json(dailyStats);
|
res.json(dailyStats);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching daily stats');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
router.post(
|
router.post(
|
||||||
'/corrections/:id/approve',
|
'/corrections/:id/approve',
|
||||||
validateRequest(numericIdParamSchema('id')),
|
validateRequest(numericIdParam('id')),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
// Apply ADR-003 pattern for type safety
|
// Apply ADR-003 pattern for type safety
|
||||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||||
try {
|
try {
|
||||||
await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
|
await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
|
||||||
res.status(200).json({ message: 'Correction approved successfully.' });
|
res.status(200).json({ message: 'Correction approved successfully.' });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error approving correction');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -202,14 +190,15 @@ router.post(
|
|||||||
|
|
||||||
router.post(
|
router.post(
|
||||||
'/corrections/:id/reject',
|
'/corrections/:id/reject',
|
||||||
validateRequest(numericIdParamSchema('id')),
|
validateRequest(numericIdParam('id')),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
// Apply ADR-003 pattern for type safety
|
// Apply ADR-003 pattern for type safety
|
||||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||||
try {
|
try {
|
||||||
await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
|
await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
|
||||||
res.status(200).json({ message: 'Correction rejected successfully.' });
|
res.status(200).json({ message: 'Correction rejected successfully.' });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error rejecting correction');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -229,6 +218,7 @@ router.put(
|
|||||||
);
|
);
|
||||||
res.status(200).json(updatedCorrection);
|
res.status(200).json(updatedCorrection);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error updating suggested correction');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -244,6 +234,7 @@ router.put(
|
|||||||
const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
|
const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
|
||||||
res.status(200).json(updatedRecipe);
|
res.status(200).json(updatedRecipe);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error updating recipe status');
|
||||||
next(error); // Pass all errors to the central error handler
|
next(error); // Pass all errors to the central error handler
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -251,34 +242,38 @@ router.put(
|
|||||||
|
|
||||||
router.post(
|
router.post(
|
||||||
'/brands/:id/logo',
|
'/brands/:id/logo',
|
||||||
validateRequest(numericIdParamSchema('id')),
|
validateRequest(numericIdParam('id')),
|
||||||
upload.single('logoImage'),
|
brandLogoUpload.single('logoImage'),
|
||||||
requireFileUpload('logoImage'),
|
requireFileUpload('logoImage'),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
// Apply ADR-003 pattern for type safety
|
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
|
||||||
try {
|
try {
|
||||||
// Although requireFileUpload middleware should ensure the file exists,
|
// Although requireFileUpload middleware should ensure the file exists,
|
||||||
// this check satisfies TypeScript and adds robustness.
|
// this check satisfies TypeScript and adds robustness.
|
||||||
if (!req.file) {
|
if (!req.file) {
|
||||||
throw new ValidationError([], 'Logo image file is missing.');
|
throw new ValidationError([], 'Logo image file is missing.');
|
||||||
}
|
}
|
||||||
const logoUrl = `/assets/${req.file.filename}`;
|
|
||||||
await db.adminRepo.updateBrandLogo(params.id, logoUrl, req.log);
|
const logoUrl = await brandService.updateBrandLogo(params.id, req.file, req.log);
|
||||||
|
|
||||||
logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
|
logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
|
||||||
res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
|
res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
// If an error occurs after the file has been uploaded (e.g., DB error),
|
||||||
|
// we must clean up the orphaned file from the disk.
|
||||||
|
await cleanupUploadedFile(req.file);
|
||||||
|
logger.error({ error }, 'Error updating brand logo');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
router.get('/unmatched-items', async (req, res, next: NextFunction) => {
|
router.get('/unmatched-items', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
|
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
|
||||||
res.json(items);
|
res.json(items);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching unmatched items');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -288,16 +283,17 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
|
|||||||
*/
|
*/
|
||||||
router.delete(
|
router.delete(
|
||||||
'/recipes/:recipeId',
|
'/recipes/:recipeId',
|
||||||
validateRequest(numericIdParamSchema('recipeId')),
|
validateRequest(numericIdParam('recipeId')),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const userProfile = req.user as UserProfile;
|
const userProfile = req.user as UserProfile;
|
||||||
// Infer the type directly from the schema generator function. // This was a duplicate, fixed.
|
// Infer the type directly from the schema generator function. // This was a duplicate, fixed.
|
||||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||||
try {
|
try {
|
||||||
// The isAdmin flag bypasses the ownership check in the repository method.
|
// The isAdmin flag bypasses the ownership check in the repository method.
|
||||||
await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
|
await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
|
||||||
res.status(204).send();
|
res.status(204).send();
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
|
logger.error({ error }, 'Error deleting recipe');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -308,14 +304,15 @@ router.delete(
|
|||||||
*/
|
*/
|
||||||
router.delete(
|
router.delete(
|
||||||
'/flyers/:flyerId',
|
'/flyers/:flyerId',
|
||||||
validateRequest(numericIdParamSchema('flyerId')),
|
validateRequest(numericIdParam('flyerId')),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
// Infer the type directly from the schema generator function.
|
// Infer the type directly from the schema generator function.
|
||||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>;
|
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>;
|
||||||
try {
|
try {
|
||||||
await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
|
await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
|
||||||
res.status(204).send();
|
res.status(204).send();
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
|
logger.error({ error }, 'Error deleting flyer');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -335,16 +332,18 @@ router.put(
|
|||||||
); // This is still a standalone function in admin.db.ts
|
); // This is still a standalone function in admin.db.ts
|
||||||
res.status(200).json(updatedComment);
|
res.status(200).json(updatedComment);
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
|
logger.error({ error }, 'Error updating comment status');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
router.get('/users', async (req, res, next: NextFunction) => {
|
router.get('/users', validateRequest(emptySchema), async (req, res, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
const users = await db.adminRepo.getAllUsers(req.log);
|
const users = await db.adminRepo.getAllUsers(req.log);
|
||||||
res.json(users);
|
res.json(users);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching users');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -354,16 +353,14 @@ router.get(
|
|||||||
validateRequest(activityLogSchema),
|
validateRequest(activityLogSchema),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
// Apply ADR-003 pattern for type safety.
|
// Apply ADR-003 pattern for type safety.
|
||||||
// We explicitly coerce query params here because the validation middleware might not
|
// We parse the query here to apply Zod's coercions (string to number) and defaults.
|
||||||
// replace req.query with the coerced values in all environments.
|
const { limit, offset } = activityLogSchema.shape.query.parse(req.query);
|
||||||
const query = req.query as unknown as { limit?: string; offset?: string };
|
|
||||||
const limit = query.limit ? Number(query.limit) : 50;
|
|
||||||
const offset = query.offset ? Number(query.offset) : 0;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const logs = await db.adminRepo.getActivityLog(limit, offset, req.log);
|
const logs = await db.adminRepo.getActivityLog(limit!, offset!, req.log);
|
||||||
res.json(logs);
|
res.json(logs);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching activity log');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -379,6 +376,7 @@ router.get(
|
|||||||
const user = await db.userRepo.findUserProfileById(params.id, req.log);
|
const user = await db.userRepo.findUserProfileById(params.id, req.log);
|
||||||
res.json(user);
|
res.json(user);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching user profile');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -408,12 +406,10 @@ router.delete(
|
|||||||
// Apply ADR-003 pattern for type safety
|
// Apply ADR-003 pattern for type safety
|
||||||
const { params } = req as unknown as z.infer<ReturnType<typeof uuidParamSchema>>;
|
const { params } = req as unknown as z.infer<ReturnType<typeof uuidParamSchema>>;
|
||||||
try {
|
try {
|
||||||
if (userProfile.user.user_id === params.id) {
|
await userService.deleteUserAsAdmin(userProfile.user.user_id, params.id, req.log);
|
||||||
throw new ValidationError([], 'Admins cannot delete their own account.');
|
|
||||||
}
|
|
||||||
await db.userRepo.deleteUserById(params.id, req.log);
|
|
||||||
res.status(204).send();
|
res.status(204).send();
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error deleting user');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -425,6 +421,7 @@ router.delete(
|
|||||||
*/
|
*/
|
||||||
router.post(
|
router.post(
|
||||||
'/trigger/daily-deal-check',
|
'/trigger/daily-deal-check',
|
||||||
|
validateRequest(emptySchema),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const userProfile = req.user as UserProfile;
|
const userProfile = req.user as UserProfile;
|
||||||
logger.info(
|
logger.info(
|
||||||
@@ -435,9 +432,7 @@ router.post(
|
|||||||
// We call the function but don't wait for it to finish (no `await`).
|
// We call the function but don't wait for it to finish (no `await`).
|
||||||
// This is a "fire-and-forget" operation from the client's perspective.
|
// This is a "fire-and-forget" operation from the client's perspective.
|
||||||
backgroundJobService.runDailyDealCheck();
|
backgroundJobService.runDailyDealCheck();
|
||||||
res
|
res.status(202).json({
|
||||||
.status(202)
|
|
||||||
.json({
|
|
||||||
message:
|
message:
|
||||||
'Daily deal check job has been triggered successfully. It will run in the background.',
|
'Daily deal check job has been triggered successfully. It will run in the background.',
|
||||||
});
|
});
|
||||||
@@ -454,6 +449,7 @@ router.post(
|
|||||||
*/
|
*/
|
||||||
router.post(
|
router.post(
|
||||||
'/trigger/analytics-report',
|
'/trigger/analytics-report',
|
||||||
|
validateRequest(emptySchema),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const userProfile = req.user as UserProfile;
|
const userProfile = req.user as UserProfile;
|
||||||
logger.info(
|
logger.info(
|
||||||
@@ -461,16 +457,9 @@ router.post(
|
|||||||
);
|
);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const reportDate = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
|
const jobId = await backgroundJobService.triggerAnalyticsReport();
|
||||||
// Use a unique job ID for manual triggers to distinguish them from scheduled jobs.
|
res.status(202).json({
|
||||||
const jobId = `manual-report-${reportDate}-${Date.now()}`;
|
message: `Analytics report generation job has been enqueued successfully. Job ID: ${jobId}`,
|
||||||
|
|
||||||
const job = await analyticsQueue.add('generate-daily-report', { reportDate }, { jobId });
|
|
||||||
|
|
||||||
res
|
|
||||||
.status(202)
|
|
||||||
.json({
|
|
||||||
message: `Analytics report generation job has been enqueued successfully. Job ID: ${job.id}`,
|
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error({ error }, '[Admin] Failed to enqueue analytics report job.');
|
logger.error({ error }, '[Admin] Failed to enqueue analytics report job.');
|
||||||
@@ -485,11 +474,11 @@ router.post(
|
|||||||
*/
|
*/
|
||||||
router.post(
|
router.post(
|
||||||
'/flyers/:flyerId/cleanup',
|
'/flyers/:flyerId/cleanup',
|
||||||
validateRequest(numericIdParamSchema('flyerId')),
|
validateRequest(numericIdParam('flyerId')),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const userProfile = req.user as UserProfile;
|
const userProfile = req.user as UserProfile;
|
||||||
// Infer type from the schema generator for type safety, as per ADR-003.
|
// Infer type from the schema generator for type safety, as per ADR-003.
|
||||||
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParamSchema>>; // This was a duplicate, fixed.
|
const { params } = req as unknown as z.infer<ReturnType<typeof numericIdParam>>; // This was a duplicate, fixed.
|
||||||
logger.info(
|
logger.info(
|
||||||
`[Admin] Manual trigger for flyer file cleanup received from user: ${userProfile.user.user_id} for flyer ID: ${params.flyerId}`,
|
`[Admin] Manual trigger for flyer file cleanup received from user: ${userProfile.user.user_id} for flyer ID: ${params.flyerId}`,
|
||||||
);
|
);
|
||||||
@@ -501,6 +490,7 @@ router.post(
|
|||||||
.status(202)
|
.status(202)
|
||||||
.json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
|
.json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error enqueuing cleanup job');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -510,7 +500,10 @@ router.post(
|
|||||||
* POST /api/admin/trigger/failing-job - Enqueue a test job designed to fail.
|
* POST /api/admin/trigger/failing-job - Enqueue a test job designed to fail.
|
||||||
* This is for testing the retry mechanism and Bull Board UI.
|
* This is for testing the retry mechanism and Bull Board UI.
|
||||||
*/
|
*/
|
||||||
router.post('/trigger/failing-job', async (req: Request, res: Response, next: NextFunction) => {
|
router.post(
|
||||||
|
'/trigger/failing-job',
|
||||||
|
validateRequest(emptySchema),
|
||||||
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const userProfile = req.user as UserProfile;
|
const userProfile = req.user as UserProfile;
|
||||||
logger.info(
|
logger.info(
|
||||||
`[Admin] Manual trigger for a failing job received from user: ${userProfile.user.user_id}`,
|
`[Admin] Manual trigger for a failing job received from user: ${userProfile.user.user_id}`,
|
||||||
@@ -523,9 +516,11 @@ router.post('/trigger/failing-job', async (req: Request, res: Response, next: Ne
|
|||||||
.status(202)
|
.status(202)
|
||||||
.json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
|
.json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error enqueuing failing job');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* POST /api/admin/system/clear-geocode-cache - Clears the Redis cache for geocoded addresses.
|
* POST /api/admin/system/clear-geocode-cache - Clears the Redis cache for geocoded addresses.
|
||||||
@@ -533,6 +528,7 @@ router.post('/trigger/failing-job', async (req: Request, res: Response, next: Ne
|
|||||||
*/
|
*/
|
||||||
router.post(
|
router.post(
|
||||||
'/system/clear-geocode-cache',
|
'/system/clear-geocode-cache',
|
||||||
|
validateRequest(emptySchema),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const userProfile = req.user as UserProfile;
|
const userProfile = req.user as UserProfile;
|
||||||
logger.info(
|
logger.info(
|
||||||
@@ -541,9 +537,7 @@ router.post(
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const keysDeleted = await geocodingService.clearGeocodeCache(req.log);
|
const keysDeleted = await geocodingService.clearGeocodeCache(req.log);
|
||||||
res
|
res.status(200).json({
|
||||||
.status(200)
|
|
||||||
.json({
|
|
||||||
message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
|
message: `Successfully cleared the geocode cache. ${keysDeleted} keys were removed.`,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -557,46 +551,26 @@ router.post(
|
|||||||
* GET /api/admin/workers/status - Get the current running status of all BullMQ workers.
|
* GET /api/admin/workers/status - Get the current running status of all BullMQ workers.
|
||||||
* This is useful for a system health dashboard to see if any workers have crashed.
|
* This is useful for a system health dashboard to see if any workers have crashed.
|
||||||
*/
|
*/
|
||||||
router.get('/workers/status', async (req: Request, res: Response) => {
|
router.get('/workers/status', validateRequest(emptySchema), async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const workers = [flyerWorker, emailWorker, analyticsWorker, cleanupWorker, weeklyAnalyticsWorker];
|
try {
|
||||||
|
const workerStatuses = await monitoringService.getWorkerStatuses();
|
||||||
const workerStatuses = await Promise.all(
|
|
||||||
workers.map(async (worker) => {
|
|
||||||
return {
|
|
||||||
name: worker.name,
|
|
||||||
isRunning: worker.isRunning(),
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
res.json(workerStatuses);
|
res.json(workerStatuses);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching worker statuses');
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* GET /api/admin/queues/status - Get job counts for all BullMQ queues.
|
* GET /api/admin/queues/status - Get job counts for all BullMQ queues.
|
||||||
* This is useful for monitoring the health and backlog of background jobs.
|
* This is useful for monitoring the health and backlog of background jobs.
|
||||||
*/
|
*/
|
||||||
router.get('/queues/status', async (req: Request, res: Response, next: NextFunction) => {
|
router.get('/queues/status', validateRequest(emptySchema), async (req: Request, res: Response, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
const queues = [flyerQueue, emailQueue, analyticsQueue, cleanupQueue, weeklyAnalyticsQueue];
|
const queueStatuses = await monitoringService.getQueueStatuses();
|
||||||
|
|
||||||
const queueStatuses = await Promise.all(
|
|
||||||
queues.map(async (queue) => {
|
|
||||||
return {
|
|
||||||
name: queue.name,
|
|
||||||
counts: await queue.getJobCounts(
|
|
||||||
'waiting',
|
|
||||||
'active',
|
|
||||||
'completed',
|
|
||||||
'failed',
|
|
||||||
'delayed',
|
|
||||||
'paused',
|
|
||||||
),
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
res.json(queueStatuses);
|
res.json(queueStatuses);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error fetching queue statuses');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -613,38 +587,15 @@ router.post(
|
|||||||
params: { queueName, jobId },
|
params: { queueName, jobId },
|
||||||
} = req as unknown as z.infer<typeof jobRetrySchema>;
|
} = req as unknown as z.infer<typeof jobRetrySchema>;
|
||||||
|
|
||||||
const queueMap: { [key: string]: Queue } = {
|
|
||||||
'flyer-processing': flyerQueue,
|
|
||||||
'email-sending': emailQueue,
|
|
||||||
'analytics-reporting': analyticsQueue,
|
|
||||||
'file-cleanup': cleanupQueue,
|
|
||||||
};
|
|
||||||
|
|
||||||
const queue = queueMap[queueName];
|
|
||||||
|
|
||||||
if (!queue) {
|
|
||||||
// Throw a NotFoundError to be handled by the central error handler.
|
|
||||||
throw new NotFoundError(`Queue '${queueName}' not found.`);
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const job = await queue.getJob(jobId);
|
await monitoringService.retryFailedJob(
|
||||||
if (!job)
|
queueName,
|
||||||
throw new NotFoundError(`Job with ID '${jobId}' not found in queue '${queueName}'.`);
|
jobId,
|
||||||
|
userProfile.user.user_id,
|
||||||
const jobState = await job.getState();
|
|
||||||
if (jobState !== 'failed')
|
|
||||||
throw new ValidationError(
|
|
||||||
[],
|
|
||||||
`Job is not in a 'failed' state. Current state: ${jobState}.`,
|
|
||||||
); // This was a duplicate, fixed.
|
|
||||||
|
|
||||||
await job.retry();
|
|
||||||
logger.info(
|
|
||||||
`[Admin] User ${userProfile.user.user_id} manually retried job ${jobId} in queue ${queueName}.`,
|
|
||||||
);
|
);
|
||||||
res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
|
res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error retrying job');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -655,6 +606,7 @@ router.post(
|
|||||||
*/
|
*/
|
||||||
router.post(
|
router.post(
|
||||||
'/trigger/weekly-analytics',
|
'/trigger/weekly-analytics',
|
||||||
|
validateRequest(emptySchema),
|
||||||
async (req: Request, res: Response, next: NextFunction) => {
|
async (req: Request, res: Response, next: NextFunction) => {
|
||||||
const userProfile = req.user as UserProfile; // This was a duplicate, fixed.
|
const userProfile = req.user as UserProfile; // This was a duplicate, fixed.
|
||||||
logger.info(
|
logger.info(
|
||||||
@@ -662,23 +614,19 @@ router.post(
|
|||||||
);
|
);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const { year: reportYear, week: reportWeek } = getSimpleWeekAndYear();
|
const jobId = await backgroundJobService.triggerWeeklyAnalyticsReport();
|
||||||
const { weeklyAnalyticsQueue } = await import('../services/queueService.server');
|
|
||||||
const job = await weeklyAnalyticsQueue.add(
|
|
||||||
'generate-weekly-report',
|
|
||||||
{ reportYear, reportWeek },
|
|
||||||
{
|
|
||||||
jobId: `manual-weekly-report-${reportYear}-${reportWeek}-${Date.now()}`, // Add timestamp to avoid ID conflict
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
res
|
res
|
||||||
.status(202)
|
.status(202)
|
||||||
.json({ message: 'Successfully enqueued weekly analytics job.', jobId: job.id });
|
.json({ message: 'Successfully enqueued weekly analytics job.', jobId });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
logger.error({ error }, 'Error enqueuing weekly analytics job');
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
/* Catches errors from multer (e.g., file size, file filter) */
|
||||||
|
router.use(handleMulterError);
|
||||||
|
|
||||||
|
|
||||||
export default router;
|
export default router;
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import type { Request, Response, NextFunction } from 'express';
|
|||||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
vi.mock('../services/db/index.db', () => ({
|
vi.mock('../services/db/index.db', () => ({
|
||||||
adminRepo: {
|
adminRepo: {
|
||||||
@@ -45,8 +44,9 @@ import adminRouter from './admin.routes';
|
|||||||
import { adminRepo } from '../services/db/index.db';
|
import { adminRepo } from '../services/db/index.db';
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
@@ -73,12 +73,6 @@ describe('Admin Stats Routes (/api/admin/stats)', () => {
|
|||||||
authenticatedUser: adminUser,
|
authenticatedUser: adminUser,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
|
||||||
});
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import supertest from 'supertest';
|
|||||||
import type { Request, Response, NextFunction } from 'express';
|
import type { Request, Response, NextFunction } from 'express';
|
||||||
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// Mock dependencies
|
// Mock dependencies
|
||||||
vi.mock('../services/geocodingService.server', () => ({
|
vi.mock('../services/geocodingService.server', () => ({
|
||||||
@@ -50,8 +49,9 @@ import adminRouter from './admin.routes';
|
|||||||
import { geocodingService } from '../services/geocodingService.server';
|
import { geocodingService } from '../services/geocodingService.server';
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
@@ -79,12 +79,6 @@ describe('Admin System Routes (/api/admin/system)', () => {
|
|||||||
authenticatedUser: adminUser,
|
authenticatedUser: adminUser,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
|
||||||
});
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -4,9 +4,8 @@ import supertest from 'supertest';
|
|||||||
import type { Request, Response, NextFunction } from 'express';
|
import type { Request, Response, NextFunction } from 'express';
|
||||||
import { createMockUserProfile, createMockAdminUserView } from '../tests/utils/mockFactories';
|
import { createMockUserProfile, createMockAdminUserView } from '../tests/utils/mockFactories';
|
||||||
import type { UserProfile, Profile } from '../types';
|
import type { UserProfile, Profile } from '../types';
|
||||||
import { NotFoundError } from '../services/db/errors.db';
|
import { NotFoundError, ValidationError } from '../services/db/errors.db';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
vi.mock('../services/db/index.db', () => ({
|
vi.mock('../services/db/index.db', () => ({
|
||||||
adminRepo: {
|
adminRepo: {
|
||||||
@@ -23,6 +22,12 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
notificationRepo: {},
|
notificationRepo: {},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
vi.mock('../services/userService', () => ({
|
||||||
|
userService: {
|
||||||
|
deleteUserAsAdmin: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
// Mock other dependencies that are not directly tested but are part of the adminRouter setup
|
// Mock other dependencies that are not directly tested but are part of the adminRouter setup
|
||||||
vi.mock('../services/db/flyer.db');
|
vi.mock('../services/db/flyer.db');
|
||||||
vi.mock('../services/db/recipe.db');
|
vi.mock('../services/db/recipe.db');
|
||||||
@@ -44,8 +49,9 @@ vi.mock('@bull-board/express', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the router AFTER all mocks are defined.
|
// Import the router AFTER all mocks are defined.
|
||||||
@@ -53,6 +59,7 @@ import adminRouter from './admin.routes';
|
|||||||
|
|
||||||
// Import the mocked repos to control them in tests
|
// Import the mocked repos to control them in tests
|
||||||
import { adminRepo, userRepo } from '../services/db/index.db';
|
import { adminRepo, userRepo } from '../services/db/index.db';
|
||||||
|
import { userService } from '../services/userService';
|
||||||
|
|
||||||
// Mock the passport middleware
|
// Mock the passport middleware
|
||||||
vi.mock('./passport.routes', () => ({
|
vi.mock('./passport.routes', () => ({
|
||||||
@@ -83,12 +90,6 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
|||||||
authenticatedUser: adminUser,
|
authenticatedUser: adminUser,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
|
||||||
});
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
@@ -197,22 +198,27 @@ describe('Admin User Management Routes (/api/admin/users)', () => {
|
|||||||
it('should successfully delete a user', async () => {
|
it('should successfully delete a user', async () => {
|
||||||
const targetId = '123e4567-e89b-12d3-a456-426614174999';
|
const targetId = '123e4567-e89b-12d3-a456-426614174999';
|
||||||
vi.mocked(userRepo.deleteUserById).mockResolvedValue(undefined);
|
vi.mocked(userRepo.deleteUserById).mockResolvedValue(undefined);
|
||||||
|
vi.mocked(userService.deleteUserAsAdmin).mockResolvedValue(undefined);
|
||||||
const response = await supertest(app).delete(`/api/admin/users/${targetId}`);
|
const response = await supertest(app).delete(`/api/admin/users/${targetId}`);
|
||||||
expect(response.status).toBe(204);
|
expect(response.status).toBe(204);
|
||||||
expect(userRepo.deleteUserById).toHaveBeenCalledWith(targetId, expect.any(Object));
|
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(adminId, targetId, expect.any(Object));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should prevent an admin from deleting their own account', async () => {
|
it('should prevent an admin from deleting their own account', async () => {
|
||||||
|
const validationError = new ValidationError([], 'Admins cannot delete their own account.');
|
||||||
|
vi.mocked(userService.deleteUserAsAdmin).mockRejectedValue(validationError);
|
||||||
const response = await supertest(app).delete(`/api/admin/users/${adminId}`);
|
const response = await supertest(app).delete(`/api/admin/users/${adminId}`);
|
||||||
expect(response.status).toBe(400);
|
expect(response.status).toBe(400);
|
||||||
expect(response.body.message).toMatch(/Admins cannot delete their own account/);
|
expect(response.body.message).toMatch(/Admins cannot delete their own account/);
|
||||||
expect(userRepo.deleteUserById).not.toHaveBeenCalled();
|
expect(userRepo.deleteUserById).not.toHaveBeenCalled();
|
||||||
|
expect(userService.deleteUserAsAdmin).toHaveBeenCalledWith(adminId, adminId, expect.any(Object));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 500 on a generic database error', async () => {
|
it('should return 500 on a generic database error', async () => {
|
||||||
const targetId = '123e4567-e89b-12d3-a456-426614174999';
|
const targetId = '123e4567-e89b-12d3-a456-426614174999';
|
||||||
const dbError = new Error('DB Error');
|
const dbError = new Error('DB Error');
|
||||||
vi.mocked(userRepo.deleteUserById).mockRejectedValue(dbError);
|
vi.mocked(userRepo.deleteUserById).mockRejectedValue(dbError);
|
||||||
|
vi.mocked(userService.deleteUserAsAdmin).mockRejectedValue(dbError);
|
||||||
const response = await supertest(app).delete(`/api/admin/users/${targetId}`);
|
const response = await supertest(app).delete(`/api/admin/users/${targetId}`);
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -13,14 +13,21 @@ import {
|
|||||||
import * as aiService from '../services/aiService.server';
|
import * as aiService from '../services/aiService.server';
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
import { mockLogger } from '../tests/utils/mockLogger';
|
||||||
|
import { ValidationError } from '../services/db/errors.db';
|
||||||
|
|
||||||
// Mock the AI service methods to avoid making real AI calls
|
// Mock the AI service methods to avoid making real AI calls
|
||||||
vi.mock('../services/aiService.server', () => ({
|
vi.mock('../services/aiService.server', async (importOriginal) => {
|
||||||
|
const actual = await importOriginal<typeof import('../services/aiService.server')>();
|
||||||
|
return {
|
||||||
|
...actual,
|
||||||
aiService: {
|
aiService: {
|
||||||
extractTextFromImageArea: vi.fn(),
|
extractTextFromImageArea: vi.fn(),
|
||||||
planTripWithMaps: vi.fn(), // Added this missing mock
|
planTripWithMaps: vi.fn(),
|
||||||
|
enqueueFlyerProcessing: vi.fn(),
|
||||||
|
processLegacyFlyerUpload: vi.fn(),
|
||||||
},
|
},
|
||||||
}));
|
};
|
||||||
|
});
|
||||||
|
|
||||||
const { mockedDb } = vi.hoisted(() => ({
|
const { mockedDb } = vi.hoisted(() => ({
|
||||||
mockedDb: {
|
mockedDb: {
|
||||||
@@ -30,6 +37,9 @@ const { mockedDb } = vi.hoisted(() => ({
|
|||||||
adminRepo: {
|
adminRepo: {
|
||||||
logActivity: vi.fn(),
|
logActivity: vi.fn(),
|
||||||
},
|
},
|
||||||
|
personalizationRepo: {
|
||||||
|
getAllMasterItems: vi.fn(),
|
||||||
|
},
|
||||||
// This function is a standalone export, not part of a repo
|
// This function is a standalone export, not part of a repo
|
||||||
createFlyerAndItems: vi.fn(),
|
createFlyerAndItems: vi.fn(),
|
||||||
},
|
},
|
||||||
@@ -40,6 +50,7 @@ vi.mock('../services/db/flyer.db', () => ({ createFlyerAndItems: mockedDb.create
|
|||||||
vi.mock('../services/db/index.db', () => ({
|
vi.mock('../services/db/index.db', () => ({
|
||||||
flyerRepo: mockedDb.flyerRepo,
|
flyerRepo: mockedDb.flyerRepo,
|
||||||
adminRepo: mockedDb.adminRepo,
|
adminRepo: mockedDb.adminRepo,
|
||||||
|
personalizationRepo: mockedDb.personalizationRepo,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the queue service
|
// Mock the queue service
|
||||||
@@ -55,8 +66,9 @@ import aiRouter from './ai.routes';
|
|||||||
import { flyerQueue } from '../services/queueService.server';
|
import { flyerQueue } from '../services/queueService.server';
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the passport module to control authentication for different tests.
|
// Mock the passport module to control authentication for different tests.
|
||||||
@@ -78,64 +90,84 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
vi.mocked(mockLogger.info).mockImplementation(() => {});
|
vi.mocked(mockLogger.info).mockImplementation(() => {});
|
||||||
vi.mocked(mockLogger.error).mockImplementation(() => {});
|
vi.mocked(mockLogger.error).mockImplementation(() => {});
|
||||||
vi.mocked(mockLogger.warn).mockImplementation(() => {});
|
vi.mocked(mockLogger.warn).mockImplementation(() => {});
|
||||||
|
vi.mocked(mockLogger.debug).mockImplementation(() => {}); // Ensure debug is also mocked
|
||||||
});
|
});
|
||||||
const app = createTestApp({ router: aiRouter, basePath: '/api/ai' });
|
const app = createTestApp({ router: aiRouter, basePath: '/api/ai' });
|
||||||
|
|
||||||
describe('Module-level error handling', () => {
|
// New test to cover the router.use diagnostic middleware's catch block and errMsg branches
|
||||||
it('should log an error if storage path creation fails', async () => {
|
describe('Diagnostic Middleware Error Handling', () => {
|
||||||
// Arrange
|
it('should log an error if logger.debug throws an object with a message property', async () => {
|
||||||
const mkdirError = new Error('EACCES: permission denied');
|
const mockErrorObject = { message: 'Mock debug error' };
|
||||||
vi.resetModules(); // Reset modules to re-run top-level code
|
vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
|
||||||
vi.doMock('node:fs', () => ({
|
throw mockErrorObject;
|
||||||
...fs,
|
});
|
||||||
mkdirSync: vi.fn().mockImplementation(() => {
|
|
||||||
throw mkdirError;
|
|
||||||
}),
|
|
||||||
}));
|
|
||||||
const { logger } = await import('../services/logger.server');
|
|
||||||
|
|
||||||
// Act: Dynamically import the router to trigger the mkdirSync call
|
// Make any request to trigger the middleware
|
||||||
await import('./ai.routes');
|
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
|
||||||
|
|
||||||
// Assert
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
const storagePath =
|
{ error: mockErrorObject.message }, // errMsg should extract the message
|
||||||
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
'Failed to log incoming AI request headers',
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
|
||||||
{ error: 'EACCES: permission denied' },
|
|
||||||
`Failed to create storage path (${storagePath}). File uploads may fail.`,
|
|
||||||
);
|
);
|
||||||
vi.doUnmock('node:fs'); // Cleanup
|
// The request should still proceed, but might fail later if the original flow was interrupted.
|
||||||
});
|
// Here, it will likely hit the 404 for job not found.
|
||||||
|
expect(response.status).toBe(404);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
it('should log an error if logger.debug throws a primitive string', async () => {
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
const mockErrorString = 'Mock debug error string';
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
throw mockErrorString;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Make any request to trigger the middleware
|
||||||
|
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
|
||||||
|
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
{ error: mockErrorString }, // errMsg should convert to string
|
||||||
|
'Failed to log incoming AI request headers',
|
||||||
|
);
|
||||||
|
expect(response.status).toBe(404);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should log an error if logger.debug throws null/undefined', async () => {
|
||||||
|
vi.mocked(mockLogger.debug).mockImplementationOnce(() => {
|
||||||
|
throw null; // Simulate throwing null
|
||||||
|
});
|
||||||
|
|
||||||
|
const response = await supertest(app).get('/api/ai/jobs/job-123/status');
|
||||||
|
|
||||||
|
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||||
|
{ error: 'An unknown error occurred.' }, // errMsg should handle null/undefined
|
||||||
|
'Failed to log incoming AI request headers',
|
||||||
|
);
|
||||||
|
expect(response.status).toBe(404);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('POST /upload-and-process', () => {
|
describe('POST /upload-and-process', () => {
|
||||||
const imagePath = path.resolve(__dirname, '../tests/assets/test-flyer-image.jpg');
|
const imagePath = path.resolve(__dirname, '../tests/assets/test-flyer-image.jpg');
|
||||||
|
// A valid SHA-256 checksum is 64 hex characters.
|
||||||
|
const validChecksum = 'a'.repeat(64);
|
||||||
|
|
||||||
it('should enqueue a job and return 202 on success', async () => {
|
it('should enqueue a job and return 202 on success', async () => {
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
|
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-123' } as unknown as Job);
|
||||||
vi.mocked(flyerQueue.add).mockResolvedValue({ id: 'job-123' } as unknown as Job);
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/ai/upload-and-process')
|
.post('/api/ai/upload-and-process')
|
||||||
.field('checksum', 'new-checksum')
|
.field('checksum', validChecksum)
|
||||||
.attach('flyerFile', imagePath);
|
.attach('flyerFile', imagePath);
|
||||||
|
|
||||||
expect(response.status).toBe(202);
|
expect(response.status).toBe(202);
|
||||||
expect(response.body.message).toBe('Flyer accepted for processing.');
|
expect(response.body.message).toBe('Flyer accepted for processing.');
|
||||||
expect(response.body.jobId).toBe('job-123');
|
expect(response.body.jobId).toBe('job-123');
|
||||||
expect(flyerQueue.add).toHaveBeenCalledWith('process-flyer', expect.any(Object));
|
expect(aiService.aiService.enqueueFlyerProcessing).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 400 if no file is provided', async () => {
|
it('should return 400 if no file is provided', async () => {
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/ai/upload-and-process')
|
.post('/api/ai/upload-and-process')
|
||||||
.field('checksum', 'some-checksum');
|
.field('checksum', validChecksum);
|
||||||
|
|
||||||
expect(response.status).toBe(400);
|
expect(response.status).toBe(400);
|
||||||
expect(response.body.message).toBe('A flyer file (PDF or image) is required.');
|
expect(response.body.message).toBe('A flyer file (PDF or image) is required.');
|
||||||
@@ -152,13 +184,12 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should return 409 if flyer checksum already exists', async () => {
|
it('should return 409 if flyer checksum already exists', async () => {
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(
|
const duplicateError = new aiService.DuplicateFlyerError('This flyer has already been processed.', 99);
|
||||||
createMockFlyer({ flyer_id: 99 }),
|
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValue(duplicateError);
|
||||||
);
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/ai/upload-and-process')
|
.post('/api/ai/upload-and-process')
|
||||||
.field('checksum', 'duplicate-checksum')
|
.field('checksum', validChecksum)
|
||||||
.attach('flyerFile', imagePath);
|
.attach('flyerFile', imagePath);
|
||||||
|
|
||||||
expect(response.status).toBe(409);
|
expect(response.status).toBe(409);
|
||||||
@@ -166,12 +197,11 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should return 500 if enqueuing the job fails', async () => {
|
it('should return 500 if enqueuing the job fails', async () => {
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
|
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockRejectedValueOnce(new Error('Redis connection failed'));
|
||||||
vi.mocked(flyerQueue.add).mockRejectedValueOnce(new Error('Redis connection failed'));
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/ai/upload-and-process')
|
.post('/api/ai/upload-and-process')
|
||||||
.field('checksum', 'new-checksum')
|
.field('checksum', validChecksum)
|
||||||
.attach('flyerFile', imagePath);
|
.attach('flyerFile', imagePath);
|
||||||
|
|
||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
@@ -190,18 +220,19 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
authenticatedUser: mockUser,
|
authenticatedUser: mockUser,
|
||||||
});
|
});
|
||||||
|
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
|
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-456' } as unknown as Job);
|
||||||
vi.mocked(flyerQueue.add).mockResolvedValue({ id: 'job-456' } as unknown as Job);
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
await supertest(authenticatedApp)
|
await supertest(authenticatedApp)
|
||||||
.post('/api/ai/upload-and-process')
|
.post('/api/ai/upload-and-process')
|
||||||
.field('checksum', 'auth-checksum')
|
.field('checksum', validChecksum)
|
||||||
.attach('flyerFile', imagePath);
|
.attach('flyerFile', imagePath);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(flyerQueue.add).toHaveBeenCalled();
|
expect(aiService.aiService.enqueueFlyerProcessing).toHaveBeenCalled();
|
||||||
expect(vi.mocked(flyerQueue.add).mock.calls[0][1].userId).toBe('auth-user-1');
|
const callArgs = vi.mocked(aiService.aiService.enqueueFlyerProcessing).mock.calls[0];
|
||||||
|
// Check the userProfile argument (3rd argument)
|
||||||
|
expect(callArgs[2]?.user.user_id).toBe('auth-user-1');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should pass user profile address to the job when authenticated user has an address', async () => {
|
it('should pass user profile address to the job when authenticated user has an address', async () => {
|
||||||
@@ -224,16 +255,34 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
authenticatedUser: mockUserWithAddress,
|
authenticatedUser: mockUserWithAddress,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
vi.mocked(aiService.aiService.enqueueFlyerProcessing).mockResolvedValue({ id: 'job-789' } as unknown as Job);
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
await supertest(authenticatedApp)
|
await supertest(authenticatedApp)
|
||||||
.post('/api/ai/upload-and-process')
|
.post('/api/ai/upload-and-process')
|
||||||
.field('checksum', 'addr-checksum')
|
.field('checksum', validChecksum)
|
||||||
.attach('flyerFile', imagePath);
|
.attach('flyerFile', imagePath);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(vi.mocked(flyerQueue.add).mock.calls[0][1].userProfileAddress).toBe(
|
expect(aiService.aiService.enqueueFlyerProcessing).toHaveBeenCalled();
|
||||||
'123 Pacific St, Anytown, BC, V8T 1A1, CA',
|
// The service handles address extraction from profile, so we just verify the profile was passed
|
||||||
);
|
const callArgs = vi.mocked(aiService.aiService.enqueueFlyerProcessing).mock.calls[0];
|
||||||
|
expect(callArgs[2]?.address?.address_line_1).toBe('123 Pacific St');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should clean up the uploaded file if validation fails (e.g., missing checksum)', async () => {
|
||||||
|
// Spy on the unlink function to ensure it's called on error
|
||||||
|
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/upload-and-process')
|
||||||
|
.attach('flyerFile', imagePath); // No checksum field, will cause validation to throw
|
||||||
|
|
||||||
|
expect(response.status).toBe(400);
|
||||||
|
// The validation error is now caught inside the route handler, which then calls cleanup.
|
||||||
|
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
unlinkSpy.mockRestore();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -281,9 +330,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
flyer_id: 1,
|
flyer_id: 1,
|
||||||
file_name: mockDataPayload.originalFileName,
|
file_name: mockDataPayload.originalFileName,
|
||||||
});
|
});
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined); // No duplicate
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(mockFlyer);
|
||||||
vi.mocked(mockedDb.createFlyerAndItems).mockResolvedValue({ flyer: mockFlyer, items: [] });
|
|
||||||
vi.mocked(mockedDb.adminRepo.logActivity).mockResolvedValue();
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
@@ -294,7 +341,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
// Assert
|
// Assert
|
||||||
expect(response.status).toBe(201);
|
expect(response.status).toBe(201);
|
||||||
expect(response.body.message).toBe('Flyer processed and saved successfully.');
|
expect(response.body.message).toBe('Flyer processed and saved successfully.');
|
||||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 400 if no flyer image is provided', async () => {
|
it('should return 400 if no flyer image is provided', async () => {
|
||||||
@@ -304,10 +351,11 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
expect(response.status).toBe(400);
|
expect(response.status).toBe(400);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 409 Conflict if flyer checksum already exists', async () => {
|
it('should return 409 Conflict and delete the uploaded file if flyer checksum already exists', async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const mockExistingFlyer = createMockFlyer({ flyer_id: 99 });
|
const duplicateError = new aiService.DuplicateFlyerError('This flyer has already been processed.', 99);
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(mockExistingFlyer); // Duplicate found
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(duplicateError);
|
||||||
|
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
@@ -318,7 +366,11 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
// Assert
|
// Assert
|
||||||
expect(response.status).toBe(409);
|
expect(response.status).toBe(409);
|
||||||
expect(response.body.message).toBe('This flyer has already been processed.');
|
expect(response.body.message).toBe('This flyer has already been processed.');
|
||||||
expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled();
|
expect(mockedDb.createFlyerAndItems).not.toHaveBeenCalled(); // Should not be called if service throws
|
||||||
|
// Assert that the file was deleted
|
||||||
|
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||||
|
// The filename is predictable in the test environment because of the multer config in ai.routes.ts
|
||||||
|
expect(unlinkSpy).toHaveBeenCalledWith(expect.stringContaining('flyerImage-test-flyer-image.jpg'));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should accept payload when extractedData.items is missing and save with empty items', async () => {
|
it('should accept payload when extractedData.items is missing and save with empty items', async () => {
|
||||||
@@ -329,12 +381,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
extractedData: { store_name: 'Partial Store' }, // no items key
|
extractedData: { store_name: 'Partial Store' }, // no items key
|
||||||
};
|
};
|
||||||
|
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(createMockFlyer({ flyer_id: 2 }));
|
||||||
const mockFlyer = createMockFlyer({
|
|
||||||
flyer_id: 2,
|
|
||||||
file_name: partialPayload.originalFileName,
|
|
||||||
});
|
|
||||||
vi.mocked(mockedDb.createFlyerAndItems).mockResolvedValue({ flyer: mockFlyer, items: [] });
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/ai/flyers/process')
|
.post('/api/ai/flyers/process')
|
||||||
@@ -342,13 +389,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
.attach('flyerImage', imagePath);
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
expect(response.status).toBe(201);
|
expect(response.status).toBe(201);
|
||||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
// verify the items array passed to DB was an empty array
|
|
||||||
const callArgs = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0]?.[1];
|
|
||||||
expect(callArgs).toBeDefined();
|
|
||||||
expect(Array.isArray(callArgs)).toBe(true);
|
|
||||||
// use non-null assertion for the runtime-checked variable so TypeScript is satisfied
|
|
||||||
expect(callArgs!.length).toBe(0);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fallback to a safe store name when store_name is missing', async () => {
|
it('should fallback to a safe store name when store_name is missing', async () => {
|
||||||
@@ -358,12 +399,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
extractedData: { items: [] }, // store_name missing
|
extractedData: { items: [] }, // store_name missing
|
||||||
};
|
};
|
||||||
|
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(createMockFlyer({ flyer_id: 3 }));
|
||||||
const mockFlyer = createMockFlyer({
|
|
||||||
flyer_id: 3,
|
|
||||||
file_name: payloadNoStore.originalFileName,
|
|
||||||
});
|
|
||||||
vi.mocked(mockedDb.createFlyerAndItems).mockResolvedValue({ flyer: mockFlyer, items: [] });
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/ai/flyers/process')
|
.post('/api/ai/flyers/process')
|
||||||
@@ -371,19 +407,11 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
.attach('flyerImage', imagePath);
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
expect(response.status).toBe(201);
|
expect(response.status).toBe(201);
|
||||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
// verify the flyerData.store_name passed to DB was the fallback string
|
|
||||||
const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
|
|
||||||
expect(flyerDataArg.store_name).toContain('Unknown Store');
|
|
||||||
// Also verify the warning was logged
|
|
||||||
expect(mockLogger.warn).toHaveBeenCalledWith(
|
|
||||||
'extractedData.store_name missing; using fallback store name to avoid DB constraint error.',
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle a generic error during flyer creation', async () => {
|
it('should handle a generic error during flyer creation', async () => {
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValueOnce(
|
||||||
vi.mocked(mockedDb.createFlyerAndItems).mockRejectedValueOnce(
|
|
||||||
new Error('DB transaction failed'),
|
new Error('DB transaction failed'),
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -406,8 +434,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
const mockFlyer = createMockFlyer({ flyer_id: 1 });
|
const mockFlyer = createMockFlyer({ flyer_id: 1 });
|
||||||
vi.mocked(mockedDb.flyerRepo.findFlyerByChecksum).mockResolvedValue(undefined);
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockResolvedValue(mockFlyer);
|
||||||
vi.mocked(mockedDb.createFlyerAndItems).mockResolvedValue({ flyer: mockFlyer, items: [] });
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle payload where "data" field is an object, not stringified JSON', async () => {
|
it('should handle payload where "data" field is an object, not stringified JSON', async () => {
|
||||||
@@ -417,7 +444,39 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
.attach('flyerImage', imagePath);
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
expect(response.status).toBe(201);
|
expect(response.status).toBe(201);
|
||||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle payload where extractedData is null', async () => {
|
||||||
|
const payloadWithNullExtractedData = {
|
||||||
|
checksum: 'null-extracted-data-checksum',
|
||||||
|
originalFileName: 'flyer-null.jpg',
|
||||||
|
extractedData: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/flyers/process')
|
||||||
|
.field('data', JSON.stringify(payloadWithNullExtractedData))
|
||||||
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
|
expect(response.status).toBe(201);
|
||||||
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle payload where extractedData is a string', async () => {
|
||||||
|
const payloadWithStringExtractedData = {
|
||||||
|
checksum: 'string-extracted-data-checksum',
|
||||||
|
originalFileName: 'flyer-string.jpg',
|
||||||
|
extractedData: 'not-an-object',
|
||||||
|
};
|
||||||
|
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/flyers/process')
|
||||||
|
.field('data', JSON.stringify(payloadWithStringExtractedData))
|
||||||
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
|
expect(response.status).toBe(201);
|
||||||
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle payload where extractedData is at the root of the body', async () => {
|
it('should handle payload where extractedData is at the root of the body', async () => {
|
||||||
@@ -431,9 +490,74 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
.attach('flyerImage', imagePath);
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
expect(response.status).toBe(201); // This test was failing with 500, the fix is in ai.routes.ts
|
expect(response.status).toBe(201); // This test was failing with 500, the fix is in ai.routes.ts
|
||||||
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
|
});
|
||||||
expect(flyerDataArg.store_name).toBe('Root Store');
|
|
||||||
|
it('should default item quantity to 1 if missing', async () => {
|
||||||
|
const payloadMissingQuantity = {
|
||||||
|
checksum: 'qty-checksum',
|
||||||
|
originalFileName: 'flyer-qty.jpg',
|
||||||
|
extractedData: {
|
||||||
|
store_name: 'Qty Store',
|
||||||
|
items: [{ name: 'Item without qty', price: 100 }],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/flyers/process')
|
||||||
|
.field('data', JSON.stringify(payloadMissingQuantity))
|
||||||
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
|
expect(response.status).toBe(201);
|
||||||
|
expect(aiService.aiService.processLegacyFlyerUpload).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('POST /flyers/process (Legacy Error Handling)', () => {
|
||||||
|
const imagePath = path.resolve(__dirname, '../tests/assets/test-flyer-image.jpg');
|
||||||
|
|
||||||
|
it('should handle malformed JSON in data field and return 400', async () => {
|
||||||
|
const malformedDataString = '{"checksum":'; // Invalid JSON
|
||||||
|
|
||||||
|
// Since the service parses the data, we mock it to throw a ValidationError when parsing fails
|
||||||
|
// or when it detects the malformed input.
|
||||||
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new ValidationError([], 'Checksum is required.'));
|
||||||
|
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/flyers/process')
|
||||||
|
.field('data', malformedDataString)
|
||||||
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
|
// The outer catch block should be hit, leading to empty parsed data.
|
||||||
|
// The handler then fails the checksum validation.
|
||||||
|
expect(response.status).toBe(400);
|
||||||
|
expect(response.body.message).toBe('Checksum is required.');
|
||||||
|
// Note: The logging expectation was removed because if the service throws a ValidationError,
|
||||||
|
// the route handler passes it to the global error handler, which might log differently or not as a "critical error during parsing" in the route itself.
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return 400 if checksum is missing from legacy payload', async () => {
|
||||||
|
const payloadWithoutChecksum = {
|
||||||
|
originalFileName: 'flyer.jpg',
|
||||||
|
extractedData: { store_name: 'Test Store', items: [] },
|
||||||
|
};
|
||||||
|
// Spy on fs.promises.unlink to verify file cleanup
|
||||||
|
const unlinkSpy = vi.spyOn(fs.promises, 'unlink').mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
// Mock the service to throw a ValidationError because the checksum is missing
|
||||||
|
vi.mocked(aiService.aiService.processLegacyFlyerUpload).mockRejectedValue(new ValidationError([], 'Checksum is required.'));
|
||||||
|
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/flyers/process')
|
||||||
|
.field('data', JSON.stringify(payloadWithoutChecksum))
|
||||||
|
.attach('flyerImage', imagePath);
|
||||||
|
|
||||||
|
expect(response.status).toBe(400);
|
||||||
|
expect(response.body.message).toBe('Checksum is required.');
|
||||||
|
// Ensure the uploaded file is cleaned up
|
||||||
|
expect(unlinkSpy).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
unlinkSpy.mockRestore();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -554,10 +678,11 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
const mockUser = createMockUserProfile({
|
const mockUser = createMockUserProfile({
|
||||||
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
||||||
});
|
});
|
||||||
|
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUser });
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
// Inject an authenticated user for this test block
|
// Inject an authenticated user for this test block
|
||||||
app.use((req, res, next) => {
|
authenticatedApp.use((req, res, next) => {
|
||||||
req.user = mockUser;
|
req.user = mockUser;
|
||||||
next();
|
next();
|
||||||
});
|
});
|
||||||
@@ -572,7 +697,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
.field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
|
.field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
|
||||||
.field('extractionType', 'item_details')
|
.field('extractionType', 'item_details')
|
||||||
.attach('image', imagePath);
|
.attach('image', imagePath);
|
||||||
|
// Use the authenticatedApp instance for requests in this block
|
||||||
expect(response.status).toBe(200);
|
expect(response.status).toBe(200);
|
||||||
expect(response.body).toEqual(mockResult);
|
expect(response.body).toEqual(mockResult);
|
||||||
expect(aiService.aiService.extractTextFromImageArea).toHaveBeenCalled();
|
expect(aiService.aiService.extractTextFromImageArea).toHaveBeenCalled();
|
||||||
@@ -583,7 +708,7 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
new Error('AI API is down'),
|
new Error('AI API is down'),
|
||||||
);
|
);
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(authenticatedApp)
|
||||||
.post('/api/ai/rescan-area')
|
.post('/api/ai/rescan-area')
|
||||||
.field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
|
.field('cropArea', JSON.stringify({ x: 10, y: 10, width: 50, height: 50 }))
|
||||||
.field('extractionType', 'item_details')
|
.field('extractionType', 'item_details')
|
||||||
@@ -599,13 +724,10 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
const mockUserProfile = createMockUserProfile({
|
const mockUserProfile = createMockUserProfile({
|
||||||
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
user: { user_id: 'user-123', email: 'user-123@test.com' },
|
||||||
});
|
});
|
||||||
|
const authenticatedApp = createTestApp({ router: aiRouter, basePath: '/api/ai', authenticatedUser: mockUserProfile });
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
// For this block, simulate an authenticated request by attaching the user.
|
// The authenticatedApp instance is already set up with mockUserProfile
|
||||||
app.use((req, res, next) => {
|
|
||||||
req.user = mockUserProfile;
|
|
||||||
next();
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('POST /quick-insights should return the stubbed response', async () => {
|
it('POST /quick-insights should return the stubbed response', async () => {
|
||||||
@@ -617,6 +739,14 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
expect(response.body.text).toContain('server-generated quick insight');
|
expect(response.body.text).toContain('server-generated quick insight');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('POST /quick-insights should accept items with "item" property instead of "name"', async () => {
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/quick-insights')
|
||||||
|
.send({ items: [{ item: 'test item' }] });
|
||||||
|
|
||||||
|
expect(response.status).toBe(200);
|
||||||
|
});
|
||||||
|
|
||||||
it('POST /quick-insights should return 500 on a generic error', async () => {
|
it('POST /quick-insights should return 500 on a generic error', async () => {
|
||||||
// To hit the catch block, we can simulate an error by making the logger throw.
|
// To hit the catch block, we can simulate an error by making the logger throw.
|
||||||
vi.mocked(mockLogger.info).mockImplementationOnce(() => {
|
vi.mocked(mockLogger.info).mockImplementationOnce(() => {
|
||||||
@@ -700,6 +830,39 @@ describe('AI Routes (/api/ai)', () => {
|
|||||||
expect(response.body.message).toBe('Maps API key invalid');
|
expect(response.body.message).toBe('Maps API key invalid');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('POST /deep-dive should return 500 on a generic error', async () => {
|
||||||
|
vi.mocked(mockLogger.info).mockImplementationOnce(() => {
|
||||||
|
throw new Error('Deep dive logging failed');
|
||||||
|
});
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/deep-dive')
|
||||||
|
.send({ items: [{ name: 'test' }] });
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
expect(response.body.message).toBe('Deep dive logging failed');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('POST /search-web should return 500 on a generic error', async () => {
|
||||||
|
vi.mocked(mockLogger.info).mockImplementationOnce(() => {
|
||||||
|
throw new Error('Search web logging failed');
|
||||||
|
});
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/search-web')
|
||||||
|
.send({ query: 'test query' });
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
expect(response.body.message).toBe('Search web logging failed');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('POST /compare-prices should return 500 on a generic error', async () => {
|
||||||
|
vi.mocked(mockLogger.info).mockImplementationOnce(() => {
|
||||||
|
throw new Error('Compare prices logging failed');
|
||||||
|
});
|
||||||
|
const response = await supertest(app)
|
||||||
|
.post('/api/ai/compare-prices')
|
||||||
|
.send({ items: [{ name: 'Milk' }] });
|
||||||
|
expect(response.status).toBe(500);
|
||||||
|
expect(response.body.message).toBe('Compare prices logging failed');
|
||||||
|
});
|
||||||
|
|
||||||
it('POST /quick-insights should return 400 if items are missing', async () => {
|
it('POST /quick-insights should return 400 if items are missing', async () => {
|
||||||
const response = await supertest(app).post('/api/ai/quick-insights').send({});
|
const response = await supertest(app).post('/api/ai/quick-insights').send({});
|
||||||
expect(response.status).toBe(400);
|
expect(response.status).toBe(400);
|
||||||
|
|||||||
@@ -1,40 +1,32 @@
|
|||||||
// src/routes/ai.routes.ts
|
// src/routes/ai.routes.ts
|
||||||
import { Router, Request, Response, NextFunction } from 'express';
|
import { Router, Request, Response, NextFunction } from 'express';
|
||||||
import multer from 'multer';
|
|
||||||
import path from 'path';
|
|
||||||
import fs from 'fs';
|
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import passport from './passport.routes';
|
import passport from './passport.routes';
|
||||||
import { optionalAuth } from './passport.routes';
|
import { optionalAuth } from './passport.routes';
|
||||||
import * as db from '../services/db/index.db';
|
import { aiService, DuplicateFlyerError } from '../services/aiService.server';
|
||||||
import { createFlyerAndItems } from '../services/db/flyer.db';
|
import {
|
||||||
import * as aiService from '../services/aiService.server'; // Correctly import server-side AI service
|
createUploadMiddleware,
|
||||||
import { generateFlyerIcon } from '../utils/imageProcessor';
|
handleMulterError,
|
||||||
import { sanitizeFilename } from '../utils/stringUtils';
|
} from '../middleware/multer.middleware';
|
||||||
import { logger } from '../services/logger.server';
|
import { logger } from '../services/logger.server'; // This was a duplicate, fixed.
|
||||||
import { UserProfile, ExtractedCoreData, ExtractedFlyerItem } from '../types';
|
import { UserProfile } from '../types'; // This was a duplicate, fixed.
|
||||||
import { flyerQueue } from '../services/queueService.server';
|
|
||||||
import { validateRequest } from '../middleware/validation.middleware';
|
import { validateRequest } from '../middleware/validation.middleware';
|
||||||
|
import { requiredString } from '../utils/zodUtils';
|
||||||
|
import { cleanupUploadedFile, cleanupUploadedFiles } from '../utils/fileUtils';
|
||||||
|
import { monitoringService } from '../services/monitoringService.server';
|
||||||
|
|
||||||
const router = Router();
|
const router = Router();
|
||||||
|
|
||||||
interface FlyerProcessPayload extends Partial<ExtractedCoreData> {
|
|
||||||
checksum?: string;
|
|
||||||
originalFileName?: string;
|
|
||||||
extractedData?: Partial<ExtractedCoreData>;
|
|
||||||
data?: FlyerProcessPayload; // For nested data structures
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Zod Schemas for AI Routes (as per ADR-003) ---
|
// --- Zod Schemas for AI Routes (as per ADR-003) ---
|
||||||
// Helper for consistent required string validation (handles missing/null/empty)
|
|
||||||
const requiredString = (message: string) =>
|
|
||||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
|
||||||
|
|
||||||
const uploadAndProcessSchema = z.object({
|
const uploadAndProcessSchema = z.object({
|
||||||
body: z.object({
|
body: z.object({
|
||||||
checksum: requiredString('File checksum is required.'),
|
// Stricter validation for SHA-256 checksum. It must be a 64-character hexadecimal string.
|
||||||
// Potential improvement: If checksum is always a specific format (e.g., SHA-256),
|
checksum: requiredString('File checksum is required.').pipe(
|
||||||
// you could add `.length(64).regex(/^[a-f0-9]+$/)` for stricter validation.
|
z.string()
|
||||||
|
.length(64, 'Checksum must be 64 characters long.')
|
||||||
|
.regex(/^[a-f0-9]+$/, 'Checksum must be a valid hexadecimal string.'),
|
||||||
|
),
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -80,7 +72,6 @@ const rescanAreaSchema = z.object({
|
|||||||
})
|
})
|
||||||
.pipe(cropAreaObjectSchema), // Further validate the structure of the parsed object
|
.pipe(cropAreaObjectSchema), // Further validate the structure of the parsed object
|
||||||
extractionType: z.enum(['store_name', 'dates', 'item_details'], {
|
extractionType: z.enum(['store_name', 'dates', 'item_details'], {
|
||||||
// This is the line with the error
|
|
||||||
message: "extractionType must be one of 'store_name', 'dates', or 'item_details'.",
|
message: "extractionType must be one of 'store_name', 'dates', or 'item_details'.",
|
||||||
}),
|
}),
|
||||||
}),
|
}),
|
||||||
@@ -88,10 +79,24 @@ const rescanAreaSchema = z.object({
|
|||||||
|
|
||||||
const flyerItemForAnalysisSchema = z
|
const flyerItemForAnalysisSchema = z
|
||||||
.object({
|
.object({
|
||||||
name: requiredString('Item name is required.'),
|
// Sanitize item and name by trimming whitespace.
|
||||||
// Allow other properties to pass through without validation
|
// The transform ensures that null/undefined values are preserved
|
||||||
|
// while trimming any actual string values.
|
||||||
|
item: z.string().nullish().transform(val => (val ? val.trim() : val)),
|
||||||
|
name: z.string().nullish().transform(val => (val ? val.trim() : val)),
|
||||||
})
|
})
|
||||||
.passthrough();
|
// Using .passthrough() allows extra properties on the item object.
|
||||||
|
// If the intent is to strictly enforce only 'item' and 'name' (and other known properties),
|
||||||
|
// consider using .strict() instead for tighter security and data integrity.
|
||||||
|
.passthrough()
|
||||||
|
.refine(
|
||||||
|
(data) =>
|
||||||
|
// After the transform, the values are already trimmed.
|
||||||
|
(data.item && data.item.length > 0) || (data.name && data.name.length > 0),
|
||||||
|
{
|
||||||
|
message: "Item identifier is required (either 'item' or 'name').",
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
const insightsSchema = z.object({
|
const insightsSchema = z.object({
|
||||||
body: z.object({
|
body: z.object({
|
||||||
@@ -107,6 +112,8 @@ const comparePricesSchema = z.object({
|
|||||||
|
|
||||||
const planTripSchema = z.object({
|
const planTripSchema = z.object({
|
||||||
body: z.object({
|
body: z.object({
|
||||||
|
// Consider if this array should be non-empty. If a trip plan requires at least one item,
|
||||||
|
// you could add `.nonempty('At least one item is required to plan a trip.')`
|
||||||
items: z.array(flyerItemForAnalysisSchema),
|
items: z.array(flyerItemForAnalysisSchema),
|
||||||
store: z.object({ name: requiredString('Store name is required.') }),
|
store: z.object({ name: requiredString('Store name is required.') }),
|
||||||
userLocation: z.object({
|
userLocation: z.object({
|
||||||
@@ -134,40 +141,7 @@ const searchWebSchema = z.object({
|
|||||||
body: z.object({ query: requiredString('A search query is required.') }),
|
body: z.object({ query: requiredString('A search query is required.') }),
|
||||||
});
|
});
|
||||||
|
|
||||||
// --- Multer Configuration for File Uploads ---
|
const uploadToDisk = createUploadMiddleware({ storageType: 'flyer' });
|
||||||
const storagePath =
|
|
||||||
process.env.STORAGE_PATH || '/var/www/flyer-crawler.projectium.com/flyer-images';
|
|
||||||
|
|
||||||
// Ensure the storage path exists at startup so multer can write files there.
|
|
||||||
try {
|
|
||||||
fs.mkdirSync(storagePath, { recursive: true });
|
|
||||||
logger.debug(`AI upload storage path ready: ${storagePath}`);
|
|
||||||
} catch (err) {
|
|
||||||
logger.error(
|
|
||||||
{ error: errMsg(err) },
|
|
||||||
`Failed to create storage path (${storagePath}). File uploads may fail.`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
const diskStorage = multer.diskStorage({
|
|
||||||
destination: function (req, file, cb) {
|
|
||||||
cb(null, storagePath);
|
|
||||||
},
|
|
||||||
filename: function (req, file, cb) {
|
|
||||||
// If in a test environment, use a predictable filename for easy cleanup.
|
|
||||||
if (process.env.NODE_ENV === 'test') {
|
|
||||||
return cb(null, `${file.fieldname}-test-flyer-image.jpg`);
|
|
||||||
} else {
|
|
||||||
const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1e9);
|
|
||||||
// Sanitize the original filename to remove spaces and special characters
|
|
||||||
return cb(
|
|
||||||
null,
|
|
||||||
file.fieldname + '-' + uniqueSuffix + '-' + sanitizeFilename(file.originalname),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const uploadToDisk = multer({ storage: diskStorage });
|
|
||||||
|
|
||||||
// Diagnostic middleware: log incoming AI route requests (headers and sizes)
|
// Diagnostic middleware: log incoming AI route requests (headers and sizes)
|
||||||
router.use((req: Request, res: Response, next: NextFunction) => {
|
router.use((req: Request, res: Response, next: NextFunction) => {
|
||||||
@@ -180,7 +154,7 @@ router.use((req: Request, res: Response, next: NextFunction) => {
|
|||||||
'[API /ai] Incoming request',
|
'[API /ai] Incoming request',
|
||||||
);
|
);
|
||||||
} catch (e: unknown) {
|
} catch (e: unknown) {
|
||||||
logger.error({ error: e }, 'Failed to log incoming AI request headers');
|
logger.error({ error: errMsg(e) }, 'Failed to log incoming AI request headers');
|
||||||
}
|
}
|
||||||
next();
|
next();
|
||||||
});
|
});
|
||||||
@@ -193,58 +167,29 @@ router.post(
|
|||||||
'/upload-and-process',
|
'/upload-and-process',
|
||||||
optionalAuth,
|
optionalAuth,
|
||||||
uploadToDisk.single('flyerFile'),
|
uploadToDisk.single('flyerFile'),
|
||||||
validateRequest(uploadAndProcessSchema),
|
// Validation is now handled inside the route to ensure file cleanup on failure.
|
||||||
|
// validateRequest(uploadAndProcessSchema),
|
||||||
async (req, res, next: NextFunction) => {
|
async (req, res, next: NextFunction) => {
|
||||||
try {
|
try {
|
||||||
|
// Manually validate the request body. This will throw if validation fails.
|
||||||
|
const { body } = uploadAndProcessSchema.parse({ body: req.body });
|
||||||
|
|
||||||
if (!req.file) {
|
if (!req.file) {
|
||||||
return res.status(400).json({ message: 'A flyer file (PDF or image) is required.' });
|
return res.status(400).json({ message: 'A flyer file (PDF or image) is required.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
{ filename: req.file.originalname, size: req.file.size, checksum: req.body.checksum },
|
{ filename: req.file.originalname, size: req.file.size, checksum: body.checksum },
|
||||||
'Handling /upload-and-process',
|
'Handling /upload-and-process',
|
||||||
);
|
);
|
||||||
|
|
||||||
const { checksum } = req.body;
|
|
||||||
// Check for duplicate flyer using checksum before even creating a job
|
|
||||||
const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, req.log);
|
|
||||||
if (existingFlyer) {
|
|
||||||
logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${checksum}`);
|
|
||||||
// Use 409 Conflict for duplicates
|
|
||||||
return res.status(409).json({
|
|
||||||
message: 'This flyer has already been processed.',
|
|
||||||
flyerId: existingFlyer.flyer_id,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const userProfile = req.user as UserProfile | undefined;
|
const userProfile = req.user as UserProfile | undefined;
|
||||||
// Construct a user address string from their profile if they are logged in.
|
const job = await aiService.enqueueFlyerProcessing(
|
||||||
let userProfileAddress: string | undefined = undefined;
|
req.file,
|
||||||
if (userProfile?.address) {
|
body.checksum,
|
||||||
userProfileAddress = [
|
userProfile,
|
||||||
userProfile.address.address_line_1,
|
req.ip ?? 'unknown',
|
||||||
userProfile.address.address_line_2,
|
req.log,
|
||||||
userProfile.address.city,
|
|
||||||
userProfile.address.province_state,
|
|
||||||
userProfile.address.postal_code,
|
|
||||||
userProfile.address.country,
|
|
||||||
]
|
|
||||||
.filter(Boolean)
|
|
||||||
.join(', ');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add job to the queue
|
|
||||||
const job = await flyerQueue.add('process-flyer', {
|
|
||||||
filePath: req.file.path,
|
|
||||||
originalFileName: req.file.originalname,
|
|
||||||
checksum: checksum,
|
|
||||||
userId: userProfile?.user.user_id,
|
|
||||||
submitterIp: req.ip, // Capture the submitter's IP address
|
|
||||||
userProfileAddress: userProfileAddress, // Pass the user's profile address
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`Enqueued flyer for processing. File: ${req.file.originalname}, Job ID: ${job.id}`,
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// Respond immediately to the client with 202 Accepted
|
// Respond immediately to the client with 202 Accepted
|
||||||
@@ -253,6 +198,11 @@ router.post(
|
|||||||
jobId: job.id,
|
jobId: job.id,
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
await cleanupUploadedFile(req.file);
|
||||||
|
if (error instanceof DuplicateFlyerError) {
|
||||||
|
logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${req.body?.checksum}`);
|
||||||
|
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
|
||||||
|
}
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -269,18 +219,11 @@ router.get(
|
|||||||
const {
|
const {
|
||||||
params: { jobId },
|
params: { jobId },
|
||||||
} = req as unknown as JobIdRequest;
|
} = req as unknown as JobIdRequest;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const job = await flyerQueue.getJob(jobId);
|
const jobStatus = await monitoringService.getFlyerJobStatus(jobId); // This was a duplicate, fixed.
|
||||||
if (!job) {
|
logger.debug(`[API /ai/jobs] Status check for job ${jobId}: ${jobStatus.state}`);
|
||||||
// Adhere to ADR-001 by throwing a specific error to be handled centrally.
|
res.json(jobStatus);
|
||||||
return res.status(404).json({ message: 'Job not found.' });
|
|
||||||
}
|
|
||||||
const state = await job.getState();
|
|
||||||
const progress = job.progress;
|
|
||||||
const returnValue = job.returnvalue;
|
|
||||||
const failedReason = job.failedReason;
|
|
||||||
logger.debug(`[API /ai/jobs] Status check for job ${jobId}: ${state}`);
|
|
||||||
res.json({ id: job.id, state, progress, returnValue, failedReason });
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
@@ -302,175 +245,22 @@ router.post(
|
|||||||
return res.status(400).json({ message: 'Flyer image file is required.' });
|
return res.status(400).json({ message: 'Flyer image file is required.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Diagnostic & tolerant parsing for flyers/process
|
|
||||||
logger.debug(
|
|
||||||
{ keys: Object.keys(req.body || {}) },
|
|
||||||
'[API /ai/flyers/process] Processing legacy upload',
|
|
||||||
);
|
|
||||||
logger.debug({ filePresent: !!req.file }, '[API /ai/flyers/process] file present:');
|
|
||||||
|
|
||||||
// Try several ways to obtain the payload so we are tolerant to client variations.
|
|
||||||
let parsed: FlyerProcessPayload = {};
|
|
||||||
let extractedData: Partial<ExtractedCoreData> = {};
|
|
||||||
try {
|
|
||||||
// If the client sent a top-level `data` field (stringified JSON), parse it.
|
|
||||||
if (req.body && (req.body.data || req.body.extractedData)) {
|
|
||||||
const raw = req.body.data ?? req.body.extractedData;
|
|
||||||
logger.debug(
|
|
||||||
{ type: typeof raw, length: raw?.length ?? 0 },
|
|
||||||
'[API /ai/flyers/process] raw extractedData',
|
|
||||||
);
|
|
||||||
try {
|
|
||||||
parsed = typeof raw === 'string' ? JSON.parse(raw) : raw;
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn(
|
|
||||||
{ error: errMsg(err) },
|
|
||||||
'[API /ai/flyers/process] Failed to JSON.parse raw extractedData; falling back to direct assign',
|
|
||||||
);
|
|
||||||
parsed = (
|
|
||||||
typeof raw === 'string' ? JSON.parse(String(raw).slice(0, 2000)) : raw
|
|
||||||
) as FlyerProcessPayload;
|
|
||||||
}
|
|
||||||
// If parsed itself contains an `extractedData` field, use that, otherwise assume parsed is the extractedData
|
|
||||||
extractedData = parsed.extractedData ?? (parsed as Partial<ExtractedCoreData>);
|
|
||||||
} else {
|
|
||||||
// No explicit `data` field found. Attempt to interpret req.body as an object (Express may have parsed multipart fields differently).
|
|
||||||
try {
|
|
||||||
parsed = typeof req.body === 'string' ? JSON.parse(req.body) : req.body;
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn(
|
|
||||||
{ error: errMsg(err) },
|
|
||||||
'[API /ai/flyers/process] Failed to JSON.parse req.body; using empty object',
|
|
||||||
);
|
|
||||||
parsed = (req.body as FlyerProcessPayload) || {};
|
|
||||||
}
|
|
||||||
// extractedData might be nested under `data` or `extractedData`, or the body itself may be the extracted data.
|
|
||||||
if (parsed.data) {
|
|
||||||
try {
|
|
||||||
const inner = typeof parsed.data === 'string' ? JSON.parse(parsed.data) : parsed.data;
|
|
||||||
extractedData = inner.extractedData ?? inner;
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn(
|
|
||||||
{ error: errMsg(err) },
|
|
||||||
'[API /ai/flyers/process] Failed to parse parsed.data; falling back',
|
|
||||||
);
|
|
||||||
extractedData = parsed.data as unknown as Partial<ExtractedCoreData>;
|
|
||||||
}
|
|
||||||
} else if (parsed.extractedData) {
|
|
||||||
extractedData = parsed.extractedData;
|
|
||||||
} else {
|
|
||||||
// Assume the body itself is the extracted data if it looks like it (has items or store_name keys)
|
|
||||||
if ('items' in parsed || 'store_name' in parsed || 'valid_from' in parsed) {
|
|
||||||
extractedData = parsed as Partial<ExtractedCoreData>;
|
|
||||||
} else {
|
|
||||||
extractedData = {};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.error(
|
|
||||||
{ error: err },
|
|
||||||
'[API /ai/flyers/process] Unexpected error while parsing request body',
|
|
||||||
);
|
|
||||||
parsed = {};
|
|
||||||
extractedData = {};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pull common metadata fields (checksum, originalFileName) from whichever shape we parsed.
|
|
||||||
const checksum = parsed.checksum ?? parsed?.data?.checksum ?? '';
|
|
||||||
const originalFileName =
|
|
||||||
parsed.originalFileName ?? parsed?.data?.originalFileName ?? req.file.originalname;
|
|
||||||
const userProfile = req.user as UserProfile | undefined;
|
const userProfile = req.user as UserProfile | undefined;
|
||||||
|
|
||||||
// Validate extractedData to avoid database errors (e.g., null store_name)
|
const newFlyer = await aiService.processLegacyFlyerUpload(
|
||||||
if (!extractedData || typeof extractedData !== 'object') {
|
req.file,
|
||||||
logger.warn(
|
req.body,
|
||||||
{ bodyData: parsed },
|
userProfile,
|
||||||
'Missing extractedData in /api/ai/flyers/process payload.',
|
|
||||||
);
|
|
||||||
// Don't fail hard here; proceed with empty items and fallback store name so the upload can be saved for manual review.
|
|
||||||
extractedData = {};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Transform the extracted items into the format required for database insertion.
|
|
||||||
// This adds default values for fields like `view_count` and `click_count`
|
|
||||||
// and makes this legacy endpoint consistent with the newer FlyerDataTransformer service.
|
|
||||||
const rawItems = extractedData.items ?? [];
|
|
||||||
const itemsArray = Array.isArray(rawItems)
|
|
||||||
? rawItems
|
|
||||||
: typeof rawItems === 'string'
|
|
||||||
? JSON.parse(rawItems)
|
|
||||||
: [];
|
|
||||||
const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
|
|
||||||
...item,
|
|
||||||
master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
|
|
||||||
view_count: 0,
|
|
||||||
click_count: 0,
|
|
||||||
updated_at: new Date().toISOString(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Ensure we have a valid store name; the DB requires a non-null store name.
|
|
||||||
const storeName =
|
|
||||||
extractedData.store_name && String(extractedData.store_name).trim().length > 0
|
|
||||||
? String(extractedData.store_name)
|
|
||||||
: 'Unknown Store (auto)';
|
|
||||||
if (storeName.startsWith('Unknown')) {
|
|
||||||
logger.warn(
|
|
||||||
'extractedData.store_name missing; using fallback store name to avoid DB constraint error.',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// 1. Check for duplicate flyer using checksum
|
|
||||||
const existingFlyer = await db.flyerRepo.findFlyerByChecksum(checksum, req.log);
|
|
||||||
if (existingFlyer) {
|
|
||||||
logger.warn(`Duplicate flyer upload attempt blocked for checksum: ${checksum}`);
|
|
||||||
return res.status(409).json({ message: 'This flyer has already been processed.' });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate a 64x64 icon from the uploaded flyer image.
|
|
||||||
const iconsDir = path.join(path.dirname(req.file.path), 'icons');
|
|
||||||
const iconFileName = await generateFlyerIcon(req.file.path, iconsDir, req.log);
|
|
||||||
const iconUrl = `/flyer-images/icons/${iconFileName}`;
|
|
||||||
|
|
||||||
// 2. Prepare flyer data for insertion
|
|
||||||
const flyerData = {
|
|
||||||
file_name: originalFileName,
|
|
||||||
image_url: `/flyer-images/${req.file.filename}`, // Store the full URL path
|
|
||||||
icon_url: iconUrl,
|
|
||||||
checksum: checksum,
|
|
||||||
// Use normalized store name (fallback applied above).
|
|
||||||
store_name: storeName,
|
|
||||||
valid_from: extractedData.valid_from ?? null,
|
|
||||||
valid_to: extractedData.valid_to ?? null,
|
|
||||||
store_address: extractedData.store_address ?? null,
|
|
||||||
item_count: 0, // Set default to 0; the trigger will update it.
|
|
||||||
uploaded_by: userProfile?.user.user_id, // Associate with user if logged in
|
|
||||||
};
|
|
||||||
|
|
||||||
// 3. Create flyer and its items in a transaction
|
|
||||||
const { flyer: newFlyer, items: newItems } = await createFlyerAndItems(
|
|
||||||
flyerData,
|
|
||||||
itemsForDb,
|
|
||||||
req.log,
|
|
||||||
);
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`Successfully processed and saved new flyer: ${newFlyer.file_name} (ID: ${newFlyer.flyer_id}) with ${newItems.length} items.`,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Log this significant event
|
|
||||||
await db.adminRepo.logActivity(
|
|
||||||
{
|
|
||||||
userId: userProfile?.user.user_id,
|
|
||||||
action: 'flyer_processed',
|
|
||||||
displayText: `Processed a new flyer for ${flyerData.store_name}.`,
|
|
||||||
details: { flyerId: newFlyer.flyer_id, storeName: flyerData.store_name },
|
|
||||||
},
|
|
||||||
req.log,
|
req.log,
|
||||||
);
|
);
|
||||||
|
|
||||||
res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
|
res.status(201).json({ message: 'Flyer processed and saved successfully.', flyer: newFlyer });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
await cleanupUploadedFile(req.file);
|
||||||
|
if (error instanceof DuplicateFlyerError) {
|
||||||
|
logger.warn(`Duplicate flyer upload attempt blocked.`);
|
||||||
|
return res.status(409).json({ message: error.message, flyerId: error.flyerId });
|
||||||
|
}
|
||||||
next(error);
|
next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -493,6 +283,8 @@ router.post(
|
|||||||
res.status(200).json({ is_flyer: true }); // Stubbed response
|
res.status(200).json({ is_flyer: true }); // Stubbed response
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
next(error);
|
next(error);
|
||||||
|
} finally {
|
||||||
|
await cleanupUploadedFile(req.file);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
@@ -510,6 +302,8 @@ router.post(
|
|||||||
res.status(200).json({ address: 'not identified' }); // Updated stubbed response
|
res.status(200).json({ address: 'not identified' }); // Updated stubbed response
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
next(error);
|
next(error);
|
||||||
|
} finally {
|
||||||
|
await cleanupUploadedFile(req.file);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
@@ -527,6 +321,8 @@ router.post(
|
|||||||
res.status(200).json({ store_logo_base_64: null }); // Stubbed response
|
res.status(200).json({ store_logo_base_64: null }); // Stubbed response
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
next(error);
|
next(error);
|
||||||
|
} finally {
|
||||||
|
await cleanupUploadedFiles(req.files as Express.Multer.File[]);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
@@ -603,7 +399,7 @@ router.post(
|
|||||||
try {
|
try {
|
||||||
const { items, store, userLocation } = req.body;
|
const { items, store, userLocation } = req.body;
|
||||||
logger.debug({ itemCount: items.length, storeName: store.name }, 'Trip planning requested.');
|
logger.debug({ itemCount: items.length, storeName: store.name }, 'Trip planning requested.');
|
||||||
const result = await aiService.aiService.planTripWithMaps(items, store, userLocation);
|
const result = await aiService.planTripWithMaps(items, store, userLocation);
|
||||||
res.status(200).json(result);
|
res.status(200).json(result);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error({ error: errMsg(error) }, 'Error in /api/ai/plan-trip endpoint:');
|
logger.error({ error: errMsg(error) }, 'Error in /api/ai/plan-trip endpoint:');
|
||||||
@@ -663,7 +459,7 @@ router.post(
|
|||||||
'Rescan area requested',
|
'Rescan area requested',
|
||||||
);
|
);
|
||||||
|
|
||||||
const result = await aiService.aiService.extractTextFromImageArea(
|
const result = await aiService.extractTextFromImageArea(
|
||||||
path,
|
path,
|
||||||
mimetype,
|
mimetype,
|
||||||
cropArea,
|
cropArea,
|
||||||
@@ -674,8 +470,13 @@ router.post(
|
|||||||
res.status(200).json(result);
|
res.status(200).json(result);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
next(error);
|
next(error);
|
||||||
|
} finally {
|
||||||
|
await cleanupUploadedFile(req.file);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
/* Catches errors from multer (e.g., file size, file filter) */
|
||||||
|
router.use(handleMulterError);
|
||||||
|
|
||||||
export default router;
|
export default router;
|
||||||
|
|||||||
@@ -2,14 +2,8 @@
|
|||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
import supertest from 'supertest';
|
import supertest from 'supertest';
|
||||||
import { Request, Response, NextFunction } from 'express';
|
import { Request, Response, NextFunction } from 'express';
|
||||||
import cookieParser from 'cookie-parser';
|
import cookieParser from 'cookie-parser'; // This was a duplicate, fixed.
|
||||||
import * as bcrypt from 'bcrypt';
|
import { createMockUserProfile } from '../tests/utils/mockFactories';
|
||||||
import jwt from 'jsonwebtoken';
|
|
||||||
import {
|
|
||||||
createMockUserProfile,
|
|
||||||
createMockUserWithPasswordHash,
|
|
||||||
} from '../tests/utils/mockFactories';
|
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
|
|
||||||
// --- FIX: Hoist passport mocks to be available for vi.mock ---
|
// --- FIX: Hoist passport mocks to be available for vi.mock ---
|
||||||
const passportMocks = vi.hoisted(() => {
|
const passportMocks = vi.hoisted(() => {
|
||||||
@@ -70,49 +64,25 @@ vi.mock('./passport.routes', () => ({
|
|||||||
optionalAuth: vi.fn((req: Request, res: Response, next: NextFunction) => next()),
|
optionalAuth: vi.fn((req: Request, res: Response, next: NextFunction) => next()),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the DB connection pool to control transactional behavior
|
// Mock the authService, which is now the primary dependency of the routes.
|
||||||
const { mockPool } = vi.hoisted(() => {
|
const { mockedAuthService } = vi.hoisted(() => {
|
||||||
const client = {
|
|
||||||
query: vi.fn(),
|
|
||||||
release: vi.fn(),
|
|
||||||
};
|
|
||||||
return {
|
return {
|
||||||
mockPool: {
|
mockedAuthService: {
|
||||||
connect: vi.fn(() => Promise.resolve(client)),
|
registerAndLoginUser: vi.fn(),
|
||||||
|
handleSuccessfulLogin: vi.fn(),
|
||||||
|
resetPassword: vi.fn(),
|
||||||
|
updatePassword: vi.fn(),
|
||||||
|
refreshAccessToken: vi.fn(),
|
||||||
|
logout: vi.fn(),
|
||||||
},
|
},
|
||||||
mockClient: client,
|
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
// Mock the Service Layer directly.
|
vi.mock('../services/authService', () => ({ authService: mockedAuthService }));
|
||||||
// We use async import inside the factory to properly hoist the UniqueConstraintError class usage.
|
|
||||||
vi.mock('../services/db/index.db', async () => {
|
|
||||||
const { UniqueConstraintError } = await import('../services/db/errors.db');
|
|
||||||
return {
|
|
||||||
userRepo: {
|
|
||||||
findUserByEmail: vi.fn(),
|
|
||||||
createUser: vi.fn(),
|
|
||||||
saveRefreshToken: vi.fn(),
|
|
||||||
createPasswordResetToken: vi.fn(),
|
|
||||||
getValidResetTokens: vi.fn(),
|
|
||||||
updateUserPassword: vi.fn(),
|
|
||||||
deleteResetToken: vi.fn(),
|
|
||||||
findUserByRefreshToken: vi.fn(),
|
|
||||||
deleteRefreshToken: vi.fn(),
|
|
||||||
},
|
|
||||||
adminRepo: {
|
|
||||||
logActivity: vi.fn(),
|
|
||||||
},
|
|
||||||
UniqueConstraintError: UniqueConstraintError,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
vi.mock('../services/db/connection.db', () => ({
|
|
||||||
getPool: () => mockPool,
|
|
||||||
}));
|
|
||||||
|
|
||||||
// Mock the logger
|
// Mock the logger
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the email service
|
// Mock the email service
|
||||||
@@ -120,15 +90,8 @@ vi.mock('../services/emailService.server', () => ({
|
|||||||
sendPasswordResetEmail: vi.fn(),
|
sendPasswordResetEmail: vi.fn(),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock bcrypt
|
|
||||||
vi.mock('bcrypt', async (importOriginal) => {
|
|
||||||
const actual = await importOriginal<typeof bcrypt>();
|
|
||||||
return { ...actual, compare: vi.fn() };
|
|
||||||
});
|
|
||||||
|
|
||||||
// Import the router AFTER mocks are established
|
// Import the router AFTER mocks are established
|
||||||
import authRouter from './auth.routes';
|
import authRouter from './auth.routes';
|
||||||
import * as db from '../services/db/index.db'; // This was a duplicate, fixed.
|
|
||||||
|
|
||||||
import { UniqueConstraintError } from '../services/db/errors.db'; // Import actual class for instanceof checks
|
import { UniqueConstraintError } from '../services/db/errors.db'; // Import actual class for instanceof checks
|
||||||
|
|
||||||
@@ -144,6 +107,8 @@ import { UniqueConstraintError } from '../services/db/errors.db'; // Import actu
|
|||||||
import express from 'express';
|
import express from 'express';
|
||||||
import { errorHandler } from '../middleware/errorHandler'; // Assuming this exists
|
import { errorHandler } from '../middleware/errorHandler'; // Assuming this exists
|
||||||
|
|
||||||
|
const { mockLogger } = await import('../tests/utils/mockLogger');
|
||||||
|
|
||||||
const app = express();
|
const app = express();
|
||||||
app.use(express.json());
|
app.use(express.json());
|
||||||
app.use(cookieParser()); // Mount BEFORE router
|
app.use(cookieParser()); // Mount BEFORE router
|
||||||
@@ -174,13 +139,11 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
user: { user_id: 'new-user-id', email: newUserEmail },
|
user: { user_id: 'new-user-id', email: newUserEmail },
|
||||||
full_name: 'Test User',
|
full_name: 'Test User',
|
||||||
});
|
});
|
||||||
|
mockedAuthService.registerAndLoginUser.mockResolvedValue({
|
||||||
// FIX: Mock the method on the imported singleton instance `userRepo` directly,
|
newUserProfile: mockNewUser,
|
||||||
// as this is what the route handler uses. Spying on the prototype does not
|
accessToken: 'new-access-token',
|
||||||
// affect this already-created instance.
|
refreshToken: 'new-refresh-token',
|
||||||
vi.mocked(db.userRepo.createUser).mockResolvedValue(mockNewUser);
|
});
|
||||||
vi.mocked(db.userRepo.saveRefreshToken).mockResolvedValue(undefined);
|
|
||||||
vi.mocked(db.adminRepo.logActivity).mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app).post('/api/auth/register').send({
|
const response = await supertest(app).post('/api/auth/register').send({
|
||||||
@@ -188,22 +151,29 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
password: strongPassword,
|
password: strongPassword,
|
||||||
full_name: 'Test User',
|
full_name: 'Test User',
|
||||||
});
|
});
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(response.status).toBe(201);
|
expect(response.status).toBe(201);
|
||||||
expect(response.body.message).toBe('User registered successfully!');
|
expect(response.body.message).toBe('User registered successfully!');
|
||||||
expect(response.body.userprofile.user.email).toBe(newUserEmail);
|
expect(response.body.userprofile.user.email).toBe(newUserEmail);
|
||||||
expect(response.body.token).toBeTypeOf('string'); // This was a duplicate, fixed.
|
expect(response.body.token).toBeTypeOf('string'); // This was a duplicate, fixed.
|
||||||
expect(db.userRepo.createUser).toHaveBeenCalled();
|
expect(mockedAuthService.registerAndLoginUser).toHaveBeenCalledWith(
|
||||||
|
newUserEmail,
|
||||||
|
strongPassword,
|
||||||
|
'Test User',
|
||||||
|
undefined, // avatar_url
|
||||||
|
mockLogger,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should set a refresh token cookie on successful registration', async () => {
|
it('should set a refresh token cookie on successful registration', async () => {
|
||||||
const mockNewUser = createMockUserProfile({
|
const mockNewUser = createMockUserProfile({
|
||||||
user: { user_id: 'new-user-id', email: 'cookie@test.com' },
|
user: { user_id: 'new-user-id', email: 'cookie@test.com' },
|
||||||
});
|
});
|
||||||
vi.mocked(db.userRepo.createUser).mockResolvedValue(mockNewUser);
|
mockedAuthService.registerAndLoginUser.mockResolvedValue({
|
||||||
vi.mocked(db.userRepo.saveRefreshToken).mockResolvedValue(undefined);
|
newUserProfile: mockNewUser,
|
||||||
vi.mocked(db.adminRepo.logActivity).mockResolvedValue(undefined);
|
accessToken: 'new-access-token',
|
||||||
|
refreshToken: 'new-refresh-token',
|
||||||
|
});
|
||||||
|
|
||||||
const response = await supertest(app).post('/api/auth/register').send({
|
const response = await supertest(app).post('/api/auth/register').send({
|
||||||
email: 'cookie@test.com',
|
email: 'cookie@test.com',
|
||||||
@@ -233,15 +203,14 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
expect(errorMessages).toMatch(/Password is too weak/i);
|
expect(errorMessages).toMatch(/Password is too weak/i);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should reject registration if the email already exists', async () => {
|
it('should reject registration if the auth service throws UniqueConstraintError', async () => {
|
||||||
// Create an error object that includes the 'code' property for simulating a PG unique violation.
|
// Create an error object that includes the 'code' property for simulating a PG unique violation.
|
||||||
// This is more type-safe than casting to 'any'.
|
// This is more type-safe than casting to 'any'.
|
||||||
const dbError = new UniqueConstraintError(
|
const dbError = new UniqueConstraintError(
|
||||||
'User with that email already exists.',
|
'User with that email already exists.',
|
||||||
) as UniqueConstraintError & { code: string };
|
) as UniqueConstraintError & { code: string };
|
||||||
dbError.code = '23505';
|
dbError.code = '23505';
|
||||||
|
mockedAuthService.registerAndLoginUser.mockRejectedValue(dbError);
|
||||||
vi.mocked(db.userRepo.createUser).mockRejectedValue(dbError);
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/register')
|
.post('/api/auth/register')
|
||||||
@@ -249,12 +218,11 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
|
|
||||||
expect(response.status).toBe(409); // 409 Conflict
|
expect(response.status).toBe(409); // 409 Conflict
|
||||||
expect(response.body.message).toBe('User with that email already exists.');
|
expect(response.body.message).toBe('User with that email already exists.');
|
||||||
expect(db.userRepo.createUser).toHaveBeenCalled();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 500 if a generic database error occurs during registration', async () => {
|
it('should return 500 if a generic database error occurs during registration', async () => {
|
||||||
const dbError = new Error('DB connection lost');
|
const dbError = new Error('DB connection lost');
|
||||||
vi.mocked(db.userRepo.createUser).mockRejectedValue(dbError);
|
mockedAuthService.registerAndLoginUser.mockRejectedValue(dbError);
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/register')
|
.post('/api/auth/register')
|
||||||
@@ -287,7 +255,10 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
it('should successfully log in a user and return a token and cookie', async () => {
|
it('should successfully log in a user and return a token and cookie', async () => {
|
||||||
// Arrange:
|
// Arrange:
|
||||||
const loginCredentials = { email: 'test@test.com', password: 'password123' };
|
const loginCredentials = { email: 'test@test.com', password: 'password123' };
|
||||||
vi.mocked(db.userRepo.saveRefreshToken).mockResolvedValue(undefined);
|
mockedAuthService.handleSuccessfulLogin.mockResolvedValue({
|
||||||
|
accessToken: 'new-access-token',
|
||||||
|
refreshToken: 'new-refresh-token',
|
||||||
|
});
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app).post('/api/auth/login').send(loginCredentials);
|
const response = await supertest(app).post('/api/auth/login').send(loginCredentials);
|
||||||
@@ -297,7 +268,6 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
// The API now returns a nested UserProfile object
|
// The API now returns a nested UserProfile object
|
||||||
expect(response.body.userprofile).toEqual(
|
expect(response.body.userprofile).toEqual(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
user_id: 'user-123',
|
|
||||||
user: expect.objectContaining({
|
user: expect.objectContaining({
|
||||||
user_id: 'user-123',
|
user_id: 'user-123',
|
||||||
email: loginCredentials.email,
|
email: loginCredentials.email,
|
||||||
@@ -308,25 +278,6 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
expect(response.headers['set-cookie']).toBeDefined();
|
expect(response.headers['set-cookie']).toBeDefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should contain the correct payload in the JWT token', async () => {
|
|
||||||
// Arrange
|
|
||||||
const loginCredentials = { email: 'payload.test@test.com', password: 'password123' };
|
|
||||||
vi.mocked(db.userRepo.saveRefreshToken).mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
// Act
|
|
||||||
const response = await supertest(app).post('/api/auth/login').send(loginCredentials);
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
expect(response.status).toBe(200);
|
|
||||||
const token = response.body.token;
|
|
||||||
expect(token).toBeTypeOf('string');
|
|
||||||
|
|
||||||
const decodedPayload = jwt.decode(token) as { user_id: string; email: string; role: string };
|
|
||||||
expect(decodedPayload.user_id).toBe('user-123');
|
|
||||||
expect(decodedPayload.email).toBe(loginCredentials.email);
|
|
||||||
expect(decodedPayload.role).toBe('user'); // Default role from mock factory
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should reject login for incorrect credentials', async () => {
|
it('should reject login for incorrect credentials', async () => {
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/login')
|
.post('/api/auth/login')
|
||||||
@@ -358,7 +309,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
it('should return 500 if saving the refresh token fails', async () => {
|
it('should return 500 if saving the refresh token fails', async () => {
|
||||||
// Arrange:
|
// Arrange:
|
||||||
const loginCredentials = { email: 'test@test.com', password: 'password123' };
|
const loginCredentials = { email: 'test@test.com', password: 'password123' };
|
||||||
vi.mocked(db.userRepo.saveRefreshToken).mockRejectedValue(new Error('DB write failed'));
|
mockedAuthService.handleSuccessfulLogin.mockRejectedValue(new Error('DB write failed'));
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app).post('/api/auth/login').send(loginCredentials);
|
const response = await supertest(app).post('/api/auth/login').send(loginCredentials);
|
||||||
@@ -400,7 +351,10 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
password: 'password123',
|
password: 'password123',
|
||||||
rememberMe: true,
|
rememberMe: true,
|
||||||
};
|
};
|
||||||
vi.mocked(db.userRepo.saveRefreshToken).mockResolvedValue(undefined);
|
mockedAuthService.handleSuccessfulLogin.mockResolvedValue({
|
||||||
|
accessToken: 'remember-access-token',
|
||||||
|
refreshToken: 'remember-refresh-token',
|
||||||
|
});
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app).post('/api/auth/login').send(loginCredentials);
|
const response = await supertest(app).post('/api/auth/login').send(loginCredentials);
|
||||||
@@ -415,10 +369,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
describe('POST /forgot-password', () => {
|
describe('POST /forgot-password', () => {
|
||||||
it('should send a reset link if the user exists', async () => {
|
it('should send a reset link if the user exists', async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
vi.mocked(db.userRepo.findUserByEmail).mockResolvedValue(
|
mockedAuthService.resetPassword.mockResolvedValue('mock-reset-token');
|
||||||
createMockUserWithPasswordHash({ user_id: 'user-123', email: 'test@test.com' }),
|
|
||||||
);
|
|
||||||
vi.mocked(db.userRepo.createPasswordResetToken).mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
@@ -432,7 +383,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should return a generic success message even if the user does not exist', async () => {
|
it('should return a generic success message even if the user does not exist', async () => {
|
||||||
vi.mocked(db.userRepo.findUserByEmail).mockResolvedValue(undefined);
|
mockedAuthService.resetPassword.mockResolvedValue(undefined);
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/forgot-password')
|
.post('/api/auth/forgot-password')
|
||||||
@@ -443,7 +394,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should return 500 if the database call fails', async () => {
|
it('should return 500 if the database call fails', async () => {
|
||||||
vi.mocked(db.userRepo.findUserByEmail).mockRejectedValue(new Error('DB connection failed'));
|
mockedAuthService.resetPassword.mockRejectedValue(new Error('DB connection failed'));
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/forgot-password')
|
.post('/api/auth/forgot-password')
|
||||||
.send({ email: 'any@test.com' });
|
.send({ email: 'any@test.com' });
|
||||||
@@ -451,25 +402,6 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
expect(response.status).toBe(500);
|
expect(response.status).toBe(500);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should still return 200 OK if the email service fails', async () => {
|
|
||||||
// Arrange
|
|
||||||
vi.mocked(db.userRepo.findUserByEmail).mockResolvedValue(
|
|
||||||
createMockUserWithPasswordHash({ user_id: 'user-123', email: 'test@test.com' }),
|
|
||||||
);
|
|
||||||
vi.mocked(db.userRepo.createPasswordResetToken).mockResolvedValue(undefined);
|
|
||||||
// Mock the email service to fail
|
|
||||||
const { sendPasswordResetEmail } = await import('../services/emailService.server');
|
|
||||||
vi.mocked(sendPasswordResetEmail).mockRejectedValue(new Error('SMTP server down'));
|
|
||||||
|
|
||||||
// Act
|
|
||||||
const response = await supertest(app)
|
|
||||||
.post('/api/auth/forgot-password')
|
|
||||||
.send({ email: 'test@test.com' });
|
|
||||||
|
|
||||||
// Assert: The route should not fail even if the email does.
|
|
||||||
expect(response.status).toBe(200);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return 400 for an invalid email format', async () => {
|
it('should return 400 for an invalid email format', async () => {
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/forgot-password')
|
.post('/api/auth/forgot-password')
|
||||||
@@ -482,16 +414,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
|
|
||||||
describe('POST /reset-password', () => {
|
describe('POST /reset-password', () => {
|
||||||
it('should reset the password with a valid token and strong password', async () => {
|
it('should reset the password with a valid token and strong password', async () => {
|
||||||
const tokenRecord = {
|
mockedAuthService.updatePassword.mockResolvedValue(true);
|
||||||
user_id: 'user-123',
|
|
||||||
token_hash: 'hashed-token',
|
|
||||||
expires_at: new Date(Date.now() + 3600000),
|
|
||||||
};
|
|
||||||
vi.mocked(db.userRepo.getValidResetTokens).mockResolvedValue([tokenRecord]); // This was a duplicate, fixed.
|
|
||||||
vi.mocked(bcrypt.compare).mockResolvedValue(true as never); // Token matches
|
|
||||||
vi.mocked(db.userRepo.updateUserPassword).mockResolvedValue(undefined);
|
|
||||||
vi.mocked(db.userRepo.deleteResetToken).mockResolvedValue(undefined);
|
|
||||||
vi.mocked(db.adminRepo.logActivity).mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/reset-password')
|
.post('/api/auth/reset-password')
|
||||||
@@ -502,7 +425,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should reject with an invalid or expired token', async () => {
|
it('should reject with an invalid or expired token', async () => {
|
||||||
vi.mocked(db.userRepo.getValidResetTokens).mockResolvedValue([]); // No valid tokens found
|
mockedAuthService.updatePassword.mockResolvedValue(null);
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/reset-password')
|
.post('/api/auth/reset-password')
|
||||||
@@ -512,31 +435,8 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
expect(response.body.message).toBe('Invalid or expired password reset token.');
|
expect(response.body.message).toBe('Invalid or expired password reset token.');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should reject if token does not match any valid tokens in DB', async () => {
|
|
||||||
const tokenRecord = {
|
|
||||||
user_id: 'user-123',
|
|
||||||
token_hash: 'hashed-token',
|
|
||||||
expires_at: new Date(Date.now() + 3600000),
|
|
||||||
};
|
|
||||||
vi.mocked(db.userRepo.getValidResetTokens).mockResolvedValue([tokenRecord]);
|
|
||||||
vi.mocked(bcrypt.compare).mockResolvedValue(false as never); // Token does not match
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
|
||||||
.post('/api/auth/reset-password')
|
|
||||||
.send({ token: 'wrong-token', newPassword: 'a-Very-Strong-Password-123!' });
|
|
||||||
|
|
||||||
expect(response.status).toBe(400);
|
|
||||||
expect(response.body.message).toBe('Invalid or expired password reset token.');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return 400 for a weak new password', async () => {
|
it('should return 400 for a weak new password', async () => {
|
||||||
const tokenRecord = {
|
// No need to mock the service here as validation runs first
|
||||||
user_id: 'user-123',
|
|
||||||
token_hash: 'hashed-token',
|
|
||||||
expires_at: new Date(Date.now() + 3600000),
|
|
||||||
};
|
|
||||||
vi.mocked(db.userRepo.getValidResetTokens).mockResolvedValue([tokenRecord]);
|
|
||||||
vi.mocked(bcrypt.compare).mockResolvedValue(true as never);
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/reset-password')
|
.post('/api/auth/reset-password')
|
||||||
@@ -556,11 +456,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
|
|
||||||
describe('POST /refresh-token', () => {
|
describe('POST /refresh-token', () => {
|
||||||
it('should issue a new access token with a valid refresh token cookie', async () => {
|
it('should issue a new access token with a valid refresh token cookie', async () => {
|
||||||
const mockUser = createMockUserWithPasswordHash({
|
mockedAuthService.refreshAccessToken.mockResolvedValue({ accessToken: 'new-access-token' });
|
||||||
user_id: 'user-123',
|
|
||||||
email: 'test@test.com',
|
|
||||||
});
|
|
||||||
vi.mocked(db.userRepo.findUserByRefreshToken).mockResolvedValue(mockUser);
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/refresh-token')
|
.post('/api/auth/refresh-token')
|
||||||
@@ -577,8 +473,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('should return 403 if refresh token is invalid', async () => {
|
it('should return 403 if refresh token is invalid', async () => {
|
||||||
// Mock finding no user for this token, which should trigger the 403 logic
|
mockedAuthService.refreshAccessToken.mockResolvedValue(null);
|
||||||
vi.mocked(db.userRepo.findUserByRefreshToken).mockResolvedValue(undefined as any);
|
|
||||||
|
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
.post('/api/auth/refresh-token')
|
.post('/api/auth/refresh-token')
|
||||||
@@ -589,7 +484,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
|
|
||||||
it('should return 500 if the database call fails', async () => {
|
it('should return 500 if the database call fails', async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
vi.mocked(db.userRepo.findUserByRefreshToken).mockRejectedValue(new Error('DB Error'));
|
mockedAuthService.refreshAccessToken.mockRejectedValue(new Error('DB Error'));
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
@@ -603,7 +498,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
describe('POST /logout', () => {
|
describe('POST /logout', () => {
|
||||||
it('should clear the refresh token cookie and return a success message', async () => {
|
it('should clear the refresh token cookie and return a success message', async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
vi.mocked(db.userRepo.deleteRefreshToken).mockResolvedValue(undefined);
|
mockedAuthService.logout.mockResolvedValue(undefined);
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
const response = await supertest(app)
|
const response = await supertest(app)
|
||||||
@@ -618,13 +513,15 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
const setCookieHeader = response.headers['set-cookie'];
|
const setCookieHeader = response.headers['set-cookie'];
|
||||||
expect(setCookieHeader).toBeDefined();
|
expect(setCookieHeader).toBeDefined();
|
||||||
expect(setCookieHeader[0]).toContain('refreshToken=;');
|
expect(setCookieHeader[0]).toContain('refreshToken=;');
|
||||||
expect(setCookieHeader[0]).toContain('Expires=Thu, 01 Jan 1970');
|
// Check for Max-Age=0, which is the modern way to expire a cookie.
|
||||||
|
// The 'Expires' attribute is a fallback and its exact value can be inconsistent.
|
||||||
|
expect(setCookieHeader[0]).toContain('Max-Age=0');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should still return 200 OK even if deleting the refresh token from DB fails', async () => {
|
it('should still return 200 OK even if deleting the refresh token from DB fails', async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const dbError = new Error('DB connection lost');
|
const dbError = new Error('DB connection lost');
|
||||||
vi.mocked(db.userRepo.deleteRefreshToken).mockRejectedValue(dbError);
|
mockedAuthService.logout.mockRejectedValue(dbError);
|
||||||
const { logger } = await import('../services/logger.server');
|
const { logger } = await import('../services/logger.server');
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
@@ -636,7 +533,7 @@ describe('Auth Routes (/api/auth)', () => {
|
|||||||
expect(response.status).toBe(200);
|
expect(response.status).toBe(200);
|
||||||
expect(logger.error).toHaveBeenCalledWith(
|
expect(logger.error).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({ error: dbError }),
|
expect.objectContaining({ error: dbError }),
|
||||||
'Failed to delete refresh token from DB during logout.',
|
'Logout token invalidation failed in background.',
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,52 +1,18 @@
|
|||||||
// src/routes/auth.routes.ts
|
// src/routes/auth.routes.ts
|
||||||
import { Router, Request, Response, NextFunction } from 'express';
|
import { Router, Request, Response, NextFunction } from 'express';
|
||||||
import * as bcrypt from 'bcrypt';
|
|
||||||
import zxcvbn from 'zxcvbn';
|
|
||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
import jwt from 'jsonwebtoken';
|
|
||||||
import crypto from 'crypto';
|
|
||||||
import rateLimit from 'express-rate-limit';
|
import rateLimit from 'express-rate-limit';
|
||||||
|
import passport from './passport.routes';
|
||||||
import passport from './passport.routes'; // Corrected import path
|
import { UniqueConstraintError } from '../services/db/errors.db'; // Import actual class for instanceof checks
|
||||||
import { userRepo, adminRepo } from '../services/db/index.db';
|
|
||||||
import { UniqueConstraintError } from '../services/db/errors.db';
|
|
||||||
import { getPool } from '../services/db/connection.db';
|
|
||||||
import { logger } from '../services/logger.server';
|
import { logger } from '../services/logger.server';
|
||||||
import { sendPasswordResetEmail } from '../services/emailService.server';
|
|
||||||
import { validateRequest } from '../middleware/validation.middleware';
|
import { validateRequest } from '../middleware/validation.middleware';
|
||||||
import type { UserProfile } from '../types';
|
import type { UserProfile } from '../types';
|
||||||
|
import { validatePasswordStrength } from '../utils/authUtils';
|
||||||
|
import { requiredString } from '../utils/zodUtils';
|
||||||
|
|
||||||
|
import { authService } from '../services/authService';
|
||||||
const router = Router();
|
const router = Router();
|
||||||
|
|
||||||
const JWT_SECRET = process.env.JWT_SECRET!;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validates the strength of a password using zxcvbn.
|
|
||||||
* @param password The password to check.
|
|
||||||
* @returns An object with `isValid` and an optional `feedback` message.
|
|
||||||
*/
|
|
||||||
const validatePasswordStrength = (password: string): { isValid: boolean; feedback?: string } => {
|
|
||||||
const MIN_PASSWORD_SCORE = 3; // Require a 'Good' or 'Strong' password (score 3 or 4)
|
|
||||||
const strength = zxcvbn(password);
|
|
||||||
|
|
||||||
if (strength.score < MIN_PASSWORD_SCORE) {
|
|
||||||
const feedbackMessage =
|
|
||||||
strength.feedback.warning ||
|
|
||||||
(strength.feedback.suggestions && strength.feedback.suggestions[0]);
|
|
||||||
return {
|
|
||||||
isValid: false,
|
|
||||||
feedback:
|
|
||||||
`Password is too weak. ${feedbackMessage || 'Please choose a stronger password.'}`.trim(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return { isValid: true };
|
|
||||||
};
|
|
||||||
|
|
||||||
// Helper for consistent required string validation (handles missing/null/empty)
|
|
||||||
const requiredString = (message: string) =>
|
|
||||||
z.preprocess((val) => val ?? '', z.string().min(1, message));
|
|
||||||
|
|
||||||
// Conditionally disable rate limiting for the test environment
|
// Conditionally disable rate limiting for the test environment
|
||||||
const isTestEnv = process.env.NODE_ENV === 'test';
|
const isTestEnv = process.env.NODE_ENV === 'test';
|
||||||
|
|
||||||
@@ -57,7 +23,9 @@ const forgotPasswordLimiter = rateLimit({
|
|||||||
message: 'Too many password reset requests from this IP, please try again after 15 minutes.',
|
message: 'Too many password reset requests from this IP, please try again after 15 minutes.',
|
||||||
standardHeaders: true,
|
standardHeaders: true,
|
||||||
legacyHeaders: false,
|
legacyHeaders: false,
|
||||||
skip: () => isTestEnv, // Skip this middleware if in test environment
|
// Do not skip in test environment so we can write integration tests for it.
|
||||||
|
// The limiter uses an in-memory store by default, so counts are reset when the test server restarts.
|
||||||
|
// skip: () => isTestEnv,
|
||||||
});
|
});
|
||||||
|
|
||||||
const resetPasswordLimiter = rateLimit({
|
const resetPasswordLimiter = rateLimit({
|
||||||
@@ -69,25 +37,29 @@ const resetPasswordLimiter = rateLimit({
|
|||||||
skip: () => isTestEnv, // Skip this middleware if in test environment
|
skip: () => isTestEnv, // Skip this middleware if in test environment
|
||||||
});
|
});
|
||||||
|
|
||||||
// --- Zod Schemas for Auth Routes (as per ADR-003) ---
|
|
||||||
|
|
||||||
const registerSchema = z.object({
|
const registerSchema = z.object({
|
||||||
body: z.object({
|
body: z.object({
|
||||||
email: z.string().email('A valid email is required.'),
|
// Sanitize email by trimming and converting to lowercase.
|
||||||
|
email: z.string().trim().toLowerCase().email('A valid email is required.'),
|
||||||
password: z
|
password: z
|
||||||
.string()
|
.string()
|
||||||
|
.trim() // Prevent leading/trailing whitespace in passwords.
|
||||||
.min(8, 'Password must be at least 8 characters long.')
|
.min(8, 'Password must be at least 8 characters long.')
|
||||||
.superRefine((password, ctx) => {
|
.superRefine((password, ctx) => {
|
||||||
const strength = validatePasswordStrength(password);
|
const strength = validatePasswordStrength(password);
|
||||||
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
|
if (!strength.isValid) ctx.addIssue({ code: 'custom', message: strength.feedback });
|
||||||
}),
|
}),
|
||||||
full_name: z.string().optional(),
|
// Sanitize optional string inputs.
|
||||||
avatar_url: z.string().url().optional(),
|
full_name: z.string().trim().optional(),
|
||||||
|
avatar_url: z.string().trim().url().optional(),
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
const forgotPasswordSchema = z.object({
|
const forgotPasswordSchema = z.object({
|
||||||
body: z.object({ email: z.string().email('A valid email is required.') }),
|
body: z.object({
|
||||||
|
// Sanitize email by trimming and converting to lowercase.
|
||||||
|
email: z.string().trim().toLowerCase().email('A valid email is required.'),
|
||||||
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
const resetPasswordSchema = z.object({
|
const resetPasswordSchema = z.object({
|
||||||
@@ -95,6 +67,7 @@ const resetPasswordSchema = z.object({
|
|||||||
token: requiredString('Token is required.'),
|
token: requiredString('Token is required.'),
|
||||||
newPassword: z
|
newPassword: z
|
||||||
.string()
|
.string()
|
||||||
|
.trim() // Prevent leading/trailing whitespace in passwords.
|
||||||
.min(8, 'Password must be at least 8 characters long.')
|
.min(8, 'Password must be at least 8 characters long.')
|
||||||
.superRefine((password, ctx) => {
|
.superRefine((password, ctx) => {
|
||||||
const strength = validatePasswordStrength(password);
|
const strength = validatePasswordStrength(password);
|
||||||
@@ -116,39 +89,14 @@ router.post(
|
|||||||
} = req as unknown as RegisterRequest;
|
} = req as unknown as RegisterRequest;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const saltRounds = 10;
|
const { newUserProfile, accessToken, refreshToken } = await authService.registerAndLoginUser(
|
||||||
const hashedPassword = await bcrypt.hash(password, saltRounds);
|
|
||||||
logger.info(`Hashing password for new user: ${email}`);
|
|
||||||
|
|
||||||
// The createUser method in UserRepository now handles its own transaction.
|
|
||||||
const newUser = await userRepo.createUser(
|
|
||||||
email,
|
email,
|
||||||
hashedPassword,
|
password,
|
||||||
{ full_name, avatar_url },
|
full_name,
|
||||||
|
avatar_url,
|
||||||
req.log,
|
req.log,
|
||||||
);
|
);
|
||||||
|
|
||||||
const userEmail = newUser.user.email;
|
|
||||||
const userId = newUser.user.user_id;
|
|
||||||
logger.info(`Successfully created new user in DB: ${userEmail} (ID: ${userId})`);
|
|
||||||
|
|
||||||
// Use the new standardized logging function
|
|
||||||
await adminRepo.logActivity(
|
|
||||||
{
|
|
||||||
userId: newUser.user.user_id,
|
|
||||||
action: 'user_registered',
|
|
||||||
displayText: `${userEmail} has registered.`,
|
|
||||||
icon: 'user-plus',
|
|
||||||
},
|
|
||||||
req.log,
|
|
||||||
);
|
|
||||||
|
|
||||||
const payload = { user_id: newUser.user.user_id, email: userEmail };
|
|
||||||
const token = jwt.sign(payload, JWT_SECRET, { expiresIn: '1h' });
|
|
||||||
|
|
||||||
const refreshToken = crypto.randomBytes(64).toString('hex');
|
|
||||||
await userRepo.saveRefreshToken(newUser.user.user_id, refreshToken, req.log);
|
|
||||||
|
|
||||||
res.cookie('refreshToken', refreshToken, {
|
res.cookie('refreshToken', refreshToken, {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: process.env.NODE_ENV === 'production',
|
secure: process.env.NODE_ENV === 'production',
|
||||||
@@ -156,14 +104,14 @@ router.post(
|
|||||||
});
|
});
|
||||||
return res
|
return res
|
||||||
.status(201)
|
.status(201)
|
||||||
.json({ message: 'User registered successfully!', userprofile: newUser, token });
|
.json({ message: 'User registered successfully!', userprofile: newUserProfile, token: accessToken });
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
if (error instanceof UniqueConstraintError) {
|
if (error instanceof UniqueConstraintError) {
|
||||||
// If the email is a duplicate, return a 409 Conflict status.
|
// If the email is a duplicate, return a 409 Conflict status.
|
||||||
return res.status(409).json({ message: error.message });
|
return res.status(409).json({ message: error.message });
|
||||||
}
|
}
|
||||||
// The createUser method now handles its own transaction logging, so we just log the route failure.
|
|
||||||
logger.error({ error }, `User registration route failed for email: ${email}.`);
|
logger.error({ error }, `User registration route failed for email: ${email}.`);
|
||||||
|
// Pass the error to the centralized handler
|
||||||
return next(error);
|
return next(error);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -182,17 +130,6 @@ router.post('/login', (req: Request, res: Response, next: NextFunction) => {
|
|||||||
if (user) req.log.debug({ user }, '[API /login] Passport user object:'); // Log the user object passport returns
|
if (user) req.log.debug({ user }, '[API /login] Passport user object:'); // Log the user object passport returns
|
||||||
if (user) req.log.info({ user }, '[API /login] Passport reported USER FOUND.');
|
if (user) req.log.info({ user }, '[API /login] Passport reported USER FOUND.');
|
||||||
|
|
||||||
try {
|
|
||||||
const allUsersInDb = await getPool().query(
|
|
||||||
'SELECT u.user_id, u.email, p.role FROM public.users u JOIN public.profiles p ON u.user_id = p.user_id',
|
|
||||||
);
|
|
||||||
req.log.debug('[API /login] Current users in DB from SERVER perspective:');
|
|
||||||
console.table(allUsersInDb.rows);
|
|
||||||
} catch (dbError) {
|
|
||||||
req.log.error({ dbError }, '[API /login] Could not query users table for debugging.');
|
|
||||||
}
|
|
||||||
// --- END DEBUG LOGGING ---
|
|
||||||
const { rememberMe } = req.body;
|
|
||||||
if (err) {
|
if (err) {
|
||||||
req.log.error(
|
req.log.error(
|
||||||
{ error: err },
|
{ error: err },
|
||||||
@@ -204,33 +141,24 @@ router.post('/login', (req: Request, res: Response, next: NextFunction) => {
|
|||||||
return res.status(401).json({ message: info.message || 'Login failed' });
|
return res.status(401).json({ message: info.message || 'Login failed' });
|
||||||
}
|
}
|
||||||
|
|
||||||
const userProfile = user as UserProfile;
|
|
||||||
const payload = {
|
|
||||||
user_id: userProfile.user.user_id,
|
|
||||||
email: userProfile.user.email,
|
|
||||||
role: userProfile.role,
|
|
||||||
};
|
|
||||||
const accessToken = jwt.sign(payload, JWT_SECRET, { expiresIn: '15m' });
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const refreshToken = crypto.randomBytes(64).toString('hex'); // This was a duplicate, fixed.
|
const { rememberMe } = req.body;
|
||||||
await userRepo.saveRefreshToken(userProfile.user.user_id, refreshToken, req.log);
|
const userProfile = user as UserProfile;
|
||||||
|
const { accessToken, refreshToken } = await authService.handleSuccessfulLogin(userProfile, req.log);
|
||||||
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
|
req.log.info(`JWT and refresh token issued for user: ${userProfile.user.email}`);
|
||||||
|
|
||||||
const cookieOptions = {
|
const cookieOptions = {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: process.env.NODE_ENV === 'production',
|
secure: process.env.NODE_ENV === 'production',
|
||||||
maxAge: rememberMe ? 30 * 24 * 60 * 60 * 1000 : undefined,
|
maxAge: rememberMe ? 30 * 24 * 60 * 60 * 1000 : undefined, // 30 days
|
||||||
};
|
};
|
||||||
|
|
||||||
res.cookie('refreshToken', refreshToken, cookieOptions);
|
res.cookie('refreshToken', refreshToken, cookieOptions);
|
||||||
// Return the full user profile object on login to avoid a second fetch on the client.
|
// Return the full user profile object on login to avoid a second fetch on the client.
|
||||||
return res.json({ userprofile: userProfile, token: accessToken });
|
return res.json({ userprofile: userProfile, token: accessToken });
|
||||||
} catch (tokenErr) {
|
} catch (tokenErr) {
|
||||||
req.log.error(
|
const email = (user as UserProfile)?.user?.email || req.body.email;
|
||||||
{ error: tokenErr },
|
req.log.error({ error: tokenErr }, `Failed to process login for user: ${email}`);
|
||||||
`Failed to save refresh token during login for user: ${userProfile.user.email}`,
|
|
||||||
);
|
|
||||||
return next(tokenErr);
|
return next(tokenErr);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -249,38 +177,14 @@ router.post(
|
|||||||
} = req as unknown as ForgotPasswordRequest;
|
} = req as unknown as ForgotPasswordRequest;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
req.log.debug(`[API /forgot-password] Received request for email: ${email}`);
|
// The service handles finding the user, creating the token, and sending the email.
|
||||||
const user = await userRepo.findUserByEmail(email, req.log);
|
const token = await authService.resetPassword(email, req.log);
|
||||||
let token: string | undefined;
|
|
||||||
req.log.debug(
|
|
||||||
{ user: user ? { user_id: user.user_id, email: user.email } : 'NOT FOUND' },
|
|
||||||
`[API /forgot-password] Database search result for ${email}:`,
|
|
||||||
);
|
|
||||||
|
|
||||||
if (user) {
|
|
||||||
token = crypto.randomBytes(32).toString('hex');
|
|
||||||
const saltRounds = 10;
|
|
||||||
const tokenHash = await bcrypt.hash(token, saltRounds);
|
|
||||||
const expiresAt = new Date(Date.now() + 3600000); // 1 hour
|
|
||||||
|
|
||||||
await userRepo.createPasswordResetToken(user.user_id, tokenHash, expiresAt, req.log);
|
|
||||||
|
|
||||||
const resetLink = `${process.env.FRONTEND_URL}/reset-password/${token}`;
|
|
||||||
|
|
||||||
try {
|
|
||||||
await sendPasswordResetEmail(email, resetLink, req.log);
|
|
||||||
} catch (emailError) {
|
|
||||||
req.log.error({ emailError }, `Email send failure during password reset for user`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
req.log.warn(`Password reset requested for non-existent email: ${email}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// For testability, return the token in the response only in the test environment.
|
// For testability, return the token in the response only in the test environment.
|
||||||
const responsePayload: { message: string; token?: string } = {
|
const responsePayload: { message: string; token?: string } = {
|
||||||
message: 'If an account with that email exists, a password reset link has been sent.',
|
message: 'If an account with that email exists, a password reset link has been sent.',
|
||||||
};
|
};
|
||||||
if (process.env.NODE_ENV === 'test' && user) responsePayload.token = token;
|
if (process.env.NODE_ENV === 'test' && token) responsePayload.token = token;
|
||||||
res.status(200).json(responsePayload);
|
res.status(200).json(responsePayload);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
req.log.error({ error }, `An error occurred during /forgot-password for email: ${email}`);
|
req.log.error({ error }, `An error occurred during /forgot-password for email: ${email}`);
|
||||||
@@ -301,38 +205,12 @@ router.post(
|
|||||||
} = req as unknown as ResetPasswordRequest;
|
} = req as unknown as ResetPasswordRequest;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const validTokens = await userRepo.getValidResetTokens(req.log);
|
const resetSuccessful = await authService.updatePassword(token, newPassword, req.log);
|
||||||
let tokenRecord;
|
|
||||||
for (const record of validTokens) {
|
|
||||||
const isMatch = await bcrypt.compare(token, record.token_hash);
|
|
||||||
if (isMatch) {
|
|
||||||
tokenRecord = record;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!tokenRecord) {
|
if (!resetSuccessful) {
|
||||||
return res.status(400).json({ message: 'Invalid or expired password reset token.' });
|
return res.status(400).json({ message: 'Invalid or expired password reset token.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
const saltRounds = 10;
|
|
||||||
const hashedPassword = await bcrypt.hash(newPassword, saltRounds);
|
|
||||||
|
|
||||||
await userRepo.updateUserPassword(tokenRecord.user_id, hashedPassword, req.log);
|
|
||||||
await userRepo.deleteResetToken(tokenRecord.token_hash, req.log);
|
|
||||||
|
|
||||||
// Log this security event after a successful password reset.
|
|
||||||
await adminRepo.logActivity(
|
|
||||||
{
|
|
||||||
userId: tokenRecord.user_id,
|
|
||||||
action: 'password_reset',
|
|
||||||
displayText: `User ID ${tokenRecord.user_id} has reset their password.`,
|
|
||||||
icon: 'key',
|
|
||||||
details: { source_ip: req.ip ?? null },
|
|
||||||
},
|
|
||||||
req.log,
|
|
||||||
);
|
|
||||||
|
|
||||||
res.status(200).json({ message: 'Password has been reset successfully.' });
|
res.status(200).json({ message: 'Password has been reset successfully.' });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
req.log.error({ error }, `An error occurred during password reset.`);
|
req.log.error({ error }, `An error occurred during password reset.`);
|
||||||
@@ -349,15 +227,11 @@ router.post('/refresh-token', async (req: Request, res: Response, next: NextFunc
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const user = await userRepo.findUserByRefreshToken(refreshToken, req.log);
|
const result = await authService.refreshAccessToken(refreshToken, req.log);
|
||||||
if (!user) {
|
if (!result) {
|
||||||
return res.status(403).json({ message: 'Invalid or expired refresh token.' });
|
return res.status(403).json({ message: 'Invalid or expired refresh token.' });
|
||||||
}
|
}
|
||||||
|
res.json({ token: result.accessToken });
|
||||||
const payload = { user_id: user.user_id, email: user.email };
|
|
||||||
const newAccessToken = jwt.sign(payload, JWT_SECRET, { expiresIn: '15m' });
|
|
||||||
|
|
||||||
res.json({ token: newAccessToken });
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
req.log.error({ error }, 'An error occurred during /refresh-token.');
|
req.log.error({ error }, 'An error occurred during /refresh-token.');
|
||||||
next(error);
|
next(error);
|
||||||
@@ -374,14 +248,14 @@ router.post('/logout', async (req: Request, res: Response) => {
|
|||||||
if (refreshToken) {
|
if (refreshToken) {
|
||||||
// Invalidate the token in the database so it cannot be used again.
|
// Invalidate the token in the database so it cannot be used again.
|
||||||
// We don't need to wait for this to finish to respond to the user.
|
// We don't need to wait for this to finish to respond to the user.
|
||||||
userRepo.deleteRefreshToken(refreshToken, req.log).catch((err: Error) => {
|
authService.logout(refreshToken, req.log).catch((err: Error) => {
|
||||||
req.log.error({ error: err }, 'Failed to delete refresh token from DB during logout.');
|
req.log.error({ error: err }, 'Logout token invalidation failed in background.');
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
// Instruct the browser to clear the cookie by setting its expiration to the past.
|
// Instruct the browser to clear the cookie by setting its expiration to the past.
|
||||||
res.cookie('refreshToken', '', {
|
res.cookie('refreshToken', '', {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
expires: new Date(0),
|
maxAge: 0, // Use maxAge for modern compatibility; Express sets 'Expires' as a fallback.
|
||||||
secure: process.env.NODE_ENV === 'production',
|
secure: process.env.NODE_ENV === 'production',
|
||||||
});
|
});
|
||||||
res.status(200).json({ message: 'Logged out successfully.' });
|
res.status(200).json({ message: 'Logged out successfully.' });
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ import {
|
|||||||
createMockBudget,
|
createMockBudget,
|
||||||
createMockSpendingByCategory,
|
createMockSpendingByCategory,
|
||||||
} from '../tests/utils/mockFactories';
|
} from '../tests/utils/mockFactories';
|
||||||
import { mockLogger } from '../tests/utils/mockLogger';
|
|
||||||
import { createTestApp } from '../tests/utils/createTestApp';
|
import { createTestApp } from '../tests/utils/createTestApp';
|
||||||
import { ForeignKeyConstraintError, NotFoundError } from '../services/db/errors.db';
|
import { ForeignKeyConstraintError, NotFoundError } from '../services/db/errors.db';
|
||||||
// 1. Mock the Service Layer directly.
|
// 1. Mock the Service Layer directly.
|
||||||
@@ -26,8 +25,9 @@ vi.mock('../services/db/index.db', () => ({
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock the logger to keep test output clean
|
// Mock the logger to keep test output clean
|
||||||
vi.mock('../services/logger.server', () => ({
|
vi.mock('../services/logger.server', async () => ({
|
||||||
logger: mockLogger,
|
// Use async import to avoid hoisting issues with mockLogger
|
||||||
|
logger: (await import('../tests/utils/mockLogger')).mockLogger,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Import the router and mocked DB AFTER all mocks are defined.
|
// Import the router and mocked DB AFTER all mocks are defined.
|
||||||
@@ -69,17 +69,7 @@ describe('Budget Routes (/api/budgets)', () => {
|
|||||||
vi.mocked(db.budgetRepo.getSpendingByCategory).mockResolvedValue([]);
|
vi.mocked(db.budgetRepo.getSpendingByCategory).mockResolvedValue([]);
|
||||||
});
|
});
|
||||||
|
|
||||||
const app = createTestApp({
|
const app = createTestApp({ router: budgetRouter, basePath: '/api/budgets', authenticatedUser: mockUserProfile });
|
||||||
router: budgetRouter,
|
|
||||||
basePath: '/api/budgets',
|
|
||||||
authenticatedUser: mockUser,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Add a basic error handler to capture errors passed to next(err) and return JSON.
|
|
||||||
// This prevents unhandled error crashes in tests and ensures we get the 500 response we expect.
|
|
||||||
app.use((err: any, req: any, res: any, next: any) => {
|
|
||||||
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('GET /', () => {
|
describe('GET /', () => {
|
||||||
it('should return a list of budgets for the user', async () => {
|
it('should return a list of budgets for the user', async () => {
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user