Compare commits

..

17 Commits

Author SHA1 Message Date
Gitea Actions
c4bbf5c251 ci: Bump version to 0.1.6 [skip ci] 2025-12-25 07:19:39 +05:00
32a9e6732b Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 3h0m53s
2025-12-24 18:18:35 -08:00
e7c076e2ed test repair 2025-12-24 18:18:28 -08:00
Gitea Actions
dbe8e72efe ci: Bump version to 0.1.5 [skip ci] 2025-12-25 06:13:16 +05:00
38bd193042 not sure why those errors got removed we'll see
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Has been cancelled
2025-12-24 17:12:20 -08:00
Gitea Actions
57215e2778 ci: Bump version to 0.1.4 [skip ci] 2025-12-25 06:04:17 +05:00
2c1de24e9a undo stupid logging change
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 1m21s
2025-12-24 16:54:56 -08:00
c8baff7aac Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com 2025-12-24 16:17:45 -08:00
de3f21a7ec not sure why those errors got removed we'll see 2025-12-24 16:16:42 -08:00
Gitea Actions
c6adbf79e7 ci: Bump version to 0.1.3 [skip ci] 2025-12-25 02:26:17 +05:00
7399a27600 add ai agent fallbacks
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 3h14m13s
2025-12-24 13:25:18 -08:00
Gitea Actions
68aadcaa4e ci: Bump version to 0.1.2 [skip ci] 2025-12-25 01:41:06 +05:00
971d2c3fa7 add ai agent fallbacks
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 1m18s
2025-12-24 12:39:15 -08:00
Gitea Actions
daaacfde5e ci: Bump version to 0.1.1 [skip ci] 2025-12-24 23:53:27 +05:00
7ac8fe1d29 Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com
Some checks are pending
Deploy to Test Environment / deploy-to-test (push) Has started running
2025-12-24 10:52:13 -08:00
a2462dfb6b testing push to prod etc 2025-12-24 10:51:43 -08:00
Gitea Actions
a911224fb4 ci: Bump version to 0.1.0 for production release [skip ci] 2025-12-24 23:24:53 +05:00
31 changed files with 803 additions and 309 deletions

View File

@@ -93,8 +93,9 @@ jobs:
exit 1 exit 1
fi fi
GITEA_SERVER_URL="https://gitea.projectium.com" GITEA_SERVER_URL="https://gitea.projectium.com"
COMMIT_MESSAGE=$(git log -1 --pretty=%s) COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \ PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \ VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \ VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
@@ -162,7 +163,12 @@ jobs:
echo "Updating schema hash in production database..." echo "Updating schema hash in production database..."
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }') CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \ PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW()) "CREATE TABLE IF NOT EXISTS public.schema_info (
environment VARCHAR(50) PRIMARY KEY,
schema_hash VARCHAR(64) NOT NULL,
deployed_at TIMESTAMP DEFAULT NOW()
);
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('production', '$CURRENT_HASH', NOW())
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();" ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A) UPDATED_HASH=$(PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT schema_hash FROM public.schema_info WHERE environment = 'production';" -t -A)

View File

@@ -282,6 +282,9 @@ jobs:
if [ -z "$DEPLOYED_HASH" ]; then if [ -z "$DEPLOYED_HASH" ]; then
echo "WARNING: No schema hash found in the test database." echo "WARNING: No schema hash found in the test database."
echo "This is expected for a first-time deployment. The hash will be set after a successful deployment." echo "This is expected for a first-time deployment. The hash will be set after a successful deployment."
echo "--- Debug: Dumping schema_info table ---"
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=0 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c "SELECT * FROM public.schema_info;" || true
echo "----------------------------------------"
# We allow the deployment to continue, but a manual schema update is required. # We allow the deployment to continue, but a manual schema update is required.
# You could choose to fail here by adding `exit 1`. # You could choose to fail here by adding `exit 1`.
elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then elif [ "$CURRENT_HASH" != "$DEPLOYED_HASH" ]; then
@@ -305,8 +308,9 @@ jobs:
fi fi
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
COMMIT_MESSAGE=$(git log -1 --pretty=%s) COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \ PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \ VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \ VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
@@ -380,7 +384,12 @@ jobs:
echo "Updating schema hash in test database..." echo "Updating schema hash in test database..."
CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }') CURRENT_HASH=$(cat sql/master_schema_rollup.sql | dos2unix | sha256sum | awk '{ print $1 }')
PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \ PGPASSWORD="$DB_PASSWORD" psql -v ON_ERROR_STOP=1 -h "$DB_HOST" -p 5432 -U "$DB_USER" -d "$DB_NAME" -c \
"INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW()) "CREATE TABLE IF NOT EXISTS public.schema_info (
environment VARCHAR(50) PRIMARY KEY,
schema_hash VARCHAR(64) NOT NULL,
deployed_at TIMESTAMP DEFAULT NOW()
);
INSERT INTO public.schema_info (environment, schema_hash, deployed_at) VALUES ('test', '$CURRENT_HASH', NOW())
ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();" ON CONFLICT (environment) DO UPDATE SET schema_hash = EXCLUDED.schema_hash, deployed_at = NOW();"
# Verify the hash was updated # Verify the hash was updated

View File

@@ -92,8 +92,9 @@ jobs:
exit 1 exit 1
fi fi
GITEA_SERVER_URL="https://gitea.projectium.com" GITEA_SERVER_URL="https://gitea.projectium.com"
COMMIT_MESSAGE=$(git log -1 --pretty=%s) COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD)" \ PACKAGE_VERSION=$(node -p "require('./package.json').version")
VITE_APP_VERSION="$(date +'%Y%m%d-%H%M'):$(git rev-parse --short HEAD):$PACKAGE_VERSION" \
VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \ VITE_APP_COMMIT_URL="$GITEA_SERVER_URL/${{ gitea.repository }}/commit/${{ gitea.sha }}" \
VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \ VITE_APP_COMMIT_MESSAGE="$COMMIT_MESSAGE" \
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build

View File

@@ -18,12 +18,46 @@ module.exports = {
NODE_ENV: 'production', // Set the Node.js environment to production NODE_ENV: 'production', // Set the Node.js environment to production
name: 'flyer-crawler-api', name: 'flyer-crawler-api',
cwd: '/var/www/flyer-crawler.projectium.com', cwd: '/var/www/flyer-crawler.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
// Test Environment Settings // Test Environment Settings
env_test: { env_test: {
NODE_ENV: 'test', // Set to 'test' to match the environment purpose and disable pino-pretty NODE_ENV: 'test', // Set to 'test' to match the environment purpose and disable pino-pretty
name: 'flyer-crawler-api-test', name: 'flyer-crawler-api-test',
cwd: '/var/www/flyer-crawler-test.projectium.com', cwd: '/var/www/flyer-crawler-test.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
// Development Environment Settings // Development Environment Settings
env_development: { env_development: {
@@ -31,6 +65,23 @@ module.exports = {
name: 'flyer-crawler-api-dev', name: 'flyer-crawler-api-dev',
watch: true, watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'], ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
}, },
{ {
@@ -43,12 +94,46 @@ module.exports = {
NODE_ENV: 'production', NODE_ENV: 'production',
name: 'flyer-crawler-worker', name: 'flyer-crawler-worker',
cwd: '/var/www/flyer-crawler.projectium.com', cwd: '/var/www/flyer-crawler.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
// Test Environment Settings // Test Environment Settings
env_test: { env_test: {
NODE_ENV: 'test', NODE_ENV: 'test',
name: 'flyer-crawler-worker-test', name: 'flyer-crawler-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com', cwd: '/var/www/flyer-crawler-test.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
// Development Environment Settings // Development Environment Settings
env_development: { env_development: {
@@ -56,6 +141,23 @@ module.exports = {
name: 'flyer-crawler-worker-dev', name: 'flyer-crawler-worker-dev',
watch: true, watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'], ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
}, },
{ {
@@ -68,12 +170,46 @@ module.exports = {
NODE_ENV: 'production', NODE_ENV: 'production',
name: 'flyer-crawler-analytics-worker', name: 'flyer-crawler-analytics-worker',
cwd: '/var/www/flyer-crawler.projectium.com', cwd: '/var/www/flyer-crawler.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
// Test Environment Settings // Test Environment Settings
env_test: { env_test: {
NODE_ENV: 'test', NODE_ENV: 'test',
name: 'flyer-crawler-analytics-worker-test', name: 'flyer-crawler-analytics-worker-test',
cwd: '/var/www/flyer-crawler-test.projectium.com', cwd: '/var/www/flyer-crawler-test.projectium.com',
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
// Development Environment Settings // Development Environment Settings
env_development: { env_development: {
@@ -81,6 +217,23 @@ module.exports = {
name: 'flyer-crawler-analytics-worker-dev', name: 'flyer-crawler-analytics-worker-dev',
watch: true, watch: true,
ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'], ignore_watch: ['node_modules', 'logs', '*.log', 'flyer-images', '.git'],
// Inherit secrets from the deployment environment
DB_HOST: process.env.DB_HOST,
DB_USER: process.env.DB_USER,
DB_PASSWORD: process.env.DB_PASSWORD,
DB_NAME: process.env.DB_NAME,
REDIS_URL: process.env.REDIS_URL,
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
FRONTEND_URL: process.env.FRONTEND_URL,
JWT_SECRET: process.env.JWT_SECRET,
GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_MAPS_API_KEY: process.env.GOOGLE_MAPS_API_KEY,
SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: process.env.SMTP_PORT,
SMTP_SECURE: process.env.SMTP_SECURE,
SMTP_USER: process.env.SMTP_USER,
SMTP_PASS: process.env.SMTP_PASS,
SMTP_FROM_EMAIL: process.env.SMTP_FROM_EMAIL,
}, },
}, },
], ],

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{ {
"name": "flyer-crawler", "name": "flyer-crawler",
"version": "0.0.30", "version": "0.1.6",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "flyer-crawler", "name": "flyer-crawler",
"version": "0.0.30", "version": "0.1.6",
"dependencies": { "dependencies": {
"@bull-board/api": "^6.14.2", "@bull-board/api": "^6.14.2",
"@bull-board/express": "^6.14.2", "@bull-board/express": "^6.14.2",

View File

@@ -1,7 +1,7 @@
{ {
"name": "flyer-crawler", "name": "flyer-crawler",
"private": true, "private": true,
"version": "0.0.30", "version": "0.1.6",
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"", "dev": "concurrently \"npm:start:dev\" \"vite\"",

View File

@@ -36,7 +36,7 @@ vi.mock('pdfjs-dist', () => ({
// Mock the new config module // Mock the new config module
vi.mock('./config', () => ({ vi.mock('./config', () => ({
default: { default: {
app: { version: '1.0.0', commitMessage: 'Initial commit', commitUrl: '#' }, app: { version: '20250101-1200:abc1234:1.0.0', commitMessage: 'Initial commit', commitUrl: '#' },
google: { mapsEmbedApiKey: 'mock-key' }, google: { mapsEmbedApiKey: 'mock-key' },
}, },
})); }));
@@ -588,11 +588,11 @@ describe('App Component', () => {
// Mock the config module for this specific test // Mock the config module for this specific test
vi.mock('./config', () => ({ vi.mock('./config', () => ({
default: { default: {
app: { version: '1.0.1', commitMessage: 'New feature!', commitUrl: '#' }, app: { version: '20250101-1200:abc1234:1.0.1', commitMessage: 'New feature!', commitUrl: '#' },
google: { mapsEmbedApiKey: 'mock-key' }, google: { mapsEmbedApiKey: 'mock-key' },
}, },
})); }));
localStorageMock.setItem('lastSeenVersion', '1.0.0'); localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:1.0.0');
renderApp(); renderApp();
await expect(screen.findByTestId('whats-new-modal-mock')).resolves.toBeInTheDocument(); await expect(screen.findByTestId('whats-new-modal-mock')).resolves.toBeInTheDocument();
}); });
@@ -741,7 +741,7 @@ describe('App Component', () => {
vi.mock('./config', () => ({ vi.mock('./config', () => ({
default: { default: {
app: { app: {
version: '2.0.0', version: '20250101-1200:abc1234:2.0.0',
commitMessage: 'A new version!', commitMessage: 'A new version!',
commitUrl: 'http://example.com/commit/2.0.0', commitUrl: 'http://example.com/commit/2.0.0',
}, },
@@ -752,14 +752,14 @@ describe('App Component', () => {
it('should display the version number and commit link', () => { it('should display the version number and commit link', () => {
renderApp(); renderApp();
const versionLink = screen.getByText(`Version: 2.0.0`); const versionLink = screen.getByText(`Version: 20250101-1200:abc1234:2.0.0`);
expect(versionLink).toBeInTheDocument(); expect(versionLink).toBeInTheDocument();
expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0'); expect(versionLink).toHaveAttribute('href', 'http://example.com/commit/2.0.0');
}); });
it('should open the "What\'s New" modal when the question mark icon is clicked', async () => { it('should open the "What\'s New" modal when the question mark icon is clicked', async () => {
// Pre-set the localStorage to prevent the modal from opening automatically // Pre-set the localStorage to prevent the modal from opening automatically
localStorageMock.setItem('lastSeenVersion', '2.0.0'); localStorageMock.setItem('lastSeenVersion', '20250101-1200:abc1234:2.0.0');
renderApp(); renderApp();
expect(screen.queryByTestId('whats-new-modal-mock')).not.toBeInTheDocument(); expect(screen.queryByTestId('whats-new-modal-mock')).not.toBeInTheDocument();

View File

@@ -44,7 +44,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
}) })
.catch((err) => { .catch((err) => {
console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err }); console.error('[DEBUG] FlyerCorrectionTool: Failed to fetch image.', { err });
logger.error('Failed to fetch image for correction tool', { error: err }); logger.error({ error: err }, 'Failed to fetch image for correction tool');
notifyError('Could not load the image for correction.'); notifyError('Could not load the image for correction.');
}); });
} }
@@ -164,7 +164,7 @@ export const FlyerCorrectionTool: React.FC<FlyerCorrectionToolProps> = ({
const msg = err instanceof Error ? err.message : 'An unknown error occurred.'; const msg = err instanceof Error ? err.message : 'An unknown error occurred.';
console.error('[DEBUG] handleRescan: Caught an error.', { error: err }); console.error('[DEBUG] handleRescan: Caught an error.', { error: err });
notifyError(msg); notifyError(msg);
logger.error('Error during rescan:', { error: err }); logger.error({ error: err }, 'Error during rescan:');
} finally { } finally {
console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.'); console.debug('[DEBUG] handleRescan: Finished. Setting isProcessing=false.');
setIsProcessing(false); setIsProcessing(false);

View File

@@ -73,12 +73,11 @@ describe('FlyerUploader', () => {
it('should handle file upload and start polling', async () => { it('should handle file upload and start polling', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.'); console.log('--- [TEST LOG] ---: 1. Setting up mocks for upload and polling.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }), mockedAiApiClient.getJobStatus.mockResolvedValue({
); state: 'active',
mockedAiApiClient.getJobStatus.mockResolvedValue( progress: { message: 'Checking...' },
new Response(JSON.stringify({ state: 'active', progress: { message: 'Checking...' } })), });
);
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.'); console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file.');
renderComponent(); renderComponent();
@@ -131,12 +130,11 @@ describe('FlyerUploader', () => {
it('should handle file upload via drag and drop', async () => { it('should handle file upload via drag and drop', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.'); console.log('--- [TEST LOG] ---: 1. Setting up mocks for drag and drop.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-dnd' });
new Response(JSON.stringify({ jobId: 'job-dnd' }), { status: 200 }), mockedAiApiClient.getJobStatus.mockResolvedValue({
); state: 'active',
mockedAiApiClient.getJobStatus.mockResolvedValue( progress: { message: 'Dropped...' },
new Response(JSON.stringify({ state: 'active', progress: { message: 'Dropped...' } })), });
);
console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.'); console.log('--- [TEST LOG] ---: 2. Rendering component and preparing file for drop.');
renderComponent(); renderComponent();
@@ -159,16 +157,10 @@ describe('FlyerUploader', () => {
it('should poll for status, complete successfully, and redirect', async () => { it('should poll for status, complete successfully, and redirect', async () => {
const onProcessingComplete = vi.fn(); const onProcessingComplete = vi.fn();
console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.'); console.log('--- [TEST LOG] ---: 1. Setting up mock sequence for polling.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-123' });
new Response(JSON.stringify({ jobId: 'job-123' }), { status: 200 }),
);
mockedAiApiClient.getJobStatus mockedAiApiClient.getJobStatus
.mockResolvedValueOnce( .mockResolvedValueOnce({ state: 'active', progress: { message: 'Analyzing...' } })
new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })), .mockResolvedValueOnce({ state: 'completed', returnValue: { flyerId: 42 } });
)
.mockResolvedValueOnce(
new Response(JSON.stringify({ state: 'completed', returnValue: { flyerId: 42 } })),
);
console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.'); console.log('--- [TEST LOG] ---: 2. Rendering component and uploading file.');
renderComponent(onProcessingComplete); renderComponent(onProcessingComplete);
@@ -229,12 +221,11 @@ describe('FlyerUploader', () => {
it('should handle a failed job', async () => { it('should handle a failed job', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.'); console.log('--- [TEST LOG] ---: 1. Setting up mocks for a failed job.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail' });
new Response(JSON.stringify({ jobId: 'job-fail' }), { status: 200 }), mockedAiApiClient.getJobStatus.mockResolvedValue({
); state: 'failed',
mockedAiApiClient.getJobStatus.mockResolvedValue( failedReason: 'AI model exploded',
new Response(JSON.stringify({ state: 'failed', failedReason: 'AI model exploded' })), });
);
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.'); console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
renderComponent(); renderComponent();
@@ -260,11 +251,82 @@ describe('FlyerUploader', () => {
console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.'); console.log('--- [TEST LOG] ---: 6. "Upload Another" button confirmed.');
}); });
it('should clear the polling timeout when a job fails', async () => {
const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
console.log('--- [TEST LOG] ---: 1. Setting up mocks for failed job timeout clearance.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-fail-timeout' });
// We need at least one 'active' response to establish a timeout loop so we have something to clear
mockedAiApiClient.getJobStatus
.mockResolvedValueOnce({ state: 'active', progress: { message: 'Working...' } })
.mockResolvedValueOnce({ state: 'failed', failedReason: 'Fatal Error' });
renderComponent();
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
const input = screen.getByLabelText(/click to select a file/i);
fireEvent.change(input, { target: { files: [file] } });
// Wait for the first poll to complete and UI to update to "Working..."
await screen.findByText('Working...');
// Advance time to trigger the second poll
await act(async () => {
vi.advanceTimersByTime(3000);
});
// Wait for the failure UI
await screen.findByText(/Processing failed: Fatal Error/i);
// Verify clearTimeout was called
expect(clearTimeoutSpy).toHaveBeenCalled();
// Verify no further polling occurs
const callsBefore = mockedAiApiClient.getJobStatus.mock.calls.length;
await act(async () => {
vi.advanceTimersByTime(10000);
});
expect(mockedAiApiClient.getJobStatus).toHaveBeenCalledTimes(callsBefore);
clearTimeoutSpy.mockRestore();
});
it('should clear the polling timeout when the component unmounts', async () => {
const clearTimeoutSpy = vi.spyOn(global, 'clearTimeout');
console.log('--- [TEST LOG] ---: 1. Setting up mocks for unmount timeout clearance.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-unmount' });
mockedAiApiClient.getJobStatus.mockResolvedValue({
state: 'active',
progress: { message: 'Polling...' },
});
const { unmount } = renderComponent();
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
const input = screen.getByLabelText(/click to select a file/i);
fireEvent.change(input, { target: { files: [file] } });
// Wait for the first poll to complete and the UI to show the polling state
await screen.findByText('Polling...');
// Now that we are in a polling state (and a timeout is set), unmount the component
console.log('--- [TEST LOG] ---: 2. Unmounting component to trigger cleanup effect.');
unmount();
// Verify that the cleanup function in the useEffect hook was called
expect(clearTimeoutSpy).toHaveBeenCalled();
console.log('--- [TEST LOG] ---: 3. clearTimeout confirmed.');
clearTimeoutSpy.mockRestore();
});
it('should handle a duplicate flyer error (409)', async () => { it('should handle a duplicate flyer error (409)', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.'); console.log('--- [TEST LOG] ---: 1. Setting up mock for 409 duplicate error.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( // The API client now throws a structured error for non-2xx responses.
new Response(JSON.stringify({ flyerId: 99, message: 'Duplicate' }), { status: 409 }), mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
); status: 409,
body: { flyerId: 99, message: 'Duplicate' },
});
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.'); console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
renderComponent(); renderComponent();
@@ -295,12 +357,11 @@ describe('FlyerUploader', () => {
it('should allow the user to stop watching progress', async () => { it('should allow the user to stop watching progress', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.'); console.log('--- [TEST LOG] ---: 1. Setting up mocks for infinite polling.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-stop' });
new Response(JSON.stringify({ jobId: 'job-stop' }), { status: 200 }), mockedAiApiClient.getJobStatus.mockResolvedValue({
); state: 'active',
mockedAiApiClient.getJobStatus.mockResolvedValue( progress: { message: 'Analyzing...' },
new Response(JSON.stringify({ state: 'active', progress: { message: 'Analyzing...' } })), } as any);
);
console.log('--- [TEST LOG] ---: 2. Rendering and uploading.'); console.log('--- [TEST LOG] ---: 2. Rendering and uploading.');
renderComponent(); renderComponent();
@@ -362,9 +423,11 @@ describe('FlyerUploader', () => {
it('should handle a generic network error during upload', async () => { it('should handle a generic network error during upload', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.'); console.log('--- [TEST LOG] ---: 1. Setting up mock for generic upload error.');
mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue( // Simulate a structured error from the API client
new Error('Network Error During Upload'), mockedAiApiClient.uploadAndProcessFlyer.mockRejectedValue({
); status: 500,
body: { message: 'Network Error During Upload' },
});
renderComponent(); renderComponent();
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' }); const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
const input = screen.getByLabelText(/click to select a file/i); const input = screen.getByLabelText(/click to select a file/i);
@@ -379,9 +442,7 @@ describe('FlyerUploader', () => {
it('should handle a generic network error during polling', async () => { it('should handle a generic network error during polling', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.'); console.log('--- [TEST LOG] ---: 1. Setting up mock for polling error.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-poll-fail' });
new Response(JSON.stringify({ jobId: 'job-poll-fail' }), { status: 200 }),
);
mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error')); mockedAiApiClient.getJobStatus.mockRejectedValue(new Error('Polling Network Error'));
renderComponent(); renderComponent();
@@ -398,11 +459,9 @@ describe('FlyerUploader', () => {
it('should handle a completed job with a missing flyerId', async () => { it('should handle a completed job with a missing flyerId', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.'); console.log('--- [TEST LOG] ---: 1. Setting up mock for malformed completion payload.');
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue( mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-no-flyerid' });
new Response(JSON.stringify({ jobId: 'job-no-flyerid' }), { status: 200 }),
);
mockedAiApiClient.getJobStatus.mockResolvedValue( mockedAiApiClient.getJobStatus.mockResolvedValue(
new Response(JSON.stringify({ state: 'completed', returnValue: {} })), // No flyerId { state: 'completed', returnValue: {} }, // No flyerId
); );
renderComponent(); renderComponent();
@@ -419,6 +478,27 @@ describe('FlyerUploader', () => {
console.log('--- [TEST LOG] ---: 4. Assertions passed.'); console.log('--- [TEST LOG] ---: 4. Assertions passed.');
}); });
it('should handle a non-JSON response during polling', async () => {
console.log('--- [TEST LOG] ---: 1. Setting up mock for non-JSON response.');
// The actual function would throw, so we mock the rejection.
// The new getJobStatus would throw an error like "Failed to parse JSON..."
mockedAiApiClient.uploadAndProcessFlyer.mockResolvedValue({ jobId: 'job-bad-json' });
mockedAiApiClient.getJobStatus.mockRejectedValue(
new Error('Failed to parse JSON response from server. Body: <html>502 Bad Gateway</html>'),
);
renderComponent();
const file = new File(['content'], 'flyer.pdf', { type: 'application/pdf' });
const input = screen.getByLabelText(/click to select a file/i);
console.log('--- [TEST LOG] ---: 2. Firing file change event.');
fireEvent.change(input, { target: { files: [file] } });
console.log('--- [TEST LOG] ---: 3. Awaiting error message.');
expect(await screen.findByText(/Failed to parse JSON response from server/i)).toBeInTheDocument();
console.log('--- [TEST LOG] ---: 4. Assertions passed.');
});
it('should do nothing if the file input is cancelled', () => { it('should do nothing if the file input is cancelled', () => {
renderComponent(); renderComponent();
const input = screen.getByLabelText(/click to select a file/i); const input = screen.getByLabelText(/click to select a file/i);

View File

@@ -60,14 +60,8 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
const pollStatus = async () => { const pollStatus = async () => {
console.debug(`[DEBUG] pollStatus(): Polling for jobId: ${jobId}`); console.debug(`[DEBUG] pollStatus(): Polling for jobId: ${jobId}`);
try { try {
const statusResponse = await getJobStatus(jobId); const job = await getJobStatus(jobId); // Now returns parsed JSON directly
console.debug(`[DEBUG] pollStatus(): API response status: ${statusResponse.status}`); console.debug('[DEBUG] pollStatus(): Job status received:', job); // The rest of the logic remains the same
if (!statusResponse.ok) {
throw new Error(`Failed to get job status (HTTP ${statusResponse.status})`);
}
const job = await statusResponse.json();
console.debug('[DEBUG] pollStatus(): Job status received:', job);
if (job.progress) { if (job.progress) {
setProcessingStages(job.progress.stages || []); setProcessingStages(job.progress.stages || []);
@@ -97,7 +91,13 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
console.debug( console.debug(
`[DEBUG] pollStatus(): Job state is "failed". Reason: ${job.failedReason}`, `[DEBUG] pollStatus(): Job state is "failed". Reason: ${job.failedReason}`,
); );
// Explicitly clear any pending timeout to stop the polling loop immediately.
if (pollingTimeoutRef.current) {
clearTimeout(pollingTimeoutRef.current);
}
setErrorMessage(`Processing failed: ${job.failedReason || 'Unknown error'}`); setErrorMessage(`Processing failed: ${job.failedReason || 'Unknown error'}`);
// Clear any stale "in-progress" messages to avoid user confusion.
setStatusMessage(null);
setProcessingState('error'); setProcessingState('error');
break; break;
@@ -112,7 +112,7 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
break; break;
} }
} catch (error) { } catch (error) {
logger.error('Error during polling:', { error }); logger.error({ error }, 'Error during polling:');
setErrorMessage( setErrorMessage(
error instanceof Error ? error.message : 'An unexpected error occurred during polling.', error instanceof Error ? error.message : 'An unexpected error occurred during polling.',
); );
@@ -150,29 +150,24 @@ export const FlyerUploader: React.FC<FlyerUploaderProps> = ({ onProcessingComple
`[DEBUG] processFile(): Checksum generated: ${checksum}. Calling uploadAndProcessFlyer.`, `[DEBUG] processFile(): Checksum generated: ${checksum}. Calling uploadAndProcessFlyer.`,
); );
const startResponse = await uploadAndProcessFlyer(file, checksum); // The API client now returns parsed JSON on success or throws a structured error on failure.
console.debug(`[DEBUG] processFile(): Upload response status: ${startResponse.status}`); const { jobId: newJobId } = await uploadAndProcessFlyer(file, checksum);
if (!startResponse.ok) {
const errorData = await startResponse.json();
console.debug('[DEBUG] processFile(): Upload failed. Error data:', errorData);
if (startResponse.status === 409 && errorData.flyerId) {
setErrorMessage(`This flyer has already been processed. You can view it here:`);
setDuplicateFlyerId(errorData.flyerId);
} else {
setErrorMessage(errorData.message || `Upload failed with status ${startResponse.status}`);
}
setProcessingState('error');
return;
}
const { jobId: newJobId } = await startResponse.json();
console.debug(`[DEBUG] processFile(): Upload successful. Received jobId: ${newJobId}`); console.debug(`[DEBUG] processFile(): Upload successful. Received jobId: ${newJobId}`);
setJobId(newJobId); setJobId(newJobId);
setProcessingState('polling'); setProcessingState('polling');
} catch (error) { } catch (error: any) {
logger.error('An unexpected error occurred during file upload:', { error }); // Handle the structured error thrown by the API client.
setErrorMessage(error instanceof Error ? error.message : 'An unexpected error occurred.'); logger.error({ error }, 'An error occurred during file upload:');
// Handle 409 Conflict for duplicate flyers
if (error?.status === 409 && error.body?.flyerId) {
setErrorMessage(`This flyer has already been processed. You can view it here:`);
setDuplicateFlyerId(error.body.flyerId);
} else {
// Handle other errors (e.g., validation, server errors)
const message =
error?.body?.message || error?.message || 'An unexpected error occurred during upload.';
setErrorMessage(message);
}
setProcessingState('error'); setProcessingState('error');
} }
}, []); }, []);

View File

@@ -1,94 +1,68 @@
// src/middleware/errorHandler.ts // src/middleware/errorHandler.ts
import { Request, Response, NextFunction } from 'express'; import { Request, Response, NextFunction } from 'express';
import { ZodError } from 'zod';
import { import {
DatabaseError,
UniqueConstraintError,
ForeignKeyConstraintError, ForeignKeyConstraintError,
NotFoundError, NotFoundError,
UniqueConstraintError,
ValidationError, ValidationError,
ValidationIssue,
} from '../services/db/errors.db'; } from '../services/db/errors.db';
import crypto from 'crypto'; import { logger } from '../services/logger.server';
interface HttpError extends Error { /**
status?: number; * A centralized error handling middleware for the Express application.
} * This middleware should be the LAST `app.use()` call to catch all errors from previous routes and middleware.
*
export const errorHandler = (err: HttpError, req: Request, res: Response, next: NextFunction) => { * It standardizes error responses and ensures consistent logging.
// If the response headers have already been sent, we must delegate to the default Express error handler. */
export const errorHandler = (err: Error, req: Request, res: Response, next: NextFunction) => {
// If headers have already been sent, delegate to the default Express error handler.
if (res.headersSent) { if (res.headersSent) {
return next(err); return next(err);
} }
// The pino-http middleware guarantees that `req.log` will be available. // Use the request-scoped logger if available, otherwise fall back to the global logger.
const log = req.log; const log = req.log || logger;
// --- 1. Determine Final Status Code and Message --- // --- Handle Zod Validation Errors ---
let statusCode = err.status ?? 500; if (err instanceof ZodError) {
const message = err.message; log.warn({ err: err.flatten() }, 'Request validation failed');
let validationIssues: ValidationIssue[] | undefined; return res.status(400).json({
let errorId: string | undefined; message: 'The request data is invalid.',
errors: err.issues.map((e) => ({ path: e.path, message: e.message })),
});
}
// --- Handle Custom Operational Errors ---
if (err instanceof NotFoundError) {
log.info({ err }, 'Resource not found');
return res.status(404).json({ message: err.message });
}
if (err instanceof ValidationError) {
log.warn({ err }, 'Validation error occurred');
return res.status(400).json({ message: err.message, errors: err.validationErrors });
}
// Refine the status code for known error types. Check for most specific types first.
if (err instanceof UniqueConstraintError) { if (err instanceof UniqueConstraintError) {
statusCode = 409; // Conflict log.warn({ err }, 'Constraint error occurred');
} else if (err instanceof NotFoundError) { return res.status(409).json({ message: err.message }); // Use 409 Conflict for unique constraints
statusCode = 404;
} else if (err instanceof ForeignKeyConstraintError) {
statusCode = 400;
} else if (err instanceof ValidationError) {
statusCode = 400;
validationIssues = err.validationErrors;
} else if (err instanceof DatabaseError) {
// This is a generic fallback for other database errors that are not the specific subclasses above.
statusCode = err.status;
} else if (err.name === 'UnauthorizedError') {
statusCode = err.status || 401;
} }
// --- 2. Log Based on Final Status Code --- if (err instanceof ForeignKeyConstraintError) {
// Log the full error details for debugging, especially for server errors. log.warn({ err }, 'Foreign key constraint violation');
if (statusCode >= 500) { return res.status(400).json({ message: err.message });
errorId = crypto.randomBytes(4).toString('hex');
// The request-scoped logger already contains user, IP, and request_id.
// We add the full error and the request object itself.
// Pino's `redact` config will automatically sanitize sensitive fields in `req`.
log.error(
{
err,
errorId,
req: { method: req.method, url: req.originalUrl, headers: req.headers, body: req.body },
},
`Unhandled API Error (ID: ${errorId})`,
);
} else {
// For 4xx errors, log at a lower level (e.g., 'warn') to avoid flooding error trackers.
// We include the validation errors in the log context if they exist.
log.warn(
{
err,
validationErrors: validationIssues, // Add validation issues to the log object
statusCode,
},
`Client Error on ${req.method} ${req.path}: ${message}`,
);
} }
// --- TEST ENVIRONMENT DEBUGGING --- // --- Handle Generic Errors ---
if (process.env.NODE_ENV === 'test') { // Log the full error object for debugging. The pino logger will handle redaction.
console.error('--- [TEST] UNHANDLED ERROR ---', err); log.error({ err }, 'An unhandled error occurred in an Express route');
// In production, send a generic message to avoid leaking implementation details.
if (process.env.NODE_ENV === 'production') {
return res.status(500).json({ message: 'An internal server error occurred.' });
} }
// --- 3. Send Response --- // In development, send more details for easier debugging.
// In production, send a generic message for 5xx errors. return res.status(500).json({ message: err.message, stack: err.stack });
// In dev/test, send the actual error message for easier debugging. };
const responseMessage =
statusCode >= 500 && process.env.NODE_ENV === 'production'
? `An unexpected server error occurred. Please reference error ID: ${errorId}`
: message;
res.status(statusCode).json({
message: responseMessage,
...(validationIssues && { errors: validationIssues }), // Conditionally add the 'errors' array if it exists
});
};

View File

@@ -135,6 +135,7 @@ router.get('/corrections', async (req, res, next: NextFunction) => {
const corrections = await db.adminRepo.getSuggestedCorrections(req.log); const corrections = await db.adminRepo.getSuggestedCorrections(req.log);
res.json(corrections); res.json(corrections);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching suggested corrections');
next(error); next(error);
} }
}); });
@@ -144,6 +145,7 @@ router.get('/brands', async (req, res, next: NextFunction) => {
const brands = await db.flyerRepo.getAllBrands(req.log); const brands = await db.flyerRepo.getAllBrands(req.log);
res.json(brands); res.json(brands);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching brands');
next(error); next(error);
} }
}); });
@@ -153,6 +155,7 @@ router.get('/stats', async (req, res, next: NextFunction) => {
const stats = await db.adminRepo.getApplicationStats(req.log); const stats = await db.adminRepo.getApplicationStats(req.log);
res.json(stats); res.json(stats);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching application stats');
next(error); next(error);
} }
}); });
@@ -162,6 +165,7 @@ router.get('/stats/daily', async (req, res, next: NextFunction) => {
const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log); const dailyStats = await db.adminRepo.getDailyStatsForLast30Days(req.log);
res.json(dailyStats); res.json(dailyStats);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching daily stats');
next(error); next(error);
} }
}); });
@@ -176,6 +180,7 @@ router.post(
await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number await db.adminRepo.approveCorrection(params.id, req.log); // params.id is now safely typed as number
res.status(200).json({ message: 'Correction approved successfully.' }); res.status(200).json({ message: 'Correction approved successfully.' });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error approving correction');
next(error); next(error);
} }
}, },
@@ -191,6 +196,7 @@ router.post(
await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number await db.adminRepo.rejectCorrection(params.id, req.log); // params.id is now safely typed as number
res.status(200).json({ message: 'Correction rejected successfully.' }); res.status(200).json({ message: 'Correction rejected successfully.' });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error rejecting correction');
next(error); next(error);
} }
}, },
@@ -210,6 +216,7 @@ router.put(
); );
res.status(200).json(updatedCorrection); res.status(200).json(updatedCorrection);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error updating suggested correction');
next(error); next(error);
} }
}, },
@@ -225,6 +232,7 @@ router.put(
const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts const updatedRecipe = await db.adminRepo.updateRecipeStatus(params.id, body.status, req.log); // This is still a standalone function in admin.db.ts
res.status(200).json(updatedRecipe); res.status(200).json(updatedRecipe);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error updating recipe status');
next(error); // Pass all errors to the central error handler next(error); // Pass all errors to the central error handler
} }
}, },
@@ -250,6 +258,7 @@ router.post(
logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`); logger.info({ brandId: params.id, logoUrl }, `Brand logo updated for brand ID: ${params.id}`);
res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl }); res.status(200).json({ message: 'Brand logo updated successfully.', logoUrl });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error updating brand logo');
next(error); next(error);
} }
}, },
@@ -260,6 +269,7 @@ router.get('/unmatched-items', async (req, res, next: NextFunction) => {
const items = await db.adminRepo.getUnmatchedFlyerItems(req.log); const items = await db.adminRepo.getUnmatchedFlyerItems(req.log);
res.json(items); res.json(items);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching unmatched items');
next(error); next(error);
} }
}); });
@@ -279,6 +289,7 @@ router.delete(
await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log); await db.recipeRepo.deleteRecipe(params.recipeId, userProfile.user.user_id, true, req.log);
res.status(204).send(); res.status(204).send();
} catch (error: unknown) { } catch (error: unknown) {
logger.error({ error }, 'Error deleting recipe');
next(error); next(error);
} }
}, },
@@ -297,6 +308,7 @@ router.delete(
await db.flyerRepo.deleteFlyer(params.flyerId, req.log); await db.flyerRepo.deleteFlyer(params.flyerId, req.log);
res.status(204).send(); res.status(204).send();
} catch (error: unknown) { } catch (error: unknown) {
logger.error({ error }, 'Error deleting flyer');
next(error); next(error);
} }
}, },
@@ -316,6 +328,7 @@ router.put(
); // This is still a standalone function in admin.db.ts ); // This is still a standalone function in admin.db.ts
res.status(200).json(updatedComment); res.status(200).json(updatedComment);
} catch (error: unknown) { } catch (error: unknown) {
logger.error({ error }, 'Error updating comment status');
next(error); next(error);
} }
}, },
@@ -326,6 +339,7 @@ router.get('/users', async (req, res, next: NextFunction) => {
const users = await db.adminRepo.getAllUsers(req.log); const users = await db.adminRepo.getAllUsers(req.log);
res.json(users); res.json(users);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching users');
next(error); next(error);
} }
}); });
@@ -345,6 +359,7 @@ router.get(
const logs = await db.adminRepo.getActivityLog(limit, offset, req.log); const logs = await db.adminRepo.getActivityLog(limit, offset, req.log);
res.json(logs); res.json(logs);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching activity log');
next(error); next(error);
} }
}, },
@@ -360,6 +375,7 @@ router.get(
const user = await db.userRepo.findUserProfileById(params.id, req.log); const user = await db.userRepo.findUserProfileById(params.id, req.log);
res.json(user); res.json(user);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching user profile');
next(error); next(error);
} }
}, },
@@ -395,6 +411,7 @@ router.delete(
await db.userRepo.deleteUserById(params.id, req.log); await db.userRepo.deleteUserById(params.id, req.log);
res.status(204).send(); res.status(204).send();
} catch (error) { } catch (error) {
logger.error({ error }, 'Error deleting user');
next(error); next(error);
} }
}, },
@@ -478,6 +495,7 @@ router.post(
.status(202) .status(202)
.json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` }); .json({ message: `File cleanup job for flyer ID ${params.flyerId} has been enqueued.` });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error enqueuing cleanup job');
next(error); next(error);
} }
}, },
@@ -500,6 +518,7 @@ router.post('/trigger/failing-job', async (req: Request, res: Response, next: Ne
.status(202) .status(202)
.json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` }); .json({ message: `Failing test job has been enqueued successfully. Job ID: ${job.id}` });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error enqueuing failing job');
next(error); next(error);
} }
}); });
@@ -572,6 +591,7 @@ router.get('/queues/status', async (req: Request, res: Response, next: NextFunct
); );
res.json(queueStatuses); res.json(queueStatuses);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching queue statuses');
next(error); next(error);
} }
}); });
@@ -620,6 +640,7 @@ router.post(
); );
res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` }); res.status(200).json({ message: `Job ${jobId} has been successfully marked for retry.` });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error retrying job');
next(error); next(error);
} }
}, },
@@ -651,6 +672,7 @@ router.post(
.status(202) .status(202)
.json({ message: 'Successfully enqueued weekly analytics job.', jobId: job.id }); .json({ message: 'Successfully enqueued weekly analytics job.', jobId: job.id });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error enqueuing weekly analytics job');
next(error); next(error);
} }
}, },

View File

@@ -535,6 +535,27 @@ describe('AI Routes (/api/ai)', () => {
const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0]; const flyerDataArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][0];
expect(flyerDataArg.store_name).toBe('Root Store'); expect(flyerDataArg.store_name).toBe('Root Store');
}); });
it('should default item quantity to 1 if missing', async () => {
const payloadMissingQuantity = {
checksum: 'qty-checksum',
originalFileName: 'flyer-qty.jpg',
extractedData: {
store_name: 'Qty Store',
items: [{ name: 'Item without qty', price: 100 }],
},
};
const response = await supertest(app)
.post('/api/ai/flyers/process')
.field('data', JSON.stringify(payloadMissingQuantity))
.attach('flyerImage', imagePath);
expect(response.status).toBe(201);
expect(mockedDb.createFlyerAndItems).toHaveBeenCalledTimes(1);
const itemsArg = vi.mocked(mockedDb.createFlyerAndItems).mock.calls[0][1];
expect(itemsArg[0].quantity).toBe(1);
});
}); });
describe('POST /check-flyer', () => { describe('POST /check-flyer', () => {

View File

@@ -424,6 +424,7 @@ router.post(
const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({ const itemsForDb = itemsArray.map((item: Partial<ExtractedFlyerItem>) => ({
...item, ...item,
master_item_id: item.master_item_id === null ? undefined : item.master_item_id, master_item_id: item.master_item_id === null ? undefined : item.master_item_id,
quantity: item.quantity ?? 1, // Default to 1 to satisfy DB constraint
view_count: 0, view_count: 0,
click_count: 0, click_count: 0,
updated_at: new Date().toISOString(), updated_at: new Date().toISOString(),

View File

@@ -134,8 +134,8 @@ router.post(
// If the email is a duplicate, return a 409 Conflict status. // If the email is a duplicate, return a 409 Conflict status.
return res.status(409).json({ message: error.message }); return res.status(409).json({ message: error.message });
} }
// The createUser method now handles its own transaction logging, so we just log the route failure.
logger.error({ error }, `User registration route failed for email: ${email}.`); logger.error({ error }, `User registration route failed for email: ${email}.`);
// Pass the error to the centralized handler
return next(error); return next(error);
} }
}, },

View File

@@ -108,6 +108,7 @@ router.post(
const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log); const items = await db.flyerRepo.getFlyerItemsForFlyers(body.flyerIds, req.log);
res.json(items); res.json(items);
} catch (error) { } catch (error) {
req.log.error({ error }, 'Error fetching batch flyer items');
next(error); next(error);
} }
}, },
@@ -127,6 +128,7 @@ router.post(
const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log); const count = await db.flyerRepo.countFlyerItemsForFlyers(body.flyerIds ?? [], req.log);
res.json({ count }); res.json({ count });
} catch (error) { } catch (error) {
req.log.error({ error }, 'Error counting batch flyer items');
next(error); next(error);
} }
}, },

View File

@@ -39,10 +39,7 @@ router.get('/db-schema', validateRequest(emptySchema), async (req, res, next: Ne
} }
return res.status(200).json({ success: true, message: 'All required database tables exist.' }); return res.status(200).json({ success: true, message: 'All required database tables exist.' });
} catch (error: unknown) { } catch (error: unknown) {
logger.error( logger.error({ error }, 'Error during DB schema check:');
{ error: error instanceof Error ? error.message : error },
'Error during DB schema check:',
);
next(error); next(error);
} }
}); });
@@ -133,6 +130,7 @@ router.get(
} }
throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below throw new Error(`Unexpected Redis ping response: ${reply}`); // This will be caught below
} catch (error: unknown) { } catch (error: unknown) {
logger.error({ error }, 'Error checking Redis health');
next(error); next(error);
} }
}, },

View File

@@ -46,7 +46,6 @@ router.get(
} }
// Check if there was output to stderr, even if the exit code was 0 (success). // Check if there was output to stderr, even if the exit code was 0 (success).
// This handles warnings or non-fatal errors that should arguably be treated as failures in this context.
if (stderr && stderr.trim().length > 0) { if (stderr && stderr.trim().length > 0) {
logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr:'); logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr:');
return next(new Error(`PM2 command produced an error: ${stderr}`)); return next(new Error(`PM2 command produced an error: ${stderr}`));
@@ -86,6 +85,7 @@ router.post(
res.json(coordinates); res.json(coordinates);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error geocoding address');
next(error); next(error);
} }
}, },

View File

@@ -77,7 +77,7 @@ router.use(passport.authenticate('jwt', { session: false }));
// Ensure the directory for avatar uploads exists. // Ensure the directory for avatar uploads exists.
const avatarUploadDir = path.join(process.cwd(), 'public', 'uploads', 'avatars'); const avatarUploadDir = path.join(process.cwd(), 'public', 'uploads', 'avatars');
fs.mkdir(avatarUploadDir, { recursive: true }).catch((err) => { fs.mkdir(avatarUploadDir, { recursive: true }).catch((err) => {
logger.error('Failed to create avatar upload directory:', err); logger.error({ err }, 'Failed to create avatar upload directory');
}); });
// Define multer storage configuration. The `req.user` object will be available // Define multer storage configuration. The `req.user` object will be available
@@ -122,6 +122,7 @@ router.post(
); );
res.json(updatedProfile); res.json(updatedProfile);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error uploading avatar');
next(error); next(error);
} }
}, },
@@ -151,6 +152,7 @@ router.get(
); );
res.json(notifications); res.json(notifications);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching notifications');
next(error); next(error);
} }
}, },
@@ -168,6 +170,7 @@ router.post(
await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log); await db.notificationRepo.markAllNotificationsAsRead(userProfile.user.user_id, req.log);
res.status(204).send(); // No Content res.status(204).send(); // No Content
} catch (error) { } catch (error) {
logger.error({ error }, 'Error marking all notifications as read');
next(error); next(error);
} }
}, },
@@ -193,6 +196,7 @@ router.post(
); );
res.status(204).send(); // Success, no content to return res.status(204).send(); // Success, no content to return
} catch (error) { } catch (error) {
logger.error({ error }, 'Error marking notification as read');
next(error); next(error);
} }
}, },
@@ -345,11 +349,7 @@ router.post(
if (error instanceof ForeignKeyConstraintError) { if (error instanceof ForeignKeyConstraintError) {
return res.status(400).json({ message: error.message }); return res.status(400).json({ message: error.message });
} }
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred'; logger.error({ error, body: req.body }, 'Failed to add watched item');
logger.error({
errorMessage,
body: req.body,
});
next(error); next(error);
} }
}, },
@@ -453,11 +453,7 @@ router.post(
if (error instanceof ForeignKeyConstraintError) { if (error instanceof ForeignKeyConstraintError) {
return res.status(400).json({ message: error.message }); return res.status(400).json({ message: error.message });
} }
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred'; logger.error({ error, body: req.body }, 'Failed to create shopping list');
logger.error({
errorMessage,
body: req.body,
});
next(error); next(error);
} }
}, },
@@ -516,12 +512,7 @@ router.post(
if (error instanceof ForeignKeyConstraintError) { if (error instanceof ForeignKeyConstraintError) {
return res.status(400).json({ message: error.message }); return res.status(400).json({ message: error.message });
} }
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred'; logger.error({ error, params: req.params, body: req.body }, 'Failed to add shopping list item');
logger.error({
errorMessage,
params: req.params,
body: req.body,
});
next(error); next(error);
} }
}, },
@@ -661,11 +652,7 @@ router.put(
if (error instanceof ForeignKeyConstraintError) { if (error instanceof ForeignKeyConstraintError) {
return res.status(400).json({ message: error.message }); return res.status(400).json({ message: error.message });
} }
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred'; logger.error({ error, body: req.body }, 'Failed to set user dietary restrictions');
logger.error({
errorMessage,
body: req.body,
});
next(error); next(error);
} }
}, },
@@ -709,11 +696,7 @@ router.put(
if (error instanceof ForeignKeyConstraintError) { if (error instanceof ForeignKeyConstraintError) {
return res.status(400).json({ message: error.message }); return res.status(400).json({ message: error.message });
} }
const errorMessage = error instanceof Error ? error.message : 'An unknown error occurred'; logger.error({ error, body: req.body }, 'Failed to set user appliances');
logger.error({
errorMessage,
body: req.body,
});
next(error); next(error);
} }
}, },
@@ -743,6 +726,7 @@ router.get(
const address = await db.addressRepo.getAddressById(addressId, req.log); // This will throw NotFoundError if not found const address = await db.addressRepo.getAddressById(addressId, req.log); // This will throw NotFoundError if not found
res.json(address); res.json(address);
} catch (error) { } catch (error) {
logger.error({ error }, 'Error fetching user address');
next(error); next(error);
} }
}, },
@@ -781,6 +765,7 @@ router.put(
const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed. const addressId = await userService.upsertUserAddress(userProfile, addressData, req.log); // This was a duplicate, fixed.
res.status(200).json({ message: 'Address updated successfully', address_id: addressId }); res.status(200).json({ message: 'Address updated successfully', address_id: addressId });
} catch (error) { } catch (error) {
logger.error({ error }, 'Error updating user address');
next(error); next(error);
} }
}, },

View File

@@ -51,9 +51,7 @@ export class AiAnalysisService {
// Normalize sources to a consistent format. // Normalize sources to a consistent format.
const mappedSources = (response.sources || []).map( const mappedSources = (response.sources || []).map(
(s: RawSource) => (s: RawSource) =>
(s.web (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
: { uri: '', title: 'Untitled' }) as Source,
); );
return { ...response, sources: mappedSources }; return { ...response, sources: mappedSources };
} }
@@ -84,9 +82,7 @@ export class AiAnalysisService {
// Normalize sources to a consistent format. // Normalize sources to a consistent format.
const mappedSources = (response.sources || []).map( const mappedSources = (response.sources || []).map(
(s: RawSource) => (s: RawSource) =>
(s.web (s.web ? { uri: s.web.uri || '', title: s.web.title || 'Untitled' } : { uri: '', title: 'Untitled' }) as Source,
? { uri: s.web.uri || '', title: s.web.title || 'Untitled' }
: { uri: '', title: 'Untitled' }) as Source,
); );
return { ...response, sources: mappedSources }; return { ...response, sources: mappedSources };
} }

View File

@@ -4,7 +4,13 @@
* It communicates with the application's own backend endpoints, which then securely * It communicates with the application's own backend endpoints, which then securely
* call the Google AI services. This ensures no API keys are exposed on the client. * call the Google AI services. This ensures no API keys are exposed on the client.
*/ */
import type { FlyerItem, Store, MasterGroceryItem } from '../types'; import type {
FlyerItem,
Store,
MasterGroceryItem,
ProcessingStage,
GroundedResponse,
} from '../types';
import { logger } from './logger.client'; import { logger } from './logger.client';
import { apiFetch } from './apiClient'; import { apiFetch } from './apiClient';
@@ -20,14 +26,14 @@ export const uploadAndProcessFlyer = async (
file: File, file: File,
checksum: string, checksum: string,
tokenOverride?: string, tokenOverride?: string,
): Promise<Response> => { ): Promise<{ jobId: string }> => {
const formData = new FormData(); const formData = new FormData();
formData.append('flyerFile', file); formData.append('flyerFile', file);
formData.append('checksum', checksum); formData.append('checksum', checksum);
logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`); logger.info(`[aiApiClient] Starting background processing for file: ${file.name}`);
return apiFetch( const response = await apiFetch(
'/ai/upload-and-process', '/ai/upload-and-process',
{ {
method: 'POST', method: 'POST',
@@ -35,20 +41,73 @@ export const uploadAndProcessFlyer = async (
}, },
{ tokenOverride }, { tokenOverride },
); );
if (!response.ok) {
let errorBody;
try {
errorBody = await response.json();
} catch (e) {
errorBody = { message: await response.text() };
}
// Throw a structured error so the component can inspect the status and body
throw { status: response.status, body: errorBody };
}
return response.json();
}; };
// Describes the job-status payload returned by GET /ai/jobs/:jobId/status
// and consumed by getJobStatus below.
export interface JobStatus {
  // Identifier of the background processing job being polled.
  id: string;
  // Queue lifecycle state of the job.
  state: 'completed' | 'failed' | 'active' | 'waiting' | 'delayed' | 'paused';
  // Incremental progress reported by the worker; null when none has been reported.
  progress: {
    stages?: ProcessingStage[];
    estimatedTimeRemaining?: number;
    message?: string;
  } | null;
  // Return value of the job; null until the job produces one.
  // NOTE(review): presumably only set once state is 'completed' — confirm against the worker.
  returnValue: {
    flyerId?: number;
  } | null;
  // Failure description, if any; null otherwise.
  failedReason: string | null;
}
/** /**
* Fetches the status of a background processing job. * Fetches the status of a background processing job.
* This is the second step in the new background processing flow. * This is the second step in the new background processing flow.
* @param jobId The ID of the job to check. * @param jobId The ID of the job to check.
* @param tokenOverride Optional token for testing. * @param tokenOverride Optional token for testing.
* @returns A promise that resolves to the API response with the job's status. * @returns A promise that resolves to the parsed job status object.
* @throws An error if the network request fails or if the response is not valid JSON.
*/ */
export const getJobStatus = async (jobId: string, tokenOverride?: string): Promise<Response> => { export const getJobStatus = async (
return apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride }); jobId: string,
tokenOverride?: string,
): Promise<JobStatus> => {
const response = await apiFetch(`/ai/jobs/${jobId}/status`, {}, { tokenOverride });
if (!response.ok) {
let errorText = `API Error: ${response.status} ${response.statusText}`;
try {
const errorBody = await response.text();
if (errorBody) errorText = `API Error ${response.status}: ${errorBody}`;
} catch (e) {
// ignore if reading body fails
}
throw new Error(errorText);
}
try {
return await response.json();
} catch (error) {
const rawText = await response.text();
throw new Error(`Failed to parse JSON response from server. Body: ${rawText}`);
}
}; };
export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Promise<Response> => { export const isImageAFlyer = (
imageFile: File,
tokenOverride?: string,
): Promise<Response> => {
const formData = new FormData(); const formData = new FormData();
formData.append('image', imageFile); formData.append('image', imageFile);
@@ -64,7 +123,7 @@ export const isImageAFlyer = async (imageFile: File, tokenOverride?: string): Pr
); );
}; };
export const extractAddressFromImage = async ( export const extractAddressFromImage = (
imageFile: File, imageFile: File,
tokenOverride?: string, tokenOverride?: string,
): Promise<Response> => { ): Promise<Response> => {
@@ -81,7 +140,7 @@ export const extractAddressFromImage = async (
); );
}; };
export const extractLogoFromImage = async ( export const extractLogoFromImage = (
imageFiles: File[], imageFiles: File[],
tokenOverride?: string, tokenOverride?: string,
): Promise<Response> => { ): Promise<Response> => {
@@ -100,7 +159,7 @@ export const extractLogoFromImage = async (
); );
}; };
export const getQuickInsights = async ( export const getQuickInsights = (
items: Partial<FlyerItem>[], items: Partial<FlyerItem>[],
signal?: AbortSignal, signal?: AbortSignal,
tokenOverride?: string, tokenOverride?: string,
@@ -117,7 +176,7 @@ export const getQuickInsights = async (
); );
}; };
export const getDeepDiveAnalysis = async ( export const getDeepDiveAnalysis = (
items: Partial<FlyerItem>[], items: Partial<FlyerItem>[],
signal?: AbortSignal, signal?: AbortSignal,
tokenOverride?: string, tokenOverride?: string,
@@ -134,7 +193,7 @@ export const getDeepDiveAnalysis = async (
); );
}; };
export const searchWeb = async ( export const searchWeb = (
query: string, query: string,
signal?: AbortSignal, signal?: AbortSignal,
tokenOverride?: string, tokenOverride?: string,
@@ -179,7 +238,7 @@ export const planTripWithMaps = async (
* @param prompt A description of the image to generate (e.g., a meal plan). * @param prompt A description of the image to generate (e.g., a meal plan).
* @returns A base64-encoded string of the generated PNG image. * @returns A base64-encoded string of the generated PNG image.
*/ */
export const generateImageFromText = async ( export const generateImageFromText = (
prompt: string, prompt: string,
signal?: AbortSignal, signal?: AbortSignal,
tokenOverride?: string, tokenOverride?: string,
@@ -202,7 +261,7 @@ export const generateImageFromText = async (
* @param text The text to be spoken. * @param text The text to be spoken.
* @returns A base64-encoded string of the raw audio data. * @returns A base64-encoded string of the raw audio data.
*/ */
export const generateSpeechFromText = async ( export const generateSpeechFromText = (
text: string, text: string,
signal?: AbortSignal, signal?: AbortSignal,
tokenOverride?: string, tokenOverride?: string,
@@ -259,7 +318,7 @@ export const startVoiceSession = (callbacks: {
* @param tokenOverride Optional token for testing. * @param tokenOverride Optional token for testing.
* @returns A promise that resolves to the API response containing the extracted text. * @returns A promise that resolves to the API response containing the extracted text.
*/ */
export const rescanImageArea = async ( export const rescanImageArea = (
imageFile: File, imageFile: File,
cropArea: { x: number; y: number; width: number; height: number }, cropArea: { x: number; y: number; width: number; height: number },
extractionType: 'store_name' | 'dates' | 'item_details', extractionType: 'store_name' | 'dates' | 'item_details',
@@ -270,7 +329,11 @@ export const rescanImageArea = async (
formData.append('cropArea', JSON.stringify(cropArea)); formData.append('cropArea', JSON.stringify(cropArea));
formData.append('extractionType', extractionType); formData.append('extractionType', extractionType);
return apiFetch('/ai/rescan-area', { method: 'POST', body: formData }, { tokenOverride }); return apiFetch(
'/ai/rescan-area',
{ method: 'POST', body: formData },
{ tokenOverride },
);
}; };
/** /**
@@ -278,7 +341,7 @@ export const rescanImageArea = async (
* @param watchedItems An array of the user's watched master grocery items. * @param watchedItems An array of the user's watched master grocery items.
* @returns A promise that resolves to the raw `Response` object from the API. * @returns A promise that resolves to the raw `Response` object from the API.
*/ */
export const compareWatchedItemPrices = async ( export const compareWatchedItemPrices = (
watchedItems: MasterGroceryItem[], watchedItems: MasterGroceryItem[],
signal?: AbortSignal, signal?: AbortSignal,
): Promise<Response> => { ): Promise<Response> => {
@@ -292,5 +355,4 @@ export const compareWatchedItemPrices = async (
body: JSON.stringify({ items: watchedItems }), body: JSON.stringify({ items: watchedItems }),
}, },
{ signal }, { signal },
); )};
};

View File

@@ -166,6 +166,127 @@ describe('AI Service (Server)', () => {
}); });
}); });
// Exercises the model-fallback adapter in AIService: quota/rate-limit style
// errors advance to the next model in the configured list, any other error
// rethrows immediately, and exhausting the list rethrows the last error.
describe('Model Fallback Logic', () => {
  const originalEnv = process.env;

  beforeEach(() => {
    vi.unstubAllEnvs();
    // Provide an API key so the service constructs a real (mocked) client.
    process.env = { ...originalEnv, GEMINI_API_KEY: 'test-key' };
    vi.resetModules(); // Re-import to use the new env var and re-instantiate the service
  });

  afterEach(() => {
    // Restore the pristine environment so other suites are unaffected.
    process.env = originalEnv;
    vi.unstubAllEnvs();
  });

  it('should try the next model if the first one fails with a quota error', async () => {
    // Arrange: fresh imports pick up the stubbed environment from beforeEach.
    const { AIService } = await import('./aiService.server');
    const { logger } = await import('./logger.server');
    const serviceWithFallback = new AIService(logger);

    const quotaError = new Error('User rate limit exceeded due to quota');
    const successResponse = { text: 'Success from fallback model', candidates: [] };

    // Mock the generateContent function to fail on the first call and succeed on the second
    mockGenerateContent.mockRejectedValueOnce(quotaError).mockResolvedValueOnce(successResponse);

    const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };

    // Act: go through the private adapter, which owns the fallback loop.
    const result = await (serviceWithFallback as any).aiClient.generateContent(request);

    // Assert
    expect(result).toEqual(successResponse);
    expect(mockGenerateContent).toHaveBeenCalledTimes(2);

    // Check first call
    expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
      model: 'gemini-2.5-flash',
      ...request,
    });

    // Check second call
    expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
      model: 'gemini-3-flash',
      ...request,
    });

    // Check that a warning was logged
    expect(logger.warn).toHaveBeenCalledWith(
      expect.stringContaining(
        "Model 'gemini-2.5-flash' failed due to quota/rate limit. Trying next model.",
      ),
    );
  });

  it('should throw immediately for non-retriable errors', async () => {
    // Arrange
    const { AIService } = await import('./aiService.server');
    const { logger } = await import('./logger.server');
    const serviceWithFallback = new AIService(logger);

    const nonRetriableError = new Error('Invalid API Key');
    mockGenerateContent.mockRejectedValueOnce(nonRetriableError);

    const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };

    // Act & Assert: no fallback — exactly one attempt, then the error propagates.
    await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
      'Invalid API Key',
    );

    expect(mockGenerateContent).toHaveBeenCalledTimes(1);
    expect(logger.error).toHaveBeenCalledWith(
      { error: nonRetriableError },
      `[AIService Adapter] Model 'gemini-2.5-flash' failed with a non-retriable error.`,
    );
  });

  it('should throw the last error if all models fail', async () => {
    // Arrange
    const { AIService } = await import('./aiService.server');
    const { logger } = await import('./logger.server');
    const serviceWithFallback = new AIService(logger);

    // Three distinct retriable errors, one per configured model, covering the
    // three match patterns: 'quota', '429', and 'RESOURCE_EXHAUSTED'.
    const quotaError1 = new Error('Quota exhausted for model 1');
    const quotaError2 = new Error('429 Too Many Requests for model 2');
    const quotaError3 = new Error('RESOURCE_EXHAUSTED for model 3');

    mockGenerateContent
      .mockRejectedValueOnce(quotaError1)
      .mockRejectedValueOnce(quotaError2)
      .mockRejectedValueOnce(quotaError3);

    const request = { contents: [{ parts: [{ text: 'test prompt' }] }] };

    // Act & Assert: the LAST error is the one surfaced to the caller.
    await expect((serviceWithFallback as any).aiClient.generateContent(request)).rejects.toThrow(
      quotaError3,
    );

    expect(mockGenerateContent).toHaveBeenCalledTimes(3);
    expect(mockGenerateContent).toHaveBeenNthCalledWith(1, {
      model: 'gemini-2.5-flash',
      ...request,
    });
    expect(mockGenerateContent).toHaveBeenNthCalledWith(2, {
      model: 'gemini-3-flash',
      ...request,
    });
    expect(mockGenerateContent).toHaveBeenNthCalledWith(3, {
      model: 'gemini-2.5-flash-lite',
      ...request,
    });

    expect(logger.error).toHaveBeenCalledWith(
      { lastError: quotaError3 },
      '[AIService Adapter] All AI models failed. Throwing last known error.',
    );
  });
});
describe('extractItemsFromReceiptImage', () => { describe('extractItemsFromReceiptImage', () => {
it('should extract items from a valid AI response', async () => { it('should extract items from a valid AI response', async () => {
const mockAiResponseText = `[ const mockAiResponseText = `[

View File

@@ -72,6 +72,7 @@ export class AIService {
private fs: IFileSystem; private fs: IFileSystem;
private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>; private rateLimiter: <T>(fn: () => Promise<T>) => Promise<T>;
private logger: Logger; private logger: Logger;
private readonly models = ['gemini-2.5-flash', 'gemini-3-flash', 'gemini-2.5-flash-lite'];
constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) { constructor(logger: Logger, aiClient?: IAiClient, fs?: IFileSystem) {
this.logger = logger; this.logger = logger;
@@ -121,17 +122,11 @@ export class AIService {
); );
} }
// do not change "gemini-2.5-flash" - this is correct
const modelName = 'gemini-2.5-flash';
// We create a shim/adapter that matches the old structure but uses the new SDK call pattern. // We create a shim/adapter that matches the old structure but uses the new SDK call pattern.
// This preserves the dependency injection pattern used throughout the class. // This preserves the dependency injection pattern used throughout the class.
this.aiClient = genAI this.aiClient = genAI
? { ? {
generateContent: async (request) => { generateContent: async (request) => {
// The model name is now injected here, into every call, as the new SDK requires.
// Architectural guard clause: All requests from this service must have content.
// This prevents sending invalid requests to the API and satisfies TypeScript's strictness.
if (!request.contents || request.contents.length === 0) { if (!request.contents || request.contents.length === 0) {
this.logger.error( this.logger.error(
{ request }, { request },
@@ -140,14 +135,7 @@ export class AIService {
throw new Error('AIService.generateContent requires at least one content element.'); throw new Error('AIService.generateContent requires at least one content element.');
} }
// Architectural Fix: After the guard clause, assign the guaranteed-to-exist element return this._generateWithFallback(genAI, request);
// to a new constant. This provides a definitive type-safe variable for the compiler.
const firstContent = request.contents[0];
this.logger.debug(
{ modelName, requestParts: firstContent.parts?.length ?? 0 },
'[AIService] Calling actual generateContent via adapter.',
);
return genAI.models.generateContent({ model: modelName, ...request });
}, },
} }
: { : {
@@ -182,6 +170,54 @@ export class AIService {
this.logger.info('---------------- [AIService] Constructor End ----------------'); this.logger.info('---------------- [AIService] Constructor End ----------------');
} }
/**
 * Attempts content generation against each model in `this.models`, in order.
 *
 * Retriable failures — detected by substring match on the (lowercased) error
 * message for quota / rate-limit / overload indicators — cause a fallthrough
 * to the next model. Any other error is rethrown immediately without trying
 * further models. If every model fails retriably, the last error is thrown.
 *
 * @param genAI   The GoogleGenAI client used to issue each request.
 * @param request The content-generation request, forwarded unchanged to each model.
 * @returns The first successful GenerateContentResponse.
 * @throws The first non-retriable error, or the last error if all models fail.
 */
private async _generateWithFallback(
  genAI: GoogleGenAI,
  request: { contents: Content[]; tools?: Tool[] },
): Promise<GenerateContentResponse> {
  let lastError: Error | null = null;

  for (const modelName of this.models) {
    try {
      this.logger.info(
        `[AIService Adapter] Attempting to generate content with model: ${modelName}`,
      );
      const result = await genAI.models.generateContent({ model: modelName, ...request });
      // If the call succeeds, return the result immediately.
      return result;
    } catch (error: unknown) {
      // Normalize non-Error throwables so the message inspection below is safe.
      lastError = error instanceof Error ? error : new Error(String(error));
      const errorMessage = (lastError.message || '').toLowerCase(); // Make case-insensitive

      // Check for specific error messages indicating quota issues or model unavailability.
      if (
        errorMessage.includes('quota') ||
        errorMessage.includes('429') || // HTTP 429 Too Many Requests
        errorMessage.includes('resource_exhausted') || // Make case-insensitive
        errorMessage.includes('model is overloaded')
      ) {
        this.logger.warn(
          `[AIService Adapter] Model '${modelName}' failed due to quota/rate limit. Trying next model. Error: ${errorMessage}`,
        );
        continue; // Try the next model in the list.
      } else {
        // For other errors (e.g., invalid input, safety settings), fail immediately.
        this.logger.error(
          { error: lastError },
          `[AIService Adapter] Model '${modelName}' failed with a non-retriable error.`,
        );
        throw lastError;
      }
    }
  }

  // If all models in the list have failed, throw the last error encountered.
  this.logger.error(
    { lastError },
    '[AIService Adapter] All AI models failed. Throwing last known error.',
  );
  throw lastError || new Error('All AI models failed to generate content.');
}
private async serverFileToGenerativePart(path: string, mimeType: string) { private async serverFileToGenerativePart(path: string, mimeType: string) {
const fileData = await this.fs.readFile(path); const fileData = await this.fs.readFile(path);
return { return {

View File

@@ -176,15 +176,13 @@ describe('API Client', () => {
// We expect the promise to still resolve with the bad response, but log an error. // We expect the promise to still resolve with the bad response, but log an error.
await apiClient.apiFetch('/some/failing/endpoint'); await apiClient.apiFetch('/some/failing/endpoint');
// FIX: Use stringContaining to be resilient to port numbers (e.g., localhost:3001)
// This checks for the essential parts of the log message without being brittle.
expect(logger.error).toHaveBeenCalledWith( expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('apiFetch: Request to http://'), expect.objectContaining({
'Internal Server Error', status: 500,
); body: 'Internal Server Error',
expect(logger.error).toHaveBeenCalledWith( url: expect.stringContaining('/some/failing/endpoint'),
expect.stringContaining('/api/some/failing/endpoint failed with status 500'), }),
'Internal Server Error', 'apiFetch: Request failed',
); );
}); });
@@ -242,10 +240,6 @@ describe('API Client', () => {
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', { expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
error: apiError, error: apiError,
}); });
expect(logger.warn).toHaveBeenCalledWith('Failed to track flyer item interaction', {
error: apiError,
});
}); });
it('logSearchQuery should log a warning on failure', async () => { it('logSearchQuery should log a warning on failure', async () => {
@@ -259,8 +253,6 @@ describe('API Client', () => {
was_successful: false, was_successful: false,
}); });
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError }); expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
expect(logger.warn).toHaveBeenCalledWith('Failed to log search query', { error: apiError });
}); });
}); });

View File

@@ -1,6 +1,7 @@
// src/services/apiClient.ts // src/services/apiClient.ts
import { Profile, ShoppingListItem, SearchQuery, Budget, Address } from '../types'; import { Profile, ShoppingListItem, SearchQuery, Budget, Address } from '../types';
import { logger } from './logger.client'; import { logger } from './logger.client';
import { eventBus } from './eventBus';
// This constant should point to your backend API. // This constant should point to your backend API.
// It's often a good practice to store this in an environment variable. // It's often a good practice to store this in an environment variable.
@@ -62,12 +63,12 @@ const refreshToken = async (): Promise<string> => {
logger.info('Successfully refreshed access token.'); logger.info('Successfully refreshed access token.');
return data.token; return data.token;
} catch (error) { } catch (error) {
logger.error('Failed to refresh token. User will be logged out.', { error }); logger.error({ error }, 'Failed to refresh token. User session has expired.');
// Only perform browser-specific actions if in the browser environment. // Only perform browser-specific actions if in the browser environment.
if (typeof window !== 'undefined') { if (typeof window !== 'undefined') {
localStorage.removeItem('authToken'); localStorage.removeItem('authToken');
// A hard redirect is a simple way to reset the app state to logged-out. // Dispatch a global event that the UI layer can listen for to handle session expiry.
// window.location.href = '/'; // Removed to allow the caller to handle session expiry. eventBus.dispatch('sessionExpired');
} }
throw error; throw error;
} }
@@ -144,9 +145,8 @@ export const apiFetch = async (
// --- DEBUG LOGGING for failed requests --- // --- DEBUG LOGGING for failed requests ---
if (!response.ok) { if (!response.ok) {
const responseText = await response.clone().text(); const responseText = await response.clone().text();
logger.error( logger.error({ url: fullUrl, status: response.status, body: responseText },
`apiFetch: Request to ${fullUrl} failed with status ${response.status}. Response body:`, 'apiFetch: Request failed',
responseText,
); );
} }
// --- END DEBUG LOGGING --- // --- END DEBUG LOGGING ---

31
src/services/eventBus.ts Normal file
View File

@@ -0,0 +1,31 @@
// src/services/eventBus.ts
/**
* A simple, generic event bus for cross-component communication without direct coupling.
* This is particularly useful for broadcasting application-wide events, such as session expiry.
*/
type EventCallback = (data?: any) => void;
/**
 * Minimal publish/subscribe hub keyed by event name.
 * Subscribers registered via `on` are invoked in registration order by
 * `dispatch`; `off` removes a previously registered callback.
 */
class EventBus {
  // Registered callbacks, grouped by event name. Duplicate registrations
  // are kept and invoked once per registration, matching on/off symmetry.
  private listeners: { [key: string]: EventCallback[] } = {};

  /** Registers `callback` to run whenever `event` is dispatched. */
  on(event: string, callback: EventCallback): void {
    const existing = this.listeners[event];
    if (existing) {
      existing.push(callback);
    } else {
      this.listeners[event] = [callback];
    }
  }

  /** Removes a previously registered callback for `event`, if present. */
  off(event: string, callback: EventCallback): void {
    const registered = this.listeners[event];
    if (!registered) return;
    this.listeners[event] = registered.filter((entry) => entry !== callback);
  }

  /** Invokes every callback registered for `event`, passing `data` along. */
  dispatch(event: string, data?: any): void {
    const registered = this.listeners[event];
    if (!registered) return;
    // Fix the iteration bound up front so callbacks appended during dispatch
    // are not invoked in this round (matches Array.prototype.forEach semantics).
    const count = registered.length;
    for (let i = 0; i < count; i++) {
      registered[i](data);
    }
  }
}

/** Shared application-wide event bus instance. */
export const eventBus = new EventBus();

View File

@@ -87,7 +87,7 @@ describe('Geocoding Service', () => {
// Assert // Assert
expect(result).toEqual(coordinates); expect(result).toEqual(coordinates);
expect(logger.error).toHaveBeenCalledWith( expect(logger.error).toHaveBeenCalledWith(
{ err: 'Redis down', cacheKey: expect.any(String) }, { err: expect.any(Error), cacheKey: expect.any(String) },
'Redis GET or JSON.parse command failed. Proceeding without cache.', 'Redis GET or JSON.parse command failed. Proceeding without cache.',
); );
expect(mockGoogleService.geocode).toHaveBeenCalled(); // Should still proceed to fetch expect(mockGoogleService.geocode).toHaveBeenCalled(); // Should still proceed to fetch
@@ -107,7 +107,7 @@ describe('Geocoding Service', () => {
expect(mocks.mockRedis.get).toHaveBeenCalledWith(cacheKey); expect(mocks.mockRedis.get).toHaveBeenCalledWith(cacheKey);
// The service should log the JSON parsing error and continue // The service should log the JSON parsing error and continue
expect(logger.error).toHaveBeenCalledWith( expect(logger.error).toHaveBeenCalledWith(
{ err: expect.any(String), cacheKey: expect.any(String) }, { err: expect.any(SyntaxError), cacheKey: expect.any(String) },
'Redis GET or JSON.parse command failed. Proceeding without cache.', 'Redis GET or JSON.parse command failed. Proceeding without cache.',
); );
expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1); expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1);
@@ -185,7 +185,7 @@ describe('Geocoding Service', () => {
// Assert // Assert
expect(result).toEqual(coordinates); expect(result).toEqual(coordinates);
expect(logger.error).toHaveBeenCalledWith( expect(logger.error).toHaveBeenCalledWith(
{ err: 'Network Error' }, { err: expect.any(Error) },
expect.stringContaining('An error occurred while calling the Google Maps Geocoding API'), expect.stringContaining('An error occurred while calling the Google Maps Geocoding API'),
); );
expect(mockNominatimService.geocode).toHaveBeenCalledWith(address, logger); expect(mockNominatimService.geocode).toHaveBeenCalledWith(address, logger);
@@ -223,7 +223,7 @@ describe('Geocoding Service', () => {
expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1); expect(mockGoogleService.geocode).toHaveBeenCalledTimes(1);
expect(mocks.mockRedis.set).toHaveBeenCalledTimes(1); expect(mocks.mockRedis.set).toHaveBeenCalledTimes(1);
expect(logger.error).toHaveBeenCalledWith( expect(logger.error).toHaveBeenCalledWith(
{ err: 'Redis SET failed', cacheKey: expect.any(String) }, { err: expect.any(Error), cacheKey: expect.any(String) },
'Redis SET command failed. Result will not be cached.', 'Redis SET command failed. Result will not be cached.',
); );
}); });
@@ -271,7 +271,7 @@ describe('Geocoding Service', () => {
// Act & Assert // Act & Assert
await expect(geocodingService.clearGeocodeCache(logger)).rejects.toThrow(redisError); await expect(geocodingService.clearGeocodeCache(logger)).rejects.toThrow(redisError);
expect(logger.error).toHaveBeenCalledWith( expect(logger.error).toHaveBeenCalledWith(
{ err: redisError.message }, { err: expect.any(Error) },
'Failed to clear geocode cache from Redis.', 'Failed to clear geocode cache from Redis.',
); );
expect(mocks.mockRedis.del).not.toHaveBeenCalled(); expect(mocks.mockRedis.del).not.toHaveBeenCalled();

View File

@@ -25,10 +25,7 @@ export class GeocodingService {
return JSON.parse(cached); return JSON.parse(cached);
} }
} catch (error) { } catch (error) {
logger.error( logger.error({ err: error, cacheKey }, 'Redis GET or JSON.parse command failed. Proceeding without cache.');
{ err: error instanceof Error ? error.message : error, cacheKey },
'Redis GET or JSON.parse command failed. Proceeding without cache.',
);
} }
if (process.env.GOOGLE_MAPS_API_KEY) { if (process.env.GOOGLE_MAPS_API_KEY) {
@@ -44,8 +41,8 @@ export class GeocodingService {
); );
} catch (error) { } catch (error) {
logger.error( logger.error(
{ err: error instanceof Error ? error.message : error }, { err: error },
'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.', 'An error occurred while calling the Google Maps Geocoding API. Falling back to Nominatim.'
); );
} }
} else { } else {
@@ -72,10 +69,7 @@ export class GeocodingService {
try { try {
await redis.set(cacheKey, JSON.stringify(result), 'EX', 60 * 60 * 24 * 30); // Cache for 30 days await redis.set(cacheKey, JSON.stringify(result), 'EX', 60 * 60 * 24 * 30); // Cache for 30 days
} catch (error) { } catch (error) {
logger.error( logger.error({ err: error, cacheKey }, 'Redis SET command failed. Result will not be cached.');
{ err: error instanceof Error ? error.message : error, cacheKey },
'Redis SET command failed. Result will not be cached.',
);
} }
} }
@@ -98,10 +92,7 @@ export class GeocodingService {
logger.info(`Successfully deleted ${totalDeleted} geocode cache entries.`); logger.info(`Successfully deleted ${totalDeleted} geocode cache entries.`);
return totalDeleted; return totalDeleted;
} catch (error) { } catch (error) {
logger.error( logger.error({ err: error }, 'Failed to clear geocode cache from Redis.');
{ err: error instanceof Error ? error.message : error },
'Failed to clear geocode cache from Redis.',
);
throw error; throw error;
} }
} }

View File

@@ -1,5 +1,5 @@
// src/services/queueService.server.ts // src/services/queueService.server.ts
import { Queue, Worker, Job } from 'bullmq'; import { Queue, Worker, Job, UnrecoverableError } from 'bullmq';
import IORedis from 'ioredis'; // Correctly imported import IORedis from 'ioredis'; // Correctly imported
import fsPromises from 'node:fs/promises'; import fsPromises from 'node:fs/promises';
import { exec } from 'child_process'; import { exec } from 'child_process';
@@ -164,6 +164,14 @@ const flyerProcessingService = new FlyerProcessingService(
new FlyerDataTransformer(), // Inject the new transformer new FlyerDataTransformer(), // Inject the new transformer
); );
/**
 * Coerces an unknown thrown value into a proper Error instance so that
 * downstream logging keeps a consistent structure and a usable stack trace.
 */
const normalizeError = (error: unknown): Error => {
  if (error instanceof Error) {
    return error;
  }
  return new Error(String(error));
};
/** /**
* A generic function to attach logging event listeners to any worker. * A generic function to attach logging event listeners to any worker.
* This centralizes logging for job completion and final failure. * This centralizes logging for job completion and final failure.
@@ -185,9 +193,27 @@ const attachWorkerEventListeners = (worker: Worker) => {
export const flyerWorker = new Worker<FlyerJobData>( export const flyerWorker = new Worker<FlyerJobData>(
'flyer-processing', // Must match the queue name 'flyer-processing', // Must match the queue name
(job) => { async (job) => {
// The processJob method creates its own job-specific logger internally. try {
return flyerProcessingService.processJob(job); // The processJob method creates its own job-specific logger internally.
return await flyerProcessingService.processJob(job);
} catch (error: unknown) {
const wrappedError = normalizeError(error);
// Check for quota errors or other unrecoverable errors from the AI service
const errorMessage = wrappedError.message || '';
if (
errorMessage.includes('quota') ||
errorMessage.includes('429') ||
errorMessage.includes('RESOURCE_EXHAUSTED')
) {
logger.error(
{ err: wrappedError, jobId: job.id },
'[FlyerWorker] Unrecoverable quota error detected. Failing job immediately.',
);
throw new UnrecoverableError(errorMessage);
}
throw error;
}
}, },
{ {
connection, connection,
@@ -207,19 +233,16 @@ export const emailWorker = new Worker<EmailJobData>(
try { try {
await emailService.sendEmail(job.data, jobLogger); await emailService.sendEmail(job.data, jobLogger);
} catch (error: unknown) { } catch (error: unknown) {
// Standardize error logging to capture the full error object, including the stack trace. const wrappedError = normalizeError(error);
// This provides more context for debugging than just logging the message.
logger.error( logger.error(
{ {
// Log the full error object for better diagnostics. // The patch requested this specific error handling. err: wrappedError,
err: error instanceof Error ? error : new Error(String(error)),
// Also include the job data for context.
jobData: job.data, jobData: job.data,
}, },
`[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`, `[EmailWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
); );
// Re-throw to let BullMQ handle the failure and retry. // Re-throw to let BullMQ handle the failure and retry.
throw error; throw wrappedError;
} }
}, },
{ {
@@ -248,15 +271,12 @@ export const analyticsWorker = new Worker<AnalyticsJobData>(
await new Promise((resolve) => setTimeout(resolve, 10000)); // Simulate a 10-second task await new Promise((resolve) => setTimeout(resolve, 10000)); // Simulate a 10-second task
logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`); logger.info(`[AnalyticsWorker] Successfully generated report for ${reportDate}.`);
} catch (error: unknown) { } catch (error: unknown) {
const wrappedError = normalizeError(error);
// Standardize error logging. // Standardize error logging.
logger.error( logger.error({ err: wrappedError, jobData: job.data },
{
err: error instanceof Error ? error : new Error(String(error)),
jobData: job.data,
},
`[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`, `[AnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
); );
throw error; // Re-throw to let BullMQ handle the failure and retry. throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
} }
}, },
{ {
@@ -313,14 +333,13 @@ export const cleanupWorker = new Worker<CleanupJobData>(
`[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`, `[CleanupWorker] Successfully cleaned up ${paths.length} file(s) for flyer ${flyerId}.`,
); );
} catch (error: unknown) { } catch (error: unknown) {
const wrappedError = normalizeError(error);
// Standardize error logging. // Standardize error logging.
logger.error( logger.error(
{ { err: wrappedError },
err: error instanceof Error ? error : new Error(String(error)),
},
`[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`, `[CleanupWorker] Job ${job.id} for flyer ${flyerId} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
); );
throw error; // Re-throw to let BullMQ handle the failure and retry. throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
} }
}, },
{ {
@@ -348,15 +367,13 @@ export const weeklyAnalyticsWorker = new Worker<WeeklyAnalyticsJobData>(
`[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`, `[WeeklyAnalyticsWorker] Successfully generated weekly report for week ${reportWeek}, ${reportYear}.`,
); );
} catch (error: unknown) { } catch (error: unknown) {
const wrappedError = normalizeError(error);
// Standardize error logging. // Standardize error logging.
logger.error( logger.error(
{ { err: wrappedError, jobData: job.data },
err: error instanceof Error ? error : new Error(String(error)),
jobData: job.data,
},
`[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`, `[WeeklyAnalyticsWorker] Job ${job.id} failed. Attempt ${job.attemptsMade}/${job.opts.attempts}.`,
); );
throw error; // Re-throw to let BullMQ handle the failure and retry. throw wrappedError; // Re-throw to let BullMQ handle the failure and retry.
} }
}, },
{ {
@@ -378,8 +395,9 @@ export const tokenCleanupWorker = new Worker<TokenCleanupJobData>(
jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`); jobLogger.info(`[TokenCleanupWorker] Successfully deleted ${deletedCount} expired tokens.`);
return { deletedCount }; return { deletedCount };
} catch (error: unknown) { } catch (error: unknown) {
jobLogger.error({ err: error }, `[TokenCleanupWorker] Job ${job.id} failed.`); const wrappedError = normalizeError(error);
throw error; jobLogger.error({ err: wrappedError }, `[TokenCleanupWorker] Job ${job.id} failed.`);
throw wrappedError;
} }
}, },
{ {

View File

@@ -175,7 +175,7 @@ describe('Queue Workers', () => {
const emailError = 'SMTP server is down'; // Reject with a string const emailError = 'SMTP server is down'; // Reject with a string
mocks.sendEmail.mockRejectedValue(emailError); mocks.sendEmail.mockRejectedValue(emailError);
await expect(emailProcessor(job)).rejects.toBe(emailError); await expect(emailProcessor(job)).rejects.toThrow(emailError);
// The worker should wrap the string in an Error object for logging // The worker should wrap the string in an Error object for logging
expect(mockLogger.error).toHaveBeenCalledWith( expect(mockLogger.error).toHaveBeenCalledWith(

View File

@@ -56,15 +56,15 @@ describe('Price History API Integration Test (/api/price-history)', () => {
// 4. Create flyer items linking the master item to the flyers with prices // 4. Create flyer items linking the master item to the flyers with prices
await pool.query( await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 199, '$1.99')`, `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 199, '$1.99', '1')`,
[flyerId1, masterItemId], [flyerId1, masterItemId],
); );
await pool.query( await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 249, '$2.49')`, `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 249, '$2.49', '1')`,
[flyerId2, masterItemId], [flyerId2, masterItemId],
); );
await pool.query( await pool.query(
`INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display) VALUES ($1, $2, 'Apples', 299, '$2.99')`, `INSERT INTO public.flyer_items (flyer_id, master_item_id, item, price_in_cents, price_display, quantity) VALUES ($1, $2, 'Apples', 299, '$2.99', '1')`,
[flyerId3, masterItemId], [flyerId3, masterItemId],
); );
}); });