feat: add Sentry source map upload configuration and update environment variables

2026-01-17 17:07:50 -08:00
parent a14816c8ee
commit e6d383103c
26 changed files with 207 additions and 51 deletions

View File

@@ -102,3 +102,13 @@ VITE_SENTRY_ENABLED=true
 # Enable debug mode for SDK troubleshooting (default: false)
 SENTRY_DEBUG=false
 VITE_SENTRY_DEBUG=false
+# ===================
+# Source Maps Upload (ADR-015)
+# ===================
+# Auth token for uploading source maps to Bugsink
+# Create at: https://bugsink.projectium.com (Settings > API Keys)
+# Required for de-minified stack traces in error reports
+SENTRY_AUTH_TOKEN=
+# URL of your Bugsink instance (for source map uploads)
+SENTRY_URL=https://bugsink.projectium.com
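For a quick local check of the upload path, a minimal sketch (the token and DSN values are placeholders; this assumes the Vite config added at the end of this commit):

```bash
# Placeholder values - create a real token in Bugsink (Settings > API Keys)
export SENTRY_AUTH_TOKEN="<bugsink-api-token>"
export SENTRY_URL="https://bugsink.projectium.com"
export VITE_SENTRY_DSN="<project-dsn>"
# With the token set, @sentry/vite-plugin uploads hidden source maps
# during the build and deletes the .map files afterwards
npm run build
```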

View File

@@ -87,11 +87,22 @@ jobs:
 fi
 - name: Build React Application for Production
+# Source Maps (ADR-015): If SENTRY_AUTH_TOKEN is set, the @sentry/vite-plugin will:
+# 1. Generate hidden source maps during build
+# 2. Upload them to Bugsink for error de-minification
+# 3. Delete the .map files after upload (so they're not publicly accessible)
 run: |
 if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
 echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
 exit 1
 fi
+# Source map upload is optional - warn if not configured
+if [ -z "${{ secrets.SENTRY_AUTH_TOKEN }}" ]; then
+echo "WARNING: SENTRY_AUTH_TOKEN not set. Source maps will NOT be uploaded to Bugsink."
+echo " Errors will show minified stack traces. To fix, add SENTRY_AUTH_TOKEN to Gitea secrets."
+fi
 GITEA_SERVER_URL="https://gitea.projectium.com"
 COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
 PACKAGE_VERSION=$(node -p "require('./package.json').version")
@@ -101,6 +112,8 @@ jobs:
 VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN }}" \
 VITE_SENTRY_ENVIRONMENT="production" \
 VITE_SENTRY_ENABLED="true" \
+SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}" \
+SENTRY_URL="https://bugsink.projectium.com" \
 VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
 - name: Deploy Application to Production Server

View File

@@ -374,6 +374,11 @@ jobs:
 # We set the environment variable directly in the command line for this step.
 # This maps the Gitea secret to the environment variable the application expects.
 # We also generate and inject the application version, commit URL, and commit message.
+#
+# Source Maps (ADR-015): If SENTRY_AUTH_TOKEN is set, the @sentry/vite-plugin will:
+# 1. Generate hidden source maps during build
+# 2. Upload them to Bugsink for error de-minification
+# 3. Delete the .map files after upload (so they're not publicly accessible)
 run: |
 # Fail-fast check for the build-time secret.
 if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
@@ -381,6 +386,12 @@ jobs:
 exit 1
 fi
+# Source map upload is optional - warn if not configured
+if [ -z "${{ secrets.SENTRY_AUTH_TOKEN }}" ]; then
+echo "WARNING: SENTRY_AUTH_TOKEN not set. Source maps will NOT be uploaded to Bugsink."
+echo " Errors will show minified stack traces. To fix, add SENTRY_AUTH_TOKEN to Gitea secrets."
+fi
 GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
 # Sanitize commit message to prevent shell injection or build breaks (removes quotes, backticks, backslashes, $)
 COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s | tr -d '"`\\$')
@@ -391,6 +402,8 @@ jobs:
 VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN_TEST }}" \
 VITE_SENTRY_ENVIRONMENT="test" \
 VITE_SENTRY_ENABLED="true" \
+SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}" \
+SENTRY_URL="https://bugsink.projectium.com" \
 VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
 - name: Deploy Application to Test Server

View File

@@ -298,6 +298,7 @@ To add a new secret (e.g., `SENTRY_DSN`):
 - `GOOGLE_MAPS_API_KEY` - Google Maps
 - `GOOGLE_CLIENT_ID`, `GOOGLE_CLIENT_SECRET` - Google OAuth
 - `GH_CLIENT_ID`, `GH_CLIENT_SECRET` - GitHub OAuth
+- `SENTRY_AUTH_TOKEN` - Bugsink API token for source map uploads (create at Settings > API Keys in Bugsink)
 **Production-specific:**
@@ -332,7 +333,7 @@ The test environment (`flyer-crawler-test.projectium.com`) uses **both** Gitea C
 | User | Database | Purpose |
 | -------------------- | -------------------- | ---------- |
-| `flyer_crawler_prod` | `flyer-crawler`      | Production |
+| `flyer_crawler_prod` | `flyer-crawler-prod` | Production |
 | `flyer_crawler_test` | `flyer-crawler-test` | Testing |
 **Required Setup Commands** (run as `postgres` superuser):

View File

@@ -14,6 +14,17 @@ Flyer Crawler uses PostgreSQL with several extensions for full-text search, geog
 ---
+## Database Users
+This project uses **environment-specific database users** to isolate production and test environments:
+| User | Database | Purpose |
+| -------------------- | -------------------- | ---------- |
+| `flyer_crawler_prod` | `flyer-crawler-prod` | Production |
+| `flyer_crawler_test` | `flyer-crawler-test` | Testing |
+---
 ## Production Database Setup
 ### Step 1: Install PostgreSQL
@@ -34,15 +45,19 @@ sudo -u postgres psql
 Run the following SQL commands (replace `'a_very_strong_password'` with a secure password):
 ```sql
--- Create a new role for your application
-CREATE ROLE flyer_crawler_user WITH LOGIN PASSWORD 'a_very_strong_password';
+-- Create the production role
+CREATE ROLE flyer_crawler_prod WITH LOGIN PASSWORD 'a_very_strong_password';
 -- Create the production database
-CREATE DATABASE "flyer-crawler-prod" WITH OWNER = flyer_crawler_user;
+CREATE DATABASE "flyer-crawler-prod" WITH OWNER = flyer_crawler_prod;
 -- Connect to the new database
 \c "flyer-crawler-prod"
+-- Grant schema privileges
+ALTER SCHEMA public OWNER TO flyer_crawler_prod;
+GRANT CREATE, USAGE ON SCHEMA public TO flyer_crawler_prod;
 -- Install required extensions (must be done as superuser)
 CREATE EXTENSION IF NOT EXISTS postgis;
 CREATE EXTENSION IF NOT EXISTS pg_trgm;
@@ -57,7 +72,7 @@ CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
 Navigate to your project directory and run:
 ```bash
-psql -U flyer_crawler_user -d "flyer-crawler-prod" -f sql/master_schema_rollup.sql
+psql -U flyer_crawler_prod -d "flyer-crawler-prod" -f sql/master_schema_rollup.sql
 ```
 This creates all tables, functions, triggers, and seeds essential data (categories, master items).
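As a sanity check after applying the schema, something like the following should list the new tables (a sketch; assumes the prod role can connect locally):

```bash
# List tables in the production database as the new prod role
psql -U flyer_crawler_prod -d "flyer-crawler-prod" -c "\dt"
```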
@@ -67,7 +82,7 @@ This creates all tables, functions, triggers, and seeds essential data (categori
 Set the required environment variables and run the seed script:
 ```bash
-export DB_USER=flyer_crawler_user
+export DB_USER=flyer_crawler_prod
 export DB_PASSWORD=your_password
 export DB_NAME="flyer-crawler-prod"
 export DB_HOST=localhost
@@ -88,20 +103,24 @@ sudo -u postgres psql
 ```
 ```sql
+-- Create the test role
+CREATE ROLE flyer_crawler_test WITH LOGIN PASSWORD 'a_very_strong_password';
 -- Create the test database
-CREATE DATABASE "flyer-crawler-test" WITH OWNER = flyer_crawler_user;
+CREATE DATABASE "flyer-crawler-test" WITH OWNER = flyer_crawler_test;
 -- Connect to the test database
 \c "flyer-crawler-test"
+-- Grant schema privileges (required for test runner to reset schema)
+ALTER SCHEMA public OWNER TO flyer_crawler_test;
+GRANT CREATE, USAGE ON SCHEMA public TO flyer_crawler_test;
 -- Install required extensions
 CREATE EXTENSION IF NOT EXISTS postgis;
 CREATE EXTENSION IF NOT EXISTS pg_trgm;
 CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
--- Grant schema ownership (required for test runner to reset schema)
-ALTER SCHEMA public OWNER TO flyer_crawler_user;
 -- Exit
 \q
 ```
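To confirm the grants took effect, a quick check from psql (a sketch; `\dn+` and `\dx` are standard psql meta-commands):

```sql
-- Run against "flyer-crawler-test" to verify ownership and extensions
\dn+ public   -- public schema should be owned by flyer_crawler_test
\dx           -- should list postgis, pg_trgm, and uuid-ossp
```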
@@ -110,12 +129,28 @@ ALTER SCHEMA public OWNER TO flyer_crawler_user;
 Ensure these secrets are set in your Gitea repository settings:
-| Secret        | Description                                |
-| ------------- | ------------------------------------------ |
-| `DB_HOST`     | Database hostname (e.g., `localhost`)      |
-| `DB_PORT`     | Database port (e.g., `5432`)               |
-| `DB_USER`     | Database user (e.g., `flyer_crawler_user`) |
-| `DB_PASSWORD` | Database password                          |
+**Shared:**
+| Secret    | Description                            |
+| --------- | -------------------------------------- |
+| `DB_HOST` | Database hostname (e.g., `localhost`)  |
+| `DB_PORT` | Database port (e.g., `5432`)           |
+**Production-specific:**
+| Secret             | Description                                     |
+| ------------------ | ----------------------------------------------- |
+| `DB_USER_PROD`     | Production database user (`flyer_crawler_prod`) |
+| `DB_PASSWORD_PROD` | Production database password                    |
+| `DB_DATABASE_PROD` | Production database name (`flyer-crawler-prod`) |
+**Test-specific:**
+| Secret             | Description                                |
+| ------------------ | ------------------------------------------ |
+| `DB_USER_TEST`     | Test database user (`flyer_crawler_test`)  |
+| `DB_PASSWORD_TEST` | Test database password                     |
+| `DB_DATABASE_TEST` | Test database name (`flyer-crawler-test`)  |
 ---
@@ -135,7 +170,7 @@ This approach is faster than creating/destroying databases and doesn't require s
 ## Connecting to Production Database
 ```bash
-psql -h localhost -U flyer_crawler_user -d "flyer-crawler-prod" -W
+psql -h localhost -U flyer_crawler_prod -d "flyer-crawler-prod" -W
 ```
 ---
@@ -171,13 +206,13 @@ POSTGIS="3.2.0 c3e3cc0" GEOS="3.10.2-CAPI-1.16.0" PROJ="8.2.1"
 ### Create a Backup
 ```bash
-pg_dump -U flyer_crawler_user -d "flyer-crawler-prod" -F c -f backup.dump
+pg_dump -U flyer_crawler_prod -d "flyer-crawler-prod" -F c -f backup.dump
 ```
 ### Restore from Backup
 ```bash
-pg_restore -U flyer_crawler_user -d "flyer-crawler-prod" -c backup.dump
+pg_restore -U flyer_crawler_prod -d "flyer-crawler-prod" -c backup.dump
 ```
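Before restoring over a live database, the dump's table of contents can be inspected read-only, for example:

```bash
# List the contents of the custom-format dump without restoring anything
pg_restore --list backup.dump
```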
 ---

View File

@@ -61,14 +61,16 @@ See [INSTALL.md](INSTALL.md) for detailed setup instructions.
 This project uses environment variables for configuration (no `.env` files). Key variables:
-| Variable                            | Description                      |
-| ----------------------------------- | -------------------------------- |
-| `DB_HOST`, `DB_USER`, `DB_PASSWORD` | PostgreSQL credentials           |
-| `DB_DATABASE_PROD`                  | Production database name         |
-| `JWT_SECRET`                        | Authentication token signing key |
-| `VITE_GOOGLE_GENAI_API_KEY`         | Google Gemini API key            |
-| `GOOGLE_MAPS_API_KEY`               | Google Maps Geocoding API key    |
-| `REDIS_PASSWORD_PROD`               | Redis password                   |
+| Variable                                     | Description                      |
+| -------------------------------------------- | -------------------------------- |
+| `DB_HOST`                                    | PostgreSQL host                  |
+| `DB_USER_PROD`, `DB_PASSWORD_PROD`           | Production database credentials  |
+| `DB_USER_TEST`, `DB_PASSWORD_TEST`           | Test database credentials        |
+| `DB_DATABASE_PROD`, `DB_DATABASE_TEST`       | Database names                   |
+| `JWT_SECRET`                                 | Authentication token signing key |
+| `VITE_GOOGLE_GENAI_API_KEY`                  | Google Gemini API key            |
+| `GOOGLE_MAPS_API_KEY`                        | Google Maps Geocoding API key    |
+| `REDIS_PASSWORD_PROD`, `REDIS_PASSWORD_TEST` | Redis passwords                  |
 See [INSTALL.md](INSTALL.md) for the complete list.

View File

@@ -10,11 +10,16 @@
 -- Usage:
 --   Connect to the database as a superuser (e.g., 'postgres') and run this
 --   entire script.
+--
+-- IMPORTANT: Set the new_owner variable to the appropriate user:
+--   - For production: 'flyer_crawler_prod'
+--   - For test: 'flyer_crawler_test'
 DO $$
 DECLARE
 -- Define the new owner for all objects.
-new_owner TEXT := 'flyer_crawler_user'; -- Change this to 'flyer_crawler_test' when running against the test database.
+new_owner TEXT := 'flyer_crawler_prod';
 -- Variables for iterating through object names.
 tbl_name TEXT;
@@ -81,7 +86,7 @@ END $$;
 --
 -- -- Construct and execute the ALTER FUNCTION statement using the full signature.
 -- -- This command is now unambiguous and will work for all functions, including overloaded ones.
--- EXECUTE format('ALTER FUNCTION %s OWNER TO flyer_crawler_user;', func_signature);
+-- EXECUTE format('ALTER FUNCTION %s OWNER TO flyer_crawler_prod;', func_signature);
 -- END LOOP;
 -- END $$;
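A possible invocation of this script (the `sql/update_owners.sql` path is illustrative; use wherever the script actually lives in the repo):

```bash
# Run as the postgres superuser against the target database
# (edit new_owner to 'flyer_crawler_test' before the second command)
sudo -u postgres psql -d "flyer-crawler-prod" -f sql/update_owners.sql
sudo -u postgres psql -d "flyer-crawler-test" -f sql/update_owners.sql
```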

View File

@@ -33,7 +33,9 @@ export const useActivityLogQuery = (limit: number = 20, offset: number = 0) => {
 throw new Error(error.message || 'Failed to fetch activity log');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 // Activity log changes frequently, keep stale time short
 staleTime: 1000 * 30, // 30 seconds
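The same `json.data ?? json` unwrapping is repeated in every hook touched by this commit; a shared helper could centralize it. A minimal sketch (the `ApiEnvelope` type and `unwrapApiResponse` name are illustrative, not part of this commit):

```typescript
// Illustrative helper, not part of this commit.
// Unwraps the { success: true, data: ... } envelope while passing
// through responses that return the payload directly.
interface ApiEnvelope<T> {
  success: boolean;
  data: T;
}

async function unwrapApiResponse<T>(response: Response): Promise<T> {
  const json: ApiEnvelope<T> | T = await response.json();
  return (json as ApiEnvelope<T>).data ?? (json as T);
}
```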

View File

@@ -31,7 +31,9 @@ export const useApplicationStatsQuery = () => {
 throw new Error(error.message || 'Failed to fetch application stats');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: {...} }, extract the data object
+return json.data ?? json;
 },
 staleTime: 1000 * 60 * 2, // 2 minutes - stats change moderately, not as frequently as activity log
 });

View File

@@ -41,7 +41,9 @@ export const useAuthProfileQuery = (enabled: boolean = true) => {
 throw new Error(error.message || 'Failed to fetch user profile');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: {...} }, extract the data object
+return json.data ?? json;
 },
 enabled: enabled && hasToken,
 staleTime: 1000 * 60 * 5, // 5 minutes

View File

@@ -31,7 +31,9 @@ export const useBestSalePricesQuery = (enabled: boolean = true) => {
 throw new Error(error.message || 'Failed to fetch best sale prices');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 enabled,
 // Prices update when flyers change, keep fresh for 2 minutes

View File

@@ -27,7 +27,9 @@ export const useBrandsQuery = (enabled: boolean = true) => {
 throw new Error(error.message || 'Failed to fetch brands');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 enabled,
 staleTime: 1000 * 60 * 5, // 5 minutes - brands don't change frequently

View File

@@ -26,7 +26,9 @@ export const useCategoriesQuery = () => {
 throw new Error(error.message || 'Failed to fetch categories');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 staleTime: 1000 * 60 * 60, // 1 hour - categories rarely change
 });

View File

@@ -40,7 +40,9 @@ export const useFlyerItemCountQuery = (flyerIds: number[], enabled: boolean = tr
 throw new Error(error.message || 'Failed to count flyer items');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: {...} }, extract the data object
+return json.data ?? json;
 },
 enabled: enabled && flyerIds.length > 0,
 // Count doesn't change frequently

View File

@@ -37,7 +37,9 @@ export const useFlyerItemsForFlyersQuery = (flyerIds: number[], enabled: boolean
 throw new Error(error.message || 'Failed to fetch flyer items');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 enabled: enabled && flyerIds.length > 0,
 // Flyer items don't change frequently once created

View File

@@ -35,9 +35,9 @@ export const useFlyerItemsQuery = (flyerId: number | undefined) => {
 throw new Error(error.message || 'Failed to fetch flyer items');
 }
-const data = await response.json();
-// API returns { items: FlyerItem[] }
-return data.items || [];
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 // Only run the query if we have a valid flyer ID
 enabled: !!flyerId,

View File

@@ -32,7 +32,9 @@ export const useFlyersQuery = (limit: number = 20, offset: number = 0) => {
 throw new Error(error.message || 'Failed to fetch flyers');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 // Keep data fresh for 2 minutes since flyers don't change frequently
 staleTime: 1000 * 60 * 2,

View File

@@ -29,7 +29,9 @@ export const useLeaderboardQuery = (limit: number = 10, enabled: boolean = true)
 throw new Error(error.message || 'Failed to fetch leaderboard');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 enabled,
 staleTime: 1000 * 60 * 2, // 2 minutes - leaderboard can change moderately

View File

@@ -31,7 +31,9 @@ export const useMasterItemsQuery = () => {
 throw new Error(error.message || 'Failed to fetch master items');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 // Master items change infrequently, keep data fresh for 10 minutes
 staleTime: 1000 * 60 * 10,

View File

@@ -34,7 +34,9 @@ export const usePriceHistoryQuery = (masterItemIds: number[], enabled: boolean =
 throw new Error(error.message || 'Failed to fetch price history');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 enabled: enabled && masterItemIds.length > 0,
 staleTime: 1000 * 60 * 10, // 10 minutes - historical data doesn't change frequently

View File

@@ -31,7 +31,9 @@ export const useShoppingListsQuery = (enabled: boolean) => {
 throw new Error(error.message || 'Failed to fetch shopping lists');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 enabled,
 // Keep data fresh for 1 minute since users actively manage shopping lists

View File

@@ -26,7 +26,9 @@ export const useSuggestedCorrectionsQuery = () => {
 throw new Error(error.message || 'Failed to fetch suggested corrections');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 staleTime: 1000 * 60, // 1 minute - corrections change moderately
 });

View File

@@ -36,7 +36,9 @@ export const useUserAddressQuery = (
 throw new Error(error.message || 'Failed to fetch user address');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: {...} }, extract the data object
+return json.data ?? json;
 },
 enabled: enabled && !!addressId,
 staleTime: 1000 * 60 * 5, // 5 minutes - address data doesn't change frequently

View File

@@ -48,8 +48,12 @@ export const useUserProfileDataQuery = (enabled: boolean = true) => {
 throw new Error(error.message || 'Failed to fetch user achievements');
 }
-const profile: UserProfile = await profileRes.json();
-const achievements: (UserAchievement & Achievement)[] = await achievementsRes.json();
+const profileJson = await profileRes.json();
+const achievementsJson = await achievementsRes.json();
+// API returns { success: true, data: {...} }, extract the data
+const profile: UserProfile = profileJson.data ?? profileJson;
+const achievements: (UserAchievement & Achievement)[] =
+  achievementsJson.data ?? achievementsJson;
 return {
 profile,

View File

@@ -31,7 +31,9 @@ export const useWatchedItemsQuery = (enabled: boolean) => {
 throw new Error(error.message || 'Failed to fetch watched items');
 }
-return response.json();
+const json = await response.json();
+// API returns { success: true, data: [...] }, extract the data array
+return json.data ?? json;
 },
 enabled,
 // Keep data fresh for 1 minute since users actively manage watched items

View File

@@ -2,6 +2,7 @@
 import path from 'path';
 import { defineConfig } from 'vitest/config';
 import react from '@vitejs/plugin-react';
+import { sentryVitePlugin } from '@sentry/vite-plugin';
 // Ensure NODE_ENV is set to 'test' for all Vitest runs.
 process.env.NODE_ENV = 'test';
@@ -10,6 +11,13 @@ process.on('unhandledRejection', (reason, promise) => {
 console.error('Unhandled Rejection at:', promise, 'reason:', reason);
 });
+/**
+ * Determines if we should enable Sentry source map uploads.
+ * Only enabled during production builds with the required environment variables.
+ */
+const shouldUploadSourceMaps =
+  process.env.VITE_SENTRY_DSN && process.env.SENTRY_AUTH_TOKEN && process.env.NODE_ENV !== 'test';
 /**
  * This is the main configuration file for Vite and the Vitest 'unit' test project.
  * When running `vitest`, it is orchestrated by `vitest.workspace.ts`, which
@@ -18,7 +26,40 @@ process.on('unhandledRejection', (reason, promise) => {
 export default defineConfig({
 // Vite-specific configuration for the dev server, build, etc.
 // This is inherited by all Vitest projects.
-plugins: [react()],
+build: {
+  // Generate source maps for production builds (hidden = not referenced in built files)
+  // The Sentry plugin will upload them and then delete them
+  sourcemap: shouldUploadSourceMaps ? 'hidden' : false,
+},
+plugins: [
+  react(),
+  // Conditionally add Sentry plugin for production builds with source map upload
+  ...(shouldUploadSourceMaps
+    ? [
+        sentryVitePlugin({
+          // URL of the Bugsink instance (Sentry-compatible)
+          // This is read from SENTRY_URL env var or falls back to the DSN's origin
+          url: process.env.SENTRY_URL,
+          // Org and project are required by the API but Bugsink ignores them
+          // when using debug ID matching (Bugsink 1.5+)
+          org: 'flyer-crawler',
+          project: 'flyer-crawler-frontend',
+          // Auth token from environment variable
+          authToken: process.env.SENTRY_AUTH_TOKEN,
+          sourcemaps: {
+            // Delete source maps after upload to prevent public exposure
+            filesToDeleteAfterUpload: ['./dist/**/*.map'],
+          },
+          // Disable telemetry to Sentry
+          telemetry: false,
+        }),
+      ]
+    : []),
+],
 server: {
 port: 3000,
 host: '0.0.0.0',