Compare commits

...

2 Commits

Author SHA1 Message Date
dd3cbeb65d fix unit tests from using response
Some checks failed
Deploy to Test Environment / deploy-to-test (push) Failing after 1m55s
2026-01-17 17:24:05 -08:00
e6d383103c feat: add Sentry source map upload configuration and update environment variables 2026-01-17 17:07:50 -08:00
38 changed files with 269 additions and 1434 deletions

View File

@@ -98,7 +98,8 @@
"Bash(ssh:*)",
"mcp__redis__list",
"Read(//d/gitea/bugsink-mcp/**)",
"Bash(d:/nodejs/npm.cmd install)"
"Bash(d:/nodejs/npm.cmd install)",
"Bash(node node_modules/vitest/vitest.mjs run:*)"
]
}
}

View File

@@ -102,3 +102,13 @@ VITE_SENTRY_ENABLED=true
# Enable debug mode for SDK troubleshooting (default: false)
SENTRY_DEBUG=false
VITE_SENTRY_DEBUG=false
# ===================
# Source Maps Upload (ADR-015)
# ===================
# Auth token for uploading source maps to Bugsink
# Create at: https://bugsink.projectium.com (Settings > API Keys)
# Required for de-minified stack traces in error reports
SENTRY_AUTH_TOKEN=
# URL of your Bugsink instance (for source map uploads)
SENTRY_URL=https://bugsink.projectium.com

View File

@@ -87,11 +87,22 @@ jobs:
fi
- name: Build React Application for Production
# Source Maps (ADR-015): If SENTRY_AUTH_TOKEN is set, the @sentry/vite-plugin will:
# 1. Generate hidden source maps during build
# 2. Upload them to Bugsink for error de-minification
# 3. Delete the .map files after upload (so they're not publicly accessible)
run: |
if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
echo "ERROR: The VITE_GOOGLE_GENAI_API_KEY secret is not set."
exit 1
fi
# Source map upload is optional - warn if not configured
if [ -z "${{ secrets.SENTRY_AUTH_TOKEN }}" ]; then
echo "WARNING: SENTRY_AUTH_TOKEN not set. Source maps will NOT be uploaded to Bugsink."
echo " Errors will show minified stack traces. To fix, add SENTRY_AUTH_TOKEN to Gitea secrets."
fi
GITEA_SERVER_URL="https://gitea.projectium.com"
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s)
PACKAGE_VERSION=$(node -p "require('./package.json').version")
@@ -101,6 +112,8 @@ jobs:
VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN }}" \
VITE_SENTRY_ENVIRONMENT="production" \
VITE_SENTRY_ENABLED="true" \
SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}" \
SENTRY_URL="https://bugsink.projectium.com" \
VITE_API_BASE_URL=/api VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY }} npm run build
- name: Deploy Application to Production Server

View File

@@ -374,6 +374,11 @@ jobs:
# We set the environment variable directly in the command line for this step.
# This maps the Gitea secret to the environment variable the application expects.
# We also generate and inject the application version, commit URL, and commit message.
#
# Source Maps (ADR-015): If SENTRY_AUTH_TOKEN is set, the @sentry/vite-plugin will:
# 1. Generate hidden source maps during build
# 2. Upload them to Bugsink for error de-minification
# 3. Delete the .map files after upload (so they're not publicly accessible)
run: |
# Fail-fast check for the build-time secret.
if [ -z "${{ secrets.VITE_GOOGLE_GENAI_API_KEY }}" ]; then
@@ -381,6 +386,12 @@ jobs:
exit 1
fi
# Source map upload is optional - warn if not configured
if [ -z "${{ secrets.SENTRY_AUTH_TOKEN }}" ]; then
echo "WARNING: SENTRY_AUTH_TOKEN not set. Source maps will NOT be uploaded to Bugsink."
echo " Errors will show minified stack traces. To fix, add SENTRY_AUTH_TOKEN to Gitea secrets."
fi
GITEA_SERVER_URL="https://gitea.projectium.com" # Your Gitea instance URL
# Sanitize commit message to prevent shell injection or build breaks (removes quotes, backticks, backslashes, $)
COMMIT_MESSAGE=$(git log -1 --grep="\[skip ci\]" --invert-grep --pretty=%s | tr -d '"`\\$')
@@ -391,6 +402,8 @@ jobs:
VITE_SENTRY_DSN="${{ secrets.VITE_SENTRY_DSN_TEST }}" \
VITE_SENTRY_ENVIRONMENT="test" \
VITE_SENTRY_ENABLED="true" \
SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}" \
SENTRY_URL="https://bugsink.projectium.com" \
VITE_API_BASE_URL="https://flyer-crawler-test.projectium.com/api" VITE_API_KEY=${{ secrets.VITE_GOOGLE_GENAI_API_KEY_TEST }} npm run build
- name: Deploy Application to Test Server

View File

@@ -298,6 +298,7 @@ To add a new secret (e.g., `SENTRY_DSN`):
- `GOOGLE_MAPS_API_KEY` - Google Maps
- `GOOGLE_CLIENT_ID`, `GOOGLE_CLIENT_SECRET` - Google OAuth
- `GH_CLIENT_ID`, `GH_CLIENT_SECRET` - GitHub OAuth
- `SENTRY_AUTH_TOKEN` - Bugsink API token for source map uploads (create at Settings > API Keys in Bugsink)
**Production-specific:**
@@ -332,7 +333,7 @@ The test environment (`flyer-crawler-test.projectium.com`) uses **both** Gitea C
| User | Database | Purpose |
| -------------------- | -------------------- | ---------- |
| `flyer_crawler_prod` | `flyer-crawler` | Production |
| `flyer_crawler_prod` | `flyer-crawler-prod` | Production |
| `flyer_crawler_test` | `flyer-crawler-test` | Testing |
**Required Setup Commands** (run as `postgres` superuser):

View File

@@ -14,6 +14,17 @@ Flyer Crawler uses PostgreSQL with several extensions for full-text search, geog
---
## Database Users
This project uses **environment-specific database users** to isolate production and test environments:
| User | Database | Purpose |
| -------------------- | -------------------- | ---------- |
| `flyer_crawler_prod` | `flyer-crawler-prod` | Production |
| `flyer_crawler_test` | `flyer-crawler-test` | Testing |
---
## Production Database Setup
### Step 1: Install PostgreSQL
@@ -34,15 +45,19 @@ sudo -u postgres psql
Run the following SQL commands (replace `'a_very_strong_password'` with a secure password):
```sql
-- Create a new role for your application
CREATE ROLE flyer_crawler_user WITH LOGIN PASSWORD 'a_very_strong_password';
-- Create the production role
CREATE ROLE flyer_crawler_prod WITH LOGIN PASSWORD 'a_very_strong_password';
-- Create the production database
CREATE DATABASE "flyer-crawler-prod" WITH OWNER = flyer_crawler_user;
CREATE DATABASE "flyer-crawler-prod" WITH OWNER = flyer_crawler_prod;
-- Connect to the new database
\c "flyer-crawler-prod"
-- Grant schema privileges
ALTER SCHEMA public OWNER TO flyer_crawler_prod;
GRANT CREATE, USAGE ON SCHEMA public TO flyer_crawler_prod;
-- Install required extensions (must be done as superuser)
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
@@ -57,7 +72,7 @@ CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
Navigate to your project directory and run:
```bash
psql -U flyer_crawler_user -d "flyer-crawler-prod" -f sql/master_schema_rollup.sql
psql -U flyer_crawler_prod -d "flyer-crawler-prod" -f sql/master_schema_rollup.sql
```
This creates all tables, functions, triggers, and seeds essential data (categories, master items).
@@ -67,7 +82,7 @@ This creates all tables, functions, triggers, and seeds essential data (categori
Set the required environment variables and run the seed script:
```bash
export DB_USER=flyer_crawler_user
export DB_USER=flyer_crawler_prod
export DB_PASSWORD=your_password
export DB_NAME="flyer-crawler-prod"
export DB_HOST=localhost
@@ -88,20 +103,24 @@ sudo -u postgres psql
```
```sql
-- Create the test role
CREATE ROLE flyer_crawler_test WITH LOGIN PASSWORD 'a_very_strong_password';
-- Create the test database
CREATE DATABASE "flyer-crawler-test" WITH OWNER = flyer_crawler_user;
CREATE DATABASE "flyer-crawler-test" WITH OWNER = flyer_crawler_test;
-- Connect to the test database
\c "flyer-crawler-test"
-- Grant schema privileges (required for test runner to reset schema)
ALTER SCHEMA public OWNER TO flyer_crawler_test;
GRANT CREATE, USAGE ON SCHEMA public TO flyer_crawler_test;
-- Install required extensions
CREATE EXTENSION IF NOT EXISTS postgis;
CREATE EXTENSION IF NOT EXISTS pg_trgm;
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- Grant schema ownership (required for test runner to reset schema)
ALTER SCHEMA public OWNER TO flyer_crawler_user;
-- Exit
\q
```
@@ -110,12 +129,28 @@ ALTER SCHEMA public OWNER TO flyer_crawler_user;
Ensure these secrets are set in your Gitea repository settings:
| Secret | Description |
| ------------- | ------------------------------------------ |
| `DB_HOST` | Database hostname (e.g., `localhost`) |
| `DB_PORT` | Database port (e.g., `5432`) |
| `DB_USER` | Database user (e.g., `flyer_crawler_user`) |
| `DB_PASSWORD` | Database password |
**Shared:**
| Secret | Description |
| --------- | ------------------------------------- |
| `DB_HOST` | Database hostname (e.g., `localhost`) |
| `DB_PORT` | Database port (e.g., `5432`) |
**Production-specific:**
| Secret | Description |
| ------------------ | ----------------------------------------------- |
| `DB_USER_PROD` | Production database user (`flyer_crawler_prod`) |
| `DB_PASSWORD_PROD` | Production database password |
| `DB_DATABASE_PROD` | Production database name (`flyer-crawler-prod`) |
**Test-specific:**
| Secret | Description |
| ------------------ | ----------------------------------------- |
| `DB_USER_TEST` | Test database user (`flyer_crawler_test`) |
| `DB_PASSWORD_TEST` | Test database password |
| `DB_DATABASE_TEST` | Test database name (`flyer-crawler-test`) |
---
@@ -135,7 +170,7 @@ This approach is faster than creating/destroying databases and doesn't require s
## Connecting to Production Database
```bash
psql -h localhost -U flyer_crawler_user -d "flyer-crawler-prod" -W
psql -h localhost -U flyer_crawler_prod -d "flyer-crawler-prod" -W
```
---
@@ -171,13 +206,13 @@ POSTGIS="3.2.0 c3e3cc0" GEOS="3.10.2-CAPI-1.16.0" PROJ="8.2.1"
### Create a Backup
```bash
pg_dump -U flyer_crawler_user -d "flyer-crawler-prod" -F c -f backup.dump
pg_dump -U flyer_crawler_prod -d "flyer-crawler-prod" -F c -f backup.dump
```
### Restore from Backup
```bash
pg_restore -U flyer_crawler_user -d "flyer-crawler-prod" -c backup.dump
pg_restore -U flyer_crawler_prod -d "flyer-crawler-prod" -c backup.dump
```
---

View File

@@ -61,14 +61,16 @@ See [INSTALL.md](INSTALL.md) for detailed setup instructions.
This project uses environment variables for configuration (no `.env` files). Key variables:
| Variable | Description |
| ----------------------------------- | -------------------------------- |
| `DB_HOST`, `DB_USER`, `DB_PASSWORD` | PostgreSQL credentials |
| `DB_DATABASE_PROD` | Production database name |
| `JWT_SECRET` | Authentication token signing key |
| `VITE_GOOGLE_GENAI_API_KEY` | Google Gemini API key |
| `GOOGLE_MAPS_API_KEY` | Google Maps Geocoding API key |
| `REDIS_PASSWORD_PROD` | Redis password |
| Variable | Description |
| -------------------------------------------- | -------------------------------- |
| `DB_HOST` | PostgreSQL host |
| `DB_USER_PROD`, `DB_PASSWORD_PROD` | Production database credentials |
| `DB_USER_TEST`, `DB_PASSWORD_TEST` | Test database credentials |
| `DB_DATABASE_PROD`, `DB_DATABASE_TEST` | Database names |
| `JWT_SECRET` | Authentication token signing key |
| `VITE_GOOGLE_GENAI_API_KEY` | Google Gemini API key |
| `GOOGLE_MAPS_API_KEY` | Google Maps Geocoding API key |
| `REDIS_PASSWORD_PROD`, `REDIS_PASSWORD_TEST` | Redis passwords |
See [INSTALL.md](INSTALL.md) for the complete list.

View File

@@ -10,11 +10,16 @@
-- Usage:
-- Connect to the database as a superuser (e.g., 'postgres') and run this
-- entire script.
--
-- IMPORTANT: Set the new_owner variable to the appropriate user:
-- - For production: 'flyer_crawler_prod'
-- - For test: 'flyer_crawler_test'
DO $$
DECLARE
-- Define the new owner for all objects.
new_owner TEXT := 'flyer_crawler_user';
-- Change this to 'flyer_crawler_test' when running against the test database.
new_owner TEXT := 'flyer_crawler_prod';
-- Variables for iterating through object names.
tbl_name TEXT;
@@ -81,7 +86,7 @@ END $$;
--
-- -- Construct and execute the ALTER FUNCTION statement using the full signature.
-- -- This command is now unambiguous and will work for all functions, including overloaded ones.
-- EXECUTE format('ALTER FUNCTION %s OWNER TO flyer_crawler_user;', func_signature);
-- EXECUTE format('ALTER FUNCTION %s OWNER TO flyer_crawler_prod;', func_signature);
-- END LOOP;
-- END $$;

View File

@@ -31,9 +31,10 @@ describe('useActivityLogQuery', () => {
{ id: 1, action: 'user_login', timestamp: '2024-01-01T10:00:00Z' },
{ id: 2, action: 'flyer_uploaded', timestamp: '2024-01-01T11:00:00Z' },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchActivityLog.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockActivityLog),
json: () => Promise.resolve({ success: true, data: mockActivityLog }),
} as Response);
const { result } = renderHook(() => useActivityLogQuery(), { wrapper });
@@ -46,9 +47,10 @@ describe('useActivityLogQuery', () => {
it('should fetch activity log with custom limit and offset', async () => {
const mockActivityLog = [{ id: 3, action: 'item_added', timestamp: '2024-01-01T12:00:00Z' }];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchActivityLog.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockActivityLog),
json: () => Promise.resolve({ success: true, data: mockActivityLog }),
} as Response);
const { result } = renderHook(() => useActivityLogQuery(10, 5), { wrapper });
@@ -102,9 +104,10 @@ describe('useActivityLogQuery', () => {
});
it('should return empty array for no activity log entries', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.fetchActivityLog.mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useActivityLogQuery(), { wrapper });

View File

@@ -33,7 +33,9 @@ export const useActivityLogQuery = (limit: number = 20, offset: number = 0) => {
throw new Error(error.message || 'Failed to fetch activity log');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
// Activity log changes frequently, keep stale time short
staleTime: 1000 * 30, // 30 seconds

View File

@@ -35,9 +35,10 @@ describe('useApplicationStatsQuery', () => {
pendingCorrectionsCount: 10,
recipeCount: 75,
};
// API returns wrapped response: { success: true, data: {...} }
mockedApiClient.getApplicationStats.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockStats),
json: () => Promise.resolve({ success: true, data: mockStats }),
} as Response);
const { result } = renderHook(() => useApplicationStatsQuery(), { wrapper });

View File

@@ -31,7 +31,9 @@ export const useApplicationStatsQuery = () => {
throw new Error(error.message || 'Failed to fetch application stats');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: {...} }, extract the data object
return json.data ?? json;
},
staleTime: 1000 * 60 * 2, // 2 minutes - stats change moderately, not as frequently as activity log
});

View File

@@ -41,7 +41,9 @@ export const useAuthProfileQuery = (enabled: boolean = true) => {
throw new Error(error.message || 'Failed to fetch user profile');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: {...} }, extract the data object
return json.data ?? json;
},
enabled: enabled && hasToken,
staleTime: 1000 * 60 * 5, // 5 minutes

View File

@@ -31,7 +31,9 @@ export const useBestSalePricesQuery = (enabled: boolean = true) => {
throw new Error(error.message || 'Failed to fetch best sale prices');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
enabled,
// Prices update when flyers change, keep fresh for 2 minutes

View File

@@ -27,7 +27,9 @@ export const useBrandsQuery = (enabled: boolean = true) => {
throw new Error(error.message || 'Failed to fetch brands');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
enabled,
staleTime: 1000 * 60 * 5, // 5 minutes - brands don't change frequently

View File

@@ -32,9 +32,10 @@ describe('useCategoriesQuery', () => {
{ category_id: 2, name: 'Bakery' },
{ category_id: 3, name: 'Produce' },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchCategories.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockCategories),
json: () => Promise.resolve({ success: true, data: mockCategories }),
} as Response);
const { result } = renderHook(() => useCategoriesQuery(), { wrapper });
@@ -88,9 +89,10 @@ describe('useCategoriesQuery', () => {
});
it('should return empty array for no categories', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.fetchCategories.mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useCategoriesQuery(), { wrapper });

View File

@@ -26,7 +26,9 @@ export const useCategoriesQuery = () => {
throw new Error(error.message || 'Failed to fetch categories');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
staleTime: 1000 * 60 * 60, // 1 hour - categories rarely change
});

View File

@@ -40,7 +40,9 @@ export const useFlyerItemCountQuery = (flyerIds: number[], enabled: boolean = tr
throw new Error(error.message || 'Failed to count flyer items');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: {...} }, extract the data object
return json.data ?? json;
},
enabled: enabled && flyerIds.length > 0,
// Count doesn't change frequently

View File

@@ -37,7 +37,9 @@ export const useFlyerItemsForFlyersQuery = (flyerIds: number[], enabled: boolean
throw new Error(error.message || 'Failed to fetch flyer items');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
enabled: enabled && flyerIds.length > 0,
// Flyer items don't change frequently once created

View File

@@ -31,9 +31,10 @@ describe('useFlyerItemsQuery', () => {
{ item_id: 1, name: 'Milk', price: 3.99, flyer_id: 42 },
{ item_id: 2, name: 'Bread', price: 2.49, flyer_id: 42 },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchFlyerItems.mockResolvedValue({
ok: true,
json: () => Promise.resolve({ items: mockFlyerItems }),
json: () => Promise.resolve({ success: true, data: mockFlyerItems }),
} as Response);
const { result } = renderHook(() => useFlyerItemsQuery(42), { wrapper });
@@ -103,9 +104,10 @@ describe('useFlyerItemsQuery', () => {
// respects the enabled condition. The guard exists as a defensive measure only.
it('should return empty array when API returns no items', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.fetchFlyerItems.mockResolvedValue({
ok: true,
json: () => Promise.resolve({ items: [] }),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useFlyerItemsQuery(42), { wrapper });
@@ -115,16 +117,20 @@ describe('useFlyerItemsQuery', () => {
expect(result.current.data).toEqual([]);
});
it('should handle response without items property', async () => {
it('should handle response without data property (fallback)', async () => {
// Edge case: API returns unexpected format without data property
// The hook falls back to returning the raw json object
const legacyItems = [{ item_id: 1, name: 'Legacy Item' }];
mockedApiClient.fetchFlyerItems.mockResolvedValue({
ok: true,
json: () => Promise.resolve({}),
json: () => Promise.resolve(legacyItems),
} as Response);
const { result } = renderHook(() => useFlyerItemsQuery(42), { wrapper });
await waitFor(() => expect(result.current.isSuccess).toBe(true));
expect(result.current.data).toEqual([]);
// Falls back to raw response when .data is undefined
expect(result.current.data).toEqual(legacyItems);
});
});

View File

@@ -35,9 +35,9 @@ export const useFlyerItemsQuery = (flyerId: number | undefined) => {
throw new Error(error.message || 'Failed to fetch flyer items');
}
const data = await response.json();
// API returns { items: FlyerItem[] }
return data.items || [];
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
// Only run the query if we have a valid flyer ID
enabled: !!flyerId,

View File

@@ -31,9 +31,10 @@ describe('useFlyersQuery', () => {
{ flyer_id: 1, store_name: 'Store A', valid_from: '2024-01-01', valid_to: '2024-01-07' },
{ flyer_id: 2, store_name: 'Store B', valid_from: '2024-01-01', valid_to: '2024-01-07' },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchFlyers.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockFlyers),
json: () => Promise.resolve({ success: true, data: mockFlyers }),
} as Response);
const { result } = renderHook(() => useFlyersQuery(), { wrapper });
@@ -46,9 +47,10 @@ describe('useFlyersQuery', () => {
it('should fetch flyers with custom limit and offset', async () => {
const mockFlyers = [{ flyer_id: 3, store_name: 'Store C' }];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchFlyers.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockFlyers),
json: () => Promise.resolve({ success: true, data: mockFlyers }),
} as Response);
const { result } = renderHook(() => useFlyersQuery(10, 5), { wrapper });
@@ -102,9 +104,10 @@ describe('useFlyersQuery', () => {
});
it('should return empty array for no flyers', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.fetchFlyers.mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useFlyersQuery(), { wrapper });

View File

@@ -32,7 +32,9 @@ export const useFlyersQuery = (limit: number = 20, offset: number = 0) => {
throw new Error(error.message || 'Failed to fetch flyers');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
// Keep data fresh for 2 minutes since flyers don't change frequently
staleTime: 1000 * 60 * 2,

View File

@@ -29,7 +29,9 @@ export const useLeaderboardQuery = (limit: number = 10, enabled: boolean = true)
throw new Error(error.message || 'Failed to fetch leaderboard');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
enabled,
staleTime: 1000 * 60 * 2, // 2 minutes - leaderboard can change moderately

View File

@@ -32,9 +32,10 @@ describe('useMasterItemsQuery', () => {
{ master_item_id: 2, name: 'Bread', category: 'Bakery' },
{ master_item_id: 3, name: 'Eggs', category: 'Dairy' },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchMasterItems.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockMasterItems),
json: () => Promise.resolve({ success: true, data: mockMasterItems }),
} as Response);
const { result } = renderHook(() => useMasterItemsQuery(), { wrapper });
@@ -88,9 +89,10 @@ describe('useMasterItemsQuery', () => {
});
it('should return empty array for no master items', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.fetchMasterItems.mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useMasterItemsQuery(), { wrapper });

View File

@@ -31,7 +31,9 @@ export const useMasterItemsQuery = () => {
throw new Error(error.message || 'Failed to fetch master items');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
// Master items change infrequently, keep data fresh for 10 minutes
staleTime: 1000 * 60 * 10,

View File

@@ -34,7 +34,9 @@ export const usePriceHistoryQuery = (masterItemIds: number[], enabled: boolean =
throw new Error(error.message || 'Failed to fetch price history');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
enabled: enabled && masterItemIds.length > 0,
staleTime: 1000 * 60 * 10, // 10 minutes - historical data doesn't change frequently

View File

@@ -31,9 +31,10 @@ describe('useShoppingListsQuery', () => {
{ shopping_list_id: 1, name: 'Weekly Groceries', items: [] },
{ shopping_list_id: 2, name: 'Party Supplies', items: [] },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchShoppingLists.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockShoppingLists),
json: () => Promise.resolve({ success: true, data: mockShoppingLists }),
} as Response);
const { result } = renderHook(() => useShoppingListsQuery(true), { wrapper });
@@ -98,9 +99,10 @@ describe('useShoppingListsQuery', () => {
});
it('should return empty array for no shopping lists', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.fetchShoppingLists.mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useShoppingListsQuery(true), { wrapper });

View File

@@ -31,7 +31,9 @@ export const useShoppingListsQuery = (enabled: boolean) => {
throw new Error(error.message || 'Failed to fetch shopping lists');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
enabled,
// Keep data fresh for 1 minute since users actively manage shopping lists

View File

@@ -31,9 +31,10 @@ describe('useSuggestedCorrectionsQuery', () => {
{ correction_id: 1, item_name: 'Milk', suggested_name: 'Whole Milk', status: 'pending' },
{ correction_id: 2, item_name: 'Bread', suggested_name: 'White Bread', status: 'pending' },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.getSuggestedCorrections.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockCorrections),
json: () => Promise.resolve({ success: true, data: mockCorrections }),
} as Response);
const { result } = renderHook(() => useSuggestedCorrectionsQuery(), { wrapper });
@@ -87,9 +88,10 @@ describe('useSuggestedCorrectionsQuery', () => {
});
it('should return empty array for no corrections', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.getSuggestedCorrections.mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useSuggestedCorrectionsQuery(), { wrapper });

View File

@@ -26,7 +26,9 @@ export const useSuggestedCorrectionsQuery = () => {
throw new Error(error.message || 'Failed to fetch suggested corrections');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
staleTime: 1000 * 60, // 1 minute - corrections change moderately
});

View File

@@ -36,7 +36,9 @@ export const useUserAddressQuery = (
throw new Error(error.message || 'Failed to fetch user address');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: {...} }, extract the data object
return json.data ?? json;
},
enabled: enabled && !!addressId,
staleTime: 1000 * 60 * 5, // 5 minutes - address data doesn't change frequently

View File

@@ -48,8 +48,12 @@ export const useUserProfileDataQuery = (enabled: boolean = true) => {
throw new Error(error.message || 'Failed to fetch user achievements');
}
const profile: UserProfile = await profileRes.json();
const achievements: (UserAchievement & Achievement)[] = await achievementsRes.json();
const profileJson = await profileRes.json();
const achievementsJson = await achievementsRes.json();
// API returns { success: true, data: {...} }, extract the data
const profile: UserProfile = profileJson.data ?? profileJson;
const achievements: (UserAchievement & Achievement)[] =
achievementsJson.data ?? achievementsJson;
return {
profile,

View File

@@ -31,9 +31,10 @@ describe('useWatchedItemsQuery', () => {
{ master_item_id: 1, name: 'Milk', category: 'Dairy' },
{ master_item_id: 2, name: 'Bread', category: 'Bakery' },
];
// API returns wrapped response: { success: true, data: [...] }
mockedApiClient.fetchWatchedItems.mockResolvedValue({
ok: true,
json: () => Promise.resolve(mockWatchedItems),
json: () => Promise.resolve({ success: true, data: mockWatchedItems }),
} as Response);
const { result } = renderHook(() => useWatchedItemsQuery(true), { wrapper });
@@ -98,9 +99,10 @@ describe('useWatchedItemsQuery', () => {
});
it('should return empty array for no watched items', async () => {
// API returns wrapped response: { success: true, data: [] }
mockedApiClient.fetchWatchedItems.mockResolvedValue({
ok: true,
json: () => Promise.resolve([]),
json: () => Promise.resolve({ success: true, data: [] }),
} as Response);
const { result } = renderHook(() => useWatchedItemsQuery(true), { wrapper });

View File

@@ -31,7 +31,9 @@ export const useWatchedItemsQuery = (enabled: boolean) => {
throw new Error(error.message || 'Failed to fetch watched items');
}
return response.json();
const json = await response.json();
// API returns { success: true, data: [...] }, extract the data array
return json.data ?? json;
},
enabled,
// Keep data fresh for 1 minute since users actively manage watched items

View File

@@ -19,21 +19,27 @@ import { vi } from 'vitest';
* // ... rest of the test
* });
*/
/**
* Helper to create a mock API response in the standard format.
* API responses are wrapped in { success: true, data: ... } per ADR-028.
*/
const mockApiResponse = <T>(data: T): Response =>
new Response(JSON.stringify({ success: true, data }));
// Global mock for apiClient - provides defaults for tests using renderWithProviders.
// Note: Individual test files must also call vi.mock() with their relative path.
vi.mock('../../services/apiClient', () => ({
// --- Provider Mocks (with default successful responses) ---
// These are essential for any test using renderWithProviders, as AppProviders
// will mount all these data providers.
fetchFlyers: vi.fn(() =>
Promise.resolve(new Response(JSON.stringify({ flyers: [], hasMore: false }))),
),
fetchMasterItems: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))),
fetchWatchedItems: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))),
fetchShoppingLists: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))),
getAuthenticatedUserProfile: vi.fn(() => Promise.resolve(new Response(JSON.stringify(null)))),
fetchCategories: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))), // For CorrectionsPage
fetchAllBrands: vi.fn(() => Promise.resolve(new Response(JSON.stringify([])))), // For AdminBrandManager
// All responses use the standard API format: { success: true, data: ... }
fetchFlyers: vi.fn(() => Promise.resolve(mockApiResponse([]))),
fetchMasterItems: vi.fn(() => Promise.resolve(mockApiResponse([]))),
fetchWatchedItems: vi.fn(() => Promise.resolve(mockApiResponse([]))),
fetchShoppingLists: vi.fn(() => Promise.resolve(mockApiResponse([]))),
getAuthenticatedUserProfile: vi.fn(() => Promise.resolve(mockApiResponse(null))),
fetchCategories: vi.fn(() => Promise.resolve(mockApiResponse([]))), // For CorrectionsPage
fetchAllBrands: vi.fn(() => Promise.resolve(mockApiResponse([]))), // For AdminBrandManager
// --- General Mocks (return empty vi.fn() by default) ---
// These functions are commonly used and can be implemented in specific tests.

File diff suppressed because it is too large. [Load Diff]

View File

@@ -2,6 +2,7 @@
import path from 'path';
import { defineConfig } from 'vitest/config';
import react from '@vitejs/plugin-react';
import { sentryVitePlugin } from '@sentry/vite-plugin';
// Ensure NODE_ENV is set to 'test' for all Vitest runs.
process.env.NODE_ENV = 'test';
@@ -10,6 +11,13 @@ process.on('unhandledRejection', (reason, promise) => {
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
/**
* Determines if we should enable Sentry source map uploads.
* Only enabled during production builds with the required environment variables.
*/
const shouldUploadSourceMaps =
process.env.VITE_SENTRY_DSN && process.env.SENTRY_AUTH_TOKEN && process.env.NODE_ENV !== 'test';
/**
* This is the main configuration file for Vite and the Vitest 'unit' test project.
* When running `vitest`, it is orchestrated by `vitest.workspace.ts`, which
@@ -18,7 +26,40 @@ process.on('unhandledRejection', (reason, promise) => {
export default defineConfig({
// Vite-specific configuration for the dev server, build, etc.
// This is inherited by all Vitest projects.
plugins: [react()],
build: {
// Generate source maps for production builds (hidden = not referenced in built files)
// The Sentry plugin will upload them and then delete them
sourcemap: shouldUploadSourceMaps ? 'hidden' : false,
},
plugins: [
react(),
// Conditionally add Sentry plugin for production builds with source map upload
...(shouldUploadSourceMaps
? [
sentryVitePlugin({
// URL of the Bugsink instance (Sentry-compatible)
// This is read from SENTRY_URL env var or falls back to the DSN's origin
url: process.env.SENTRY_URL,
// Org and project are required by the API but Bugsink ignores them
// when using debug ID matching (Bugsink 1.5+)
org: 'flyer-crawler',
project: 'flyer-crawler-frontend',
// Auth token from environment variable
authToken: process.env.SENTRY_AUTH_TOKEN,
sourcemaps: {
// Delete source maps after upload to prevent public exposure
filesToDeleteAfterUpload: ['./dist/**/*.map'],
},
// Disable telemetry to Sentry
telemetry: false,
}),
]
: []),
],
server: {
port: 3000,
host: '0.0.0.0',