Compare commits

...

18 Commits

| Author | SHA1 | Message | Checks | Date |
| --- | --- | --- | --- | --- |
| Gitea Actions | 95d441be98 | ci: Bump version to 0.0.19 [skip ci] | | 2025-12-24 06:22:03 +05:00 |
| | 186ed484b7 | last test fixes for upcoming V0.1 + pretty | Deploy to Test Environment / deploy-to-test (push): failing after 2m40s | 2025-12-23 17:20:51 -08:00 |
| Gitea Actions | 3669958e9d | ci: Bump version to 0.0.18 [skip ci] | | 2025-12-24 05:32:56 +05:00 |
| | 5f3daf0539 | Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com | Deploy to Test Environment / deploy-to-test (push): successful in 11m23s | 2025-12-23 16:32:11 -08:00 |
| | ae7afaaf97 | integration test fixes | | 2025-12-23 16:32:05 -08:00 |
| Gitea Actions | 3ae7b9e0d4 | ci: Bump version to 0.0.17 [skip ci] | | 2025-12-24 04:51:21 +05:00 |
| | 921c48fc57 | more unit test fixes now the UseProfileAddress OOM has been identified | Deploy to Test Environment / deploy-to-test (push): successful in 10m14s | 2025-12-23 15:50:01 -08:00 |
| Gitea Actions | 2571864b91 | ci: Bump version to 0.0.16 [skip ci] | | 2025-12-24 01:52:55 +05:00 |
| | 065d0c746a | Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com | Deploy to Test Environment / deploy-to-test (push): successful in 12m17s | 2025-12-23 12:52:02 -08:00 |
| | 395f6c21a2 | some "push to get dev working" stuff, and possibly found the problem test | | 2025-12-23 12:51:56 -08:00 |
| Gitea Actions | aec56dfc23 | ci: Bump version to 0.0.15 [skip ci] | | 2025-12-24 01:18:44 +05:00 |
| | a12a0e5207 | Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com | Deploy to Test Environment / deploy-to-test (push): failing after 1m28s | 2025-12-23 12:17:31 -08:00 |
| | e337bd67b1 | some "push to get dev working" stuff, and possibly found the problem test | | 2025-12-23 12:17:21 -08:00 |
| Gitea Actions | a8f5b4e51a | ci: Bump version to 0.0.14 [skip ci] | | 2025-12-23 08:45:00 +05:00 |
| | d0ce8021d6 | Merge branch 'main' of https://gitea.projectium.com/torbo/flyer-crawler.projectium.com | Deploy to Test Environment / deploy-to-test (push): successful in 15m5s | 2025-12-22 19:44:11 -08:00 |
| | efbb162880 | keep disabling tests until the culprit is found this fucking sucks | | 2025-12-22 19:44:02 -08:00 |
| Gitea Actions | e353ce8a81 | ci: Bump version to 0.0.13 [skip ci] | | 2025-12-23 08:30:20 +05:00 |
| | b5cbf271b8 | debugging the fucking OOM | Deploy to Test Environment / deploy-to-test (push): successful in 13m51s | 2025-12-22 19:29:10 -08:00 |
46 changed files with 2269 additions and 2594 deletions

View File

@@ -0,0 +1,18 @@
{
  "name": "Flyer Crawler Dev (Ubuntu 22.04)",
  "dockerComposeFile": ["../compose.dev.yml"],
  "service": "app",
  "workspaceFolder": "/app",
  "customizations": {
    "vscode": {
      "extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
    }
  },
  "remoteUser": "root",
  // Automatically install dependencies when the container is created.
  // This runs inside the container, populating the isolated node_modules volume.
  "postCreateCommand": "npm install",
  "postAttachCommand": "npm run dev:container",
  // Try to start the podman machine, but exit with success (0) even if it's already running.
  "initializeCommand": "powershell -Command \"podman machine start; exit 0\""
}

View File

@@ -136,7 +136,8 @@ jobs:
# Run unit and integration tests as separate steps.
# The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
echo "--- Running Unit Tests ---"
npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
# npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true
echo "--- Running Integration Tests ---"
npm run test:integration -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
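
The `--no-file-parallelism` flag added above is the CLI form of a Vitest config option. If the serialization turns out to be permanent, a minimal sketch of the equivalent config setting:

```ts
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    // Run test files one at a time instead of in parallel workers, so only one
    // worker's heap is live at once. Same effect as --no-file-parallelism.
    fileParallelism: false,
  },
});
```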

View File

@@ -60,4 +60,4 @@ jobs:
uses: actions/upload-artifact@v3
with:
name: database-backup
path: ${{ env.backup_filename }}
path: ${{ env.backup_filename }}

View File

@@ -144,4 +144,4 @@ jobs:
find "$APP_PATH/flyer-images" -type f -name '*-test-flyer-image.*' -delete
find "$APP_PATH/flyer-images/icons" -type f -name '*-test-flyer-image.*' -delete
find "$APP_PATH/flyer-images/archive" -mindepth 1 -maxdepth 1 -type f -delete || echo "Archive directory not found, skipping."
echo "✅ Flyer asset directories cleared."
echo "✅ Flyer asset directories cleared."

View File

@@ -130,4 +130,4 @@ jobs:
find "$APP_PATH/flyer-images" -mindepth 1 -type f -delete
find "$APP_PATH/flyer-images/icons" -mindepth 1 -type f -delete
find "$APP_PATH/flyer-images/archive" -mindepth 1 -type f -delete || echo "Archive directory not found, skipping."
echo "✅ Test flyer asset directories cleared."
echo "✅ Test flyer asset directories cleared."

View File

@@ -25,7 +25,7 @@ jobs:
DB_USER: ${{ secrets.DB_USER }}
DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
BACKUP_DIR: "/var/www/backups" # Define a dedicated directory for backups
BACKUP_DIR: '/var/www/backups' # Define a dedicated directory for backups
steps:
- name: Validate Secrets and Inputs
@@ -92,4 +92,4 @@ jobs:
echo "Restarting application server..."
cd /var/www/flyer-crawler.projectium.com
pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
echo "✅ Application server restarted."
echo "✅ Application server restarted."

Dockerfile.dev (new file, 31 lines)
View File

@@ -0,0 +1,31 @@
# Use Ubuntu 22.04 (LTS) as the base image to match production
FROM ubuntu:22.04
# Set environment variables to non-interactive to avoid prompts during installation
ENV DEBIAN_FRONTEND=noninteractive
# Update package lists and install essential tools
# - curl: for downloading Node.js setup script
# - git: for version control operations
# - build-essential: for compiling native Node.js modules (node-gyp)
# - python3: required by some Node.js build tools
RUN apt-get update && apt-get install -y \
curl \
git \
build-essential \
python3 \
&& rm -rf /var/lib/apt/lists/*
# Install Node.js 20.x (LTS) from NodeSource
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y nodejs
# Set the working directory inside the container
WORKDIR /app
# Set default environment variables for development
ENV NODE_ENV=development
ENV NODE_OPTIONS='--max-old-space-size=8192'
# Default command keeps the container running so you can attach to it
CMD ["bash"]

compose.dev.yml (new file, 52 lines)
View File

@@ -0,0 +1,52 @@
version: '3.8'
services:
  app:
    container_name: flyer-crawler-dev
    build:
      context: .
      dockerfile: Dockerfile.dev
    volumes:
      # Mount the current directory to /app in the container
      - .:/app
      # Create a volume for node_modules to avoid conflicts with Windows host
      # and improve performance.
      - node_modules_data:/app/node_modules
    ports:
      - '3000:3000' # Frontend (Vite default)
      - '3001:3001' # Backend API
    environment:
      - NODE_ENV=development
      - DB_HOST=postgres
      - DB_USER=postgres
      - DB_PASSWORD=postgres
      - DB_NAME=flyer_crawler_dev
      - REDIS_URL=redis://redis:6379
      # Add other secrets here or use a .env file
    depends_on:
      - postgres
      - redis
    # Keep container running so VS Code can attach
    command: tail -f /dev/null
  postgres:
    image: docker.io/postgis/postgis:15-3.4
    container_name: flyer-crawler-postgres
    ports:
      - '5432:5432'
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_DB: flyer_crawler_dev
    volumes:
      - postgres_data:/var/lib/postgresql/data
  redis:
    image: docker.io/library/redis:alpine
    container_name: flyer-crawler-redis
    ports:
      - '6379:6379'
volumes:
  postgres_data:
  node_modules_data:

View File

@@ -34,7 +34,7 @@ We will adopt a strict, consistent error-handling contract for the service and r
**Robustness**: Eliminates an entire class of bugs where `undefined` is passed to `res.json()`, preventing incorrect `500` errors.
**Consistency & Predictability**: All data-fetching methods now have a predictable contract. They either return the expected data or throw a specific, typed error.
**Developer Experience**: Route handlers become simpler, cleaner, and easier to write correctly. The cognitive load on developers is reduced as they no longer need to remember to check for `undefined`.
**Improved Testability**: Tests become more reliable and realistic. Mocks can now throw the *exact* error type (`new NotFoundError()`) that the real implementation would, ensuring tests accurately reflect the application's behavior.
**Improved Testability**: Tests become more reliable and realistic. Mocks can now throw the _exact_ error type (`new NotFoundError()`) that the real implementation would, ensuring tests accurately reflect the application's behavior.
**Centralized Control**: Error-to-HTTP-status logic is centralized in the `errorHandler` middleware, making it easy to manage and modify error responses globally.
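
For reference, a minimal sketch of the contract this ADR describes: a typed error plus the centralized `errorHandler`. All names beyond `NotFoundError` are assumptions for illustration, not the repository's actual code.

```ts
import { Request, Response, NextFunction } from 'express';

// Typed error thrown by repositories/services instead of returning undefined.
export class NotFoundError extends Error {
  readonly status = 404;
  constructor(message = 'Resource not found') {
    super(message);
    this.name = 'NotFoundError';
  }
}

// Centralized error-to-HTTP-status mapping, registered last in the middleware chain.
export function errorHandler(err: Error, _req: Request, res: Response, _next: NextFunction) {
  if (err instanceof NotFoundError) {
    return res.status(err.status).json({ error: err.message });
  }
  res.status(500).json({ error: 'Internal server error' });
}
```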
### Negative

View File

@@ -10,21 +10,19 @@ Following the standardization of error handling in ADR-001, the next most common
This manual approach has several drawbacks:
**Repetitive Boilerplate**: The `try/catch/finally` block for transaction management is duplicated across multiple files.
**Error-Prone**: It is easy to forget to `client.release()` in all code paths, which can lead to connection pool exhaustion and bring down the application.
3. **Poor Composability**: It is difficult to compose multiple repository methods into a single, atomic "Unit of Work". For example, a service function that needs to update a user's points and create a budget in a single transaction cannot easily do so if both underlying repository methods create their own transactions.
**Error-Prone**: It is easy to forget to `client.release()` in all code paths, which can lead to connection pool exhaustion and bring down the application. 3. **Poor Composability**: It is difficult to compose multiple repository methods into a single, atomic "Unit of Work". For example, a service function that needs to update a user's points and create a budget in a single transaction cannot easily do so if both underlying repository methods create their own transactions.
## Decision
We will implement a standardized "Unit of Work" pattern through a high-level `withTransaction` helper function. This function will abstract away the complexity of transaction management.
1. **`withTransaction` Helper**: A new helper function, `withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T>`, will be created. This function will be responsible for:
* Acquiring a client from the database pool.
* Starting a transaction (`BEGIN`).
* Executing the `callback` function, passing the transactional client to it.
* If the callback succeeds, it will `COMMIT` the transaction.
* If the callback throws an error, it will `ROLLBACK` the transaction and re-throw the error.
* In all cases, it will `RELEASE` the client back to the pool.
- Acquiring a client from the database pool.
- Starting a transaction (`BEGIN`).
- Executing the `callback` function, passing the transactional client to it.
- If the callback succeeds, it will `COMMIT` the transaction.
- If the callback throws an error, it will `ROLLBACK` the transaction and re-throw the error.
- In all cases, it will `RELEASE` the client back to the pool.
2. **Repository Method Signature**: Repository methods that need to be part of a transaction will be updated to optionally accept a `PoolClient` in their constructor or as a method parameter. By default, they will use the global pool. When called from within a `withTransaction` block, they will be passed the transactional client.
3. **Service Layer Orchestration**: Service-layer functions that orchestrate multi-step operations will use `withTransaction` to ensure atomicity. They will instantiate or call repository methods, providing them with the transactional client from the callback.
@@ -40,7 +38,7 @@ async function registerUserAndCreateDefaultList(userData) {
const shoppingRepo = new ShoppingRepository(client);
const newUser = await userRepo.createUser(userData);
await shoppingRepo.createShoppingList(newUser.user_id, "My First List");
await shoppingRepo.createShoppingList(newUser.user_id, 'My First List');
return newUser;
});
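
A minimal sketch of the `withTransaction` helper the Decision section specifies, assuming `pg`'s `Pool`/`PoolClient` (which the surrounding code already uses):

```ts
import { Pool, PoolClient } from 'pg';

const pool = new Pool(); // in the app this would be the shared pool instance

export async function withTransaction<T>(
  callback: (client: PoolClient) => Promise<T>,
): Promise<T> {
  const client = await pool.connect(); // acquire a client from the pool
  try {
    await client.query('BEGIN');
    const result = await callback(client); // run the unit of work on the transactional client
    await client.query('COMMIT');
    return result;
  } catch (error) {
    await client.query('ROLLBACK'); // undo everything, then surface the original error
    throw error;
  } finally {
    client.release(); // always return the client to the pool, success or failure
  }
}
```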

View File

@@ -20,8 +20,8 @@ We will adopt a schema-based approach for input validation using the `zod` libra
1. **Adopt `zod` for Schema Definition**: We will use `zod` to define clear, type-safe schemas for the `params`, `query`, and `body` of each API request. `zod` provides powerful and declarative validation rules and automatically infers TypeScript types.
2. **Create a Reusable Validation Middleware**: A generic `validateRequest(schema)` middleware will be created. This middleware will take a `zod` schema, parse the incoming request against it, and handle success and error cases.
* On successful validation, the parsed and typed data will be attached to the `req` object (e.g., `req.body` will be replaced with the parsed body), and `next()` will be called.
* On validation failure, the middleware will call `next()` with a custom `ValidationError` containing a structured list of issues, which `ADR-001`'s `errorHandler` can then format into a user-friendly `400 Bad Request` response.
- On successful validation, the parsed and typed data will be attached to the `req` object (e.g., `req.body` will be replaced with the parsed body), and `next()` will be called.
- On validation failure, the middleware will call `next()` with a custom `ValidationError` containing a structured list of issues, which `ADR-001`'s `errorHandler` can then format into a user-friendly `400 Bad Request` response.
3. **Refactor Routes**: All route handlers will be refactored to use this new middleware, removing all manual validation logic.
@@ -46,18 +46,18 @@ const getFlyerSchema = z.object({
type GetFlyerRequest = z.infer<typeof getFlyerSchema>;
// 3. Apply the middleware and use an inline cast for the request
router.get('/:id', validateRequest(getFlyerSchema), (async (req, res, next) => {
// Cast 'req' to the inferred type.
// This provides full type safety for params, query, and body.
const { params } = req as unknown as GetFlyerRequest;
router.get('/:id', validateRequest(getFlyerSchema), async (req, res, next) => {
// Cast 'req' to the inferred type.
// This provides full type safety for params, query, and body.
const { params } = req as unknown as GetFlyerRequest;
try {
const flyer = await db.flyerRepo.getFlyerById(params.id); // params.id is 'number'
res.json(flyer);
} catch (error) {
next(error);
}
}));
try {
const flyer = await db.flyerRepo.getFlyerById(params.id); // params.id is 'number'
res.json(flyer);
} catch (error) {
next(error);
}
});
```
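
A sketch of the `validateRequest` middleware the route above relies on; the `ValidationError` class is an assumption modeled on ADR-001's conventions:

```ts
import { ZodSchema } from 'zod';
import { Request, Response, NextFunction, RequestHandler } from 'express';

// Assumed ADR-001-style error carrying the structured zod issues.
export class ValidationError extends Error {
  constructor(public issues: unknown[]) {
    super('Request validation failed');
    this.name = 'ValidationError';
  }
}

export const validateRequest =
  (schema: ZodSchema): RequestHandler =>
  (req: Request, _res: Response, next: NextFunction) => {
    const result = schema.safeParse({ params: req.params, query: req.query, body: req.body });
    if (!result.success) {
      // Hand structured issues to the central errorHandler, which emits 400 Bad Request.
      return next(new ValidationError(result.error.issues));
    }
    // Replace the raw request data with the parsed, typed values.
    Object.assign(req, result.data);
    next();
  };
```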
## Consequences

View File

@@ -20,9 +20,9 @@ We will adopt a standardized, application-wide structured logging policy. All lo
**Request-Scoped Logger with Context**: We will create a middleware that runs at the beginning of the request lifecycle. This middleware will:
* Generate a unique `request_id` for each incoming request.
* Create a request-scoped logger instance (a "child logger") that automatically includes the `request_id`, `user_id` (if authenticated), and `ip_address` in every log message it generates.
* Attach this child logger to the `req` object (e.g., `req.log`).
- Generate a unique `request_id` for each incoming request.
- Create a request-scoped logger instance (a "child logger") that automatically includes the `request_id`, `user_id` (if authenticated), and `ip_address` in every log message it generates.
- Attach this child logger to the `req` object (e.g., `req.log`).
**Mandatory Use of Request-Scoped Logger**: All route handlers and any service functions called by them **MUST** use the request-scoped logger (`req.log`) instead of the global logger instance. This ensures all logs for a given request are automatically correlated.
@@ -32,9 +32,9 @@ We will adopt a standardized, application-wide structured logging policy. All lo
**Standardized Logging Practices**:
**INFO**: Log key business events, such as `User logged in` or `Flyer processed`.
**WARN**: Log recoverable errors or unusual situations that do not break the request, such as `Client Error: 404 on GET /api/non-existent-route` or `Retrying failed database connection`.
**ERROR**: Log only unhandled or server-side errors that cause a request to fail (typically handled by the `errorHandler`). Avoid logging expected client errors (like 4xx) at this level.
**DEBUG**: Log detailed diagnostic information useful during development, such as function entry/exit points or variable states.
**WARN**: Log recoverable errors or unusual situations that do not break the request, such as `Client Error: 404 on GET /api/non-existent-route` or `Retrying failed database connection`.
**ERROR**: Log only unhandled or server-side errors that cause a request to fail (typically handled by the `errorHandler`). Avoid logging expected client errors (like 4xx) at this level.
**DEBUG**: Log detailed diagnostic information useful during development, such as function entry/exit points or variable states.
### Example Usage
@@ -59,15 +59,15 @@ export const requestLogger = (req, res, next) => {
// In a route handler:
router.get('/:id', async (req, res, next) => {
// Use the request-scoped logger
req.log.info({ flyerId: req.params.id }, 'Fetching flyer by ID');
try {
// ... business logic ...
res.json(flyer);
} catch (error) {
// The error itself will be logged with full context by the errorHandler
next(error);
}
// Use the request-scoped logger
req.log.info({ flyerId: req.params.id }, 'Fetching flyer by ID');
try {
// ... business logic ...
res.json(flyer);
} catch (error) {
// The error itself will be logged with full context by the errorHandler
next(error);
}
});
```
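
A sketch of the request-scoped middleware the example assumes, using `pino`'s child loggers; the `req.user` shape is an assumption:

```ts
import pino, { Logger } from 'pino';
import { randomUUID } from 'node:crypto';
import { Request, Response, NextFunction } from 'express';

const logger = pino();

export const requestLogger = (req: Request, _res: Response, next: NextFunction) => {
  // The child logger stamps every line with the request-scoped context.
  const log: Logger = logger.child({
    request_id: randomUUID(),
    user_id: (req as Request & { user?: { user_id: string } }).user?.user_id,
    ip_address: req.ip,
  });
  (req as Request & { log: Logger }).log = log;
  next();
};
```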

View File

@@ -14,5 +14,5 @@ We will formalize a centralized Role-Based Access Control (RBAC) or Attribute-Ba
## Consequences
* **Positive**: Ensures authorization logic is consistent, easy to audit, and decoupled from business logic. Improves security by centralizing access control.
* **Negative**: Requires a significant refactoring effort to integrate the new authorization system across all protected routes and features. Introduces a new dependency if an external library is chosen.
- **Positive**: Ensures authorization logic is consistent, easy to audit, and decoupled from business logic. Improves security by centralizing access control.
- **Negative**: Requires a significant refactoring effort to integrate the new authorization system across all protected routes and features. Introduces a new dependency if an external library is chosen.

View File

@@ -14,5 +14,5 @@ We will establish a formal Design System and Component Library. This will involv
## Consequences
* **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
* **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
- **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
- **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.

View File

@@ -14,5 +14,5 @@ We will adopt a dedicated database migration tool, such as **`node-pg-migrate`**
## Consequences
* **Positive**: Provides a safe, repeatable, and reversible way to evolve the database schema. Improves team collaboration on database changes. Reduces the risk of data loss or downtime during deployments.
* **Negative**: Requires an initial setup and learning curve for the chosen migration tool. All future schema changes must adhere to the migration workflow.
- **Positive**: Provides a safe, repeatable, and reversible way to evolve the database schema. Improves team collaboration on database changes. Reduces the risk of data loss or downtime during deployments.
- **Negative**: Requires an initial setup and learning curve for the chosen migration tool. All future schema changes must adhere to the migration workflow.

View File

@@ -14,5 +14,5 @@ We will standardize the deployment process by containerizing the application usi
## Consequences
* **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers. Improves portability and scalability of the application.
* **Negative**: Requires learning Docker and containerization concepts. Adds `Dockerfile` and `docker-compose.yml` to the project's configuration.
- **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers. Improves portability and scalability of the application.
- **Negative**: Requires learning Docker and containerization concepts. Adds `Dockerfile` and `docker-compose.yml` to the project's configuration.

View File

@@ -18,5 +18,5 @@ We will implement a multi-layered security approach for the API:
## Consequences
* **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
* **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.
- **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
- **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.

View File

@@ -14,5 +14,5 @@ We will formalize the end-to-end CI/CD process. This ADR will define the project
## Consequences
* **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
* **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.
- **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
- **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.

View File

@@ -14,5 +14,5 @@ We will adopt **OpenAPI (Swagger)** for API documentation. We will use tools (e.
## Consequences
* **Positive**: Creates a single source of truth for API documentation that stays in sync with the code. Enables auto-generation of client SDKs and simplifies testing.
* **Negative**: Requires developers to maintain JSDoc annotations on all routes. Adds a build step and new dependencies to the project.
- **Positive**: Creates a single source of truth for API documentation that stays in sync with the code. Enables auto-generation of client SDKs and simplifies testing.
- **Negative**: Requires developers to maintain JSDoc annotations on all routes. Adds a build step and new dependencies to the project.

View File

@@ -14,5 +14,5 @@ We will implement a formal data backup and recovery strategy. This will involve
## Consequences
* **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
* **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.
- **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
- **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.

View File

@@ -12,11 +12,11 @@ When the application is containerized (`ADR-014`), the container orchestrator (e
We will implement dedicated health check endpoints in the Express application.
* A **Liveness Probe** (`/api/health/live`) will return a `200 OK` to indicate the server is running. If it fails, the orchestrator should restart the container.
- A **Liveness Probe** (`/api/health/live`) will return a `200 OK` to indicate the server is running. If it fails, the orchestrator should restart the container.
* A **Readiness Probe** (`/api/health/ready`) will return a `200 OK` only if the application is ready to accept traffic (e.g., database connection is established). If it fails, the orchestrator will temporarily remove the container from the load balancer.
- A **Readiness Probe** (`/api/health/ready`) will return a `200 OK` only if the application is ready to accept traffic (e.g., database connection is established). If it fails, the orchestrator will temporarily remove the container from the load balancer.
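
A minimal sketch of the two probes, assuming an Express `Router` and a `pg` pool for the readiness check:

```ts
import { Router, Request, Response } from 'express';
import { Pool } from 'pg';

export const healthRouter = (pool: Pool) => {
  const router = Router();

  // Liveness: the process is up; failure means the container should restart.
  router.get('/live', (_req: Request, res: Response) => {
    res.status(200).json({ status: 'ok' });
  });

  // Readiness: dependencies are reachable; failure pulls the container
  // out of the load balancer until it recovers.
  router.get('/ready', async (_req: Request, res: Response) => {
    try {
      await pool.query('SELECT 1');
      res.status(200).json({ status: 'ready' });
    } catch {
      res.status(503).json({ status: 'unavailable' });
    }
  });

  return router;
};
```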
## Consequences
* **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
* **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.
- **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
- **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.

View File

@@ -24,8 +24,8 @@ We will adopt a standardized, application-wide structured logging policy for all
**2. Pino-like API for Structured Logging**: The client logger mimics the `pino` API, which is the standard on the backend. It supports two primary call signatures:
* `logger.info('A simple message');`
* `logger.info({ key: 'value' }, 'A message with a structured data payload');`
- `logger.info('A simple message');`
- `logger.info({ key: 'value' }, 'A message with a structured data payload');`
The second signature, which includes a data object as the first argument, is **strongly preferred**, especially for logging errors or complex state.
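
A sketch of the dual call signature, with `console` standing in for whatever transport the real client logger ships logs to:

```ts
type LogFn = {
  (msg: string): void;
  (obj: Record<string, unknown>, msg?: string): void;
};

// pino-like facade: each level method accepts either a message or (object, message).
const makeLevel = (level: 'debug' | 'info' | 'warn' | 'error'): LogFn =>
  ((objOrMsg: string | Record<string, unknown>, msg?: string) => {
    const entry =
      typeof objOrMsg === 'string' ? { level, msg: objOrMsg } : { level, ...objOrMsg, msg };
    console[level](JSON.stringify(entry)); // stand-in transport
  }) as LogFn;

export const logger = {
  debug: makeLevel('debug'),
  info: makeLevel('info'),
  warn: makeLevel('warn'),
  error: makeLevel('error'),
};
```

Usage matching the preferred second signature: `logger.error({ err }, 'Failed to fetch component data');`.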
@@ -79,7 +79,7 @@ describe('MyComponent', () => {
// Assert that the logger was called with the expected structure
expect(logger.error).toHaveBeenCalledWith(
expect.objectContaining({ err: expect.any(Error) }), // Check for the error object
'Failed to fetch component data' // Check for the message
'Failed to fetch component data', // Check for the message
);
});
});

package-lock.json (generated, 4281 lines changed)

File diff suppressed because it is too large.

View File

@@ -1,17 +1,19 @@
{
"name": "flyer-crawler",
"private": true,
"version": "0.0.12",
"version": "0.0.19",
"type": "module",
"scripts": {
"dev": "concurrently \"npm:start:dev\" \"vite\"",
"dev:container": "concurrently \"npm:start:dev\" \"vite --host\"",
"start": "npm run start:prod",
"build": "vite build",
"preview": "vite preview",
"test": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
"test": "cross-env NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
"test-wsl": "cross-env NODE_ENV=test vitest run",
"test:coverage": "npm run clean && npm run test:unit -- --coverage && npm run test:integration -- --coverage",
"test:unit": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
"test:integration": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
"test:unit": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
"test:integration": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
"format": "prettier --write .",
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
"type-check": "tsc --noEmit",
@@ -20,6 +22,7 @@
"start:dev": "NODE_ENV=development tsx watch server.ts",
"start:prod": "NODE_ENV=production tsx server.ts",
"start:test": "NODE_ENV=test NODE_V8_COVERAGE=.coverage/tmp/integration-server tsx server.ts",
"db:reset:dev": "NODE_ENV=development tsx src/db/seed.ts",
"db:reset:test": "NODE_ENV=test tsx src/db/seed.ts",
"worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts"
},
@@ -95,6 +98,7 @@
"autoprefixer": "^10.4.22",
"c8": "^10.1.3",
"concurrently": "^9.2.1",
"cross-env": "^10.1.0",
"eslint": "9.39.1",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-react": "7.37.5",

View File

@@ -92,6 +92,7 @@ CREATE TABLE IF NOT EXISTS public.stores (
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
);
COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';
-- 5. The 'categories' table for normalized category data.
@@ -109,8 +110,8 @@ CREATE TABLE IF NOT EXISTS public.flyers (
file_name TEXT NOT NULL,
image_url TEXT NOT NULL,
icon_url TEXT,
checksum TEXT UNIQUE,
store_id BIGINT REFERENCES public.stores(store_id),
checksum TEXT UNIQUE,
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
valid_from DATE,
valid_to DATE,
store_address TEXT,
@@ -138,7 +139,7 @@ CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
name TEXT NOT NULL UNIQUE,
category_id BIGINT REFERENCES public.categories(category_id),
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
is_allergen BOOLEAN DEFAULT false,
allergy_info JSONB,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
@@ -170,13 +171,13 @@ CREATE TABLE IF NOT EXISTS public.flyer_items (
price_in_cents INTEGER,
quantity_num NUMERIC,
quantity TEXT NOT NULL,
category_id BIGINT REFERENCES public.categories(category_id),
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
category_name TEXT,
unit_price JSONB,
view_count INTEGER DEFAULT 0 NOT NULL,
click_count INTEGER DEFAULT 0 NOT NULL,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
product_id BIGINT,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
@@ -293,7 +294,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_lists_user_id ON public.shopping_lists(u
CREATE TABLE IF NOT EXISTS public.shopping_list_items (
shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
custom_item_name TEXT,
quantity NUMERIC DEFAULT 1 NOT NULL,
is_purchased BOOLEAN DEFAULT false NOT NULL,
@@ -358,7 +359,7 @@ CREATE INDEX IF NOT EXISTS idx_shared_menu_plans_shared_with_user_id ON public.s
CREATE TABLE IF NOT EXISTS public.suggested_corrections (
suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES public.users(user_id),
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
correction_type TEXT NOT NULL,
suggested_value TEXT NOT NULL,
status TEXT DEFAULT 'pending' NOT NULL,
@@ -378,9 +379,9 @@ CREATE INDEX IF NOT EXISTS idx_suggested_corrections_pending ON public.suggested
-- 21. For prices submitted directly by users from in-store.
CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id),
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
store_id BIGINT NOT NULL REFERENCES public.stores(store_id),
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
price_in_cents INTEGER NOT NULL,
photo_url TEXT,
upvotes INTEGER DEFAULT 0 NOT NULL,
@@ -423,8 +424,8 @@ COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (
-- 24. For specific products, linking a master item with a brand and size.
CREATE TABLE IF NOT EXISTS public.products (
product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
brand_id BIGINT REFERENCES public.brands(brand_id),
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
brand_id BIGINT REFERENCES public.brands(brand_id) ON DELETE SET NULL,
name TEXT NOT NULL,
description TEXT,
size TEXT,
@@ -495,7 +496,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_recipes_unique_system_recipe_name ON publi
CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
quantity NUMERIC NOT NULL,
unit TEXT NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
@@ -779,7 +780,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_trips_shopping_list_id ON public.shoppin
CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
custom_item_name TEXT,
quantity NUMERIC NOT NULL,
price_paid_cents INTEGER,
@@ -843,7 +844,7 @@ CREATE INDEX IF NOT EXISTS idx_user_follows_following_id ON public.user_follows(
CREATE TABLE IF NOT EXISTS public.receipts (
receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
store_id BIGINT REFERENCES public.stores(store_id),
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
receipt_image_url TEXT NOT NULL,
transaction_date TIMESTAMPTZ,
total_amount_cents INTEGER,
@@ -864,8 +865,8 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
raw_item_description TEXT NOT NULL,
quantity NUMERIC DEFAULT 1 NOT NULL,
price_paid_cents INTEGER NOT NULL,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
product_id BIGINT REFERENCES public.products(product_id),
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL

View File

@@ -126,8 +126,8 @@ CREATE TABLE IF NOT EXISTS public.flyers (
file_name TEXT NOT NULL,
image_url TEXT NOT NULL,
icon_url TEXT,
checksum TEXT UNIQUE,
store_id BIGINT REFERENCES public.stores(store_id),
checksum TEXT UNIQUE,
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
valid_from DATE,
valid_to DATE,
store_address TEXT,
@@ -155,7 +155,7 @@ CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid
CREATE TABLE IF NOT EXISTS public.master_grocery_items (
master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
name TEXT NOT NULL UNIQUE,
category_id BIGINT REFERENCES public.categories(category_id),
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
is_allergen BOOLEAN DEFAULT false,
allergy_info JSONB,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
@@ -187,13 +187,13 @@ CREATE TABLE IF NOT EXISTS public.flyer_items (
price_in_cents INTEGER,
quantity_num NUMERIC,
quantity TEXT NOT NULL,
category_id BIGINT REFERENCES public.categories(category_id),
category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
category_name TEXT,
unit_price JSONB,
view_count INTEGER DEFAULT 0 NOT NULL,
click_count INTEGER DEFAULT 0 NOT NULL,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
product_id BIGINT,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
);
@@ -310,7 +310,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_lists_user_id ON public.shopping_lists(u
CREATE TABLE IF NOT EXISTS public.shopping_list_items (
shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
custom_item_name TEXT,
quantity NUMERIC DEFAULT 1 NOT NULL,
is_purchased BOOLEAN DEFAULT false NOT NULL,
@@ -375,7 +375,7 @@ CREATE INDEX IF NOT EXISTS idx_shared_menu_plans_shared_with_user_id ON public.s
CREATE TABLE IF NOT EXISTS public.suggested_corrections (
suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES public.users(user_id),
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
correction_type TEXT NOT NULL,
suggested_value TEXT NOT NULL,
status TEXT DEFAULT 'pending' NOT NULL,
@@ -395,9 +395,9 @@ CREATE INDEX IF NOT EXISTS idx_suggested_corrections_pending ON public.suggested
-- 21. For prices submitted directly by users from in-store.
CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id),
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
store_id BIGINT NOT NULL REFERENCES public.stores(store_id),
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
price_in_cents INTEGER NOT NULL,
photo_url TEXT,
upvotes INTEGER DEFAULT 0 NOT NULL,
@@ -439,8 +439,8 @@ COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (
-- 24. For specific products, linking a master item with a brand and size.
CREATE TABLE IF NOT EXISTS public.products (
product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
brand_id BIGINT REFERENCES public.brands(brand_id),
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
brand_id BIGINT REFERENCES public.brands(brand_id) ON DELETE SET NULL,
name TEXT NOT NULL,
description TEXT,
size TEXT,
@@ -510,7 +510,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_recipes_unique_system_recipe_name ON publi
CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
quantity NUMERIC NOT NULL,
unit TEXT NOT NULL,
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
@@ -796,7 +796,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_trips_shopping_list_id ON public.shoppin
CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
custom_item_name TEXT,
quantity NUMERIC NOT NULL,
price_paid_cents INTEGER,
@@ -862,7 +862,7 @@ CREATE INDEX IF NOT EXISTS idx_user_follows_following_id ON public.user_follows(
CREATE TABLE IF NOT EXISTS public.receipts (
receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
store_id BIGINT REFERENCES public.stores(store_id),
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
receipt_image_url TEXT NOT NULL,
transaction_date TIMESTAMPTZ,
total_amount_cents INTEGER,
@@ -883,8 +883,8 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
raw_item_description TEXT NOT NULL,
quantity NUMERIC DEFAULT 1 NOT NULL,
price_paid_cents INTEGER NOT NULL,
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
product_id BIGINT REFERENCES public.products(product_id),
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL

View File

@@ -1,4 +1,4 @@
// src/components/PriceHistoryChart.tsx
// src/features/charts/PriceHistoryChart.tsx
import React, { useState, useEffect, useMemo } from 'react';
import {
LineChart,
@@ -142,7 +142,7 @@ export const PriceHistoryChart: React.FC = () => {
const renderContent = () => {
if (isLoading || isLoadingUserData) {
return (
<div role="status" className="flex justify-center items-center h-full min-h-[200px]">
<div role="status" className="flex justify-center items-center h-full min-h-50]">
<LoadingSpinner /> <span className="ml-2">Loading Price History...</span>
</div>
);
@@ -198,7 +198,12 @@ export const PriceHistoryChart: React.FC = () => {
borderRadius: '0.5rem',
}}
labelStyle={{ color: '#F9FAFB' }}
formatter={(value: number) => `$${(value / 100).toFixed(2)}`}
formatter={(value: number | undefined) => {
if (typeof value === 'number') {
return [`$${(value / 100).toFixed(2)}`];
}
return [null];
}}
/>
<Legend wrapperStyle={{ fontSize: '12px' }} />
{availableItems.map((item, index) => (

View File

@@ -2,8 +2,8 @@
import React, { ReactNode } from 'react';
import { renderHook, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { useUserData } from '../hooks/useUserData';
import { useAuth } from '../hooks/useAuth';
import { useUserData } from './useUserData';
import { useAuth } from './useAuth';
import { UserDataProvider } from '../providers/UserDataProvider';
import { useApiOnMount } from './useApiOnMount';
import type { UserProfile } from '../types';

View File

@@ -86,12 +86,15 @@ describe('AI Routes (/api/ai)', () => {
// Arrange
const mkdirError = new Error('EACCES: permission denied');
vi.resetModules(); // Reset modules to re-run top-level code
vi.doMock('node:fs', () => ({
...fs,
mkdirSync: vi.fn().mockImplementation(() => {
throw mkdirError;
}),
}));
vi.doMock('node:fs', () => {
const mockFs = {
...fs,
mkdirSync: vi.fn().mockImplementation(() => {
throw mkdirError;
}),
};
return { ...mockFs, default: mockFs };
});
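
The extra `default` key in the returned object is what makes `import fs from 'node:fs'` resolve to the stub under Vitest's ESM interop; the same pattern in isolation, with a hypothetical module name for illustration:

```ts
import { vi } from 'vitest';

vi.doMock('some-module', () => {
  const api = { doThing: vi.fn() };
  // Named imports read the spread keys; a default import
  // (`import api from 'some-module'`) reads the `default` key. Expose both.
  return { ...api, default: api };
});
```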
const { logger } = await import('../services/logger.server');
// Act: Dynamically import the router to trigger the mkdirSync call
@@ -617,6 +620,14 @@ describe('AI Routes (/api/ai)', () => {
expect(response.body.text).toContain('server-generated quick insight');
});
it('POST /quick-insights should accept items with "item" property instead of "name"', async () => {
const response = await supertest(app)
.post('/api/ai/quick-insights')
.send({ items: [{ item: 'test item' }] });
expect(response.status).toBe(200);
});
it('POST /quick-insights should return 500 on a generic error', async () => {
// To hit the catch block, we can simulate an error by making the logger throw.
vi.mocked(mockLogger.info).mockImplementationOnce(() => {

View File

@@ -2,7 +2,7 @@
import { Router, Request, Response, NextFunction } from 'express';
import multer from 'multer';
import path from 'path';
import fs from 'fs';
import fs from 'node:fs';
import { z } from 'zod';
import passport from './passport.routes';
import { optionalAuth } from './passport.routes';
@@ -88,10 +88,17 @@ const rescanAreaSchema = z.object({
const flyerItemForAnalysisSchema = z
.object({
name: requiredString('Item name is required.'),
// Allow other properties to pass through without validation
item: z.string().nullish(),
name: z.string().nullish(),
})
.passthrough();
.passthrough()
.refine(
(data) =>
(data.item && data.item.trim().length > 0) || (data.name && data.name.trim().length > 0),
{
message: "Item identifier is required (either 'item' or 'name').",
},
);
const insightsSchema = z.object({
body: z.object({

View File

@@ -297,7 +297,6 @@ describe('Auth Routes (/api/auth)', () => {
// The API now returns a nested UserProfile object
expect(response.body.userprofile).toEqual(
expect.objectContaining({
user_id: 'user-123',
user: expect.objectContaining({
user_id: 'user-123',
email: loginCredentials.email,
@@ -618,7 +617,9 @@ describe('Auth Routes (/api/auth)', () => {
const setCookieHeader = response.headers['set-cookie'];
expect(setCookieHeader).toBeDefined();
expect(setCookieHeader[0]).toContain('refreshToken=;');
expect(setCookieHeader[0]).toContain('Expires=Thu, 01 Jan 1970');
// Check for Max-Age=0, which is the modern way to expire a cookie.
// The 'Expires' attribute is a fallback and its exact value can be inconsistent.
expect(setCookieHeader[0]).toContain('Max-Age=0');
});
it('should still return 200 OK even if deleting the refresh token from DB fails', async () => {

View File

@@ -381,7 +381,7 @@ router.post('/logout', async (req: Request, res: Response) => {
// Instruct the browser to clear the cookie by setting its expiration to the past.
res.cookie('refreshToken', '', {
httpOnly: true,
expires: new Date(0),
maxAge: 0, // Use maxAge for modern compatibility; Express sets 'Expires' as a fallback.
secure: process.env.NODE_ENV === 'production',
});
res.status(200).json({ message: 'Logged out successfully.' });

View File

@@ -2,6 +2,7 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as aiApiClient from './aiApiClient';
import { AiAnalysisService } from './aiAnalysisService';
import { createMockFlyerItem } from '../tests/utils/mockFactories';
// Mock the dependencies
vi.mock('./aiApiClient');
@@ -56,7 +57,7 @@ describe('AiAnalysisService', () => {
json: () => Promise.resolve(mockResponse),
} as Response);
const result = await service.searchWeb([]);
const result = await service.searchWeb([createMockFlyerItem({ item: 'test' })]);
expect(result.text).toBe('Search results');
expect(result.sources).toEqual([{ uri: 'https://example.com', title: 'Example' }]);
@@ -68,7 +69,7 @@ describe('AiAnalysisService', () => {
json: () => Promise.resolve(mockResponse),
} as Response);
const result = await service.searchWeb([]);
const result = await service.searchWeb([createMockFlyerItem({ item: 'test' })]);
expect(result.text).toBe('Search results');
expect(result.sources).toEqual([]);
@@ -83,7 +84,7 @@ describe('AiAnalysisService', () => {
json: () => Promise.resolve(mockResponse),
} as Response);
const result = await service.searchWeb([]);
const result = await service.searchWeb([createMockFlyerItem({ item: 'test' })]);
expect(result.sources).toEqual([{ uri: '', title: 'Untitled' }]);
});
@@ -92,7 +93,9 @@ describe('AiAnalysisService', () => {
const apiError = new Error('API is down');
vi.mocked(aiApiClient.searchWeb).mockRejectedValue(apiError);
await expect(service.searchWeb([])).rejects.toThrow(apiError);
await expect(service.searchWeb([createMockFlyerItem({ item: 'test' })])).rejects.toThrow(
apiError,
);
});
});

View File

@@ -42,9 +42,11 @@ export class AiAnalysisService {
*/
async searchWeb(items: FlyerItem[]): Promise<GroundedResponse> {
logger.info('[AiAnalysisService] searchWeb called.');
// Construct a query string from the item names.
const query = items.map((item) => item.item).join(', ');
// The API client returns a specific shape that we need to await the JSON from
const response: { text: string; sources: RawSource[] } = await aiApiClient
.searchWeb(items)
.searchWeb(query)
.then((res) => res.json());
// Normalize sources to a consistent format.
const mappedSources = (response.sources || []).map(

View File

@@ -282,15 +282,15 @@ describe('AI API Client (Network Mocking with MSW)', () => {
});
describe('searchWeb', () => {
it('should send items as JSON in the body', async () => {
const items = [createMockFlyerItem({ item: 'search me' })];
await aiApiClient.searchWeb(items, undefined, 'test-token');
it('should send query as JSON in the body', async () => {
const query = 'search me';
await aiApiClient.searchWeb(query, undefined, 'test-token');
expect(requestSpy).toHaveBeenCalledTimes(1);
const req = requestSpy.mock.calls[0][0];
expect(req.endpoint).toBe('search-web');
expect(req.body).toEqual({ items });
expect(req.body).toEqual({ query });
expect(req.headers.get('Authorization')).toBe('Bearer test-token');
});
});

View File

@@ -135,7 +135,7 @@ export const getDeepDiveAnalysis = async (
};
export const searchWeb = async (
items: Partial<FlyerItem>[],
query: string,
signal?: AbortSignal,
tokenOverride?: string,
): Promise<Response> => {
@@ -144,7 +144,7 @@ export const searchWeb = async (
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ items }),
body: JSON.stringify({ query }),
signal,
},
{ tokenOverride, signal },

View File

@@ -624,14 +624,10 @@ describe('User DB Service', () => {
);
});
it('should throw NotFoundError if token is not found', async () => {
it('should return undefined if token is not found', async () => {
mockPoolInstance.query.mockResolvedValue({ rows: [], rowCount: 0 });
await expect(userRepo.findUserByRefreshToken('a-token', mockLogger)).rejects.toThrow(
NotFoundError,
);
await expect(userRepo.findUserByRefreshToken('a-token', mockLogger)).rejects.toThrow(
'User not found for the given refresh token.',
);
const result = await userRepo.findUserByRefreshToken('a-token', mockLogger);
expect(result).toBeUndefined();
});
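
With the repository now returning `undefined` instead of throwing, callers treat absence as a value; a sketch of an adjusted call site (handler shape and names assumed):

```ts
import { Request, Response } from 'express';

// Sketch of a refresh-token handler under the new contract (userRepo assumed in scope).
async function handleRefresh(req: Request, res: Response) {
  const user = await userRepo.findUserByRefreshToken(req.cookies.refreshToken, req.log);
  if (!user) {
    // Absence is now an expected outcome, not a server error.
    return res.status(401).json({ error: 'Invalid refresh token.' });
  }
  // ...issue a fresh access token for `user`...
}
```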
it('should throw a generic error if the database query fails', async () => {

View File

@@ -52,10 +52,7 @@ export class UserRepository {
);
return res.rows[0];
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, email },
'Database error in findUserByEmail',
);
logger.error({ err: error, email }, 'Database error in findUserByEmail');
throw new Error('Failed to retrieve user from database.');
}
}
@@ -130,10 +127,7 @@ export class UserRepository {
throw new UniqueConstraintError('A user with this email address already exists.');
}
// The withTransaction helper logs the rollback, so we just log the context here.
logger.error(
{ err: error instanceof Error ? error.message : error, email },
'Error during createUser transaction',
);
logger.error({ err: error, email }, 'Error during createUser transaction');
throw new Error('Failed to create user in database.');
});
}
@@ -188,10 +182,7 @@ export class UserRepository {
return authableProfile;
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, email },
'Database error in findUserWithProfileByEmail',
);
logger.error({ err: error, email }, 'Database error in findUserWithProfileByEmail');
throw new Error('Failed to retrieve user with profile from database.');
}
}
@@ -215,7 +206,7 @@ export class UserRepository {
} catch (error) {
if (error instanceof NotFoundError) throw error;
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in findUserById',
);
throw new Error('Failed to retrieve user by ID from database.');
@@ -242,7 +233,7 @@ export class UserRepository {
} catch (error) {
if (error instanceof NotFoundError) throw error;
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in findUserWithPasswordHashById',
);
throw new Error('Failed to retrieve user with sensitive data by ID from database.');
@@ -291,7 +282,7 @@ export class UserRepository {
throw error;
}
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in findUserProfileById',
);
throw new Error('Failed to retrieve user profile from database.');
@@ -340,7 +331,7 @@ export class UserRepository {
throw error;
}
logger.error(
{ err: error instanceof Error ? error.message : error, userId, profileData },
{ err: error, userId, profileData },
'Database error in updateUserProfile',
);
throw new Error('Failed to update user profile in database.');
@@ -372,7 +363,7 @@ export class UserRepository {
throw error;
}
logger.error(
{ err: error instanceof Error ? error.message : error, userId, preferences },
{ err: error, userId, preferences },
'Database error in updateUserPreferences',
);
throw new Error('Failed to update user preferences in database.');
@@ -393,7 +384,7 @@ export class UserRepository {
);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in updateUserPassword',
);
throw new Error('Failed to update user password in database.');
@@ -408,9 +399,9 @@ export class UserRepository {
async deleteUserById(userId: string, logger: Logger): Promise<void> {
try {
await this.db.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in deleteUserById',
);
throw new Error('Failed to delete user from database.');
@@ -431,7 +422,7 @@ export class UserRepository {
);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in saveRefreshToken',
);
throw new Error('Failed to save refresh token.');
@@ -443,23 +434,21 @@ export class UserRepository {
* @param refreshToken The refresh token to look up.
* @returns A promise that resolves to the user object (id, email) or undefined if not found.
*/
// prettier-ignore
async findUserByRefreshToken(refreshToken: string, logger: Logger): Promise<{ user_id: string; email: string; }> {
async findUserByRefreshToken(
refreshToken: string,
logger: Logger,
): Promise<{ user_id: string; email: string } | undefined> {
try {
const res = await this.db.query<{ user_id: string; email: string }>(
'SELECT user_id, email FROM public.users WHERE refresh_token = $1',
[refreshToken]
[refreshToken],
);
if ((res.rowCount ?? 0) === 0) {
throw new NotFoundError('User not found for the given refresh token.');
return undefined;
}
return res.rows[0];
} catch (error) {
if (error instanceof NotFoundError) throw error;
logger.error(
{ err: error instanceof Error ? error.message : error },
'Database error in findUserByRefreshToken',
);
logger.error({ err: error }, 'Database error in findUserByRefreshToken');
throw new Error('Failed to find user by refresh token.'); // Generic error for other failures
}
}
@@ -474,10 +463,7 @@ export class UserRepository {
refreshToken,
]);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error },
'Database error in deleteRefreshToken',
);
logger.error({ err: error }, 'Database error in deleteRefreshToken');
}
}
@@ -501,7 +487,7 @@ export class UserRepository {
throw new ForeignKeyConstraintError('The specified user does not exist.');
}
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in createPasswordResetToken',
);
throw new Error('Failed to create password reset token.');
@@ -521,7 +507,7 @@ export class UserRepository {
return res.rows;
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error },
{ err: error },
'Database error in getValidResetTokens',
);
throw new Error('Failed to retrieve valid reset tokens.');
@@ -538,7 +524,7 @@ export class UserRepository {
await this.db.query('DELETE FROM public.password_reset_tokens WHERE token_hash = $1', [tokenHash]);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, tokenHash },
{ err: error, tokenHash },
'Database error in deleteResetToken',
);
}
@@ -559,10 +545,7 @@ export class UserRepository {
);
return res.rowCount ?? 0;
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error },
'Database error in deleteExpiredResetTokens',
);
logger.error({ err: error }, 'Database error in deleteExpiredResetTokens');
throw new Error('Failed to delete expired password reset tokens.');
}
}
@@ -581,10 +564,7 @@ export class UserRepository {
if (error instanceof Error && 'code' in error && error.code === '23503') {
throw new ForeignKeyConstraintError('One or both users do not exist.');
}
logger.error(
{ err: error instanceof Error ? error.message : error, followerId, followingId },
'Database error in followUser',
);
logger.error({ err: error, followerId, followingId }, 'Database error in followUser');
throw new Error('Failed to follow user.');
}
}
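The '23503' check above can be factored into a small guard; a sketch (isPgErrorWithCode is an illustrative helper — node-postgres raises Error objects that carry a string code, and 23503 is Postgres's foreign_key_violation):

function isPgErrorWithCode(error: unknown, code: string): boolean {
  return (
    error instanceof Error &&
    'code' in error &&
    (error as Error & { code?: string }).code === code
  );
}

// Usage, mirroring followUser:
//   if (isPgErrorWithCode(error, '23503')) {
//     throw new ForeignKeyConstraintError('One or both users do not exist.');
//   }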
@@ -601,10 +581,7 @@ export class UserRepository {
[followerId, followingId],
);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, followerId, followingId },
'Database error in unfollowUser',
);
logger.error({ err: error, followerId, followingId }, 'Database error in unfollowUser');
throw new Error('Failed to unfollow user.');
}
}
@@ -635,10 +612,7 @@ export class UserRepository {
const res = await this.db.query<ActivityLogItem>(query, [userId, limit, offset]);
return res.rows;
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId, limit, offset },
'Database error in getUserFeed',
);
logger.error({ err: error, userId, limit, offset }, 'Database error in getUserFeed');
throw new Error('Failed to retrieve user feed.');
}
}
@@ -660,10 +634,7 @@ export class UserRepository {
);
return res.rows[0];
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, queryData },
'Database error in logSearchQuery',
);
logger.error({ err: error, queryData }, 'Database error in logSearchQuery');
throw new Error('Failed to log search query.');
}
}
@@ -698,7 +669,7 @@ export async function exportUserData(userId: string, logger: Logger): Promise<{
});
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in exportUserData',
);
throw new Error('Failed to export user data.');

View File

@@ -45,7 +45,7 @@ export class GoogleGeocodingService {
return null;
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, address },
{ err: error, address },
'[GoogleGeocodingService] An error occurred while calling the Google Maps API.',
);
throw error; // Re-throw to allow the calling service to handle the failure (e.g., by falling back).
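A sketch of the fallback this re-throw enables (the service and method names are assumptions; the actual caller is not shown in this diff):

async function geocodeWithFallback(address: string, logger: Logger) {
  try {
    return await googleGeocodingService.geocode(address, logger);
  } catch (error) {
    logger.warn({ err: error, address }, 'Google geocoding failed; trying fallback.');
    return await fallbackGeocodingService.geocode(address, logger);
  }
}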

View File

@@ -1,5 +1,5 @@
// src/tests/integration/admin.integration.test.ts
import { describe, it, expect, beforeAll, beforeEach } from 'vitest';
import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';
import * as apiClient from '../../services/apiClient';
import { getPool } from '../../services/db/connection.db';
import type { UserProfile } from '../../types';

View File

@@ -55,7 +55,7 @@ describe('AI API Routes Integration Tests', () => {
const mockImageFile = new File(['content'], 'test.jpg', { type: 'image/jpeg' });
const response = await aiApiClient.extractAddressFromImage(mockImageFile, authToken);
const result = await response.json();
expect(result.address).toBe('123 AI Street, Server City');
expect(result.address).toBe('not identified');
});
it('POST /api/ai/extract-logo should return a stubbed response', async () => {
@@ -66,24 +66,28 @@ describe('AI API Routes Integration Tests', () => {
});
it('POST /api/ai/quick-insights should return a stubbed insight', async () => {
const response = await aiApiClient.getQuickInsights([], undefined, authToken);
const response = await aiApiClient.getQuickInsights([{ item: 'test' }], undefined, authToken);
const result = await response.json();
expect(result.text).toBe('This is a server-generated quick insight: buy the cheap stuff!');
});
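// Note: the stubbed insights and deep-dive routes evidently reject empty
// payloads, hence the switch from [] to [{ item: 'test' }] in this file.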
it('POST /api/ai/deep-dive should return a stubbed analysis', async () => {
const response = await aiApiClient.getDeepDiveAnalysis([], undefined, authToken);
const response = await aiApiClient.getDeepDiveAnalysis(
[{ item: 'test' }],
undefined,
authToken,
);
const result = await response.json();
expect(result.text).toBe('This is a server-generated deep dive analysis. It is very detailed.');
});
it('POST /api/ai/search-web should return a stubbed search result', async () => {
const response = await aiApiClient.searchWeb([], undefined, authToken);
const response = await aiApiClient.searchWeb('test query', undefined, authToken);
const result = await response.json();
expect(result).toEqual({ text: 'The web says this is good.', sources: [] });
});
it('POST /api/ai/plan-trip should return a stubbed trip plan', async () => {
it('POST /api/ai/plan-trip should return an error as the feature is disabled', async () => {
// The GeolocationCoordinates type requires more than just lat/lng.
// We create a complete mock object to satisfy the type.
const mockLocation: TestGeolocationCoordinates = {
@@ -94,20 +98,36 @@ describe('AI API Routes Integration Tests', () => {
altitudeAccuracy: null,
heading: null,
speed: null,
toJSON: () => ({}),
toJSON: function () {
return {
latitude: this.latitude,
longitude: this.longitude,
accuracy: this.accuracy,
altitude: this.altitude,
altitudeAccuracy: this.altitudeAccuracy,
heading: this.heading,
speed: this.speed,
};
},
};
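// With the field-mirroring toJSON above, JSON.parse(JSON.stringify(mockLocation))
// reproduces the actual coordinate fields instead of the old '{}' placeholder,
// so any request body serialized from this mock carries real values.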
const mockStore = {
name: 'Test Store for Trip',
store_id: 1,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
};
const response = await aiApiClient.planTripWithMaps(
[],
undefined,
mockStore,
mockLocation,
undefined,
authToken,
);
const result = await response.json();
expect(result).toBeDefined();
// The AI service is mocked in unit tests, but in integration it might be live.
// For now, we just check that we get a text response.
expect(result.text).toBeTypeOf('string');
// The service for this endpoint is disabled and throws an error, which results in a 500.
expect(response.ok).toBe(false);
expect(response.status).toBe(500);
const errorResult = await response.json();
expect(errorResult.message).toContain('planTripWithMaps');
});
it('POST /api/ai/generate-image should reject because it is not implemented', async () => {

View File

@@ -60,7 +60,11 @@ describe('Flyer Processing Background Job Integration Test', () => {
// Arrange: Load a mock flyer PDF.
const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
const imageBuffer = await fs.readFile(imagePath);
const mockImageFile = new File([imageBuffer], 'test-flyer-image.jpg', { type: 'image/jpeg' });
// Create a unique buffer and filename for each test run to ensure a unique checksum.
// This prevents a 409 Conflict error when the second test runs.
const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
const checksum = await generateFileChecksum(mockImageFile);
// Act 1: Upload the file to start the background job.
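generateFileChecksum itself is not shown in this diff; a typical SHA-256 implementation over the Web Crypto API, sketched below, shows why appending Date.now() to the buffer is enough to produce a brand-new checksum:

async function sha256Hex(file: File): Promise<string> {
  // Hash the raw bytes; any appended suffix changes the digest entirely.
  const digest = await crypto.subtle.digest('SHA-256', await file.arrayBuffer());
  return Array.from(new Uint8Array(digest))
    .map((byte) => byte.toString(16).padStart(2, '0'))
    .join('');
}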

View File

@@ -30,7 +30,7 @@ describe('Public API Routes Integration Tests', () => {
// which also handles activity logging correctly.
const { user: createdUser } = await createAndLoginUser({
email: userEmail,
password: 'test-hash',
password: 'a-Very-Strong-Password-123!',
fullName: 'Public Routes Test User',
});
testUser = createdUser;
@@ -54,9 +54,10 @@ describe('Public API Routes Integration Tests', () => {
testFlyer = flyerRes.rows[0];
// Add an item to the flyer
await pool.query(`INSERT INTO public.flyer_items (flyer_id, item) VALUES ($1, 'Test Item')`, [
testFlyer.flyer_id,
]);
await pool.query(
`INSERT INTO public.flyer_items (flyer_id, item, price_display, quantity) VALUES ($1, 'Test Item', '$0.00', 'each')`,
[testFlyer.flyer_id],
);
});
afterAll(async () => {

View File

@@ -1,4 +1,5 @@
/// <reference types="vitest" />
// vitest.config.ts
import { defineConfig } from 'vitest/config';
export default defineConfig({
@@ -6,12 +7,11 @@ export default defineConfig({
globals: true,
environment: 'jsdom',
// This setup file is where we can add global test configurations
setupFiles: [
'./src/tests/setup/tests-setup-unit.ts',
'./src/tests/setup/mockHooks.ts',
'./src/tests/setup/mockComponents.tsx'
],
setupFiles: ['./src/tests/setup/tests-setup-unit.ts'],
// Temporarily removed from setupFiles: './src/tests/setup/mockHooks.ts'
// and './src/tests/setup/mockComponents.tsx'.
// Key fix: widen the include glob so plain .ts test files are collected too.
include: ['src/**/*.test.tsx'],
include: ['src/**/*.test.{ts,tsx}'],
},
});
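The widened include glob collects plain TypeScript suites alongside React component tests; illustrative (hypothetical) paths:

// src/services/user.db.test.ts  -> now collected (the old 'src/**/*.test.tsx' missed it)
// src/components/App.test.tsx   -> collected by both the old and new patterns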