# Compare commits

33 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | a1f52544d0 |  |
|  | 2334359756 |  |
|  | 406954ca06 |  |
|  | 95d441be98 |  |
|  | 186ed484b7 |  |
|  | 3669958e9d |  |
|  | 5f3daf0539 |  |
|  | ae7afaaf97 |  |
|  | 3ae7b9e0d4 |  |
|  | 921c48fc57 |  |
|  | 2571864b91 |  |
|  | 065d0c746a |  |
|  | 395f6c21a2 |  |
|  | aec56dfc23 |  |
|  | a12a0e5207 |  |
|  | e337bd67b1 |  |
|  | a8f5b4e51a |  |
|  | d0ce8021d6 |  |
|  | efbb162880 |  |
|  | e353ce8a81 |  |
|  | b5cbf271b8 |  |
|  | 2041b4ac3c |  |
|  | e547363a65 |  |
|  | bddaf765fc |  |
|  | 3c0bebb65c |  |
|  | 265cc3ffd4 |  |
|  | 3d5767b60b |  |
|  | e9cb45efe0 |  |
|  | 99a57f3a30 |  |
|  | e46f5eb7f6 |  |
|  | 034887069c |  |
|  | 84b5e0e15e |  |
|  | dc0f774699 |  |
**.devcontainer/devcontainer.json** (new file, 18 lines)
```diff
@@ -0,0 +1,18 @@
+{
+  "name": "Flyer Crawler Dev (Ubuntu 22.04)",
+  "dockerComposeFile": ["../compose.dev.yml"],
+  "service": "app",
+  "workspaceFolder": "/app",
+  "customizations": {
+    "vscode": {
+      "extensions": ["dbaeumer.vscode-eslint", "esbenp.prettier-vscode"]
+    }
+  },
+  "remoteUser": "root",
+  // Automatically install dependencies when the container is created.
+  // This runs inside the container, populating the isolated node_modules volume.
+  "postCreateCommand": "npm install",
+  "postAttachCommand": "npm run dev:container",
+  // Try to start podman machine, but exit with success (0) even if it's already running
+  "initializeCommand": "powershell -Command \"podman machine start; exit 0\""
+}
```
```diff
@@ -136,7 +136,8 @@ jobs:
           # Run unit and integration tests as separate steps.
           # The `|| true` ensures the workflow continues even if tests fail, allowing coverage to run.
           echo "--- Running Unit Tests ---"
-          npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
+          # npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
+          npm run test:unit -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only --no-file-parallelism || true

           echo "--- Running Integration Tests ---"
           npm run test:integration -- --coverage --reporter=verbose --includeTaskLocation --testTimeout=10000 --silent=passed-only || true
```
```diff
@@ -60,4 +60,4 @@ jobs:
         uses: actions/upload-artifact@v3
         with:
           name: database-backup
-          path: ${{ env.backup_filename }}
+          path: ${{ env.backup_filename }}
```
```diff
@@ -144,4 +144,4 @@ jobs:
           find "$APP_PATH/flyer-images" -type f -name '*-test-flyer-image.*' -delete
           find "$APP_PATH/flyer-images/icons" -type f -name '*-test-flyer-image.*' -delete
           find "$APP_PATH/flyer-images/archive" -mindepth 1 -maxdepth 1 -type f -delete || echo "Archive directory not found, skipping."
-          echo "✅ Flyer asset directories cleared."
+          echo "✅ Flyer asset directories cleared."
```
```diff
@@ -130,4 +130,4 @@ jobs:
           find "$APP_PATH/flyer-images" -mindepth 1 -type f -delete
           find "$APP_PATH/flyer-images/icons" -mindepth 1 -type f -delete
           find "$APP_PATH/flyer-images/archive" -mindepth 1 -type f -delete || echo "Archive directory not found, skipping."
-          echo "✅ Test flyer asset directories cleared."
+          echo "✅ Test flyer asset directories cleared."
```
```diff
@@ -25,7 +25,7 @@ jobs:
       DB_USER: ${{ secrets.DB_USER }}
       DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
       DB_NAME: ${{ secrets.DB_DATABASE_PROD }}
-      BACKUP_DIR: "/var/www/backups" # Define a dedicated directory for backups
+      BACKUP_DIR: '/var/www/backups' # Define a dedicated directory for backups

     steps:
       - name: Validate Secrets and Inputs
```
```diff
@@ -92,4 +92,4 @@ jobs:
           echo "Restarting application server..."
           cd /var/www/flyer-crawler.projectium.com
           pm2 startOrReload ecosystem.config.cjs --env production && pm2 save
-          echo "✅ Application server restarted."
+          echo "✅ Application server restarted."
```
**Dockerfile.dev** (new file, 31 lines)
```diff
@@ -0,0 +1,31 @@
+# Use Ubuntu 22.04 (LTS) as the base image to match production
+FROM ubuntu:22.04
+
+# Set environment variables to non-interactive to avoid prompts during installation
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Update package lists and install essential tools
+# - curl: for downloading Node.js setup script
+# - git: for version control operations
+# - build-essential: for compiling native Node.js modules (node-gyp)
+# - python3: required by some Node.js build tools
+RUN apt-get update && apt-get install -y \
+    curl \
+    git \
+    build-essential \
+    python3 \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Node.js 20.x (LTS) from NodeSource
+RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
+    && apt-get install -y nodejs
+
+# Set the working directory inside the container
+WORKDIR /app
+
+# Set default environment variables for development
+ENV NODE_ENV=development
+ENV NODE_OPTIONS='--max-old-space-size=8192'
+
+# Default command keeps the container running so you can attach to it
+CMD ["bash"]
```
**compose.dev.yml** (new file, 52 lines)
```diff
@@ -0,0 +1,52 @@
+version: '3.8'
+
+services:
+  app:
+    container_name: flyer-crawler-dev
+    build:
+      context: .
+      dockerfile: Dockerfile.dev
+    volumes:
+      # Mount the current directory to /app in the container
+      - .:/app
+      # Create a volume for node_modules to avoid conflicts with Windows host
+      # and improve performance.
+      - node_modules_data:/app/node_modules
+    ports:
+      - '3000:3000' # Frontend (Vite default)
+      - '3001:3001' # Backend API
+    environment:
+      - NODE_ENV=development
+      - DB_HOST=postgres
+      - DB_USER=postgres
+      - DB_PASSWORD=postgres
+      - DB_NAME=flyer_crawler_dev
+      - REDIS_URL=redis://redis:6379
+      # Add other secrets here or use a .env file
+    depends_on:
+      - postgres
+      - redis
+    # Keep container running so VS Code can attach
+    command: tail -f /dev/null
+
+  postgres:
+    image: docker.io/library/postgis/postgis:15-3.4
+    container_name: flyer-crawler-postgres
+    ports:
+      - '5432:5432'
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: postgres
+      POSTGRES_DB: flyer_crawler_dev
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+
+  redis:
+    image: docker.io/library/redis:alpine
+    container_name: flyer-crawler-redis
+    ports:
+      - '6379:6379'
+
+volumes:
+  postgres_data:
+  node_modules_data:
```
```diff
@@ -34,7 +34,7 @@ We will adopt a strict, consistent error-handling contract for the service and r
 **Robustness**: Eliminates an entire class of bugs where `undefined` is passed to `res.json()`, preventing incorrect `500` errors.
 **Consistency & Predictability**: All data-fetching methods now have a predictable contract. They either return the expected data or throw a specific, typed error.
 **Developer Experience**: Route handlers become simpler, cleaner, and easier to write correctly. The cognitive load on developers is reduced as they no longer need to remember to check for `undefined`.
-**Improved Testability**: Tests become more reliable and realistic. Mocks can now throw the *exact* error type (`new NotFoundError()`) that the real implementation would, ensuring tests accurately reflect the application's behavior.
+**Improved Testability**: Tests become more reliable and realistic. Mocks can now throw the _exact_ error type (`new NotFoundError()`) that the real implementation would, ensuring tests accurately reflect the application's behavior.
 **Centralized Control**: Error-to-HTTP-status logic is centralized in the `errorHandler` middleware, making it easy to manage and modify error responses globally.

 ### Negative
```
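The contract above leans on typed errors plus a single `errorHandler`, neither of which appears in the hunk; here is a minimal sketch, where the `NotFoundError` shape and the status mapping are assumptions rather than the project's actual code:

```ts
import type { NextFunction, Request, Response } from 'express';

// Assumed typed error from ADR-001: repositories throw it instead of returning undefined.
export class NotFoundError extends Error {
  readonly status = 404;
}

// Sketch of a centralized errorHandler: typed errors map to their HTTP status,
// anything unexpected becomes a 500 and is never leaked to the client.
export function errorHandler(err: Error, _req: Request, res: Response, _next: NextFunction) {
  if (err instanceof NotFoundError) {
    res.status(err.status).json({ error: err.message || 'Not found' });
    return;
  }
  res.status(500).json({ error: 'Internal server error' });
}
```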
```diff
@@ -10,21 +10,19 @@ Following the standardization of error handling in ADR-001, the next most common

 This manual approach has several drawbacks:
 **Repetitive Boilerplate**: The `try/catch/finally` block for transaction management is duplicated across multiple files.
-**Error-Prone**: It is easy to forget to `client.release()` in all code paths, which can lead to connection pool exhaustion and bring down the application.
-3. **Poor Composability**: It is difficult to compose multiple repository methods into a single, atomic "Unit of Work". For example, a service function that needs to update a user's points and create a budget in a single transaction cannot easily do so if both underlying repository methods create their own transactions.
+**Error-Prone**: It is easy to forget to `client.release()` in all code paths, which can lead to connection pool exhaustion and bring down the application. 3. **Poor Composability**: It is difficult to compose multiple repository methods into a single, atomic "Unit of Work". For example, a service function that needs to update a user's points and create a budget in a single transaction cannot easily do so if both underlying repository methods create their own transactions.

 ## Decision

 We will implement a standardized "Unit of Work" pattern through a high-level `withTransaction` helper function. This function will abstract away the complexity of transaction management.

 1. **`withTransaction` Helper**: A new helper function, `withTransaction<T>(callback: (client: PoolClient) => Promise<T>): Promise<T>`, will be created. This function will be responsible for:

-   * Acquiring a client from the database pool.
-   * Starting a transaction (`BEGIN`).
-   * Executing the `callback` function, passing the transactional client to it.
-   * If the callback succeeds, it will `COMMIT` the transaction.
-   * If the callback throws an error, it will `ROLLBACK` the transaction and re-throw the error.
-   * In all cases, it will `RELEASE` the client back to the pool.
+   - Acquiring a client from the database pool.
+   - Starting a transaction (`BEGIN`).
+   - Executing the `callback` function, passing the transactional client to it.
+   - If the callback succeeds, it will `COMMIT` the transaction.
+   - If the callback throws an error, it will `ROLLBACK` the transaction and re-throw the error.
+   - In all cases, it will `RELEASE` the client back to the pool.

 2. **Repository Method Signature**: Repository methods that need to be part of a transaction will be updated to optionally accept a `PoolClient` in their constructor or as a method parameter. By default, they will use the global pool. When called from within a `withTransaction` block, they will be passed the transactional client.
 3. **Service Layer Orchestration**: Service-layer functions that orchestrate multi-step operations will use `withTransaction` to ensure atomicity. They will instantiate or call repository methods, providing them with the transactional client from the callback.
@@ -40,7 +38,7 @@ async function registerUserAndCreateDefaultList(userData) {
     const shoppingRepo = new ShoppingRepository(client);

     const newUser = await userRepo.createUser(userData);
-    await shoppingRepo.createShoppingList(newUser.user_id, "My First List");
+    await shoppingRepo.createShoppingList(newUser.user_id, 'My First List');

     return newUser;
   });
```
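The ADR names the helper's six responsibilities but the hunk does not include its body; a minimal sketch against `pg`, assuming a module-level `pool` configured from the environment:

```ts
import { Pool, type PoolClient } from 'pg';

const pool = new Pool(); // assumption: connection settings come from the environment

// Unit-of-Work helper as described above: BEGIN, run the callback,
// COMMIT on success, ROLLBACK on error, and always RELEASE the client.
export async function withTransaction<T>(
  callback: (client: PoolClient) => Promise<T>,
): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const result = await callback(client);
    await client.query('COMMIT');
    return result;
  } catch (err) {
    await client.query('ROLLBACK');
    throw err;
  } finally {
    client.release();
  }
}
```

Because the callback receives the transactional client, repository instances constructed with it (as in the `registerUserAndCreateDefaultList` example above) all share one atomic transaction.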
````diff
@@ -20,8 +20,8 @@ We will adopt a schema-based approach for input validation using the `zod` libra
 1. **Adopt `zod` for Schema Definition**: We will use `zod` to define clear, type-safe schemas for the `params`, `query`, and `body` of each API request. `zod` provides powerful and declarative validation rules and automatically infers TypeScript types.

 2. **Create a Reusable Validation Middleware**: A generic `validateRequest(schema)` middleware will be created. This middleware will take a `zod` schema, parse the incoming request against it, and handle success and error cases.
-   * On successful validation, the parsed and typed data will be attached to the `req` object (e.g., `req.body` will be replaced with the parsed body), and `next()` will be called.
-   * On validation failure, the middleware will call `next()` with a custom `ValidationError` containing a structured list of issues, which `ADR-001`'s `errorHandler` can then format into a user-friendly `400 Bad Request` response.
+   - On successful validation, the parsed and typed data will be attached to the `req` object (e.g., `req.body` will be replaced with the parsed body), and `next()` will be called.
+   - On validation failure, the middleware will call `next()` with a custom `ValidationError` containing a structured list of issues, which `ADR-001`'s `errorHandler` can then format into a user-friendly `400 Bad Request` response.

 3. **Refactor Routes**: All route handlers will be refactored to use this new middleware, removing all manual validation logic.

@@ -46,18 +46,18 @@ const getFlyerSchema = z.object({
 type GetFlyerRequest = z.infer<typeof getFlyerSchema>;

 // 3. Apply the middleware and use an inline cast for the request
-router.get('/:id', validateRequest(getFlyerSchema), (async (req, res, next) => {
-  // Cast 'req' to the inferred type.
-  // This provides full type safety for params, query, and body.
-  const { params } = req as unknown as GetFlyerRequest;
+router.get('/:id', validateRequest(getFlyerSchema), async (req, res, next) => {
+  // Cast 'req' to the inferred type.
+  // This provides full type safety for params, query, and body.
+  const { params } = req as unknown as GetFlyerRequest;

-  try {
-    const flyer = await db.flyerRepo.getFlyerById(params.id); // params.id is 'number'
-    res.json(flyer);
-  } catch (error) {
-    next(error);
-  }
-}));
+  try {
+    const flyer = await db.flyerRepo.getFlyerById(params.id); // params.id is 'number'
+    res.json(flyer);
+  } catch (error) {
+    next(error);
+  }
+});

 ```

 ## Consequences
````
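The `validateRequest(schema)` middleware itself is not shown in the hunk; a minimal sketch of what it could look like, with the `ValidationError` class assumed from ADR-001:

```ts
import type { NextFunction, Request, Response } from 'express';
import type { ZodSchema } from 'zod';

// Assumed error type; ADR-001's errorHandler would format it as a 400 response.
class ValidationError extends Error {
  constructor(public readonly issues: unknown) {
    super('Request validation failed');
  }
}

// Parse params/query/body against the schema; attach parsed values on success,
// forward a structured ValidationError on failure.
export const validateRequest =
  (schema: ZodSchema) => (req: Request, _res: Response, next: NextFunction) => {
    const result = schema.safeParse({ params: req.params, query: req.query, body: req.body });
    if (!result.success) {
      next(new ValidationError(result.error.issues));
      return;
    }
    Object.assign(req, result.data); // replaces req.params/query/body with parsed, typed data
    next();
  };
```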
```diff
@@ -20,9 +20,9 @@ We will adopt a standardized, application-wide structured logging policy. All lo

 **Request-Scoped Logger with Context**: We will create a middleware that runs at the beginning of the request lifecycle. This middleware will:

-* Generate a unique `request_id` for each incoming request.
-* Create a request-scoped logger instance (a "child logger") that automatically includes the `request_id`, `user_id` (if authenticated), and `ip_address` in every log message it generates.
-* Attach this child logger to the `req` object (e.g., `req.log`).
+- Generate a unique `request_id` for each incoming request.
+- Create a request-scoped logger instance (a "child logger") that automatically includes the `request_id`, `user_id` (if authenticated), and `ip_address` in every log message it generates.
+- Attach this child logger to the `req` object (e.g., `req.log`).

 **Mandatory Use of Request-Scoped Logger**: All route handlers and any service functions called by them **MUST** use the request-scoped logger (`req.log`) instead of the global logger instance. This ensures all logs for a given request are automatically correlated.

@@ -32,9 +32,9 @@ We will adopt a standardized, application-wide structured logging policy. All lo

 **Standardized Logging Practices**:
 **INFO**: Log key business events, such as `User logged in` or `Flyer processed`.
-**WARN**: Log recoverable errors or unusual situations that do not break the request, such as `Client Error: 404 on GET /api/non-existent-route` or `Retrying failed database connection`.
-**ERROR**: Log only unhandled or server-side errors that cause a request to fail (typically handled by the `errorHandler`). Avoid logging expected client errors (like 4xx) at this level.
-**DEBUG**: Log detailed diagnostic information useful during development, such as function entry/exit points or variable states.
+**WARN**: Log recoverable errors or unusual situations that do not break the request, such as `Client Error: 404 on GET /api/non-existent-route` or `Retrying failed database connection`.
+**ERROR**: Log only unhandled or server-side errors that cause a request to fail (typically handled by the `errorHandler`). Avoid logging expected client errors (like 4xx) at this level.
+**DEBUG**: Log detailed diagnostic information useful during development, such as function entry/exit points or variable states.

 ### Example Usage
```
````diff
@@ -59,15 +59,15 @@ export const requestLogger = (req, res, next) => {

 // In a route handler:
 router.get('/:id', async (req, res, next) => {
-  // Use the request-scoped logger
-  req.log.info({ flyerId: req.params.id }, 'Fetching flyer by ID');
-  try {
-    // ... business logic ...
-    res.json(flyer);
-  } catch (error) {
-    // The error itself will be logged with full context by the errorHandler
-    next(error);
-  }
+  // Use the request-scoped logger
+  req.log.info({ flyerId: req.params.id }, 'Fetching flyer by ID');
+  try {
+    // ... business logic ...
+    res.json(flyer);
+  } catch (error) {
+    // The error itself will be logged with full context by the errorHandler
+    next(error);
+  }
 });
 ```
````
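The hunk header shows `export const requestLogger = (req, res, next) => {` but its body falls outside the diff; a sketch of what it could look like with `pino`, where the module augmentation for `req.log` is an assumption:

```ts
import { randomUUID } from 'node:crypto';
import type { NextFunction, Request, Response } from 'express';
import { pino, type Logger } from 'pino';

// Assumed augmentation so req.log is type-safe application-wide.
declare module 'express-serve-static-core' {
  interface Request {
    log: Logger;
  }
}

const logger = pino();

// One child logger per request: request_id and ip_address ride along on
// every log line; user_id could be added after authentication runs.
export const requestLogger = (req: Request, _res: Response, next: NextFunction) => {
  req.log = logger.child({ request_id: randomUUID(), ip_address: req.ip });
  next();
};
```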
```diff
@@ -14,5 +14,5 @@ We will formalize a centralized Role-Based Access Control (RBAC) or Attribute-Ba

 ## Consequences

-* **Positive**: Ensures authorization logic is consistent, easy to audit, and decoupled from business logic. Improves security by centralizing access control.
-* **Negative**: Requires a significant refactoring effort to integrate the new authorization system across all protected routes and features. Introduces a new dependency if an external library is chosen.
+- **Positive**: Ensures authorization logic is consistent, easy to audit, and decoupled from business logic. Improves security by centralizing access control.
+- **Negative**: Requires a significant refactoring effort to integrate the new authorization system across all protected routes and features. Introduces a new dependency if an external library is chosen.
```
```diff
@@ -14,5 +14,5 @@ We will establish a formal Design System and Component Library. This will involv

 ## Consequences

-* **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
-* **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
+- **Positive**: Ensures a consistent and high-quality user interface. Accelerates frontend development by providing reusable, well-documented components. Improves maintainability and reduces technical debt.
+- **Negative**: Requires an initial investment in setting up Storybook and migrating existing components. Adds a new dependency and a new workflow for frontend development.
```
```diff
@@ -14,5 +14,5 @@ We will adopt a dedicated database migration tool, such as **`node-pg-migrate`**

 ## Consequences

-* **Positive**: Provides a safe, repeatable, and reversible way to evolve the database schema. Improves team collaboration on database changes. Reduces the risk of data loss or downtime during deployments.
-* **Negative**: Requires an initial setup and learning curve for the chosen migration tool. All future schema changes must adhere to the migration workflow.
+- **Positive**: Provides a safe, repeatable, and reversible way to evolve the database schema. Improves team collaboration on database changes. Reduces the risk of data loss or downtime during deployments.
+- **Negative**: Requires an initial setup and learning curve for the chosen migration tool. All future schema changes must adhere to the migration workflow.
```
```diff
@@ -14,5 +14,5 @@ We will standardize the deployment process by containerizing the application usi

 ## Consequences

-* **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers. Improves portability and scalability of the application.
-* **Negative**: Requires learning Docker and containerization concepts. Adds `Dockerfile` and `docker-compose.yml` to the project's configuration.
+- **Positive**: Ensures consistency between development and production environments. Simplifies the setup for new developers. Improves portability and scalability of the application.
+- **Negative**: Requires learning Docker and containerization concepts. Adds `Dockerfile` and `docker-compose.yml` to the project's configuration.
```
```diff
@@ -18,5 +18,5 @@ We will implement a multi-layered security approach for the API:

 ## Consequences

-* **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
-* **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.
+- **Positive**: Significantly improves the application's security posture against common web vulnerabilities like XSS, clickjacking, and brute-force attacks.
+- **Negative**: Requires careful configuration of CORS and rate limits to avoid blocking legitimate traffic. Content-Security-Policy can be complex to configure correctly.
```
```diff
@@ -14,5 +14,5 @@ We will formalize the end-to-end CI/CD process. This ADR will define the project

 ## Consequences

-* **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
-* **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.
+- **Positive**: Automates quality control and creates a safe, repeatable path to production. Increases development velocity and reduces deployment-related errors.
+- **Negative**: Initial setup effort for the CI/CD pipeline. May slightly increase the time to merge code due to mandatory checks.
```
```diff
@@ -14,5 +14,5 @@ We will adopt **OpenAPI (Swagger)** for API documentation. We will use tools (e.

 ## Consequences

-* **Positive**: Creates a single source of truth for API documentation that stays in sync with the code. Enables auto-generation of client SDKs and simplifies testing.
-* **Negative**: Requires developers to maintain JSDoc annotations on all routes. Adds a build step and new dependencies to the project.
+- **Positive**: Creates a single source of truth for API documentation that stays in sync with the code. Enables auto-generation of client SDKs and simplifies testing.
+- **Negative**: Requires developers to maintain JSDoc annotations on all routes. Adds a build step and new dependencies to the project.
```
```diff
@@ -14,5 +14,5 @@ We will implement a formal data backup and recovery strategy. This will involve

 ## Consequences

-* **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
-* **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.
+- **Positive**: Protects against catastrophic data loss, ensuring business continuity. Provides a clear, tested plan for disaster recovery.
+- **Negative**: Requires setup and maintenance of backup scripts and secure storage. Incurs storage costs for backup files.
```
```diff
@@ -12,11 +12,11 @@ When the application is containerized (`ADR-014`), the container orchestrator (e.

 We will implement dedicated health check endpoints in the Express application.

-* A **Liveness Probe** (`/api/health/live`) will return a `200 OK` to indicate the server is running. If it fails, the orchestrator should restart the container.
+- A **Liveness Probe** (`/api/health/live`) will return a `200 OK` to indicate the server is running. If it fails, the orchestrator should restart the container.

-* A **Readiness Probe** (`/api/health/ready`) will return a `200 OK` only if the application is ready to accept traffic (e.g., database connection is established). If it fails, the orchestrator will temporarily remove the container from the load balancer.
+- A **Readiness Probe** (`/api/health/ready`) will return a `200 OK` only if the application is ready to accept traffic (e.g., database connection is established). If it fails, the orchestrator will temporarily remove the container from the load balancer.

 ## Consequences

-* **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
-* **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.
+- **Positive**: Enables robust, automated application lifecycle management in a containerized environment. Prevents traffic from being sent to unhealthy or uninitialized application instances.
+- **Negative**: Adds a small amount of code for the health check endpoints. Requires configuration in the container orchestration layer.
```
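A sketch of the two probes as Express routes; the `pool` import path and the `SELECT 1` readiness check are assumptions:

```ts
import { Router } from 'express';
import { pool } from './db'; // assumed location of the pg pool

export const healthRouter = Router();

// Liveness: the process is up. On failure the orchestrator restarts the container.
healthRouter.get('/api/health/live', (_req, res) => {
  res.status(200).json({ status: 'ok' });
});

// Readiness: only report ready once dependencies answer; here, Postgres.
healthRouter.get('/api/health/ready', async (_req, res) => {
  try {
    await pool.query('SELECT 1');
    res.status(200).json({ status: 'ready' });
  } catch {
    res.status(503).json({ status: 'not ready' });
  }
});
```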
```diff
@@ -24,8 +24,8 @@ We will adopt a standardized, application-wide structured logging policy for all

 **2. Pino-like API for Structured Logging**: The client logger mimics the `pino` API, which is the standard on the backend. It supports two primary call signatures:

-* `logger.info('A simple message');`
-* `logger.info({ key: 'value' }, 'A message with a structured data payload');`
+- `logger.info('A simple message');`
+- `logger.info({ key: 'value' }, 'A message with a structured data payload');`

 The second signature, which includes a data object as the first argument, is **strongly preferred**, especially for logging errors or complex state.
```
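A toy logger honoring both signatures; this is illustrative only, with `console` standing in for the project's real client transport:

```ts
type LogData = Record<string, unknown>;
type Level = 'info' | 'warn' | 'error' | 'debug';

// Each level accepts either a message alone, or a data object followed by a
// message, mirroring the two pino-like signatures described above.
const makeLevel =
  (level: Level) =>
  (dataOrMsg: LogData | string, msg?: string): void => {
    const entry =
      typeof dataOrMsg === 'string'
        ? { level, msg: dataOrMsg }
        : { level, msg, ...dataOrMsg };
    console[level === 'debug' ? 'log' : level](JSON.stringify(entry));
  };

export const logger = {
  info: makeLevel('info'),
  warn: makeLevel('warn'),
  error: makeLevel('error'),
  debug: makeLevel('debug'),
};

logger.info('A simple message');
logger.info({ key: 'value' }, 'A message with a structured data payload');
```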
```diff
@@ -79,7 +79,7 @@ describe('MyComponent', () => {
     // Assert that the logger was called with the expected structure
     expect(logger.error).toHaveBeenCalledWith(
       expect.objectContaining({ err: expect.any(Error) }), // Check for the error object
-      'Failed to fetch component data' // Check for the message
+      'Failed to fetch component data', // Check for the message
     );
   });
 });
```
**package-lock.json** (generated, 4281 lines): file diff suppressed because it is too large.
**package.json** (12 lines changed)
```diff
@@ -1,17 +1,19 @@
 {
   "name": "flyer-crawler",
   "private": true,
-  "version": "0.0.8",
+  "version": "0.0.20",
   "type": "module",
   "scripts": {
     "dev": "concurrently \"npm:start:dev\" \"vite\"",
+    "dev:container": "concurrently \"npm:start:dev\" \"vite --host\"",
     "start": "npm run start:prod",
     "build": "vite build",
     "preview": "vite preview",
-    "test": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
+    "test": "cross-env NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run",
+    "test-wsl": "cross-env NODE_ENV=test vitest run",
     "test:coverage": "npm run clean && npm run test:unit -- --coverage && npm run test:integration -- --coverage",
-    "test:unit": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
-    "test:integration": "NODE_ENV=test tsx ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
+    "test:unit": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project unit -c vite.config.ts",
+    "test:integration": "NODE_ENV=test tsx --max-old-space-size=8192 ./node_modules/vitest/vitest.mjs run --project integration -c vitest.config.integration.ts",
     "format": "prettier --write .",
     "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
     "type-check": "tsc --noEmit",
@@ -20,6 +22,7 @@
     "start:dev": "NODE_ENV=development tsx watch server.ts",
     "start:prod": "NODE_ENV=production tsx server.ts",
     "start:test": "NODE_ENV=test NODE_V8_COVERAGE=.coverage/tmp/integration-server tsx server.ts",
     "db:reset:dev": "NODE_ENV=development tsx src/db/seed.ts",
     "db:reset:test": "NODE_ENV=test tsx src/db/seed.ts",
     "worker:prod": "NODE_ENV=production tsx src/services/queueService.server.ts"
   },
@@ -95,6 +98,7 @@
     "autoprefixer": "^10.4.22",
     "c8": "^10.1.3",
     "concurrently": "^9.2.1",
+    "cross-env": "^10.1.0",
     "eslint": "9.39.1",
     "eslint-config-prettier": "^9.1.0",
     "eslint-plugin-react": "7.37.5",
```
```diff
@@ -1030,11 +1030,61 @@ DROP FUNCTION IF EXISTS public.fork_recipe(UUID, BIGINT);

 CREATE OR REPLACE FUNCTION public.fork_recipe(p_user_id UUID, p_original_recipe_id BIGINT)
 RETURNS SETOF public.recipes
-LANGUAGE sql
+LANGUAGE plpgsql
 SECURITY INVOKER
 AS $$
--- The entire forking logic is now encapsulated in a single, atomic database function.
-SELECT * FROM public.fork_recipe(p_user_id, p_original_recipe_id);
+DECLARE
+    new_recipe_id BIGINT;
+BEGIN
+    -- 1. Create a copy of the recipe, linking it to the new user and the original recipe.
+    INSERT INTO public.recipes (
+        user_id,
+        original_recipe_id,
+        name,
+        description,
+        instructions,
+        prep_time_minutes,
+        cook_time_minutes,
+        servings,
+        photo_url,
+        calories_per_serving,
+        protein_grams,
+        fat_grams,
+        carb_grams,
+        status -- Forked recipes should be private by default
+    )
+    SELECT
+        p_user_id,
+        p_original_recipe_id,
+        original.name || ' (Fork)', -- Append '(Fork)' to distinguish it
+        original.description,
+        original.instructions,
+        original.prep_time_minutes,
+        original.cook_time_minutes,
+        original.servings,
+        original.photo_url,
+        original.calories_per_serving,
+        original.protein_grams,
+        original.fat_grams,
+        original.carb_grams,
+        'private'
+    FROM public.recipes AS original
+    WHERE original.recipe_id = p_original_recipe_id
+    RETURNING recipe_id INTO new_recipe_id;
+
+    -- If the original recipe didn't exist, new_recipe_id will be null.
+    IF new_recipe_id IS NULL THEN
+        RETURN;
+    END IF;
+
+    -- 2. Copy all ingredients, tags, and appliances from the original recipe to the new one.
+    INSERT INTO public.recipe_ingredients (recipe_id, master_item_id, quantity, unit) SELECT new_recipe_id, master_item_id, quantity, unit FROM public.recipe_ingredients WHERE recipe_id = p_original_recipe_id;
+    INSERT INTO public.recipe_tags (recipe_id, tag_id) SELECT new_recipe_id, tag_id FROM public.recipe_tags WHERE recipe_id = p_original_recipe_id;
+    INSERT INTO public.recipe_appliances (recipe_id, appliance_id) SELECT new_recipe_id, appliance_id FROM public.recipe_appliances WHERE recipe_id = p_original_recipe_id;
+
+    -- 3. Return the newly created recipe record.
+    RETURN QUERY SELECT * FROM public.recipes WHERE recipe_id = new_recipe_id;
+END;
 $$;
```
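From the service layer, `fork_recipe` can be called in one round trip; a hedged sketch reusing the `withTransaction` helper sketched earlier, with a hypothetical import path:

```ts
import type { PoolClient } from 'pg';
import { withTransaction } from './db/withTransaction'; // hypothetical path

// SETOF return: zero rows means the original recipe did not exist.
export async function forkRecipe(userId: string, originalRecipeId: number) {
  return withTransaction(async (client: PoolClient) => {
    const { rows } = await client.query(
      'SELECT * FROM public.fork_recipe($1, $2)',
      [userId, originalRecipeId],
    );
    return rows[0] ?? null;
  });
}
```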
```diff
@@ -1566,4 +1616,3 @@ BEGIN
     bp.price_rank = 1;
 END;
 $$ LANGUAGE plpgsql;
```
```diff
@@ -8,7 +8,23 @@
 -- It is idempotent, meaning it can be run multiple times without causing errors.

--- 1. Pre-populate the master grocery items dictionary.
--- This block links generic items to their respective categories.
+-- This MUST run after populating categories.
+-- Renumbered to 2.
+
+-- 2. Pre-populate the categories table from a predefined list.
+-- Renumbered to 1. This MUST run before populating master_grocery_items.
+DO $$
+BEGIN
+    INSERT INTO public.categories (name) VALUES
+    ('Fruits & Vegetables'), ('Meat & Seafood'), ('Dairy & Eggs'), ('Bakery & Bread'),
+    ('Pantry & Dry Goods'), ('Beverages'), ('Frozen Foods'), ('Snacks'), ('Household & Cleaning'),
+    ('Personal Care & Health'), ('Baby & Child'), ('Pet Supplies'), ('Deli & Prepared Foods'),
+    ('Canned Goods'), ('Condiments & Spices'), ('Breakfast & Cereal'), ('Organic'),
+    ('International Foods'), ('Other/Miscellaneous')
+    ON CONFLICT (name) DO NOTHING;
+END $$;
+
+-- 2. Pre-populate the master grocery items dictionary.
 DO $$
 DECLARE
     fv_cat_id BIGINT; ms_cat_id BIGINT; de_cat_id BIGINT; bb_cat_id BIGINT; pdg_cat_id BIGINT;
@@ -53,18 +69,6 @@ BEGIN
     ON CONFLICT (name) DO NOTHING;
 END $$;

--- 2. Pre-populate the categories table from a predefined list.
-DO $$
-BEGIN
-    INSERT INTO public.categories (name) VALUES
-    ('Fruits & Vegetables'), ('Meat & Seafood'), ('Dairy & Eggs'), ('Bakery & Bread'),
-    ('Pantry & Dry Goods'), ('Beverages'), ('Frozen Foods'), ('Snacks'), ('Household & Cleaning'),
-    ('Personal Care & Health'), ('Baby & Child'), ('Pet Supplies'), ('Deli & Prepared Foods'),
-    ('Canned Goods'), ('Condiments & Spices'), ('Breakfast & Cereal'), ('Organic'),
-    ('International Foods'), ('Other/Miscellaneous')
-    ON CONFLICT (name) DO NOTHING;
-END $$;
-
 -- 3. Pre-populate the brands and products tables.
 -- This block adds common brands and links them to specific products.
 DO $$
```
```diff
@@ -92,6 +92,7 @@ CREATE TABLE IF NOT EXISTS public.stores (
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
     created_by UUID REFERENCES public.users(user_id) ON DELETE SET NULL
 );
+COMMENT ON TABLE public.stores IS 'Stores metadata for grocery store chains (e.g., Safeway, Kroger).';

 -- 5. The 'categories' table for normalized category data.
```
```diff
@@ -109,8 +110,8 @@ CREATE TABLE IF NOT EXISTS public.flyers (
     file_name TEXT NOT NULL,
     image_url TEXT NOT NULL,
     icon_url TEXT,
-    checksum TEXT UNIQUE,
-    store_id BIGINT REFERENCES public.stores(store_id),
+    checksum TEXT UNIQUE,
+    store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
     valid_from DATE,
     valid_to DATE,
     store_address TEXT,
```
```diff
@@ -138,7 +139,7 @@ CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid
 CREATE TABLE IF NOT EXISTS public.master_grocery_items (
     master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     name TEXT NOT NULL UNIQUE,
-    category_id BIGINT REFERENCES public.categories(category_id),
+    category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
     is_allergen BOOLEAN DEFAULT false,
     allergy_info JSONB,
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
```
```diff
@@ -161,6 +162,38 @@ CREATE TABLE IF NOT EXISTS public.user_watched_items (
 COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';
 CREATE INDEX IF NOT EXISTS idx_user_watched_items_master_item_id ON public.user_watched_items(master_item_id);

+-- 23. Store brand information. (Moved up due to dependency in flyer_items)
+CREATE TABLE IF NOT EXISTS public.brands (
+    brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    name TEXT NOT NULL UNIQUE,
+    logo_url TEXT,
+    store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
+    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
+COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
+
+-- 24. For specific products, linking a master item with a brand and size. (Moved up due to dependency in flyer_items)
+CREATE TABLE IF NOT EXISTS public.products (
+    product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
+    brand_id BIGINT REFERENCES public.brands(brand_id) ON DELETE SET NULL,
+    name TEXT NOT NULL,
+    description TEXT,
+    size TEXT,
+    upc_code TEXT UNIQUE,
+    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
+COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
+COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
+COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
+COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
+CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
+CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
+
 -- 9. The 'flyer_items' table. This stores individual items from flyers.
 CREATE TABLE IF NOT EXISTS public.flyer_items (
     flyer_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
```
```diff
@@ -170,13 +203,13 @@ CREATE TABLE IF NOT EXISTS public.flyer_items (
     price_in_cents INTEGER,
     quantity_num NUMERIC,
     quantity TEXT NOT NULL,
-    category_id BIGINT REFERENCES public.categories(category_id),
+    category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
     category_name TEXT,
     unit_price JSONB,
     view_count INTEGER DEFAULT 0 NOT NULL,
     click_count INTEGER DEFAULT 0 NOT NULL,
-    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
-    product_id BIGINT,
+    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
+    product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
 );
```
```diff
@@ -293,7 +326,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_lists_user_id ON public.shopping_lists(u
 CREATE TABLE IF NOT EXISTS public.shopping_list_items (
     shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
-    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
+    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
     custom_item_name TEXT,
     quantity NUMERIC DEFAULT 1 NOT NULL,
     is_purchased BOOLEAN DEFAULT false NOT NULL,
```
```diff
@@ -358,7 +391,7 @@ CREATE INDEX IF NOT EXISTS idx_shared_menu_plans_shared_with_user_id ON public.s
 CREATE TABLE IF NOT EXISTS public.suggested_corrections (
     suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
-    user_id UUID NOT NULL REFERENCES public.users(user_id),
+    user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
     correction_type TEXT NOT NULL,
     suggested_value TEXT NOT NULL,
     status TEXT DEFAULT 'pending' NOT NULL,
```
```diff
@@ -378,9 +411,9 @@ CREATE INDEX IF NOT EXISTS idx_suggested_corrections_pending ON public.suggested
 -- 21. For prices submitted directly by users from in-store.
 CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
     user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-    user_id UUID NOT NULL REFERENCES public.users(user_id),
-    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
-    store_id BIGINT NOT NULL REFERENCES public.stores(store_id),
+    user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
+    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
+    store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
     price_in_cents INTEGER NOT NULL,
     photo_url TEXT,
     upvotes INTEGER DEFAULT 0 NOT NULL,
```
```diff
@@ -408,38 +441,6 @@ COMMENT ON TABLE public.unmatched_flyer_items IS 'A queue for reviewing flyer it
 CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_flyer_item_id ON public.unmatched_flyer_items(flyer_item_id);
 CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_pending ON public.unmatched_flyer_items (created_at) WHERE status = 'pending';

--- 23. Store brand information.
-CREATE TABLE IF NOT EXISTS public.brands (
-    brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-    name TEXT NOT NULL UNIQUE,
-    logo_url TEXT,
-    store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
-    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
-);
-COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
-COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
-
--- 24. For specific products, linking a master item with a brand and size.
-CREATE TABLE IF NOT EXISTS public.products (
-    product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
-    brand_id BIGINT REFERENCES public.brands(brand_id),
-    name TEXT NOT NULL,
-    description TEXT,
-    size TEXT,
-    upc_code TEXT UNIQUE,
-    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
-);
-COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
-COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
-COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
-COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
-COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
-CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
-CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
-
 -- 25. Linking table for when one flyer is valid for multiple locations.
 CREATE TABLE IF NOT EXISTS public.flyer_locations (
     flyer_id BIGINT NOT NULL REFERENCES public.flyers(flyer_id) ON DELETE CASCADE,
```
```diff
@@ -495,7 +496,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_recipes_unique_system_recipe_name ON publi
 CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
     recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
-    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
+    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
     quantity NUMERIC NOT NULL,
     unit TEXT NOT NULL,
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
```
```diff
@@ -779,7 +780,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_trips_shopping_list_id ON public.shoppin
 CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
     shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
-    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
+    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
     custom_item_name TEXT,
     quantity NUMERIC NOT NULL,
     price_paid_cents INTEGER,
```
```diff
@@ -843,7 +844,7 @@ CREATE INDEX IF NOT EXISTS idx_user_follows_following_id ON public.user_follows(
 CREATE TABLE IF NOT EXISTS public.receipts (
     receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
-    store_id BIGINT REFERENCES public.stores(store_id),
+    store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
     receipt_image_url TEXT NOT NULL,
     transaction_date TIMESTAMPTZ,
     total_amount_cents INTEGER,
```
```diff
@@ -864,8 +865,8 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
     raw_item_description TEXT NOT NULL,
     quantity NUMERIC DEFAULT 1 NOT NULL,
     price_paid_cents INTEGER NOT NULL,
-    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
-    product_id BIGINT REFERENCES public.products(product_id),
+    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
+    product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
     status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
```
```diff
@@ -126,8 +126,8 @@ CREATE TABLE IF NOT EXISTS public.flyers (
     file_name TEXT NOT NULL,
     image_url TEXT NOT NULL,
     icon_url TEXT,
-    checksum TEXT UNIQUE,
-    store_id BIGINT REFERENCES public.stores(store_id),
+    checksum TEXT UNIQUE,
+    store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
     valid_from DATE,
     valid_to DATE,
     store_address TEXT,
```
```diff
@@ -155,7 +155,7 @@ CREATE INDEX IF NOT EXISTS idx_flyers_valid_to_file_name ON public.flyers (valid
 CREATE TABLE IF NOT EXISTS public.master_grocery_items (
     master_grocery_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     name TEXT NOT NULL UNIQUE,
-    category_id BIGINT REFERENCES public.categories(category_id),
+    category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
     is_allergen BOOLEAN DEFAULT false,
     allergy_info JSONB,
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
```
```diff
@@ -178,6 +178,38 @@ CREATE TABLE IF NOT EXISTS public.user_watched_items (
 COMMENT ON TABLE public.user_watched_items IS 'A linking table that represents a user''s personal watchlist of grocery items.';
 CREATE INDEX IF NOT EXISTS idx_user_watched_items_master_item_id ON public.user_watched_items(master_item_id);

+-- 23. Store brand information. (Moved up due to dependency in flyer_items)
+CREATE TABLE IF NOT EXISTS public.brands (
+    brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    name TEXT NOT NULL UNIQUE,
+    logo_url TEXT,
+    store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
+    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
+COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
+
+-- 24. For specific products, linking a master item with a brand and size. (Moved up due to dependency in flyer_items)
+CREATE TABLE IF NOT EXISTS public.products (
+    product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
+    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
+    brand_id BIGINT REFERENCES public.brands(brand_id) ON DELETE SET NULL,
+    name TEXT NOT NULL,
+    description TEXT,
+    size TEXT,
+    upc_code TEXT UNIQUE,
+    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
+);
+COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
+COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
+COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
+COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
+COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
+CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
+CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
+
 -- 9. The 'flyer_items' table. This stores individual items from flyers.
 CREATE TABLE IF NOT EXISTS public.flyer_items (
     flyer_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
```
```diff
@@ -187,13 +219,13 @@ CREATE TABLE IF NOT EXISTS public.flyer_items (
     price_in_cents INTEGER,
     quantity_num NUMERIC,
     quantity TEXT NOT NULL,
-    category_id BIGINT REFERENCES public.categories(category_id),
+    category_id BIGINT REFERENCES public.categories(category_id) ON DELETE SET NULL,
     category_name TEXT,
     unit_price JSONB,
     view_count INTEGER DEFAULT 0 NOT NULL,
     click_count INTEGER DEFAULT 0 NOT NULL,
-    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
-    product_id BIGINT,
+    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
+    product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
     updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
 );
```
```diff
@@ -310,7 +342,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_lists_user_id ON public.shopping_lists(u
 CREATE TABLE IF NOT EXISTS public.shopping_list_items (
     shopping_list_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     shopping_list_id BIGINT NOT NULL REFERENCES public.shopping_lists(shopping_list_id) ON DELETE CASCADE,
-    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
+    master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
     custom_item_name TEXT,
     quantity NUMERIC DEFAULT 1 NOT NULL,
     is_purchased BOOLEAN DEFAULT false NOT NULL,
```
```diff
@@ -375,7 +407,7 @@ CREATE INDEX IF NOT EXISTS idx_shared_menu_plans_shared_with_user_id ON public.s
 CREATE TABLE IF NOT EXISTS public.suggested_corrections (
     suggested_correction_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     flyer_item_id BIGINT NOT NULL REFERENCES public.flyer_items(flyer_item_id) ON DELETE CASCADE,
-    user_id UUID NOT NULL REFERENCES public.users(user_id),
+    user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
     correction_type TEXT NOT NULL,
     suggested_value TEXT NOT NULL,
     status TEXT DEFAULT 'pending' NOT NULL,
```
```diff
@@ -395,9 +427,9 @@ CREATE INDEX IF NOT EXISTS idx_suggested_corrections_pending ON public.suggested
 -- 21. For prices submitted directly by users from in-store.
 CREATE TABLE IF NOT EXISTS public.user_submitted_prices (
     user_submitted_price_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-    user_id UUID NOT NULL REFERENCES public.users(user_id),
-    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
-    store_id BIGINT NOT NULL REFERENCES public.stores(store_id),
+    user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
+    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
+    store_id BIGINT NOT NULL REFERENCES public.stores(store_id) ON DELETE CASCADE,
     price_in_cents INTEGER NOT NULL,
     photo_url TEXT,
     upvotes INTEGER DEFAULT 0 NOT NULL,
```
```diff
@@ -424,38 +456,6 @@ COMMENT ON TABLE public.unmatched_flyer_items IS 'A queue for reviewing flyer it
 CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_flyer_item_id ON public.unmatched_flyer_items(flyer_item_id);
 CREATE INDEX IF NOT EXISTS idx_unmatched_flyer_items_pending ON public.unmatched_flyer_items (created_at) WHERE status = 'pending';

--- 23. Store brand information.
-CREATE TABLE IF NOT EXISTS public.brands (
-    brand_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-    name TEXT NOT NULL UNIQUE,
-    logo_url TEXT,
-    store_id BIGINT REFERENCES public.stores(store_id) ON DELETE SET NULL,
-    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
-);
-COMMENT ON TABLE public.brands IS 'Stores brand names like "Coca-Cola", "Maple Leaf", or "Kraft".';
-COMMENT ON COLUMN public.brands.store_id IS 'If this is a store-specific brand (e.g., President''s Choice), this links to the parent store.';
-
--- 24. For specific products, linking a master item with a brand and size.
-CREATE TABLE IF NOT EXISTS public.products (
-    product_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
-    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
-    brand_id BIGINT REFERENCES public.brands(brand_id),
-    name TEXT NOT NULL,
-    description TEXT,
-    size TEXT,
-    upc_code TEXT UNIQUE,
-    created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
-    updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
-);
-COMMENT ON TABLE public.products IS 'Represents a specific, sellable product, combining a generic item with a brand and size.';
-COMMENT ON COLUMN public.products.upc_code IS 'Universal Product Code, if available, for exact product matching.';
-COMMENT ON COLUMN public.products.brand_id IS 'Can be null for generic/store-brand items.';
-COMMENT ON COLUMN public.products.name IS 'Prime Raised without Antibiotics Chicken Breast.';
-COMMENT ON COLUMN public.products.size IS 'e.g., "4L", "500g".';
-CREATE INDEX IF NOT EXISTS idx_products_master_item_id ON public.products(master_item_id);
-CREATE INDEX IF NOT EXISTS idx_products_brand_id ON public.products(brand_id);
-
 -- 25. Linking table for when one flyer is valid for multiple locations.
 CREATE TABLE IF NOT EXISTS public.flyer_locations (
     flyer_id BIGINT NOT NULL REFERENCES public.flyers(flyer_id) ON DELETE CASCADE,
```
```diff
@@ -510,7 +510,7 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_recipes_unique_system_recipe_name ON publi
 CREATE TABLE IF NOT EXISTS public.recipe_ingredients (
     recipe_ingredient_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
     recipe_id BIGINT NOT NULL REFERENCES public.recipes(recipe_id) ON DELETE CASCADE,
-    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id),
+    master_item_id BIGINT NOT NULL REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE CASCADE,
     quantity NUMERIC NOT NULL,
     unit TEXT NOT NULL,
     created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
```
@@ -796,7 +796,7 @@ CREATE INDEX IF NOT EXISTS idx_shopping_trips_shopping_list_id ON public.shoppin
|
||||
CREATE TABLE IF NOT EXISTS public.shopping_trip_items (
|
||||
shopping_trip_item_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
shopping_trip_id BIGINT NOT NULL REFERENCES public.shopping_trips(shopping_trip_id) ON DELETE CASCADE,
|
||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||
custom_item_name TEXT,
|
||||
quantity NUMERIC NOT NULL,
|
||||
price_paid_cents INTEGER,
|
||||
@@ -862,7 +862,7 @@ CREATE INDEX IF NOT EXISTS idx_user_follows_following_id ON public.user_follows(
|
||||
CREATE TABLE IF NOT EXISTS public.receipts (
|
||||
receipt_id BIGINT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
|
||||
user_id UUID NOT NULL REFERENCES public.users(user_id) ON DELETE CASCADE,
|
||||
store_id BIGINT REFERENCES public.stores(store_id),
|
||||
store_id BIGINT REFERENCES public.stores(store_id) ON DELETE CASCADE,
|
||||
receipt_image_url TEXT NOT NULL,
|
||||
transaction_date TIMESTAMPTZ,
|
||||
total_amount_cents INTEGER,
|
||||
@@ -883,8 +883,8 @@ CREATE TABLE IF NOT EXISTS public.receipt_items (
|
||||
raw_item_description TEXT NOT NULL,
|
||||
quantity NUMERIC DEFAULT 1 NOT NULL,
|
||||
price_paid_cents INTEGER NOT NULL,
|
||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id),
|
||||
product_id BIGINT REFERENCES public.products(product_id),
|
||||
master_item_id BIGINT REFERENCES public.master_grocery_items(master_grocery_item_id) ON DELETE SET NULL,
|
||||
product_id BIGINT REFERENCES public.products(product_id) ON DELETE SET NULL,
|
||||
status TEXT DEFAULT 'unmatched' NOT NULL CHECK (status IN ('unmatched', 'matched', 'needs_review', 'ignored')),
|
||||
created_at TIMESTAMPTZ DEFAULT now() NOT NULL,
|
||||
updated_at TIMESTAMPTZ DEFAULT now() NOT NULL
|
||||
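
The net effect of these schema hunks is that every user-owned row now carries an explicit ON DELETE policy: child records such as user_submitted_prices and receipts disappear with the user (CASCADE), while lookup references like receipt_items.master_item_id are nulled out (SET NULL). A minimal sketch of what this buys at the application layer, using node-postgres; the table and column names come from the schema above, but the pool wiring is an assumption:

```ts
import { Pool } from 'pg';

const pool = new Pool(); // connection settings assumed to come from PG* env vars

// Deleting a user no longer requires manually clearing child tables first;
// the ON DELETE CASCADE constraints above remove them inside one statement.
async function deleteUserAndVerify(userId: string): Promise<void> {
  await pool.query('DELETE FROM public.users WHERE user_id = $1', [userId]);

  // Rows in user_submitted_prices referencing the user are gone as well.
  const res = await pool.query(
    'SELECT COUNT(*)::int AS n FROM public.user_submitted_prices WHERE user_id = $1',
    [userId],
  );
  console.assert(res.rows[0].n === 0, 'cascade should have removed submitted prices');
}
```
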
@@ -2128,11 +2128,61 @@ DROP FUNCTION IF EXISTS public.fork_recipe(UUID, BIGINT);

CREATE OR REPLACE FUNCTION public.fork_recipe(p_user_id UUID, p_original_recipe_id BIGINT)
RETURNS SETOF public.recipes
LANGUAGE sql
LANGUAGE plpgsql
SECURITY INVOKER
AS $$
-- The entire forking logic is now encapsulated in a single, atomic database function.
SELECT * FROM public.fork_recipe(p_user_id, p_original_recipe_id);
DECLARE
new_recipe_id BIGINT;
BEGIN
-- 1. Create a copy of the recipe, linking it to the new user and the original recipe.
INSERT INTO public.recipes (
user_id,
original_recipe_id,
name,
description,
instructions,
prep_time_minutes,
cook_time_minutes,
servings,
photo_url,
calories_per_serving,
protein_grams,
fat_grams,
carb_grams,
status -- Forked recipes should be private by default
)
SELECT
p_user_id,
p_original_recipe_id,
original.name || ' (Fork)', -- Append '(Fork)' to distinguish it
original.description,
original.instructions,
original.prep_time_minutes,
original.cook_time_minutes,
original.servings,
original.photo_url,
original.calories_per_serving,
original.protein_grams,
original.fat_grams,
original.carb_grams,
'private'
FROM public.recipes AS original
WHERE original.recipe_id = p_original_recipe_id
RETURNING recipe_id INTO new_recipe_id;

-- If the original recipe didn't exist, new_recipe_id will be null.
IF new_recipe_id IS NULL THEN
RETURN;
END IF;

-- 2. Copy all ingredients, tags, and appliances from the original recipe to the new one.
INSERT INTO public.recipe_ingredients (recipe_id, master_item_id, quantity, unit) SELECT new_recipe_id, master_item_id, quantity, unit FROM public.recipe_ingredients WHERE recipe_id = p_original_recipe_id;
INSERT INTO public.recipe_tags (recipe_id, tag_id) SELECT new_recipe_id, tag_id FROM public.recipe_tags WHERE recipe_id = p_original_recipe_id;
INSERT INTO public.recipe_appliances (recipe_id, appliance_id) SELECT new_recipe_id, appliance_id FROM public.recipe_appliances WHERE recipe_id = p_original_recipe_id;

-- 3. Return the newly created recipe record.
RETURN QUERY SELECT * FROM public.recipes WHERE recipe_id = new_recipe_id;
END;
$$;
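
Since fork_recipe is now a single atomic plpgsql function, a caller gets the copied recipe, its ingredient/tag/appliance rows, and the private status in one round trip. A hedged sketch of how a service layer might invoke it through node-postgres (the pool wiring is assumed):

```ts
import { Pool } from 'pg';

const pool = new Pool();

// Returns the newly created recipe row, or undefined when the original id
// does not exist (the function RETURNs early and the result set is empty).
async function forkRecipe(userId: string, originalRecipeId: number) {
  const res = await pool.query('SELECT * FROM public.fork_recipe($1, $2)', [
    userId,
    originalRecipeId,
  ]);
  return res.rows[0]; // undefined if the original recipe was missing
}
```
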
202
src/db/seed.ts
@@ -6,10 +6,11 @@
* DO NOT run this on a production database.
*/

import { Pool } from 'pg';
import { Pool, PoolClient } from 'pg';
import fs from 'node:fs/promises';
import path from 'node:path';
import bcrypt from 'bcrypt';
import { logger } from '../services/logger.server';
import { CATEGORIES } from '../types';

const pool = new Pool({
user: process.env.DB_USER,
@@ -20,81 +21,55 @@ const pool = new Pool({
});

async function main() {
// Declare client outside the try block so it's accessible in the finally block.
let client;
let client: PoolClient | undefined;

try {
client = await pool.connect();
logger.info('Connected to the database for seeding.');
await client.query('BEGIN');

// 1. Clean the database
logger.info('--- Wiping existing data... ---');
// Using TRUNCATE ... RESTART IDENTITY CASCADE is a powerful way to clean all tables
// and reset auto-incrementing keys, while respecting foreign key relationships.
const tablesRes = await client.query(`
SELECT tablename
FROM pg_tables
WHERE schemaname = 'public'
-- Exclude PostGIS system tables from truncation to avoid permission errors.
AND tablename NOT IN ('spatial_ref_sys', 'geometry_columns')
`);
const tables = tablesRes.rows.map((row) => `"${row.tablename}"`).join(', ');
if (tables) {
await client.query(`TRUNCATE ${tables} RESTART IDENTITY CASCADE`);
logger.info('All tables in public schema have been truncated.');
}
// 1. Clean the database by dropping and recreating the schema
logger.info('--- Wiping and rebuilding schema... ---');
const dropScriptPath = path.resolve(process.cwd(), 'sql/drop_tables.sql');
const dropSql = await fs.readFile(dropScriptPath, 'utf-8');
await client.query(dropSql);
logger.info('All tables dropped successfully.');

// 2. Seed Categories
logger.info('--- Seeding Categories... ---');
const categoryQuery = `INSERT INTO public.categories (name) VALUES ${CATEGORIES.map((_, i) => `($${i + 1})`).join(', ')} RETURNING category_id, name`;
const seededCategories = (
await client.query<{ category_id: number; name: string }>(categoryQuery, CATEGORIES)
).rows;
const categoryMap = new Map(seededCategories.map((c) => [c.name, c.category_id]));
logger.info(`Seeded ${seededCategories.length} categories.`);
const schemaScriptPath = path.resolve(process.cwd(), 'sql/master_schema_rollup.sql');
const schemaSql = await fs.readFile(schemaScriptPath, 'utf-8');
await client.query(schemaSql);
logger.info(
'Schema rebuilt and static data seeded successfully from master_schema_rollup.sql.',
);

// 3. Seed Stores
// 2. Seed Additional Stores (if any beyond what's in the rollup)
logger.info('--- Seeding Stores... ---');
const stores = ['Safeway', 'No Frills', 'Costco', 'Superstore'];
const storeQuery = `INSERT INTO public.stores (name) VALUES ${stores.map((_, i) => `($${i + 1})`).join(', ')} RETURNING store_id, name`;
const seededStores = (
await client.query<{ store_id: number; name: string }>(storeQuery, stores)
const storeQuery = `INSERT INTO public.stores (name) VALUES ${stores.map((_, i) => `($${i + 1})`).join(', ')} ON CONFLICT (name) DO NOTHING RETURNING store_id, name`;
await client.query<{ store_id: number; name: string }>(storeQuery, stores);
const allStores = (
await client.query<{ store_id: number; name: string }>(
'SELECT store_id, name FROM public.stores',
)
).rows;
const storeMap = new Map(seededStores.map((s) => [s.name, s.store_id]));
logger.info(`Seeded ${seededStores.length} stores.`);

// 4. Seed Master Grocery Items
logger.info('--- Seeding Master Grocery Items... ---');
const masterItems = [
{ name: 'Chicken Breast, Boneless Skinless', category: 'Meat & Seafood' },
{ name: 'Ground Beef, Lean', category: 'Meat & Seafood' },
{ name: 'Avocado', category: 'Fruits & Vegetables' },
{ name: 'Bananas', category: 'Fruits & Vegetables' },
{ name: 'Broccoli', category: 'Fruits & Vegetables' },
{ name: 'Cheddar Cheese, Block', category: 'Dairy & Eggs' },
{ name: 'Milk, 2%', category: 'Dairy & Eggs' },
{ name: 'Eggs, Large', category: 'Dairy & Eggs' },
{ name: 'Whole Wheat Bread', category: 'Bakery & Bread' },
{ name: 'Pasta, Spaghetti', category: 'Pantry & Dry Goods' },
{ name: 'Canned Tomatoes, Diced', category: 'Canned Goods' },
{ name: 'Coca-Cola, 12-pack', category: 'Beverages' },
{ name: 'Frozen Pizza', category: 'Frozen Foods' },
{ name: 'Paper Towels', category: 'Household & Cleaning' },
];
const masterItemValues = masterItems
.map((item) => `('${item.name.replace(/'/g, "''")}', ${categoryMap.get(item.category)})`)
.join(', ');
const masterItemQuery = `INSERT INTO public.master_grocery_items (name, category_id) VALUES ${masterItemValues} RETURNING master_grocery_item_id, name`;
const seededMasterItems = (
await client.query<{ master_grocery_item_id: number; name: string }>(masterItemQuery)
).rows;
const masterItemMap = new Map(
seededMasterItems.map((item) => [item.name, item.master_grocery_item_id]),
const storeMap = new Map(
allStores.map((s: { name: string; store_id: number }) => [s.name, s.store_id]),
);
logger.info(`Seeded ${seededMasterItems.length} master grocery items.`);
logger.info(`Seeded/verified ${allStores.length} total stores.`);

// 5. Seed Users & Profiles
// Fetch maps for items seeded by the master rollup script
const masterItemMap = new Map(
(
await client.query<{ master_grocery_item_id: number; name: string }>(
'SELECT master_grocery_item_id, name FROM public.master_grocery_items',
)
).rows.map((item: { name: string; master_grocery_item_id: number }) => [
item.name,
item.master_grocery_item_id,
]),
);

// 3. Seed Users & Profiles
logger.info('--- Seeding Users & Profiles... ---');
const saltRounds = 10;
const adminPassHash = await bcrypt.hash('adminpass', saltRounds);
@@ -126,7 +101,7 @@ async function main() {
const userId = userRes.rows[0].user_id;
logger.info('Seeded regular user (user@example.com / userpass)');

// 6. Seed a Flyer
// 4. Seed a Flyer
logger.info('--- Seeding a Sample Flyer... ---');
const today = new Date();
const validFrom = new Date(today);
@@ -146,29 +121,29 @@ async function main() {
const flyerId = flyerRes.rows[0].flyer_id;
logger.info(`Seeded flyer for Safeway (ID: ${flyerId}).`);

// 7. Seed Flyer Items
// 5. Seed Flyer Items
logger.info('--- Seeding Flyer Items... ---');
const flyerItems = [
{
name: 'Chicken Breast, Boneless Skinless',
name: 'chicken breast',
price_display: '$3.99 /lb',
price_in_cents: 399,
quantity: 'per lb',
master_item_id: masterItemMap.get('Chicken Breast, Boneless Skinless'),
master_item_id: masterItemMap.get('chicken breast'),
},
{
name: 'Avocado',
name: 'avocados',
price_display: '2 for $5.00',
price_in_cents: 250,
quantity: 'each',
master_item_id: masterItemMap.get('Avocado'),
master_item_id: masterItemMap.get('avocados'),
},
{
name: 'Coca-Cola 12-pack',
name: 'soda',
price_display: '$6.99',
price_in_cents: 699,
quantity: '12x355ml',
master_item_id: masterItemMap.get('Coca-Cola, 12-pack'),
master_item_id: masterItemMap.get('soda'),
},
{
name: 'Unmatched Sample Item',
@@ -194,12 +169,12 @@ async function main() {
}
logger.info(`Seeded ${flyerItems.length} items for the Safeway flyer.`);

// 8. Seed Watched Items for the user
// 6. Seed Watched Items for the user
logger.info('--- Seeding Watched Items... ---');
const watchedItemIds = [
masterItemMap.get('Chicken Breast, Boneless Skinless'),
masterItemMap.get('Avocado'),
masterItemMap.get('Ground Beef, Lean'),
masterItemMap.get('chicken breast'),
masterItemMap.get('avocados'),
masterItemMap.get('ground beef'),
];
for (const itemId of watchedItemIds) {
if (itemId) {
@@ -211,7 +186,7 @@ async function main() {
}
logger.info(`Seeded ${watchedItemIds.length} watched items for Test User.`);

// 9. Seed a Shopping List
// 7. Seed a Shopping List
logger.info('--- Seeding a Shopping List... ---');
const listRes = await client.query<{ shopping_list_id: number }>(
'INSERT INTO public.shopping_lists (user_id, name) VALUES ($1, $2) RETURNING shopping_list_id',
@@ -220,8 +195,8 @@ async function main() {
const listId = listRes.rows[0].shopping_list_id;

const shoppingListItems = [
{ master_item_id: masterItemMap.get('Milk, 2%'), quantity: 1 },
{ master_item_id: masterItemMap.get('Eggs, Large'), quantity: 1 },
{ master_item_id: masterItemMap.get('milk'), quantity: 1 },
{ master_item_id: masterItemMap.get('eggs'), quantity: 1 },
{ custom_item_name: 'Specialty Hot Sauce', quantity: 1 },
];

@@ -235,75 +210,6 @@ async function main() {
`Seeded shopping list "Weekly Groceries" with ${shoppingListItems.length} items for Test User.`,
);

// 10. Seed Brands
logger.info('--- Seeding Brands... ---');
const brands = [
'Coca-Cola',
'Kraft',
'Maple Leaf',
"Dempster's",
'No Name',
"President's Choice",
];
const brandQuery = `INSERT INTO public.brands (name) VALUES ${brands.map((_, i) => `($${i + 1})`).join(', ')} ON CONFLICT (name) DO NOTHING`;
await client.query(brandQuery, brands);
logger.info(`Seeded ${brands.length} brands.`);

// Link store-specific brands
const loblawsId = storeMap.get('Loblaws');
if (loblawsId) {
await client.query('UPDATE public.brands SET store_id = $1 WHERE name = $2 OR name = $3', [
loblawsId,
'No Name',
"President's Choice",
]);
logger.info('Linked store brands to Loblaws.');
}

// 11. Seed Recipes
logger.info('--- Seeding Recipes... ---');
const recipes = [
{
name: 'Simple Chicken and Rice',
description: 'A quick and healthy weeknight meal.',
instructions: '1. Cook rice. 2. Cook chicken. 3. Combine.',
prep: 10,
cook: 20,
servings: 4,
},
{
name: 'Classic Spaghetti Bolognese',
description: 'A rich and hearty meat sauce.',
instructions: '1. Brown beef. 2. Add sauce. 3. Simmer.',
prep: 15,
cook: 45,
servings: 6,
},
{
name: 'Vegetable Stir-fry',
description: 'A fast and flavorful vegetarian meal.',
instructions: '1. Chop veggies. 2. Stir-fry. 3. Add sauce.',
prep: 10,
cook: 10,
servings: 3,
},
];
for (const recipe of recipes) {
await client.query(
`INSERT INTO public.recipes (name, description, instructions, prep_time_minutes, cook_time_minutes, servings, status)
VALUES ($1, $2, $3, $4, $5, $6, 'public') ON CONFLICT (name) WHERE user_id IS NULL DO NOTHING`,
[
recipe.name,
recipe.description,
recipe.instructions,
recipe.prep,
recipe.cook,
recipe.servings,
],
);
}
logger.info(`Seeded ${recipes.length} recipes.`);

// --- SEED SCRIPT DEBUG LOGGING ---
// Corrected the query to be unambiguous by specifying the table alias for each column.
// `id` and `email` come from the `users` table (u), and `role` comes from the `profiles` table (p).
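
The reworked seeding strategy leans on two ideas: rebuild the schema from the rollup script, then make every supplementary insert idempotent with ON CONFLICT (name) DO NOTHING followed by a re-select to build the lookup maps, so rows seeded earlier by master_schema_rollup.sql are covered too. A condensed sketch of that pattern (table name from the script above; the client is assumed to be an open pg PoolClient):

```ts
import type { PoolClient } from 'pg';

// Insert-if-missing, then read everything back so the map also includes rows
// that already existed before this run.
async function seedStores(client: PoolClient, names: string[]): Promise<Map<string, number>> {
  const placeholders = names.map((_, i) => `($${i + 1})`).join(', ');
  await client.query(
    `INSERT INTO public.stores (name) VALUES ${placeholders} ON CONFLICT (name) DO NOTHING`,
    names,
  );
  const all = await client.query<{ store_id: number; name: string }>(
    'SELECT store_id, name FROM public.stores',
  );
  return new Map(all.rows.map((s) => [s.name, s.store_id]));
}
```
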
@@ -1,4 +1,4 @@
// src/components/PriceHistoryChart.tsx
// src/features/charts/PriceHistoryChart.tsx
import React, { useState, useEffect, useMemo } from 'react';
import {
LineChart,
@@ -142,7 +142,7 @@ export const PriceHistoryChart: React.FC = () => {
const renderContent = () => {
if (isLoading || isLoadingUserData) {
return (
<div role="status" className="flex justify-center items-center h-full min-h-[200px]">
<div role="status" className="flex justify-center items-center h-full min-h-[50px]">
<LoadingSpinner /> <span className="ml-2">Loading Price History...</span>
</div>
);
@@ -198,7 +198,12 @@ export const PriceHistoryChart: React.FC = () => {
borderRadius: '0.5rem',
}}
labelStyle={{ color: '#F9FAFB' }}
formatter={(value: number) => `$${(value / 100).toFixed(2)}`}
formatter={(value: number | undefined) => {
if (typeof value === 'number') {
return [`$${(value / 100).toFixed(2)}`];
}
return [null];
}}
/>
<Legend wrapperStyle={{ fontSize: '12px' }} />
{availableItems.map((item, index) => (
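
The formatter change matters because Recharts may invoke a Tooltip formatter with undefined while a series has gaps or is still loading; returning a tuple keeps the value/name pairing intact instead of rendering "$NaN". The same guard, extracted as a standalone sketch (the cents-to-dollars convention comes from the component above):

```ts
// Guarded tooltip formatter: format cents as dollars, render nothing for gaps.
const formatCents = (value: number | undefined): [string] | [null] => {
  if (typeof value === 'number') {
    return [`$${(value / 100).toFixed(2)}`];
  }
  return [null];
};

console.log(formatCents(399)); // ['$3.99']
console.log(formatCents(undefined)); // [null]
```
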
@@ -2,8 +2,8 @@
import React, { ReactNode } from 'react';
import { renderHook, waitFor } from '@testing-library/react';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { useUserData } from '../hooks/useUserData';
import { useAuth } from '../hooks/useAuth';
import { useUserData } from './useUserData';
import { useAuth } from './useAuth';
import { UserDataProvider } from '../providers/UserDataProvider';
import { useApiOnMount } from './useApiOnMount';
import type { UserProfile } from '../types';
@@ -86,12 +86,15 @@ describe('AI Routes (/api/ai)', () => {
// Arrange
const mkdirError = new Error('EACCES: permission denied');
vi.resetModules(); // Reset modules to re-run top-level code
vi.doMock('node:fs', () => ({
...fs,
mkdirSync: vi.fn().mockImplementation(() => {
throw mkdirError;
}),
}));
vi.doMock('node:fs', () => {
const mockFs = {
...fs,
mkdirSync: vi.fn().mockImplementation(() => {
throw mkdirError;
}),
};
return { ...mockFs, default: mockFs };
});
const { logger } = await import('../services/logger.server');

// Act: Dynamically import the router to trigger the mkdirSync call
@@ -617,6 +620,14 @@ describe('AI Routes (/api/ai)', () => {
expect(response.body.text).toContain('server-generated quick insight');
});

it('POST /quick-insights should accept items with "item" property instead of "name"', async () => {
const response = await supertest(app)
.post('/api/ai/quick-insights')
.send({ items: [{ item: 'test item' }] });

expect(response.status).toBe(200);
});

it('POST /quick-insights should return 500 on a generic error', async () => {
// To hit the catch block, we can simulate an error by making the logger throw.
vi.mocked(mockLogger.info).mockImplementationOnce(() => {
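
The reworked fs mock addresses a common Vitest/ESM pitfall: code that does `import fs from 'node:fs'` reads the module's default export, so a factory that only spreads the named exports leaves `default` undefined. The shape used above, reduced to its essentials:

```ts
import { vi } from 'vitest';
import * as fs from 'node:fs';

const mkdirError = new Error('EACCES: permission denied');

vi.doMock('node:fs', () => {
  const mockFs = {
    ...fs,
    mkdirSync: vi.fn().mockImplementation(() => {
      throw mkdirError; // the failure under test
    }),
  };
  // Expose the same object as both the named exports and the default export,
  // so `import fs from 'node:fs'` and `import * as fs` behave identically.
  return { ...mockFs, default: mockFs };
});
```
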
@@ -2,7 +2,7 @@
import { Router, Request, Response, NextFunction } from 'express';
import multer from 'multer';
import path from 'path';
import fs from 'fs';
import fs from 'node:fs';
import { z } from 'zod';
import passport from './passport.routes';
import { optionalAuth } from './passport.routes';
@@ -88,10 +88,17 @@ const rescanAreaSchema = z.object({

const flyerItemForAnalysisSchema = z
.object({
name: requiredString('Item name is required.'),
// Allow other properties to pass through without validation
item: z.string().nullish(),
name: z.string().nullish(),
})
.passthrough();
.passthrough()
.refine(
(data) =>
(data.item && data.item.trim().length > 0) || (data.name && data.name.trim().length > 0),
{
message: "Item identifier is required (either 'item' or 'name').",
},
);

const insightsSchema = z.object({
body: z.object({
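
The schema change relaxes the hard `name` requirement into an either/or rule: `.passthrough()` keeps unknown keys, and `.refine()` enforces that at least one of `item` or `name` is a non-blank string. A quick sketch of the resulting accept/reject behaviour (the schema is copied from the hunk above):

```ts
import { z } from 'zod';

const flyerItemForAnalysisSchema = z
  .object({ item: z.string().nullish(), name: z.string().nullish() })
  .passthrough()
  .refine(
    (data) =>
      (data.item && data.item.trim().length > 0) || (data.name && data.name.trim().length > 0),
    { message: "Item identifier is required (either 'item' or 'name')." },
  );

console.log(flyerItemForAnalysisSchema.safeParse({ item: 'bananas' }).success); // true
console.log(flyerItemForAnalysisSchema.safeParse({ name: 'bananas', extra: 1 }).success); // true (extra key passes through)
console.log(flyerItemForAnalysisSchema.safeParse({ name: '   ' }).success); // false (blank identifier)
```
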
@@ -297,7 +297,6 @@ describe('Auth Routes (/api/auth)', () => {
// The API now returns a nested UserProfile object
expect(response.body.userprofile).toEqual(
expect.objectContaining({
user_id: 'user-123',
user: expect.objectContaining({
user_id: 'user-123',
email: loginCredentials.email,
@@ -618,7 +617,9 @@ describe('Auth Routes (/api/auth)', () => {
const setCookieHeader = response.headers['set-cookie'];
expect(setCookieHeader).toBeDefined();
expect(setCookieHeader[0]).toContain('refreshToken=;');
expect(setCookieHeader[0]).toContain('Expires=Thu, 01 Jan 1970');
// Check for Max-Age=0, which is the modern way to expire a cookie.
// The 'Expires' attribute is a fallback and its exact value can be inconsistent.
expect(setCookieHeader[0]).toContain('Max-Age=0');
});

it('should still return 200 OK even if deleting the refresh token from DB fails', async () => {

@@ -381,7 +381,7 @@ router.post('/logout', async (req: Request, res: Response) => {
// Instruct the browser to clear the cookie by setting its expiration to the past.
res.cookie('refreshToken', '', {
httpOnly: true,
expires: new Date(0),
maxAge: 0, // Use maxAge for modern compatibility; Express sets 'Expires' as a fallback.
secure: process.env.NODE_ENV === 'production',
});
res.status(200).json({ message: 'Logged out successfully.' });
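
Setting `maxAge: 0` is belt-and-braces: Max-Age wins in modern browsers, and Express derives the Expires attribute from maxAge when it is present, which is exactly why the test now asserts on `Max-Age=0` rather than a specific Expires date. A minimal sketch of the pattern (standalone Express app; the route path here is illustrative):

```ts
import express from 'express';

const app = express();

// Hypothetical logout handler showing the cookie-clearing pattern from above.
app.post('/logout', (_req, res) => {
  res.cookie('refreshToken', '', {
    httpOnly: true,
    maxAge: 0, // Max-Age=0 deterministically expires the cookie
    secure: process.env.NODE_ENV === 'production',
  });
  res.status(200).json({ message: 'Logged out successfully.' });
});
```
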
@@ -1,205 +1,253 @@
// src/routes/system.routes.test.ts
import { describe, it, expect, vi, beforeEach, afterAll, beforeAll } from 'vitest';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import supertest from 'supertest';
import express, { Express } from 'express';
import { type ExecException, type ExecOptions } from 'child_process';
import systemRouter from './system.routes';
import { exec, type ExecException, type ExecOptions } from 'child_process';
import { geocodingService } from '../services/geocodingService.server';
import { mockLogger } from '../tests/utils/mockLogger';
import { createTestApp } from '../tests/utils/createTestApp';

// -----------------------------------------------------------------------------
// 1. Defensively Mock Modules BEFORE Imports
// -----------------------------------------------------------------------------
// FIX: Use the simple factory pattern for child_process to avoid default export issues
vi.mock('child_process', () => {
const mockExec = vi.fn((command, callback) => {
if (typeof callback === 'function') {
callback(null, 'PM2 OK', '');
}
return { unref: () => {} };
});

// Mock Logger to prevent console noise or memory leaks from logging buffers
vi.mock('../services/logger.server', () => ({
logger: {
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
child: () => ({ info: vi.fn(), warn: vi.fn(), error: vi.fn() }), // Handle child loggers
},
}));
return {
default: { exec: mockExec },
exec: mockExec,
};
});

// Mock Geocoding Service
// 2. Mock Geocoding
vi.mock('../services/geocodingService.server', () => ({
geocodingService: {
geocodeAddress: vi.fn(),
},
}));

// Mock child_process with a safe factory
// We define the mock implementation here but control its behavior inside tests via `vi.mocked(exec)`
const mockExecFn = vi.fn();
vi.mock('child_process', () => {
return {
default: { exec: (...args: any[]) => mockExecFn(...args) },
exec: (...args: any[]) => mockExecFn(...args),
};
});

// Import the router AFTER mocks
import systemRouter from './system.routes';

// -----------------------------------------------------------------------------
// 2. Test Suite Setup
// -----------------------------------------------------------------------------
// 3. Mock Logger
vi.mock('../services/logger.server', () => ({
logger: {
info: vi.fn(),
debug: vi.fn(),
error: vi.fn(),
warn: vi.fn(),
child: vi.fn().mockReturnThis(),
},
}));

describe('System Routes (/api/system)', () => {
let app: Express;
const app = createTestApp({ router: systemRouter, basePath: '/api/system' });

beforeAll(() => {
// Create a bare-bones express app just for these tests.
// This avoids overhead/side-effects from the real `createTestApp`.
app = express();
app.use(express.json());

// Mount the router
app.use('/api/system', systemRouter);

// Simple error handler for the test app
app.use((err: any, req: express.Request, res: express.Response, next: express.NextFunction) => {
// console.error('[TEST APP ERROR]', err.message); // Uncomment only if debugging extremely deep errors
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
});
// Add a basic error handler to capture errors passed to next(err) and return JSON.
app.use((err: any, req: any, res: any, next: any) => {
res.status(err.status || 500).json({ message: err.message, errors: err.errors });
});

beforeEach(() => {
// We cast here to get type-safe access to mock functions like .mockImplementation
vi.clearAllMocks();
vi.resetAllMocks(); // Aggressively reset to clear memory

// Default safe implementation for exec to prevent crashes if a test forgets to mock it
mockExecFn.mockImplementation((cmd, ...args) => {
const cb = args.find((arg) => typeof arg === 'function');
if (cb) {
// Run on next loop to simulate async and avoid Zalgo
setTimeout(() => cb(null, '', ''), 0);
}
return { unref: () => {} };
});
});

afterAll(() => {
vi.restoreAllMocks();
});

// ---------------------------------------------------------------------------
// 3. Helper for setting up specific exec scenarios
// ---------------------------------------------------------------------------
const setupExecMock = (error: ExecException | null, stdout: string, stderr: string) => {
mockExecFn.mockImplementation(
(
command: string,
optionsOrCb?: ExecOptions | ((err: any, out: string, errOut: string) => void) | null,
cb?: ((err: any, out: string, errOut: string) => void) | null,
) => {
let callback: any = cb;
if (typeof optionsOrCb === 'function') {
callback = optionsOrCb;
}

// Defensive: ensure callback exists
if (!callback) return { unref: () => {} };

// Execute callback asynchronously to mimic real IO
setTimeout(() => {
try {
callback(error, stdout, stderr);
} catch (e) {
console.error('[TEST CRITICAL] Error inside mock callback:', e);
}
}, 1);

return { unref: () => {} };
},
);
};

// ---------------------------------------------------------------------------
// 4. GET /pm2-status Tests
// ---------------------------------------------------------------------------
describe('GET /pm2-status', () => {
const testCases = [
{
description: 'should return success: true when pm2 process is online',
mock: {
error: null,
stdout: `
┌─ PM2 info ────────────────┐
│ status │ online │
└───────────┴───────────┘
`,
stderr: '',
},
expectedStatus: 200,
expectedBody: { success: true, message: 'Application is online and running under PM2.' },
},
{
description: 'should return success: false when pm2 process is stopped',
mock: { error: null, stdout: '│ status │ stopped │', stderr: '' },
expectedStatus: 200,
expectedBody: { success: false, message: 'Application process exists but is not online.' },
},
{
description: 'should return success: false when pm2 process does not exist',
mock: {
error: Object.assign(new Error('Command failed'), { code: 1 }),
stdout: "[PM2][ERROR] Process or Namespace flyer-crawler-api doesn't exist",
stderr: '',
},
expectedStatus: 200,
expectedBody: { success: false, message: 'Application process is not running under PM2.' },
},
{
description: 'should return 500 if pm2 command produces stderr output',
mock: { error: null, stdout: 'Some stdout', stderr: 'A non-fatal warning occurred.' },
expectedStatus: 500,
expectedBody: { message: 'PM2 command produced an error: A non-fatal warning occurred.' },
},
{
description: 'should return 500 on a generic exec error',
mock: { error: new Error('System error'), stdout: '', stderr: 'stderr output' },
expectedStatus: 500,
expectedBody: { message: 'System error' },
},
];
it('should return success: true when pm2 process is online', async () => {
// Arrange: Simulate a successful `pm2 describe` output for an online process.
const pm2OnlineOutput = `
┌─ PM2 info ────────────────┐
│ status │ online │
└───────────┴───────────┘
`;

it.each(testCases)('$description', async ({ mock, expectedStatus, expectedBody }) => {
// Arrange
setupExecMock(mock.error as ExecException | null, mock.stdout, mock.stderr);
type ExecCallback = (error: ExecException | null, stdout: string, stderr: string) => void;

// A robust mock for `exec` that handles its multiple overloads.
// This avoids the complex and error-prone `...args` signature.
vi.mocked(exec).mockImplementation(
(
command: string,
options?: ExecOptions | ExecCallback | null,
callback?: ExecCallback | null,
) => {
// The actual callback can be the second or third argument.
const actualCallback = (
typeof options === 'function' ? options : callback
) as ExecCallback;
if (actualCallback) {
actualCallback(null, pm2OnlineOutput, '');
}
// Return a minimal object that satisfies the ChildProcess type for .unref()
return { unref: () => {} } as ReturnType<typeof exec>;
},
);

// Act
const response = await supertest(app).get('/api/system/pm2-status');

// Assert
expect(response.status).toBe(expectedStatus);
expect(response.body).toEqual(expectedBody);
expect(response.status).toBe(200);
expect(response.body).toEqual({
success: true,
message: 'Application is online and running under PM2.',
});
});

it('should return success: false when pm2 process is stopped or errored', async () => {
const pm2StoppedOutput = `│ status │ stopped │`;

vi.mocked(exec).mockImplementation(
(
command: string,
options?:
| ExecOptions
| ((error: ExecException | null, stdout: string, stderr: string) => void)
| null,
callback?: ((error: ExecException | null, stdout: string, stderr: string) => void) | null,
) => {
const actualCallback = (typeof options === 'function' ? options : callback) as (
error: ExecException | null,
stdout: string,
stderr: string,
) => void;
if (actualCallback) {
actualCallback(null, pm2StoppedOutput, '');
}
return { unref: () => {} } as ReturnType<typeof exec>;
},
);

const response = await supertest(app).get('/api/system/pm2-status');

// Assert
expect(response.status).toBe(200);
expect(response.body.success).toBe(false);
});

it('should return success: false when pm2 process does not exist', async () => {
// Arrange: Simulate `pm2 describe` failing because the process isn't found.
const processNotFoundOutput =
"[PM2][ERROR] Process or Namespace flyer-crawler-api doesn't exist";
const processNotFoundError = new Error(
'Command failed: pm2 describe flyer-crawler-api',
) as ExecException;
processNotFoundError.code = 1;

vi.mocked(exec).mockImplementation(
(
command: string,
options?:
| ExecOptions
| ((error: ExecException | null, stdout: string, stderr: string) => void)
| null,
callback?: ((error: ExecException | null, stdout: string, stderr: string) => void) | null,
) => {
const actualCallback = (typeof options === 'function' ? options : callback) as (
error: ExecException | null,
stdout: string,
stderr: string,
) => void;
if (actualCallback) {
actualCallback(processNotFoundError, processNotFoundOutput, '');
}
return { unref: () => {} } as ReturnType<typeof exec>;
},
);

// Act
const response = await supertest(app).get('/api/system/pm2-status');

// Assert
expect(response.status).toBe(200);
expect(response.body).toEqual({
success: false,
message: 'Application process is not running under PM2.',
});
});

it('should return 500 if pm2 command produces stderr output', async () => {
// Arrange: Simulate a successful exit code but with content in stderr.
const stderrOutput = 'A non-fatal warning occurred.';

vi.mocked(exec).mockImplementation(
(
command: string,
options?:
| ExecOptions
| ((error: ExecException | null, stdout: string, stderr: string) => void)
| null,
callback?: ((error: ExecException | null, stdout: string, stderr: string) => void) | null,
) => {
const actualCallback = (typeof options === 'function' ? options : callback) as (
error: ExecException | null,
stdout: string,
stderr: string,
) => void;
if (actualCallback) {
actualCallback(null, 'Some stdout', stderrOutput);
}
return { unref: () => {} } as ReturnType<typeof exec>;
},
);

const response = await supertest(app).get('/api/system/pm2-status');
expect(response.status).toBe(500);
expect(response.body.message).toBe(`PM2 command produced an error: ${stderrOutput}`);
});

it('should return 500 on a generic exec error', async () => {
vi.mocked(exec).mockImplementation(
(
command: string,
options?:
| ExecOptions
| ((error: ExecException | null, stdout: string, stderr: string) => void)
| null,
callback?: ((error: ExecException | null, stdout: string, stderr: string) => void) | null,
) => {
const actualCallback = (typeof options === 'function' ? options : callback) as (
error: ExecException | null,
stdout: string,
stderr: string,
) => void;
if (actualCallback) {
actualCallback(new Error('System error') as ExecException, '', 'stderr output');
}
return { unref: () => {} } as ReturnType<typeof exec>;
},
);

// Act
const response = await supertest(app).get('/api/system/pm2-status');

// Assert
expect(response.status).toBe(500);
expect(response.body.message).toBe('System error');
});
});

// ---------------------------------------------------------------------------
// 5. POST /geocode Tests
// ---------------------------------------------------------------------------
describe('POST /geocode', () => {
it('should return geocoded coordinates for a valid address', async () => {
// Arrange
const mockCoordinates = { lat: 48.4284, lng: -123.3656 };
vi.mocked(geocodingService.geocodeAddress).mockResolvedValue(mockCoordinates);

// Act
const response = await supertest(app)
.post('/api/system/geocode')
.send({ address: 'Victoria, BC' });

// Assert
expect(response.status).toBe(200);
expect(response.body).toEqual(mockCoordinates);
});

it('should return 404 if the address cannot be geocoded', async () => {
vi.mocked(geocodingService.geocodeAddress).mockResolvedValue(null);

const response = await supertest(app)
.post('/api/system/geocode')
.send({ address: 'Invalid Address' });

expect(response.status).toBe(404);
expect(response.body.message).toBe('Could not geocode the provided address.');
});
@@ -207,11 +255,9 @@ describe('System Routes (/api/system)', () => {
it('should return 500 if the geocoding service throws an error', async () => {
const geocodeError = new Error('Geocoding service unavailable');
vi.mocked(geocodingService.geocodeAddress).mockRejectedValue(geocodeError);

const response = await supertest(app)
.post('/api/system/geocode')
.send({ address: 'Any Address' });

expect(response.status).toBe(500);
});

@@ -219,10 +265,9 @@ describe('System Routes (/api/system)', () => {
const response = await supertest(app)
.post('/api/system/geocode')
.send({ not_address: 'Victoria, BC' });

expect(response.status).toBe(400);
// Accept flexible validation messages
expect(response.body.errors[0].message).toBeTruthy();
// Zod validation error message can vary slightly depending on configuration or version
expect(response.body.errors[0].message).toMatch(/An address string is required|Required/i);
});
});
});
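
The rewrite trades five near-identical tests for one `it.each` table plus a single `setupExecMock` helper: each row states only the exec outcome and the expected HTTP result, and the assertion body is written once. The shape, reduced to a self-contained sketch (scenario data illustrative; the regex is the one used by the route):

```ts
import { describe, it, expect } from 'vitest';

// Table-driven tests: cases are data, the test body is shared.
describe('pm2 status mapping', () => {
  const cases = [
    { description: 'online maps to success', stdout: '│ status │ online │', expected: true },
    { description: 'stopped maps to failure', stdout: '│ status │ stopped │', expected: false },
  ];

  it.each(cases)('$description', ({ stdout, expected }) => {
    const isOnline = /│ status\s+│ online\s+│/m.test(stdout);
    expect(isOnline).toBe(expected);
  });
});
```

Note also why `systemRouter` moves below the `vi.mock` calls in the diff: `vi.mock` factories are hoisted, but keeping the import after them makes the dependency on the mocked modules explicit and avoids accidentally capturing the real `exec`.
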
@@ -1,4 +1,4 @@
// src/routes/system.ts
// src/routes/system.routes.ts
import { Router, Request, Response, NextFunction } from 'express';
import { exec } from 'child_process';
import { z } from 'zod';
@@ -8,7 +8,7 @@ import { validateRequest } from '../middleware/validation.middleware';

const router = Router();

// Helper for consistent required string validation
// Helper for consistent required string validation (handles missing/null/empty)
const requiredString = (message: string) =>
z.preprocess((val) => val ?? '', z.string().min(1, message));

@@ -18,66 +18,62 @@ const geocodeSchema = z.object({
}),
});

// An empty schema for routes that do not expect any input, to maintain a consistent validation pattern.
const emptySchema = z.object({});

/**
* Checks the status of the 'flyer-crawler-api' process managed by PM2.
* This is intended for development and diagnostic purposes.
*/
router.get(
'/pm2-status',
validateRequest(emptySchema),
(req: Request, res: Response, next: NextFunction) => {
try {
// The name 'flyer-crawler-api' comes from ecosystem.config.cjs
exec('pm2 describe flyer-crawler-api', (error, stdout, stderr) => {
// Defensive: Ensure we don't try to send a response twice or crash if req is closed
if (res.headersSent) return;

const processNotFound =
stdout?.includes("doesn't exist") || stderr?.includes("doesn't exist");

if (error) {
if (processNotFound) {
// Warn instead of Error to keep logs cleaner during known states
logger.warn('[API /pm2-status] PM2 process "flyer-crawler-api" not found.');
return res.json({
success: false,
message: 'Application process is not running under PM2.',
});
}

// Log concise error
logger.error({ msg: 'PM2 exec error', error: error.message });
return next(error);
// The name 'flyer-crawler-api' comes from your ecosystem.config.cjs file.
exec('pm2 describe flyer-crawler-api', (error, stdout, stderr) => {
if (error) {
// 'pm2 describe' exits with an error if the process is not found.
// We can treat this as a "fail" status for our check.
if (stdout && stdout.includes("doesn't exist")) {
logger.warn('[API /pm2-status] PM2 process "flyer-crawler-api" not found.');
return res.json({
success: false,
message: 'Application process is not running under PM2.',
});
}
logger.error(
{ error: stderr || error.message },
'[API /pm2-status] Error executing pm2 describe:',
);
return next(error);
}

if (stderr && stderr.trim().length > 0) {
logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr');
return next(new Error(`PM2 command produced an error: ${stderr}`));
}
// Check if there was output to stderr, even if the exit code was 0 (success).
// This handles warnings or non-fatal errors that should arguably be treated as failures in this context.
if (stderr && stderr.trim().length > 0) {
logger.error({ stderr }, '[API /pm2-status] PM2 executed but produced stderr:');
return next(new Error(`PM2 command produced an error: ${stderr}`));
}

const isOnline = /│ status\s+│ online\s+│/m.test(stdout);
const message = isOnline
? 'Application is online and running under PM2.'
: 'Application process exists but is not online.';

res.json({ success: isOnline, message });
});
} catch (syncError) {
// Catch synchronous errors in starting exec (extremely rare but good for defensive coding)
next(syncError);
}
// If the command succeeds, we can parse stdout to check the status.
const isOnline = /│ status\s+│ online\s+│/m.test(stdout);
const message = isOnline
? 'Application is online and running under PM2.'
: 'Application process exists but is not online.';
res.json({ success: isOnline, message });
});
},
);

/**
* POST /api/system/geocode - Geocodes a given address string.
* This acts as a secure proxy to the Google Maps Geocoding API.
*/
router.post(
'/geocode',
validateRequest(geocodeSchema),
async (req: Request, res: Response, next: NextFunction) => {
// Type casting logic as per ADR-003
// Infer type and cast request object as per ADR-003
type GeocodeRequest = z.infer<typeof geocodeSchema>;
const {
body: { address },
@@ -87,6 +83,7 @@ router.post(
const coordinates = await geocodingService.geocodeAddress(address, req.log);

if (!coordinates) {
// This check remains, but now it only fails if BOTH services fail.
return res.status(404).json({ message: 'Could not geocode the provided address.' });
}
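
The route distinguishes three exec outcomes: an error whose output says the process doesn't exist (reported as a clean `success: false`), any other error or stderr output (forwarded to the error handler as a 500), and a successful describe whose table is scanned for an online row. That decision table, distilled into a pure function for clarity (the function name and return shape are hypothetical; the messages and regex are from the route above):

```ts
type Pm2Status = { success: boolean; message: string } | { httpError: string };

// Pure distillation of the /pm2-status callback logic above.
function interpretPm2Describe(error: Error | null, stdout: string, stderr: string): Pm2Status {
  const notFound = stdout.includes("doesn't exist");
  if (error) {
    if (notFound) {
      return { success: false, message: 'Application process is not running under PM2.' };
    }
    return { httpError: error.message }; // handed to next(error) -> 500
  }
  if (stderr && stderr.trim().length > 0) {
    return { httpError: `PM2 command produced an error: ${stderr}` };
  }
  const isOnline = /│ status\s+│ online\s+│/m.test(stdout);
  return {
    success: isOnline,
    message: isOnline
      ? 'Application is online and running under PM2.'
      : 'Application process exists but is not online.',
  };
}
```
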
@@ -2,6 +2,7 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as aiApiClient from './aiApiClient';
import { AiAnalysisService } from './aiAnalysisService';
import { createMockFlyerItem } from '../tests/utils/mockFactories';

// Mock the dependencies
vi.mock('./aiApiClient');
@@ -56,7 +57,7 @@ describe('AiAnalysisService', () => {
json: () => Promise.resolve(mockResponse),
} as Response);

const result = await service.searchWeb([]);
const result = await service.searchWeb([createMockFlyerItem({ item: 'test' })]);

expect(result.text).toBe('Search results');
expect(result.sources).toEqual([{ uri: 'https://example.com', title: 'Example' }]);
@@ -68,7 +69,7 @@ describe('AiAnalysisService', () => {
json: () => Promise.resolve(mockResponse),
} as Response);

const result = await service.searchWeb([]);
const result = await service.searchWeb([createMockFlyerItem({ item: 'test' })]);

expect(result.text).toBe('Search results');
expect(result.sources).toEqual([]);
@@ -83,7 +84,7 @@ describe('AiAnalysisService', () => {
json: () => Promise.resolve(mockResponse),
} as Response);

const result = await service.searchWeb([]);
const result = await service.searchWeb([createMockFlyerItem({ item: 'test' })]);

expect(result.sources).toEqual([{ uri: '', title: 'Untitled' }]);
});
@@ -92,7 +93,9 @@ describe('AiAnalysisService', () => {
const apiError = new Error('API is down');
vi.mocked(aiApiClient.searchWeb).mockRejectedValue(apiError);

await expect(service.searchWeb([])).rejects.toThrow(apiError);
await expect(service.searchWeb([createMockFlyerItem({ item: 'test' })])).rejects.toThrow(
apiError,
);
});
});

@@ -42,9 +42,11 @@ export class AiAnalysisService {
*/
async searchWeb(items: FlyerItem[]): Promise<GroundedResponse> {
logger.info('[AiAnalysisService] searchWeb called.');
// Construct a query string from the item names.
const query = items.map((item) => item.item).join(', ');
// The API client returns a specific shape that we need to await the JSON from
const response: { text: string; sources: RawSource[] } = await aiApiClient
.searchWeb(items)
.searchWeb(query)
.then((res) => res.json());
// Normalize sources to a consistent format.
const mappedSources = (response.sources || []).map(
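
The service now flattens the FlyerItem array into a comma-separated query string before handing it to the API client, so the client deals only in strings. A small sketch of that transformation (FlyerItem reduced to the single field read here):

```ts
type FlyerItem = { item: string }; // trimmed to the field searchWeb reads

const toSearchQuery = (items: FlyerItem[]): string =>
  items.map((item) => item.item).join(', ');

console.log(toSearchQuery([{ item: 'chicken breast' }, { item: 'avocados' }]));
// "chicken breast, avocados"
```
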
@@ -282,15 +282,15 @@ describe('AI API Client (Network Mocking with MSW)', () => {
});

describe('searchWeb', () => {
it('should send items as JSON in the body', async () => {
const items = [createMockFlyerItem({ item: 'search me' })];
await aiApiClient.searchWeb(items, undefined, 'test-token');
it('should send query as JSON in the body', async () => {
const query = 'search me';
await aiApiClient.searchWeb(query, undefined, 'test-token');

expect(requestSpy).toHaveBeenCalledTimes(1);
const req = requestSpy.mock.calls[0][0];

expect(req.endpoint).toBe('search-web');
expect(req.body).toEqual({ items });
expect(req.body).toEqual({ query });
expect(req.headers.get('Authorization')).toBe('Bearer test-token');
});
});

@@ -135,7 +135,7 @@ export const getDeepDiveAnalysis = async (
};

export const searchWeb = async (
items: Partial<FlyerItem>[],
query: string,
signal?: AbortSignal,
tokenOverride?: string,
): Promise<Response> => {
@@ -144,7 +144,7 @@ export const searchWeb = async (
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ items }),
body: JSON.stringify({ query }),
signal,
},
{ tokenOverride, signal },
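
With the signature change, `searchWeb` takes the prepared query string as its first parameter, and the request body changes from `{ items }` to `{ query }`. A hedged usage sketch (error handling and endpoint wiring assumed to live elsewhere, as in the client above):

```ts
// Assumes searchWeb(query, signal?, tokenOverride?) exported from aiApiClient above.
import { searchWeb } from './aiApiClient';

async function runSearch(): Promise<void> {
  const controller = new AbortController();
  const response = await searchWeb('chicken breast, avocados', controller.signal);
  const payload = await response.json(); // { text, sources } per the service above
  console.log(payload.text);
}
```
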
@@ -624,14 +624,10 @@ describe('User DB Service', () => {
);
});

it('should throw NotFoundError if token is not found', async () => {
it('should return undefined if token is not found', async () => {
mockPoolInstance.query.mockResolvedValue({ rows: [], rowCount: 0 });
await expect(userRepo.findUserByRefreshToken('a-token', mockLogger)).rejects.toThrow(
NotFoundError,
);
await expect(userRepo.findUserByRefreshToken('a-token', mockLogger)).rejects.toThrow(
'User not found for the given refresh token.',
);
const result = await userRepo.findUserByRefreshToken('a-token', mockLogger);
expect(result).toBeUndefined();
});

it('should throw a generic error if the database query fails', async () => {

@@ -52,10 +52,7 @@ export class UserRepository {
);
return res.rows[0];
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, email },
'Database error in findUserByEmail',
);
logger.error({ err: error, email }, 'Database error in findUserByEmail');
throw new Error('Failed to retrieve user from database.');
}
}
@@ -130,10 +127,7 @@ export class UserRepository {
throw new UniqueConstraintError('A user with this email address already exists.');
}
// The withTransaction helper logs the rollback, so we just log the context here.
logger.error(
{ err: error instanceof Error ? error.message : error, email },
'Error during createUser transaction',
);
logger.error({ err: error, email }, 'Error during createUser transaction');
throw new Error('Failed to create user in database.');
});
}
@@ -188,10 +182,7 @@ export class UserRepository {

return authableProfile;
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, email },
'Database error in findUserWithProfileByEmail',
);
logger.error({ err: error, email }, 'Database error in findUserWithProfileByEmail');
throw new Error('Failed to retrieve user with profile from database.');
}
}
@@ -215,7 +206,7 @@ export class UserRepository {
} catch (error) {
if (error instanceof NotFoundError) throw error;
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in findUserById',
);
throw new Error('Failed to retrieve user by ID from database.');
@@ -242,7 +233,7 @@ export class UserRepository {
} catch (error) {
if (error instanceof NotFoundError) throw error;
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in findUserWithPasswordHashById',
);
throw new Error('Failed to retrieve user with sensitive data by ID from database.');
@@ -291,7 +282,7 @@ export class UserRepository {
throw error;
}
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in findUserProfileById',
);
throw new Error('Failed to retrieve user profile from database.');
@@ -340,7 +331,7 @@ export class UserRepository {
throw error;
}
logger.error(
{ err: error instanceof Error ? error.message : error, userId, profileData },
{ err: error, userId, profileData },
'Database error in updateUserProfile',
);
throw new Error('Failed to update user profile in database.');
@@ -372,7 +363,7 @@ export class UserRepository {
throw error;
}
logger.error(
{ err: error instanceof Error ? error.message : error, userId, preferences },
{ err: error, userId, preferences },
'Database error in updateUserPreferences',
);
throw new Error('Failed to update user preferences in database.');
@@ -393,7 +384,7 @@ export class UserRepository {
);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in updateUserPassword',
);
throw new Error('Failed to update user password in database.');
@@ -408,9 +399,9 @@ export class UserRepository {
async deleteUserById(userId: string, logger: Logger): Promise<void> {
try {
await this.db.query('DELETE FROM public.users WHERE user_id = $1', [userId]);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in deleteUserById',
);
throw new Error('Failed to delete user from database.');
@@ -431,7 +422,7 @@ export class UserRepository {
);
} catch (error) {
logger.error(
{ err: error instanceof Error ? error.message : error, userId },
{ err: error, userId },
'Database error in saveRefreshToken',
);
throw new Error('Failed to save refresh token.');
@@ -443,23 +434,21 @@ export class UserRepository {
* @param refreshToken The refresh token to look up.
* @returns A promise that resolves to the user object (id, email) or undefined if not found.
*/
// prettier-ignore
async findUserByRefreshToken(refreshToken: string, logger: Logger): Promise<{ user_id: string; email: string; }> {
async findUserByRefreshToken(
refreshToken: string,
logger: Logger,
): Promise<{ user_id: string; email: string } | undefined> {
try {
const res = await this.db.query<{ user_id: string; email: string }>(
'SELECT user_id, email FROM public.users WHERE refresh_token = $1',
[refreshToken]
[refreshToken],
);
if ((res.rowCount ?? 0) === 0) {
throw new NotFoundError('User not found for the given refresh token.');
return undefined;
}
return res.rows[0];
} catch (error) {
if (error instanceof NotFoundError) throw error;
logger.error(
{ err: error instanceof Error ? error.message : error },
'Database error in findUserByRefreshToken',
);
logger.error({ err: error }, 'Database error in findUserByRefreshToken');
throw new Error('Failed to find user by refresh token.'); // Generic error for other failures
}
}
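
Returning `undefined` instead of throwing NotFoundError moves the "token not found" case out of exception control flow and into the return type, so callers are forced by the compiler to branch. A self-contained sketch of the caller-side pattern this enables (the handler and types here are hypothetical; the repository signature matches the hunk above):

```ts
type Logger = { error: (obj: unknown, msg: string) => void }; // minimal stand-in

type UserRepoLike = {
  findUserByRefreshToken(
    token: string,
    logger: Logger,
  ): Promise<{ user_id: string; email: string } | undefined>;
};

async function rotateSession(
  repo: UserRepoLike,
  logger: Logger,
  refreshToken: string,
): Promise<{ user_id: string; email: string }> {
  const user = await repo.findUserByRefreshToken(refreshToken, logger);
  if (!user) {
    // Absence is now a normal outcome, not an exception from the repository.
    throw new Error('Invalid refresh token.'); // e.g. mapped to a 401 upstream
  }
  return user;
}
```
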
@@ -474,10 +463,7 @@ export class UserRepository {
|
||||
refreshToken,
|
||||
]);
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
{ err: error instanceof Error ? error.message : error },
|
||||
'Database error in deleteRefreshToken',
|
||||
);
|
||||
logger.error({ err: error }, 'Database error in deleteRefreshToken');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -501,7 +487,7 @@ export class UserRepository {
         throw new ForeignKeyConstraintError('The specified user does not exist.');
       }
       logger.error(
-        { err: error instanceof Error ? error.message : error, userId },
+        { err: error, userId },
         'Database error in createPasswordResetToken',
       );
       throw new Error('Failed to create password reset token.');
@@ -521,7 +507,7 @@ export class UserRepository {
       return res.rows;
     } catch (error) {
       logger.error(
-        { err: error instanceof Error ? error.message : error },
+        { err: error },
         'Database error in getValidResetTokens',
       );
       throw new Error('Failed to retrieve valid reset tokens.');
@@ -538,7 +524,7 @@ export class UserRepository {
       await this.db.query('DELETE FROM public.password_reset_tokens WHERE token_hash = $1', [tokenHash]);
     } catch (error) {
       logger.error(
-        { err: error instanceof Error ? error.message : error, tokenHash },
+        { err: error, tokenHash },
         'Database error in deleteResetToken',
       );
     }
@@ -559,10 +545,7 @@ export class UserRepository {
       );
       return res.rowCount ?? 0;
     } catch (error) {
-      logger.error(
-        { err: error instanceof Error ? error.message : error },
-        'Database error in deleteExpiredResetTokens',
-      );
+      logger.error({ err: error }, 'Database error in deleteExpiredResetTokens');
       throw new Error('Failed to delete expired password reset tokens.');
     }
   }
@@ -581,10 +564,7 @@ export class UserRepository {
       if (error instanceof Error && 'code' in error && error.code === '23503') {
         throw new ForeignKeyConstraintError('One or both users do not exist.');
       }
-      logger.error(
-        { err: error instanceof Error ? error.message : error, followerId, followingId },
-        'Database error in followUser',
-      );
+      logger.error({ err: error, followerId, followingId }, 'Database error in followUser');
       throw new Error('Failed to follow user.');
     }
   }
@@ -601,10 +581,7 @@ export class UserRepository {
         [followerId, followingId],
       );
     } catch (error) {
-      logger.error(
-        { err: error instanceof Error ? error.message : error, followerId, followingId },
-        'Database error in unfollowUser',
-      );
+      logger.error({ err: error, followerId, followingId }, 'Database error in unfollowUser');
       throw new Error('Failed to unfollow user.');
     }
   }
@@ -635,10 +612,7 @@ export class UserRepository {
       const res = await this.db.query<ActivityLogItem>(query, [userId, limit, offset]);
       return res.rows;
     } catch (error) {
-      logger.error(
-        { err: error instanceof Error ? error.message : error, userId, limit, offset },
-        'Database error in getUserFeed',
-      );
+      logger.error({ err: error, userId, limit, offset }, 'Database error in getUserFeed');
       throw new Error('Failed to retrieve user feed.');
     }
   }
@@ -660,10 +634,7 @@ export class UserRepository {
       );
       return res.rows[0];
     } catch (error) {
-      logger.error(
-        { err: error instanceof Error ? error.message : error, queryData },
-        'Database error in logSearchQuery',
-      );
+      logger.error({ err: error, queryData }, 'Database error in logSearchQuery');
       throw new Error('Failed to log search query.');
     }
   }
@@ -698,7 +669,7 @@ export async function exportUserData(userId: string, logger: Logger): Promise<{
       });
     } catch (error) {
       logger.error(
-        { err: error instanceof Error ? error.message : error, userId },
+        { err: error, userId },
         'Database error in exportUserData',
       );
       throw new Error('Failed to export user data.');

@@ -45,7 +45,7 @@ export class GoogleGeocodingService {
       return null;
     } catch (error) {
       logger.error(
-        { err: error instanceof Error ? error.message : error, address },
+        { err: error, address },
         '[GoogleGeocodingService] An error occurred while calling the Google Maps API.',
       );
       throw error; // Re-throw to allow the calling service to handle the failure (e.g., by falling back).

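
The re-throw above exists so a composite geocoding service can fall back to another provider. A caller-side sketch of that fallback, assuming the provider classes expose a `geocode(address, logger)` method and that a secondary provider exists (neither is shown in this diff):

```ts
// Hypothetical composite service: try Google first, degrade gracefully on failure.
class GeocodingService {
  constructor(
    private primary: GoogleGeocodingService,
    private fallback: NominatimGeocodingService, // assumed secondary provider
  ) {}

  async geocode(address: string, logger: Logger) {
    try {
      return await this.primary.geocode(address, logger);
    } catch (error) {
      // The primary re-threw instead of swallowing, so we can fall back here.
      logger.warn({ err: error, address }, 'Primary geocoder failed; using fallback.');
      return this.fallback.geocode(address, logger);
    }
  }
}
```
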
@@ -1,5 +1,5 @@
 // src/tests/integration/admin.integration.test.ts
-import { describe, it, expect, beforeAll, beforeEach } from 'vitest';
+import { describe, it, expect, beforeAll, beforeEach, afterAll } from 'vitest';
 import * as apiClient from '../../services/apiClient';
 import { getPool } from '../../services/db/connection.db';
 import type { UserProfile } from '../../types';

@@ -55,7 +55,7 @@ describe('AI API Routes Integration Tests', () => {
     const mockImageFile = new File(['content'], 'test.jpg', { type: 'image/jpeg' });
     const response = await aiApiClient.extractAddressFromImage(mockImageFile, authToken);
     const result = await response.json();
-    expect(result.address).toBe('123 AI Street, Server City');
+    expect(result.address).toBe('not identified');
   });

   it('POST /api/ai/extract-logo should return a stubbed response', async () => {
@@ -66,24 +66,28 @@ describe('AI API Routes Integration Tests', () => {
   });

   it('POST /api/ai/quick-insights should return a stubbed insight', async () => {
-    const response = await aiApiClient.getQuickInsights([], undefined, authToken);
+    const response = await aiApiClient.getQuickInsights([{ item: 'test' }], undefined, authToken);
     const result = await response.json();
     expect(result.text).toBe('This is a server-generated quick insight: buy the cheap stuff!');
   });

   it('POST /api/ai/deep-dive should return a stubbed analysis', async () => {
-    const response = await aiApiClient.getDeepDiveAnalysis([], undefined, authToken);
+    const response = await aiApiClient.getDeepDiveAnalysis(
+      [{ item: 'test' }],
+      undefined,
+      authToken,
+    );
     const result = await response.json();
     expect(result.text).toBe('This is a server-generated deep dive analysis. It is very detailed.');
   });

   it('POST /api/ai/search-web should return a stubbed search result', async () => {
-    const response = await aiApiClient.searchWeb([], undefined, authToken);
+    const response = await aiApiClient.searchWeb('test query', undefined, authToken);
     const result = await response.json();
     expect(result).toEqual({ text: 'The web says this is good.', sources: [] });
   });

-  it('POST /api/ai/plan-trip should return a stubbed trip plan', async () => {
+  it('POST /api/ai/plan-trip should return an error as the feature is disabled', async () => {
     // The GeolocationCoordinates type requires more than just lat/lng.
     // We create a complete mock object to satisfy the type.
     const mockLocation: TestGeolocationCoordinates = {
@@ -94,20 +98,36 @@ describe('AI API Routes Integration Tests', () => {
       altitudeAccuracy: null,
       heading: null,
       speed: null,
-      toJSON: () => ({}),
+      toJSON: function () {
+        return {
+          latitude: this.latitude,
+          longitude: this.longitude,
+          accuracy: this.accuracy,
+          altitude: this.altitude,
+          altitudeAccuracy: this.altitudeAccuracy,
+          heading: this.heading,
+          speed: this.speed,
+        };
+      },
     };
+    const mockStore = {
+      name: 'Test Store for Trip',
+      store_id: 1,
+      created_at: new Date().toISOString(),
+      updated_at: new Date().toISOString(),
+    };
     const response = await aiApiClient.planTripWithMaps(
       [],
       undefined,
       mockStore,
       mockLocation,
       undefined,
       authToken,
     );
-    const result = await response.json();
-    expect(result).toBeDefined();
-    // The AI service is mocked in unit tests, but in integration it might be live.
-    // For now, we just check that we get a text response.
-    expect(result.text).toBeTypeOf('string');
+    // The service for this endpoint is disabled and throws an error, which results in a 500.
+    expect(response.ok).toBe(false);
+    expect(response.status).toBe(500);
+    const errorResult = await response.json();
+    expect(errorResult.message).toContain('planTripWithMaps');
   });

   it('POST /api/ai/generate-image should reject because it is not implemented', async () => {

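
The rewritten `toJSON` matters because `JSON.stringify` invokes it when the mock location is serialized into the request body; the old `() => ({})` stub silently sent an empty object to the server. A quick illustration of the behavior:

```ts
const coords = {
  latitude: 43.65,
  longitude: -79.38,
  toJSON: function () {
    return { latitude: this.latitude, longitude: this.longitude };
  },
};

// JSON.stringify honours toJSON, so the payload keeps the coordinates:
JSON.stringify({ location: coords });
// => '{"location":{"latitude":43.65,"longitude":-79.38}}'

// With the old stub `toJSON: () => ({})`, the same call would produce:
// => '{"location":{}}'
```
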
@@ -60,7 +60,11 @@ describe('Flyer Processing Background Job Integration Test', () => {
     // Arrange: Load a mock flyer PDF.
     const imagePath = path.resolve(__dirname, '../assets/test-flyer-image.jpg');
     const imageBuffer = await fs.readFile(imagePath);
-    const mockImageFile = new File([imageBuffer], 'test-flyer-image.jpg', { type: 'image/jpeg' });
+    // Create a unique buffer and filename for each test run to ensure a unique checksum.
+    // This prevents a 409 Conflict error when the second test runs.
+    const uniqueContent = Buffer.concat([imageBuffer, Buffer.from(Date.now().toString())]);
+    const uniqueFileName = `test-flyer-image-${Date.now()}.jpg`;
+    const mockImageFile = new File([uniqueContent], uniqueFileName, { type: 'image/jpeg' });
     const checksum = await generateFileChecksum(mockImageFile);

     // Act 1: Upload the file to start the background job.

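
The appended timestamp works because the checksum is computed over the file's bytes, so any change to the content yields a new digest and sidesteps the duplicate-upload check. A minimal sketch of what a `generateFileChecksum` helper along these lines could look like (the project's actual implementation is not shown in this diff), assuming a SHA-256 digest via the Web Crypto API:

```ts
// Hypothetical helper: hash a File's bytes with SHA-256 and hex-encode the digest.
async function generateFileChecksum(file: File): Promise<string> {
  const buffer = await file.arrayBuffer();
  const digest = await crypto.subtle.digest('SHA-256', buffer);
  return Array.from(new Uint8Array(digest))
    .map((byte) => byte.toString(16).padStart(2, '0'))
    .join('');
}
```
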
@@ -30,7 +30,7 @@ describe('Public API Routes Integration Tests', () => {
     // which also handles activity logging correctly.
     const { user: createdUser } = await createAndLoginUser({
       email: userEmail,
-      password: 'test-hash',
+      password: 'a-Very-Strong-Password-123!',
       fullName: 'Public Routes Test User',
     });
     testUser = createdUser;
@@ -54,9 +54,10 @@ describe('Public API Routes Integration Tests', () => {
     testFlyer = flyerRes.rows[0];

     // Add an item to the flyer
-    await pool.query(`INSERT INTO public.flyer_items (flyer_id, item) VALUES ($1, 'Test Item')`, [
-      testFlyer.flyer_id,
-    ]);
+    await pool.query(
+      `INSERT INTO public.flyer_items (flyer_id, item, price_display, quantity) VALUES ($1, 'Test Item', '$0.00', 'each')`,
+      [testFlyer.flyer_id],
+    );
   });

   afterAll(async () => {

@@ -1,4 +1,5 @@
+/// <reference types="vitest" />
 // vitest.config.ts
 import { defineConfig } from 'vitest/config';

 export default defineConfig({
@@ -6,12 +7,11 @@ export default defineConfig({
     globals: true,
     environment: 'jsdom',
     // This setup file is where we can add global test configurations
-    setupFiles: [
-      './src/tests/setup/tests-setup-unit.ts',
-      './src/tests/setup/mockHooks.ts',
-      './src/tests/setup/mockComponents.tsx'
-    ],
+    setupFiles: ['./src/tests/setup/tests-setup-unit.ts'],
+    // , './src/tests/setup/mockHooks.ts'
+    // removed this from above: './src/tests/setup/mockComponents.tsx'
+
     // This line is the key fix: it tells Vitest to include the type definitions
-    include: ['src/**/*.test.tsx'],
+    include: ['src/**/*.test.{ts,tsx}'],
   },
 });
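
One practical effect of the widened `include` glob: plain `.ts` suites (repositories, services) are now collected alongside `.tsx` component tests. A quick way to preview what such a glob matches, sketched with the `fast-glob` package (an assumption; any glob library with brace-expansion support behaves the same):

```ts
import fg from 'fast-glob';

// *.test.{ts,tsx} brace-expands to match both *.test.ts and *.test.tsx.
const oldMatches = await fg('src/**/*.test.tsx');
const newMatches = await fg('src/**/*.test.{ts,tsx}');

// Files gained by the new pattern, i.e. the previously skipped .ts suites.
const gained = newMatches.filter((file) => !oldMatches.includes(file));
console.log(gained);
```
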